1 /*
2  * mini-x86.c: x86 backend for the Mono code generator
3  *
4  * Authors:
5  *   Paolo Molaro (lupus@ximian.com)
6  *   Dietmar Maurer (dietmar@ximian.com)
7  *
8  * (C) 2003 Ximian, Inc.
9  */
10 #include "mini.h"
11 #include <string.h>
12 #include <math.h>
13
14 #include <mono/metadata/appdomain.h>
15 #include <mono/metadata/debug-helpers.h>
16 #include <mono/metadata/profiler-private.h>
17
18 #include "mini-x86.h"
19 #include "inssel.h"
20 #include "cpu-pentium.h"
21
22 const char*
23 mono_arch_regname (int reg) {
24         switch (reg) {
25         case X86_EAX: return "%eax";
26         case X86_EBX: return "%ebx";
27         case X86_ECX: return "%ecx";
28         case X86_EDX: return "%edx";
29         case X86_ESP: return "%esp";
30         case X86_EBP: return "%ebp";
31         case X86_EDI: return "%edi";
32         case X86_ESI: return "%esi";
33         }
34         return "unknown";
35 }
36
37 typedef struct {
38         guint16 size;
39         guint16 offset;
40         guint8  pad;
41 } MonoJitArgumentInfo;
42
43 /*
44  * arch_get_argument_info:
45  * @csig:  a method signature
46  * @param_count: the number of parameters to consider
47  * @arg_info: an array to store the resulting info
48  *
49  * Gathers information on parameters such as size, alignment and
50  * padding. arg_info should be large enough to hold param_count + 1 entries.
51  *
52  * Returns the size of the activation frame.
53  */
54 static int
55 arch_get_argument_info (MonoMethodSignature *csig, int param_count, MonoJitArgumentInfo *arg_info)
56 {
57         int k, frame_size = 0;
58         int size, align, pad;
59         int offset = 8;
60
61         if (MONO_TYPE_ISSTRUCT (csig->ret)) { 
62                 frame_size += sizeof (gpointer);
63                 offset += 4;
64         }
65
66         arg_info [0].offset = offset;
67
68         if (csig->hasthis) {
69                 frame_size += sizeof (gpointer);
70                 offset += 4;
71         }
72
73         arg_info [0].size = frame_size;
74
75         for (k = 0; k < param_count; k++) {
76                 
77                 if (csig->pinvoke)
78                         size = mono_type_native_stack_size (csig->params [k], &align);
79                 else
80                         size = mono_type_stack_size (csig->params [k], &align);
81
82                 /* ignore alignment for now */
83                 align = 1;
84
85                 frame_size += pad = (align - (frame_size & (align - 1))) & (align - 1); 
86                 arg_info [k].pad = pad;
87                 frame_size += size;
88                 arg_info [k + 1].pad = 0;
89                 arg_info [k + 1].size = size;
90                 offset += pad;
91                 arg_info [k + 1].offset = offset;
92                 offset += size;
93         }
94
95         align = MONO_ARCH_FRAME_ALIGNMENT;
96         frame_size += pad = (align - (frame_size & (align - 1))) & (align - 1);
97         arg_info [k].pad = pad;
98
99         return frame_size;
100 }
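/*
 * Usage sketch (illustrative; mirrors what enter_method () below does): given a
 * signature 'sig', the caller allocates param_count + 1 entries and reads the
 * %ebp-relative offsets back out of arg_info:
 *
 *   MonoJitArgumentInfo *arg_info;
 *   int frame_size;
 *
 *   arg_info = alloca (sizeof (MonoJitArgumentInfo) * (sig->param_count + 1));
 *   frame_size = arch_get_argument_info (sig, sig->param_count, arg_info);
 *   // arg_info [0].offset is the offset of 'this' (or of the first argument
 *   // when there is no 'this'); arg_info [k + 1].offset / .size describe
 *   // parameter k.
 */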
101
102 static int indent_level = 0;
103
104 static void indent (int diff) {
105         int v = indent_level;
106         while (v-- > 0) {
107                 printf (". ");
108         }
109         indent_level += diff;
110 }
111
112 static void
113 enter_method (MonoMethod *method, char *ebp)
114 {
115         int i, j;
116         MonoClass *class;
117         MonoObject *o;
118         MonoJitArgumentInfo *arg_info;
119         MonoMethodSignature *sig;
120         char *fname;
121
122         fname = mono_method_full_name (method, TRUE);
123         indent (1);
124         printf ("ENTER: %s(", fname);
125         g_free (fname);
126         
127         if (((int)ebp & (MONO_ARCH_FRAME_ALIGNMENT - 1)) != 0) {
128                 g_error ("unaligned stack detected (%p)", ebp);
129         }
130
131         sig = method->signature;
132
133         arg_info = alloca (sizeof (MonoJitArgumentInfo) * (sig->param_count + 1));
134
135         arch_get_argument_info (sig, sig->param_count, arg_info);
136
137         if (MONO_TYPE_ISSTRUCT (method->signature->ret)) {
138                 g_assert (!method->signature->ret->byref);
139
140                 printf ("VALUERET:%p, ", *((gpointer *)(ebp + 8)));
141         }
142
143         if (method->signature->hasthis) {
144                 gpointer *this = (gpointer *)(ebp + arg_info [0].offset);
145                 if (method->klass->valuetype) {
146                         printf ("value:%p, ", *this);
147                 } else {
148                         o = *((MonoObject **)this);
149
150                         if (o) {
151                                 class = o->vtable->klass;
152
153                                 if (class == mono_defaults.string_class) {
154                                         printf ("this:[STRING:%p:%s], ", o, mono_string_to_utf8 ((MonoString *)o));
155                                 } else {
156                                         printf ("this:%p[%s.%s], ", o, class->name_space, class->name);
157                                 }
158                         } else 
159                                 printf ("this:NULL, ");
160                 }
161         }
162
163         for (i = 0; i < method->signature->param_count; ++i) {
164                 gpointer *cpos = (gpointer *)(ebp + arg_info [i + 1].offset);
165                 int size = arg_info [i + 1].size;
166
167                 MonoType *type = method->signature->params [i];
168                 
169                 if (type->byref) {
170                         printf ("[BYREF:%p], ", *cpos); 
171                 } else switch (type->type) {
172                         
173                 case MONO_TYPE_I:
174                 case MONO_TYPE_U:
175                         printf ("%p, ", (gpointer)*((int *)(cpos)));
176                         break;
177                 case MONO_TYPE_BOOLEAN:
178                 case MONO_TYPE_CHAR:
179                 case MONO_TYPE_I1:
180                 case MONO_TYPE_U1:
181                 case MONO_TYPE_I2:
182                 case MONO_TYPE_U2:
183                 case MONO_TYPE_I4:
184                 case MONO_TYPE_U4:
185                         printf ("%d, ", *((int *)(cpos)));
186                         break;
187                 case MONO_TYPE_STRING: {
188                         MonoString *s = *((MonoString **)cpos);
189                         if (s) {
190                                 g_assert (((MonoObject *)s)->vtable->klass == mono_defaults.string_class);
191                                 printf ("[STRING:%p:%s], ", s, mono_string_to_utf8 (s));
192                         } else 
193                                 printf ("[STRING:null], ");
194                         break;
195                 }
196                 case MONO_TYPE_CLASS:
197                 case MONO_TYPE_OBJECT: {
198                         o = *((MonoObject **)cpos);
199                         if (o) {
200                                 class = o->vtable->klass;
201                     
202                                 if (class == mono_defaults.string_class) {
203                                         printf ("[STRING:%p:%s], ", o, mono_string_to_utf8 ((MonoString *)o));
204                                 } else if (class == mono_defaults.int32_class) {
205                                         printf ("[INT32:%p:%d], ", o, *(gint32 *)((char *)o + sizeof (MonoObject)));
206                                 } else
207                                         printf ("[%s.%s:%p], ", class->name_space, class->name, o);
208                         } else {
209                                 printf ("%p, ", *((gpointer *)(cpos)));                         
210                         }
211                         break;
212                 }
213                 case MONO_TYPE_PTR:
214                 case MONO_TYPE_FNPTR:
215                 case MONO_TYPE_ARRAY:
216                 case MONO_TYPE_SZARRAY:
217                         printf ("%p, ", *((gpointer *)(cpos)));
218                         break;
219                 case MONO_TYPE_I8:
220                 case MONO_TYPE_U8:
221                         printf ("0x%016llx, ", *((gint64 *)(cpos)));
222                         break;
223                 case MONO_TYPE_R4:
224                         printf ("%f, ", *((float *)(cpos)));
225                         break;
226                 case MONO_TYPE_R8:
227                         printf ("%f, ", *((double *)(cpos)));
228                         break;
229                 case MONO_TYPE_VALUETYPE: 
230                         printf ("[");
231                         for (j = 0; j < size; j++)
232                                 printf ("%02x,", *((guint8*)cpos +j));
233                         printf ("], ");
234                         break;
235                 default:
236                         printf ("XX, ");
237                 }
238         }
239
240         printf (")\n");
241 }
242
243 static void
244 leave_method (MonoMethod *method, ...)
245 {
246         MonoType *type;
247         char *fname;
248         va_list ap;
249
250         va_start(ap, method);
251
252         fname = mono_method_full_name (method, TRUE);
253         indent (-1);
254         printf ("LEAVE: %s", fname);
255         g_free (fname);
256
257         type = method->signature->ret;
258
259 handle_enum:
260         switch (type->type) {
261         case MONO_TYPE_VOID:
262                 break;
263         case MONO_TYPE_BOOLEAN: {
264                 int eax = va_arg (ap, int);
265                 if (eax)
266                         printf ("TRUE:%d", eax);
267                 else 
268                         printf ("FALSE");
269                         
270                 break;
271         }
272         case MONO_TYPE_CHAR:
273         case MONO_TYPE_I1:
274         case MONO_TYPE_U1:
275         case MONO_TYPE_I2:
276         case MONO_TYPE_U2:
277         case MONO_TYPE_I4:
278         case MONO_TYPE_U4:
279         case MONO_TYPE_I:
280         case MONO_TYPE_U: {
281                 int eax = va_arg (ap, int);
282                 printf ("EAX=%d", eax);
283                 break;
284         }
285         case MONO_TYPE_STRING: {
286                 MonoString *s = va_arg (ap, MonoString *);
287
288                 if (s) {
289                         g_assert (((MonoObject *)s)->vtable->klass == mono_defaults.string_class);
290                         printf ("[STRING:%p:%s]", s, mono_string_to_utf8 (s));
291                 } else 
292                         printf ("[STRING:null]");
293                 break;
294         }
295         case MONO_TYPE_CLASS: 
296         case MONO_TYPE_OBJECT: {
297                 MonoObject *o = va_arg (ap, MonoObject *);
298
299                 if (o) {
300                         if (o->vtable->klass == mono_defaults.boolean_class) {
301                                 printf ("[BOOLEAN:%p:%d]", o, *((guint8 *)o + sizeof (MonoObject)));            
302                         } else if  (o->vtable->klass == mono_defaults.int32_class) {
303                                 printf ("[INT32:%p:%d]", o, *((gint32 *)((char *)o + sizeof (MonoObject))));    
304                         } else if  (o->vtable->klass == mono_defaults.int64_class) {
305                                 printf ("[INT64:%p:%lld]", o, *((gint64 *)((char *)o + sizeof (MonoObject))));  
306                         } else
307                                 printf ("[%s.%s:%p]", o->vtable->klass->name_space, o->vtable->klass->name, o);
308                 } else
309                         printf ("[OBJECT:%p]", o);
310                
311                 break;
312         }
313         case MONO_TYPE_PTR:
314         case MONO_TYPE_FNPTR:
315         case MONO_TYPE_ARRAY:
316         case MONO_TYPE_SZARRAY: {
317                 gpointer p = va_arg (ap, gpointer);
318                 printf ("EAX=%p", p);
319                 break;
320         }
321         case MONO_TYPE_I8: {
322                 gint64 l =  va_arg (ap, gint64);
323                 printf ("EAX/EDX=0x%16llx", l);
324                 break;
325         }
326         case MONO_TYPE_U8: {
327                 gint64 l =  va_arg (ap, gint64);
328                 printf ("EAX/EDX=0x%16llx", l);
329                 break;
330         }
331         case MONO_TYPE_R8: {
332                 double f = va_arg (ap, double);
333                 printf ("FP=%f\n", f);
334                 break;
335         }
336         case MONO_TYPE_VALUETYPE: 
337                 if (type->data.klass->enumtype) {
338                         type = type->data.klass->enum_basetype;
339                         goto handle_enum;
340                 } else {
341                         guint8 *p = va_arg (ap, gpointer);
342                         int j, size, align;
343                         size = mono_type_size (type, &align);
344                         printf ("[");
345                         for (j = 0; p && j < size; j++)
346                                 printf ("%02x,", p [j]);
347                         printf ("]");
348                 }
349                 break;
350         default:
351                 printf ("(unknown return type %x)", method->signature->ret->type);
352         }
353
354         printf ("\n");
355 }
356
357 static const guchar cpuid_impl [] = {
358         0x55,                           /* push   %ebp */
359         0x89, 0xe5,                     /* mov    %esp,%ebp */
360         0x53,                           /* push   %ebx */
361         0x8b, 0x45, 0x08,               /* mov    0x8(%ebp),%eax */
362         0x0f, 0xa2,                     /* cpuid   */
363         0x50,                           /* push   %eax */
364         0x8b, 0x45, 0x10,               /* mov    0x10(%ebp),%eax */
365         0x89, 0x18,                     /* mov    %ebx,(%eax) */
366         0x8b, 0x45, 0x14,               /* mov    0x14(%ebp),%eax */
367         0x89, 0x08,                     /* mov    %ecx,(%eax) */
368         0x8b, 0x45, 0x18,               /* mov    0x18(%ebp),%eax */
369         0x89, 0x10,                     /* mov    %edx,(%eax) */
370         0x58,                           /* pop    %eax */
371         0x8b, 0x55, 0x0c,               /* mov    0xc(%ebp),%edx */
372         0x89, 0x02,                     /* mov    %eax,(%edx) */
373         0x5b,                           /* pop    %ebx */
374         0xc9,                           /* leave   */
375         0xc3,                           /* ret     */
376 };
377
378 typedef void (*CpuidFunc) (int id, int* p_eax, int* p_ebx, int* p_ecx, int* p_edx);
379
380 static int 
381 cpuid (int id, int* p_eax, int* p_ebx, int* p_ecx, int* p_edx)
382 {
383         int have_cpuid = 0;
384         __asm__  __volatile__ (
385                 "pushfl\n"
386                 "popl %%eax\n"
387                 "movl %%eax, %%edx\n"
388                 "xorl $0x200000, %%eax\n"
389                 "pushl %%eax\n"
390                 "popfl\n"
391                 "pushfl\n"
392                 "popl %%eax\n"
393                 "xorl %%edx, %%eax\n"
394                 "andl $0x200000, %%eax\n"
395                 "movl %%eax, %0"
396                 : "=r" (have_cpuid)
397                 :
398                 : "%eax", "%edx"
399         );
400
401         if (have_cpuid) {
402                 CpuidFunc func = (CpuidFunc)cpuid_impl;
403                 func (id, p_eax, p_ebx, p_ecx, p_edx);
404                 /*
405                  * We use this approach because of issues with gcc and pic code, see:
406                  * http://gcc.gnu.org/cgi-bin/gnatsweb.pl?cmd=view%20audit-trail&database=gcc&pr=7329
407                 __asm__ __volatile__ ("cpuid"
408                         : "=a" (*p_eax), "=b" (*p_ebx), "=c" (*p_ecx), "=d" (*p_edx)
409                         : "a" (id));
410                 */
411                 return 1;
412         }
413         return 0;
414 }
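/*
 * Usage sketch (illustrative, assuming the CPU supports CPUID): leaf 0 returns the
 * highest supported leaf in EAX and the 12-byte vendor string in EBX, EDX, ECX (in
 * that order), e.g. "GenuineIntel" or "AuthenticAMD":
 *
 *   int eax, ebx, ecx, edx;
 *   char vendor [13];
 *
 *   if (cpuid (0, &eax, &ebx, &ecx, &edx)) {
 *           memcpy (vendor, &ebx, 4);
 *           memcpy (vendor + 4, &edx, 4);
 *           memcpy (vendor + 8, &ecx, 4);
 *           vendor [12] = 0;
 *   }
 *
 * mono_arch_cpu_optimizazions () below uses leaf 1 in the same way to read the
 * feature flags from EDX.
 */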
415
416 /*
417  * Initialize the cpu to execute managed code.
418  */
419 void
420 mono_arch_cpu_init (void)
421 {
422         guint16 fpcw;
423
424         /* spec compliance requires running with double precision */
425         __asm__  __volatile__ ("fnstcw %0\n": "=m" (fpcw));
426         fpcw &= ~X86_FPCW_PRECC_MASK;
427         fpcw |= X86_FPCW_PREC_DOUBLE;
428         __asm__  __volatile__ ("fldcw %0\n": : "m" (fpcw));
429         __asm__  __volatile__ ("fnstcw %0\n": "=m" (fpcw));
430
431 }
432
433 /*
434  * This function returns the optimizations supported on this cpu.
435  */
436 guint32
437 mono_arch_cpu_optimizazions (guint32 *exclude_mask)
438 {
439         int eax, ebx, ecx, edx;
440         guint32 opts = 0;
441         
442         *exclude_mask = 0;
443         /* Feature Flags function, flags returned in EDX. */
444         if (cpuid (1, &eax, &ebx, &ecx, &edx)) {
445                 if (edx & (1 << 15)) {
446                         opts |= MONO_OPT_CMOV;
447                         if (edx & 1)
448                                 opts |= MONO_OPT_FCMOV;
449                         else
450                                 *exclude_mask |= MONO_OPT_FCMOV;
451                 } else
452                         *exclude_mask |= MONO_OPT_CMOV;
453         }
454         return opts;
455 }
456
457 static gboolean
458 is_regsize_var (MonoType *t) {
459         if (t->byref)
460                 return TRUE;
461         switch (t->type) {
462         case MONO_TYPE_I4:
463         case MONO_TYPE_U4:
464         case MONO_TYPE_I:
465         case MONO_TYPE_U:
466                 return TRUE;
467         case MONO_TYPE_OBJECT:
468         case MONO_TYPE_STRING:
469         case MONO_TYPE_CLASS:
470         case MONO_TYPE_SZARRAY:
471         case MONO_TYPE_ARRAY:
472                 return TRUE;
473         case MONO_TYPE_VALUETYPE:
474                 if (t->data.klass->enumtype)
475                         return is_regsize_var (t->data.klass->enum_basetype);
476                 return FALSE;
477         }
478         return FALSE;
479 }
480
481 GList *
482 mono_arch_get_allocatable_int_vars (MonoCompile *cfg)
483 {
484         GList *vars = NULL;
485         int i;
486
487         for (i = 0; i < cfg->num_varinfo; i++) {
488                 MonoInst *ins = cfg->varinfo [i];
489                 MonoMethodVar *vmv = MONO_VARINFO (cfg, i);
490
491                 /* unused vars */
492                 if (vmv->range.first_use.abs_pos > vmv->range.last_use.abs_pos)
493                         continue;
494
495                 if ((ins->flags & (MONO_INST_IS_DEAD|MONO_INST_VOLATILE|MONO_INST_INDIRECT)) || 
496                     (ins->opcode != OP_LOCAL && ins->opcode != OP_ARG))
497                         continue;
498
499                 /* we don't allocate I1 to registers because there is no simple way to sign extend
500                  * 8-bit quantities in caller saved registers on x86 */
501                 if (is_regsize_var (ins->inst_vtype) || (ins->inst_vtype->type == MONO_TYPE_BOOLEAN) || 
502                     (ins->inst_vtype->type == MONO_TYPE_U1) || (ins->inst_vtype->type == MONO_TYPE_U2)||
503                     (ins->inst_vtype->type == MONO_TYPE_I2) || (ins->inst_vtype->type == MONO_TYPE_CHAR)) {
504                         g_assert (MONO_VARINFO (cfg, i)->reg == -1);
505                         g_assert (i == vmv->idx);
506                         vars = mono_varlist_insert_sorted (cfg, vars, vmv, FALSE);
507                 }
508         }
509
510         return vars;
511 }
512
513 GList *
514 mono_arch_get_global_int_regs (MonoCompile *cfg)
515 {
516         GList *regs = NULL;
517
518         /* we can use 3 registers for global allocation */
519         regs = g_list_prepend (regs, (gpointer)X86_EBX);
520         regs = g_list_prepend (regs, (gpointer)X86_ESI);
521         regs = g_list_prepend (regs, (gpointer)X86_EDI);
522
523         return regs;
524 }
525  
526 /*
527  * Set var information according to the calling convention. X86 version.
528  * The locals var stuff should most likely be split out into another method.
529  */
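/*
 * Rough sketch of the frame layout produced below (illustrative, not normative;
 * exact offsets depend on the signature, save_lmf and used_int_regs):
 *
 *   positive %ebp offsets, allocated first starting at 8:
 *     [ebp + 8]    hidden valuetype-return pointer, if MONO_TYPE_ISSTRUCT (sig->ret)
 *     [ebp + ...]  'this', then the vararg signature cookie, then the incoming args
 *
 *   negative %ebp offsets, allocated afterwards (offset restarts at 0):
 *     [ebp - ...]  MonoLMF (if save_lmf) or the saved callee-saved registers,
 *                  followed by the locals; the total is rounded up to
 *                  MONO_ARCH_FRAME_ALIGNMENT and stored, negated, in m->stack_offset.
 */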
530 void
531 mono_arch_allocate_vars (MonoCompile *m)
532 {
533         MonoMethodSignature *sig;
534         MonoMethodHeader *header;
535         MonoInst *inst;
536         int i, offset, size, align, curinst;
537
538         header = ((MonoMethodNormal *)m->method)->header;
539
540         sig = m->method->signature;
541
542         offset = 8;
543         curinst = 0;
544         if (MONO_TYPE_ISSTRUCT (sig->ret)) {
545                 m->ret->opcode = OP_REGOFFSET;
546                 m->ret->inst_basereg = X86_EBP;
547                 m->ret->inst_offset = offset;
548                 offset += sizeof (gpointer);
549         } else {
550                 /* FIXME: handle long and FP values */
551                 switch (sig->ret->type) {
552                 case MONO_TYPE_VOID:
553                         break;
554                 default:
555                         m->ret->opcode = OP_REGVAR;
556                         m->ret->inst_c0 = X86_EAX;
557                         break;
558                 }
559         }
560         if (sig->hasthis) {
561                 inst = m->varinfo [curinst];
562                 if (inst->opcode != OP_REGVAR) {
563                         inst->opcode = OP_REGOFFSET;
564                         inst->inst_basereg = X86_EBP;
565                 }
566                 inst->inst_offset = offset;
567                 offset += sizeof (gpointer);
568                 curinst++;
569         }
570
571         if (sig->call_convention == MONO_CALL_VARARG) {
572                 m->sig_cookie = offset;
573                 offset += sizeof (gpointer);
574         }
575
576         for (i = 0; i < sig->param_count; ++i) {
577                 inst = m->varinfo [curinst];
578                 if (inst->opcode != OP_REGVAR) {
579                         inst->opcode = OP_REGOFFSET;
580                         inst->inst_basereg = X86_EBP;
581                 }
582                 inst->inst_offset = offset;
583                 size = mono_type_size (sig->params [i], &align);
584                 size += 4 - 1;
585                 size &= ~(4 - 1);
586                 offset += size;
587                 curinst++;
588         }
589
590         offset = 0;
591
592         /* reserve space to save LMF and caller saved registers */
593
594         if (m->method->save_lmf) {
595                 offset += sizeof (MonoLMF);
596         } else {
597                 if (m->used_int_regs & (1 << X86_EBX)) {
598                         offset += 4;
599                 }
600
601                 if (m->used_int_regs & (1 << X86_EDI)) {
602                         offset += 4;
603                 }
604
605                 if (m->used_int_regs & (1 << X86_ESI)) {
606                         offset += 4;
607                 }
608         }
609
610         for (i = curinst; i < m->num_varinfo; ++i) {
611                 inst = m->varinfo [i];
612
613                 if ((inst->flags & MONO_INST_IS_DEAD) || inst->opcode == OP_REGVAR)
614                         continue;
615
616                 /* inst->unused indicates native-sized value types; this is used by the
617                  * pinvoke wrappers when they call functions returning structures */
618                 if (inst->unused && MONO_TYPE_ISSTRUCT (inst->inst_vtype) && inst->inst_vtype->type != MONO_TYPE_TYPEDBYREF)
619                         size = mono_class_native_size (inst->inst_vtype->data.klass, &align);
620                 else
621                         size = mono_type_size (inst->inst_vtype, &align);
622
623                 offset += size;
624                 offset += align - 1;
625                 offset &= ~(align - 1);
626                 inst->opcode = OP_REGOFFSET;
627                 inst->inst_basereg = X86_EBP;
628                 inst->inst_offset = -offset;
629                 //g_print ("allocating local %d to %d\n", i, -offset);
630         }
631         offset += (MONO_ARCH_FRAME_ALIGNMENT - 1);
632         offset &= ~(MONO_ARCH_FRAME_ALIGNMENT - 1);
633
634         /* change sign? */
635         m->stack_offset = -offset;
636 }
637
638 /* FIXME: we need an alignment solution for enter_method and mono_arch_call_opcode;
639  * currently alignment in mono_arch_call_opcode is computed without arch_get_argument_info
640  */
641
642 /* 
643  * Take the arguments and generate the arch-specific
644  * instructions to properly call the function described by 'call'.
645  * This includes pushing arguments, moving them to the right register,
646  * etc.
647  * Issue: who does the spilling, if needed, and when?
648  */
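/*
 * A sketch of what the loop below builds (illustrative): OP_OUTARG* instructions
 * are prepended to call->out_args, so for a static call "foo (a, b)" the final
 * list reads
 *
 *   out_args:  OUTARG (b)  ->  OUTARG (a)  ->  OUTARG (sig cookie, if vararg)
 *
 * (hence the "prepend, so they get reversed" comments). call->stack_usage
 * accumulates the bytes each argument occupies: 4 for ints, pointers and R4,
 * 8 for I8/U8/R8, and the managed or native stack size for value types.
 */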
649 MonoCallInst*
650 mono_arch_call_opcode (MonoCompile *cfg, MonoBasicBlock* bb, MonoCallInst *call, int is_virtual) {
651         MonoInst *arg, *in;
652         MonoMethodSignature *sig;
653         int i, n, stack_size, type;
654         MonoType *ptype;
655
656         stack_size = 0;
657         /* add the vararg cookie before the non-implicit args */
658         if (call->signature->call_convention == MONO_CALL_VARARG) {
659                 MonoInst *sig_arg;
660                 MONO_INST_NEW (cfg, arg, OP_OUTARG);
661                 MONO_INST_NEW (cfg, sig_arg, OP_ICONST);
662                 sig_arg->inst_p0 = call->signature;
663                 arg->inst_left = sig_arg;
664                 arg->type = STACK_PTR;
665                 /* prepend, so they get reversed */
666                 arg->next = call->out_args;
667                 call->out_args = arg;
668                 stack_size += sizeof (gpointer);
669         }
670         sig = call->signature;
671         n = sig->param_count + sig->hasthis;
672
673         if (sig->ret && MONO_TYPE_ISSTRUCT (sig->ret))
674                 stack_size += sizeof (gpointer);
675         for (i = 0; i < n; ++i) {
676                 if (is_virtual && i == 0) {
677                         /* the argument will be attached to the call instruction */
678                         in = call->args [i];
679                         stack_size += 4;
680                 } else {
681                         MONO_INST_NEW (cfg, arg, OP_OUTARG);
682                         in = call->args [i];
683                         arg->cil_code = in->cil_code;
684                         arg->inst_left = in;
685                         arg->type = in->type;
686                         /* prepend, so they get reversed */
687                         arg->next = call->out_args;
688                         call->out_args = arg;
689                         if (i >= sig->hasthis) {
690                                 ptype = sig->params [i - sig->hasthis];
691                                 if (ptype->byref)
692                                         type = MONO_TYPE_U;
693                                 else
694                                         type = ptype->type;
695 handle_enum:
696                                 /* FIXME: validate arguments... */
697                                 switch (type) {
698                                 case MONO_TYPE_I:
699                                 case MONO_TYPE_U:
700                                 case MONO_TYPE_BOOLEAN:
701                                 case MONO_TYPE_CHAR:
702                                 case MONO_TYPE_I1:
703                                 case MONO_TYPE_U1:
704                                 case MONO_TYPE_I2:
705                                 case MONO_TYPE_U2:
706                                 case MONO_TYPE_I4:
707                                 case MONO_TYPE_U4:
708                                 case MONO_TYPE_STRING:
709                                 case MONO_TYPE_CLASS:
710                                 case MONO_TYPE_OBJECT:
711                                 case MONO_TYPE_PTR:
712                                 case MONO_TYPE_FNPTR:
713                                 case MONO_TYPE_ARRAY:
714                                 case MONO_TYPE_SZARRAY:
715                                         stack_size += 4;
716                                         break;
717                                 case MONO_TYPE_I8:
718                                 case MONO_TYPE_U8:
719                                         stack_size += 8;
720                                         break;
721                                 case MONO_TYPE_R4:
722                                         stack_size += 4;
723                                         arg->opcode = OP_OUTARG_R4;
724                                         break;
725                                 case MONO_TYPE_R8:
726                                         stack_size += 8;
727                                         arg->opcode = OP_OUTARG_R8;
728                                         break;
729                                 case MONO_TYPE_VALUETYPE:
730                                         if (MONO_TYPE_ISSTRUCT (ptype)) {
731                                                 int size;
732                                                 if (sig->pinvoke) 
733                                                         size = mono_type_native_stack_size (&in->klass->byval_arg, NULL);
734                                                 else 
735                                                         size = mono_type_stack_size (&in->klass->byval_arg, NULL);
736
737                                                 stack_size += size;
738                                                 arg->opcode = OP_OUTARG_VT;
739                                                 arg->klass = in->klass;
740                                                 arg->unused = sig->pinvoke;
741                                                 arg->inst_imm = size; 
742                                         } else {
743                                                 type = ptype->data.klass->enum_basetype->type;
744                                                 goto handle_enum;
745                                         }
746                                         break;
747                                 case MONO_TYPE_TYPEDBYREF:
748                                         stack_size += sizeof (MonoTypedRef);
749                                         arg->opcode = OP_OUTARG_VT;
750                                         arg->klass = in->klass;
751                                         arg->unused = sig->pinvoke;
752                                         arg->inst_imm = sizeof (MonoTypedRef); 
753                                         break;
754                                 case MONO_TYPE_GENERICINST:
755                                         type = ptype->data.generic_inst->generic_type->type;
756                                         goto handle_enum;
757
758                                 default:
759                                         g_error ("unknown type 0x%02x in mono_arch_call_opcode\n", type);
760                                 }
761                         } else {
762                                 /* the this argument */
763                                 stack_size += 4;
764                         }
765                 }
766         }
767         /* if the function returns a struct, the called method already does a ret $0x4 */
768         if (sig->ret && MONO_TYPE_ISSTRUCT (sig->ret))
769                 stack_size -= 4;
770         call->stack_usage = stack_size;
771         /* 
772          * should set more info in call, such as the stack space
773          * used by the args that needs to be added back to esp
774          */
775
776         return call;
777 }
778
779 /*
780  * Allow tracing to work with this interface (with an optional argument)
781  */
782
783 /*
784  * This may be needed on some archs or for debugging support.
785  */
786 void
787 mono_arch_instrument_mem_needs (MonoMethod *method, int *stack, int *code)
788 {
789         /* no stack room needed now (may be needed for FASTCALL-trace support) */
790         *stack = 0;
791         /* split prolog-epilog requirements? */
792         *code = 50; /* max bytes needed: check this number */
793 }
794
795 void*
796 mono_arch_instrument_prolog (MonoCompile *cfg, void *func, void *p, gboolean enable_arguments)
797 {
798         guchar *code = p;
799
800         /* if some args are passed in registers, we need to save them here */
801         x86_push_reg (code, X86_EBP);
802         mono_add_patch_info (cfg, code-cfg->native_code, MONO_PATCH_INFO_METHODCONST, cfg->method);
803         x86_push_imm (code, cfg->method);
804         mono_add_patch_info (cfg, code-cfg->native_code, MONO_PATCH_INFO_ABS, func);
805         x86_call_code (code, 0);
806         x86_alu_reg_imm (code, X86_ADD, X86_ESP, 8);
807
808         return code;
809 }
810
811 enum {
812         SAVE_NONE,
813         SAVE_STRUCT,
814         SAVE_EAX,
815         SAVE_EAX_EDX,
816         SAVE_FP
817 };
818
819 void*
820 mono_arch_instrument_epilog (MonoCompile *cfg, void *func, void *p, gboolean enable_arguments)
821 {
822         guchar *code = p;
823         int arg_size = 0, save_mode = SAVE_NONE;
824         MonoMethod *method = cfg->method;
825         int rtype = method->signature->ret->type;
826         
827 handle_enum:
828         switch (rtype) {
829         case MONO_TYPE_VOID:
830                 /* special case string .ctor icall */
831                 if (!strcmp (".ctor", method->name) && method->klass == mono_defaults.string_class)
832                         save_mode = SAVE_EAX;
833                 else
834                         save_mode = SAVE_NONE;
835                 break;
836         case MONO_TYPE_I8:
837         case MONO_TYPE_U8:
838                 save_mode = SAVE_EAX_EDX;
839                 break;
840         case MONO_TYPE_R4:
841         case MONO_TYPE_R8:
842                 save_mode = SAVE_FP;
843                 break;
844         case MONO_TYPE_VALUETYPE:
845                 if (method->signature->ret->data.klass->enumtype) {
846                         rtype = method->signature->ret->data.klass->enum_basetype->type;
847                         goto handle_enum;
848                 }
849                 save_mode = SAVE_STRUCT;
850                 break;
851         default:
852                 save_mode = SAVE_EAX;
853                 break;
854         }
855
856         switch (save_mode) {
857         case SAVE_EAX_EDX:
858                 x86_push_reg (code, X86_EDX);
859                 x86_push_reg (code, X86_EAX);
860                 if (enable_arguments) {
861                         x86_push_reg (code, X86_EDX);
862                         x86_push_reg (code, X86_EAX);
863                         arg_size = 8;
864                 }
865                 break;
866         case SAVE_EAX:
867                 x86_push_reg (code, X86_EAX);
868                 if (enable_arguments) {
869                         x86_push_reg (code, X86_EAX);
870                         arg_size = 4;
871                 }
872                 break;
873         case SAVE_FP:
874                 x86_alu_reg_imm (code, X86_SUB, X86_ESP, 8);
875                 x86_fst_membase (code, X86_ESP, 0, TRUE, TRUE);
876                 if (enable_arguments) {
877                         x86_alu_reg_imm (code, X86_SUB, X86_ESP, 8);
878                         x86_fst_membase (code, X86_ESP, 0, TRUE, TRUE);
879                         arg_size = 8;
880                 }
881                 break;
882         case SAVE_STRUCT:
883                 if (enable_arguments) {
884                         x86_push_membase (code, X86_EBP, 8);
885                         arg_size = 4;
886                 }
887                 break;
888         case SAVE_NONE:
889         default:
890                 break;
891         }
892
893
894         mono_add_patch_info (cfg, code-cfg->native_code, MONO_PATCH_INFO_METHODCONST, method);
895         x86_push_imm (code, method);
896         mono_add_patch_info (cfg, code-cfg->native_code, MONO_PATCH_INFO_ABS, func);
897         x86_call_code (code, 0);
898         x86_alu_reg_imm (code, X86_ADD, X86_ESP, arg_size + 4);
899
900         switch (save_mode) {
901         case SAVE_EAX_EDX:
902                 x86_pop_reg (code, X86_EAX);
903                 x86_pop_reg (code, X86_EDX);
904                 break;
905         case SAVE_EAX:
906                 x86_pop_reg (code, X86_EAX);
907                 break;
908         case SAVE_FP:
909                 x86_fld_membase (code, X86_ESP, 0, TRUE);
910                 x86_alu_reg_imm (code, X86_ADD, X86_ESP, 8);
911                 break;
912         case SAVE_NONE:
913         default:
914                 break;
915         }
916
917         return code;
918 }
919
920 #define EMIT_COND_BRANCH(ins,cond,sign) \
921 if (ins->flags & MONO_INST_BRLABEL) { \
922         if (ins->inst_i0->inst_c0) { \
923                 x86_branch (code, cond, cfg->native_code + ins->inst_i0->inst_c0, sign); \
924         } else { \
925                 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_LABEL, ins->inst_i0); \
926                 x86_branch32 (code, cond, 0, sign); \
927         } \
928 } else { \
929         if (ins->inst_true_bb->native_offset) { \
930                 x86_branch (code, cond, cfg->native_code + ins->inst_true_bb->native_offset, sign); \
931         } else { \
932                 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB, ins->inst_true_bb); \
933                 if ((cfg->opt & MONO_OPT_BRANCH) && \
934                     x86_is_imm8 (ins->inst_true_bb->max_offset - cpos)) \
935                         x86_branch8 (code, cond, 0, sign); \
936                 else \
937                         x86_branch32 (code, cond, 0, sign); \
938         } \
939 }
940
941 /* emit a branch to the code that throws exc_name when the condition holds */
942 #define EMIT_COND_SYSTEM_EXCEPTION(cond,signed,exc_name)            \
943         do {                                                        \
944                 mono_add_patch_info (cfg, code - cfg->native_code,   \
945                                     MONO_PATCH_INFO_EXC, exc_name);  \
946                 x86_branch32 (code, cond, 0, signed);               \
947         } while (0); 
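/*
 * Typical use (a sketch, not taken from this excerpt; the real call sites live in
 * the code emitter further down in this file): after an instruction that sets the
 * flags, branch to the throw code when the failure condition holds, e.g.
 *
 *   x86_alu_reg_reg (code, X86_ADD, ins->sreg1, ins->sreg2);
 *   EMIT_COND_SYSTEM_EXCEPTION (X86_CC_O, FALSE, "OverflowException");
 */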
948
949 #define EMIT_FPCOMPARE(code) do { \
950         x86_fcompp (code); \
951         x86_fnstsw (code); \
952 } while (0); 
953
954 static void
955 peephole_pass (MonoCompile *cfg, MonoBasicBlock *bb)
956 {
957         MonoInst *ins, *last_ins = NULL;
958         ins = bb->code;
959
960         while (ins) {
961
962                 switch (ins->opcode) {
963                 case OP_ICONST:
964                         /* reg = 0 -> XOR (reg, reg) */
965                         /* XOR sets the condition flags on x86, so we can't always do it */
966                         if (ins->inst_c0 == 0 && ins->next &&
967                             (ins->next->opcode == CEE_BR)) { 
968                                 ins->opcode = CEE_XOR;
969                                 ins->sreg1 = ins->dreg;
970                                 ins->sreg2 = ins->dreg;
971                         }
972                         break;
973                 case OP_MUL_IMM: 
974                         /* remove unnecessary multiplication by 1 */
975                         if (ins->inst_imm == 1) {
976                                 if (ins->dreg != ins->sreg1) {
977                                         ins->opcode = OP_MOVE;
978                                 } else {
979                                         last_ins->next = ins->next;                             
980                                         ins = ins->next;                                
981                                         continue;
982                                 }
983                         }
984                         break;
985                 case OP_COMPARE_IMM:
986                         /* OP_COMPARE_IMM (reg, 0) --> OP_X86_TEST_NULL (reg) */
987                         if (ins->inst_imm == 0 && ins->next &&
988                             (ins->next->opcode == CEE_BEQ || ins->next->opcode == CEE_BNE_UN ||
989                              ins->next->opcode == OP_CEQ)) {
990                                 ins->opcode = OP_X86_TEST_NULL;
991                         }     
992                         break;
993                 case OP_LOAD_MEMBASE:
994                 case OP_LOADI4_MEMBASE:
995                         /* 
996                          * OP_STORE_MEMBASE_REG reg, offset(basereg) 
997                          * OP_LOAD_MEMBASE offset(basereg), reg
998                          */
999                         if (last_ins && (last_ins->opcode == OP_STOREI4_MEMBASE_REG 
1000                                          || last_ins->opcode == OP_STORE_MEMBASE_REG) &&
1001                             ins->inst_basereg == last_ins->inst_destbasereg &&
1002                             ins->inst_offset == last_ins->inst_offset) {
1003                                 if (ins->dreg == last_ins->sreg1) {
1004                                         last_ins->next = ins->next;                             
1005                                         ins = ins->next;                                
1006                                         continue;
1007                                 } else {
1008                                         //static int c = 0; printf ("MATCHX %s %d\n", cfg->method->name,c++);
1009                                         ins->opcode = OP_MOVE;
1010                                         ins->sreg1 = last_ins->sreg1;
1011                                 }
1012
1013                         /* 
1014                          * Note: reg1 must be different from the basereg in the second load
1015                          * OP_LOAD_MEMBASE offset(basereg), reg1
1016                          * OP_LOAD_MEMBASE offset(basereg), reg2
1017                          * -->
1018                          * OP_LOAD_MEMBASE offset(basereg), reg1
1019                          * OP_MOVE reg1, reg2
1020                          */
1021                         } else if (last_ins && (last_ins->opcode == OP_LOADI4_MEMBASE
1022                                            || last_ins->opcode == OP_LOAD_MEMBASE) &&
1023                               ins->inst_basereg != last_ins->dreg &&
1024                               ins->inst_basereg == last_ins->inst_basereg &&
1025                               ins->inst_offset == last_ins->inst_offset) {
1026
1027                                 if (ins->dreg == last_ins->dreg) {
1028                                         last_ins->next = ins->next;                             
1029                                         ins = ins->next;                                
1030                                         continue;
1031                                 } else {
1032                                         ins->opcode = OP_MOVE;
1033                                         ins->sreg1 = last_ins->dreg;
1034                                 }
1035
1036                                 //g_assert_not_reached ();
1037
1038 #if 0
1039                         /* 
1040                          * OP_STORE_MEMBASE_IMM imm, offset(basereg) 
1041                          * OP_LOAD_MEMBASE offset(basereg), reg
1042                          * -->
1043                          * OP_STORE_MEMBASE_IMM imm, offset(basereg) 
1044                          * OP_ICONST reg, imm
1045                          */
1046                         } else if (last_ins && (last_ins->opcode == OP_STOREI4_MEMBASE_IMM
1047                                                 || last_ins->opcode == OP_STORE_MEMBASE_IMM) &&
1048                                    ins->inst_basereg == last_ins->inst_destbasereg &&
1049                                    ins->inst_offset == last_ins->inst_offset) {
1050                                 //static int c = 0; printf ("MATCHX %s %d\n", cfg->method->name,c++);
1051                                 ins->opcode = OP_ICONST;
1052                                 ins->inst_c0 = last_ins->inst_imm;
1053                                 g_assert_not_reached (); // check this rule
1054 #endif
1055                         }
1056                         break;
1057                 case OP_LOADU1_MEMBASE:
1058                 case OP_LOADI1_MEMBASE:
1059                   /*
1060                    * FIXME: Missing explanation
1061                    */
1062                         if (last_ins && (last_ins->opcode == OP_STOREI1_MEMBASE_REG) &&
1063                                         ins->inst_basereg == last_ins->inst_destbasereg &&
1064                                         ins->inst_offset == last_ins->inst_offset) {
1065                                 if (ins->dreg == last_ins->sreg1) {
1066                                         last_ins->next = ins->next;                             
1067                                         ins = ins->next;                                
1068                                         continue;
1069                                 } else {
1070                                         //static int c = 0; printf ("MATCHX %s %d\n", cfg->method->name,c++);
1071                                         ins->opcode = OP_MOVE;
1072                                         ins->sreg1 = last_ins->sreg1;
1073                                 }
1074                         }
1075                         break;
1076                 case OP_LOADU2_MEMBASE:
1077                 case OP_LOADI2_MEMBASE:
1078                   /*
1079                    * FIXME: Missing explanation
1080                    */
1081                         if (last_ins && (last_ins->opcode == OP_STOREI2_MEMBASE_REG) &&
1082                                         ins->inst_basereg == last_ins->inst_destbasereg &&
1083                                         ins->inst_offset == last_ins->inst_offset) {
1084                                 if (ins->dreg == last_ins->sreg1) {
1085                                         last_ins->next = ins->next;                             
1086                                         ins = ins->next;                                
1087                                         continue;
1088                                 } else {
1089                                         //static int c = 0; printf ("MATCHX %s %d\n", cfg->method->name,c++);
1090                                         ins->opcode = OP_MOVE;
1091                                         ins->sreg1 = last_ins->sreg1;
1092                                 }
1093                         }
1094                         break;
1095                 case CEE_CONV_I4:
1096                 case CEE_CONV_U4:
1097                 case OP_MOVE:
1098                         /* 
1099                          * OP_MOVE reg, reg 
1100                          */
1101                         if (ins->dreg == ins->sreg1) {
1102                                 if (last_ins)
1103                                         last_ins->next = ins->next;                             
1104                                 ins = ins->next;
1105                                 continue;
1106                         }
1107                         /* 
1108                          * OP_MOVE sreg, dreg 
1109                          * OP_MOVE dreg, sreg
1110                          */
1111                         if (last_ins && last_ins->opcode == OP_MOVE &&
1112                             ins->sreg1 == last_ins->dreg &&
1113                             ins->dreg == last_ins->sreg1) {
1114                                 last_ins->next = ins->next;                             
1115                                 ins = ins->next;                                
1116                                 continue;
1117                         }
1118                         break;
1119                 }
1120                 last_ins = ins;
1121                 ins = ins->next;
1122         }
1123         bb->last_ins = last_ins;
1124 }
1125
1126 static const int 
1127 branch_cc_table [] = {
1128         X86_CC_EQ, X86_CC_GE, X86_CC_GT, X86_CC_LE, X86_CC_LT,
1129         X86_CC_NE, X86_CC_GE, X86_CC_GT, X86_CC_LE, X86_CC_LT,
1130         X86_CC_O, X86_CC_NO, X86_CC_C, X86_CC_NC
1131 };
1132
1133 #define DEBUG(a) if (cfg->verbose_level > 1) a
1134 //#define DEBUG(a)
1135 #define reg_is_freeable(r) ((r) >= 0 && (r) <= 7 && X86_IS_CALLEE ((r)))
1136
1137 typedef struct {
1138         int born_in;
1139         int killed_in;
1140         int last_use;
1141         int prev_use;
1142 } RegTrack;
1143
1144 static const char*const * ins_spec = pentium_desc;
1145
1146 static void
1147 print_ins (int i, MonoInst *ins)
1148 {
1149         const char *spec = ins_spec [ins->opcode];
1150         g_print ("\t%-2d %s", i, mono_inst_name (ins->opcode));
1151         if (spec [MONO_INST_DEST]) {
1152                 if (ins->dreg >= MONO_MAX_IREGS)
1153                         g_print (" R%d <-", ins->dreg);
1154                 else
1155                         g_print (" %s <-", mono_arch_regname (ins->dreg));
1156         }
1157         if (spec [MONO_INST_SRC1]) {
1158                 if (ins->sreg1 >= MONO_MAX_IREGS)
1159                         g_print (" R%d", ins->sreg1);
1160                 else
1161                         g_print (" %s", mono_arch_regname (ins->sreg1));
1162         }
1163         if (spec [MONO_INST_SRC2]) {
1164                 if (ins->sreg2 >= MONO_MAX_IREGS)
1165                         g_print (" R%d", ins->sreg2);
1166                 else
1167                         g_print (" %s", mono_arch_regname (ins->sreg2));
1168         }
1169         if (spec [MONO_INST_CLOB])
1170                 g_print (" clobbers: %c", spec [MONO_INST_CLOB]);
1171         g_print ("\n");
1172 }
1173
1174 static void
1175 print_regtrack (RegTrack *t, int num)
1176 {
1177         int i;
1178         char buf [32];
1179         const char *r;
1180         
1181         for (i = 0; i < num; ++i) {
1182                 if (!t [i].born_in)
1183                         continue;
1184                 if (i >= MONO_MAX_IREGS) {
1185                         g_snprintf (buf, sizeof(buf), "R%d", i);
1186                         r = buf;
1187                 } else
1188                         r = mono_arch_regname (i);
1189                 g_print ("liveness: %s [%d - %d]\n", r, t [i].born_in, t[i].last_use);
1190         }
1191 }
1192
1193 typedef struct InstList InstList;
1194
1195 struct InstList {
1196         InstList *prev;
1197         InstList *next;
1198         MonoInst *data;
1199 };
1200
1201 static inline InstList*
1202 inst_list_prepend (MonoMemPool *pool, InstList *list, MonoInst *data)
1203 {
1204         InstList *item = mono_mempool_alloc (pool, sizeof (InstList));
1205         item->data = data;
1206         item->prev = NULL;
1207         item->next = list;
1208         if (list)
1209                 list->prev = item;
1210         return item;
1211 }
1212
1213 /*
1214  * Force the spilling of the variable in the symbolic register 'reg'.
1215  */
1216 static int
1217 get_register_force_spilling (MonoCompile *cfg, InstList *item, MonoInst *ins, int reg)
1218 {
1219         MonoInst *load;
1220         int i, sel, spill;
1221         
1222         sel = cfg->rs->iassign [reg];
1223         /*i = cfg->rs->isymbolic [sel];
1224         g_assert (i == reg);*/
1225         i = reg;
1226         spill = ++cfg->spill_count;
1227         cfg->rs->iassign [i] = -spill - 1;
1228         mono_regstate_free_int (cfg->rs, sel);
1229         /* we need to create a spill var and insert a load to sel after the current instruction */
1230         MONO_INST_NEW (cfg, load, OP_LOAD_MEMBASE);
1231         load->dreg = sel;
1232         load->inst_basereg = X86_EBP;
1233         load->inst_offset = mono_spillvar_offset (cfg, spill);
1234         if (item->prev) {
1235                 while (ins->next != item->prev->data)
1236                         ins = ins->next;
1237         }
1238         load->next = ins->next;
1239         ins->next = load;
1240         DEBUG (g_print ("SPILLED LOAD (%d at 0x%08x(%%ebp)) R%d (freed %s)\n", spill, load->inst_offset, i, mono_arch_regname (sel)));
1241         i = mono_regstate_alloc_int (cfg->rs, 1 << sel);
1242         g_assert (i == sel);
1243
1244         return sel;
1245 }
1246
1247 static int
1248 get_register_spilling (MonoCompile *cfg, InstList *item, MonoInst *ins, guint32 regmask, int reg)
1249 {
1250         MonoInst *load;
1251         int i, sel, spill;
1252
1253         DEBUG (g_print ("start regmask to assign R%d: 0x%08x (R%d <- R%d R%d)\n", reg, regmask, ins->dreg, ins->sreg1, ins->sreg2));
1254         /* exclude the registers in the current instruction */
1255         if (reg != ins->sreg1 && (reg_is_freeable (ins->sreg1) || (ins->sreg1 >= MONO_MAX_IREGS && cfg->rs->iassign [ins->sreg1] >= 0))) {
1256                 if (ins->sreg1 >= MONO_MAX_IREGS)
1257                         regmask &= ~ (1 << cfg->rs->iassign [ins->sreg1]);
1258                 else
1259                         regmask &= ~ (1 << ins->sreg1);
1260                 DEBUG (g_print ("excluding sreg1 %s\n", mono_arch_regname (ins->sreg1)));
1261         }
1262         if (reg != ins->sreg2 && (reg_is_freeable (ins->sreg2) || (ins->sreg2 >= MONO_MAX_IREGS && cfg->rs->iassign [ins->sreg2] >= 0))) {
1263                 if (ins->sreg2 >= MONO_MAX_IREGS)
1264                         regmask &= ~ (1 << cfg->rs->iassign [ins->sreg2]);
1265                 else
1266                         regmask &= ~ (1 << ins->sreg2);
1267                 DEBUG (g_print ("excluding sreg2 %s %d\n", mono_arch_regname (ins->sreg2), ins->sreg2));
1268         }
1269         if (reg != ins->dreg && reg_is_freeable (ins->dreg)) {
1270                 regmask &= ~ (1 << ins->dreg);
1271                 DEBUG (g_print ("excluding dreg %s\n", mono_arch_regname (ins->dreg)));
1272         }
1273
1274         DEBUG (g_print ("available regmask: 0x%08x\n", regmask));
1275         g_assert (regmask); /* need at least a register we can free */
1276         sel = -1;
1277         /* we should track prev_use and spill the register that's farther */
1278         for (i = 0; i < MONO_MAX_IREGS; ++i) {
1279                 if (regmask & (1 << i)) {
1280                         sel = i;
1281                         DEBUG (g_print ("selected register %s has assignment %d\n", mono_arch_regname (sel), cfg->rs->iassign [sel]));
1282                         break;
1283                 }
1284         }
1285         i = cfg->rs->isymbolic [sel];
1286         spill = ++cfg->spill_count;
1287         cfg->rs->iassign [i] = -spill - 1;
1288         mono_regstate_free_int (cfg->rs, sel);
1289         /* we need to create a spill var and insert a load to sel after the current instruction */
1290         MONO_INST_NEW (cfg, load, OP_LOAD_MEMBASE);
1291         load->dreg = sel;
1292         load->inst_basereg = X86_EBP;
1293         load->inst_offset = mono_spillvar_offset (cfg, spill);
1294         if (item->prev) {
1295                 while (ins->next != item->prev->data)
1296                         ins = ins->next;
1297         }
1298         load->next = ins->next;
1299         ins->next = load;
1300         DEBUG (g_print ("SPILLED LOAD (%d at 0x%08x(%%ebp)) R%d (freed %s)\n", spill, load->inst_offset, i, mono_arch_regname (sel)));
1301         i = mono_regstate_alloc_int (cfg->rs, 1 << sel);
1302         g_assert (i == sel);
1303         
1304         return sel;
1305 }
1306
1307 static MonoInst*
1308 create_copy_ins (MonoCompile *cfg, int dest, int src, MonoInst *ins)
1309 {
1310         MonoInst *copy;
1311         MONO_INST_NEW (cfg, copy, OP_MOVE);
1312         copy->dreg = dest;
1313         copy->sreg1 = src;
1314         if (ins) {
1315                 copy->next = ins->next;
1316                 ins->next = copy;
1317         }
1318         DEBUG (g_print ("\tforced copy from %s to %s\n", mono_arch_regname (src), mono_arch_regname (dest)));
1319         return copy;
1320 }
1321
1322 static MonoInst*
1323 create_spilled_store (MonoCompile *cfg, int spill, int reg, int prev_reg, MonoInst *ins)
1324 {
1325         MonoInst *store;
1326         MONO_INST_NEW (cfg, store, OP_STORE_MEMBASE_REG);
1327         store->sreg1 = reg;
1328         store->inst_destbasereg = X86_EBP;
1329         store->inst_offset = mono_spillvar_offset (cfg, spill);
1330         if (ins) {
1331                 store->next = ins->next;
1332                 ins->next = store;
1333         }
1334         DEBUG (g_print ("SPILLED STORE (%d at 0x%08x(%%ebp)) R%d (from %s)\n", spill, store->inst_offset, prev_reg, mono_arch_regname (reg)));
1335         return store;
1336 }
1337
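/*
 * Link 'to_insert' into the instruction list immediately before 'ins';
 * 'item' is the InstList node of 'ins' in the reversed list, so item->next
 * (when present) refers to an instruction that precedes 'ins' in program order.
 */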
1338 static void
1339 insert_before_ins (MonoInst *ins, InstList *item, MonoInst* to_insert)
1340 {
1341         MonoInst *prev;
1342         if (item->next) {
1343                 prev = item->next->data;
1344
1345                 while (prev->next != ins)
1346                         prev = prev->next;
1347                 to_insert->next = ins;
1348                 prev->next = to_insert;
1349         } else {
1350                 to_insert->next = ins;
1351         }
1352         /* 
1353          * needed: otherwise, while handling the next instruction, we could append
1354          * an ins to the end of the list and it would end up past this instruction.
1355          */
1356         item->data = to_insert; 
1357 }
1358
1359 #if  0
1360 static int
1361 alloc_int_reg (MonoCompile *cfg, InstList *curinst, MonoInst *ins, int sym_reg, guint32 allow_mask)
1362 {
1363         int val = cfg->rs->iassign [sym_reg];
1364         if (val < 0) {
1365                 int spill = 0;
1366                 if (val < -1) {
1367                         /* the register gets spilled after this inst */
1368                         spill = -val -1;
1369                 }
1370                 val = mono_regstate_alloc_int (cfg->rs, allow_mask);
1371                 if (val < 0)
1372                         val = get_register_spilling (cfg, curinst, ins, allow_mask, sym_reg);
1373                 cfg->rs->iassign [sym_reg] = val;
1374                 /* add option to store before the instruction for src registers */
1375                 if (spill)
1376                         create_spilled_store (cfg, spill, val, sym_reg, ins);
1377         }
1378         cfg->rs->isymbolic [val] = sym_reg;
1379         return val;
1380 }
1381 #endif
1382
1383 /*#include "cprop.c"*/
1384
1385 /*
1386  * Local register allocation.
1387  * We first scan the list of instructions and save the liveness info for
1388  * each register (when the register is first used, when its value is set, etc.).
1389  * We also reverse the list of instructions (in the InstList list) because
1390  * assigning registers backwards allows for more tricks to be used.
1391  */
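/*
 * Rough shape of the allocator below (an illustrative sketch only; the real
 * code also handles clobbering, long/FP results and the fixed-register
 * constraints of shifts, divisions and calls):
 *
 *   forward pass:  for every instruction record prev_use/last_use/born_in
 *                  for dreg/sreg1/sreg2 and prepend it to 'reversed';
 *   backward pass: walk 'reversed', assign hard registers with
 *                  mono_regstate_alloc_int (), and when none is free fall
 *                  back to get_register_spilling (), which emits the spill
 *                  stores/loads via create_spilled_store () and OP_LOAD_MEMBASE.
 */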
1392 void
1393 mono_arch_local_regalloc (MonoCompile *cfg, MonoBasicBlock *bb)
1394 {
1395         MonoInst *ins;
1396         MonoRegState *rs = cfg->rs;
1397         int i, val, fpcount;
1398         RegTrack *reginfo, *reginfof;
1399         RegTrack *reginfo1, *reginfo2, *reginfod;
1400         InstList *tmp, *reversed = NULL;
1401         const char *spec;
1402         guint32 src1_mask, src2_mask, dest_mask;
1403
1404         if (!bb->code)
1405                 return;
1406         rs->next_vireg = bb->max_ireg;
1407         rs->next_vfreg = bb->max_freg;
1408         mono_regstate_assign (rs);
1409         reginfo = mono_mempool_alloc0 (cfg->mempool, sizeof (RegTrack) * rs->next_vireg);
1410         reginfof = mono_mempool_alloc0 (cfg->mempool, sizeof (RegTrack) * rs->next_vfreg);
1411         rs->ifree_mask = X86_CALLEE_REGS;
1412
1413         ins = bb->code;
1414
1415         /*if (cfg->opt & MONO_OPT_COPYPROP)
1416                 local_copy_prop (cfg, ins);*/
1417         
1418         i = 1;
1419         fpcount = 0; /* FIXME: track fp stack utilization */
1420         DEBUG (g_print ("LOCAL regalloc: basic block: %d\n", bb->block_num));
1421         /* forward pass on the instructions to collect register liveness info */
1422         while (ins) {
1423                 spec = ins_spec [ins->opcode];
1424                 DEBUG (print_ins (i, ins));
1425                 if (spec [MONO_INST_SRC1]) {
1426                         if (spec [MONO_INST_SRC1] == 'f')
1427                                 reginfo1 = reginfof;
1428                         else
1429                                 reginfo1 = reginfo;
1430                         reginfo1 [ins->sreg1].prev_use = reginfo1 [ins->sreg1].last_use;
1431                         reginfo1 [ins->sreg1].last_use = i;
1432                 } else {
1433                         ins->sreg1 = -1;
1434                 }
1435                 if (spec [MONO_INST_SRC2]) {
1436                         if (spec [MONO_INST_SRC2] == 'f')
1437                                 reginfo2 = reginfof;
1438                         else
1439                                 reginfo2 = reginfo;
1440                         reginfo2 [ins->sreg2].prev_use = reginfo2 [ins->sreg2].last_use;
1441                         reginfo2 [ins->sreg2].last_use = i;
1442                 } else {
1443                         ins->sreg2 = -1;
1444                 }
1445                 if (spec [MONO_INST_DEST]) {
1446                         if (spec [MONO_INST_DEST] == 'f')
1447                                 reginfod = reginfof;
1448                         else
1449                                 reginfod = reginfo;
1450                         if (spec [MONO_INST_DEST] != 'b') /* it's not just a base register */
1451                                 reginfod [ins->dreg].killed_in = i;
1452                         reginfod [ins->dreg].prev_use = reginfod [ins->dreg].last_use;
1453                         reginfod [ins->dreg].last_use = i;
1454                         if (reginfod [ins->dreg].born_in == 0 || reginfod [ins->dreg].born_in > i)
1455                                 reginfod [ins->dreg].born_in = i;
1456                         if (spec [MONO_INST_DEST] == 'l') {
1457                                 /* long result: low word in EAX, high word in EDX; the two virtual registers are allocated sequentially */
1458                                 reginfod [ins->dreg + 1].prev_use = reginfod [ins->dreg + 1].last_use;
1459                                 reginfod [ins->dreg + 1].last_use = i;
1460                                 if (reginfod [ins->dreg + 1].born_in == 0 || reginfod [ins->dreg + 1].born_in > i)
1461                                         reginfod [ins->dreg + 1].born_in = i;
1462                         }
1463                 } else {
1464                         ins->dreg = -1;
1465                 }
1466                 reversed = inst_list_prepend (cfg->mempool, reversed, ins);
1467                 ++i;
1468                 ins = ins->next;
1469         }
1470
1471         DEBUG (print_regtrack (reginfo, rs->next_vireg));
1472         DEBUG (print_regtrack (reginfof, rs->next_vfreg));
1473         tmp = reversed;
1474         while (tmp) {
1475                 int prev_dreg, prev_sreg1, prev_sreg2;
1476                 dest_mask = src1_mask = src2_mask = X86_CALLEE_REGS;
1477                 --i;
1478                 ins = tmp->data;
1479                 spec = ins_spec [ins->opcode];
1480                 DEBUG (g_print ("processing:"));
1481                 DEBUG (print_ins (i, ins));
1482                 if (spec [MONO_INST_CLOB] == 's') {
1483                         if (rs->ifree_mask & (1 << X86_ECX)) {
1484                                 DEBUG (g_print ("\tshortcut assignment of R%d to ECX\n", ins->sreg2));
1485                                 rs->iassign [ins->sreg2] = X86_ECX;
1486                                 rs->isymbolic [X86_ECX] = ins->sreg2;
1487                                 ins->sreg2 = X86_ECX;
1488                                 rs->ifree_mask &= ~ (1 << X86_ECX);
1489                         } else {
1490                                 int need_ecx_spill = TRUE;
1491                                 /* 
1492                                  * we first check if src1/dreg is already assigned a register
1493                                  * and then we force a spill of the var assigned to ECX.
1494                                  */
1495                                 /* the destination register can't be ECX */
1496                                 dest_mask &= ~ (1 << X86_ECX);
1497                                 src1_mask &= ~ (1 << X86_ECX);
1498                                 val = rs->iassign [ins->dreg];
1499                                 /* 
1500                                  * the destination register is already assigned to ECX:
1501                                  * we need to allocate another register for it and then
1502                                  * copy from this to ECX.
1503                                  */
1504                                 if (val == X86_ECX && ins->dreg != ins->sreg2) {
1505                                         int new_dest = mono_regstate_alloc_int (rs, dest_mask);
1506                                         if (new_dest < 0)
1507                                                 new_dest = get_register_spilling (cfg, tmp, ins, dest_mask, ins->dreg);
1508                                         g_assert (new_dest >= 0);
1509                                         ins->dreg = new_dest;
1510                                         create_copy_ins (cfg, X86_ECX, new_dest, ins);
1511                                         need_ecx_spill = FALSE;
1512                                         /*DEBUG (g_print ("\tforced spill of R%d\n", ins->dreg));
1513                                         val = get_register_force_spilling (cfg, tmp, ins, ins->dreg);
1514                                         rs->iassign [ins->dreg] = val;
1515                                         rs->isymbolic [val] = prev_dreg;
1516                                         ins->dreg = val;*/
1517                                 }
1518                                 val = rs->iassign [ins->sreg1];
1519                                 if (val == X86_ECX) {
1520                                         g_assert_not_reached ();
1521                                 } else if (val >= 0) {
1522                                         /* 
1523                                          * the first src reg was already assigned to a register,
1524                                          * we need to copy it to the dest register because the 
1525                                          * shift instruction clobbers the first operand.
1526                                          */
1527                                         MonoInst *copy = create_copy_ins (cfg, ins->dreg, val, NULL);
1528                                         insert_before_ins (ins, tmp, copy);
1529                                 }
1530                                 val = rs->iassign [ins->sreg2];
1531                                 if (val >= 0 && val != X86_ECX) {
1532                                         MonoInst *move = create_copy_ins (cfg, X86_ECX, val, NULL);
1533                                         DEBUG (g_print ("\tmoved arg from R%d (%d) to ECX\n", val, ins->sreg2));
1534                                         move->next = ins;
1535                                         g_assert_not_reached ();
1536                                         /* FIXME: where is move connected to the instruction list? */
1537                                         //tmp->prev->data->next = move;
1538                                 }
1539                                 if (need_ecx_spill && !(rs->ifree_mask & (1 << X86_ECX))) {
1540                                         DEBUG (g_print ("\tforced spill of R%d\n", rs->isymbolic [X86_ECX]));
1541                                         get_register_force_spilling (cfg, tmp, ins, rs->isymbolic [X86_ECX]);
1542                                         mono_regstate_free_int (rs, X86_ECX);
1543                                 }
1544                                 /* force-set sreg2 */
1545                                 rs->iassign [ins->sreg2] = X86_ECX;
1546                                 rs->isymbolic [X86_ECX] = ins->sreg2;
1547                                 ins->sreg2 = X86_ECX;
1548                                 rs->ifree_mask &= ~ (1 << X86_ECX);
1549                         }
1550                 } else if (spec [MONO_INST_CLOB] == 'd') { /* division */
1551                         int dest_reg = X86_EAX;
1552                         int clob_reg = X86_EDX;
1553                         if (spec [MONO_INST_DEST] == 'd') {
1554                                 dest_reg = X86_EDX; /* remainder */
1555                                 clob_reg = X86_EAX;
1556                         }
1557                         val = rs->iassign [ins->dreg];
1558                         if (0 && val >= 0 && val != dest_reg && !(rs->ifree_mask & (1 << dest_reg))) {
1559                                 DEBUG (g_print ("\tforced spill of R%d\n", rs->isymbolic [dest_reg]));
1560                                 get_register_force_spilling (cfg, tmp, ins, rs->isymbolic [dest_reg]);
1561                                 mono_regstate_free_int (rs, dest_reg);
1562                         }
1563                         if (val < 0) {
1564                                 if (val < -1) {
1565                                         /* the register gets spilled after this inst */
1566                                         int spill = -val -1;
1567                                         dest_mask = 1 << clob_reg;
1568                                         prev_dreg = ins->dreg;
1569                                         val = mono_regstate_alloc_int (rs, dest_mask);
1570                                         if (val < 0)
1571                                                 val = get_register_spilling (cfg, tmp, ins, dest_mask, ins->dreg);
1572                                         rs->iassign [ins->dreg] = val;
1573                                         if (spill)
1574                                                 create_spilled_store (cfg, spill, val, prev_dreg, ins);
1575                                         DEBUG (g_print ("\tassigned dreg %s to dest R%d\n", mono_arch_regname (val), ins->dreg));
1576                                         rs->isymbolic [val] = prev_dreg;
1577                                         ins->dreg = val;
1578                                         if (val != dest_reg) { /* force a copy */
1579                                                 create_copy_ins (cfg, val, dest_reg, ins);
1580                                         }
1581                                 } else {
1582                                         DEBUG (g_print ("\tshortcut assignment of R%d to %s\n", ins->dreg, mono_arch_regname (dest_reg)));
1583                                         rs->iassign [ins->dreg] = dest_reg;
1584                                         rs->isymbolic [dest_reg] = ins->dreg;
1585                                         ins->dreg = dest_reg;
1586                                         rs->ifree_mask &= ~ (1 << dest_reg);
1587                                 }
1588                         } else {
1589                                 //DEBUG (g_print ("dest reg in div assigned: %s\n", mono_arch_regname (val)));
1590                                 if (val != dest_reg) { /* force a copy */
1591                                         create_copy_ins (cfg, val, dest_reg, ins);
1592                                         if (!(rs->ifree_mask & (1 << dest_reg)) && rs->isymbolic [dest_reg] >= MONO_MAX_IREGS) {
1593                                                 DEBUG (g_print ("\tforced spill of R%d\n", rs->isymbolic [dest_reg]));
1594                                                 get_register_force_spilling (cfg, tmp, ins, rs->isymbolic [dest_reg]);
1595                                                 mono_regstate_free_int (rs, dest_reg);
1596                                         }
1597                                 }
1598                         }
1599                         src1_mask = 1 << X86_EAX;
1600                         src2_mask = 1 << X86_ECX;
1601                 }
1602                 if (spec [MONO_INST_DEST] == 'l') {
1603                         if (!(rs->ifree_mask & (1 << X86_EAX))) {
1604                                 DEBUG (g_print ("\tforced spill of R%d\n", rs->isymbolic [X86_EAX]));
1605                                 get_register_force_spilling (cfg, tmp, ins, rs->isymbolic [X86_EAX]);
1606                                 mono_regstate_free_int (rs, X86_EAX);
1607                         }
1608                         if (!(rs->ifree_mask & (1 << X86_EDX))) {
1609                                 DEBUG (g_print ("\tforced spill of R%d\n", rs->isymbolic [X86_EDX]));
1610                                 get_register_force_spilling (cfg, tmp, ins, rs->isymbolic [X86_EDX]);
1611                                 mono_regstate_free_int (rs, X86_EDX);
1612                         }
1613                 }
1614
1615                 /* update for use with FP regs... */
1616                 if (spec [MONO_INST_DEST] != 'f' && ins->dreg >= MONO_MAX_IREGS) {
1617                         val = rs->iassign [ins->dreg];
1618                         prev_dreg = ins->dreg;
1619                         if (val < 0) {
1620                                 int spill = 0;
1621                                 if (val < -1) {
1622                                         /* the register gets spilled after this inst */
1623                                         spill = -val -1;
1624                                 }
1625                                 val = mono_regstate_alloc_int (rs, dest_mask);
1626                                 if (val < 0)
1627                                         val = get_register_spilling (cfg, tmp, ins, dest_mask, ins->dreg);
1628                                 rs->iassign [ins->dreg] = val;
1629                                 if (spill)
1630                                         create_spilled_store (cfg, spill, val, prev_dreg, ins);
1631                         }
1632                         DEBUG (g_print ("\tassigned dreg %s to dest R%d\n", mono_arch_regname (val), ins->dreg));
1633                         rs->isymbolic [val] = prev_dreg;
1634                         ins->dreg = val;
1635                         if (spec [MONO_INST_DEST] == 'l') {
1636                                 int hreg = prev_dreg + 1;
1637                                 val = rs->iassign [hreg];
1638                                 if (val < 0) {
1639                                         int spill = 0;
1640                                         if (val < -1) {
1641                                                 /* the register gets spilled after this inst */
1642                                                 spill = -val -1;
1643                                         }
1644                                         val = mono_regstate_alloc_int (rs, dest_mask);
1645                                         if (val < 0)
1646                                                 val = get_register_spilling (cfg, tmp, ins, dest_mask, hreg);
1647                                         rs->iassign [hreg] = val;
1648                                         if (spill)
1649                                                 create_spilled_store (cfg, spill, val, hreg, ins);
1650                                 }
1651                                 DEBUG (g_print ("\tassigned hreg %s to dest R%d\n", mono_arch_regname (val), hreg));
1652                                 rs->isymbolic [val] = hreg;
1653                                 /* FIXME:? ins->dreg = val; */
1654                                 if (ins->dreg == X86_EAX) {
1655                                         if (val != X86_EDX)
1656                                                 create_copy_ins (cfg, val, X86_EDX, ins);
1657                                 } else if (ins->dreg == X86_EDX) {
1658                                         if (val == X86_EAX) {
1659                                                 /* swap */
1660                                                 g_assert_not_reached ();
1661                                         } else {
1662                                                 /* two forced copies */
1663                                                 create_copy_ins (cfg, val, X86_EDX, ins);
1664                                                 create_copy_ins (cfg, ins->dreg, X86_EAX, ins);
1665                                         }
1666                                 } else {
1667                                         if (val == X86_EDX) {
1668                                                 create_copy_ins (cfg, ins->dreg, X86_EAX, ins);
1669                                         } else {
1670                                                 /* two forced copies */
1671                                                 create_copy_ins (cfg, val, X86_EDX, ins);
1672                                                 create_copy_ins (cfg, ins->dreg, X86_EAX, ins);
1673                                         }
1674                                 }
1675                                 if (reg_is_freeable (val) && hreg >= 0 && reginfo [hreg].born_in >= i) {
1676                                         DEBUG (g_print ("\tfreeable %s (R%d)\n", mono_arch_regname (val), hreg));
1677                                         mono_regstate_free_int (rs, val);
1678                                 }
1679                         } else if (spec [MONO_INST_DEST] == 'a' && ins->dreg != X86_EAX && spec [MONO_INST_CLOB] != 'd') {
1680                                 /* this instruction only outputs to EAX, need to copy */
1681                                 create_copy_ins (cfg, ins->dreg, X86_EAX, ins);
1682                         } else if (spec [MONO_INST_DEST] == 'd' && ins->dreg != X86_EDX && spec [MONO_INST_CLOB] != 'd') {
1683                                 create_copy_ins (cfg, ins->dreg, X86_EDX, ins);
1684                         }
1685                 } else {
1686                         prev_dreg = -1;
1687                 }
1688                 if (spec [MONO_INST_DEST] != 'f' && reg_is_freeable (ins->dreg) && prev_dreg >= 0 && reginfo [prev_dreg].born_in >= i) {
1689                         DEBUG (g_print ("\tfreeable %s (R%d) (born in %d)\n", mono_arch_regname (ins->dreg), prev_dreg, reginfo [prev_dreg].born_in));
1690                         mono_regstate_free_int (rs, ins->dreg);
1691                 }
1692                 /* put src1 in EAX if it needs to be */
1693                 if (spec [MONO_INST_SRC1] == 'a') {
1694                         if (!(rs->ifree_mask & (1 << X86_EAX))) {
1695                                 DEBUG (g_print ("\tforced spill of R%d\n", rs->isymbolic [X86_EAX]));
1696                                 get_register_force_spilling (cfg, tmp, ins, rs->isymbolic [X86_EAX]);
1697                                 mono_regstate_free_int (rs, X86_EAX);
1698                         }
1699                         /* force-set sreg1 */
1700                         rs->iassign [ins->sreg1] = X86_EAX;
1701                         rs->isymbolic [X86_EAX] = ins->sreg1;
1702                         ins->sreg1 = X86_EAX;
1703                         rs->ifree_mask &= ~ (1 << X86_EAX);
1704                 }
1705                 if (spec [MONO_INST_SRC1] != 'f' && ins->sreg1 >= MONO_MAX_IREGS) {
1706                         val = rs->iassign [ins->sreg1];
1707                         prev_sreg1 = ins->sreg1;
1708                         if (val < 0) {
1709                                 int spill = 0;
1710                                 if (val < -1) {
1711                                         /* the register gets spilled after this inst */
1712                                         spill = -val -1;
1713                                 }
1714                                 if (0 && ins->opcode == OP_MOVE) {
1715                                         /* 
1716                                          * small optimization: the dest register is already allocated
1717                                          * but the src one is not: we can simply assign the same register
1718                                          * here and the peephole pass will get rid of the instruction later.
1719                                          * This optimization may interfere with the clobbering handling:
1720                                          * it removes a mov operation that will be added again to handle clobbering.
1721                                          * There are also some other issues that should be checked with make testjit.
1722                                          */
1723                                         mono_regstate_alloc_int (rs, 1 << ins->dreg);
1724                                         val = rs->iassign [ins->sreg1] = ins->dreg;
1725                                         //g_assert (val >= 0);
1726                                         DEBUG (g_print ("\tfast assigned sreg1 %s to R%d\n", mono_arch_regname (val), ins->sreg1));
1727                                 } else {
1728                                         //g_assert (val == -1); /* source cannot be spilled */
1729                                         val = mono_regstate_alloc_int (rs, src1_mask);
1730                                         if (val < 0)
1731                                                 val = get_register_spilling (cfg, tmp, ins, src1_mask, ins->sreg1);
1732                                         rs->iassign [ins->sreg1] = val;
1733                                         DEBUG (g_print ("\tassigned sreg1 %s to R%d\n", mono_arch_regname (val), ins->sreg1));
1734                                 }
1735                                 if (spill) {
1736                                         MonoInst *store = create_spilled_store (cfg, spill, val, prev_sreg1, NULL);
1737                                         insert_before_ins (ins, tmp, store);
1738                                 }
1739                         }
1740                         rs->isymbolic [val] = prev_sreg1;
1741                         ins->sreg1 = val;
1742                 } else {
1743                         prev_sreg1 = -1;
1744                 }
1745                 /* handle clobbering of sreg1 */
1746                 if ((spec [MONO_INST_CLOB] == '1' || spec [MONO_INST_CLOB] == 's') && ins->dreg != ins->sreg1) {
1747                         MonoInst *copy = create_copy_ins (cfg, ins->dreg, ins->sreg1, NULL);
1748                         DEBUG (g_print ("\tneed to copy sreg1 %s to dreg %s\n", mono_arch_regname (ins->sreg1), mono_arch_regname (ins->dreg)));
1749                         if (ins->sreg2 == -1 || spec [MONO_INST_CLOB] == 's') {
1750                                 /* note: the copy is inserted before the current instruction! */
1751                                 insert_before_ins (ins, tmp, copy);
1752                                 /* we set sreg1 to dest as well */
1753                                 prev_sreg1 = ins->sreg1 = ins->dreg;
1754                         } else {
1755                                 /* inserted after the operation */
1756                                 copy->next = ins->next;
1757                                 ins->next = copy;
1758                         }
1759                 }
1760                 if (spec [MONO_INST_SRC2] != 'f' && ins->sreg2 >= MONO_MAX_IREGS) {
1761                         val = rs->iassign [ins->sreg2];
1762                         prev_sreg2 = ins->sreg2;
1763                         if (val < 0) {
1764                                 int spill = 0;
1765                                 if (val < -1) {
1766                                         /* the register gets spilled after this inst */
1767                                         spill = -val -1;
1768                                 }
1769                                 val = mono_regstate_alloc_int (rs, src2_mask);
1770                                 if (val < 0)
1771                                         val = get_register_spilling (cfg, tmp, ins, src2_mask, ins->sreg2);
1772                                 rs->iassign [ins->sreg2] = val;
1773                                 DEBUG (g_print ("\tassigned sreg2 %s to R%d\n", mono_arch_regname (val), ins->sreg2));
1774                                 if (spill)
1775                                         create_spilled_store (cfg, spill, val, prev_sreg2, ins);
1776                         }
1777                         rs->isymbolic [val] = prev_sreg2;
1778                         ins->sreg2 = val;
1779                         if (spec [MONO_INST_CLOB] == 's' && ins->sreg2 != X86_ECX) {
1780                                 DEBUG (g_print ("\tassigned sreg2 %s to R%d, but ECX is needed (R%d)\n", mono_arch_regname (val), ins->sreg2, rs->iassign [X86_ECX]));
1781                         }
1782                 } else {
1783                         prev_sreg2 = -1;
1784                 }
1785
1786                 if (spec [MONO_INST_CLOB] == 'c') {
1787                         int j, s;
1788                         guint32 clob_mask = X86_CALLEE_REGS;
1789                         for (j = 0; j < MONO_MAX_IREGS; ++j) {
1790                                 s = 1 << j;
1791                                 if ((clob_mask & s) && !(rs->ifree_mask & s) && j != ins->sreg1) {
1792                                         //g_warning ("register %s busy at call site\n", mono_arch_regname (j));
1793                                 }
1794                         }
1795                 }
1796                 /*if (reg_is_freeable (ins->sreg1) && prev_sreg1 >= 0 && reginfo [prev_sreg1].born_in >= i) {
1797                         DEBUG (g_print ("freeable %s\n", mono_arch_regname (ins->sreg1)));
1798                         mono_regstate_free_int (rs, ins->sreg1);
1799                 }
1800                 if (reg_is_freeable (ins->sreg2) && prev_sreg2 >= 0 && reginfo [prev_sreg2].born_in >= i) {
1801                         DEBUG (g_print ("freeable %s\n", mono_arch_regname (ins->sreg2)));
1802                         mono_regstate_free_int (rs, ins->sreg2);
1803                 }*/
1804                 
1805                 //DEBUG (print_ins (i, ins));
1806                 /* this may result from an insert_before call */
1807                 if (!tmp->next)
1808                         bb->code = tmp->data;
1809                 tmp = tmp->next;
1810         }
1811 }
1812
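/*
 * Convert the value on top of the x87 FP stack to an integer of 'size' bytes
 * using truncation: the FPU control word is saved on the stack, its
 * rounding-control bits are forced to round-toward-zero (0xc00), the value
 * is stored with fistp, and the original control word is restored.
 */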
1813 static unsigned char*
1814 emit_float_to_int (MonoCompile *cfg, guchar *code, int dreg, int size, gboolean is_signed)
1815 {
1816         x86_alu_reg_imm (code, X86_SUB, X86_ESP, 4);
1817         x86_fnstcw_membase(code, X86_ESP, 0);
1818         x86_mov_reg_membase (code, dreg, X86_ESP, 0, 2);
1819         x86_alu_reg_imm (code, X86_OR, dreg, 0xc00);
1820         x86_mov_membase_reg (code, X86_ESP, 2, dreg, 2);
1821         x86_fldcw_membase (code, X86_ESP, 2);
1822         if (size == 8) {
1823                 x86_alu_reg_imm (code, X86_SUB, X86_ESP, 8);
1824                 x86_fist_pop_membase (code, X86_ESP, 0, TRUE);
1825                 x86_pop_reg (code, dreg);
1826                 /* FIXME: need the high register 
1827                  * x86_pop_reg (code, dreg_high);
1828                  */
1829         } else {
1830                 x86_push_reg (code, X86_EAX); // SP = SP - 4
1831                 x86_fist_pop_membase (code, X86_ESP, 0, FALSE);
1832                 x86_pop_reg (code, dreg);
1833         }
1834         x86_fldcw_membase (code, X86_ESP, 0);
1835         x86_alu_reg_imm (code, X86_ADD, X86_ESP, 4);
1836
1837         if (size == 1)
1838                 x86_widen_reg (code, dreg, dreg, is_signed, FALSE);
1839         else if (size == 2)
1840                 x86_widen_reg (code, dreg, dreg, is_signed, TRUE);
1841         return code;
1842 }
1843
1844 static unsigned char*
1845 mono_emit_stack_alloc (guchar *code, MonoInst* tree)
1846 {
1847         int sreg = tree->sreg1;
1848 #ifdef PLATFORM_WIN32
1849         guint8* br[5];
1850
1851         /*
1852          * Under Windows:
1853          * if the requested stack size is larger than one page,
1854          * perform a stack-touch operation.
1855          */
1856         /*
1857          * Generate stack probe code.
1858          * Under Windows it is necessary to allocate one page at a time,
1859          * "touching" the stack after each successful sub-allocation. This is
1860          * because of the way stack growth is implemented - there is a
1861          * guard page before the lowest stack page that is currently committed.
1862          * The stack normally grows sequentially, so the OS traps accesses to
1863          * the guard page and commits more pages when needed.
1864          */
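        /*
         * Sketch of the sequence emitted below ('sreg' holds the requested
         * size; labels are only illustrative):
         *
         *       test sreg, ~0xfff
         *       jz   small           ; less than a page: a plain sub will do
         *   loop:
         *       sub  esp, 0x1000
         *       test [esp], esp      ; touch the newly committed page
         *       sub  sreg, 0x1000
         *       cmp  sreg, 0x1000
         *       jae  loop
         *       test sreg, sreg
         *       jz   done
         *       sub  esp, sreg       ; remaining partial page
         *       jmp  done
         *   small:
         *       sub  esp, sreg
         *   done:
         */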
1865         x86_test_reg_imm (code, sreg, ~0xFFF);
1866         br[0] = code; x86_branch8 (code, X86_CC_Z, 0, FALSE);
1867
1868         br[2] = code; /* loop */
1869         x86_alu_reg_imm (code, X86_SUB, X86_ESP, 0x1000);
1870         x86_test_membase_reg (code, X86_ESP, 0, X86_ESP);
1871         x86_alu_reg_imm (code, X86_SUB, sreg, 0x1000);
1872         x86_alu_reg_imm (code, X86_CMP, sreg, 0x1000);
1873         br[3] = code; x86_branch8 (code, X86_CC_AE, 0, FALSE);
1874         x86_patch (br[3], br[2]);
1875         x86_test_reg_reg (code, sreg, sreg);
1876         br[4] = code; x86_branch8 (code, X86_CC_Z, 0, FALSE);
1877         x86_alu_reg_reg (code, X86_SUB, X86_ESP, sreg);
1878
1879         br[1] = code; x86_jump8 (code, 0);
1880
1881         x86_patch (br[0], code);
1882         x86_alu_reg_reg (code, X86_SUB, X86_ESP, sreg);
1883         x86_patch (br[1], code);
1884         x86_patch (br[4], code);
1885 #else /* PLATFORM_WIN32 */
1886         x86_alu_reg_reg (code, X86_SUB, X86_ESP, tree->sreg1);
1887 #endif
1888         if (tree->flags & MONO_INST_INIT) {
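                /*
                 * MONO_INST_INIT: zero the freshly allocated area with
                 * "rep stosl" (ECX = size in dwords, EAX = 0, EDI = start of
                 * the area), saving and restoring EAX/ECX/EDI around it when
                 * they are not already dreg/sreg.
                 */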
1889                 int offset = 0;
1890                 if (tree->dreg != X86_EAX && sreg != X86_EAX) {
1891                         x86_push_reg (code, X86_EAX);
1892                         offset += 4;
1893                 }
1894                 if (tree->dreg != X86_ECX && sreg != X86_ECX) {
1895                         x86_push_reg (code, X86_ECX);
1896                         offset += 4;
1897                 }
1898                 if (tree->dreg != X86_EDI && sreg != X86_EDI) {
1899                         x86_push_reg (code, X86_EDI);
1900                         offset += 4;
1901                 }
1902                 
1903                 x86_shift_reg_imm (code, X86_SHR, sreg, 2);
1904                 if (sreg != X86_ECX)
1905                         x86_mov_reg_reg (code, X86_ECX, sreg, 4);
1906                 x86_alu_reg_reg (code, X86_XOR, X86_EAX, X86_EAX);
1907                                 
1908                 x86_lea_membase (code, X86_EDI, X86_ESP, offset);
1909                 x86_cld (code);
1910                 x86_prefix (code, X86_REP_PREFIX);
1911                 x86_stosl (code);
1912                 
1913                 if (tree->dreg != X86_EDI && sreg != X86_EDI)
1914                         x86_pop_reg (code, X86_EDI);
1915                 if (tree->dreg != X86_ECX && sreg != X86_ECX)
1916                         x86_pop_reg (code, X86_ECX);
1917                 if (tree->dreg != X86_EAX && sreg != X86_EAX)
1918                         x86_pop_reg (code, X86_EAX);
1919         }
1920         return code;
1921 }
1922
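/*
 * Debug helper: emits code that calls printf (text " %d %p\n", reg, <value
 * of reg>) while preserving EAX, EDX and ECX around the call.
 */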
1923 #define REAL_PRINT_REG(text,reg) \
1924 mono_assert (reg >= 0); \
1925 x86_push_reg (code, X86_EAX); \
1926 x86_push_reg (code, X86_EDX); \
1927 x86_push_reg (code, X86_ECX); \
1928 x86_push_reg (code, reg); \
1929 x86_push_imm (code, reg); \
1930 x86_push_imm (code, text " %d %p\n"); \
1931 x86_mov_reg_imm (code, X86_EAX, printf); \
1932 x86_call_reg (code, X86_EAX); \
1933 x86_alu_reg_imm (code, X86_ADD, X86_ESP, 3*4); \
1934 x86_pop_reg (code, X86_ECX); \
1935 x86_pop_reg (code, X86_EDX); \
1936 x86_pop_reg (code, X86_EAX);
1937
1938 void
1939 mono_arch_output_basic_block (MonoCompile *cfg, MonoBasicBlock *bb)
1940 {
1941         MonoInst *ins;
1942         MonoCallInst *call;
1943         guint offset;
1944         guint8 *code = cfg->native_code + cfg->code_len;
1945         MonoInst *last_ins = NULL;
1946         guint last_offset = 0;
1947         int max_len, cpos;
1948
1949         if (cfg->opt & MONO_OPT_PEEPHOLE)
1950                 peephole_pass (cfg, bb);
1951
1952 #if 0
1953         /* 
1954          * Various strategies to align BBs. Using real loop detection or simply
1955          * aligning every block leads to more consistent benchmark results,
1956          * but usually slows down the code.
1957          * We should do the alignment outside this function, or adjust
1958          * bb->native_offset as well, or the code is effectively slowed down!
1959          */
1960         /* align all blocks */
1961 //      if ((pad = (cfg->code_len & (align - 1)))) {
1962         /* poor man's loop start detection */
1963 //      if (bb->code && bb->in_count && bb->in_bb [0]->cil_code > bb->cil_code && (pad = (cfg->code_len & (align - 1)))) {
1964         /* consider real loop detection and nesting level */
1965 //      if (bb->loop_blocks && bb->nesting < 3 && (pad = (cfg->code_len & (align - 1)))) {
1966         /* consider real loop detection */
1967         if (bb->loop_blocks && (pad = (cfg->code_len & (align - 1)))) {
1968                 pad = align - pad;
1969                 x86_padding (code, pad);
1970                 cfg->code_len += pad;
1971                 bb->native_offset = cfg->code_len;
1972         }
1973 #endif
1974
1975         if (cfg->verbose_level > 2)
1976                 g_print ("Basic block %d starting at offset 0x%x\n", bb->block_num, bb->native_offset);
1977
1978         cpos = bb->max_offset;
1979
1980         if (cfg->prof_options & MONO_PROFILE_COVERAGE) {
1981                 MonoProfileCoverageInfo *cov = cfg->coverage_info;
1982                 g_assert (!mono_compile_aot);
1983                 cpos += 6;
1984
1985                 cov->data [bb->dfn].cil_code = bb->cil_code;
1986                 /* this is not thread safe, but good enough */
1987                 x86_inc_mem (code, &cov->data [bb->dfn].count); 
1988         }
1989
1990         offset = code - cfg->native_code;
1991
1992         ins = bb->code;
1993         while (ins) {
1994                 offset = code - cfg->native_code;
1995
1996                 max_len = ((guint8 *)ins_spec [ins->opcode])[MONO_INST_LEN];
1997
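                /*
                 * Make sure the code buffer has room for the worst-case
                 * encoding of this opcode (plus some slack); grow it if not.
                 */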
1998                 if (offset > (cfg->code_size - max_len - 16)) {
1999                         cfg->code_size *= 2;
2000                         cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
2001                         code = cfg->native_code + offset;
2002                         mono_jit_stats.code_reallocs++;
2003                 }
2004
2005                 mono_debug_record_line_number (cfg, ins, offset);
2006
2007                 switch (ins->opcode) {
2008                 case OP_BIGMUL:
2009                         x86_mul_reg (code, ins->sreg2, TRUE);
2010                         break;
2011                 case OP_BIGMUL_UN:
2012                         x86_mul_reg (code, ins->sreg2, FALSE);
2013                         break;
2014                 case OP_X86_SETEQ_MEMBASE:
2015                         x86_set_membase (code, X86_CC_EQ, ins->inst_basereg, ins->inst_offset, TRUE);
2016                         break;
2017                 case OP_STOREI1_MEMBASE_IMM:
2018                         x86_mov_membase_imm (code, ins->inst_destbasereg, ins->inst_offset, ins->inst_imm, 1);
2019                         break;
2020                 case OP_STOREI2_MEMBASE_IMM:
2021                         x86_mov_membase_imm (code, ins->inst_destbasereg, ins->inst_offset, ins->inst_imm, 2);
2022                         break;
2023                 case OP_STORE_MEMBASE_IMM:
2024                 case OP_STOREI4_MEMBASE_IMM:
2025                         x86_mov_membase_imm (code, ins->inst_destbasereg, ins->inst_offset, ins->inst_imm, 4);
2026                         break;
2027                 case OP_STOREI1_MEMBASE_REG:
2028                         x86_mov_membase_reg (code, ins->inst_destbasereg, ins->inst_offset, ins->sreg1, 1);
2029                         break;
2030                 case OP_STOREI2_MEMBASE_REG:
2031                         x86_mov_membase_reg (code, ins->inst_destbasereg, ins->inst_offset, ins->sreg1, 2);
2032                         break;
2033                 case OP_STORE_MEMBASE_REG:
2034                 case OP_STOREI4_MEMBASE_REG:
2035                         x86_mov_membase_reg (code, ins->inst_destbasereg, ins->inst_offset, ins->sreg1, 4);
2036                         break;
2037                 case CEE_LDIND_I:
2038                 case CEE_LDIND_I4:
2039                 case CEE_LDIND_U4:
2040                         x86_mov_reg_mem (code, ins->dreg, ins->inst_p0, 4);
2041                         break;
2042                 case OP_LOADU4_MEM:
2043                         x86_mov_reg_imm (code, ins->dreg, ins->inst_p0);
2044                         x86_mov_reg_membase (code, ins->dreg, ins->dreg, 0, 4);
2045                         break;
2046                 case OP_LOAD_MEMBASE:
2047                 case OP_LOADI4_MEMBASE:
2048                 case OP_LOADU4_MEMBASE:
2049                         x86_mov_reg_membase (code, ins->dreg, ins->inst_basereg, ins->inst_offset, 4);
2050                         break;
2051                 case OP_LOADU1_MEMBASE:
2052                         x86_widen_membase (code, ins->dreg, ins->inst_basereg, ins->inst_offset, FALSE, FALSE);
2053                         break;
2054                 case OP_LOADI1_MEMBASE:
2055                         x86_widen_membase (code, ins->dreg, ins->inst_basereg, ins->inst_offset, TRUE, FALSE);
2056                         break;
2057                 case OP_LOADU2_MEMBASE:
2058                         x86_widen_membase (code, ins->dreg, ins->inst_basereg, ins->inst_offset, FALSE, TRUE);
2059                         break;
2060                 case OP_LOADI2_MEMBASE:
2061                         x86_widen_membase (code, ins->dreg, ins->inst_basereg, ins->inst_offset, TRUE, TRUE);
2062                         break;
2063                 case CEE_CONV_I1:
2064                         x86_widen_reg (code, ins->dreg, ins->sreg1, TRUE, FALSE);
2065                         break;
2066                 case CEE_CONV_I2:
2067                         x86_widen_reg (code, ins->dreg, ins->sreg1, TRUE, TRUE);
2068                         break;
2069                 case CEE_CONV_U1:
2070                         x86_widen_reg (code, ins->dreg, ins->sreg1, FALSE, FALSE);
2071                         break;
2072                 case CEE_CONV_U2:
2073                         x86_widen_reg (code, ins->dreg, ins->sreg1, FALSE, TRUE);
2074                         break;
2075                 case OP_COMPARE:
2076                         x86_alu_reg_reg (code, X86_CMP, ins->sreg1, ins->sreg2);
2077                         break;
2078                 case OP_COMPARE_IMM:
2079                         x86_alu_reg_imm (code, X86_CMP, ins->sreg1, ins->inst_imm);
2080                         break;
2081                 case OP_X86_COMPARE_MEMBASE_REG:
2082                         x86_alu_membase_reg (code, X86_CMP, ins->inst_basereg, ins->inst_offset, ins->sreg2);
2083                         break;
2084                 case OP_X86_COMPARE_MEMBASE_IMM:
2085                         x86_alu_membase_imm (code, X86_CMP, ins->inst_basereg, ins->inst_offset, ins->inst_imm);
2086                         break;
2087                 case OP_X86_COMPARE_REG_MEMBASE:
2088                         x86_alu_reg_membase (code, X86_CMP, ins->sreg1, ins->sreg2, ins->inst_offset);
2089                         break;
2090                 case OP_X86_TEST_NULL:
2091                         x86_test_reg_reg (code, ins->sreg1, ins->sreg1);
2092                         break;
2093                 case OP_X86_ADD_MEMBASE_IMM:
2094                         x86_alu_membase_imm (code, X86_ADD, ins->inst_basereg, ins->inst_offset, ins->inst_imm);
2095                         break;
2096                 case OP_X86_SUB_MEMBASE_IMM:
2097                         x86_alu_membase_imm (code, X86_SUB, ins->inst_basereg, ins->inst_offset, ins->inst_imm);
2098                         break;
2099                 case OP_X86_INC_MEMBASE:
2100                         x86_inc_membase (code, ins->inst_basereg, ins->inst_offset);
2101                         break;
2102                 case OP_X86_INC_REG:
2103                         x86_inc_reg (code, ins->dreg);
2104                         break;
2105                 case OP_X86_DEC_MEMBASE:
2106                         x86_dec_membase (code, ins->inst_basereg, ins->inst_offset);
2107                         break;
2108                 case OP_X86_DEC_REG:
2109                         x86_dec_reg (code, ins->dreg);
2110                         break;
2111                 case CEE_BREAK:
2112                         x86_breakpoint (code);
2113                         break;
2114                 case OP_ADDCC:
2115                 case CEE_ADD:
2116                         x86_alu_reg_reg (code, X86_ADD, ins->sreg1, ins->sreg2);
2117                         break;
2118                 case OP_ADC:
2119                         x86_alu_reg_reg (code, X86_ADC, ins->sreg1, ins->sreg2);
2120                         break;
2121                 case OP_ADD_IMM:
2122                         x86_alu_reg_imm (code, X86_ADD, ins->dreg, ins->inst_imm);
2123                         break;
2124                 case OP_ADC_IMM:
2125                         x86_alu_reg_imm (code, X86_ADC, ins->dreg, ins->inst_imm);
2126                         break;
2127                 case OP_SUBCC:
2128                 case CEE_SUB:
2129                         x86_alu_reg_reg (code, X86_SUB, ins->sreg1, ins->sreg2);
2130                         break;
2131                 case OP_SBB:
2132                         x86_alu_reg_reg (code, X86_SBB, ins->sreg1, ins->sreg2);
2133                         break;
2134                 case OP_SUB_IMM:
2135                         x86_alu_reg_imm (code, X86_SUB, ins->dreg, ins->inst_imm);
2136                         break;
2137                 case OP_SBB_IMM:
2138                         x86_alu_reg_imm (code, X86_SBB, ins->dreg, ins->inst_imm);
2139                         break;
2140                 case CEE_AND:
2141                         x86_alu_reg_reg (code, X86_AND, ins->sreg1, ins->sreg2);
2142                         break;
2143                 case OP_AND_IMM:
2144                         x86_alu_reg_imm (code, X86_AND, ins->sreg1, ins->inst_imm);
2145                         break;
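                /*
                 * Signed division/remainder sign-extend EAX into EDX with
                 * cdq before the divide; the unsigned variants clear EDX instead.
                 */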
2146                 case CEE_DIV:
2147                         x86_cdq (code);
2148                         x86_div_reg (code, ins->sreg2, TRUE);
2149                         break;
2150                 case CEE_DIV_UN:
2151                         x86_alu_reg_reg (code, X86_XOR, X86_EDX, X86_EDX);
2152                         x86_div_reg (code, ins->sreg2, FALSE);
2153                         break;
2154                 case OP_DIV_IMM:
2155                         x86_mov_reg_imm (code, ins->sreg2, ins->inst_imm);
2156                         x86_cdq (code);
2157                         x86_div_reg (code, ins->sreg2, TRUE);
2158                         break;
2159                 case CEE_REM:
2160                         x86_cdq (code);
2161                         x86_div_reg (code, ins->sreg2, TRUE);
2162                         break;
2163                 case CEE_REM_UN:
2164                         x86_alu_reg_reg (code, X86_XOR, X86_EDX, X86_EDX);
2165                         x86_div_reg (code, ins->sreg2, FALSE);
2166                         break;
2167                 case OP_REM_IMM:
2168                         x86_mov_reg_imm (code, ins->sreg2, ins->inst_imm);
2169                         x86_cdq (code);
2170                         x86_div_reg (code, ins->sreg2, TRUE);
2171                         break;
2172                 case CEE_OR:
2173                         x86_alu_reg_reg (code, X86_OR, ins->sreg1, ins->sreg2);
2174                         break;
2175                 case OP_OR_IMM:
2176                         x86_alu_reg_imm (code, X86_OR, ins->sreg1, ins->inst_imm);
2177                         break;
2178                 case CEE_XOR:
2179                         x86_alu_reg_reg (code, X86_XOR, ins->sreg1, ins->sreg2);
2180                         break;
2181                 case OP_XOR_IMM:
2182                         x86_alu_reg_imm (code, X86_XOR, ins->sreg1, ins->inst_imm);
2183                         break;
2184                 case CEE_SHL:
2185                         g_assert (ins->sreg2 == X86_ECX);
2186                         x86_shift_reg (code, X86_SHL, ins->dreg);
2187                         break;
2188                 case CEE_SHR:
2189                         g_assert (ins->sreg2 == X86_ECX);
2190                         x86_shift_reg (code, X86_SAR, ins->dreg);
2191                         break;
2192                 case OP_SHR_IMM:
2193                         x86_shift_reg_imm (code, X86_SAR, ins->dreg, ins->inst_imm);
2194                         break;
2195                 case OP_SHR_UN_IMM:
2196                         x86_shift_reg_imm (code, X86_SHR, ins->dreg, ins->inst_imm);
2197                         break;
2198                 case CEE_SHR_UN:
2199                         g_assert (ins->sreg2 == X86_ECX);
2200                         x86_shift_reg (code, X86_SHR, ins->dreg);
2201                         break;
2202                 case OP_SHL_IMM:
2203                         x86_shift_reg_imm (code, X86_SHL, ins->dreg, ins->inst_imm);
2204                         break;
2205                 case CEE_NOT:
2206                         x86_not_reg (code, ins->sreg1);
2207                         break;
2208                 case CEE_NEG:
2209                         x86_neg_reg (code, ins->sreg1);
2210                         break;
2211                 case OP_SEXT_I1:
2212                         x86_widen_reg (code, ins->dreg, ins->sreg1, TRUE, FALSE);
2213                         break;
2214                 case OP_SEXT_I2:
2215                         x86_widen_reg (code, ins->dreg, ins->sreg1, TRUE, TRUE);
2216                         break;
2217                 case CEE_MUL:
2218                         x86_imul_reg_reg (code, ins->sreg1, ins->sreg2);
2219                         break;
2220                 case OP_MUL_IMM:
2221                         x86_imul_reg_reg_imm (code, ins->dreg, ins->sreg1, ins->inst_imm);
2222                         break;
2223                 case CEE_MUL_OVF:
2224                         x86_imul_reg_reg (code, ins->sreg1, ins->sreg2);
2225                         EMIT_COND_SYSTEM_EXCEPTION (X86_CC_O, FALSE, "OverflowException");
2226                         break;
2227                 case CEE_MUL_OVF_UN: {
2228                         /* the mul operation and the exception check should most likely be split */
2229                         int non_eax_reg, saved_eax = FALSE, saved_edx = FALSE;
2230                         /*g_assert (ins->sreg2 == X86_EAX);
2231                         g_assert (ins->dreg == X86_EAX);*/
2232                         if (ins->sreg2 == X86_EAX) {
2233                                 non_eax_reg = ins->sreg1;
2234                         } else if (ins->sreg1 == X86_EAX) {
2235                                 non_eax_reg = ins->sreg2;
2236                         } else {
2237                                 /* no need to save since we're going to store to it anyway */
2238                                 if (ins->dreg != X86_EAX) {
2239                                         saved_eax = TRUE;
2240                                         x86_push_reg (code, X86_EAX);
2241                                 }
2242                                 x86_mov_reg_reg (code, X86_EAX, ins->sreg1, 4);
2243                                 non_eax_reg = ins->sreg2;
2244                         }
2245                         if (ins->dreg == X86_EDX) {
2246                                 if (!saved_eax) {
2247                                         saved_eax = TRUE;
2248                                         x86_push_reg (code, X86_EAX);
2249                                 }
2250                         } else if (ins->dreg != X86_EAX) {
2251                                 saved_edx = TRUE;
2252                                 x86_push_reg (code, X86_EDX);
2253                         }
2254                         x86_mul_reg (code, non_eax_reg, FALSE);
2255                         /* restore the saved regs and move the result before the overflow check, since pop and mov don't change the flags */
2256                         if (saved_edx)
2257                                 x86_pop_reg (code, X86_EDX);
2258                         if (saved_eax)
2259                                 x86_pop_reg (code, X86_EAX);
2260                         if (ins->dreg != X86_EAX)
2261                                 x86_mov_reg_reg (code, ins->dreg, X86_EAX, 4);
2262                         EMIT_COND_SYSTEM_EXCEPTION (X86_CC_O, FALSE, "OverflowException");
2263                         break;
2264                 }
2265                 case OP_ICONST:
2266                         x86_mov_reg_imm (code, ins->dreg, ins->inst_c0);
2267                         break;
2268                 case OP_AOTCONST:
2269                         mono_add_patch_info (cfg, offset, (MonoJumpInfoType)ins->inst_i1, ins->inst_p0);
2270                         x86_mov_reg_imm (code, ins->dreg, 0);
2271                         break;
2272                 case CEE_CONV_I4:
2273                 case CEE_CONV_U4:
2274                 case OP_MOVE:
2275                         x86_mov_reg_reg (code, ins->dreg, ins->sreg1, 4);
2276                         break;
2277                 case CEE_JMP: {
2278                         /*
2279                          * Note: this 'frame destruction' logic is useful for tail calls, too.
2280                          * Keep in sync with the code in emit_epilog.
2281                          */
2282                         int pos = 0;
2283
2284                         /* FIXME: no tracing support... */
2285                         if (cfg->prof_options & MONO_PROFILE_ENTER_LEAVE)
2286                                 code = mono_arch_instrument_epilog (cfg, mono_profiler_method_leave, code, FALSE);
2287                         /* reset offset to make max_len work */
2288                         offset = code - cfg->native_code;
2289
2290                         g_assert (!cfg->method->save_lmf);
2291
2292                         if (cfg->used_int_regs & (1 << X86_EBX))
2293                                 pos -= 4;
2294                         if (cfg->used_int_regs & (1 << X86_EDI))
2295                                 pos -= 4;
2296                         if (cfg->used_int_regs & (1 << X86_ESI))
2297                                 pos -= 4;
2298                         if (pos)
2299                                 x86_lea_membase (code, X86_ESP, X86_EBP, pos);
2300         
2301                         if (cfg->used_int_regs & (1 << X86_ESI))
2302                                 x86_pop_reg (code, X86_ESI);
2303                         if (cfg->used_int_regs & (1 << X86_EDI))
2304                                 x86_pop_reg (code, X86_EDI);
2305                         if (cfg->used_int_regs & (1 << X86_EBX))
2306                                 x86_pop_reg (code, X86_EBX);
2307         
2308                         /* restore ESP/EBP */
2309                         x86_leave (code);
2310                         offset = code - cfg->native_code;
2311                         mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_METHOD_JUMP, ins->inst_p0);
2312                         x86_jump32 (code, 0);
2313                         break;
2314                 }
2315                 case OP_CHECK_THIS:
2316                         /* ensure ins->sreg1 is not NULL */
2317                         x86_alu_membase_imm (code, X86_CMP, ins->sreg1, 0, 0);
2318                         break;
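                /*
                 * OP_ARGLIST: store the address of the signature cookie
                 * (at EBP + cfg->sig_cookie) into the location pointed to by
                 * sreg1, using a scratch register that is saved and restored.
                 */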
2319                 case OP_ARGLIST: {
2320                         int hreg = ins->sreg1 == X86_EAX? X86_ECX: X86_EAX;
2321                         x86_push_reg (code, hreg);
2322                         x86_lea_membase (code, hreg, X86_EBP, cfg->sig_cookie);
2323                         x86_mov_membase_reg (code, ins->sreg1, 0, hreg, 4);
2324                         x86_pop_reg (code, hreg);
2325                         break;
2326                 }
2327                 case OP_FCALL:
2328                 case OP_LCALL:
2329                 case OP_VCALL:
2330                 case OP_VOIDCALL:
2331                 case CEE_CALL:
2332                         call = (MonoCallInst*)ins;
2333                         if (ins->flags & MONO_INST_HAS_METHOD)
2334                                 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_METHOD, call->method);
2335                         else {
2336                                 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_ABS, call->fptr);
2337                         }
2338                         x86_call_code (code, 0);
2339                         if (call->stack_usage && (call->signature->call_convention != MONO_CALL_STDCALL))
2340                                 x86_alu_reg_imm (code, X86_ADD, X86_ESP, call->stack_usage);
2341                         break;
2342                 case OP_FCALL_REG:
2343                 case OP_LCALL_REG:
2344                 case OP_VCALL_REG:
2345                 case OP_VOIDCALL_REG:
2346                 case OP_CALL_REG:
2347                         call = (MonoCallInst*)ins;
2348                         x86_call_reg (code, ins->sreg1);
2349                         if (call->stack_usage && (call->signature->call_convention != MONO_CALL_STDCALL))
2350                                 x86_alu_reg_imm (code, X86_ADD, X86_ESP, call->stack_usage);
2351                         break;
2352                 case OP_FCALL_MEMBASE:
2353                 case OP_LCALL_MEMBASE:
2354                 case OP_VCALL_MEMBASE:
2355                 case OP_VOIDCALL_MEMBASE:
2356                 case OP_CALL_MEMBASE:
2357                         call = (MonoCallInst*)ins;
2358                         x86_call_membase (code, ins->sreg1, ins->inst_offset);
2359                         if (call->stack_usage && (call->signature->call_convention != MONO_CALL_STDCALL))
2360                                 x86_alu_reg_imm (code, X86_ADD, X86_ESP, call->stack_usage);
2361                         break;
2362                 case OP_OUTARG:
2363                 case OP_X86_PUSH:
2364                         x86_push_reg (code, ins->sreg1);
2365                         break;
2366                 case OP_X86_PUSH_IMM:
2367                         x86_push_imm (code, ins->inst_imm);
2368                         break;
2369                 case OP_X86_PUSH_MEMBASE:
2370                         x86_push_membase (code, ins->inst_basereg, ins->inst_offset);
2371                         break;
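                /* OP_X86_PUSH_OBJ pushes a valuetype: reserve inst_imm bytes on the stack and copy
                 * the object there with rep movsd; ECX/ESI/EDI are saved around the copy because
                 * the string instructions clobber them */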
2372                 case OP_X86_PUSH_OBJ: 
2373                         x86_alu_reg_imm (code, X86_SUB, X86_ESP, ins->inst_imm);
2374                         x86_push_reg (code, X86_EDI);
2375                         x86_push_reg (code, X86_ESI);
2376                         x86_push_reg (code, X86_ECX);
2377                         if (ins->inst_offset)
2378                                 x86_lea_membase (code, X86_ESI, ins->inst_basereg, ins->inst_offset);
2379                         else
2380                                 x86_mov_reg_reg (code, X86_ESI, ins->inst_basereg, 4);
2381                         x86_lea_membase (code, X86_EDI, X86_ESP, 12);
2382                         x86_mov_reg_imm (code, X86_ECX, (ins->inst_imm >> 2));
2383                         x86_cld (code);
2384                         x86_prefix (code, X86_REP_PREFIX);
2385                         x86_movsd (code);
2386                         x86_pop_reg (code, X86_ECX);
2387                         x86_pop_reg (code, X86_ESI);
2388                         x86_pop_reg (code, X86_EDI);
2389                         break;
2390                 case OP_X86_LEA:
2391                         x86_lea_memindex (code, ins->dreg, ins->sreg1, ins->inst_imm, ins->sreg2, ins->unused);
2392                         break;
2393                 case OP_X86_LEA_MEMBASE:
2394                         x86_lea_membase (code, ins->dreg, ins->sreg1, ins->inst_imm);
2395                         break;
2396                 case OP_X86_XCHG:
2397                         x86_xchg_reg_reg (code, ins->sreg1, ins->sreg2, 4);
2398                         break;
2399                 case OP_LOCALLOC:
2400                         /* round the requested size up to keep the stack aligned */
2401                         x86_alu_reg_imm (code, X86_ADD, ins->sreg1, MONO_ARCH_FRAME_ALIGNMENT - 1);
2402                         x86_alu_reg_imm (code, X86_AND, ins->sreg1, ~(MONO_ARCH_FRAME_ALIGNMENT - 1));
2403                         code = mono_emit_stack_alloc (code, ins);
2404                         x86_mov_reg_reg (code, ins->dreg, X86_ESP, 4);
2405                         break;
2406                 case CEE_RET:
2407                         x86_ret (code);
2408                         break;
2409                 case CEE_THROW: {
2410                         x86_push_reg (code, ins->sreg1);
2411                         mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD, 
2412                                              (gpointer)"mono_arch_throw_exception");
2413                         x86_call_code (code, 0);
2414                         break;
2415                 }
2416                 case OP_CALL_HANDLER: 
2417                         mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB, ins->inst_target_bb);
2418                         x86_call_imm (code, 0);
2419                         break;
2420                 case OP_LABEL:
2421                         ins->inst_c0 = code - cfg->native_code;
2422                         break;
2423                 case CEE_BR:
2424                         //g_print ("target: %p, next: %p, curr: %p, last: %p\n", ins->inst_target_bb, bb->next_bb, ins, bb->last_ins);
2425                         //if ((ins->inst_target_bb == bb->next_bb) && ins == bb->last_ins)
2426                         //break;
2427                         if (ins->flags & MONO_INST_BRLABEL) {
2428                                 if (ins->inst_i0->inst_c0) {
2429                                         x86_jump_code (code, cfg->native_code + ins->inst_i0->inst_c0);
2430                                 } else {
2431                                         mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_LABEL, ins->inst_i0);
2432                                         x86_jump32 (code, 0);
2433                                 }
2434                         } else {
2435                                 if (ins->inst_target_bb->native_offset) {
2436                                         x86_jump_code (code, cfg->native_code + ins->inst_target_bb->native_offset); 
2437                                 } else {
2438                                         mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_target_bb);
2439                                         if ((cfg->opt & MONO_OPT_BRANCH) &&
2440                                             x86_is_imm8 (ins->inst_target_bb->max_offset - cpos))
2441                                                 x86_jump8 (code, 0);
2442                                         else 
2443                                                 x86_jump32 (code, 0);
2444                                 } 
2445                         }
2446                         break;
2447                 case OP_BR_REG:
2448                         x86_jump_reg (code, ins->sreg1);
2449                         break;
2450                 case OP_CEQ:
2451                         x86_set_reg (code, X86_CC_EQ, ins->dreg, TRUE);
2452                         x86_widen_reg (code, ins->dreg, ins->dreg, FALSE, FALSE);
2453                         break;
2454                 case OP_CLT:
2455                         x86_set_reg (code, X86_CC_LT, ins->dreg, TRUE);
2456                         x86_widen_reg (code, ins->dreg, ins->dreg, FALSE, FALSE);
2457                         break;
2458                 case OP_CLT_UN:
2459                         x86_set_reg (code, X86_CC_LT, ins->dreg, FALSE);
2460                         x86_widen_reg (code, ins->dreg, ins->dreg, FALSE, FALSE);
2461                         break;
2462                 case OP_CGT:
2463                         x86_set_reg (code, X86_CC_GT, ins->dreg, TRUE);
2464                         x86_widen_reg (code, ins->dreg, ins->dreg, FALSE, FALSE);
2465                         break;
2466                 case OP_CGT_UN:
2467                         x86_set_reg (code, X86_CC_GT, ins->dreg, FALSE);
2468                         x86_widen_reg (code, ins->dreg, ins->dreg, FALSE, FALSE);
2469                         break;
2470                 case OP_COND_EXC_EQ:
2471                 case OP_COND_EXC_NE_UN:
2472                 case OP_COND_EXC_LT:
2473                 case OP_COND_EXC_LT_UN:
2474                 case OP_COND_EXC_GT:
2475                 case OP_COND_EXC_GT_UN:
2476                 case OP_COND_EXC_GE:
2477                 case OP_COND_EXC_GE_UN:
2478                 case OP_COND_EXC_LE:
2479                 case OP_COND_EXC_LE_UN:
2480                 case OP_COND_EXC_OV:
2481                 case OP_COND_EXC_NO:
2482                 case OP_COND_EXC_C:
2483                 case OP_COND_EXC_NC:
2484                         EMIT_COND_SYSTEM_EXCEPTION (branch_cc_table [ins->opcode - OP_COND_EXC_EQ], 
2485                                                     (ins->opcode < OP_COND_EXC_NE_UN), ins->inst_p1);
2486                         break;
2487                 case CEE_BEQ:
2488                 case CEE_BNE_UN:
2489                 case CEE_BLT:
2490                 case CEE_BLT_UN:
2491                 case CEE_BGT:
2492                 case CEE_BGT_UN:
2493                 case CEE_BGE:
2494                 case CEE_BGE_UN:
2495                 case CEE_BLE:
2496                 case CEE_BLE_UN:
2497                         EMIT_COND_BRANCH (ins, branch_cc_table [ins->opcode - CEE_BEQ], (ins->opcode < CEE_BNE_UN));
2498                         break;
2499
2500                 /* floating point opcodes */
2501                 case OP_R8CONST: {
2502                         double d = *(double *)ins->inst_p0;
2503
2504                         if ((d == 0.0) && (signbit (d) == 0)) {
2505                                 x86_fldz (code);
2506                         } else if (d == 1.0) {
2507                                 x86_fld1 (code);
2508                         } else {
2509                                 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_R8, ins->inst_p0);
2510                                 x86_fld (code, NULL, TRUE);
2511                         }
2512                         break;
2513                 }
2514                 case OP_R4CONST: {
2515                         float f = *(float *)ins->inst_p0;
2516
2517                         if ((f == 0.0) && (signbit (f) == 0)) {
2518                                 x86_fldz (code);
2519                         } else if (f == 1.0) {
2520                                 x86_fld1 (code);
2521                         } else {
2522                                 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_R4, ins->inst_p0);
2523                                 x86_fld (code, NULL, FALSE);
2524                         }
2525                         break;
2526                 }
2527                 case OP_STORER8_MEMBASE_REG:
2528                         x86_fst_membase (code, ins->inst_destbasereg, ins->inst_offset, TRUE, TRUE);
2529                         break;
2530                 case OP_LOADR8_MEMBASE:
2531                         x86_fld_membase (code, ins->inst_basereg, ins->inst_offset, TRUE);
2532                         break;
2533                 case OP_STORER4_MEMBASE_REG:
2534                         x86_fst_membase (code, ins->inst_destbasereg, ins->inst_offset, FALSE, TRUE);
2535                         break;
2536                 case OP_LOADR4_MEMBASE:
2537                         x86_fld_membase (code, ins->inst_basereg, ins->inst_offset, FALSE);
2538                         break;
2539                 case CEE_CONV_R4: /* FIXME: change precision */
2540                 case CEE_CONV_R8:
2541                         x86_push_reg (code, ins->sreg1);
2542                         x86_fild_membase (code, X86_ESP, 0, FALSE);
2543                         x86_alu_reg_imm (code, X86_ADD, X86_ESP, 4);
2544                         break;
2545                 case OP_X86_FP_LOAD_I8:
2546                         x86_fild_membase (code, ins->inst_basereg, ins->inst_offset, TRUE);
2547                         break;
2548                 case OP_X86_FP_LOAD_I4:
2549                         x86_fild_membase (code, ins->inst_basereg, ins->inst_offset, FALSE);
2550                         break;
2551                 case OP_FCONV_TO_I1:
2552                         code = emit_float_to_int (cfg, code, ins->dreg, 1, TRUE);
2553                         break;
2554                 case OP_FCONV_TO_U1:
2555                         code = emit_float_to_int (cfg, code, ins->dreg, 1, FALSE);
2556                         break;
2557                 case OP_FCONV_TO_I2:
2558                         code = emit_float_to_int (cfg, code, ins->dreg, 2, TRUE);
2559                         break;
2560                 case OP_FCONV_TO_U2:
2561                         code = emit_float_to_int (cfg, code, ins->dreg, 2, FALSE);
2562                         break;
2563                 case OP_FCONV_TO_I4:
2564                 case OP_FCONV_TO_I:
2565                         code = emit_float_to_int (cfg, code, ins->dreg, 4, TRUE);
2566                         break;
2567                 case OP_FCONV_TO_I8:
2568                         /* we defined this instruction to output only to eax:edx */
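                        /* the OR with 0xc00 below switches the FPU rounding control to 'truncate
                         * toward zero' while the value is stored with a 64-bit fistp; the original
                         * control word is then restored and the low/high words popped into EAX/EDX */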
2569                         x86_alu_reg_imm (code, X86_SUB, X86_ESP, 4);
2570                         x86_fnstcw_membase(code, X86_ESP, 0);
2571                         x86_mov_reg_membase (code, X86_EAX, X86_ESP, 0, 2);
2572                         x86_alu_reg_imm (code, X86_OR, X86_EAX, 0xc00);
2573                         x86_mov_membase_reg (code, X86_ESP, 2, X86_EAX, 2);
2574                         x86_fldcw_membase (code, X86_ESP, 2);
2575                         x86_alu_reg_imm (code, X86_SUB, X86_ESP, 8);
2576                         x86_fist_pop_membase (code, X86_ESP, 0, TRUE);
2577                         x86_pop_reg (code, X86_EAX);
2578                         x86_pop_reg (code, X86_EDX);
2579                         x86_fldcw_membase (code, X86_ESP, 0);
2580                         x86_alu_reg_imm (code, X86_ADD, X86_ESP, 4);
2581                         break;
2582                 case OP_LCONV_TO_R_UN: { 
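                        /* mn encodes 2^64 as an 80-bit extended double; adding it when the source was
                         * negative as a signed 64-bit value yields the correct unsigned magnitude */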
2583                         static guint8 mn[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x3f, 0x40 };
2584                         guint8 *br;
2585
2586                         /* load 64bit integer to FP stack */
2587                         x86_push_imm (code, 0);
2588                         x86_push_reg (code, ins->sreg2);
2589                         x86_push_reg (code, ins->sreg1);
2590                         x86_fild_membase (code, X86_ESP, 0, TRUE);
2591                         /* store as 80bit FP value */
2592                         x86_fst80_membase (code, X86_ESP, 0);
2593                         
2594                         /* test if lreg is negative */
2595                         x86_test_reg_reg (code, ins->sreg2, ins->sreg2);
2596                         br = code; x86_branch8 (code, X86_CC_GEZ, 0, TRUE);
2597         
2598                         /* add correction constant mn */
2599                         x86_fld80_mem (code, mn);
2600                         x86_fld80_membase (code, X86_ESP, 0);
2601                         x86_fp_op_reg (code, X86_FADD, 1, TRUE);
2602                         x86_fst80_membase (code, X86_ESP, 0);
2603
2604                         x86_patch (br, code);
2605
2606                         x86_fld80_membase (code, X86_ESP, 0);
2607                         x86_alu_reg_imm (code, X86_ADD, X86_ESP, 12);
2608
2609                         break;
2610                 }
2611                 case OP_LCONV_TO_OVF_I: {
2612                         guint8 *br [3], *label [1];
2613
2614                         /* 
2615                          * Valid ints: 0xffffffff:0x80000000 to 0x00000000:0x7fffffff
2616                          */
2617                         x86_test_reg_reg (code, ins->sreg1, ins->sreg1);
2618
2619                         /* If the low word top bit is set, see if we are negative */
2620                         br [0] = code; x86_branch8 (code, X86_CC_LT, 0, TRUE);
2621                         /* We are not negative (no top bit set); check that our top word is zero */
2622                         x86_test_reg_reg (code, ins->sreg2, ins->sreg2);
2623                         br [1] = code; x86_branch8 (code, X86_CC_EQ, 0, TRUE);
2624                         label [0] = code;
2625
2626                         /* throw exception */
2627                         mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_EXC, "OverflowException");
2628                         x86_jump32 (code, 0);
2629         
2630                         x86_patch (br [0], code);
2631                         /* our top bit is set, check that the top word is 0xffffffff */
2632                         x86_alu_reg_imm (code, X86_CMP, ins->sreg2, 0xffffffff);
2633                 
2634                         x86_patch (br [1], code);
2635                         /* if the top word is not 0xffffffff, branch to the exception thrown above */
2636                         br [2] = code; x86_branch8 (code, X86_CC_NE, 0, TRUE);
2637                         x86_patch (br [2], label [0]);
2638
2639                         if (ins->dreg != ins->sreg1)
2640                                 x86_mov_reg_reg (code, ins->dreg, ins->sreg1, 4);
2641                         break;
2642                 }
2643                 case OP_FADD:
2644                         x86_fp_op_reg (code, X86_FADD, 1, TRUE);
2645                         break;
2646                 case OP_FSUB:
2647                         x86_fp_op_reg (code, X86_FSUB, 1, TRUE);
2648                         break;          
2649                 case OP_FMUL:
2650                         x86_fp_op_reg (code, X86_FMUL, 1, TRUE);
2651                         break;          
2652                 case OP_FDIV:
2653                         x86_fp_op_reg (code, X86_FDIV, 1, TRUE);
2654                         break;          
2655                 case OP_FNEG:
2656                         x86_fchs (code);
2657                         break;          
2658                 case OP_SIN:
2659                         x86_fsin (code);
2660                         break;          
2661                 case OP_COS:
2662                         x86_fcos (code);
2663                         break;          
2664                 case OP_ABS:
2665                         x86_fabs (code);
2666                         break;          
2667                 case OP_TAN: {
2668                         /* 
2669                          * it really doesn't make sense to inline all this code;
2670                          * it's here just to show that things may not be as simple
2671                          * as they appear.
2672                          */
2673                         guchar *check_pos, *end_tan, *pop_jump;
2674                         x86_push_reg (code, X86_EAX);
2675                         x86_fptan (code);
2676                         x86_fnstsw (code);
2677                         x86_test_reg_imm (code, X86_EAX, 0x400);
2678                         check_pos = code;
2679                         x86_branch8 (code, X86_CC_NE, 0, FALSE);
2680                         x86_fstp (code, 0); /* pop the 1.0 */
2681                         end_tan = code;
2682                         x86_jump8 (code, 0);
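                        /* fptan set C2 (0x400): the operand was out of range, so build 2*pi,
                         * reduce the argument with fprem1 and retry fptan */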
2683                         x86_fldpi (code);
2684                         x86_fp_op (code, X86_FADD, 0);
2685                         x86_fxch (code, 1);
2686                         x86_fprem1 (code);
2687                         x86_fstsw (code);
2688                         x86_test_reg_imm (code, X86_EAX, 0x400);
2689                         pop_jump = code;
2690                         x86_branch8 (code, X86_CC_NE, 0, FALSE);
2691                         x86_fstp (code, 1);
2692                         x86_fptan (code);
2693                         x86_patch (pop_jump, code);
2694                         x86_fstp (code, 0); /* pop the 1.0 */
2695                         x86_patch (check_pos, code);
2696                         x86_patch (end_tan, code);
2697                         x86_pop_reg (code, X86_EAX);
2698                         break;
2699                 }
2700                 case OP_ATAN:
2701                         x86_fld1 (code);
2702                         x86_fpatan (code);
2703                         break;          
2704                 case OP_SQRT:
2705                         x86_fsqrt (code);
2706                         break;          
2707                 case OP_X86_FPOP:
2708                         x86_fstp (code, 0);
2709                         break;          
2710                 case OP_FREM: {
2711                         guint8 *l1, *l2;
2712
2713                         x86_push_reg (code, X86_EAX);
2714                         /* we need to exchange ST(0) with ST(1) */
2715                         x86_fxch (code, 1);
2716
2717                         /* this requires a loop, because fprem sometimes
2718                          * returns a partial remainder */
2719                         l1 = code;
2720                         /* looks like MS is using fprem instead of the IEEE-compatible fprem1 */
2721                         /* x86_fprem1 (code); */
2722                         x86_fprem (code);
2723                         x86_fnstsw (code);
2724                         x86_alu_reg_imm (code, X86_AND, X86_EAX, 0x0400);
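                        /* the conditional branch below is two bytes long, so l1 - l2 is the rel8
                         * displacement back to the fprem, taken while C2 (partial remainder) is set */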
2725                         l2 = code + 2;
2726                         x86_branch8 (code, X86_CC_NE, l1 - l2, FALSE);
2727
2728                         /* pop result */
2729                         x86_fstp (code, 1);
2730
2731                         x86_pop_reg (code, X86_EAX);
2732                         break;
2733                 }
2734                 case OP_FCOMPARE:
2735                         if (cfg->opt & MONO_OPT_FCMOV) {
2736                                 x86_fcomip (code, 1);
2737                                 x86_fstp (code, 0);
2738                                 break;
2739                         }
2740                         /* this overwrites EAX */
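                        /* EMIT_FPCOMPARE leaves the x87 condition bits C0 (0x0100), C2 (0x0400) and
                         * C3 (0x4000) in AX; 0x4500 masks out everything else, so the compares against
                         * 0x4000, 0x0100 and 0x4500 in the opcodes below distinguish equal, ordered
                         * less/greater and unordered (NaN) results */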
2741                         EMIT_FPCOMPARE(code);
2742                         x86_alu_reg_imm (code, X86_AND, X86_EAX, 0x4500);
2743                         break;
2744                 case OP_FCEQ:
2745                         if (cfg->opt & MONO_OPT_FCMOV) {
2746                                 /* zeroing the register at the start results in 
2747                                  * shorter and faster code (we can also remove the widening op)
2748                                  */
2749                                 guchar *unordered_check;
2750                                 x86_alu_reg_reg (code, X86_XOR, ins->dreg, ins->dreg);
2751                                 x86_fcomip (code, 1);
2752                                 x86_fstp (code, 0);
2753                                 unordered_check = code;
2754                                 x86_branch8 (code, X86_CC_P, 0, FALSE);
2755                                 x86_set_reg (code, X86_CC_EQ, ins->dreg, FALSE);
2756                                 x86_patch (unordered_check, code);
2757                                 break;
2758                         }
2759                         if (ins->dreg != X86_EAX) 
2760                                 x86_push_reg (code, X86_EAX);
2761
2762                         EMIT_FPCOMPARE(code);
2763                         x86_alu_reg_imm (code, X86_AND, X86_EAX, 0x4500);
2764                         x86_alu_reg_imm (code, X86_CMP, X86_EAX, 0x4000);
2765                         x86_set_reg (code, X86_CC_EQ, ins->dreg, TRUE);
2766                         x86_widen_reg (code, ins->dreg, ins->dreg, FALSE, FALSE);
2767
2768                         if (ins->dreg != X86_EAX) 
2769                                 x86_pop_reg (code, X86_EAX);
2770                         break;
2771                 case OP_FCLT:
2772                 case OP_FCLT_UN:
2773                         if (cfg->opt & MONO_OPT_FCMOV) {
2774                                 /* zeroing the register at the start results in 
2775                                  * shorter and faster code (we can also remove the widening op)
2776                                  */
2777                                 x86_alu_reg_reg (code, X86_XOR, ins->dreg, ins->dreg);
2778                                 x86_fcomip (code, 1);
2779                                 x86_fstp (code, 0);
2780                                 if (ins->opcode == OP_FCLT_UN) {
2781                                         guchar *unordered_check = code;
2782                                         guchar *jump_to_end;
2783                                         x86_branch8 (code, X86_CC_P, 0, FALSE);
2784                                         x86_set_reg (code, X86_CC_GT, ins->dreg, FALSE);
2785                                         jump_to_end = code;
2786                                         x86_jump8 (code, 0);
2787                                         x86_patch (unordered_check, code);
2788                                         x86_inc_reg (code, ins->dreg);
2789                                         x86_patch (jump_to_end, code);
2790                                 } else {
2791                                         x86_set_reg (code, X86_CC_GT, ins->dreg, FALSE);
2792                                 }
2793                                 break;
2794                         }
2795                         if (ins->dreg != X86_EAX) 
2796                                 x86_push_reg (code, X86_EAX);
2797
2798                         EMIT_FPCOMPARE(code);
2799                         x86_alu_reg_imm (code, X86_AND, X86_EAX, 0x4500);
2800                         if (ins->opcode == OP_FCLT_UN) {
2801                                 guchar *is_not_zero_check, *end_jump;
2802                                 is_not_zero_check = code;
2803                                 x86_branch8 (code, X86_CC_NZ, 0, TRUE);
2804                                 end_jump = code;
2805                                 x86_jump8 (code, 0);
2806                                 x86_patch (is_not_zero_check, code);
2807                                 x86_alu_reg_imm (code, X86_CMP, X86_EAX, 0x4500);
2808
2809                                 x86_patch (end_jump, code);
2810                         }
2811                         x86_set_reg (code, X86_CC_EQ, ins->dreg, TRUE);
2812                         x86_widen_reg (code, ins->dreg, ins->dreg, FALSE, FALSE);
2813
2814                         if (ins->dreg != X86_EAX) 
2815                                 x86_pop_reg (code, X86_EAX);
2816                         break;
2817                 case OP_FCGT:
2818                 case OP_FCGT_UN:
2819                         if (cfg->opt & MONO_OPT_FCMOV) {
2820                                 /* zeroing the register at the start results in 
2821                                  * shorter and faster code (we can also remove the widening op)
2822                                  */
2823                                 guchar *unordered_check;
2824                                 x86_alu_reg_reg (code, X86_XOR, ins->dreg, ins->dreg);
2825                                 x86_fcomip (code, 1);
2826                                 x86_fstp (code, 0);
2827                                 if (ins->opcode == OP_FCGT) {
2828                                         unordered_check = code;
2829                                         x86_branch8 (code, X86_CC_P, 0, FALSE);
2830                                         x86_set_reg (code, X86_CC_LT, ins->dreg, FALSE);
2831                                         x86_patch (unordered_check, code);
2832                                 } else {
2833                                         x86_set_reg (code, X86_CC_LT, ins->dreg, FALSE);
2834                                 }
2835                                 break;
2836                         }
2837                         if (ins->dreg != X86_EAX) 
2838                                 x86_push_reg (code, X86_EAX);
2839
2840                         EMIT_FPCOMPARE(code);
2841                         x86_alu_reg_imm (code, X86_AND, X86_EAX, 0x4500);
2842                         x86_alu_reg_imm (code, X86_CMP, X86_EAX, 0x0100);
2843                         if (ins->opcode == OP_FCGT_UN) {
2844                                 guchar *is_not_zero_check, *end_jump;
2845                                 is_not_zero_check = code;
2846                                 x86_branch8 (code, X86_CC_NZ, 0, TRUE);
2847                                 end_jump = code;
2848                                 x86_jump8 (code, 0);
2849                                 x86_patch (is_not_zero_check, code);
2850                                 x86_alu_reg_imm (code, X86_CMP, X86_EAX, 0x4500);
2851
2852                                 x86_patch (end_jump, code);
2853                         }
2854                         x86_set_reg (code, X86_CC_EQ, ins->dreg, TRUE);
2855                         x86_widen_reg (code, ins->dreg, ins->dreg, FALSE, FALSE);
2856
2857                         if (ins->dreg != X86_EAX) 
2858                                 x86_pop_reg (code, X86_EAX);
2859                         break;
2860                 case OP_FBEQ:
2861                         if (cfg->opt & MONO_OPT_FCMOV) {
2862                                 guchar *jump = code;
2863                                 x86_branch8 (code, X86_CC_P, 0, TRUE);
2864                                 EMIT_COND_BRANCH (ins, X86_CC_EQ, FALSE);
2865                                 x86_patch (jump, code);
2866                                 break;
2867                         }
2868                         x86_alu_reg_imm (code, X86_CMP, X86_EAX, 0x4000);
2869                         EMIT_COND_BRANCH (ins, X86_CC_EQ, TRUE);
2870                         break;
2871                 case OP_FBNE_UN:
2872                         if (cfg->opt & MONO_OPT_FCMOV) {
2873                                 EMIT_COND_BRANCH (ins, X86_CC_P, FALSE);
2874                                 EMIT_COND_BRANCH (ins, X86_CC_NE, FALSE);
2875                                 break;
2876                         }
2877                         x86_alu_reg_imm (code, X86_CMP, X86_EAX, 0x4000);
2878                         EMIT_COND_BRANCH (ins, X86_CC_NE, FALSE);
2879                         break;
2880                 case OP_FBLT:
2881                         if (cfg->opt & MONO_OPT_FCMOV) {
2882                                 EMIT_COND_BRANCH (ins, X86_CC_GT, FALSE);
2883                                 break;
2884                         }
2885                         EMIT_COND_BRANCH (ins, X86_CC_EQ, FALSE);
2886                         break;
2887                 case OP_FBLT_UN:
2888                         if (cfg->opt & MONO_OPT_FCMOV) {
2889                                 EMIT_COND_BRANCH (ins, X86_CC_P, FALSE);
2890                                 EMIT_COND_BRANCH (ins, X86_CC_GT, FALSE);
2891                                 break;
2892                         }
2893                         if (ins->opcode == OP_FBLT_UN) {
2894                                 guchar *is_not_zero_check, *end_jump;
2895                                 is_not_zero_check = code;
2896                                 x86_branch8 (code, X86_CC_NZ, 0, TRUE);
2897                                 end_jump = code;
2898                                 x86_jump8 (code, 0);
2899                                 x86_patch (is_not_zero_check, code);
2900                                 x86_alu_reg_imm (code, X86_CMP, X86_EAX, 0x4500);
2901
2902                                 x86_patch (end_jump, code);
2903                         }
2904                         EMIT_COND_BRANCH (ins, X86_CC_EQ, FALSE);
2905                         break;
2906                 case OP_FBGT:
2907                 case OP_FBGT_UN:
2908                         if (cfg->opt & MONO_OPT_FCMOV) {
2909                                 EMIT_COND_BRANCH (ins, X86_CC_LT, FALSE);
2910                                 break;
2911                         }
2912                         x86_alu_reg_imm (code, X86_CMP, X86_EAX, 0x0100);
2913                         if (ins->opcode == OP_FBGT_UN) {
2914                                 guchar *is_not_zero_check, *end_jump;
2915                                 is_not_zero_check = code;
2916                                 x86_branch8 (code, X86_CC_NZ, 0, TRUE);
2917                                 end_jump = code;
2918                                 x86_jump8 (code, 0);
2919                                 x86_patch (is_not_zero_check, code);
2920                                 x86_alu_reg_imm (code, X86_CMP, X86_EAX, 0x4500);
2921
2922                                 x86_patch (end_jump, code);
2923                         }
2924                         EMIT_COND_BRANCH (ins, X86_CC_EQ, FALSE);
2925                         break;
2926                 case OP_FBGE:
2927                 case OP_FBGE_UN:
2928                         if (cfg->opt & MONO_OPT_FCMOV) {
2929                                 EMIT_COND_BRANCH (ins, X86_CC_LE, FALSE);
2930                                 break;
2931                         }
2932                         EMIT_COND_BRANCH (ins, X86_CC_NE, FALSE);
2933                         break;
2934                 case OP_FBLE:
2935                 case OP_FBLE_UN:
2936                         if (cfg->opt & MONO_OPT_FCMOV) {
2937                                 EMIT_COND_BRANCH (ins, X86_CC_P, FALSE);
2938                                 EMIT_COND_BRANCH (ins, X86_CC_GE, FALSE);
2939                                 break;
2940                         }
2941                         x86_alu_reg_imm (code, X86_CMP, X86_EAX, 0x0100);
2942                         EMIT_COND_BRANCH (ins, X86_CC_NE, FALSE);
2943                         break;
2944                 case CEE_CKFINITE: {
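                        /* fxam classifies st(0): after masking C3|C0 (0x4100), a result of 0x0100
                         * means NaN or infinity, in which case an ArithmeticException is raised */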
2945                         x86_push_reg (code, X86_EAX);
2946                         x86_fxam (code);
2947                         x86_fnstsw (code);
2948                         x86_alu_reg_imm (code, X86_AND, X86_EAX, 0x4100);
2949                         x86_alu_reg_imm (code, X86_CMP, X86_EAX, 0x0100);
2950                         x86_pop_reg (code, X86_EAX);
2951                         EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "ArithmeticException");
2952                         break;
2953                 }
2954                 default:
2955                         g_warning ("unknown opcode %s in %s()\n", mono_inst_name (ins->opcode), __FUNCTION__);
2956                         g_assert_not_reached ();
2957                 }
2958
2959                 if ((code - cfg->native_code - offset) > max_len) {
2960                         g_warning ("wrong maximum instruction length for instruction %s (expected %d, got %d)",
2961                                    mono_inst_name (ins->opcode), max_len, code - cfg->native_code - offset);
2962                         g_assert_not_reached ();
2963                 }
2964                
2965                 cpos += max_len;
2966
2967                 last_ins = ins;
2968                 last_offset = offset;
2969                 
2970                 ins = ins->next;
2971         }
2972
2973         cfg->code_len = code - cfg->native_code;
2974 }
2975
2976 void
2977 mono_arch_register_lowlevel_calls (void)
2978 {
2979         mono_register_jit_icall (enter_method, "mono_enter_method", NULL, TRUE);
2980         mono_register_jit_icall (leave_method, "mono_leave_method", NULL, TRUE);
2981 }
2982
2983 void
2984 mono_arch_patch_code (MonoMethod *method, MonoDomain *domain, guint8 *code, MonoJumpInfo *ji)
2985 {
2986         MonoJumpInfo *patch_info;
2987
2988         for (patch_info = ji; patch_info; patch_info = patch_info->next) {
2989                 unsigned char *ip = patch_info->ip.i + code;
2990                 const unsigned char *target = NULL;
2991
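                /* the small offsets added to ip below (+1, +2) skip the opcode/ModRM bytes emitted
                 * before the 32-bit immediate or displacement that gets rewritten */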
2992                 switch (patch_info->type) {
2993                 case MONO_PATCH_INFO_BB:
2994                         target = patch_info->data.bb->native_offset + code;
2995                         break;
2996                 case MONO_PATCH_INFO_ABS:
2997                         target = patch_info->data.target;
2998                         break;
2999                 case MONO_PATCH_INFO_LABEL:
3000                         target = patch_info->data.inst->inst_c0 + code;
3001                         break;
3002                 case MONO_PATCH_INFO_IP:
3003                         *((gpointer *)(ip)) = ip;
3004                         continue;
3005                 case MONO_PATCH_INFO_METHOD_REL:
3006                         *((gpointer *)(ip)) = code + patch_info->data.offset;
3007                         continue;
3008                 case MONO_PATCH_INFO_INTERNAL_METHOD: {
3009                         MonoJitICallInfo *mi = mono_find_jit_icall_by_name (patch_info->data.name);
3010                         if (!mi) {
3011                                 g_warning ("unknown MONO_PATCH_INFO_INTERNAL_METHOD %s", patch_info->data.name);
3012                                 g_assert_not_reached ();
3013                         }
3014                         target = mono_icall_get_wrapper (mi);
3015                         break;
3016                 }
3017                 case MONO_PATCH_INFO_METHOD_JUMP: {
3018                         GSList *list;
3019
3020                         /* get the trampoline to the method from the domain */
3021                         target = mono_arch_create_jump_trampoline (patch_info->data.method);
3022                         if (!domain->jump_target_hash)
3023                                 domain->jump_target_hash = g_hash_table_new (NULL, NULL);
3024                         list = g_hash_table_lookup (domain->jump_target_hash, patch_info->data.method);
3025                         list = g_slist_prepend (list, ip);
3026                         g_hash_table_insert (domain->jump_target_hash, patch_info->data.method, list);
3027                         break;
3028                 }
3029                 case MONO_PATCH_INFO_METHOD:
3030                         if (patch_info->data.method == method) {
3031                                 target = code;
3032                         } else
3033                                 /* get the trampoline to the method from the domain */
3034                                 target = mono_arch_create_jit_trampoline (patch_info->data.method);
3035                         break;
3036                 case MONO_PATCH_INFO_SWITCH: {
3037                         gpointer *jump_table = mono_mempool_alloc (domain->code_mp, sizeof (gpointer) * patch_info->table_size);
3038                         int i;
3039
3040                         *((gconstpointer *)(ip + 2)) = jump_table;
3041
3042                         for (i = 0; i < patch_info->table_size; i++) {
3043                                 jump_table [i] = code + (int)patch_info->data.table [i];
3044                         }
3045                         /* we put the absolute addresses into the table, so there is no need for x86_patch in this case */
3046                         continue;
3047                 }
3048                 case MONO_PATCH_INFO_METHODCONST:
3049                 case MONO_PATCH_INFO_CLASS:
3050                 case MONO_PATCH_INFO_IMAGE:
3051                 case MONO_PATCH_INFO_FIELD:
3052                         *((gconstpointer *)(ip + 1)) = patch_info->data.target;
3053                         continue;
3054                 case MONO_PATCH_INFO_IID:
3055                         mono_class_init (patch_info->data.klass);
3056                         *((guint32 *)(ip + 1)) = patch_info->data.klass->interface_id;
3057                         continue;                       
3058                 case MONO_PATCH_INFO_VTABLE:
3059                         *((gconstpointer *)(ip + 1)) = mono_class_vtable (domain, patch_info->data.klass);
3060                         continue;
3061                 case MONO_PATCH_INFO_CLASS_INIT: {
3062                         guint8 *code = ip;
3063                         /* Might already have been changed to a nop */
3064                         x86_call_imm (code, 0);
3065                         target = mono_create_class_init_trampoline (mono_class_vtable (domain, patch_info->data.klass));
3066                         break;
3067                 }
3068                 case MONO_PATCH_INFO_SFLDA: {
3069                         MonoVTable *vtable = mono_class_vtable (domain, patch_info->data.field->parent);
3070                         if (!vtable->initialized && !(vtable->klass->flags & TYPE_ATTRIBUTE_BEFORE_FIELD_INIT) && mono_class_needs_cctor_run (vtable->klass, method))
3071                                 /* Done by the generated code */
3072                                 ;
3073                         else {
3074                                 mono_runtime_class_init (vtable);
3075                         }
3076                         *((gconstpointer *)(ip + 1)) = 
3077                                 (char*)vtable->data + patch_info->data.field->offset;
3078                         continue;
3079                 }
3080                 case MONO_PATCH_INFO_R4:
3081                 case MONO_PATCH_INFO_R8:
3082                         *((gconstpointer *)(ip + 2)) = patch_info->data.target;
3083                         continue;
3084                 case MONO_PATCH_INFO_EXC_NAME:
3085                         *((gconstpointer *)(ip + 1)) = patch_info->data.name;
3086                         continue;
3087                 case MONO_PATCH_INFO_LDSTR:
3088                         *((gconstpointer *)(ip + 1)) = 
3089                                 mono_ldstr (domain, patch_info->data.token->image, 
3090                                                         mono_metadata_token_index (patch_info->data.token->token));
3091                         continue;
3092                 case MONO_PATCH_INFO_TYPE_FROM_HANDLE: {
3093                         gpointer handle;
3094                         MonoClass *handle_class;
3095
3096                         handle = mono_ldtoken (patch_info->data.token->image, 
3097                                                                    patch_info->data.token->token, &handle_class);
3098                         mono_class_init (handle_class);
3099                         mono_class_init (mono_class_from_mono_type (handle));
3100
3101                         *((gconstpointer *)(ip + 1)) = 
3102                                 mono_type_get_object (domain, handle);
3103                         continue;
3104                 }
3105                 case MONO_PATCH_INFO_LDTOKEN: {
3106                         gpointer handle;
3107                         MonoClass *handle_class;
3108
3109                         handle = mono_ldtoken (patch_info->data.token->image,
3110                                                                    patch_info->data.token->token, &handle_class);
3111                         mono_class_init (handle_class);
3112
3113                         *((gconstpointer *)(ip + 1)) = handle;
3114                         continue;
3115                 }
3116                 default:
3117                         g_assert_not_reached ();
3118                 }
3119                 x86_patch (ip, target);
3120         }
3121 }
3122
3123 int
3124 mono_arch_max_epilog_size (MonoCompile *cfg)
3125 {
3126         int exc_count = 0, max_epilog_size = 16;
3127         MonoJumpInfo *patch_info;
3128         
3129         if (cfg->method->save_lmf)
3130                 max_epilog_size += 128;
3131         
3132         if (mono_jit_trace_calls)
3133                 max_epilog_size += 50;
3134
3135         if (cfg->prof_options & MONO_PROFILE_ENTER_LEAVE)
3136                 max_epilog_size += 50;
3137
3138         /* count the number of exception infos */
3139      
3140         for (patch_info = cfg->patch_info; patch_info; patch_info = patch_info->next) {
3141                 if (patch_info->type == MONO_PATCH_INFO_EXC)
3142                         exc_count++;
3143         }
3144
3145         /* 
3146          * make sure we have enough space for exceptions
3147          * 16 is the size of two push_imm instructions and a jump
3148          */
3149         max_epilog_size += exc_count*16;
3150
3151         return max_epilog_size;
3152 }
3153
3154 guint8 *
3155 mono_arch_emit_prolog (MonoCompile *cfg)
3156 {
3157         MonoMethod *method = cfg->method;
3158         MonoBasicBlock *bb;
3159         MonoMethodSignature *sig;
3160         MonoInst *inst;
3161         int alloc_size, pos, max_offset, i;
3162         guint8 *code;
3163
3164         cfg->code_size =  MAX (((MonoMethodNormal *)method)->header->code_size * 4, 256);
3165         code = cfg->native_code = g_malloc (cfg->code_size);
3166
3167         x86_push_reg (code, X86_EBP);
3168         x86_mov_reg_reg (code, X86_EBP, X86_ESP, 4);
3169
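        /* reserve stack space for the locals: start from the negated cfg->stack_offset and
         * subtract the bytes already pushed for the saved registers (pos) before adjusting ESP */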
3170         alloc_size = - cfg->stack_offset;
3171         pos = 0;
3172
3173         if (method->save_lmf) {
3174                 pos += sizeof (MonoLMF);
3175
3176                 /* save the current IP */
3177                 mono_add_patch_info (cfg, code + 1 - cfg->native_code, MONO_PATCH_INFO_IP, NULL);
3178                 x86_push_imm (code, 0);
3179
3180                 /* save all callee-saved regs */
3181                 x86_push_reg (code, X86_EBX);
3182                 x86_push_reg (code, X86_EDI);
3183                 x86_push_reg (code, X86_ESI);
3184                 x86_push_reg (code, X86_EBP);
3185
3186                 /* save method info */
3187                 x86_push_imm (code, method);
3188
3189                 /* get the address of lmf for the current thread */
3190                 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD, 
3191                                      (gpointer)"mono_get_lmf_addr");
3192                 x86_call_code (code, 0);
3193
3194                 /* push lmf */
3195                 x86_push_reg (code, X86_EAX); 
3196                 /* push *lmf (previous_lmf) */
3197                 x86_push_membase (code, X86_EAX, 0);
3198                 /* *(lmf) = ESP */
3199                 x86_mov_membase_reg (code, X86_EAX, 0, X86_ESP, 4);
3200         } else {
3201
3202                 if (cfg->used_int_regs & (1 << X86_EBX)) {
3203                         x86_push_reg (code, X86_EBX);
3204                         pos += 4;
3205                 }
3206
3207                 if (cfg->used_int_regs & (1 << X86_EDI)) {
3208                         x86_push_reg (code, X86_EDI);
3209                         pos += 4;
3210                 }
3211
3212                 if (cfg->used_int_regs & (1 << X86_ESI)) {
3213                         x86_push_reg (code, X86_ESI);
3214                         pos += 4;
3215                 }
3216         }
3217
3218         alloc_size -= pos;
3219
3220         if (alloc_size)
3221                 x86_alu_reg_imm (code, X86_SUB, X86_ESP, alloc_size);
3222
3223         /* compute max_offset in order to use short forward jumps */
3224         max_offset = 0;
3225         if (cfg->opt & MONO_OPT_BRANCH) {
3226                 for (bb = cfg->bb_entry; bb; bb = bb->next_bb) {
3227                         MonoInst *ins = bb->code;
3228                         bb->max_offset = max_offset;
3229
3230                         if (cfg->prof_options & MONO_PROFILE_COVERAGE)
3231                                 max_offset += 6; 
3232
3233                         while (ins) {
3234                                 max_offset += ((guint8 *)ins_spec [ins->opcode])[MONO_INST_LEN];
3235                                 ins = ins->next;
3236                         }
3237                 }
3238         }
3239
3240         if (mono_jit_trace_calls)
3241                 code = mono_arch_instrument_prolog (cfg, enter_method, code, TRUE);
3242
3243         /* load arguments allocated to registers from the stack */
3244         sig = method->signature;
3245         pos = 0;
3246
3247         for (i = 0; i < sig->param_count + sig->hasthis; ++i) {
3248                 inst = cfg->varinfo [pos];
3249                 if (inst->opcode == OP_REGVAR) {
3250                         x86_mov_reg_membase (code, inst->dreg, X86_EBP, inst->inst_offset, 4);
3251                         if (cfg->verbose_level > 2)
3252                                 g_print ("Argument %d assigned to register %s\n", pos, mono_arch_regname (inst->dreg));
3253                 }
3254                 pos++;
3255         }
3256
3257         cfg->code_len = code - cfg->native_code;
3258
3259         return code;
3260 }
3261
3262 void
3263 mono_arch_emit_epilog (MonoCompile *cfg)
3264 {
3265         MonoJumpInfo *patch_info;
3266         MonoMethod *method = cfg->method;
3267         MonoMethodSignature *sig = method->signature;
3268         int pos;
3269         guint32 stack_to_pop;
3270         guint8 *code;
3271
3272         code = cfg->native_code + cfg->code_len;
3273
3274         if (mono_jit_trace_calls)
3275                 code = mono_arch_instrument_epilog (cfg, leave_method, code, TRUE);
3276
3277         /* the code restoring the registers must be kept in sync with CEE_JMP */
3278         pos = 0;
3279         
3280         if (method->save_lmf) {
3281                 pos = -sizeof (MonoLMF);
3282         } else {
3283                 if (cfg->used_int_regs & (1 << X86_EBX)) {
3284                         pos -= 4;
3285                 }
3286                 if (cfg->used_int_regs & (1 << X86_EDI)) {
3287                         pos -= 4;
3288                 }
3289                 if (cfg->used_int_regs & (1 << X86_ESI)) {
3290                         pos -= 4;
3291                 }
3292         }
3293
3294         if (pos)
3295                 x86_lea_membase (code, X86_ESP, X86_EBP, pos);
3296         
3297         if (method->save_lmf) {
3298                 /* ebx = previous_lmf */
3299                 x86_pop_reg (code, X86_EBX);
3300                 /* edi = lmf */
3301                 x86_pop_reg (code, X86_EDI);
3302                 /* *(lmf) = previous_lmf */
3303                 x86_mov_membase_reg (code, X86_EDI, 0, X86_EBX, 4);
3304
3305                 /* discard method info */
3306                 x86_pop_reg (code, X86_ESI);
3307
3308                 /* restore callee-saved regs */
3309                 x86_pop_reg (code, X86_EBP);
3310                 x86_pop_reg (code, X86_ESI);
3311                 x86_pop_reg (code, X86_EDI);
3312                 x86_pop_reg (code, X86_EBX);
3313
3314         } else {
3315
3316                 if (cfg->used_int_regs & (1 << X86_ESI)) {
3317                         x86_pop_reg (code, X86_ESI);
3318                 }
3319                 if (cfg->used_int_regs & (1 << X86_EDI)) {
3320                         x86_pop_reg (code, X86_EDI);
3321                 }
3322                 if (cfg->used_int_regs & (1 << X86_EBX)) {
3323                         x86_pop_reg (code, X86_EBX);
3324                 }
3325         }
3326
3327         x86_leave (code);
3328
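        /* with stdcall the callee pops its own arguments: arch_get_argument_info computes how many
         * bytes to remove with ret imm; otherwise only the hidden valuetype-return pointer
         * (4 bytes), if any, is popped */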
3329         if (sig->call_convention == MONO_CALL_STDCALL) {
3330                 MonoJitArgumentInfo *arg_info = alloca (sizeof (MonoJitArgumentInfo) * (sig->param_count + 1));
3331
3332                 stack_to_pop = arch_get_argument_info (sig, sig->param_count, arg_info);
3333         }
3334         else
3335         if (MONO_TYPE_ISSTRUCT (cfg->method->signature->ret))
3336                 stack_to_pop = 4;
3337         else
3338                 stack_to_pop = 0;
3339
3340         if (stack_to_pop)
3341                 x86_ret_imm (code, stack_to_pop);
3342         else
3343                 x86_ret (code);
3344
3345         /* add code to raise exceptions */
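        /* every forward branch recorded with MONO_PATCH_INFO_EXC is patched to land here; the
         * exception name and the faulting IP are pushed and the patch entry is reused as an
         * internal-method jump to mono_arch_throw_exception_by_name */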
3346         for (patch_info = cfg->patch_info; patch_info; patch_info = patch_info->next) {
3347                 switch (patch_info->type) {
3348                 case MONO_PATCH_INFO_EXC:
3349                         x86_patch (patch_info->ip.i + cfg->native_code, code);
3350                         mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_EXC_NAME, patch_info->data.target);
3351                         x86_push_imm (code, patch_info->data.target);
3352                         mono_add_patch_info (cfg, code + 1 - cfg->native_code, MONO_PATCH_INFO_METHOD_REL, (gpointer)patch_info->ip.i);
3353                         x86_push_imm (code, patch_info->ip.i + cfg->native_code);
3354                         patch_info->type = MONO_PATCH_INFO_INTERNAL_METHOD;
3355                         patch_info->data.name = "mono_arch_throw_exception_by_name";
3356                         patch_info->ip.i = code - cfg->native_code;
3357                         x86_jump_code (code, 0);
3358                         break;
3359                 default:
3360                         /* do nothing */
3361                         break;
3362                 }
3363         }
3364
3365         cfg->code_len = code - cfg->native_code;
3366
3367         g_assert (cfg->code_len < cfg->code_size);
3368
3369 }
3370
3371 void
3372 mono_arch_flush_icache (guint8 *code, gint size)
3373 {
3374         /* not needed: x86 keeps the instruction cache coherent with memory writes */
3375 }
3376