1 /*
2  * mini-x86.c: x86 backend for the Mono code generator
3  *
4  * Authors:
5  *   Paolo Molaro (lupus@ximian.com)
6  *   Dietmar Maurer (dietmar@ximian.com)
7  *
8  * (C) 2003 Ximian, Inc.
9  */
10 #include "mini.h"
11 #include <string.h>
12 #include <math.h>
13
14 #include <mono/metadata/appdomain.h>
15 #include <mono/metadata/debug-helpers.h>
16 #include <mono/metadata/profiler-private.h>
17
18 #include "mini-x86.h"
19 #include "inssel.h"
20 #include "cpu-pentium.h"
21
22 const char*
23 mono_arch_regname (int reg) {
24         switch (reg) {
25         case X86_EAX: return "%eax";
26         case X86_EBX: return "%ebx";
27         case X86_ECX: return "%ecx";
28         case X86_EDX: return "%edx";
29         case X86_ESP: return "%esp";
30         case X86_EBP: return "%ebp";
31         case X86_EDI: return "%edi";
32         case X86_ESI: return "%esi";
33         }
34         return "unknown";
35 }
36
37 typedef struct {
38         guint16 size;
39         guint16 offset;
40         guint8  pad;
41 } MonoJitArgumentInfo;
42
43 /*
44  * arch_get_argument_info:
45  * @csig:  a method signature
46  * @param_count: the number of parameters to consider
47  * @arg_info: an array to store the resulting info
48  *
49  * Gathers information on parameters such as size, alignment and
50  * padding. arg_info should be large enough to hold param_count + 1 entries.
51  *
52  * Returns the size of the activation frame.
53  */
54 static int
55 arch_get_argument_info (MonoMethodSignature *csig, int param_count, MonoJitArgumentInfo *arg_info)
56 {
57         int k, frame_size = 0;
58         int size, align, pad;
59         int offset = 8;
60
61         if (MONO_TYPE_ISSTRUCT (csig->ret)) { 
62                 frame_size += sizeof (gpointer);
63                 offset += 4;
64         }
65
66         arg_info [0].offset = offset;
67
68         if (csig->hasthis) {
69                 frame_size += sizeof (gpointer);
70                 offset += 4;
71         }
72
73         arg_info [0].size = frame_size;
74
75         for (k = 0; k < param_count; k++) {
76                 
77                 if (csig->pinvoke)
78                         size = mono_type_native_stack_size (csig->params [k], &align);
79                 else
80                         size = mono_type_stack_size (csig->params [k], &align);
81
82                 /* ignore alignment for now */
83                 align = 1;
84
85                 frame_size += pad = (align - (frame_size & (align - 1))) & (align - 1); 
86                 arg_info [k].pad = pad;
87                 frame_size += size;
88                 arg_info [k + 1].pad = 0;
89                 arg_info [k + 1].size = size;
90                 offset += pad;
91                 arg_info [k + 1].offset = offset;
92                 offset += size;
93         }
94
95         align = MONO_ARCH_FRAME_ALIGNMENT;
96         frame_size += pad = (align - (frame_size & (align - 1))) & (align - 1);
97         arg_info [k].pad = pad;
98
99         return frame_size;
100 }
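
#if 0
/*
 * Usage sketch (illustration only, not compiled in): dump the stack layout
 * computed by arch_get_argument_info () for a signature, e.g. before
 * emitting a trace call.  Entry 0 covers the implicit arguments (hidden
 * valuetype return pointer and/or this), entries 1..param_count describe
 * the explicit parameters.
 */
static void
example_dump_arg_layout (MonoMethodSignature *sig)
{
        MonoJitArgumentInfo *info = alloca (sizeof (MonoJitArgumentInfo) * (sig->param_count + 1));
        int frame_size = arch_get_argument_info (sig, sig->param_count, info);
        int i;

        for (i = 0; i <= sig->param_count; ++i)
                g_print ("arg_info [%d]: offset %d, size %d\n", i, info [i].offset, info [i].size);
        g_print ("activation frame size: %d\n", frame_size);
}
#endif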
101
102 static int indent_level = 0;
103
104 static void indent (int diff) {
105         int v = indent_level;
106         while (v-- > 0) {
107                 printf (". ");
108         }
109         indent_level += diff;
110 }
111
112 static void
113 enter_method (MonoMethod *method, char *ebp)
114 {
115         int i, j;
116         MonoClass *class;
117         MonoObject *o;
118         MonoJitArgumentInfo *arg_info;
119         MonoMethodSignature *sig;
120         char *fname;
121
122         fname = mono_method_full_name (method, TRUE);
123         indent (1);
124         printf ("ENTER: %s(", fname);
125         g_free (fname);
126         
127         if (((int)ebp & (MONO_ARCH_FRAME_ALIGNMENT - 1)) != 0) {
128                 g_error ("unaligned stack detected (%p)", ebp);
129         }
130
131         sig = method->signature;
132
133         arg_info = alloca (sizeof (MonoJitArgumentInfo) * (sig->param_count + 1));
134
135         arch_get_argument_info (sig, sig->param_count, arg_info);
136
137         if (MONO_TYPE_ISSTRUCT (method->signature->ret)) {
138                 g_assert (!method->signature->ret->byref);
139
140                 printf ("VALUERET:%p, ", *((gpointer *)(ebp + 8)));
141         }
142
143         if (method->signature->hasthis) {
144                 gpointer *this = (gpointer *)(ebp + arg_info [0].offset);
145                 if (method->klass->valuetype) {
146                         printf ("value:%p, ", *this);
147                 } else {
148                         o = *((MonoObject **)this);
149
150                         if (o) {
151                                 class = o->vtable->klass;
152
153                                 if (class == mono_defaults.string_class) {
154                                         printf ("this:[STRING:%p:%s], ", o, mono_string_to_utf8 ((MonoString *)o));
155                                 } else {
156                                         printf ("this:%p[%s.%s], ", o, class->name_space, class->name);
157                                 }
158                         } else 
159                                 printf ("this:NULL, ");
160                 }
161         }
162
163         for (i = 0; i < method->signature->param_count; ++i) {
164                 gpointer *cpos = (gpointer *)(ebp + arg_info [i + 1].offset);
165                 int size = arg_info [i + 1].size;
166
167                 MonoType *type = method->signature->params [i];
168                 
169                 if (type->byref) {
170                         printf ("[BYREF:%p], ", *cpos); 
171                 } else switch (type->type) {
172                         
173                 case MONO_TYPE_I:
174                 case MONO_TYPE_U:
175                         printf ("%p, ", (gpointer)*((int *)(cpos)));
176                         break;
177                 case MONO_TYPE_BOOLEAN:
178                 case MONO_TYPE_CHAR:
179                 case MONO_TYPE_I1:
180                 case MONO_TYPE_U1:
181                 case MONO_TYPE_I2:
182                 case MONO_TYPE_U2:
183                 case MONO_TYPE_I4:
184                 case MONO_TYPE_U4:
185                         printf ("%d, ", *((int *)(cpos)));
186                         break;
187                 case MONO_TYPE_STRING: {
188                         MonoString *s = *((MonoString **)cpos);
189                         if (s) {
190                                 g_assert (((MonoObject *)s)->vtable->klass == mono_defaults.string_class);
191                                 printf ("[STRING:%p:%s], ", s, mono_string_to_utf8 (s));
192                         } else 
193                                 printf ("[STRING:null], ");
194                         break;
195                 }
196                 case MONO_TYPE_CLASS:
197                 case MONO_TYPE_OBJECT: {
198                         o = *((MonoObject **)cpos);
199                         if (o) {
200                                 class = o->vtable->klass;
201                     
202                                 if (class == mono_defaults.string_class) {
203                                         printf ("[STRING:%p:%s], ", o, mono_string_to_utf8 ((MonoString *)o));
204                                 } else if (class == mono_defaults.int32_class) {
205                                         printf ("[INT32:%p:%d], ", o, *(gint32 *)((char *)o + sizeof (MonoObject)));
206                                 } else
207                                         printf ("[%s.%s:%p], ", class->name_space, class->name, o);
208                         } else {
209                                 printf ("%p, ", *((gpointer *)(cpos)));                         
210                         }
211                         break;
212                 }
213                 case MONO_TYPE_PTR:
214                 case MONO_TYPE_FNPTR:
215                 case MONO_TYPE_ARRAY:
216                 case MONO_TYPE_SZARRAY:
217                         printf ("%p, ", *((gpointer *)(cpos)));
218                         break;
219                 case MONO_TYPE_I8:
220                 case MONO_TYPE_U8:
221                         printf ("0x%016llx, ", *((gint64 *)(cpos)));
222                         break;
223                 case MONO_TYPE_R4:
224                         printf ("%f, ", *((float *)(cpos)));
225                         break;
226                 case MONO_TYPE_R8:
227                         printf ("%f, ", *((double *)(cpos)));
228                         break;
229                 case MONO_TYPE_VALUETYPE: 
230                         printf ("[");
231                         for (j = 0; j < size; j++)
232                                 printf ("%02x,", *((guint8*)cpos +j));
233                         printf ("], ");
234                         break;
235                 default:
236                         printf ("XX, ");
237                 }
238         }
239
240         printf (")\n");
241 }
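
/*
 * Sample of the trace produced by enter_method ()/leave_method ()
 * (illustration only; the exact text depends on the argument types):
 *
 * . ENTER: System.String:Concat (string,string)([STRING:0x812340:foo], [STRING:0x812378:bar], )
 * . LEAVE: System.String:Concat (string,string)[STRING:0x8123ab:foobar]
 */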
242
243 static void
244 leave_method (MonoMethod *method, ...)
245 {
246         MonoType *type;
247         char *fname;
248         va_list ap;
249
250         va_start(ap, method);
251
252         fname = mono_method_full_name (method, TRUE);
253         indent (-1);
254         printf ("LEAVE: %s", fname);
255         g_free (fname);
256
257         type = method->signature->ret;
258
259 handle_enum:
260         switch (type->type) {
261         case MONO_TYPE_VOID:
262                 break;
263         case MONO_TYPE_BOOLEAN: {
264                 int eax = va_arg (ap, int);
265                 if (eax)
266                         printf ("TRUE:%d", eax);
267                 else 
268                         printf ("FALSE");
269                         
270                 break;
271         }
272         case MONO_TYPE_CHAR:
273         case MONO_TYPE_I1:
274         case MONO_TYPE_U1:
275         case MONO_TYPE_I2:
276         case MONO_TYPE_U2:
277         case MONO_TYPE_I4:
278         case MONO_TYPE_U4:
279         case MONO_TYPE_I:
280         case MONO_TYPE_U: {
281                 int eax = va_arg (ap, int);
282                 printf ("EAX=%d", eax);
283                 break;
284         }
285         case MONO_TYPE_STRING: {
286                 MonoString *s = va_arg (ap, MonoString *);
287 ;
288                 if (s) {
289                         g_assert (((MonoObject *)s)->vtable->klass == mono_defaults.string_class);
290                         printf ("[STRING:%p:%s]", s, mono_string_to_utf8 (s));
291                 } else 
292                         printf ("[STRING:null], ");
293                 break;
294         }
295         case MONO_TYPE_CLASS: 
296         case MONO_TYPE_OBJECT: {
297                 MonoObject *o = va_arg (ap, MonoObject *);
298
299                 if (o) {
300                         if (o->vtable->klass == mono_defaults.boolean_class) {
301                                 printf ("[BOOLEAN:%p:%d]", o, *((guint8 *)o + sizeof (MonoObject)));            
302                         } else if  (o->vtable->klass == mono_defaults.int32_class) {
303                                 printf ("[INT32:%p:%d]", o, *((gint32 *)((char *)o + sizeof (MonoObject))));    
304                         } else if  (o->vtable->klass == mono_defaults.int64_class) {
305                                 printf ("[INT64:%p:%lld]", o, *((gint64 *)((char *)o + sizeof (MonoObject))));  
306                         } else
307                                 printf ("[%s.%s:%p]", o->vtable->klass->name_space, o->vtable->klass->name, o);
308                 } else
309                         printf ("[OBJECT:%p]", o);
310                
311                 break;
312         }
313         case MONO_TYPE_PTR:
314         case MONO_TYPE_FNPTR:
315         case MONO_TYPE_ARRAY:
316         case MONO_TYPE_SZARRAY: {
317                 gpointer p = va_arg (ap, gpointer);
318                 printf ("EAX=%p", p);
319                 break;
320         }
321         case MONO_TYPE_I8: {
322                 gint64 l =  va_arg (ap, gint64);
323                 printf ("EAX/EDX=0x%16llx", l);
324                 break;
325         }
326         case MONO_TYPE_U8: {
327                 gint64 l =  va_arg (ap, gint64);
328                 printf ("EAX/EDX=0x%16llx", l);
329                 break;
330         }
331         case MONO_TYPE_R8: {
332                 double f = va_arg (ap, double);
333                 printf ("FP=%f\n", f);
334                 break;
335         }
336         case MONO_TYPE_VALUETYPE: 
337                 if (type->data.klass->enumtype) {
338                         type = type->data.klass->enum_basetype;
339                         goto handle_enum;
340                 } else {
341                         guint8 *p = va_arg (ap, gpointer);
342                         int j, size, align;
343                         size = mono_type_size (type, &align);
344                         printf ("[");
345                         for (j = 0; p && j < size; j++)
346                                 printf ("%02x,", p [j]);
347                         printf ("]");
348                 }
349                 break;
350         default:
351                 printf ("(unknown return type %x)", method->signature->ret->type);
352         }
353
354         printf ("\n");
355 }
356
357 static const guchar cpuid_impl [] = {
358         0x55,                           /* push   %ebp */
359         0x89, 0xe5,                     /* mov    %esp,%ebp */
360         0x53,                           /* push   %ebx */
361         0x8b, 0x45, 0x08,               /* mov    0x8(%ebp),%eax */
362         0x0f, 0xa2,                     /* cpuid   */
363         0x50,                           /* push   %eax */
364         0x8b, 0x45, 0x10,               /* mov    0x10(%ebp),%eax */
365         0x89, 0x18,                     /* mov    %ebx,(%eax) */
366         0x8b, 0x45, 0x14,               /* mov    0x14(%ebp),%eax */
367         0x89, 0x08,                     /* mov    %ecx,(%eax) */
368         0x8b, 0x45, 0x18,               /* mov    0x18(%ebp),%eax */
369         0x89, 0x10,                     /* mov    %edx,(%eax) */
370         0x58,                           /* pop    %eax */
371         0x8b, 0x55, 0x0c,               /* mov    0xc(%ebp),%edx */
372         0x89, 0x02,                     /* mov    %eax,(%edx) */
373         0x5b,                           /* pop    %ebx */
374         0xc9,                           /* leave   */
375         0xc3,                           /* ret     */
376 };
377
378 typedef void (*CpuidFunc) (int id, int* p_eax, int* p_ebx, int* p_ecx, int* p_edx);
379
380 static int 
381 cpuid (int id, int* p_eax, int* p_ebx, int* p_ecx, int* p_edx)
382 {
383         int have_cpuid = 0;
384         __asm__  __volatile__ (
385                 "pushfl\n"
386                 "popl %%eax\n"
387                 "movl %%eax, %%edx\n"
388                 "xorl $0x200000, %%eax\n"
389                 "pushl %%eax\n"
390                 "popfl\n"
391                 "pushfl\n"
392                 "popl %%eax\n"
393                 "xorl %%edx, %%eax\n"
394                 "andl $0x200000, %%eax\n"
395                 "movl %%eax, %0"
396                 : "=r" (have_cpuid)
397                 :
398                 : "%eax", "%edx"
399         );
400
401         if (have_cpuid) {
402                 CpuidFunc func = (CpuidFunc)cpuid_impl;
403                 func (id, p_eax, p_ebx, p_ecx, p_edx);
404                 /*
405                  * We use this approach because of issues with gcc and pic code, see:
406                  * http://gcc.gnu.org/cgi-bin/gnatsweb.pl?cmd=view%20audit-trail&database=gcc&pr=7329
407                 __asm__ __volatile__ ("cpuid"
408                         : "=a" (*p_eax), "=b" (*p_ebx), "=c" (*p_ecx), "=d" (*p_edx)
409                         : "a" (id));
410                 */
411                 return 1;
412         }
413         return 0;
414 }
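
#if 0
/*
 * Illustration only (not compiled in): CPUID leaf 0 returns the vendor
 * identification string in EBX, EDX, ECX, e.g. "GenuineIntel".
 */
static void
example_print_cpu_vendor (void)
{
        int eax, ebx, ecx, edx;
        char vendor [13];

        if (!cpuid (0, &eax, &ebx, &ecx, &edx))
                return;
        memcpy (vendor, &ebx, 4);
        memcpy (vendor + 4, &edx, 4);
        memcpy (vendor + 8, &ecx, 4);
        vendor [12] = 0;
        g_print ("cpu vendor: %s\n", vendor);
}
#endif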
415
416 /*
417  * Initialize the cpu to execute managed code.
418  */
419 void
420 mono_arch_cpu_init (void)
421 {
422         guint16 fpcw;
423
424         /* spec compliance requires running with double precision */
425         __asm__  __volatile__ ("fnstcw %0\n": "=m" (fpcw));
426         fpcw &= ~X86_FPCW_PRECC_MASK;
427         fpcw |= X86_FPCW_PREC_DOUBLE;
428         __asm__  __volatile__ ("fldcw %0\n": : "m" (fpcw));
429         __asm__  __volatile__ ("fnstcw %0\n": "=m" (fpcw));
430
431 }
432
433 /*
434  * This function returns the optimizations supported on this cpu.
435  */
436 guint32
437 mono_arch_cpu_optimizazions (guint32 *exclude_mask)
438 {
439         int eax, ebx, ecx, edx;
440         guint32 opts = 0;
441         
442         *exclude_mask = 0;
443         /* Feature Flags function, flags returned in EDX. */
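        /* leaf 1: EDX bit 15 = CMOV (FCMOV also needs EDX bit 0 = x87 FPU present) */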
444         if (cpuid (1, &eax, &ebx, &ecx, &edx)) {
445                 if (edx & (1 << 15)) {
446                         opts |= MONO_OPT_CMOV;
447                         if (edx & 1)
448                                 opts |= MONO_OPT_FCMOV;
449                         else
450                                 *exclude_mask |= MONO_OPT_FCMOV;
451                 } else
452                         *exclude_mask |= MONO_OPT_CMOV;
453         }
454         return opts;
455 }
456
457 static gboolean
458 is_regsize_var (MonoType *t) {
459         if (t->byref)
460                 return TRUE;
461         switch (t->type) {
462         case MONO_TYPE_I4:
463         case MONO_TYPE_U4:
464         case MONO_TYPE_I:
465         case MONO_TYPE_U:
466                 return TRUE;
467         case MONO_TYPE_OBJECT:
468         case MONO_TYPE_STRING:
469         case MONO_TYPE_CLASS:
470         case MONO_TYPE_SZARRAY:
471         case MONO_TYPE_ARRAY:
472                 return TRUE;
473         case MONO_TYPE_VALUETYPE:
474                 if (t->data.klass->enumtype)
475                         return is_regsize_var (t->data.klass->enum_basetype);
476                 return FALSE;
477         }
478         return FALSE;
479 }
480
481 GList *
482 mono_arch_get_allocatable_int_vars (MonoCompile *cfg)
483 {
484         GList *vars = NULL;
485         int i;
486
487         for (i = 0; i < cfg->num_varinfo; i++) {
488                 MonoInst *ins = cfg->varinfo [i];
489                 MonoMethodVar *vmv = MONO_VARINFO (cfg, i);
490
491                 /* unused vars */
492                 if (vmv->range.first_use.abs_pos > vmv->range.last_use.abs_pos)
493                         continue;
494
495                 if ((ins->flags & (MONO_INST_IS_DEAD|MONO_INST_VOLATILE|MONO_INST_INDIRECT)) || 
496                     (ins->opcode != OP_LOCAL && ins->opcode != OP_ARG))
497                         continue;
498
499                 /* we don't allocate I1 to registers because there is no simple way to sign extend
500                  * 8bit quantities in caller saved registers on x86 */
501                 if (is_regsize_var (ins->inst_vtype) || (ins->inst_vtype->type == MONO_TYPE_BOOLEAN) || 
502                     (ins->inst_vtype->type == MONO_TYPE_U1) || (ins->inst_vtype->type == MONO_TYPE_U2)||
503                     (ins->inst_vtype->type == MONO_TYPE_I2) || (ins->inst_vtype->type == MONO_TYPE_CHAR)) {
504                         g_assert (MONO_VARINFO (cfg, i)->reg == -1);
505                         g_assert (i == vmv->idx);
506                         vars = mono_varlist_insert_sorted (cfg, vars, vmv, FALSE);
507                 }
508         }
509
510         return vars;
511 }
512
513 GList *
514 mono_arch_get_global_int_regs (MonoCompile *cfg)
515 {
516         GList *regs = NULL;
517
518         /* we can use 3 registers for global allocation */
519         regs = g_list_prepend (regs, (gpointer)X86_EBX);
520         regs = g_list_prepend (regs, (gpointer)X86_ESI);
521         regs = g_list_prepend (regs, (gpointer)X86_EDI);
522
523         return regs;
524 }
525  
526 /*
527  * Set var information according to the calling convention. X86 version.
528  * The locals var stuff should most likely be split in another method.
529  */
530 void
531 mono_arch_allocate_vars (MonoCompile *m)
532 {
533         MonoMethodSignature *sig;
534         MonoMethodHeader *header;
535         MonoInst *inst;
536         int i, offset, size, align, curinst;
537
538         header = ((MonoMethodNormal *)m->method)->header;
539
540         sig = m->method->signature;
541         
542         offset = 8;
543         curinst = 0;
544         if (MONO_TYPE_ISSTRUCT (sig->ret)) {
545                 m->ret->opcode = OP_REGOFFSET;
546                 m->ret->inst_basereg = X86_EBP;
547                 m->ret->inst_offset = offset;
548                 offset += sizeof (gpointer);
549         } else {
550                 /* FIXME: handle long and FP values */
551                 switch (sig->ret->type) {
552                 case MONO_TYPE_VOID:
553                         break;
554                 default:
555                         m->ret->opcode = OP_REGVAR;
556                         m->ret->inst_c0 = X86_EAX;
557                         break;
558                 }
559         }
560         if (sig->hasthis) {
561                 inst = m->varinfo [curinst];
562                 if (inst->opcode != OP_REGVAR) {
563                         inst->opcode = OP_REGOFFSET;
564                         inst->inst_basereg = X86_EBP;
565                 }
566                 inst->inst_offset = offset;
567                 offset += sizeof (gpointer);
568                 curinst++;
569         }
570
571         if (sig->call_convention == MONO_CALL_VARARG) {
572                 m->sig_cookie = offset;
573                 offset += sizeof (gpointer);
574         }
575
576         for (i = 0; i < sig->param_count; ++i) {
577                 inst = m->varinfo [curinst];
578                 if (inst->opcode != OP_REGVAR) {
579                         inst->opcode = OP_REGOFFSET;
580                         inst->inst_basereg = X86_EBP;
581                 }
582                 inst->inst_offset = offset;
583                 size = mono_type_size (sig->params [i], &align);
584                 size += 4 - 1;
585                 size &= ~(4 - 1);
586                 offset += size;
587                 curinst++;
588         }
589
590         offset = 0;
591
592         /* reserve space to save LMF and caller saved registers */
593
594         if (m->method->save_lmf) {
595                 offset += sizeof (MonoLMF);
596         } else {
597                 if (m->used_int_regs & (1 << X86_EBX)) {
598                         offset += 4;
599                 }
600
601                 if (m->used_int_regs & (1 << X86_EDI)) {
602                         offset += 4;
603                 }
604
605                 if (m->used_int_regs & (1 << X86_ESI)) {
606                         offset += 4;
607                 }
608         }
609
610         for (i = curinst; i < m->num_varinfo; ++i) {
611                 inst = m->varinfo [i];
612
613                 if ((inst->flags & MONO_INST_IS_DEAD) || inst->opcode == OP_REGVAR)
614                         continue;
615
616                 /* inst->unused indicates native sized value types; this is used by the
617                 * pinvoke wrappers when they call functions returning structures */
618                 if (inst->unused && MONO_TYPE_ISSTRUCT (inst->inst_vtype) && inst->inst_vtype->type != MONO_TYPE_TYPEDBYREF)
619                         size = mono_class_native_size (inst->inst_vtype->data.klass, &align);
620                 else
621                         size = mono_type_size (inst->inst_vtype, &align);
622
623                 offset += size;
624                 offset += align - 1;
625                 offset &= ~(align - 1);
626                 inst->opcode = OP_REGOFFSET;
627                 inst->inst_basereg = X86_EBP;
628                 inst->inst_offset = -offset;
629                 //g_print ("allocating local %d to %d\n", i, -offset);
630         }
631         offset += (MONO_ARCH_FRAME_ALIGNMENT - 1);
632         offset &= ~(MONO_ARCH_FRAME_ALIGNMENT - 1);
633
634         /* change sign? */
635         m->stack_offset = -offset;
636 }
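
/*
 * Resulting frame layout (sketch): incoming arguments live at positive
 * offsets from %ebp starting at 8 (past the saved %ebp and the return
 * address), while the LMF/callee-saved area and the locals grow downwards
 * at negative offsets, with the local area rounded up to
 * MONO_ARCH_FRAME_ALIGNMENT.
 */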
637
638 /* FIXME: we need an alignment solution for enter_method and mono_arch_call_opcode;
639  * currently alignment in mono_arch_call_opcode is computed without arch_get_argument_info
640  */
641
642 /* 
643  * Take the arguments and generate the arch-specific
644  * instructions to properly call the function in call.
645  * This includes pushing arguments, moving them to the right
646  * registers, etc.
647  * Issue: who does the spilling if needed, and when?
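 *
 * An illustrative example (not from the original comment): for a
 * non-pinvoke instance call int Foo (long l, double d), the switch below
 * accumulates 4 (this) + 8 (I8) + 8 (R8) = 20 bytes of outgoing stack,
 * which ends up in call->stack_usage.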
648  */
649 MonoCallInst*
650 mono_arch_call_opcode (MonoCompile *cfg, MonoBasicBlock* bb, MonoCallInst *call, int is_virtual) {
651         MonoInst *arg, *in;
652         MonoMethodSignature *sig;
653         int i, n, stack_size, type;
654         MonoType *ptype;
655
656         stack_size = 0;
657         /* add the vararg cookie before the non-implicit args */
658         if (call->signature->call_convention == MONO_CALL_VARARG) {
659                 MonoInst *sig_arg;
660                 MONO_INST_NEW (cfg, arg, OP_OUTARG);
661                 MONO_INST_NEW (cfg, sig_arg, OP_ICONST);
662                 sig_arg->inst_p0 = call->signature;
663                 arg->inst_left = sig_arg;
664                 arg->type = STACK_PTR;
665                 /* prepend, so they get reversed */
666                 arg->next = call->out_args;
667                 call->out_args = arg;
668                 stack_size += sizeof (gpointer);
669         }
670         sig = call->signature;
671         n = sig->param_count + sig->hasthis;
672
673         if (sig->ret && MONO_TYPE_ISSTRUCT (sig->ret))
674                 stack_size += sizeof (gpointer);
675         for (i = 0; i < n; ++i) {
676                 if (is_virtual && i == 0) {
677                         /* the argument will be attached to the call instruction */
678                         in = call->args [i];
679                         stack_size += 4;
680                 } else {
681                         MONO_INST_NEW (cfg, arg, OP_OUTARG);
682                         in = call->args [i];
683                         arg->cil_code = in->cil_code;
684                         arg->inst_left = in;
685                         arg->type = in->type;
686                         /* prepend, so they get reversed */
687                         arg->next = call->out_args;
688                         call->out_args = arg;
689                         if (i >= sig->hasthis) {
690                                 ptype = sig->params [i - sig->hasthis];
691                                 if (ptype->byref)
692                                         type = MONO_TYPE_U;
693                                 else
694                                         type = ptype->type;
695 handle_enum:
696                                 /* FIXME: validate arguments... */
697                                 switch (type) {
698                                 case MONO_TYPE_I:
699                                 case MONO_TYPE_U:
700                                 case MONO_TYPE_BOOLEAN:
701                                 case MONO_TYPE_CHAR:
702                                 case MONO_TYPE_I1:
703                                 case MONO_TYPE_U1:
704                                 case MONO_TYPE_I2:
705                                 case MONO_TYPE_U2:
706                                 case MONO_TYPE_I4:
707                                 case MONO_TYPE_U4:
708                                 case MONO_TYPE_STRING:
709                                 case MONO_TYPE_CLASS:
710                                 case MONO_TYPE_OBJECT:
711                                 case MONO_TYPE_PTR:
712                                 case MONO_TYPE_FNPTR:
713                                 case MONO_TYPE_ARRAY:
714                                 case MONO_TYPE_SZARRAY:
715                                         stack_size += 4;
716                                         break;
717                                 case MONO_TYPE_I8:
718                                 case MONO_TYPE_U8:
719                                         stack_size += 8;
720                                         break;
721                                 case MONO_TYPE_R4:
722                                         stack_size += 4;
723                                         arg->opcode = OP_OUTARG_R4;
724                                         break;
725                                 case MONO_TYPE_R8:
726                                         stack_size += 8;
727                                         arg->opcode = OP_OUTARG_R8;
728                                         break;
729                                 case MONO_TYPE_VALUETYPE:
730                                         if (MONO_TYPE_ISSTRUCT (ptype)) {
731                                                 int size;
732                                                 if (sig->pinvoke) 
733                                                         size = mono_type_native_stack_size (&in->klass->byval_arg, NULL);
734                                                 else 
735                                                         size = mono_type_stack_size (&in->klass->byval_arg, NULL);
736
737                                                 stack_size += size;
738                                                 arg->opcode = OP_OUTARG_VT;
739                                                 arg->klass = in->klass;
740                                                 arg->unused = sig->pinvoke;
741                                                 arg->inst_imm = size; 
742                                         } else {
743                                                 type = ptype->data.klass->enum_basetype->type;
744                                                 goto handle_enum;
745                                         }
746                                         break;
747                                 case MONO_TYPE_TYPEDBYREF:
748                                         stack_size += sizeof (MonoTypedRef);
749                                         arg->opcode = OP_OUTARG_VT;
750                                         arg->klass = in->klass;
751                                         arg->unused = sig->pinvoke;
752                                         arg->inst_imm = sizeof (MonoTypedRef); 
753                                         break;
754                                 case MONO_TYPE_GENERICINST:
755                                         type = ptype->data.generic_inst->generic_type->type;
756                                         goto handle_enum;
757
758                                 default:
759                                         g_error ("unknown type 0x%02x in mono_arch_call_opcode\n", type);
760                                 }
761                         } else {
762                                 /* the this argument */
763                                 stack_size += 4;
764                         }
765                 }
766         }
767         /* if the function returns a struct, the called method already does a ret $0x4 */
768         if (sig->ret && MONO_TYPE_ISSTRUCT (sig->ret))
769                 stack_size -= 4;
770         call->stack_usage = stack_size;
771         /* 
772          * should set more info in call, such as the stack space
773          * used by the args that needs to be added back to esp
774          */
775
776         return call;
777 }
778
779 /*
780  * Allow tracing to work with this interface (with an optional argument)
781  */
782
783 /*
784  * This may be needed on some archs or for debugging support.
785  */
786 void
787 mono_arch_instrument_mem_needs (MonoMethod *method, int *stack, int *code)
788 {
789         /* no stack room needed now (may be needed for FASTCALL-trace support) */
790         *stack = 0;
791         /* split prolog-epilog requirements? */
792         *code = 50; /* max bytes needed: check this number */
793 }
794
795 void*
796 mono_arch_instrument_prolog (MonoCompile *cfg, void *func, void *p, gboolean enable_arguments)
797 {
798         guchar *code = p;
799
800         /* if some args are passed in registers, we need to save them here */
801         x86_push_reg (code, X86_EBP);
802         mono_add_patch_info (cfg, code-cfg->native_code, MONO_PATCH_INFO_METHODCONST, cfg->method);
803         x86_push_imm (code, cfg->method);
804         mono_add_patch_info (cfg, code-cfg->native_code, MONO_PATCH_INFO_ABS, func);
805         x86_call_code (code, 0);
806         x86_alu_reg_imm (code, X86_ADD, X86_ESP, 8);
807
808         return code;
809 }
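
/*
 * The sequence emitted above is roughly:
 *   push %ebp
 *   push $method          ; patched via MONO_PATCH_INFO_METHODCONST
 *   call func             ; patched via MONO_PATCH_INFO_ABS
 *   add  $8, %esp
 * i.e. it calls func (cfg->method, frame pointer) before the method body runs.
 */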
810
811 enum {
812         SAVE_NONE,
813         SAVE_STRUCT,
814         SAVE_EAX,
815         SAVE_EAX_EDX,
816         SAVE_FP
817 };
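
/*
 * What has to be preserved around the call to the trace function:
 * nothing, the address of the hidden valuetype return buffer, EAX,
 * the EAX:EDX pair (64 bit integers), or the x87 top of stack
 * (floating point returns).
 */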
818
819 void*
820 mono_arch_instrument_epilog (MonoCompile *cfg, void *func, void *p, gboolean enable_arguments)
821 {
822         guchar *code = p;
823         int arg_size = 0, save_mode = SAVE_NONE;
824         MonoMethod *method = cfg->method;
825         int rtype = method->signature->ret->type;
826         
827 handle_enum:
828         switch (rtype) {
829         case MONO_TYPE_VOID:
830                 /* special case string .ctor icall */
831                 if (!strcmp (".ctor", method->name) && method->klass == mono_defaults.string_class)
832                         save_mode = SAVE_EAX;
833                 else
834                         save_mode = SAVE_NONE;
835                 break;
836         case MONO_TYPE_I8:
837         case MONO_TYPE_U8:
838                 save_mode = SAVE_EAX_EDX;
839                 break;
840         case MONO_TYPE_R4:
841         case MONO_TYPE_R8:
842                 save_mode = SAVE_FP;
843                 break;
844         case MONO_TYPE_VALUETYPE:
845                 if (method->signature->ret->data.klass->enumtype) {
846                         rtype = method->signature->ret->data.klass->enum_basetype->type;
847                         goto handle_enum;
848                 }
849                 save_mode = SAVE_STRUCT;
850                 break;
851         default:
852                 save_mode = SAVE_EAX;
853                 break;
854         }
855
856         switch (save_mode) {
857         case SAVE_EAX_EDX:
858                 x86_push_reg (code, X86_EDX);
859                 x86_push_reg (code, X86_EAX);
860                 if (enable_arguments) {
861                         x86_push_reg (code, X86_EDX);
862                         x86_push_reg (code, X86_EAX);
863                         arg_size = 8;
864                 }
865                 break;
866         case SAVE_EAX:
867                 x86_push_reg (code, X86_EAX);
868                 if (enable_arguments) {
869                         x86_push_reg (code, X86_EAX);
870                         arg_size = 4;
871                 }
872                 break;
873         case SAVE_FP:
874                 x86_alu_reg_imm (code, X86_SUB, X86_ESP, 8);
875                 x86_fst_membase (code, X86_ESP, 0, TRUE, TRUE);
876                 if (enable_arguments) {
877                         x86_alu_reg_imm (code, X86_SUB, X86_ESP, 8);
878                         x86_fst_membase (code, X86_ESP, 0, TRUE, TRUE);
879                         arg_size = 8;
880                 }
881                 break;
882         case SAVE_STRUCT:
883                 if (enable_arguments) {
884                         x86_push_membase (code, X86_EBP, 8);
885                         arg_size = 4;
886                 }
887                 break;
888         case SAVE_NONE:
889         default:
890                 break;
891         }
892
893
894         mono_add_patch_info (cfg, code-cfg->native_code, MONO_PATCH_INFO_METHODCONST, method);
895         x86_push_imm (code, method);
896         mono_add_patch_info (cfg, code-cfg->native_code, MONO_PATCH_INFO_ABS, func);
897         x86_call_code (code, 0);
898         x86_alu_reg_imm (code, X86_ADD, X86_ESP, arg_size + 4);
899
900         switch (save_mode) {
901         case SAVE_EAX_EDX:
902                 x86_pop_reg (code, X86_EAX);
903                 x86_pop_reg (code, X86_EDX);
904                 break;
905         case SAVE_EAX:
906                 x86_pop_reg (code, X86_EAX);
907                 break;
908         case SAVE_FP:
909                 x86_fld_membase (code, X86_ESP, 0, TRUE);
910                 x86_alu_reg_imm (code, X86_ADD, X86_ESP, 8);
911                 break;
912         case SAVE_NONE:
913         default:
914                 break;
915         }
916
917         return code;
918 }
919
920 #define EMIT_COND_BRANCH(ins,cond,sign) \
921 if (ins->flags & MONO_INST_BRLABEL) { \
922         if (ins->inst_i0->inst_c0) { \
923                 x86_branch (code, cond, cfg->native_code + ins->inst_i0->inst_c0, sign); \
924         } else { \
925                 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_LABEL, ins->inst_i0); \
926                 x86_branch32 (code, cond, 0, sign); \
927         } \
928 } else { \
929         if (ins->inst_true_bb->native_offset) { \
930                 x86_branch (code, cond, cfg->native_code + ins->inst_true_bb->native_offset, sign); \
931         } else { \
932                 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB, ins->inst_true_bb); \
933                 if ((cfg->opt & MONO_OPT_BRANCH) && \
934                     x86_is_imm8 (ins->inst_true_bb->max_offset - cpos)) \
935                         x86_branch8 (code, cond, 0, sign); \
936                 else \
937                         x86_branch32 (code, cond, 0, sign); \
938         } \
939 }
940
941 /* emit an exception if the condition fails */
942 #define EMIT_COND_SYSTEM_EXCEPTION(cond,signed,exc_name)            \
943         do {                                                        \
944                 mono_add_patch_info (cfg, code - cfg->native_code,   \
945                                     MONO_PATCH_INFO_EXC, exc_name);  \
946                 x86_branch32 (code, cond, 0, signed);               \
947         } while (0); 
948
949 #define EMIT_FPCOMPARE(code) do { \
950         x86_fcompp (code); \
951         x86_fnstsw (code); \
952 } while (0); 
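
/*
 * Typical use in the code emission pass further down in this file
 * (illustration only):
 *
 *   EMIT_COND_BRANCH (ins, X86_CC_EQ, TRUE);
 *   EMIT_COND_SYSTEM_EXCEPTION (X86_CC_O, FALSE, "OverflowException");
 */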
953
954 static void
955 peephole_pass (MonoCompile *cfg, MonoBasicBlock *bb)
956 {
957         MonoInst *ins, *last_ins = NULL;
958         ins = bb->code;
959
960         while (ins) {
961
962                 switch (ins->opcode) {
963                 case OP_ICONST:
964                         /* reg = 0 -> XOR (reg, reg) */
965                         /* XOR sets the condition flags on x86, so we can't always do it */
966                         if (ins->inst_c0 == 0 && ins->next &&
967                             (ins->next->opcode == CEE_BR)) { 
968                                 ins->opcode = CEE_XOR;
969                                 ins->sreg1 = ins->dreg;
970                                 ins->sreg2 = ins->dreg;
971                         }
972                         break;
973                 case OP_MUL_IMM: 
974                         /* remove unnecessary multiplication by 1 */
975                         if (ins->inst_imm == 1) {
976                                 if (ins->dreg != ins->sreg1) {
977                                         ins->opcode = OP_MOVE;
978                                 } else {
979                                         last_ins->next = ins->next;                             
980                                         ins = ins->next;                                
981                                         continue;
982                                 }
983                         }
984                         break;
985                 case OP_COMPARE_IMM:
986                         /* OP_COMPARE_IMM (reg, 0) --> OP_X86_TEST_NULL (reg) */
987                         if (ins->inst_imm == 0 && ins->next &&
988                             (ins->next->opcode == CEE_BEQ || ins->next->opcode == CEE_BNE_UN ||
989                              ins->next->opcode == OP_CEQ)) {
990                                 ins->opcode = OP_X86_TEST_NULL;
991                         }     
992                         break;
993                 case OP_LOAD_MEMBASE:
994                 case OP_LOADI4_MEMBASE:
995                         /* 
996                          * OP_STORE_MEMBASE_REG reg1, offset(basereg); OP_LOAD_MEMBASE offset(basereg), reg2
997                          * --> the load becomes OP_MOVE reg1, reg2 (or is removed when reg1 == reg2)
998                          */
999                         if (last_ins && (last_ins->opcode == OP_STOREI4_MEMBASE_REG 
1000                                          || last_ins->opcode == OP_STORE_MEMBASE_REG) &&
1001                             ins->inst_basereg == last_ins->inst_destbasereg &&
1002                             ins->inst_offset == last_ins->inst_offset) {
1003                                 if (ins->dreg == last_ins->sreg1) {
1004                                         last_ins->next = ins->next;                             
1005                                         ins = ins->next;                                
1006                                         continue;
1007                                 } else {
1008                                         //static int c = 0; printf ("MATCHX %s %d\n", cfg->method->name,c++);
1009                                         ins->opcode = OP_MOVE;
1010                                         ins->sreg1 = last_ins->sreg1;
1011                                 }
1012
1013                         /* 
1014                          * Note: reg1 must be different from the basereg in the second load
1015                          * OP_LOAD_MEMBASE offset(basereg), reg1
1016                          * OP_LOAD_MEMBASE offset(basereg), reg2
1017                          * -->
1018                          * OP_LOAD_MEMBASE offset(basereg), reg1
1019                          * OP_MOVE reg1, reg2
1020                          */
1021                         } else if (last_ins && (last_ins->opcode == OP_LOADI4_MEMBASE
1022                                            || last_ins->opcode == OP_LOAD_MEMBASE) &&
1023                               ins->inst_basereg != last_ins->dreg &&
1024                               ins->inst_basereg == last_ins->inst_basereg &&
1025                               ins->inst_offset == last_ins->inst_offset) {
1026
1027                                 if (ins->dreg == last_ins->dreg) {
1028                                         last_ins->next = ins->next;                             
1029                                         ins = ins->next;                                
1030                                         continue;
1031                                 } else {
1032                                         ins->opcode = OP_MOVE;
1033                                         ins->sreg1 = last_ins->dreg;
1034                                 }
1035
1036                                 //g_assert_not_reached ();
1037
1038 #if 0
1039                         /* 
1040                          * OP_STORE_MEMBASE_IMM imm, offset(basereg) 
1041                          * OP_LOAD_MEMBASE offset(basereg), reg
1042                          * -->
1043                          * OP_STORE_MEMBASE_IMM imm, offset(basereg) 
1044                          * OP_ICONST reg, imm
1045                          */
1046                         } else if (last_ins && (last_ins->opcode == OP_STOREI4_MEMBASE_IMM
1047                                                 || last_ins->opcode == OP_STORE_MEMBASE_IMM) &&
1048                                    ins->inst_basereg == last_ins->inst_destbasereg &&
1049                                    ins->inst_offset == last_ins->inst_offset) {
1050                                 //static int c = 0; printf ("MATCHX %s %d\n", cfg->method->name,c++);
1051                                 ins->opcode = OP_ICONST;
1052                                 ins->inst_c0 = last_ins->inst_imm;
1053                                 g_assert_not_reached (); // check this rule
1054 #endif
1055                         }
1056                         break;
1057                 case OP_LOADU1_MEMBASE:
1058                 case OP_LOADI1_MEMBASE:
1059                         /*
1060                          * Same forwarding of a just-stored value as the I4 case above, for byte-sized store/load pairs
1061                          */
1062                         if (last_ins && (last_ins->opcode == OP_STOREI1_MEMBASE_REG) &&
1063                                         ins->inst_basereg == last_ins->inst_destbasereg &&
1064                                         ins->inst_offset == last_ins->inst_offset) {
1065                                 if (ins->dreg == last_ins->sreg1) {
1066                                         last_ins->next = ins->next;                             
1067                                         ins = ins->next;                                
1068                                         continue;
1069                                 } else {
1070                                         //static int c = 0; printf ("MATCHX %s %d\n", cfg->method->name,c++);
1071                                         ins->opcode = OP_MOVE;
1072                                         ins->sreg1 = last_ins->sreg1;
1073                                 }
1074                         }
1075                         break;
1076                 case OP_LOADU2_MEMBASE:
1077                 case OP_LOADI2_MEMBASE:
1078                         /*
1079                          * Same forwarding of a just-stored value as the I4 case above, for 16 bit store/load pairs
1080                          */
1081                         if (last_ins && (last_ins->opcode == OP_STOREI2_MEMBASE_REG) &&
1082                                         ins->inst_basereg == last_ins->inst_destbasereg &&
1083                                         ins->inst_offset == last_ins->inst_offset) {
1084                                 if (ins->dreg == last_ins->sreg1) {
1085                                         last_ins->next = ins->next;                             
1086                                         ins = ins->next;                                
1087                                         continue;
1088                                 } else {
1089                                         //static int c = 0; printf ("MATCHX %s %d\n", cfg->method->name,c++);
1090                                         ins->opcode = OP_MOVE;
1091                                         ins->sreg1 = last_ins->sreg1;
1092                                 }
1093                         }
1094                         break;
1095                 case CEE_CONV_I4:
1096                 case CEE_CONV_U4:
1097                 case OP_MOVE:
1098                         /* 
1099                          * OP_MOVE reg, reg 
1100                          */
1101                         if (ins->dreg == ins->sreg1) {
1102                                 if (last_ins)
1103                                         last_ins->next = ins->next;                             
1104                                 ins = ins->next;
1105                                 continue;
1106                         }
1107                         /* 
1108                          * OP_MOVE sreg, dreg 
1109                          * OP_MOVE dreg, sreg
1110                          */
1111                         if (last_ins && last_ins->opcode == OP_MOVE &&
1112                             ins->sreg1 == last_ins->dreg &&
1113                             ins->dreg == last_ins->sreg1) {
1114                                 last_ins->next = ins->next;                             
1115                                 ins = ins->next;                                
1116                                 continue;
1117                         }
1118                         break;
1119                 }
1120                 last_ins = ins;
1121                 ins = ins->next;
1122         }
1123         bb->last_ins = last_ins;
1124 }
1125
1126 static const int 
1127 branch_cc_table [] = {
1128         X86_CC_EQ, X86_CC_GE, X86_CC_GT, X86_CC_LE, X86_CC_LT,
1129         X86_CC_NE, X86_CC_GE, X86_CC_GT, X86_CC_LE, X86_CC_LT,
1130         X86_CC_O, X86_CC_NO, X86_CC_C, X86_CC_NC
1131 };
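
/*
 * The table is indexed with the CIL conditional branch opcodes relative to
 * CEE_BEQ (the signed comparisons, then their _UN variants), with the last
 * four entries used for the overflow/carry conditional exceptions; the
 * signed/unsigned distinction itself is passed separately to the branch
 * macros above.
 */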
1132
1133 #define DEBUG(a) if (cfg->verbose_level > 1) a
1134 //#define DEBUG(a)
1135 #define reg_is_freeable(r) ((r) >= 0 && (r) <= 7 && X86_IS_CALLEE ((r)))
1136
1137 typedef struct {
1138         int born_in;
1139         int killed_in;
1140         int last_use;
1141         int prev_use;
1142 } RegTrack;
1143
1144 static const char*const * ins_spec = pentium_desc;
1145
1146 static void
1147 print_ins (int i, MonoInst *ins)
1148 {
1149         const char *spec = ins_spec [ins->opcode];
1150         g_print ("\t%-2d %s", i, mono_inst_name (ins->opcode));
1151         if (spec [MONO_INST_DEST]) {
1152                 if (ins->dreg >= MONO_MAX_IREGS)
1153                         g_print (" R%d <-", ins->dreg);
1154                 else
1155                         g_print (" %s <-", mono_arch_regname (ins->dreg));
1156         }
1157         if (spec [MONO_INST_SRC1]) {
1158                 if (ins->sreg1 >= MONO_MAX_IREGS)
1159                         g_print (" R%d", ins->sreg1);
1160                 else
1161                         g_print (" %s", mono_arch_regname (ins->sreg1));
1162         }
1163         if (spec [MONO_INST_SRC2]) {
1164                 if (ins->sreg2 >= MONO_MAX_IREGS)
1165                         g_print (" R%d", ins->sreg2);
1166                 else
1167                         g_print (" %s", mono_arch_regname (ins->sreg2));
1168         }
1169         if (spec [MONO_INST_CLOB])
1170                 g_print (" clobbers: %c", spec [MONO_INST_CLOB]);
1171         g_print ("\n");
1172 }
1173
1174 static void
1175 print_regtrack (RegTrack *t, int num)
1176 {
1177         int i;
1178         char buf [32];
1179         const char *r;
1180         
1181         for (i = 0; i < num; ++i) {
1182                 if (!t [i].born_in)
1183                         continue;
1184                 if (i >= MONO_MAX_IREGS) {
1185                         g_snprintf (buf, sizeof(buf), "R%d", i);
1186                         r = buf;
1187                 } else
1188                         r = mono_arch_regname (i);
1189                 g_print ("liveness: %s [%d - %d]\n", r, t [i].born_in, t[i].last_use);
1190         }
1191 }
1192
1193 typedef struct InstList InstList;
1194
1195 struct InstList {
1196         InstList *prev;
1197         InstList *next;
1198         MonoInst *data;
1199 };
1200
1201 static inline InstList*
1202 inst_list_prepend (MonoMemPool *pool, InstList *list, MonoInst *data)
1203 {
1204         InstList *item = mono_mempool_alloc (pool, sizeof (InstList));
1205         item->data = data;
1206         item->prev = NULL;
1207         item->next = list;
1208         if (list)
1209                 list->prev = item;
1210         return item;
1211 }
1212
1213 /*
1214  * Force the spilling of the variable in the symbolic register 'reg'.
1215  */
1216 static int
1217 get_register_force_spilling (MonoCompile *cfg, InstList *item, MonoInst *ins, int reg)
1218 {
1219         MonoInst *load;
1220         int i, sel, spill;
1221         
1222         sel = cfg->rs->iassign [reg];
1223         /*i = cfg->rs->isymbolic [sel];
1224         g_assert (i == reg);*/
1225         i = reg;
1226         spill = ++cfg->spill_count;
1227         cfg->rs->iassign [i] = -spill - 1;
1228         mono_regstate_free_int (cfg->rs, sel);
1229         /* we need to create a spill var and insert a load to sel after the current instruction */
1230         MONO_INST_NEW (cfg, load, OP_LOAD_MEMBASE);
1231         load->dreg = sel;
1232         load->inst_basereg = X86_EBP;
1233         load->inst_offset = mono_spillvar_offset (cfg, spill);
1234         if (item->prev) {
1235                 while (ins->next != item->prev->data)
1236                         ins = ins->next;
1237         }
1238         load->next = ins->next;
1239         ins->next = load;
1240         DEBUG (g_print ("SPILLED LOAD (%d at 0x%08x(%%ebp)) R%d (freed %s)\n", spill, load->inst_offset, i, mono_arch_regname (sel)));
1241         i = mono_regstate_alloc_int (cfg->rs, 1 << sel);
1242         g_assert (i == sel);
1243
1244         return sel;
1245 }
1246
1247 static int
1248 get_register_spilling (MonoCompile *cfg, InstList *item, MonoInst *ins, guint32 regmask, int reg)
1249 {
1250         MonoInst *load;
1251         int i, sel, spill;
1252
1253         DEBUG (g_print ("start regmask to assign R%d: 0x%08x (R%d <- R%d R%d)\n", reg, regmask, ins->dreg, ins->sreg1, ins->sreg2));
1254         /* exclude the registers in the current instruction */
1255         if (reg != ins->sreg1 && (reg_is_freeable (ins->sreg1) || (ins->sreg1 >= MONO_MAX_IREGS && cfg->rs->iassign [ins->sreg1] >= 0))) {
1256                 if (ins->sreg1 >= MONO_MAX_IREGS)
1257                         regmask &= ~ (1 << cfg->rs->iassign [ins->sreg1]);
1258                 else
1259                         regmask &= ~ (1 << ins->sreg1);
1260                 DEBUG (g_print ("excluding sreg1 %s\n", mono_arch_regname (ins->sreg1)));
1261         }
1262         if (reg != ins->sreg2 && (reg_is_freeable (ins->sreg2) || (ins->sreg2 >= MONO_MAX_IREGS && cfg->rs->iassign [ins->sreg2] >= 0))) {
1263                 if (ins->sreg2 >= MONO_MAX_IREGS)
1264                         regmask &= ~ (1 << cfg->rs->iassign [ins->sreg2]);
1265                 else
1266                         regmask &= ~ (1 << ins->sreg2);
1267                 DEBUG (g_print ("excluding sreg2 %s %d\n", mono_arch_regname (ins->sreg2), ins->sreg2));
1268         }
1269         if (reg != ins->dreg && reg_is_freeable (ins->dreg)) {
1270                 regmask &= ~ (1 << ins->dreg);
1271                 DEBUG (g_print ("excluding dreg %s\n", mono_arch_regname (ins->dreg)));
1272         }
1273
1274         DEBUG (g_print ("available regmask: 0x%08x\n", regmask));
1275         g_assert (regmask); /* need at least a register we can free */
1276         sel = -1;
1277         /* we should track prev_use and spill the register whose next use is farthest away */
1278         for (i = 0; i < MONO_MAX_IREGS; ++i) {
1279                 if (regmask & (1 << i)) {
1280                         sel = i;
1281                         DEBUG (g_print ("selected register %s has assignment %d\n", mono_arch_regname (sel), cfg->rs->iassign [sel]));
1282                         break;
1283                 }
1284         }
1285         i = cfg->rs->isymbolic [sel];
1286         spill = ++cfg->spill_count;
1287         cfg->rs->iassign [i] = -spill - 1;
1288         mono_regstate_free_int (cfg->rs, sel);
1289         /* we need to create a spill var and insert a load to sel after the current instruction */
1290         MONO_INST_NEW (cfg, load, OP_LOAD_MEMBASE);
1291         load->dreg = sel;
1292         load->inst_basereg = X86_EBP;
1293         load->inst_offset = mono_spillvar_offset (cfg, spill);
1294         if (item->prev) {
1295                 while (ins->next != item->prev->data)
1296                         ins = ins->next;
1297         }
1298         load->next = ins->next;
1299         ins->next = load;
1300         DEBUG (g_print ("SPILLED LOAD (%d at 0x%08x(%%ebp)) R%d (freed %s)\n", spill, load->inst_offset, i, mono_arch_regname (sel)));
1301         i = mono_regstate_alloc_int (cfg->rs, 1 << sel);
1302         g_assert (i == sel);
1303         
1304         return sel;
1305 }
1306
1307 static MonoInst*
1308 create_copy_ins (MonoCompile *cfg, int dest, int src, MonoInst *ins)
1309 {
1310         MonoInst *copy;
1311         MONO_INST_NEW (cfg, copy, OP_MOVE);
1312         copy->dreg = dest;
1313         copy->sreg1 = src;
1314         if (ins) {
1315                 copy->next = ins->next;
1316                 ins->next = copy;
1317         }
1318         DEBUG (g_print ("\tforced copy from %s to %s\n", mono_arch_regname (src), mono_arch_regname (dest)));
1319         return copy;
1320 }
1321
1322 static MonoInst*
1323 create_spilled_store (MonoCompile *cfg, int spill, int reg, int prev_reg, MonoInst *ins)
1324 {
1325         MonoInst *store;
1326         MONO_INST_NEW (cfg, store, OP_STORE_MEMBASE_REG);
1327         store->sreg1 = reg;
1328         store->inst_destbasereg = X86_EBP;
1329         store->inst_offset = mono_spillvar_offset (cfg, spill);
1330         if (ins) {
1331                 store->next = ins->next;
1332                 ins->next = store;
1333         }
1334         DEBUG (g_print ("SPILLED STORE (%d at 0x%08x(%%ebp)) R%d (from %s)\n", spill, store->inst_offset, prev_reg, mono_arch_regname (reg)));
1335         return store;
1336 }
1337
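/*
 * Link 'to_insert' into the forward instruction chain right before 'ins'.
 * 'item' is the InstList node for 'ins'; since that list is in reverse
 * order, item->next (when present) holds an earlier instruction from which
 * 'ins' can be reached.
 */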
1338 static void
1339 insert_before_ins (MonoInst *ins, InstList *item, MonoInst* to_insert)
1340 {
1341         MonoInst *prev;
1342         if (item->next) {
1343                 prev = item->next->data;
1344
1345                 while (prev->next != ins)
1346                         prev = prev->next;
1347                 to_insert->next = ins;
1348                 prev->next = to_insert;
1349         } else {
1350                 to_insert->next = ins;
1351         }
1352         /* 
1353          * needed: otherwise, when processing the next instruction, an ins appended
1354          * to the end of the list would end up past this instruction.
1355          */
1356         item->data = to_insert; 
1357 }
1358
1359 #if  0
1360 static int
1361 alloc_int_reg (MonoCompile *cfg, InstList *curinst, MonoInst *ins, int sym_reg, guint32 allow_mask)
1362 {
1363         int val = cfg->rs->iassign [sym_reg];
1364         if (val < 0) {
1365                 int spill = 0;
1366                 if (val < -1) {
1367                         /* the register gets spilled after this inst */
1368                         spill = -val -1;
1369                 }
1370                 val = mono_regstate_alloc_int (cfg->rs, allow_mask);
1371                 if (val < 0)
1372                         val = get_register_spilling (cfg, curinst, ins, allow_mask, sym_reg);
1373                 cfg->rs->iassign [sym_reg] = val;
1374                 /* add option to store before the instruction for src registers */
1375                 if (spill)
1376                         create_spilled_store (cfg, spill, val, sym_reg, ins);
1377         }
1378         cfg->rs->isymbolic [val] = sym_reg;
1379         return val;
1380 }
1381 #endif
1382
1383 /*#include "cprop.c"*/
1384
1385 /*
1386  * Local register allocation.
1387  * We first scan the list of instructions and save the liveness info of
1388  * each register (when the register is first used, when its value is set, etc.).
1389  * We also reverse the list of instructions (in the InstList list) because assigning
1390  * registers backwards allows for more tricks to be used.
1391  */
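/*
 * Convention used below: rs->iassign [vreg] < 0 means the virtual register has
 * no hard register yet; values smaller than -1 additionally encode a pending
 * spill slot as -(spill + 1), so a spill store is emitted once the register
 * finally gets assigned.
 */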
1392 void
1393 mono_arch_local_regalloc (MonoCompile *cfg, MonoBasicBlock *bb)
1394 {
1395         MonoInst *ins;
1396         MonoRegState *rs = cfg->rs;
1397         int i, val, fpcount;
1398         RegTrack *reginfo, *reginfof;
1399         RegTrack *reginfo1, *reginfo2, *reginfod;
1400         InstList *tmp, *reversed = NULL;
1401         const char *spec;
1402         guint32 src1_mask, src2_mask, dest_mask;
1403
1404         if (!bb->code)
1405                 return;
1406         rs->next_vireg = bb->max_ireg;
1407         rs->next_vfreg = bb->max_freg;
1408         mono_regstate_assign (rs);
1409         reginfo = mono_mempool_alloc0 (cfg->mempool, sizeof (RegTrack) * rs->next_vireg);
1410         reginfof = mono_mempool_alloc0 (cfg->mempool, sizeof (RegTrack) * rs->next_vfreg);
1411         rs->ifree_mask = X86_CALLEE_REGS;
1412
1413         ins = bb->code;
1414
1415         /*if (cfg->opt & MONO_OPT_COPYPROP)
1416                 local_copy_prop (cfg, ins);*/
1417         
1418         i = 1;
1419         fpcount = 0; /* FIXME: track fp stack utilization */
1420         DEBUG (g_print ("LOCAL regalloc: basic block: %d\n", bb->block_num));
1421         /* forward pass on the instructions to collect register liveness info */
1422         while (ins) {
1423                 spec = ins_spec [ins->opcode];
1424                 DEBUG (print_ins (i, ins));
1425                 if (spec [MONO_INST_SRC1]) {
1426                         if (spec [MONO_INST_SRC1] == 'f')
1427                                 reginfo1 = reginfof;
1428                         else
1429                                 reginfo1 = reginfo;
1430                         reginfo1 [ins->sreg1].prev_use = reginfo1 [ins->sreg1].last_use;
1431                         reginfo1 [ins->sreg1].last_use = i;
1432                 } else {
1433                         ins->sreg1 = -1;
1434                 }
1435                 if (spec [MONO_INST_SRC2]) {
1436                         if (spec [MONO_INST_SRC2] == 'f')
1437                                 reginfo2 = reginfof;
1438                         else
1439                                 reginfo2 = reginfo;
1440                         reginfo2 [ins->sreg2].prev_use = reginfo2 [ins->sreg2].last_use;
1441                         reginfo2 [ins->sreg2].last_use = i;
1442                 } else {
1443                         ins->sreg2 = -1;
1444                 }
1445                 if (spec [MONO_INST_DEST]) {
1446                         if (spec [MONO_INST_DEST] == 'f')
1447                                 reginfod = reginfof;
1448                         else
1449                                 reginfod = reginfo;
1450                         if (spec [MONO_INST_DEST] != 'b') /* it's not just a base register */
1451                                 reginfod [ins->dreg].killed_in = i;
1452                         reginfod [ins->dreg].prev_use = reginfod [ins->dreg].last_use;
1453                         reginfod [ins->dreg].last_use = i;
1454                         if (reginfod [ins->dreg].born_in == 0 || reginfod [ins->dreg].born_in > i)
1455                                 reginfod [ins->dreg].born_in = i;
1456                         if (spec [MONO_INST_DEST] == 'l') {
1457                                 /* result in eax:edx, the virtual register is allocated sequentially */
1458                                 reginfod [ins->dreg + 1].prev_use = reginfod [ins->dreg + 1].last_use;
1459                                 reginfod [ins->dreg + 1].last_use = i;
1460                                 if (reginfod [ins->dreg + 1].born_in == 0 || reginfod [ins->dreg + 1].born_in > i)
1461                                         reginfod [ins->dreg + 1].born_in = i;
1462                         }
1463                 } else {
1464                         ins->dreg = -1;
1465                 }
1466                 reversed = inst_list_prepend (cfg->mempool, reversed, ins);
1467                 ++i;
1468                 ins = ins->next;
1469         }
1470
1471         DEBUG (print_regtrack (reginfo, rs->next_vireg));
1472         DEBUG (print_regtrack (reginfof, rs->next_vfreg));
1473         tmp = reversed;
1474         while (tmp) {
1475                 int prev_dreg, prev_sreg1, prev_sreg2;
1476                 dest_mask = src1_mask = src2_mask = X86_CALLEE_REGS;
1477                 --i;
1478                 ins = tmp->data;
1479                 spec = ins_spec [ins->opcode];
1480                 DEBUG (g_print ("processing:"));
1481                 DEBUG (print_ins (i, ins));
1482                 if (spec [MONO_INST_CLOB] == 's') {
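                        /* shift: the count (sreg2) must live in ECX, so either take a
                         * free ECX directly or evict whatever is currently assigned to it */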
1483                         if (rs->ifree_mask & (1 << X86_ECX)) {
1484                                 DEBUG (g_print ("\tshortcut assignment of R%d to ECX\n", ins->sreg2));
1485                                 rs->iassign [ins->sreg2] = X86_ECX;
1486                                 rs->isymbolic [X86_ECX] = ins->sreg2;
1487                                 ins->sreg2 = X86_ECX;
1488                                 rs->ifree_mask &= ~ (1 << X86_ECX);
1489                         } else {
1490                                 int need_ecx_spill = TRUE;
1491                                 /* 
1492                                  * we first check if src1/dreg is already assigned a register
1493                                  * and then we force a spill of the var assigned to ECX.
1494                                  */
1495                                 /* the destination register can't be ECX */
1496                                 dest_mask &= ~ (1 << X86_ECX);
1497                                 src1_mask &= ~ (1 << X86_ECX);
1498                                 val = rs->iassign [ins->dreg];
1499                                 /* 
1500                                  * the destination register is already assigned to ECX:
1501                                  * we need to allocate another register for it and then
1502                                  * copy from this to ECX.
1503                                  */
1504                                 if (val == X86_ECX && ins->dreg != ins->sreg2) {
1505                                         int new_dest = mono_regstate_alloc_int (rs, dest_mask);
1506                                         if (new_dest < 0)
1507                                                 new_dest = get_register_spilling (cfg, tmp, ins, dest_mask, ins->dreg);
1508                                         g_assert (new_dest >= 0);
1509                                         ins->dreg = new_dest;
1510                                         create_copy_ins (cfg, X86_ECX, new_dest, ins);
1511                                         need_ecx_spill = FALSE;
1512                                         /*DEBUG (g_print ("\tforced spill of R%d\n", ins->dreg));
1513                                         val = get_register_force_spilling (cfg, tmp, ins, ins->dreg);
1514                                         rs->iassign [ins->dreg] = val;
1515                                         rs->isymbolic [val] = prev_dreg;
1516                                         ins->dreg = val;*/
1517                                 }
1518                                 val = rs->iassign [ins->sreg1];
1519                                 if (val == X86_ECX) {
1520                                         g_assert_not_reached ();
1521                                 } else if (val >= 0) {
1522                                         /* 
1523                                          * the first src reg was already assigned to a register,
1524                                          * we need to copy it to the dest register because the 
1525                                          * shift instruction clobbers the first operand.
1526                                          */
1527                                         MonoInst *copy = create_copy_ins (cfg, ins->dreg, val, NULL);
1528                                         insert_before_ins (ins, tmp, copy);
1529                                 }
1530                                 val = rs->iassign [ins->sreg2];
1531                                 if (val >= 0 && val != X86_ECX) {
1532                                         MonoInst *move = create_copy_ins (cfg, X86_ECX, val, NULL);
1533                                         DEBUG (g_print ("\tmoved arg from R%d (%d) to ECX\n", val, ins->sreg2));
1534                                         move->next = ins;
1535                                         g_assert_not_reached ();
1536                                         /* FIXME: where is move connected to the instruction list? */
1537                                         //tmp->prev->data->next = move;
1538                                 }
1539                                 if (need_ecx_spill && !(rs->ifree_mask & (1 << X86_ECX))) {
1540                                         DEBUG (g_print ("\tforced spill of R%d\n", rs->isymbolic [X86_ECX]));
1541                                         get_register_force_spilling (cfg, tmp, ins, rs->isymbolic [X86_ECX]);
1542                                         mono_regstate_free_int (rs, X86_ECX);
1543                                 }
1544                                 /* force-set sreg2 */
1545                                 rs->iassign [ins->sreg2] = X86_ECX;
1546                                 rs->isymbolic [X86_ECX] = ins->sreg2;
1547                                 ins->sreg2 = X86_ECX;
1548                                 rs->ifree_mask &= ~ (1 << X86_ECX);
1549                         }
1550                 } else if (spec [MONO_INST_CLOB] == 'd') { /* division */
1551                         int dest_reg = X86_EAX;
1552                         int clob_reg = X86_EDX;
1553                         if (spec [MONO_INST_DEST] == 'd') {
1554                                 dest_reg = X86_EDX; /* remainder */
1555                                 clob_reg = X86_EAX;
1556                         }
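                        /* idiv leaves the quotient in EAX and the remainder in EDX:
                         * force the result into the right one and treat the other as clobbered */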
1557                         val = rs->iassign [ins->dreg];
1558                         if (0 && val >= 0 && val != dest_reg && !(rs->ifree_mask & (1 << dest_reg))) {
1559                                 DEBUG (g_print ("\tforced spill of R%d\n", rs->isymbolic [dest_reg]));
1560                                 get_register_force_spilling (cfg, tmp, ins, rs->isymbolic [dest_reg]);
1561                                 mono_regstate_free_int (rs, dest_reg);
1562                         }
1563                         if (val < 0) {
1564                                 if (val < -1) {
1565                                         /* the register gets spilled after this inst */
1566                                         int spill = -val -1;
1567                                         dest_mask = 1 << clob_reg;
1568                                         prev_dreg = ins->dreg;
1569                                         val = mono_regstate_alloc_int (rs, dest_mask);
1570                                         if (val < 0)
1571                                                 val = get_register_spilling (cfg, tmp, ins, dest_mask, ins->dreg);
1572                                         rs->iassign [ins->dreg] = val;
1573                                         if (spill)
1574                                                 create_spilled_store (cfg, spill, val, prev_dreg, ins);
1575                                         DEBUG (g_print ("\tassigned dreg %s to dest R%d\n", mono_arch_regname (val), ins->dreg));
1576                                         rs->isymbolic [val] = prev_dreg;
1577                                         ins->dreg = val;
1578                                         if (val != dest_reg) { /* force a copy */
1579                                                 create_copy_ins (cfg, val, dest_reg, ins);
1580                                         }
1581                                 } else {
1582                                         DEBUG (g_print ("\tshortcut assignment of R%d to %s\n", ins->dreg, mono_arch_regname (dest_reg)));
1583                                         rs->iassign [ins->dreg] = dest_reg;
1584                                         rs->isymbolic [dest_reg] = ins->dreg;
1585                                         ins->dreg = dest_reg;
1586                                         rs->ifree_mask &= ~ (1 << dest_reg);
1587                                 }
1588                         } else {
1589                                 //DEBUG (g_print ("dest reg in div assigned: %s\n", mono_arch_regname (val)));
1590                                 if (val != dest_reg) { /* force a copy */
1591                                         create_copy_ins (cfg, val, dest_reg, ins);
1592                                         if (!(rs->ifree_mask & (1 << dest_reg)) && rs->isymbolic [dest_reg] >= MONO_MAX_IREGS) {
1593                                                 DEBUG (g_print ("\tforced spill of R%d\n", rs->isymbolic [dest_reg]));
1594                                                 get_register_force_spilling (cfg, tmp, ins, rs->isymbolic [dest_reg]);
1595                                                 mono_regstate_free_int (rs, dest_reg);
1596                                         }
1597                                 }
1598                         }
1599                         src1_mask = 1 << X86_EAX;
1600                         src2_mask = 1 << X86_ECX;
1601                 }
1602                 if (spec [MONO_INST_DEST] == 'l') {
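                        /* 64-bit result: the value lives in the EAX:EDX pair, so both
                         * registers must be freed up before they can be assigned */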
1603                         if (!(rs->ifree_mask & (1 << X86_EAX))) {
1604                                 DEBUG (g_print ("\tforced spill of R%d\n", rs->isymbolic [X86_EAX]));
1605                                 get_register_force_spilling (cfg, tmp, ins, rs->isymbolic [X86_EAX]);
1606                                 mono_regstate_free_int (rs, X86_EAX);
1607                         }
1608                         if (!(rs->ifree_mask & (1 << X86_EDX))) {
1609                                 DEBUG (g_print ("\tforced spill of R%d\n", rs->isymbolic [X86_EDX]));
1610                                 get_register_force_spilling (cfg, tmp, ins, rs->isymbolic [X86_EDX]);
1611                                 mono_regstate_free_int (rs, X86_EDX);
1612                         }
1613                 }
1614                 /* update for use with FP regs... */
1615                 if (spec [MONO_INST_DEST] != 'f' && ins->dreg >= MONO_MAX_IREGS) {
1616                         val = rs->iassign [ins->dreg];
1617                         prev_dreg = ins->dreg;
1618                         if (val < 0) {
1619                                 int spill = 0;
1620                                 if (val < -1) {
1621                                         /* the register gets spilled after this inst */
1622                                         spill = -val -1;
1623                                 }
1624                                 val = mono_regstate_alloc_int (rs, dest_mask);
1625                                 if (val < 0)
1626                                         val = get_register_spilling (cfg, tmp, ins, dest_mask, ins->dreg);
1627                                 rs->iassign [ins->dreg] = val;
1628                                 if (spill)
1629                                         create_spilled_store (cfg, spill, val, prev_dreg, ins);
1630                         }
1631                         DEBUG (g_print ("\tassigned dreg %s to dest R%d\n", mono_arch_regname (val), ins->dreg));
1632                         rs->isymbolic [val] = prev_dreg;
1633                         ins->dreg = val;
1634                         if (spec [MONO_INST_DEST] == 'l') {
1635                                 int hreg = prev_dreg + 1;
1636                                 val = rs->iassign [hreg];
1637                                 if (val < 0) {
1638                                         int spill = 0;
1639                                         if (val < -1) {
1640                                                 /* the register gets spilled after this inst */
1641                                                 spill = -val -1;
1642                                         }
1643                                         val = mono_regstate_alloc_int (rs, dest_mask);
1644                                         if (val < 0)
1645                                                 val = get_register_spilling (cfg, tmp, ins, dest_mask, hreg);
1646                                         rs->iassign [hreg] = val;
1647                                         if (spill)
1648                                                 create_spilled_store (cfg, spill, val, hreg, ins);
1649                                 }
1650                                 DEBUG (g_print ("\tassigned hreg %s to dest R%d\n", mono_arch_regname (val), hreg));
1651                                 rs->isymbolic [val] = hreg;
1652                                 /* FIXME:? ins->dreg = val; */
1653                                 if (ins->dreg == X86_EAX) {
1654                                         if (val != X86_EDX)
1655                                                 create_copy_ins (cfg, val, X86_EDX, ins);
1656                                 } else if (ins->dreg == X86_EDX) {
1657                                         if (val == X86_EAX) {
1658                                                 /* swap */
1659                                                 g_assert_not_reached ();
1660                                         } else {
1661                                                 /* two forced copies */
1662                                                 create_copy_ins (cfg, val, X86_EDX, ins);
1663                                                 create_copy_ins (cfg, ins->dreg, X86_EAX, ins);
1664                                         }
1665                                 } else {
1666                                         if (val == X86_EDX) {
1667                                                 create_copy_ins (cfg, ins->dreg, X86_EAX, ins);
1668                                         } else {
1669                                                 /* two forced copies */
1670                                                 create_copy_ins (cfg, val, X86_EDX, ins);
1671                                                 create_copy_ins (cfg, ins->dreg, X86_EAX, ins);
1672                                         }
1673                                 }
1674                                 if (reg_is_freeable (val) && hreg >= 0 && reginfo [hreg].born_in >= i) {
1675                                         DEBUG (g_print ("\tfreeable %s (R%d)\n", mono_arch_regname (val), hreg));
1676                                         mono_regstate_free_int (rs, val);
1677                                 }
1678                         } else if (spec [MONO_INST_DEST] == 'a' && ins->dreg != X86_EAX && spec [MONO_INST_CLOB] != 'd') {
1679                                 /* this instruction only outputs to EAX, need to copy */
1680                                 create_copy_ins (cfg, ins->dreg, X86_EAX, ins);
1681                         } else if (spec [MONO_INST_DEST] == 'd' && ins->dreg != X86_EDX && spec [MONO_INST_CLOB] != 'd') {
1682                                 create_copy_ins (cfg, ins->dreg, X86_EDX, ins);
1683                         }
1684                 } else {
1685                         prev_dreg = -1;
1686                 }
1687                 if (spec [MONO_INST_DEST] != 'f' && reg_is_freeable (ins->dreg) && prev_dreg >= 0 && reginfo [prev_dreg].born_in >= i) {
1688                         DEBUG (g_print ("\tfreeable %s (R%d) (born in %d)\n", mono_arch_regname (ins->dreg), prev_dreg, reginfo [prev_dreg].born_in));
1689                         mono_regstate_free_int (rs, ins->dreg);
1690                 }
1691                 /* put src1 in EAX if it needs to be */
1692                 if (spec [MONO_INST_SRC1] == 'a') {
1693                         if (!(rs->ifree_mask & (1 << X86_EAX))) {
1694                                 DEBUG (g_print ("\tforced spill of R%d\n", rs->isymbolic [X86_EAX]));
1695                                 get_register_force_spilling (cfg, tmp, ins, rs->isymbolic [X86_EAX]);
1696                                 mono_regstate_free_int (rs, X86_EAX);
1697                         }
1698                         /* force-set sreg1 */
1699                         rs->iassign [ins->sreg1] = X86_EAX;
1700                         rs->isymbolic [X86_EAX] = ins->sreg1;
1701                         ins->sreg1 = X86_EAX;
1702                         rs->ifree_mask &= ~ (1 << X86_EAX);
1703                 }
1704                 if (spec [MONO_INST_SRC1] != 'f' && ins->sreg1 >= MONO_MAX_IREGS) {
1705                         val = rs->iassign [ins->sreg1];
1706                         prev_sreg1 = ins->sreg1;
1707                         if (val < 0) {
1708                                 int spill = 0;
1709                                 if (val < -1) {
1710                                         /* the register gets spilled after this inst */
1711                                         spill = -val -1;
1712                                 }
1713                                 if (0 && ins->opcode == OP_MOVE) {
1714                                         /* 
1715                                          * small optimization: the dest register is already allocated
1716                                          * but the src one is not: we can simply assign the same register
1717                                          * here and peephole will get rid of the instruction later.
1718                                          * This optimization may interfere with the clobbering handling:
1719                                          * it removes a mov operation that will be added again to handle clobbering.
1720                                          * There are also some other issues that show up when running make testjit.
1721                                          */
1722                                         mono_regstate_alloc_int (rs, 1 << ins->dreg);
1723                                         val = rs->iassign [ins->sreg1] = ins->dreg;
1724                                         //g_assert (val >= 0);
1725                                         DEBUG (g_print ("\tfast assigned sreg1 %s to R%d\n", mono_arch_regname (val), ins->sreg1));
1726                                 } else {
1727                                         //g_assert (val == -1); /* source cannot be spilled */
1728                                         val = mono_regstate_alloc_int (rs, src1_mask);
1729                                         if (val < 0)
1730                                                 val = get_register_spilling (cfg, tmp, ins, src1_mask, ins->sreg1);
1731                                         rs->iassign [ins->sreg1] = val;
1732                                         DEBUG (g_print ("\tassigned sreg1 %s to R%d\n", mono_arch_regname (val), ins->sreg1));
1733                                 }
1734                                 if (spill) {
1735                                         MonoInst *store = create_spilled_store (cfg, spill, val, prev_sreg1, NULL);
1736                                         insert_before_ins (ins, tmp, store);
1737                                 }
1738                         }
1739                         rs->isymbolic [val] = prev_sreg1;
1740                         ins->sreg1 = val;
1741                 } else {
1742                         prev_sreg1 = -1;
1743                 }
1744                 /* handle clobbering of sreg1 */
1745                 if ((spec [MONO_INST_CLOB] == '1' || spec [MONO_INST_CLOB] == 's') && ins->dreg != ins->sreg1) {
1746                         MonoInst *copy = create_copy_ins (cfg, ins->dreg, ins->sreg1, NULL);
1747                         DEBUG (g_print ("\tneed to copy sreg1 %s to dreg %s\n", mono_arch_regname (ins->sreg1), mono_arch_regname (ins->dreg)));
1748                         if (ins->sreg2 == -1 || spec [MONO_INST_CLOB] == 's') {
1749                                 /* note: the copy is inserted before the current instruction! */
1750                                 insert_before_ins (ins, tmp, copy);
1751                                 /* we set sreg1 to dest as well */
1752                                 prev_sreg1 = ins->sreg1 = ins->dreg;
1753                         } else {
1754                                 /* inserted after the operation */
1755                                 copy->next = ins->next;
1756                                 ins->next = copy;
1757                         }
1758                 }
1759                 if (spec [MONO_INST_SRC2] != 'f' && ins->sreg2 >= MONO_MAX_IREGS) {
1760                         val = rs->iassign [ins->sreg2];
1761                         prev_sreg2 = ins->sreg2;
1762                         if (val < 0) {
1763                                 int spill = 0;
1764                                 if (val < -1) {
1765                                         /* the register gets spilled after this inst */
1766                                         spill = -val -1;
1767                                 }
1768                                 val = mono_regstate_alloc_int (rs, src2_mask);
1769                                 if (val < 0)
1770                                         val = get_register_spilling (cfg, tmp, ins, src2_mask, ins->sreg2);
1771                                 rs->iassign [ins->sreg2] = val;
1772                                 DEBUG (g_print ("\tassigned sreg2 %s to R%d\n", mono_arch_regname (val), ins->sreg2));
1773                                 if (spill)
1774                                         create_spilled_store (cfg, spill, val, prev_sreg2, ins);
1775                         }
1776                         rs->isymbolic [val] = prev_sreg2;
1777                         ins->sreg2 = val;
1778                         if (spec [MONO_INST_CLOB] == 's' && ins->sreg2 != X86_ECX) {
1779                                 DEBUG (g_print ("\tassigned sreg2 %s to R%d, but ECX is needed (R%d)\n", mono_arch_regname (val), ins->sreg2, rs->iassign [X86_ECX]));
1780                         }
1781                 } else {
1782                         prev_sreg2 = -1;
1783                 }
1784
1785                 if (spec [MONO_INST_CLOB] == 'c') {
1786                         int j, s;
1787                         guint32 clob_mask = X86_CALLEE_REGS;
1788                         for (j = 0; j < MONO_MAX_IREGS; ++j) {
1789                                 s = 1 << j;
1790                                 if ((clob_mask & s) && !(rs->ifree_mask & s) && j != ins->sreg1) {
1791                                         //g_warning ("register %s busy at call site\n", mono_arch_regname (j));
1792                                 }
1793                         }
1794                 }
1795                 /*if (reg_is_freeable (ins->sreg1) && prev_sreg1 >= 0 && reginfo [prev_sreg1].born_in >= i) {
1796                         DEBUG (g_print ("freeable %s\n", mono_arch_regname (ins->sreg1)));
1797                         mono_regstate_free_int (rs, ins->sreg1);
1798                 }
1799                 if (reg_is_freeable (ins->sreg2) && prev_sreg2 >= 0 && reginfo [prev_sreg2].born_in >= i) {
1800                         DEBUG (g_print ("freeable %s\n", mono_arch_regname (ins->sreg2)));
1801                         mono_regstate_free_int (rs, ins->sreg2);
1802                 }*/
1803                 
1804                 //DEBUG (print_ins (i, ins));
1805                 /* this may result from an insert_before call */
1806                 if (!tmp->next)
1807                         bb->code = tmp->data;
1808                 tmp = tmp->next;
1809         }
1810 }
1811
1812 static unsigned char*
1813 emit_float_to_int (MonoCompile *cfg, guchar *code, int dreg, int size, gboolean is_signed)
1814 {
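        /* The x87 FPU rounds to nearest by default: save the control word on the
         * stack, set the rounding-control bits (0x0c00 = truncate toward zero),
         * convert with fist(p), then restore the original control word. */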
1815         x86_alu_reg_imm (code, X86_SUB, X86_ESP, 4);
1816         x86_fnstcw_membase(code, X86_ESP, 0);
1817         x86_mov_reg_membase (code, dreg, X86_ESP, 0, 2);
1818         x86_alu_reg_imm (code, X86_OR, dreg, 0xc00);
1819         x86_mov_membase_reg (code, X86_ESP, 2, dreg, 2);
1820         x86_fldcw_membase (code, X86_ESP, 2);
1821         if (size == 8) {
1822                 x86_alu_reg_imm (code, X86_SUB, X86_ESP, 8);
1823                 x86_fist_pop_membase (code, X86_ESP, 0, TRUE);
1824                 x86_pop_reg (code, dreg);
1825                 /* FIXME: need the high register 
1826                  * x86_pop_reg (code, dreg_high);
1827                  */
1828         } else {
1829                 x86_push_reg (code, X86_EAX); // SP = SP - 4
1830                 x86_fist_pop_membase (code, X86_ESP, 0, FALSE);
1831                 x86_pop_reg (code, dreg);
1832         }
1833         x86_fldcw_membase (code, X86_ESP, 0);
1834         x86_alu_reg_imm (code, X86_ADD, X86_ESP, 4);
1835
1836         if (size == 1)
1837                 x86_widen_reg (code, dreg, dreg, is_signed, FALSE);
1838         else if (size == 2)
1839                 x86_widen_reg (code, dreg, dreg, is_signed, TRUE);
1840         return code;
1841 }
1842
1843 static unsigned char*
1844 mono_emit_stack_alloc (guchar *code, MonoInst* tree)
1845 {
1846         int sreg = tree->sreg1;
1847 #ifdef PLATFORM_WIN32
1848         guint8* br[5];
1849
1850         /*
1851          * Under Windows:
1852          * If the requested stack size is larger than one page,
1853          * perform a stack-touch operation.
1854          */
1855         /*
1856          * Generate stack probe code.
1857          * Under Windows, it is necessary to allocate one page at a time,
1858          * "touching" stack after each successful sub-allocation. This is
1859          * because of the way stack growth is implemented - there is a
1860          * guard page before the lowest stack page that is currently committed.
1861          * The stack normally grows sequentially, so the OS traps access to the
1862          * guard page and commits more pages when needed.
1863          */
1864         x86_test_reg_imm (code, sreg, ~0xFFF);
1865         br[0] = code; x86_branch8 (code, X86_CC_Z, 0, FALSE);
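        /* requested size is below one page: skip the probe loop and branch
         * straight to the single SUB below (the br[0] patch target) */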
1866
1867         br[2] = code; /* loop */
1868         x86_alu_reg_imm (code, X86_SUB, X86_ESP, 0x1000);
1869         x86_test_membase_reg (code, X86_ESP, 0, X86_ESP);
1870         x86_alu_reg_imm (code, X86_SUB, sreg, 0x1000);
1871         x86_alu_reg_imm (code, X86_CMP, sreg, 0x1000);
1872         br[3] = code; x86_branch8 (code, X86_CC_AE, 0, FALSE);
1873         x86_patch (br[3], br[2]);
1874         x86_test_reg_reg (code, sreg, sreg);
1875         br[4] = code; x86_branch8 (code, X86_CC_Z, 0, FALSE);
1876         x86_alu_reg_reg (code, X86_SUB, X86_ESP, sreg);
1877
1878         br[1] = code; x86_jump8 (code, 0);
1879
1880         x86_patch (br[0], code);
1881         x86_alu_reg_reg (code, X86_SUB, X86_ESP, sreg);
1882         x86_patch (br[1], code);
1883         x86_patch (br[4], code);
1884 #else /* PLATFORM_WIN32 */
1885         x86_alu_reg_reg (code, X86_SUB, X86_ESP, tree->sreg1);
1886 #endif
1887         if (tree->flags & MONO_INST_INIT) {
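                /* zero the freshly allocated area: preserve any of EAX/ECX/EDI
                 * still needed, then rep stosl with ECX = size / 4, EAX = 0 and
                 * EDI pointing at the start of the block */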
1888                 int offset = 0;
1889                 if (tree->dreg != X86_EAX && sreg != X86_EAX) {
1890                         x86_push_reg (code, X86_EAX);
1891                         offset += 4;
1892                 }
1893                 if (tree->dreg != X86_ECX && sreg != X86_ECX) {
1894                         x86_push_reg (code, X86_ECX);
1895                         offset += 4;
1896                 }
1897                 if (tree->dreg != X86_EDI && sreg != X86_EDI) {
1898                         x86_push_reg (code, X86_EDI);
1899                         offset += 4;
1900                 }
1901                 
1902                 x86_shift_reg_imm (code, X86_SHR, sreg, 2);
1903                 if (sreg != X86_ECX)
1904                         x86_mov_reg_reg (code, X86_ECX, sreg, 4);
1905                 x86_alu_reg_reg (code, X86_XOR, X86_EAX, X86_EAX);
1906                                 
1907                 x86_lea_membase (code, X86_EDI, X86_ESP, offset);
1908                 x86_cld (code);
1909                 x86_prefix (code, X86_REP_PREFIX);
1910                 x86_stosl (code);
1911                 
1912                 if (tree->dreg != X86_EDI && sreg != X86_EDI)
1913                         x86_pop_reg (code, X86_EDI);
1914                 if (tree->dreg != X86_ECX && sreg != X86_ECX)
1915                         x86_pop_reg (code, X86_ECX);
1916                 if (tree->dreg != X86_EAX && sreg != X86_EAX)
1917                         x86_pop_reg (code, X86_EAX);
1918         }
1919         return code;
1920 }
1921
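/*
 * Debug helper: emits code that, at run time, calls printf to dump the value
 * of 'reg', saving and restoring EAX, EDX and ECX around the call.
 */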
1922 #define REAL_PRINT_REG(text,reg) \
1923 mono_assert (reg >= 0); \
1924 x86_push_reg (code, X86_EAX); \
1925 x86_push_reg (code, X86_EDX); \
1926 x86_push_reg (code, X86_ECX); \
1927 x86_push_reg (code, reg); \
1928 x86_push_imm (code, reg); \
1929 x86_push_imm (code, text " %d %p\n"); \
1930 x86_mov_reg_imm (code, X86_EAX, printf); \
1931 x86_call_reg (code, X86_EAX); \
1932 x86_alu_reg_imm (code, X86_ADD, X86_ESP, 3*4); \
1933 x86_pop_reg (code, X86_ECX); \
1934 x86_pop_reg (code, X86_EDX); \
1935 x86_pop_reg (code, X86_EAX);
1936
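/*
 * Emit native code for every instruction in the basic block, growing the
 * native code buffer whenever the next instruction might not fit.
 */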
1937 void
1938 mono_arch_output_basic_block (MonoCompile *cfg, MonoBasicBlock *bb)
1939 {
1940         MonoInst *ins;
1941         MonoCallInst *call;
1942         guint offset;
1943         guint8 *code = cfg->native_code + cfg->code_len;
1944         MonoInst *last_ins = NULL;
1945         guint last_offset = 0;
1946         int max_len, cpos;
1947
1948         if (cfg->opt & MONO_OPT_PEEPHOLE)
1949                 peephole_pass (cfg, bb);
1950
1951 #if 0
1952         /* 
1953          * Various strategies to align BBs. Using real loop detection or simply
1954          * aligning every block leads to more consistent benchmark results,
1955          * but usually slows down the code.
1956          * We should do the alignment outside this function, or adjust
1957          * bb->native_offset as well, or the code is effectively slowed down!
1958          */
1959         /* align all blocks */
1960 //      if ((pad = (cfg->code_len & (align - 1)))) {
1961         /* poor man's loop start detection */
1962 //      if (bb->code && bb->in_count && bb->in_bb [0]->cil_code > bb->cil_code && (pad = (cfg->code_len & (align - 1)))) {
1963         /* consider real loop detection and nesting level */
1964 //      if (bb->loop_blocks && bb->nesting < 3 && (pad = (cfg->code_len & (align - 1)))) {
1965         /* consider real loop detection */
1966         if (bb->loop_blocks && (pad = (cfg->code_len & (align - 1)))) {
1967                 pad = align - pad;
1968                 x86_padding (code, pad);
1969                 cfg->code_len += pad;
1970                 bb->native_offset = cfg->code_len;
1971         }
1972 #endif
1973
1974         if (cfg->verbose_level > 2)
1975                 g_print ("Basic block %d starting at offset 0x%x\n", bb->block_num, bb->native_offset);
1976
1977         cpos = bb->max_offset;
1978
1979         if (cfg->prof_options & MONO_PROFILE_COVERAGE) {
1980                 MonoProfileCoverageInfo *cov = cfg->coverage_info;
1981                 g_assert (!mono_compile_aot);
1982                 cpos += 6;
1983
1984                 cov->data [bb->dfn].cil_code = bb->cil_code;
1985                 /* this is not thread safe, but good enough */
1986                 x86_inc_mem (code, &cov->data [bb->dfn].count); 
1987         }
1988
1989         offset = code - cfg->native_code;
1990
1991         ins = bb->code;
1992         while (ins) {
1993                 offset = code - cfg->native_code;
1994
1995                 max_len = ((guint8 *)ins_spec [ins->opcode])[MONO_INST_LEN];
1996
1997                 if (offset > (cfg->code_size - max_len - 16)) {
1998                         cfg->code_size *= 2;
1999                         cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
2000                         code = cfg->native_code + offset;
2001                         mono_jit_stats.code_reallocs++;
2002                 }
2003
2004                 mono_debug_record_line_number (cfg, ins, offset);
2005
2006                 switch (ins->opcode) {
2007                 case OP_BIGMUL:
2008                         x86_mul_reg (code, ins->sreg2, TRUE);
2009                         break;
2010                 case OP_BIGMUL_UN:
2011                         x86_mul_reg (code, ins->sreg2, FALSE);
2012                         break;
2013                 case OP_X86_SETEQ_MEMBASE:
2014                         x86_set_membase (code, X86_CC_EQ, ins->inst_basereg, ins->inst_offset, TRUE);
2015                         break;
2016                 case OP_STOREI1_MEMBASE_IMM:
2017                         x86_mov_membase_imm (code, ins->inst_destbasereg, ins->inst_offset, ins->inst_imm, 1);
2018                         break;
2019                 case OP_STOREI2_MEMBASE_IMM:
2020                         x86_mov_membase_imm (code, ins->inst_destbasereg, ins->inst_offset, ins->inst_imm, 2);
2021                         break;
2022                 case OP_STORE_MEMBASE_IMM:
2023                 case OP_STOREI4_MEMBASE_IMM:
2024                         x86_mov_membase_imm (code, ins->inst_destbasereg, ins->inst_offset, ins->inst_imm, 4);
2025                         break;
2026                 case OP_STOREI1_MEMBASE_REG:
2027                         x86_mov_membase_reg (code, ins->inst_destbasereg, ins->inst_offset, ins->sreg1, 1);
2028                         break;
2029                 case OP_STOREI2_MEMBASE_REG:
2030                         x86_mov_membase_reg (code, ins->inst_destbasereg, ins->inst_offset, ins->sreg1, 2);
2031                         break;
2032                 case OP_STORE_MEMBASE_REG:
2033                 case OP_STOREI4_MEMBASE_REG:
2034                         x86_mov_membase_reg (code, ins->inst_destbasereg, ins->inst_offset, ins->sreg1, 4);
2035                         break;
2036                 case CEE_LDIND_I:
2037                 case CEE_LDIND_I4:
2038                 case CEE_LDIND_U4:
2039                         x86_mov_reg_mem (code, ins->dreg, ins->inst_p0, 4);
2040                         break;
2041                 case OP_LOADU4_MEM:
2042                         x86_mov_reg_imm (code, ins->dreg, ins->inst_p0);
2043                         x86_mov_reg_membase (code, ins->dreg, ins->dreg, 0, 4);
2044                         break;
2045                 case OP_LOAD_MEMBASE:
2046                 case OP_LOADI4_MEMBASE:
2047                 case OP_LOADU4_MEMBASE:
2048                         x86_mov_reg_membase (code, ins->dreg, ins->inst_basereg, ins->inst_offset, 4);
2049                         break;
2050                 case OP_LOADU1_MEMBASE:
2051                         x86_widen_membase (code, ins->dreg, ins->inst_basereg, ins->inst_offset, FALSE, FALSE);
2052                         break;
2053                 case OP_LOADI1_MEMBASE:
2054                         x86_widen_membase (code, ins->dreg, ins->inst_basereg, ins->inst_offset, TRUE, FALSE);
2055                         break;
2056                 case OP_LOADU2_MEMBASE:
2057                         x86_widen_membase (code, ins->dreg, ins->inst_basereg, ins->inst_offset, FALSE, TRUE);
2058                         break;
2059                 case OP_LOADI2_MEMBASE:
2060                         x86_widen_membase (code, ins->dreg, ins->inst_basereg, ins->inst_offset, TRUE, TRUE);
2061                         break;
2062                 case CEE_CONV_I1:
2063                         x86_widen_reg (code, ins->dreg, ins->sreg1, TRUE, FALSE);
2064                         break;
2065                 case CEE_CONV_I2:
2066                         x86_widen_reg (code, ins->dreg, ins->sreg1, TRUE, TRUE);
2067                         break;
2068                 case CEE_CONV_U1:
2069                         x86_widen_reg (code, ins->dreg, ins->sreg1, FALSE, FALSE);
2070                         break;
2071                 case CEE_CONV_U2:
2072                         x86_widen_reg (code, ins->dreg, ins->sreg1, FALSE, TRUE);
2073                         break;
2074                 case OP_COMPARE:
2075                         x86_alu_reg_reg (code, X86_CMP, ins->sreg1, ins->sreg2);
2076                         break;
2077                 case OP_COMPARE_IMM:
2078                         x86_alu_reg_imm (code, X86_CMP, ins->sreg1, ins->inst_imm);
2079                         break;
2080                 case OP_X86_COMPARE_MEMBASE_REG:
2081                         x86_alu_membase_reg (code, X86_CMP, ins->inst_basereg, ins->inst_offset, ins->sreg2);
2082                         break;
2083                 case OP_X86_COMPARE_MEMBASE_IMM:
2084                         x86_alu_membase_imm (code, X86_CMP, ins->inst_basereg, ins->inst_offset, ins->inst_imm);
2085                         break;
2086                 case OP_X86_COMPARE_REG_MEMBASE:
2087                         x86_alu_reg_membase (code, X86_CMP, ins->sreg1, ins->sreg2, ins->inst_offset);
2088                         break;
2089                 case OP_X86_TEST_NULL:
2090                         x86_test_reg_reg (code, ins->sreg1, ins->sreg1);
2091                         break;
2092                 case OP_X86_ADD_MEMBASE_IMM:
2093                         x86_alu_membase_imm (code, X86_ADD, ins->inst_basereg, ins->inst_offset, ins->inst_imm);
2094                         break;
2095                 case OP_X86_SUB_MEMBASE_IMM:
2096                         x86_alu_membase_imm (code, X86_SUB, ins->inst_basereg, ins->inst_offset, ins->inst_imm);
2097                         break;
2098                 case OP_X86_INC_MEMBASE:
2099                         x86_inc_membase (code, ins->inst_basereg, ins->inst_offset);
2100                         break;
2101                 case OP_X86_INC_REG:
2102                         x86_inc_reg (code, ins->dreg);
2103                         break;
2104                 case OP_X86_DEC_MEMBASE:
2105                         x86_dec_membase (code, ins->inst_basereg, ins->inst_offset);
2106                         break;
2107                 case OP_X86_DEC_REG:
2108                         x86_dec_reg (code, ins->dreg);
2109                         break;
2110                 case CEE_BREAK:
2111                         x86_breakpoint (code);
2112                         break;
2113                 case OP_ADDCC:
2114                 case CEE_ADD:
2115                         x86_alu_reg_reg (code, X86_ADD, ins->sreg1, ins->sreg2);
2116                         break;
2117                 case OP_ADC:
2118                         x86_alu_reg_reg (code, X86_ADC, ins->sreg1, ins->sreg2);
2119                         break;
2120                 case OP_ADD_IMM:
2121                         x86_alu_reg_imm (code, X86_ADD, ins->dreg, ins->inst_imm);
2122                         break;
2123                 case OP_ADC_IMM:
2124                         x86_alu_reg_imm (code, X86_ADC, ins->dreg, ins->inst_imm);
2125                         break;
2126                 case OP_SUBCC:
2127                 case CEE_SUB:
2128                         x86_alu_reg_reg (code, X86_SUB, ins->sreg1, ins->sreg2);
2129                         break;
2130                 case OP_SBB:
2131                         x86_alu_reg_reg (code, X86_SBB, ins->sreg1, ins->sreg2);
2132                         break;
2133                 case OP_SUB_IMM:
2134                         x86_alu_reg_imm (code, X86_SUB, ins->dreg, ins->inst_imm);
2135                         break;
2136                 case OP_SBB_IMM:
2137                         x86_alu_reg_imm (code, X86_SBB, ins->dreg, ins->inst_imm);
2138                         break;
2139                 case CEE_AND:
2140                         x86_alu_reg_reg (code, X86_AND, ins->sreg1, ins->sreg2);
2141                         break;
2142                 case OP_AND_IMM:
2143                         x86_alu_reg_imm (code, X86_AND, ins->sreg1, ins->inst_imm);
2144                         break;
2145                 case CEE_DIV:
2146                         x86_cdq (code);
2147                         x86_div_reg (code, ins->sreg2, TRUE);
2148                         break;
2149                 case CEE_DIV_UN:
2150                         x86_alu_reg_reg (code, X86_XOR, X86_EDX, X86_EDX);
2151                         x86_div_reg (code, ins->sreg2, FALSE);
2152                         break;
2153                 case OP_DIV_IMM:
2154                         x86_mov_reg_imm (code, ins->sreg2, ins->inst_imm);
2155                         x86_cdq (code);
2156                         x86_div_reg (code, ins->sreg2, TRUE);
2157                         break;
2158                 case CEE_REM:
2159                         x86_cdq (code);
2160                         x86_div_reg (code, ins->sreg2, TRUE);
2161                         break;
2162                 case CEE_REM_UN:
2163                         x86_alu_reg_reg (code, X86_XOR, X86_EDX, X86_EDX);
2164                         x86_div_reg (code, ins->sreg2, FALSE);
2165                         break;
2166                 case OP_REM_IMM:
2167                         x86_mov_reg_imm (code, ins->sreg2, ins->inst_imm);
2168                         x86_cdq (code);
2169                         x86_div_reg (code, ins->sreg2, TRUE);
2170                         break;
2171                 case CEE_OR:
2172                         x86_alu_reg_reg (code, X86_OR, ins->sreg1, ins->sreg2);
2173                         break;
2174                 case OP_OR_IMM:
2175                         x86_alu_reg_imm (code, X86_OR, ins->sreg1, ins->inst_imm);
2176                         break;
2177                 case CEE_XOR:
2178                         x86_alu_reg_reg (code, X86_XOR, ins->sreg1, ins->sreg2);
2179                         break;
2180                 case OP_XOR_IMM:
2181                         x86_alu_reg_imm (code, X86_XOR, ins->sreg1, ins->inst_imm);
2182                         break;
2183                 case CEE_SHL:
2184                         g_assert (ins->sreg2 == X86_ECX);
2185                         x86_shift_reg (code, X86_SHL, ins->dreg);
2186                         break;
2187                 case CEE_SHR:
2188                         g_assert (ins->sreg2 == X86_ECX);
2189                         x86_shift_reg (code, X86_SAR, ins->dreg);
2190                         break;
2191                 case OP_SHR_IMM:
2192                         x86_shift_reg_imm (code, X86_SAR, ins->dreg, ins->inst_imm);
2193                         break;
2194                 case OP_SHR_UN_IMM:
2195                         x86_shift_reg_imm (code, X86_SHR, ins->dreg, ins->inst_imm);
2196                         break;
2197                 case CEE_SHR_UN:
2198                         g_assert (ins->sreg2 == X86_ECX);
2199                         x86_shift_reg (code, X86_SHR, ins->dreg);
2200                         break;
2201                 case OP_SHL_IMM:
2202                         x86_shift_reg_imm (code, X86_SHL, ins->dreg, ins->inst_imm);
2203                         break;
2204                 case CEE_NOT:
2205                         x86_not_reg (code, ins->sreg1);
2206                         break;
2207                 case CEE_NEG:
2208                         x86_neg_reg (code, ins->sreg1);
2209                         break;
2210                 case OP_SEXT_I1:
2211                         x86_widen_reg (code, ins->dreg, ins->sreg1, TRUE, FALSE);
2212                         break;
2213                 case OP_SEXT_I2:
2214                         x86_widen_reg (code, ins->dreg, ins->sreg1, TRUE, TRUE);
2215                         break;
2216                 case CEE_MUL:
2217                         x86_imul_reg_reg (code, ins->sreg1, ins->sreg2);
2218                         break;
2219                 case OP_MUL_IMM:
2220                         x86_imul_reg_reg_imm (code, ins->dreg, ins->sreg1, ins->inst_imm);
2221                         break;
2222                 case CEE_MUL_OVF:
2223                         x86_imul_reg_reg (code, ins->sreg1, ins->sreg2);
2224                         EMIT_COND_SYSTEM_EXCEPTION (X86_CC_O, FALSE, "OverflowException");
2225                         break;
2226                 case CEE_MUL_OVF_UN: {
2227                         /* the mul operation and the exception check should most likely be split */
2228                         int non_eax_reg, saved_eax = FALSE, saved_edx = FALSE;
2229                         /*g_assert (ins->sreg2 == X86_EAX);
2230                         g_assert (ins->dreg == X86_EAX);*/
2231                         if (ins->sreg2 == X86_EAX) {
2232                                 non_eax_reg = ins->sreg1;
2233                         } else if (ins->sreg1 == X86_EAX) {
2234                                 non_eax_reg = ins->sreg2;
2235                         } else {
2236                                 /* no need to save since we're going to store to it anyway */
2237                                 if (ins->dreg != X86_EAX) {
2238                                         saved_eax = TRUE;
2239                                         x86_push_reg (code, X86_EAX);
2240                                 }
2241                                 x86_mov_reg_reg (code, X86_EAX, ins->sreg1, 4);
2242                                 non_eax_reg = ins->sreg2;
2243                         }
2244                         if (ins->dreg == X86_EDX) {
2245                                 if (!saved_eax) {
2246                                         saved_eax = TRUE;
2247                                         x86_push_reg (code, X86_EAX);
2248                                 }
2249                         } else if (ins->dreg != X86_EAX) {
2250                                 saved_edx = TRUE;
2251                                 x86_push_reg (code, X86_EDX);
2252                         }
2253                         x86_mul_reg (code, non_eax_reg, FALSE);
2254                         /* restore before the check since pop and mov don't change the flags */
2255                         if (saved_edx)
2256                                 x86_pop_reg (code, X86_EDX);
2257                         if (saved_eax)
2258                                 x86_pop_reg (code, X86_EAX);
2259                         if (ins->dreg != X86_EAX)
2260                                 x86_mov_reg_reg (code, ins->dreg, X86_EAX, 4);
2261                         EMIT_COND_SYSTEM_EXCEPTION (X86_CC_O, FALSE, "OverflowException");
2262                         break;
2263                 }
2264                 case OP_ICONST:
2265                         x86_mov_reg_imm (code, ins->dreg, ins->inst_c0);
2266                         break;
2267                 case OP_AOTCONST:
2268                         mono_add_patch_info (cfg, offset, (MonoJumpInfoType)ins->inst_i1, ins->inst_p0);
2269                         x86_mov_reg_imm (code, ins->dreg, 0);
2270                         break;
2271                 case CEE_CONV_I4:
2272                 case CEE_CONV_U4:
2273                 case OP_MOVE:
2274                         x86_mov_reg_reg (code, ins->dreg, ins->sreg1, 4);
2275                         break;
2276                 case CEE_JMP: {
2277                         /*
2278                          * Note: this 'frame destruction' logic is useful for tail calls, too.
2279                          * Keep in sync with the code in emit_epilog.
2280                          */
2281                         int pos = 0;
2282
2283                         /* FIXME: no tracing support... */
2284                         if (cfg->prof_options & MONO_PROFILE_ENTER_LEAVE)
2285                                 code = mono_arch_instrument_epilog (cfg, mono_profiler_method_leave, code, FALSE);
2286                         /* reset offset to make max_len work */
2287                         offset = code - cfg->native_code;
2288
2289                         g_assert (!cfg->method->save_lmf);
2290
2291                         if (cfg->used_int_regs & (1 << X86_EBX))
2292                                 pos -= 4;
2293                         if (cfg->used_int_regs & (1 << X86_EDI))
2294                                 pos -= 4;
2295                         if (cfg->used_int_regs & (1 << X86_ESI))
2296                                 pos -= 4;
2297                         if (pos)
2298                                 x86_lea_membase (code, X86_ESP, X86_EBP, pos);
2299         
2300                         if (cfg->used_int_regs & (1 << X86_ESI))
2301                                 x86_pop_reg (code, X86_ESI);
2302                         if (cfg->used_int_regs & (1 << X86_EDI))
2303                                 x86_pop_reg (code, X86_EDI);
2304                         if (cfg->used_int_regs & (1 << X86_EBX))
2305                                 x86_pop_reg (code, X86_EBX);
2306         
2307                         /* restore ESP/EBP */
2308                         x86_leave (code);
2309                         offset = code - cfg->native_code;
2310                         mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_METHOD_JUMP, ins->inst_p0);
2311                         x86_jump32 (code, 0);
2312                         break;
2313                 }
2314                 case OP_CHECK_THIS:
2315                         /* ensure ins->sreg1 is not NULL */
2316                         x86_alu_membase_imm (code, X86_CMP, ins->sreg1, 0, 0);
2317                         break;
2318                 case OP_ARGLIST: {
2319                         int hreg = ins->sreg1 == X86_EAX? X86_ECX: X86_EAX;
2320                         x86_push_reg (code, hreg);
2321                         x86_lea_membase (code, hreg, X86_EBP, cfg->sig_cookie);
2322                         x86_mov_membase_reg (code, ins->sreg1, 0, hreg, 4);
2323                         x86_pop_reg (code, hreg);
2324                         break;
2325                 }
2326                 case OP_FCALL:
2327                 case OP_LCALL:
2328                 case OP_VCALL:
2329                 case OP_VOIDCALL:
2330                 case CEE_CALL:
2331                         call = (MonoCallInst*)ins;
2332                         if (ins->flags & MONO_INST_HAS_METHOD)
2333                                 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_METHOD, call->method);
2334                         else {
2335                                 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_ABS, call->fptr);
2336                         }
2337                         x86_call_code (code, 0);
2338                         if (call->stack_usage && (call->signature->call_convention != MONO_CALL_STDCALL))
2339                                 x86_alu_reg_imm (code, X86_ADD, X86_ESP, call->stack_usage);
2340                         break;
2341                 case OP_FCALL_REG:
2342                 case OP_LCALL_REG:
2343                 case OP_VCALL_REG:
2344                 case OP_VOIDCALL_REG:
2345                 case OP_CALL_REG:
2346                         call = (MonoCallInst*)ins;
2347                         x86_call_reg (code, ins->sreg1);
2348                         if (call->stack_usage && (call->signature->call_convention != MONO_CALL_STDCALL))
2349                                 x86_alu_reg_imm (code, X86_ADD, X86_ESP, call->stack_usage);
2350                         break;
2351                 case OP_FCALL_MEMBASE:
2352                 case OP_LCALL_MEMBASE:
2353                 case OP_VCALL_MEMBASE:
2354                 case OP_VOIDCALL_MEMBASE:
2355                 case OP_CALL_MEMBASE:
2356                         call = (MonoCallInst*)ins;
2357                         x86_call_membase (code, ins->sreg1, ins->inst_offset);
2358                         if (call->stack_usage && (call->signature->call_convention != MONO_CALL_STDCALL))
2359                                 x86_alu_reg_imm (code, X86_ADD, X86_ESP, call->stack_usage);
2360                         break;
2361                 case OP_OUTARG:
2362                 case OP_X86_PUSH:
2363                         x86_push_reg (code, ins->sreg1);
2364                         break;
2365                 case OP_X86_PUSH_IMM:
2366                         x86_push_imm (code, ins->inst_imm);
2367                         break;
2368                 case OP_X86_PUSH_MEMBASE:
2369                         x86_push_membase (code, ins->inst_basereg, ins->inst_offset);
2370                         break;
2371                 case OP_X86_PUSH_OBJ: 
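                        /* push a value type: reserve inst_imm bytes on the stack and copy the
                         * object from inst_basereg + inst_offset into that area with rep movsd.
                         * EDI/ESI/ECX are saved around the copy, and the +12 when computing the
                         * destination skips those three saved registers. */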
2372                         x86_alu_reg_imm (code, X86_SUB, X86_ESP, ins->inst_imm);
2373                         x86_push_reg (code, X86_EDI);
2374                         x86_push_reg (code, X86_ESI);
2375                         x86_push_reg (code, X86_ECX);
2376                         if (ins->inst_offset)
2377                                 x86_lea_membase (code, X86_ESI, ins->inst_basereg, ins->inst_offset);
2378                         else
2379                                 x86_mov_reg_reg (code, X86_ESI, ins->inst_basereg, 4);
2380                         x86_lea_membase (code, X86_EDI, X86_ESP, 12);
2381                         x86_mov_reg_imm (code, X86_ECX, (ins->inst_imm >> 2));
2382                         x86_cld (code);
2383                         x86_prefix (code, X86_REP_PREFIX);
2384                         x86_movsd (code);
2385                         x86_pop_reg (code, X86_ECX);
2386                         x86_pop_reg (code, X86_ESI);
2387                         x86_pop_reg (code, X86_EDI);
2388                         break;
2389                 case OP_X86_LEA:
2390                         x86_lea_memindex (code, ins->dreg, ins->sreg1, ins->inst_imm, ins->sreg2, ins->unused);
2391                         break;
2392                 case OP_X86_LEA_MEMBASE:
2393                         x86_lea_membase (code, ins->dreg, ins->sreg1, ins->inst_imm);
2394                         break;
2395                 case OP_X86_XCHG:
2396                         x86_xchg_reg_reg (code, ins->sreg1, ins->sreg2, 4);
2397                         break;
2398                 case OP_LOCALLOC:
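                        /* round the requested size up to MONO_ARCH_FRAME_ALIGNMENT, let
                         * mono_emit_stack_alloc grow the stack, and return the start of the
                         * allocated area (the new ESP) in dreg */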
2399                         /* keep alignment */
2400                         x86_alu_reg_imm (code, X86_ADD, ins->sreg1, MONO_ARCH_FRAME_ALIGNMENT - 1);
2401                         x86_alu_reg_imm (code, X86_AND, ins->sreg1, ~(MONO_ARCH_FRAME_ALIGNMENT - 1));
2402                         code = mono_emit_stack_alloc (code, ins);
2403                         x86_mov_reg_reg (code, ins->dreg, X86_ESP, 4);
2404                         break;
2405                 case CEE_RET:
2406                         x86_ret (code);
2407                         break;
2408                 case CEE_THROW: {
2409                         x86_push_reg (code, ins->sreg1);
2410                         mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD, 
2411                                              (gpointer)"mono_arch_throw_exception");
2412                         x86_call_code (code, 0);
2413                         break;
2414                 }
2415                 case OP_CALL_HANDLER: 
2416                         mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB, ins->inst_target_bb);
2417                         x86_call_imm (code, 0);
2418                         break;
2419                 case OP_LABEL:
2420                         ins->inst_c0 = code - cfg->native_code;
2421                         break;
2422                 case CEE_BR:
2423                         //g_print ("target: %p, next: %p, curr: %p, last: %p\n", ins->inst_target_bb, bb->next_bb, ins, bb->last_ins);
2424                         //if ((ins->inst_target_bb == bb->next_bb) && ins == bb->last_ins)
2425                         //break;
2426                         if (ins->flags & MONO_INST_BRLABEL) {
2427                                 if (ins->inst_i0->inst_c0) {
2428                                         x86_jump_code (code, cfg->native_code + ins->inst_i0->inst_c0);
2429                                 } else {
2430                                         mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_LABEL, ins->inst_i0);
2431                                         x86_jump32 (code, 0);
2432                                 }
2433                         } else {
2434                                 if (ins->inst_target_bb->native_offset) {
2435                                         x86_jump_code (code, cfg->native_code + ins->inst_target_bb->native_offset); 
2436                                 } else {
2437                                         mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_target_bb);
2438                                         if ((cfg->opt & MONO_OPT_BRANCH) &&
2439                                             x86_is_imm8 (ins->inst_target_bb->max_offset - cpos))
2440                                                 x86_jump8 (code, 0);
2441                                         else 
2442                                                 x86_jump32 (code, 0);
2443                                 } 
2444                         }
2445                         break;
2446                 case OP_BR_REG:
2447                         x86_jump_reg (code, ins->sreg1);
2448                         break;
2449                 case OP_CEQ:
2450                         x86_set_reg (code, X86_CC_EQ, ins->dreg, TRUE);
2451                         x86_widen_reg (code, ins->dreg, ins->dreg, FALSE, FALSE);
2452                         break;
2453                 case OP_CLT:
2454                         x86_set_reg (code, X86_CC_LT, ins->dreg, TRUE);
2455                         x86_widen_reg (code, ins->dreg, ins->dreg, FALSE, FALSE);
2456                         break;
2457                 case OP_CLT_UN:
2458                         x86_set_reg (code, X86_CC_LT, ins->dreg, FALSE);
2459                         x86_widen_reg (code, ins->dreg, ins->dreg, FALSE, FALSE);
2460                         break;
2461                 case OP_CGT:
2462                         x86_set_reg (code, X86_CC_GT, ins->dreg, TRUE);
2463                         x86_widen_reg (code, ins->dreg, ins->dreg, FALSE, FALSE);
2464                         break;
2465                 case OP_CGT_UN:
2466                         x86_set_reg (code, X86_CC_GT, ins->dreg, FALSE);
2467                         x86_widen_reg (code, ins->dreg, ins->dreg, FALSE, FALSE);
2468                         break;
2469                 case OP_COND_EXC_EQ:
2470                 case OP_COND_EXC_NE_UN:
2471                 case OP_COND_EXC_LT:
2472                 case OP_COND_EXC_LT_UN:
2473                 case OP_COND_EXC_GT:
2474                 case OP_COND_EXC_GT_UN:
2475                 case OP_COND_EXC_GE:
2476                 case OP_COND_EXC_GE_UN:
2477                 case OP_COND_EXC_LE:
2478                 case OP_COND_EXC_LE_UN:
2479                 case OP_COND_EXC_OV:
2480                 case OP_COND_EXC_NO:
2481                 case OP_COND_EXC_C:
2482                 case OP_COND_EXC_NC:
2483                         EMIT_COND_SYSTEM_EXCEPTION (branch_cc_table [ins->opcode - OP_COND_EXC_EQ], 
2484                                                     (ins->opcode < OP_COND_EXC_NE_UN), ins->inst_p1);
2485                         break;
2486                 case CEE_BEQ:
2487                 case CEE_BNE_UN:
2488                 case CEE_BLT:
2489                 case CEE_BLT_UN:
2490                 case CEE_BGT:
2491                 case CEE_BGT_UN:
2492                 case CEE_BGE:
2493                 case CEE_BGE_UN:
2494                 case CEE_BLE:
2495                 case CEE_BLE_UN:
2496                         EMIT_COND_BRANCH (ins, branch_cc_table [ins->opcode - CEE_BEQ], (ins->opcode < CEE_BNE_UN));
2497                         break;
2498
2499                 /* floating point opcodes */
2500                 case OP_R8CONST: {
2501                         double d = *(double *)ins->inst_p0;
2502
2503                         if ((d == 0.0) && (signbit (d) == 0)) {
2504                                 x86_fldz (code);
2505                         } else if (d == 1.0) {
2506                                 x86_fld1 (code);
2507                         } else {
2508                                 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_R8, ins->inst_p0);
2509                                 x86_fld (code, NULL, TRUE);
2510                         }
2511                         break;
2512                 }
2513                 case OP_R4CONST: {
2514                         float f = *(float *)ins->inst_p0;
2515
2516                         if ((f == 0.0) && (signbit (f) == 0)) {
2517                                 x86_fldz (code);
2518                         } else if (f == 1.0) {
2519                                 x86_fld1 (code);
2520                         } else {
2521                                 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_R4, ins->inst_p0);
2522                                 x86_fld (code, NULL, FALSE);
2523                         }
2524                         break;
2525                 }
2526                 case OP_STORER8_MEMBASE_REG:
2527                         x86_fst_membase (code, ins->inst_destbasereg, ins->inst_offset, TRUE, TRUE);
2528                         break;
2529                 case OP_LOADR8_MEMBASE:
2530                         x86_fld_membase (code, ins->inst_basereg, ins->inst_offset, TRUE);
2531                         break;
2532                 case OP_STORER4_MEMBASE_REG:
2533                         x86_fst_membase (code, ins->inst_destbasereg, ins->inst_offset, FALSE, TRUE);
2534                         break;
2535                 case OP_LOADR4_MEMBASE:
2536                         x86_fld_membase (code, ins->inst_basereg, ins->inst_offset, FALSE);
2537                         break;
2538                 case CEE_CONV_R4: /* FIXME: change precision */
2539                 case CEE_CONV_R8:
2540                         x86_push_reg (code, ins->sreg1);
2541                         x86_fild_membase (code, X86_ESP, 0, FALSE);
2542                         x86_alu_reg_imm (code, X86_ADD, X86_ESP, 4);
2543                         break;
2544                 case OP_X86_FP_LOAD_I8:
2545                         x86_fild_membase (code, ins->inst_basereg, ins->inst_offset, TRUE);
2546                         break;
2547                 case OP_X86_FP_LOAD_I4:
2548                         x86_fild_membase (code, ins->inst_basereg, ins->inst_offset, FALSE);
2549                         break;
2550                 case OP_FCONV_TO_I1:
2551                         code = emit_float_to_int (cfg, code, ins->dreg, 1, TRUE);
2552                         break;
2553                 case OP_FCONV_TO_U1:
2554                         code = emit_float_to_int (cfg, code, ins->dreg, 1, FALSE);
2555                         break;
2556                 case OP_FCONV_TO_I2:
2557                         code = emit_float_to_int (cfg, code, ins->dreg, 2, TRUE);
2558                         break;
2559                 case OP_FCONV_TO_U2:
2560                         code = emit_float_to_int (cfg, code, ins->dreg, 2, FALSE);
2561                         break;
2562                 case OP_FCONV_TO_I4:
2563                 case OP_FCONV_TO_I:
2564                         code = emit_float_to_int (cfg, code, ins->dreg, 4, TRUE);
2565                         break;
2566                 case OP_FCONV_TO_I8:
2567                         /* we defined this instruction to output only to eax:edx */
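                        /* the conversion must truncate toward zero, so temporarily set the
                         * x87 rounding-control bits (10-11 of the control word) to 11b:
                         * save the control word, OR in 0xc00, reload it, fistp the value
                         * into an 8 byte stack slot, pop it into eax:edx and finally
                         * restore the original control word */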
2568                         x86_alu_reg_imm (code, X86_SUB, X86_ESP, 4);
2569                         x86_fnstcw_membase(code, X86_ESP, 0);
2570                         x86_mov_reg_membase (code, X86_EAX, X86_ESP, 0, 2);
2571                         x86_alu_reg_imm (code, X86_OR, X86_EAX, 0xc00);
2572                         x86_mov_membase_reg (code, X86_ESP, 2, X86_EAX, 2);
2573                         x86_fldcw_membase (code, X86_ESP, 2);
2574                         x86_alu_reg_imm (code, X86_SUB, X86_ESP, 8);
2575                         x86_fist_pop_membase (code, X86_ESP, 0, TRUE);
2576                         x86_pop_reg (code, X86_EAX);
2577                         x86_pop_reg (code, X86_EDX);
2578                         x86_fldcw_membase (code, X86_ESP, 0);
2579                         x86_alu_reg_imm (code, X86_ADD, X86_ESP, 4);
2580                         break;
2581                 case OP_LCONV_TO_R_UN: { 
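                        /* unsigned 64 bit -> double: fild interprets the bits as signed, so
                         * when the high word is negative we add 2^64 afterwards; mn below is
                         * 2^64 encoded as an 80 bit extended double (significand
                         * 0x8000000000000000, biased exponent 0x403f) */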
2582                         static guint8 mn[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x3f, 0x40 };
2583                         guint8 *br;
2584
2585                         /* load 64bit integer to FP stack */
2586                         x86_push_imm (code, 0);
2587                         x86_push_reg (code, ins->sreg2);
2588                         x86_push_reg (code, ins->sreg1);
2589                         x86_fild_membase (code, X86_ESP, 0, TRUE);
2590                         /* store as 80bit FP value */
2591                         x86_fst80_membase (code, X86_ESP, 0);
2592                         
2593                         /* test if lreg is negative */
2594                         x86_test_reg_reg (code, ins->sreg2, ins->sreg2);
2595                         br = code; x86_branch8 (code, X86_CC_GEZ, 0, TRUE);
2596         
2597                         /* add correction constant mn */
2598                         x86_fld80_mem (code, mn);
2599                         x86_fld80_membase (code, X86_ESP, 0);
2600                         x86_fp_op_reg (code, X86_FADD, 1, TRUE);
2601                         x86_fst80_membase (code, X86_ESP, 0);
2602
2603                         x86_patch (br, code);
2604
2605                         x86_fld80_membase (code, X86_ESP, 0);
2606                         x86_alu_reg_imm (code, X86_ADD, X86_ESP, 12);
2607
2608                         break;
2609                 }
2610                 case OP_LCONV_TO_OVF_I: {
2611                         guint8 *br [3], *label [1];
2612
2613                         /* 
2614                          * Valid ints: 0xffffffff:80000000 to 0x00000000:7fffffff
2615                          */
2616                         x86_test_reg_reg (code, ins->sreg1, ins->sreg1);
2617
2618                         /* If the low word top bit is set, see if we are negative */
2619                         br [0] = code; x86_branch8 (code, X86_CC_LT, 0, TRUE);
2620                         /* We are not negative (no top bit set); check that the top word is zero */
2621                         x86_test_reg_reg (code, ins->sreg2, ins->sreg2);
2622                         br [1] = code; x86_branch8 (code, X86_CC_EQ, 0, TRUE);
2623                         label [0] = code;
2624
2625                         /* throw exception */
2626                         mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_EXC, "OverflowException");
2627                         x86_jump32 (code, 0);
2628         
2629                         x86_patch (br [0], code);
2630                         /* our top bit is set, check that the top word is 0xffffffff */
2631                         x86_alu_reg_imm (code, X86_CMP, ins->sreg2, 0xffffffff);
2632                 
2633                         x86_patch (br [1], code);
2634                         /* nope, emit exception */
2635                         br [2] = code; x86_branch8 (code, X86_CC_NE, 0, TRUE);
2636                         x86_patch (br [2], label [0]);
2637
2638                         if (ins->dreg != ins->sreg1)
2639                                 x86_mov_reg_reg (code, ins->dreg, ins->sreg1, 4);
2640                         break;
2641                 }
2642                 case OP_FADD:
2643                         x86_fp_op_reg (code, X86_FADD, 1, TRUE);
2644                         break;
2645                 case OP_FSUB:
2646                         x86_fp_op_reg (code, X86_FSUB, 1, TRUE);
2647                         break;          
2648                 case OP_FMUL:
2649                         x86_fp_op_reg (code, X86_FMUL, 1, TRUE);
2650                         break;          
2651                 case OP_FDIV:
2652                         x86_fp_op_reg (code, X86_FDIV, 1, TRUE);
2653                         break;          
2654                 case OP_FNEG:
2655                         x86_fchs (code);
2656                         break;          
2657                 case OP_SIN:
2658                         x86_fsin (code);
2659                         break;          
2660                 case OP_COS:
2661                         x86_fcos (code);
2662                         break;          
2663                 case OP_ABS:
2664                         x86_fabs (code);
2665                         break;          
2666                 case OP_TAN: {
2667                         /* 
2668                          * it really doesn't make sense to inline all this code,
2669                          * it's here just to show that things may not be as simple 
2670                          * as they appear.
2671                          */
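                        /* fptan replaces st(0) with tan(st(0)) and pushes 1.0, but only for
                         * |x| < 2^63; otherwise it leaves the operand untouched and sets C2
                         * (0x0400 in the status word), in which case the slow path below
                         * reduces the argument modulo 2*pi with fprem1 and retries */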
2672                         guchar *check_pos, *end_tan, *pop_jump;
2673                         x86_push_reg (code, X86_EAX);
2674                         x86_fptan (code);
2675                         x86_fnstsw (code);
2676                         x86_test_reg_imm (code, X86_EAX, 0x400);
2677                         check_pos = code;
2678                         x86_branch8 (code, X86_CC_NE, 0, FALSE);
2679                         x86_fstp (code, 0); /* pop the 1.0 */
2680                         end_tan = code;
2681                         x86_jump8 (code, 0);
2682                         x86_fldpi (code);
2683                         x86_fp_op (code, X86_FADD, 0);
2684                         x86_fxch (code, 1);
2685                         x86_fprem1 (code);
2686                         x86_fstsw (code);
2687                         x86_test_reg_imm (code, X86_EAX, 0x400);
2688                         pop_jump = code;
2689                         x86_branch8 (code, X86_CC_NE, 0, FALSE);
2690                         x86_fstp (code, 1);
2691                         x86_fptan (code);
2692                         x86_patch (pop_jump, code);
2693                         x86_fstp (code, 0); /* pop the 1.0 */
2694                         x86_patch (check_pos, code);
2695                         x86_patch (end_tan, code);
2696                         x86_pop_reg (code, X86_EAX);
2697                         break;
2698                 }
2699                 case OP_ATAN:
2700                         x86_fld1 (code);
2701                         x86_fpatan (code);
2702                         break;          
2703                 case OP_SQRT:
2704                         x86_fsqrt (code);
2705                         break;          
2706                 case OP_X86_FPOP:
2707                         x86_fstp (code, 0);
2708                         break;          
2709                 case OP_FREM: {
2710                         guint8 *l1, *l2;
2711
2712                         x86_push_reg (code, X86_EAX);
2713                         /* we need to exchange ST(0) with ST(1) */
2714                         x86_fxch (code, 1);
2715
2716                         /* this requires a loop, because fprem sometimes 
2717                          * returns a partial remainder */
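                        /* fprem reports an incomplete reduction by setting C2 (0x0400 in the
                         * status word), which is exactly what the and/branch below test */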
2718                         l1 = code;
2719                         /* looks like MS is using fprem instead of the IEEE compatible fprem1 */
2720                         /* x86_fprem1 (code); */
2721                         x86_fprem (code);
2722                         x86_fnstsw (code);
2723                         x86_alu_reg_imm (code, X86_AND, X86_EAX, 0x0400);
2724                         l2 = code + 2;
2725                         x86_branch8 (code, X86_CC_NE, l1 - l2, FALSE);
2726
2727                         /* pop result */
2728                         x86_fstp (code, 1);
2729
2730                         x86_pop_reg (code, X86_EAX);
2731                         break;
2732                 }
2733                 case OP_FCOMPARE:
2734                         if (cfg->opt & MONO_OPT_FCMOV) {
2735                                 x86_fcomip (code, 1);
2736                                 x86_fstp (code, 0);
2737                                 break;
2738                         }
2739                         /* this overwrites EAX */
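                        /* EMIT_FPCOMPARE leaves the x87 status word in EAX; masking with
                         * 0x4500 keeps C0 (0x0100), C2 (0x0400) and C3 (0x4000), the
                         * condition bits that encode the comparison result */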
2740                         EMIT_FPCOMPARE(code);
2741                         x86_alu_reg_imm (code, X86_AND, X86_EAX, 0x4500);
2742                         break;
2743                 case OP_FCEQ:
2744                         if (cfg->opt & MONO_OPT_FCMOV) {
2745                                 /* zeroing the register at the start results in 
2746                                  * shorter and faster code (we can also remove the widening op)
2747                                  */
2748                                 guchar *unordered_check;
2749                                 x86_alu_reg_reg (code, X86_XOR, ins->dreg, ins->dreg);
2750                                 x86_fcomip (code, 1);
2751                                 x86_fstp (code, 0);
2752                                 unordered_check = code;
2753                                 x86_branch8 (code, X86_CC_P, 0, FALSE);
2754                                 x86_set_reg (code, X86_CC_EQ, ins->dreg, FALSE);
2755                                 x86_patch (unordered_check, code);
2756                                 break;
2757                         }
2758                         if (ins->dreg != X86_EAX) 
2759                                 x86_push_reg (code, X86_EAX);
2760
2761                         EMIT_FPCOMPARE(code);
2762                         x86_alu_reg_imm (code, X86_AND, X86_EAX, 0x4500);
2763                         x86_alu_reg_imm (code, X86_CMP, X86_EAX, 0x4000);
2764                         x86_set_reg (code, X86_CC_EQ, ins->dreg, TRUE);
2765                         x86_widen_reg (code, ins->dreg, ins->dreg, FALSE, FALSE);
2766
2767                         if (ins->dreg != X86_EAX) 
2768                                 x86_pop_reg (code, X86_EAX);
2769                         break;
2770                 case OP_FCLT:
2771                 case OP_FCLT_UN:
2772                         if (cfg->opt & MONO_OPT_FCMOV) {
2773                                 /* zeroing the register at the start results in 
2774                                  * shorter and faster code (we can also remove the widening op)
2775                                  */
2776                                 x86_alu_reg_reg (code, X86_XOR, ins->dreg, ins->dreg);
2777                                 x86_fcomip (code, 1);
2778                                 x86_fstp (code, 0);
2779                                 if (ins->opcode == OP_FCLT_UN) {
2780                                         guchar *unordered_check = code;
2781                                         guchar *jump_to_end;
2782                                         x86_branch8 (code, X86_CC_P, 0, FALSE);
2783                                         x86_set_reg (code, X86_CC_GT, ins->dreg, FALSE);
2784                                         jump_to_end = code;
2785                                         x86_jump8 (code, 0);
2786                                         x86_patch (unordered_check, code);
2787                                         x86_inc_reg (code, ins->dreg);
2788                                         x86_patch (jump_to_end, code);
2789                                 } else {
2790                                         x86_set_reg (code, X86_CC_GT, ins->dreg, FALSE);
2791                                 }
2792                                 break;
2793                         }
2794                         if (ins->dreg != X86_EAX) 
2795                                 x86_push_reg (code, X86_EAX);
2796
2797                         EMIT_FPCOMPARE(code);
2798                         x86_alu_reg_imm (code, X86_AND, X86_EAX, 0x4500);
2799                         if (ins->opcode == OP_FCLT_UN) {
2800                                 guchar *is_not_zero_check, *end_jump;
2801                                 is_not_zero_check = code;
2802                                 x86_branch8 (code, X86_CC_NZ, 0, TRUE);
2803                                 end_jump = code;
2804                                 x86_jump8 (code, 0);
2805                                 x86_patch (is_not_zero_check, code);
2806                                 x86_alu_reg_imm (code, X86_CMP, X86_EAX, 0x4500);
2807
2808                                 x86_patch (end_jump, code);
2809                         }
2810                         x86_set_reg (code, X86_CC_EQ, ins->dreg, TRUE);
2811                         x86_widen_reg (code, ins->dreg, ins->dreg, FALSE, FALSE);
2812
2813                         if (ins->dreg != X86_EAX) 
2814                                 x86_pop_reg (code, X86_EAX);
2815                         break;
2816                 case OP_FCGT:
2817                 case OP_FCGT_UN:
2818                         if (cfg->opt & MONO_OPT_FCMOV) {
2819                                 /* zeroing the register at the start results in 
2820                                  * shorter and faster code (we can also remove the widening op)
2821                                  */
2822                                 guchar *unordered_check;
2823                                 x86_alu_reg_reg (code, X86_XOR, ins->dreg, ins->dreg);
2824                                 x86_fcomip (code, 1);
2825                                 x86_fstp (code, 0);
2826                                 if (ins->opcode == OP_FCGT) {
2827                                         unordered_check = code;
2828                                         x86_branch8 (code, X86_CC_P, 0, FALSE);
2829                                         x86_set_reg (code, X86_CC_LT, ins->dreg, FALSE);
2830                                         x86_patch (unordered_check, code);
2831                                 } else {
2832                                         x86_set_reg (code, X86_CC_LT, ins->dreg, FALSE);
2833                                 }
2834                                 break;
2835                         }
2836                         if (ins->dreg != X86_EAX) 
2837                                 x86_push_reg (code, X86_EAX);
2838
2839                         EMIT_FPCOMPARE(code);
2840                         x86_alu_reg_imm (code, X86_AND, X86_EAX, 0x4500);
2841                         x86_alu_reg_imm (code, X86_CMP, X86_EAX, 0x0100);
2842                         if (ins->opcode == OP_FCGT_UN) {
2843                                 guchar *is_not_zero_check, *end_jump;
2844                                 is_not_zero_check = code;
2845                                 x86_branch8 (code, X86_CC_NZ, 0, TRUE);
2846                                 end_jump = code;
2847                                 x86_jump8 (code, 0);
2848                                 x86_patch (is_not_zero_check, code);
2849                                 x86_alu_reg_imm (code, X86_CMP, X86_EAX, 0x4500);
2850
2851                                 x86_patch (end_jump, code);
2852                         }
2853                         x86_set_reg (code, X86_CC_EQ, ins->dreg, TRUE);
2854                         x86_widen_reg (code, ins->dreg, ins->dreg, FALSE, FALSE);
2855
2856                         if (ins->dreg != X86_EAX) 
2857                                 x86_pop_reg (code, X86_EAX);
2858                         break;
2859                 case OP_FBEQ:
2860                         if (cfg->opt & MONO_OPT_FCMOV) {
2861                                 guchar *jump = code;
2862                                 x86_branch8 (code, X86_CC_P, 0, TRUE);
2863                                 EMIT_COND_BRANCH (ins, X86_CC_EQ, FALSE);
2864                                 x86_patch (jump, code);
2865                                 break;
2866                         }
2867                         x86_alu_reg_imm (code, X86_CMP, X86_EAX, 0x4000);
2868                         EMIT_COND_BRANCH (ins, X86_CC_EQ, TRUE);
2869                         break;
2870                 case OP_FBNE_UN:
2871                         if (cfg->opt & MONO_OPT_FCMOV) {
2872                                 EMIT_COND_BRANCH (ins, X86_CC_P, FALSE);
2873                                 EMIT_COND_BRANCH (ins, X86_CC_NE, FALSE);
2874                                 break;
2875                         }
2876                         x86_alu_reg_imm (code, X86_CMP, X86_EAX, 0x4000);
2877                         EMIT_COND_BRANCH (ins, X86_CC_NE, FALSE);
2878                         break;
2879                 case OP_FBLT:
2880                         if (cfg->opt & MONO_OPT_FCMOV) {
2881                                 EMIT_COND_BRANCH (ins, X86_CC_GT, FALSE);
2882                                 break;
2883                         }
2884                         EMIT_COND_BRANCH (ins, X86_CC_EQ, FALSE);
2885                         break;
2886                 case OP_FBLT_UN:
2887                         if (cfg->opt & MONO_OPT_FCMOV) {
2888                                 EMIT_COND_BRANCH (ins, X86_CC_P, FALSE);
2889                                 EMIT_COND_BRANCH (ins, X86_CC_GT, FALSE);
2890                                 break;
2891                         }
2892                         if (ins->opcode == OP_FBLT_UN) {
2893                                 guchar *is_not_zero_check, *end_jump;
2894                                 is_not_zero_check = code;
2895                                 x86_branch8 (code, X86_CC_NZ, 0, TRUE);
2896                                 end_jump = code;
2897                                 x86_jump8 (code, 0);
2898                                 x86_patch (is_not_zero_check, code);
2899                                 x86_alu_reg_imm (code, X86_CMP, X86_EAX, 0x4500);
2900
2901                                 x86_patch (end_jump, code);
2902                         }
2903                         EMIT_COND_BRANCH (ins, X86_CC_EQ, FALSE);
2904                         break;
2905                 case OP_FBGT:
2906                 case OP_FBGT_UN:
2907                         if (cfg->opt & MONO_OPT_FCMOV) {
2908                                 EMIT_COND_BRANCH (ins, X86_CC_LT, FALSE);
2909                                 break;
2910                         }
2911                         x86_alu_reg_imm (code, X86_CMP, X86_EAX, 0x0100);
2912                         if (ins->opcode == OP_FBGT_UN) {
2913                                 guchar *is_not_zero_check, *end_jump;
2914                                 is_not_zero_check = code;
2915                                 x86_branch8 (code, X86_CC_NZ, 0, TRUE);
2916                                 end_jump = code;
2917                                 x86_jump8 (code, 0);
2918                                 x86_patch (is_not_zero_check, code);
2919                                 x86_alu_reg_imm (code, X86_CMP, X86_EAX, 0x4500);
2920
2921                                 x86_patch (end_jump, code);
2922                         }
2923                         EMIT_COND_BRANCH (ins, X86_CC_EQ, FALSE);
2924                         break;
2925                 case OP_FBGE:
2926                 case OP_FBGE_UN:
2927                         if (cfg->opt & MONO_OPT_FCMOV) {
2928                                 EMIT_COND_BRANCH (ins, X86_CC_LE, FALSE);
2929                                 break;
2930                         }
2931                         EMIT_COND_BRANCH (ins, X86_CC_NE, FALSE);
2932                         break;
2933                 case OP_FBLE:
2934                 case OP_FBLE_UN:
2935                         if (cfg->opt & MONO_OPT_FCMOV) {
2936                                 EMIT_COND_BRANCH (ins, X86_CC_P, FALSE);
2937                                 EMIT_COND_BRANCH (ins, X86_CC_GE, FALSE);
2938                                 break;
2939                         }
2940                         x86_alu_reg_imm (code, X86_CMP, X86_EAX, 0x0100);
2941                         EMIT_COND_BRANCH (ins, X86_CC_NE, FALSE);
2942                         break;
2943                 case CEE_CKFINITE: {
2944                         x86_push_reg (code, X86_EAX);
2945                         x86_fxam (code);
2946                         x86_fnstsw (code);
2947                         x86_alu_reg_imm (code, X86_AND, X86_EAX, 0x4100);
2948                         x86_alu_reg_imm (code, X86_CMP, X86_EAX, 0x0100);
2949                         x86_pop_reg (code, X86_EAX);
2950                         EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "ArithmeticException");
2951                         break;
2952                 }
2953                 default:
2954                         g_warning ("unknown opcode %s in %s()\n", mono_inst_name (ins->opcode), __FUNCTION__);
2955                         g_assert_not_reached ();
2956                 }
2957
2958                 if ((code - cfg->native_code - offset) > max_len) {
2959                         g_warning ("wrong maximal instruction length of instruction %s (expected %d, got %d)",
2960                                    mono_inst_name (ins->opcode), max_len, code - cfg->native_code - offset);
2961                         g_assert_not_reached ();
2962                 }
2963                
2964                 cpos += max_len;
2965
2966                 last_ins = ins;
2967                 last_offset = offset;
2968                 
2969                 ins = ins->next;
2970         }
2971
2972         cfg->code_len = code - cfg->native_code;
2973 }
2974
2975 void
2976 mono_arch_register_lowlevel_calls (void)
2977 {
2978         mono_register_jit_icall (enter_method, "mono_enter_method", NULL, TRUE);
2979         mono_register_jit_icall (leave_method, "mono_leave_method", NULL, TRUE);
2980 }
2981
2982 void
2983 mono_arch_patch_code (MonoMethod *method, MonoDomain *domain, guint8 *code, MonoJumpInfo *ji)
2984 {
2985         MonoJumpInfo *patch_info;
2986
2987         for (patch_info = ji; patch_info; patch_info = patch_info->next) {
2988                 unsigned char *ip = patch_info->ip.i + code;
2989                 const unsigned char *target = NULL;
2990
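                /* patch_info->ip.i is an offset into the generated code; most cases compute
                 * an absolute target and fall through to x86_patch at the bottom, while the
                 * cases that 'continue' store a constant directly into the immediate operand
                 * of the instruction at ip (hence the ip + 1 / ip + 2 offsets) */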
2991                 switch (patch_info->type) {
2992                 case MONO_PATCH_INFO_BB:
2993                         target = patch_info->data.bb->native_offset + code;
2994                         break;
2995                 case MONO_PATCH_INFO_ABS:
2996                         target = patch_info->data.target;
2997                         break;
2998                 case MONO_PATCH_INFO_LABEL:
2999                         target = patch_info->data.inst->inst_c0 + code;
3000                         break;
3001                 case MONO_PATCH_INFO_IP:
3002                         *((gpointer *)(ip)) = ip;
3003                         continue;
3004                 case MONO_PATCH_INFO_METHOD_REL:
3005                         *((gpointer *)(ip)) = code + patch_info->data.offset;
3006                         continue;
3007                 case MONO_PATCH_INFO_INTERNAL_METHOD: {
3008                         MonoJitICallInfo *mi = mono_find_jit_icall_by_name (patch_info->data.name);
3009                         if (!mi) {
3010                                 g_warning ("unknown MONO_PATCH_INFO_INTERNAL_METHOD %s", patch_info->data.name);
3011                                 g_assert_not_reached ();
3012                         }
3013                         target = mono_icall_get_wrapper (mi);
3014                         break;
3015                 }
3016                 case MONO_PATCH_INFO_METHOD_JUMP: {
3017                         MonoJitInfo *jit_info;
3018                         GSList *list;
3019
3020                         /* get the trampoline to the method from the domain */
3021                         target = mono_arch_create_jump_trampoline (patch_info->data.method);
3022                         if (!domain->jump_target_hash)
3023                                 domain->jump_target_hash = g_hash_table_new (NULL, NULL);
3024                         list = g_hash_table_lookup (domain->jump_target_hash, patch_info->data.method);
3025                         list = g_slist_prepend (list, ip);
3026                         g_hash_table_insert (domain->jump_target_hash, patch_info->data.method, list);
3027                         break;
3028                 }
3029                 case MONO_PATCH_INFO_METHOD:
3030                         if (patch_info->data.method == method) {
3031                                 target = code;
3032                         } else
3033                                 /* get the trampoline to the method from the domain */
3034                                 target = mono_arch_create_jit_trampoline (patch_info->data.method);
3035                         break;
3036                 case MONO_PATCH_INFO_SWITCH: {
3037                         gpointer *jump_table = mono_mempool_alloc (domain->code_mp, sizeof (gpointer) * patch_info->table_size);
3038                         int i;
3039
3040                         *((gconstpointer *)(ip + 2)) = jump_table;
3041
3042                         for (i = 0; i < patch_info->table_size; i++) {
3043                                 jump_table [i] = code + (int)patch_info->data.table [i];
3044                         }
3045                         /* the table holds absolute addresses, so no x86_patch is needed in this case */
3046                         continue;
3047                 }
3048                 case MONO_PATCH_INFO_METHODCONST:
3049                 case MONO_PATCH_INFO_CLASS:
3050                 case MONO_PATCH_INFO_IMAGE:
3051                 case MONO_PATCH_INFO_FIELD:
3052                         *((gconstpointer *)(ip + 1)) = patch_info->data.target;
3053                         continue;
3054                 case MONO_PATCH_INFO_IID:
3055                         mono_class_init (patch_info->data.klass);
3056                         *((guint32 *)(ip + 1)) = patch_info->data.klass->interface_id;
3057                         continue;                       
3058                 case MONO_PATCH_INFO_VTABLE:
3059                         *((gconstpointer *)(ip + 1)) = mono_class_vtable (domain, patch_info->data.klass);
3060                         continue;
3061                 case MONO_PATCH_INFO_CLASS_INIT: {
3062                         guint8 *code = ip;
3063                         /* Might already have been changed to a nop */
3064                         x86_call_imm (code, 0);
3065                         target = mono_create_class_init_trampoline (mono_class_vtable (domain, patch_info->data.klass));
3066                         break;
3067                 }
3068                 case MONO_PATCH_INFO_SFLDA: {
3069                         MonoVTable *vtable = mono_class_vtable (domain, patch_info->data.field->parent);
3070                         if (!vtable->initialized && !(vtable->klass->flags & TYPE_ATTRIBUTE_BEFORE_FIELD_INIT) && mono_class_needs_cctor_run (vtable->klass, method))
3071                                 /* Done by the generated code */
3072                                 ;
3073                         else {
3074                                 mono_runtime_class_init (vtable);
3075                         }
3076                         *((gconstpointer *)(ip + 1)) = 
3077                                 (char*)vtable->data + patch_info->data.field->offset;
3078                         continue;
3079                 }
3080                 case MONO_PATCH_INFO_R4:
3081                 case MONO_PATCH_INFO_R8:
3082                         *((gconstpointer *)(ip + 2)) = patch_info->data.target;
3083                         continue;
3084                 case MONO_PATCH_INFO_EXC_NAME:
3085                         *((gconstpointer *)(ip + 1)) = patch_info->data.name;
3086                         continue;
3087                 case MONO_PATCH_INFO_LDSTR:
3088                         *((gconstpointer *)(ip + 1)) = 
3089                                 mono_ldstr (domain, method->klass->image, 
3090                                                         mono_metadata_token_index (patch_info->data.token));
3091                         continue;
3092                 case MONO_PATCH_INFO_TYPE_FROM_HANDLE: {
3093                         gpointer handle;
3094                         MonoClass *handle_class;
3095
3096                         handle = mono_ldtoken (method->klass->image, 
3097                                                                    patch_info->data.token, &handle_class);
3098                         mono_class_init (handle_class);
3099                         mono_class_init (mono_class_from_mono_type (handle));
3100
3101                         *((gconstpointer *)(ip + 1)) = 
3102                                 mono_type_get_object (domain, handle);
3103                         continue;
3104                 }
3105                 case MONO_PATCH_INFO_LDTOKEN: {
3106                         gpointer handle;
3107                         MonoClass *handle_class;
3108
3109                         handle = mono_ldtoken (method->klass->image, 
3110                                                                    patch_info->data.token, &handle_class);
3111                         mono_class_init (handle_class);
3112
3113                         *((gconstpointer *)(ip + 1)) = handle;
3114                         continue;
3115                 }
3116                 default:
3117                         g_assert_not_reached ();
3118                 }
3119                 x86_patch (ip, target);
3120         }
3121 }
3122
3123 int
3124 mono_arch_max_epilog_size (MonoCompile *cfg)
3125 {
3126         int exc_count = 0, max_epilog_size = 16;
3127         MonoJumpInfo *patch_info;
3128         
3129         if (cfg->method->save_lmf)
3130                 max_epilog_size += 128;
3131         
3132         if (mono_jit_trace_calls)
3133                 max_epilog_size += 50;
3134
3135         if (cfg->prof_options & MONO_PROFILE_ENTER_LEAVE)
3136                 max_epilog_size += 50;
3137
3138         /* count the number of exception infos */
3139      
3140         for (patch_info = cfg->patch_info; patch_info; patch_info = patch_info->next) {
3141                 if (patch_info->type == MONO_PATCH_INFO_EXC)
3142                         exc_count++;
3143         }
3144
3145         /* 
3146          * make sure we have enough space for exceptions
3147          * 16 is the size of two push_imm instructions and a call
3148          */
3149         max_epilog_size += exc_count*16;
3150
3151         return max_epilog_size;
3152 }
3153
3154 guint8 *
3155 mono_arch_emit_prolog (MonoCompile *cfg)
3156 {
3157         MonoMethod *method = cfg->method;
3158         MonoBasicBlock *bb;
3159         MonoMethodSignature *sig;
3160         MonoInst *inst;
3161         int alloc_size, pos, max_offset, i;
3162         guint8 *code;
3163
3164         cfg->code_size =  MAX (((MonoMethodNormal *)method)->header->code_size * 4, 256);
3165         code = cfg->native_code = g_malloc (cfg->code_size);
3166
3167         x86_push_reg (code, X86_EBP);
3168         x86_mov_reg_reg (code, X86_EBP, X86_ESP, 4);
3169
3170         alloc_size = - cfg->stack_offset;
3171         pos = 0;
3172
3173         if (method->save_lmf) {
3174                 pos += sizeof (MonoLMF);
3175
3176                 /* save the current IP */
3177                 mono_add_patch_info (cfg, code + 1 - cfg->native_code, MONO_PATCH_INFO_IP, NULL);
3178                 x86_push_imm (code, 0);
3179
3180                 /* save all caller saved regs */
3181                 x86_push_reg (code, X86_EBX);
3182                 x86_push_reg (code, X86_EDI);
3183                 x86_push_reg (code, X86_ESI);
3184                 x86_push_reg (code, X86_EBP);
3185
3186                 /* save method info */
3187                 x86_push_imm (code, method);
3188         
3189                 /* get the address of lmf for the current thread */
3190                 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD, 
3191                                      (gpointer)"mono_get_lmf_addr");
3192                 x86_call_code (code, 0);
3193
3194                 /* push lmf */
3195                 x86_push_reg (code, X86_EAX); 
3196                 /* push *lmf (previous_lmf) */
3197                 x86_push_membase (code, X86_EAX, 0);
3198                 /* *(lmf) = ESP */
3199                 x86_mov_membase_reg (code, X86_EAX, 0, X86_ESP, 4);
3200         } else {
3201
3202                 if (cfg->used_int_regs & (1 << X86_EBX)) {
3203                         x86_push_reg (code, X86_EBX);
3204                         pos += 4;
3205                 }
3206
3207                 if (cfg->used_int_regs & (1 << X86_EDI)) {
3208                         x86_push_reg (code, X86_EDI);
3209                         pos += 4;
3210                 }
3211
3212                 if (cfg->used_int_regs & (1 << X86_ESI)) {
3213                         x86_push_reg (code, X86_ESI);
3214                         pos += 4;
3215                 }
3216         }
3217
3218         alloc_size -= pos;
3219
3220         if (alloc_size)
3221                 x86_alu_reg_imm (code, X86_SUB, X86_ESP, alloc_size);
3222
3223         /* compute max_offset in order to use short forward jumps */
3224         max_offset = 0;
3225         if (cfg->opt & MONO_OPT_BRANCH) {
3226                 for (bb = cfg->bb_entry; bb; bb = bb->next_bb) {
3227                         MonoInst *ins = bb->code;
3228                         bb->max_offset = max_offset;
3229
3230                         if (cfg->prof_options & MONO_PROFILE_COVERAGE)
3231                                 max_offset += 6; 
3232
3233                         while (ins) {
3234                                 max_offset += ((guint8 *)ins_spec [ins->opcode])[MONO_INST_LEN];
3235                                 ins = ins->next;
3236                         }
3237                 }
3238         }
3239
3240         if (mono_jit_trace_calls)
3241                 code = mono_arch_instrument_prolog (cfg, enter_method, code, TRUE);
3242
3243         /* load arguments allocated to registers from the stack */
3244         sig = method->signature;
3245         pos = 0;
3246
3247         for (i = 0; i < sig->param_count + sig->hasthis; ++i) {
3248                 inst = cfg->varinfo [pos];
3249                 if (inst->opcode == OP_REGVAR) {
3250                         x86_mov_reg_membase (code, inst->dreg, X86_EBP, inst->inst_offset, 4);
3251                         if (cfg->verbose_level > 2)
3252                                 g_print ("Argument %d assigned to register %s\n", pos, mono_arch_regname (inst->dreg));
3253                 }
3254                 pos++;
3255         }
3256
3257         cfg->code_len = code - cfg->native_code;
3258
3259         return code;
3260 }
3261
3262 void
3263 mono_arch_emit_epilog (MonoCompile *cfg)
3264 {
3265         MonoJumpInfo *patch_info;
3266         MonoMethod *method = cfg->method;
3267         int pos;
3268         guint8 *code;
3269
3270         code = cfg->native_code + cfg->code_len;
3271
3272         if (mono_jit_trace_calls)
3273                 code = mono_arch_instrument_epilog (cfg, leave_method, code, TRUE);
3274
3275         /* the code restoring the registers must be kept in sync with CEE_JMP */
3276         pos = 0;
3277         
3278         if (method->save_lmf) {
3279                 pos = -sizeof (MonoLMF);
3280         } else {
3281                 if (cfg->used_int_regs & (1 << X86_EBX)) {
3282                         pos -= 4;
3283                 }
3284                 if (cfg->used_int_regs & (1 << X86_EDI)) {
3285                         pos -= 4;
3286                 }
3287                 if (cfg->used_int_regs & (1 << X86_ESI)) {
3288                         pos -= 4;
3289                 }
3290         }
3291
3292         if (pos)
3293                 x86_lea_membase (code, X86_ESP, X86_EBP, pos);
3294         
3295         if (method->save_lmf) {
3296                 /* ebx = previous_lmf */
3297                 x86_pop_reg (code, X86_EBX);
3298                 /* edi = lmf */
3299                 x86_pop_reg (code, X86_EDI);
3300                 /* *(lmf) = previous_lmf */
3301                 x86_mov_membase_reg (code, X86_EDI, 0, X86_EBX, 4);
3302
3303                 /* discard method info */
3304                 x86_pop_reg (code, X86_ESI);
3305
3306                 /* restore caller saved regs */
3307                 x86_pop_reg (code, X86_EBP);
3308                 x86_pop_reg (code, X86_ESI);
3309                 x86_pop_reg (code, X86_EDI);
3310                 x86_pop_reg (code, X86_EBX);
3311
3312         } else {
3313
3314                 if (cfg->used_int_regs & (1 << X86_ESI)) {
3315                         x86_pop_reg (code, X86_ESI);
3316                 }
3317                 if (cfg->used_int_regs & (1 << X86_EDI)) {
3318                         x86_pop_reg (code, X86_EDI);
3319                 }
3320                 if (cfg->used_int_regs & (1 << X86_EBX)) {
3321                         x86_pop_reg (code, X86_EBX);
3322                 }
3323         }
3324
3325         x86_leave (code);
3326         /* FIXME: add another check to support stdcall convention here */
3327         if (MONO_TYPE_ISSTRUCT (cfg->method->signature->ret))
3328                 x86_ret_imm (code, 4);
3329         else
3330                 x86_ret (code);
3331
3332         /* add code to raise exceptions */
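        /* for every MONO_PATCH_INFO_EXC the forward branch recorded in the method body is
         * patched to land here; we push the exception class name and the faulting IP and
         * jump to mono_arch_throw_exception_by_name (the patch info is re-typed so that
         * this jump itself gets patched later) */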
3333         for (patch_info = cfg->patch_info; patch_info; patch_info = patch_info->next) {
3334                 switch (patch_info->type) {
3335                 case MONO_PATCH_INFO_EXC:
3336                         x86_patch (patch_info->ip.i + cfg->native_code, code);
3337                         mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_EXC_NAME, patch_info->data.target);
3338                         x86_push_imm (code, patch_info->data.target);
3339                         mono_add_patch_info (cfg, code + 1 - cfg->native_code, MONO_PATCH_INFO_METHOD_REL, (gpointer)patch_info->ip.i);
3340                         x86_push_imm (code, patch_info->ip.i + cfg->native_code);
3341                         patch_info->type = MONO_PATCH_INFO_INTERNAL_METHOD;
3342                         patch_info->data.name = "mono_arch_throw_exception_by_name";
3343                         patch_info->ip.i = code - cfg->native_code;
3344                         x86_jump_code (code, 0);
3345                         break;
3346                 default:
3347                         /* do nothing */
3348                         break;
3349                 }
3350         }
3351
3352         cfg->code_len = code - cfg->native_code;
3353
3354         g_assert (cfg->code_len < cfg->code_size);
3355
3356 }
3357
3358 void
3359 mono_arch_flush_icache (guint8 *code, gint size)
3360 {
3361         /* not needed: x86 keeps the instruction cache coherent with data writes to code */
3362 }
3363