2 * tramp-ppc.c: JIT trampoline code for PowerPC
5 * Dietmar Maurer (dietmar@ximian.com)
6 * Paolo Molaro (lupus@ximian.com)
7 * Carlos Valiente <yo@virutass.net>
9 * (C) 2001 Ximian, Inc.
15 #include <mono/metadata/appdomain.h>
16 #include <mono/metadata/marshal.h>
17 #include <mono/metadata/tabledefs.h>
18 #include <mono/arch/ppc/ppc-codegen.h>
19 #include <mono/metadata/mono-debug-debugger.h>
25 MONO_TRAMPOLINE_GENERIC,
27 MONO_TRAMPOLINE_CLASS_INIT
30 /* adapt to mini later... */
/* Forced on (1): presumably means trampoline/JIT code is shared rather than
   per-domain — TODO confirm against the mini port this was meant to adapt to. */
31 #define mono_jit_share_code (1)
34 * Address of the generic trampoline code. This is used by the debugger to check
35 * whether a method is a trampoline.
37 guint8 *mono_generic_trampoline_code = NULL;
40 * get_unbox_trampoline:
42 * @addr: pointer to native code for @m
44 * when value type methods are called through the vtable we need to unbox the
45 * this argument. This method returns a pointer to a trampoline which does
46 * unboxing before calling the method
49 get_unbox_trampoline (MonoMethod *m, gpointer addr)
/* If the method returns a value type by value, the hidden return-buffer
   argument occupies the first slot, shifting 'this' — the elided lines
   presumably adjust this_pos accordingly (TODO confirm). */
54 if (!m->signature->ret->byref && MONO_TYPE_ISSTRUCT (m->signature->ret))
/* 20 bytes = room for at most five 32-bit instructions (ppc_load may emit
   one or two), checked by the g_assert below. */
57 start = code = g_malloc (20);
/* Load the real method address and move it into CTR for the tail call. */
59 ppc_load (code, ppc_r0, addr);
60 ppc_mtctr (code, ppc_r0);
/* Skip past the MonoObject header so the callee sees the raw value. */
61 ppc_addi (code, this_pos, this_pos, sizeof (MonoObject));
/* bcctr 20,0: unconditional branch to CTR, no link — a tail call. */
62 ppc_bcctr (code, 20, 0);
63 mono_arch_flush_icache (start, code - start);
64 g_assert ((code - start) <= 20);
65 /*g_print ("unbox trampoline at %d for %s:%s\n", this_pos, m->klass->name, m->name);
66 g_print ("unbox code is at %p for method at %p\n", start, addr);*/
71 /* Stack size for trampoline function: 144 bytes of integer/linkage save
   area plus 8 doubles (f1-f8) saved by create_trampoline_code. */
72 #define STACK (144 + 8*8)
74 /* Method-specific trampoline code fragment size */
75 #define METHOD_TRAMPOLINE_SIZE 64
78 * ppc_magic_trampoline:
79 * @code: pointer into caller code
80 * @method: the method to translate
83 * This method is called by the function 'arch_create_jit_trampoline', which in
84 * turn is called by the trampoline functions for virtual methods.
85 * After having called the JIT compiler to compile the method, it inspects the
86 * caller code to find the address of the method-specific part of the
87 * trampoline vtable slot for this method, updates it with a fragment that calls
88 * the newly compiled code and returns this address of the compiled code to
89 * 'arch_create_jit_trampoline'
/* Compile @method, then patch the call site (or vtable slot) found by
   inspecting the caller code at @code, so future calls go straight to the
   compiled code. @sp is the trampoline's stack frame, used to recover the
   register values saved by create_trampoline_code's prologue. */
92 ppc_magic_trampoline (MonoMethod *method, guint32 *code, char *sp)
98 addr = mono_compile_method(method);
99 /*g_print ("method code at %p for %s:%s\n", addr, method->klass->name, method->name);*/
102 /* Locate the address of the method-specific trampoline. The call using
103 the vtable slot that took the processing flow to 'arch_create_jit_trampoline'
104 looks something like this:
106 mtlr rA ; Move rA (a register containing the
107 ; target address) to LR
108 blrl ; Call function at LR
110 PowerPC instructions are 32-bit long, which means that a 32-bit target
111 address cannot be encoded as an immediate value (because we already
112 have spent some bits to encode the branch instruction!). That's why a
113 'b'ranch to the contents of the 'l'ink 'r'egister (with 'l'ink register
114 update) is needed, instead of a simpler 'branch immediate'. This
115 complicates our purpose here, because 'blrl' overwrites LR, which holds
116 the value we're interested in.
118 Therefore, we need to locate the 'mtlr rA' instruction to know which
119 register LR was loaded from, and then retrieve the value from that
122 /* This is the 'blrl' instruction */
126 * Note that methods are called also with the bl opcode.
/* Primary opcode 18 is the I-form branch (b/bl): a direct call we can
   patch in place instead of going through a register. */
128 if (((*code) >> 26) == 18) {
129 /*g_print ("direct patching\n");*/
130 ppc_patch (code, addr);
131 mono_arch_flush_icache (code, 4);
135 /* Sanity check: instruction must be 'blrl' */
136 g_assert(*code == 0x4e800021);
138 /* OK, we're now at the 'blrl' instruction. Now walk backwards
139 till we get to a 'mtlr rA' */
/* NOTE(review): 0x7c0803a6 is the encoding of 'mtlr r0' used here as its
   own mask. Masking a word with the opcode value and comparing against it
   matches ANY instruction that merely has all those bits set, not just
   mtlr — a proper opcode mask would be needed for a precise match. Verify
   against the instructions actually emitted by the JIT. */
141 if((*code & 0x7c0803a6) == 0x7c0803a6) {
143 /* Here we are: we reached the 'mtlr rA'.
144 Extract the register from the instruction */
145 reg = (*code & 0x03e00000) >> 21;
147 /* ok, this is a lwz reg, offset (vtreg)
148 * it is emitted with:
149 * ppc_emit32 (c, (32 << 26) | ((D) << 21) | ((a) << 16) | (guint16)(d))
151 soff = (*code & 0xffff);
153 reg = (*code >> 16) & 0x1f;
154 /*g_print ("patching reg is %d\n", reg);*/
/* Recover the saved value of 'reg' from the trampoline frame. The offsets
   mirror the stores in create_trampoline_code's prologue: r0 at STACK-8,
   r31 at STACK-4, r3..r30 at STACK-12 .. STACK-120, r11 at STACK-44
   (stored by the method-specific fragment before the stack was moved). */
156 case 0 : o = *((int *) (sp + STACK - 8)); break;
157 case 3 : o = *((int *) (sp + STACK - 12)); break;
158 case 4 : o = *((int *) (sp + STACK - 16)); break;
159 case 5 : o = *((int *) (sp + STACK - 20)); break;
160 case 6 : o = *((int *) (sp + STACK - 24)); break;
161 case 7 : o = *((int *) (sp + STACK - 28)); break;
162 case 8 : o = *((int *) (sp + STACK - 32)); break;
163 case 9 : o = *((int *) (sp + STACK - 36)); break;
164 case 10: o = *((int *) (sp + STACK - 40)); break;
165 case 11: o = *((int *) (sp + STACK - 44)); break;
166 case 12: o = *((int *) (sp + STACK - 48)); break;
167 case 13: o = *((int *) (sp + STACK - 52)); break;
168 case 14: o = *((int *) (sp + STACK - 56)); break;
169 case 15: o = *((int *) (sp + STACK - 60)); break;
170 case 16: o = *((int *) (sp + STACK - 64)); break;
171 case 17: o = *((int *) (sp + STACK - 68)); break;
172 case 18: o = *((int *) (sp + STACK - 72)); break;
173 case 19: o = *((int *) (sp + STACK - 76)); break;
174 case 20: o = *((int *) (sp + STACK - 80)); break;
175 case 21: o = *((int *) (sp + STACK - 84)); break;
176 case 22: o = *((int *) (sp + STACK - 88)); break;
177 case 23: o = *((int *) (sp + STACK - 92)); break;
178 case 24: o = *((int *) (sp + STACK - 96)); break;
179 case 25: o = *((int *) (sp + STACK - 100)); break;
180 case 26: o = *((int *) (sp + STACK - 104)); break;
181 case 27: o = *((int *) (sp + STACK - 108)); break;
182 case 28: o = *((int *) (sp + STACK - 112)); break;
183 case 29: o = *((int *) (sp + STACK - 116)); break;
184 case 30: o = *((int *) (sp + STACK - 120)); break;
185 case 31: o = *((int *) (sp + STACK - 4)); break;
187 printf("%s: Unexpected register %d\n",
189 g_assert_not_reached();
195 /* this is not done for non-virtual calls, because in that case
196 we won't have an object, but the actual pointer to the
197 valuetype as the this argument
199 if (method->klass->valuetype)
200 addr = get_unbox_trampoline (method, addr);
/* Write the compiled-code address into the vtable slot. */
203 *((gpointer *)o) = addr;
205 /* Finally, replace the method-specific trampoline code (which called
206 the generic trampoline code) with a fragment that calls directly the
211 /* FIXME: make the patching thread safe */
213 ppc_patch (o - 4, addr);
214 /*g_print ("patching at %p to %p\n", o, addr);*/
/* Rewrite the method-specific fragment: mini prologue, load 'addr' into
   LR, call it, then restore LR/r31/sp and return to the original caller. */
216 ppc_stwu (o, ppc_r1, -16, ppc_r1);
217 ppc_mflr (o, ppc_r0);
218 ppc_stw (o, ppc_r31, 12, ppc_r1);
219 ppc_stw (o, ppc_r0, 20, ppc_r1);
220 ppc_mr (o, ppc_r31, ppc_r1);
222 ppc_lis (o, ppc_r0, (guint32) addr >> 16);
223 ppc_ori (o, ppc_r0, ppc_r0, (guint32) addr & 0xffff);
224 ppc_mtlr (o, ppc_r0);
227 ppc_lwz (o, ppc_r11, 0, ppc_r1);
228 ppc_lwz (o, ppc_r0, 4, ppc_r11);
229 ppc_mtlr (o, ppc_r0);
230 ppc_lwz (o, ppc_r31, -4, ppc_r11);
231 ppc_mr (o, ppc_r1, ppc_r11);
234 mono_arch_flush_icache (start, o - start);
235 g_assert(o - start < METHOD_TRAMPOLINE_SIZE);
/* Run the class (type) initializer for @vtable, then nop-out the 'bl'
   instruction at the call site so the initializer is not invoked again.
   @sp is unused here but matches the ppc_magic_trampoline signature. */
241 ppc_class_init_trampoline (void *vtable, guint32 *code, char *sp)
243 mono_runtime_class_init (vtable);
246 /* This is the 'bl' instruction */
/* Primary opcode 18 = b/bl: replace the direct call with 'ori 0,0,0' (nop). */
249 if (((*code) >> 26) == 18) {
250 ppc_ori (code, 0, 0, 0); /* nop */
251 mono_arch_flush_icache (code, 4);
/* Anything else means the caller was not the expected direct 'bl'. */
254 g_assert_not_reached ();
/* Build (once per trampoline type, cached in statics) the generic PowerPC
   trampoline: saves all argument registers and f1-f8, calls
   ppc_magic_trampoline (or ppc_class_init_trampoline), then restores
   state and tail-jumps to the freshly compiled code. */
260 create_trampoline_code (MonoTrampolineType tramp_type)
262 guint8 *buf, *code = NULL;
263 static guint8* generic_jump_trampoline = NULL;
264 static guint8 *generic_class_init_trampoline = NULL;
/* Return the cached copy if this trampoline type was already built. */
267 switch (tramp_type) {
268 case MONO_TRAMPOLINE_GENERIC:
269 if (mono_generic_trampoline_code)
270 return mono_generic_trampoline_code;
272 case MONO_TRAMPOLINE_JUMP:
273 if (generic_jump_trampoline)
274 return generic_jump_trampoline;
276 case MONO_TRAMPOLINE_CLASS_INIT:
277 if (generic_class_init_trampoline)
278 return generic_class_init_trampoline;
283 /* Now we'll create in 'buf' the PowerPC trampoline code. This
284 is the trampoline code common to all methods */
286 code = buf = g_malloc(512);
288 /*-----------------------------------------------------------
289 STEP 0: First create a non-standard function prologue with a
290 stack size big enough to save our registers:
292 lr (We'll be calling functions here, so we
294 r0 (See ppc_magic_trampoline)
295 r1 (sp) (Stack pointer - must save)
296 r3-r10 Function arguments.
297 r11-r31 (See ppc_magic_trampoline)
298 method in r11 (See ppc_magic_trampoline)
300 This prologue is non-standard because r0 is not saved here - it
301 was saved in the method-specific trampoline code
302 -----------------------------------------------------------*/
/* Allocate STACK bytes and store the back-chain pointer at 0(r1). */
304 ppc_stwu (buf, ppc_r1, -STACK, ppc_r1);
306 /* Save r0 before modifying it - we will need its contents in
307 'ppc_magic_trampoline' */
308 ppc_stw (buf, ppc_r0, STACK - 8, ppc_r1);
310 ppc_stw (buf, ppc_r31, STACK - 4, ppc_r1);
311 ppc_mr (buf, ppc_r31, ppc_r1);
313 /* Now save our registers. */
314 ppc_stw (buf, ppc_r3, STACK - 12, ppc_r1);
315 ppc_stw (buf, ppc_r4, STACK - 16, ppc_r1);
316 ppc_stw (buf, ppc_r5, STACK - 20, ppc_r1);
317 ppc_stw (buf, ppc_r6, STACK - 24, ppc_r1);
318 ppc_stw (buf, ppc_r7, STACK - 28, ppc_r1);
319 ppc_stw (buf, ppc_r8, STACK - 32, ppc_r1);
320 ppc_stw (buf, ppc_r9, STACK - 36, ppc_r1);
321 ppc_stw (buf, ppc_r10, STACK - 40, ppc_r1);
322 /* STACK - 44 contains r11, which is set in the method-specific
323 part of the trampoline (see below this 'if' block) */
324 ppc_stw (buf, ppc_r12, STACK - 48, ppc_r1);
325 ppc_stw (buf, ppc_r13, STACK - 52, ppc_r1);
326 ppc_stw (buf, ppc_r14, STACK - 56, ppc_r1);
327 ppc_stw (buf, ppc_r15, STACK - 60, ppc_r1);
328 ppc_stw (buf, ppc_r16, STACK - 64, ppc_r1);
329 ppc_stw (buf, ppc_r17, STACK - 68, ppc_r1);
330 ppc_stw (buf, ppc_r18, STACK - 72, ppc_r1);
331 ppc_stw (buf, ppc_r19, STACK - 76, ppc_r1);
332 ppc_stw (buf, ppc_r20, STACK - 80, ppc_r1);
333 ppc_stw (buf, ppc_r21, STACK - 84, ppc_r1);
334 ppc_stw (buf, ppc_r22, STACK - 88, ppc_r1);
335 ppc_stw (buf, ppc_r23, STACK - 92, ppc_r1);
336 ppc_stw (buf, ppc_r24, STACK - 96, ppc_r1);
337 ppc_stw (buf, ppc_r25, STACK - 100, ppc_r1);
338 ppc_stw (buf, ppc_r26, STACK - 104, ppc_r1);
339 ppc_stw (buf, ppc_r27, STACK - 108, ppc_r1);
340 ppc_stw (buf, ppc_r28, STACK - 112, ppc_r1);
341 ppc_stw (buf, ppc_r29, STACK - 116, ppc_r1);
342 ppc_stw (buf, ppc_r30, STACK - 120, ppc_r1);
343 /* Save 'method' pseudo-parameter - the one passed in r11 */
344 ppc_stw (buf, ppc_r11, STACK - 124, ppc_r1);
346 /* Save the FP registers */
/* FP save area starts just below the integer saves; the loop presumably
   advances 'offset' by 8 per register on an elided line — TODO confirm. */
347 offset = 124 + 4 + 8;
348 for (i = ppc_f1; i <= ppc_f8; ++i) {
349 ppc_stfd (buf, i, STACK - offset, ppc_r1);
353 /*----------------------------------------------------------
354 STEP 1: call 'mono_get_lmf_addr()' to get the address of our
355 LMF. We'll need to restore it after the call to
356 'ppc_magic_trampoline' and before the call to the native
358 ----------------------------------------------------------*/
360 /* Calculate the address and make the call. Keep in mind that
361 we're using r0, so we'll have to restore it before calling
362 'ppc_magic_trampoline' */
363 ppc_lis (buf, ppc_r0, (guint32) mono_get_lmf_addr >> 16);
364 ppc_ori (buf, ppc_r0, ppc_r0, (guint32) mono_get_lmf_addr & 0xffff);
365 ppc_mtlr (buf, ppc_r0);
368 /* XXX Update LMF !!! */
370 /*----------------------------------------------------------
371 STEP 2: call 'ppc_magic_trampoline()', who will compile the
372 code and fix the method vtable entry for us
373 ----------------------------------------------------------*/
377 /* Arg 1: MonoMethod *method. It was put in r11 by the
378 method-specific trampoline code, and then saved before the call
379 to mono_get_lmf_addr()'. Restore r11, by the way :-) */
380 if (tramp_type == MONO_TRAMPOLINE_JUMP) {
381 ppc_li (buf, ppc_r3, 0);
383 ppc_lwz (buf, ppc_r3, STACK - 124, ppc_r1);
384 ppc_lwz (buf, ppc_r11, STACK - 44, ppc_r1);
386 /* Arg 2: code (next address to the instruction that called us) */
387 if (tramp_type == MONO_TRAMPOLINE_JUMP) {
388 ppc_li (buf, ppc_r4, 0);
390 ppc_lwz (buf, ppc_r4, STACK + PPC_RET_ADDR_OFFSET, ppc_r1);
392 /* Arg 3: stack pointer */
393 ppc_mr (buf, ppc_r5, ppc_r1);
395 /* Calculate call address, restore r0 and call
396 'ppc_magic_trampoline'. Return value will be in r3 */
397 if (tramp_type == MONO_TRAMPOLINE_CLASS_INIT) {
398 ppc_lis (buf, ppc_r0, (guint32) ppc_class_init_trampoline >> 16);
399 ppc_ori (buf, ppc_r0, ppc_r0, (guint32) ppc_class_init_trampoline & 0xffff);
401 ppc_lis (buf, ppc_r0, (guint32) ppc_magic_trampoline >> 16);
402 ppc_ori (buf, ppc_r0, ppc_r0, (guint32) ppc_magic_trampoline & 0xffff);
404 ppc_mtlr (buf, ppc_r0);
405 ppc_lwz (buf, ppc_r0, STACK - 8, ppc_r1);
408 /* OK, code address is now on r3. Move it to r0, so that we
409 can restore r3 and use it from r0 later */
410 ppc_mr (buf, ppc_r0, ppc_r3);
413 /*----------------------------------------------------------
414 STEP 3: Restore the LMF
415 ----------------------------------------------------------*/
419 /*----------------------------------------------------------
420 STEP 4: call the compiled method
421 ----------------------------------------------------------*/
423 /* Restore registers */
425 ppc_lwz (buf, ppc_r3, STACK - 12, ppc_r1);
426 ppc_lwz (buf, ppc_r4, STACK - 16, ppc_r1);
427 ppc_lwz (buf, ppc_r5, STACK - 20, ppc_r1);
428 ppc_lwz (buf, ppc_r6, STACK - 24, ppc_r1);
429 ppc_lwz (buf, ppc_r7, STACK - 28, ppc_r1);
430 ppc_lwz (buf, ppc_r8, STACK - 32, ppc_r1);
431 ppc_lwz (buf, ppc_r9, STACK - 36, ppc_r1);
432 ppc_lwz (buf, ppc_r10, STACK - 40, ppc_r1);
434 /* Restore the FP registers */
435 offset = 124 + 4 + 8;
436 for (i = ppc_f1; i <= ppc_f8; ++i) {
437 ppc_lfd (buf, i, STACK - offset, ppc_r1);
440 /* We haven't touched any of these, so there's no need to
443 ppc_lwz (buf, ppc_r14, STACK - 56, ppc_r1);
444 ppc_lwz (buf, ppc_r15, STACK - 60, ppc_r1);
445 ppc_lwz (buf, ppc_r16, STACK - 64, ppc_r1);
446 ppc_lwz (buf, ppc_r17, STACK - 68, ppc_r1);
447 ppc_lwz (buf, ppc_r18, STACK - 72, ppc_r1);
448 ppc_lwz (buf, ppc_r19, STACK - 76, ppc_r1);
449 ppc_lwz (buf, ppc_r20, STACK - 80, ppc_r1);
450 ppc_lwz (buf, ppc_r21, STACK - 84, ppc_r1);
451 ppc_lwz (buf, ppc_r22, STACK - 88, ppc_r1);
452 ppc_lwz (buf, ppc_r23, STACK - 92, ppc_r1);
453 ppc_lwz (buf, ppc_r24, STACK - 96, ppc_r1);
454 ppc_lwz (buf, ppc_r25, STACK - 100, ppc_r1);
455 ppc_lwz (buf, ppc_r26, STACK - 104, ppc_r1);
456 ppc_lwz (buf, ppc_r27, STACK - 108, ppc_r1);
457 ppc_lwz (buf, ppc_r28, STACK - 112, ppc_r1);
458 ppc_lwz (buf, ppc_r29, STACK - 116, ppc_r1);
459 ppc_lwz (buf, ppc_r30, STACK - 120, ppc_r1);
462 /* Non-standard function epilogue. Instead of doing a proper
463 return, we just call the compiled code, so
464 that, when it finishes, the method returns here. */
467 /* Restore stack pointer, r31, LR and jump to the code */
468 ppc_lwz (buf, ppc_r1, 0, ppc_r1);
469 ppc_lwz (buf, ppc_r31, -4, ppc_r1);
470 ppc_lwz (buf, ppc_r11, PPC_RET_ADDR_OFFSET, ppc_r1);
471 ppc_mtlr (buf, ppc_r11);
472 ppc_mtctr (buf, ppc_r0);
/* bcctr 20,0: unconditional jump to CTR (the compiled method). */
473 ppc_bcctr (buf, 20, 0);
475 ppc_mtlr (buf, ppc_r0);
478 /* Restore stack pointer, r31, LR and return to caller */
479 ppc_lwz (buf, ppc_r11, 0, ppc_r1);
480 ppc_lwz (buf, ppc_r31, -4, ppc_r11);
481 ppc_mr (buf, ppc_r1, ppc_r11);
482 ppc_lwz (buf, ppc_r0, 4, ppc_r1);
483 ppc_mtlr (buf, ppc_r0);
487 /* Flush instruction cache, since we've generated code */
488 mono_arch_flush_icache (code, buf - code);
491 g_assert ((buf - code) <= 512);
/* Cache the finished trampoline under its type before returning. */
494 switch (tramp_type) {
495 case MONO_TRAMPOLINE_GENERIC:
496 mono_generic_trampoline_code = code;
498 case MONO_TRAMPOLINE_JUMP:
499 generic_jump_trampoline = code;
501 case MONO_TRAMPOLINE_CLASS_INIT:
502 generic_class_init_trampoline = code;
/* Jump-trampoline entry point: compiles @method eagerly (no deferred
   trampoline is built here) and returns the result of looking the compiled
   code up in the JIT info table for the current domain — NOTE(review):
   that is a MonoJitInfo lookup, not a raw code pointer; confirm callers
   expect this. */
510 mono_arch_create_jump_trampoline (MonoMethod *method)
512 void *code = mono_compile_method (method);
513 /*g_print ("called jump trampoline for %s:%s\n", method->klass->name, method->name);*/
514 return mono_jit_info_table_find (mono_domain_get (), code);
518 * arch_create_jit_trampoline:
519 * @method: pointer to the method info
521 * Creates a trampoline function for virtual methods. If the created
522 * code is called it first starts JIT compilation of method,
523 * and then calls the newly created method. It also replaces the
524 * corresponding vtable entry (see ppc_magic_trampoline).
526 * A trampoline consists of two parts: a main fragment, shared by all method
527 * trampolines, and some code specific to each method, which hard-codes a
528 * reference to that method and then calls the main fragment.
530 * The main fragment contains a call to 'ppc_magic_trampoline', which performs
531 * call to the JIT compiler and substitutes the method-specific fragment with
532 * some code that directly calls the JIT-compiled method.
534 * Returns: a pointer to the newly created code
/* Build the method-specific trampoline fragment for @method: it stashes
   r11, saves LR, loads the generic trampoline address into LR and puts
   'method' in r11 before flying off to the shared code (see the comment
   block above). */
537 mono_arch_create_jit_trampoline (MonoMethod *method)
540 static guint8 *vc = NULL;
542 /* previously created trampoline code */
/* Synchronized methods are JITted via their synchronization wrapper. */
546 if (method->iflags & METHOD_IMPL_ATTRIBUTE_SYNCHRONIZED)
547 return mono_arch_create_jit_trampoline (mono_marshal_get_synchronized_wrapper (method));
549 vc = create_trampoline_code (MONO_TRAMPOLINE_GENERIC);
551 /* This is the method-specific part of the trampoline. Its purpose is
552 to provide the generic part with the MonoMethod *method pointer. We'll
553 use r11 to keep that value, for instance. However, the generic part of
554 the trampoline relies on r11 having the same value it had before coming
555 here, so we must save it before. */
556 //code = buf = g_malloc(METHOD_TRAMPOLINE_SIZE);
557 // FIXME: should pass the domain down to this function
558 code = buf = mono_code_manager_reserve (mono_domain_get ()->code_mp, METHOD_TRAMPOLINE_SIZE);
560 /* Save r11. There's nothing magic in the '44', its just an arbitrary
561 position - see above */
/* -44(r1) here becomes STACK-44(r1) after the generic prologue's
   'stwu r1,-STACK(r1)', which is where ppc_magic_trampoline reads it. */
562 ppc_stw (buf, ppc_r11, -44, ppc_r1);
564 /* Now save LR - we'll overwrite it now */
565 ppc_mflr (buf, ppc_r11);
566 ppc_stw (buf, ppc_r11, PPC_RET_ADDR_OFFSET, ppc_r1);
568 /* Prepare the jump to the generic trampoline code.*/
569 ppc_lis (buf, ppc_r11, (guint32) vc >> 16);
570 ppc_ori (buf, ppc_r11, ppc_r11, (guint32) vc & 0xffff);
571 ppc_mtlr (buf, ppc_r11);
573 /* And finally put 'method' in r11 and fly! */
574 ppc_lis (buf, ppc_r11, (guint32) method >> 16);
575 ppc_ori (buf, ppc_r11, ppc_r11, (guint32) method & 0xffff);
578 /* Flush instruction cache, since we've generated code */
579 mono_arch_flush_icache (code, buf - code);
582 g_assert ((buf - code) <= METHOD_TRAMPOLINE_SIZE);
584 /* Store trampoline address */
587 mono_jit_stats.method_trampolines++;
595 * x86_magic_trampoline:
596 * @eax: saved x86 register
597 * @ecx: saved x86 register
598 * @edx: saved x86 register
599 * @esi: saved x86 register
600 * @edi: saved x86 register
601 * @ebx: saved x86 register
602 * @code: pointer into caller code
603 * @method: the method to translate
605 * This method is called by the trampoline functions for virtual
606 * methods. It inspects the caller code to find the address of the
607 * vtable slot, then calls the JIT compiler and writes the address
608 * of the compiled method back to the vtable. All virtual methods
609 * are called with: x86_call_membase (inst, basereg, disp). We always
610 * use 32 bit displacement to ensure that the length of the call
611 * instruction is 6 bytes. We need to get the value of the basereg
612 * and the constant displacement.
/* NOTE(review): x86 trampoline code inside tramp-ppc.c — presumably dead
   leftover kept under an elided preprocessor guard; confirm it is never
   compiled for the PPC target.
   Compiles @m, then decodes the caller's call instruction at @code to find
   the vtable slot (basereg + displacement) and patches it with the
   compiled address. The saved register parameters supply basereg values. */
615 x86_magic_trampoline (int eax, int ecx, int edx, int esi, int edi,
616 int ebx, guint8 *code, MonoMethod *m)
623 addr = mono_compile_method (m);
626 /* go to the start of the call instruction
628 * address_byte = (m << 6) | (o << 3) | reg
629 * call opcode: 0xff address_byte displacement
/* call *disp8(reg): 0xff /2 with mod=01 (8-bit displacement). */
634 if ((code [1] != 0xe8) && (code [3] == 0xff) && ((code [4] & 0x18) == 0x10) && ((code [4] >> 6) == 1)) {
635 reg = code [4] & 0x07;
636 disp = (signed char)code [5];
/* call *disp32(reg): mod=10 (32-bit displacement). */
638 if ((code [0] == 0xff) && ((code [1] & 0x18) == 0x10) && ((code [1] >> 6) == 2)) {
639 reg = code [1] & 0x07;
640 disp = *((gint32*)(code + 2));
/* Direct relative call (0xe8): patch the rel32 operand in place. */
641 } else if ((code [1] == 0xe8)) {
642 *((guint32*)(code + 2)) = (guint)addr - ((guint)code + 1) - 5;
644 } else if ((code [4] == 0xff) && (((code [5] >> 6) & 0x3) == 0) && (((code [5] >> 3) & 0x7) == 2)) {
646 * This is an interface call: should check the above code can't catch it earlier
647 * 8b 40 30 mov 0x30(%eax),%eax
651 reg = code [5] & 0x07;
653 printf ("Invalid trampoline sequence: %x %x %x %x %x %x %x\n", code [0], code [1], code [2], code [3],
654 code [4], code [5], code [6]);
655 g_assert_not_reached ();
679 g_assert_not_reached ();
/* Write the (possibly unbox-wrapped) compiled address into the slot. */
684 if (m->klass->valuetype) {
685 return *((gpointer *)o) = get_unbox_trampoline (m, addr);
687 return *((gpointer *)o) = addr;
692 * mono_arch_create_jit_trampoline:
693 * @method: pointer to the method info
695 * Creates a trampoline function for virtual methods. If the created
696 * code is called it first starts JIT compilation of method,
697 * and then calls the newly created method. It also replaces the
698 * corresponding vtable entry (see x86_magic_trampoline).
700 * Returns: a pointer to the newly created code
/* NOTE(review): a second definition of mono_arch_create_jit_trampoline —
   this is the x86 variant, duplicating the PPC one defined earlier in this
   file. Presumably one of the two is excluded by an elided preprocessor
   guard; confirm, otherwise this is a duplicate-symbol error.
   Builds (once) the shared x86 generic trampoline, then emits a 16-byte
   method-specific stub that pushes 'method' and jumps to it. */
703 mono_arch_create_jit_trampoline (MonoMethod *method)
707 /* previously created trampoline code */
711 if (!mono_generic_trampoline_code) {
712 mono_generic_trampoline_code = buf = g_malloc (256);
713 /* save caller save regs because we need to do a call */
714 x86_push_reg (buf, X86_EDX);
715 x86_push_reg (buf, X86_EAX);
716 x86_push_reg (buf, X86_ECX);
720 /* save the IP (caller ip) */
721 x86_push_membase (buf, X86_ESP, 16);
723 x86_push_reg (buf, X86_EBX);
724 x86_push_reg (buf, X86_EDI);
725 x86_push_reg (buf, X86_ESI);
726 x86_push_reg (buf, X86_EBP);
728 /* save method info */
729 x86_push_membase (buf, X86_ESP, 32);
730 /* get the address of lmf for the current thread */
731 x86_call_code (buf, mono_get_lmf_addr);
733 x86_push_reg (buf, X86_EAX);
734 /* push *lfm (previous_lmf) */
735 x86_push_membase (buf, X86_EAX, 0);
/* Link the new LMF: *(lmf_addr) = esp-relative LMF just built. */
737 x86_mov_membase_reg (buf, X86_EAX, 0, X86_ESP, 4);
740 /* push the method info */
741 x86_push_membase (buf, X86_ESP, 44);
742 /* push the return address onto the stack */
743 x86_push_membase (buf, X86_ESP, 52);
745 /* save all register values */
746 x86_push_reg (buf, X86_EBX);
747 x86_push_reg (buf, X86_EDI);
748 x86_push_reg (buf, X86_ESI);
749 x86_push_membase (buf, X86_ESP, 64); /* EDX */
750 x86_push_membase (buf, X86_ESP, 64); /* ECX */
751 x86_push_membase (buf, X86_ESP, 64); /* EAX */
/* Call the C patcher; pop its 8 dword arguments afterwards. */
753 x86_call_code (buf, x86_magic_trampoline);
754 x86_alu_reg_imm (buf, X86_ADD, X86_ESP, 8*4);
756 /* restore LMF start */
757 /* ebx = previous_lmf */
758 x86_pop_reg (buf, X86_EBX);
760 x86_pop_reg (buf, X86_EDI);
761 /* *(lmf) = previous_lmf */
762 x86_mov_membase_reg (buf, X86_EDI, 0, X86_EBX, 4);
763 /* discard method info */
764 x86_pop_reg (buf, X86_ESI);
765 /* restore caller saved regs */
766 x86_pop_reg (buf, X86_EBP);
767 x86_pop_reg (buf, X86_ESI);
768 x86_pop_reg (buf, X86_EDI);
769 x86_pop_reg (buf, X86_EBX);
770 /* discard save IP */
771 x86_alu_reg_imm (buf, X86_ADD, X86_ESP, 4);
772 /* restore LMF end */
774 x86_alu_reg_imm (buf, X86_ADD, X86_ESP, 16);
776 /* call the compiled method */
/* EAX holds x86_magic_trampoline's return value (the compiled code). */
777 x86_jump_reg (buf, X86_EAX);
779 g_assert ((buf - mono_generic_trampoline_code) <= 256);
/* Method-specific stub: push 'method', jump to the generic trampoline. */
782 code = buf = g_malloc (16);
783 x86_push_imm (buf, method);
784 x86_jump_code (buf, mono_generic_trampoline_code);
785 g_assert ((buf - code) <= 16);
787 /* store trampoline address */
790 //mono_jit_stats.method_trampolines++;
798 * mono_arch_create_class_init_trampoline:
799 * @vtable: the type to initialize
801 * Creates a trampoline function to run a type initializer.
802 * If the trampoline is called, it calls mono_runtime_class_init with the
803 * given vtable, then patches the caller code so it does not get called any
806 * Returns: a pointer to the newly created code
/* Build a small trampoline that runs the class initializer for @vtable
   exactly once (ppc_class_init_trampoline nops out the call site).
   NOTE(review): two alternative code sequences appear below — a direct
   call to ppc_class_init_trampoline (lines 824-836) and a jump through
   the generic CLASS_INIT trampoline (lines 839-858). They are presumably
   separated by elided #if lines; confirm which one is compiled. */
809 mono_arch_create_class_init_trampoline (MonoVTable *vtable)
811 guint8 *code, *buf, *tramp;
813 tramp = create_trampoline_code (MONO_TRAMPOLINE_CLASS_INIT);
815 /* This is the method-specific part of the trampoline. Here the payload
816 passed to the generic part is the vtable (not a MonoMethod*). We'll
817 use r11 to keep that value, for instance. However, the generic part of
818 the trampoline relies on r11 having the same value it had before coming
819 here, so we must save it before. */
820 //code = buf = g_malloc(METHOD_TRAMPOLINE_SIZE);
821 code = buf = mono_code_manager_reserve (vtable->domain->code_mp, METHOD_TRAMPOLINE_SIZE);
/* Variant 1: standard mini-frame, then call ppc_class_init_trampoline
   (vtable in r3, r5 = 0) directly and return. */
824 ppc_mflr (buf, ppc_r4);
825 ppc_stw (buf, ppc_r4, PPC_RET_ADDR_OFFSET, ppc_sp);
826 ppc_stwu (buf, ppc_sp, -32, ppc_sp);
827 ppc_load (buf, ppc_r3, vtable);
828 ppc_load (buf, ppc_r5, 0);
830 ppc_load (buf, ppc_r0, ppc_class_init_trampoline);
831 ppc_mtlr (buf, ppc_r0);
834 ppc_lwz (buf, ppc_r0, 32 + PPC_RET_ADDR_OFFSET, ppc_sp);
835 ppc_mtlr (buf, ppc_r0);
836 ppc_addic (buf, ppc_sp, ppc_sp, 32);
/* Variant 2: mirror of mono_arch_create_jit_trampoline, with 'vtable'
   as the r11 payload instead of 'method'. */
839 /* Save r11. There's nothing magic in the '44', its just an arbitrary
840 position - see above */
841 ppc_stw (buf, ppc_r11, -44, ppc_r1);
843 /* Now save LR - we'll overwrite it now */
844 ppc_mflr (buf, ppc_r11);
845 ppc_stw (buf, ppc_r11, 4, ppc_r1);
846 ppc_stw (buf, ppc_r11, PPC_RET_ADDR_OFFSET, ppc_r1);
848 /* Prepare the jump to the generic trampoline code.*/
849 ppc_lis (buf, ppc_r11, (guint32) tramp >> 16);
850 ppc_ori (buf, ppc_r11, ppc_r11, (guint32) tramp & 0xffff);
851 ppc_mtlr (buf, ppc_r11);
853 /* And finally put 'vtable' in r11 and fly! */
854 ppc_lis (buf, ppc_r11, (guint32) vtable >> 16);
855 ppc_ori (buf, ppc_r11, ppc_r11, (guint32) vtable & 0xffff);
857 ppc_lwz (buf, ppc_r0, PPC_RET_ADDR_OFFSET, ppc_r1);
858 ppc_mtlr (buf, ppc_r0);
863 /* Flush instruction cache, since we've generated code */
864 mono_arch_flush_icache (code, buf - code);
867 g_assert ((buf - code) <= METHOD_TRAMPOLINE_SIZE);
869 mono_jit_stats.method_trampolines++;
875 * This method is only called when running in the Mono Debugger.
878 mono_debugger_create_notification_function (gpointer *notification_address)
882 ptr = buf = g_malloc0 (16);
884 if (notification_address)
885 *notification_address = buf;