2 * trampoline.c: JIT trampoline code
5 * Dietmar Maurer (dietmar@ximian.com)
7 * (C) 2001 Ximian, Inc.
13 #include <mono/arch/x86/x86-codegen.h>
14 #include <mono/metadata/appdomain.h>
15 #include <mono/metadata/tabledefs.h>
21 * get_unbox_trampoline:
23 * @addr: pointer to native code for @m
25 * when value type methods are called through the vtable we need to unbox the
26 * this argument. This method returns a pointer to a trampoline which does
27 * unboxing before calling the method
30 get_unbox_trampoline (MonoMethod *m, gpointer addr)
/* NOTE(review): when the method returns a value type, a hidden return-buffer
 * pointer occupies the first argument slot and pushes `this' one slot up;
 * the this_pos setup is not visible in this chunk, but the check below
 * presumably selects the adjusted offset for that case -- TODO confirm. */
35 if (!m->signature->ret->byref && m->signature->ret->type == MONO_TYPE_VALUETYPE)
/* 16 bytes is enough for one add-imm-to-membase plus one jump. */
38 start = code = g_malloc (16);
/* Advance the stacked `this' pointer past the MonoObject header so the
 * callee receives a pointer to the raw value-type data (the "unboxing"). */
40 x86_alu_membase_imm (code, X86_ADD, X86_ESP, this_pos, sizeof (MonoObject));
/* Tail-jump to the real native code; control never returns here. */
41 x86_jump_code (code, addr);
/* Guard against overrunning the fixed 16-byte allocation above. */
42 g_assert ((code - start) < 16);
48 * x86_magic_trampoline:
49 * @eax: saved x86 register
50 * @ecx: saved x86 register
51 * @edx: saved x86 register
52 * @esi: saved x86 register
53 * @edi: saved x86 register
54 * @ebx: saved x86 register
55 * @code: pointer into caller code
56 * @method: the method to translate
58 * This method is called by the trampoline functions for virtual
59 * methods. It inspects the caller code to find the address of the
60 * vtable slot, then calls the JIT compiler and writes the address
61 * of the compiled method back to the vtable. All virtual methods
62 * are called with: x86_call_membase (inst, basereg, disp). We always
63 * use 32 bit displacement to ensure that the length of the call
64 * instruction is 6 bytes. We need to get the value of the basereg
65 * and the constant displacement.
68 x86_magic_trampoline (int eax, int ecx, int edx, int esi, int edi,
69 int ebx, const guint8 *code, MonoMethod *m)
/* JIT-compile the target method under the global metadata lock. */
76 EnterCriticalSection (metadata_section);
77 addr = arch_compile_method (m);
78 LeaveCriticalSection (metadata_section);
82 /* go to the start of the call instruction
84 * address_byte = (m << 6) | (o << 3) | reg
85 * call opcode: 0xff address_byte displacement
/* NOTE(review): this branch matches an indirect call with an 8-bit
 * displacement (0xff, ModR/M mod == 01, /2).  The exact offset of `code'
 * relative to the call instruction is fixed by the trampoline stub that
 * pushed it, which is not fully visible here -- verify the index
 * arithmetic against that stub before changing it. */
90 if ((code [1] != 0xe8) && (code [3] == 0xff) && ((code [4] & 0x18) == 0x10) && ((code [4] >> 6) == 1)) {
91 reg = code [4] & 0x07;
92 disp = (signed char)code [5];
/* Indirect call with a 32-bit displacement (ModR/M mod == 10), the form
 * emitted by x86_call_membase as described in the header comment. */
94 if ((code [0] == 0xff) && ((code [1] & 0x18) == 0x10) && ((code [1] >> 6) == 2)) {
95 reg = code [1] & 0x07;
96 disp = *((gint32*)(code + 2));
/* Direct call (0xe8 rel32) starting at code + 1: patch the relative
 * displacement in place so future calls go straight to the compiled
 * code, bypassing this trampoline entirely. */
97 } else if ((code [1] == 0xe8)) {
98 *((guint32*)(code + 2)) = (guint)addr - ((guint)code + 1) - 5;
/* Unrecognized caller instruction pattern: dump the bytes and abort. */
101 printf ("%x %x %x %x %x %x \n", code [0], code [1], code [2], code [3],
103 g_assert_not_reached ();
/* NOTE(review): the control flow around this assert is not visible in
 * this chunk; presumably an unsupported basereg/addressing case. */
127 g_assert_not_reached ();
/* Write the compiled address back into the vtable slot so subsequent
 * virtual calls skip the trampoline.  Value-type methods get an
 * unboxing trampoline that adjusts `this' before entry. */
132 if (m->klass->valuetype) {
133 return *((gpointer *)o) = get_unbox_trampoline (m, addr);
135 return *((gpointer *)o) = addr;
140 * arch_create_jit_trampoline:
141 * @method: pointer to the method info
143 * Creates a trampoline function for virtual methods. If the created
144 * code is called it first starts JIT compilation of method,
145 * and then calls the newly created method. It also replaces the
146 * corresponding vtable entry (see x86_magic_trampoline).
148 * Returns: a pointer to the newly created code
151 arch_create_jit_trampoline (MonoMethod *method)
153 MonoDomain *domain = mono_domain_get ();
/* Shared magic-trampoline stub; generated once and reused for every method. */
155 static guint8 *vc = NULL;
156 GHashTable *jit_code_hash;
158 /* previously created trampoline code */
162 /* we immediately compile runtime provided functions */
163 if (method->iflags & METHOD_IMPL_ATTRIBUTE_RUNTIME) {
164 method->info = arch_compile_method (method);
168 /* icalls use method->addr */
169 if ((method->iflags & METHOD_IMPL_ATTRIBUTE_INTERNAL_CALL) ||
170 (method->flags & METHOD_ATTRIBUTE_PINVOKE_IMPL)) {
171 method->info = arch_create_native_wrapper (method);
175 /* check if we already have JITed code */
176 if (mono_jit_share_code)
177 jit_code_hash = mono_root_domain->jit_code_hash;
179 jit_code_hash = domain->jit_code_hash;
181 if ((code = g_hash_table_lookup (jit_code_hash, method))) {
182 mono_jit_stats.methods_lookups++;
/* First call: emit the shared stub.  NOTE(review): the guard on `vc'
 * (presumably `if (!vc)') is not visible in this chunk -- confirm this
 * section runs only once. */
187 vc = buf = g_malloc (256);
189 /* save caller save regs because we need to do a call */
190 x86_push_reg (buf, X86_EDX);
191 x86_push_reg (buf, X86_EAX);
192 x86_push_reg (buf, X86_ECX);
196 /* save the IP (caller ip) */
197 x86_push_membase (buf, X86_ESP, 16);
199 x86_push_reg (buf, X86_EBX);
200 x86_push_reg (buf, X86_EDI);
201 x86_push_reg (buf, X86_ESI);
202 x86_push_reg (buf, X86_EBP);
204 /* save method info */
/* NOTE(review): every ESP-relative offset below (32, 44, 52, 64) counts
 * the slots pushed so far; re-verify them all if the push sequence is
 * ever changed. */
205 x86_push_membase (buf, X86_ESP, 32);
206 /* get the address of lmf for the current thread */
207 x86_call_code (buf, arch_get_lmf_addr);
209 x86_push_reg (buf, X86_EAX);
210 /* push *lmf (previous_lmf) */
211 x86_push_membase (buf, X86_EAX, 0);
/* *lmf = esp: link the frame just built on the stack into the LMF chain */
213 x86_mov_membase_reg (buf, X86_EAX, 0, X86_ESP, 4);
216 /* push the method info */
217 x86_push_membase (buf, X86_ESP, 44);
218 /* push the return address onto the stack */
219 x86_push_membase (buf, X86_ESP, 52);
221 /* save all register values */
222 x86_push_reg (buf, X86_EBX);
223 x86_push_reg (buf, X86_EDI);
224 x86_push_reg (buf, X86_ESI);
225 x86_push_membase (buf, X86_ESP, 64); /* EDX */
226 x86_push_membase (buf, X86_ESP, 64); /* ECX */
227 x86_push_membase (buf, X86_ESP, 64); /* EAX */
/* x86_magic_trampoline (eax, ecx, edx, esi, edi, ebx, code, method);
 * the compiled code address comes back in EAX. */
229 x86_call_code (buf, x86_magic_trampoline);
230 x86_alu_reg_imm (buf, X86_ADD, X86_ESP, 8*4);
232 /* restore LMF start */
233 /* ebx = previous_lmf */
234 x86_pop_reg (buf, X86_EBX);
/* edi = address of the thread's lmf pointer (pushed above) */
236 x86_pop_reg (buf, X86_EDI);
237 /* *(lmf) = previous_lmf */
238 x86_mov_membase_reg (buf, X86_EDI, 0, X86_EBX, 4);
239 /* discard method info */
240 x86_pop_reg (buf, X86_ESI);
241 /* restore caller saved regs */
242 x86_pop_reg (buf, X86_EBP);
243 x86_pop_reg (buf, X86_ESI);
244 x86_pop_reg (buf, X86_EDI);
245 x86_pop_reg (buf, X86_EBX);
246 /* discard saved IP */
247 x86_alu_reg_imm (buf, X86_ADD, X86_ESP, 4);
248 /* restore LMF end */
/* pop the caller-save regs + caller ip slots pushed on entry */
250 x86_alu_reg_imm (buf, X86_ADD, X86_ESP, 16);
252 /* call the compiled method */
253 x86_jump_reg (buf, X86_EAX);
/* The shared stub must fit the 256-byte allocation above. */
255 g_assert ((buf - vc) <= 256);
/* Per-method stub: push the method pointer and jump to the shared stub. */
258 code = buf = g_malloc (16);
259 x86_push_imm (buf, method);
260 x86_jump_code (buf, vc);
261 g_assert ((buf - code) <= 16);
263 /* store trampoline address */
266 mono_jit_stats.method_trampolines++;