2 * trampoline.c: JIT trampoline code
5 * Dietmar Maurer (dietmar@ximian.com)
7 * (C) 2001 Ximian, Inc.
13 #include <mono/metadata/appdomain.h>
14 #include <mono/metadata/tabledefs.h>
15 #include <mono/arch/x86/x86-codegen.h>
21 * get_unbox_trampoline:
23 * @addr: pointer to native code for @m
25 * when value type methods are called through the vtable we need to unbox the
26 * this argument. This method returns a pointer to a trampoline which does
27 * unboxing before calling the method
30 get_unbox_trampoline (MonoMethod *m, gpointer addr)
/* NOTE(review): fragmented extract -- the return type, braces, and the
 * declarations of `code', `start' and `this_pos' are not visible here.
 * Purpose (from the header comment above): build a small stub that
 * un-boxes the `this' argument before tail-jumping to @addr. */
/* NOTE(review): presumably valuetype-returning methods pass a hidden
 * return-buffer argument first, shifting the `this' slot -- confirm
 * what the (missing) this_pos branch body does. */
35 if (!m->signature->ret->byref && m->signature->ret->type == MONO_TYPE_VALUETYPE)
38 start = code = g_malloc (16);
/* Advance the stacked `this' pointer past the MonoObject header so the
 * callee receives a pointer to the raw value data, then jump to the
 * real native code for the method. */
40 x86_alu_membase_imm (code, X86_ADD, X86_ESP, this_pos, sizeof (MonoObject));
41 x86_jump_code (code, addr);
/* The emitted add+jmp sequence must fit the 16-byte buffer above. */
42 g_assert ((code - start) < 16);
48 * x86_magic_trampoline:
49 * @eax: saved x86 register
50 * @ecx: saved x86 register
51 * @edx: saved x86 register
52 * @esi: saved x86 register
53 * @edi: saved x86 register
54 * @ebx: saved x86 register
55 * @code: pointer into caller code
56 * @method: the method to translate
58 * This method is called by the trampoline functions for virtual
59 * methods. It inspects the caller code to find the address of the
60 * vtable slot, then calls the JIT compiler and writes the address
61 * of the compiled method back to the vtable. All virtual methods
62 * are called with: x86_call_membase (inst, basereg, disp). We always
63 * use 32 bit displacement to ensure that the length of the call
64 * instruction is 6 bytes. We need to get the value of the basereg
65 * and the constant displacement.
68 x86_magic_trampoline (int eax, int ecx, int edx, int esi, int edi,
69 int ebx, guint8 *code, MonoMethod *m)
/* NOTE(review): fragmented extract -- the return type, braces, the
 * declarations of `addr', `reg', `disp' and `o', and original lines
 * 98-130 are not visible here. */
/* JIT-compile the target method; mono_compile_method is serialized
 * under the global metadata critical section. */
76 EnterCriticalSection (metadata_section);
77 addr = mono_compile_method (m);
78 LeaveCriticalSection (metadata_section);
81 /* go to the start of the call instruction
83 * address_byte = (m << 6) | (o << 3) | reg
84 * call opcode: 0xff address_byte displacement
/* Decode the caller's call instruction to locate the vtable slot.
 * (code[n] & 0x18) == 0x10 checks the ModR/M reg/opcode field for /2
 * (CALL r/m32); the top two bits are the mod field: 1 => disp8,
 * 2 => disp32.  NOTE(review): the exact byte offsets assume a
 * particular value of `code' relative to the call site -- the lines
 * establishing that are missing; verify against the caller. */
89 if ((code [1] != 0xe8) && (code [3] == 0xff) && ((code [4] & 0x18) == 0x10) && ((code [4] >> 6) == 1)) {
90 reg = code [4] & 0x07;
91 disp = (signed char)code [5];
93 if ((code [0] == 0xff) && ((code [1] & 0x18) == 0x10) && ((code [1] >> 6) == 2)) {
94 reg = code [1] & 0x07;
95 disp = *((gint32*)(code + 2));
/* Direct call (0xe8 rel32): patch the displacement in place so future
 * calls go straight to the compiled code (rel32 is relative to the end
 * of the 5-byte call instruction). */
96 } else if ((code [1] == 0xe8)) {
97 *((guint32*)(code + 2)) = (guint)addr - ((guint)code + 1) - 5;
/* Unrecognized instruction pattern: dump the bytes and abort. */
100 printf ("%x %x %x %x %x %x \n", code [0], code [1], code [2], code [3],
102 g_assert_not_reached ();
126 g_assert_not_reached ();
/* Write the compiled address back into the vtable slot `o' -- through
 * an unboxing stub for valuetype methods -- and return it so the
 * trampoline can jump there. */
131 if (m->klass->valuetype) {
132 return *((gpointer *)o) = get_unbox_trampoline (m, addr);
134 return *((gpointer *)o) = addr;
139 * arch_create_jit_trampoline:
140 * @method: pointer to the method info
142 * Creates a trampoline function for virtual methods. If the created
143 * code is called it first starts JIT compilation of method,
144 * and then calls the newly created method. It also replaces the
145 * corresponding vtable entry (see x86_magic_trampoline).
147 * Returns: a pointer to the newly created code
150 arch_create_jit_trampoline (MonoMethod *method)
/* NOTE(review): fragmented extract -- the return type, braces, return
 * statements, and the guard around the lazy init of `vc' are not
 * visible here.  Purpose (from the header comment above): return a
 * trampoline that JIT-compiles @method on first call, patches the
 * vtable via x86_magic_trampoline, then runs the compiled code. */
152 MonoDomain *domain = mono_domain_get ();
154 static guint8 *vc = NULL;
155 GHashTable *jit_code_hash;
157 /* previously created trampoline code */
161 /* we immediately compile runtime provided functions */
162 if (method->iflags & METHOD_IMPL_ATTRIBUTE_RUNTIME) {
163 method->info = mono_compile_method (method);
167 /* icalls use method->addr */
168 if ((method->iflags & METHOD_IMPL_ATTRIBUTE_INTERNAL_CALL) ||
169 (method->flags & METHOD_ATTRIBUTE_PINVOKE_IMPL)) {
172 nm = mono_marshal_get_native_wrapper (method);
173 method->info = mono_compile_method (nm);
177 /* check if we already have JITed code */
/* Shared-code mode uses the root domain's hash so all domains see the
 * same compiled methods. */
178 if (mono_jit_share_code)
179 jit_code_hash = mono_root_domain->jit_code_hash;
181 jit_code_hash = domain->jit_code_hash;
183 if ((code = g_hash_table_lookup (jit_code_hash, method))) {
184 mono_jit_stats.methods_lookups++;
/* Lazily build the generic trampoline `vc', shared by every method.
 * NOTE(review): presumably guarded by a NULL check (not visible) --
 * and that lazy init is not obviously thread-safe; confirm callers
 * hold a lock. */
189 vc = buf = g_malloc (256);
190 /* save caller save regs because we need to do a call */
191 x86_push_reg (buf, X86_EDX);
192 x86_push_reg (buf, X86_EAX);
193 x86_push_reg (buf, X86_ECX);
197 /* save the IP (caller ip) */
198 x86_push_membase (buf, X86_ESP, 16);
/* Build an LMF (last managed frame) record on the stack: callee-saved
 * regs, method info, lmf address and previous lmf. */
200 x86_push_reg (buf, X86_EBX);
201 x86_push_reg (buf, X86_EDI);
202 x86_push_reg (buf, X86_ESI);
203 x86_push_reg (buf, X86_EBP);
205 /* save method info */
206 x86_push_membase (buf, X86_ESP, 32);
207 /* get the address of lmf for the current thread */
208 x86_call_code (buf, mono_get_lmf_addr);
210 x86_push_reg (buf, X86_EAX);
211 /* push *lmf (previous_lmf) */
212 x86_push_membase (buf, X86_EAX, 0);
/* install the LMF just built: *lmf_addr = current ESP */
214 x86_mov_membase_reg (buf, X86_EAX, 0, X86_ESP, 4);
217 /* push the method info */
218 x86_push_membase (buf, X86_ESP, 44);
219 /* push the return address onto the stack */
220 x86_push_membase (buf, X86_ESP, 52);
222 /* save all register values */
223 x86_push_reg (buf, X86_EBX);
224 x86_push_reg (buf, X86_EDI);
225 x86_push_reg (buf, X86_ESI);
226 x86_push_membase (buf, X86_ESP, 64); /* EDX */
227 x86_push_membase (buf, X86_ESP, 64); /* ECX */
228 x86_push_membase (buf, X86_ESP, 64); /* EAX */
/* Call the C helper (8 args pushed above), then pop them; EAX now
 * holds the address of the compiled method. */
230 x86_call_code (buf, x86_magic_trampoline);
231 x86_alu_reg_imm (buf, X86_ADD, X86_ESP, 8*4);
233 /* restore LMF start */
234 /* ebx = previous_lmf */
235 x86_pop_reg (buf, X86_EBX);
237 x86_pop_reg (buf, X86_EDI);
238 /* *(lmf) = previous_lmf */
239 x86_mov_membase_reg (buf, X86_EDI, 0, X86_EBX, 4);
240 /* discard method info */
241 x86_pop_reg (buf, X86_ESI);
242 /* restore caller saved regs */
243 x86_pop_reg (buf, X86_EBP);
244 x86_pop_reg (buf, X86_ESI);
245 x86_pop_reg (buf, X86_EDI);
246 x86_pop_reg (buf, X86_EBX);
247 /* discard saved IP */
248 x86_alu_reg_imm (buf, X86_ADD, X86_ESP, 4);
249 /* restore LMF end */
251 x86_alu_reg_imm (buf, X86_ADD, X86_ESP, 16);
253 /* call the compiled method */
254 x86_jump_reg (buf, X86_EAX);
/* Generic stub must fit the 256-byte buffer allocated above. */
256 g_assert ((buf - vc) <= 256);
/* Per-method stub: push the MonoMethod* and jump into the shared
 * generic trampoline. */
259 code = buf = g_malloc (16);
260 x86_push_imm (buf, method);
261 x86_jump_code (buf, vc);
262 g_assert ((buf - code) <= 16);
264 /* store trampoline address */
267 mono_jit_stats.method_trampolines++;