2 * tramp-arm.c: JIT trampoline code for ARM
5 * Paolo Molaro (lupus@ximian.com)
7 * (C) 2001 Ximian, Inc.
13 #include <mono/metadata/appdomain.h>
14 #include <mono/metadata/marshal.h>
15 #include <mono/metadata/tabledefs.h>
16 #include <mono/arch/arm/arm-codegen.h>
/* Cached pointer to a no-op trampoline used to replace class-init calls once
 * a class has been initialized; created lazily (see
 * mono_arch_nullify_plt_entry / mono_arch_get_nullified_class_init_trampoline). */
21 static guint8* nullified_class_init_trampoline;
24 * Return the instruction to jump from code to target, 0 if not
25 * reachable with a single instruction
28 branch_for_target_reachable (guint8 *branch, guint8 *target)
/* ARM reads PC as the address of the current instruction + 8 (pipeline), so
 * the branch displacement is computed relative to branch + 8. */
30 gint diff = target - branch - 8;
/* Branch offsets are encoded in words: the low two bits must be zero. */
31 g_assert ((diff & 3) == 0);
34 return (ARMCOND_AL << ARMCOND_SHIFT) | (ARM_BR_TAG) | (diff >> 2);
36 /* diff between 0 and -33554432 */
/* -33554432 == -2^25 bytes, the most negative displacement expressible in the
 * 24-bit word-scaled signed immediate of an ARM B instruction. */
37 if (diff >= -33554432)
/* Keep only the low 24 bits of the word offset (~0xff000000 == 0x00ffffff). */
38 return (ARMCOND_AL << ARMCOND_SHIFT) | (ARM_BR_TAG) | ((diff >> 2) & ~0xff000000);
44 * mono_arch_get_unbox_trampoline:
45 * @gsctx: the generic sharing context
47 * @addr: pointer to native code for @m
49 * when value type methods are called through the vtable we need to unbox the
50 * this argument. This method returns a pointer to a trampoline which does
51 * unboxing before calling the method
54 mono_arch_get_unbox_trampoline (MonoGenericSharingContext *gsctx, MonoMethod *m, gpointer addr)
58 MonoDomain *domain = mono_domain_get ();
/* NOTE(review): this listing elides lines; `start`, `code` and `this_pos`
 * are presumably declared/assigned in the elided lines — confirm against the
 * full file. */
60 if (MONO_TYPE_ISSTRUCT (mono_method_signature (m)->ret))
/* Reserve 16 bytes of executable memory; the code manager is protected by
 * the domain lock. */
63 mono_domain_lock (domain);
64 start = code = mono_code_manager_reserve (domain->code_mp, 16);
65 mono_domain_unlock (domain);
/* ldr ip, [pc, #4]: load the inline literal (the method address) emitted
 * after the mov below. */
67 ARM_LDR_IMM (code, ARMREG_IP, ARMREG_PC, 4);
/* Skip the MonoObject header so the `this` register points at the unboxed
 * value payload. */
68 ARM_ADD_REG_IMM8 (code, this_pos, this_pos, sizeof (MonoObject));
/* Tail-jump into the method's native code. */
69 ARM_MOV_REG_REG (code, ARMREG_PC, ARMREG_IP);
/* Inline literal consumed by the ldr above. */
70 *(guint32*)code = (guint32)addr;
/* Make the newly emitted instructions visible to the instruction stream. */
72 mono_arch_flush_icache (start, code - start);
73 g_assert ((code - start) <= 16);
74 /*g_print ("unbox trampoline at %d for %s:%s\n", this_pos, m->klass->name, m->name);
75 g_print ("unbox code is at %p for method at %p\n", start, addr);*/
/* Redirect the call site at code_ptr so that it transfers to addr. */
81 mono_arch_patch_callsite (guint8 *method_start, guint8 *code_ptr, guint8 *addr)
83 guint32 *code = (guint32*)code_ptr;
85 /* This is the 'bl' or the 'mov pc' instruction */
89 * Note that methods are called also with the bl opcode.
/* Bits 27-25 == 0b101 identify a B/BL instruction: rewrite its 24-bit
 * displacement in place. */
91 if ((((*code) >> 25) & 7) == 5) {
92 /*g_print ("direct patching\n");*/
93 arm_patch ((guint8*)code, addr);
94 mono_arch_flush_icache ((guint8*)code, 4);
/* Bits 27-20 == 0x12 identify a BX-style (register branch) sequence. */
98 if ((((*code) >> 20) & 0xFF) == 0x12) {
99 /*g_print ("patching bx\n");*/
100 arm_patch ((guint8*)code, addr);
/* NOTE(review): the flush starts two words before `code` but covers only
 * 4 bytes — presumably arm_patch modified an earlier word of the call
 * sequence; confirm the intended flush range against the full file. */
101 mono_arch_flush_icache ((guint8*)(code - 2), 4);
/* Unknown call-site shape: nothing else is patchable here. */
105 g_assert_not_reached ();
/* Point a PLT entry at a new target by rewriting its jump-table slot. */
109 mono_arch_patch_plt_entry (guint8 *code, guint8 *addr)
111 /* Patch the jump table entry used by the plt entry */
/* The 4th word of the entry holds a byte offset, relative to the end of the
 * 16-byte PLT entry, to the jump-table slot it loads through. */
112 guint32 offset = ((guint32*)code)[3];
113 guint8 *jump_entry = code + offset + 16;
/* Plain data write: the slot is read by a load, so no icache flush needed. */
115 *(guint8**)jump_entry = addr;
/* Nullify a direct class-init call site once the class is initialized.
 * (The body is elided in this listing — presumably empty or it patches the
 * call site to a no-op; confirm against the full file.) */
119 mono_arch_nullify_class_init_trampoline (guint8 *code, gssize *regs)
/* Make a class-init PLT entry a no-op by pointing it at the nullified
 * trampoline (a bare return). */
125 mono_arch_nullify_plt_entry (guint8 *code)
/* Under AOT the no-op trampoline lives in the AOT image; fetch it lazily
 * by name the first time it is needed. */
127 if (mono_aot_only && !nullified_class_init_trampoline)
128 nullified_class_init_trampoline = mono_aot_get_named_code ("nullified_class_init_trampoline");
130 mono_arch_patch_plt_entry (code, nullified_class_init_trampoline);
133 /* Stack size for trampoline function
135 #define STACK (sizeof (MonoLMF))
137 /* Method-specific trampoline code fragment size */
138 #define METHOD_TRAMPOLINE_SIZE 64
140 /* Jump-specific trampoline code fragment size */
141 #define JUMP_TRAMPOLINE_SIZE 64
/* Bytes reserved for the generic trampoline emitted by
 * mono_arch_create_trampoline_code_full (); an assert below checks it. */
143 #define GEN_TRAMP_SIZE 192
146 * Stack frame description when the generic trampoline is called.
148 * ------------------- old sp
150 * ------------------- sp
/* Thin wrapper over the _full variant for the non-AOT (JIT) case.
 * NOTE(review): the declarations of `code_size` and `ji` are elided in this
 * listing. */
153 mono_arch_create_trampoline_code (MonoTrampolineType tramp_type)
158 return mono_arch_create_trampoline_code_full (tramp_type, &code_size, &ji, FALSE);
/* Emit the generic trampoline shared by all call sites of a given
 * trampoline type: it builds a MonoLMF, calls the C trampoline function,
 * then restores state and transfers control to the returned code address.
 * Returns the start of the emitted code; *code_size receives its length and,
 * when aot is TRUE, *ji accumulates the patches the AOT compiler must apply. */
162 mono_arch_create_trampoline_code_full (MonoTrampolineType tramp_type, guint32 *code_size, MonoJumpInfo **ji, gboolean aot)
164 guint8 *buf, *code = NULL;
165 guint8 *load_get_lmf_addr, *load_trampoline;
170 /* Now we'll create in 'buf' the ARM trampoline code. This
171 is the trampoline code common to all methods */
173 code = buf = mono_global_codeman_reserve (GEN_TRAMP_SIZE);
176 * At this point lr points to the specific arg and sp points to the saved
177 * regs on the stack (all but PC and SP). The original LR value has been
178 * saved as sp + LR_OFFSET by the push in the specific trampoline
/* 13 registers (r0-r12) were pushed below the saved LR. */
180 #define LR_OFFSET (sizeof (gpointer) * 13)
/* v1 = pointer to the saved register array, kept for the whole trampoline. */
181 ARM_MOV_REG_REG (buf, ARMREG_V1, ARMREG_SP);
184 * The trampoline contains a pc-relative offset to the got slot where the
185 * value is stored. The offset can be found at [lr + 4].
/* AOT path (per the comment above): load the GOT offset from [lr + 4], then
 * the specific argument out of the GOT slot. */
187 g_assert (tramp_type != MONO_TRAMPOLINE_GENERIC_CLASS_INIT);
188 ARM_LDR_IMM (buf, ARMREG_V2, ARMREG_LR, 4);
189 ARM_LDR_REG_REG (buf, ARMREG_V2, ARMREG_V2, ARMREG_LR);
/* JIT path: the specific argument word sits directly at [lr]. */
191 if (tramp_type != MONO_TRAMPOLINE_GENERIC_CLASS_INIT)
192 ARM_LDR_IMM (buf, ARMREG_V2, ARMREG_LR, 0);
/* Generic class init passes the vtable in MONO_ARCH_VTABLE_REG instead. */
194 ARM_MOV_REG_REG (buf, ARMREG_V2, MONO_ARCH_VTABLE_REG);
/* v3 = the caller's original LR (return address), recovered from the stack. */
196 ARM_LDR_IMM (buf, ARMREG_V3, ARMREG_SP, LR_OFFSET);
198 /* ok, now we can continue with the MonoLMF setup, mostly untouched
199 * from emit_prolog in mini-arm.c
200 * This is a synthetized call to mono_get_lmf_addr ()
/* AOT: record a patch so the address is filled in at AOT-compile time; the
 * inline literal stays NULL for now. */
203 *ji = mono_patch_info_list_prepend (*ji, buf - code, MONO_PATCH_INFO_JIT_ICALL_ADDR, "mono_get_lmf_addr");
204 ARM_LDR_IMM (buf, ARMREG_R0, ARMREG_PC, 0);
206 *(gpointer*)buf = NULL;
208 ARM_LDR_REG_REG (buf, ARMREG_R0, ARMREG_PC, ARMREG_R0);
/* JIT: remember this spot; the ldr is backpatched below (line 340) once the
 * constant-pool position is known. */
210 load_get_lmf_addr = buf;
/* Call mono_get_lmf_addr (): mov lr, pc makes LR point past the following
 * mov (PC reads as current instruction + 8). */
213 ARM_MOV_REG_REG (buf, ARMREG_LR, ARMREG_PC);
214 ARM_MOV_REG_REG (buf, ARMREG_PC, ARMREG_R0);
216 /* we build the MonoLMF structure on the stack - see mini-arm.h
217 * The pointer to the struct is put in r1.
218 * the iregs array is already allocated on the stack by push.
/* Extend the frame for the remainder of MonoLMF (14 words already pushed). */
220 ARM_SUB_REG_IMM8 (buf, ARMREG_SP, ARMREG_SP, sizeof (MonoLMF) - sizeof (guint) * 14);
221 ARM_ADD_REG_IMM8 (buf, ARMREG_R1, ARMREG_SP, STACK - sizeof (MonoLMF));
222 /* r0 is the result from mono_get_lmf_addr () */
223 ARM_STR_IMM (buf, ARMREG_R0, ARMREG_R1, G_STRUCT_OFFSET (MonoLMF, lmf_addr));
224 /* new_lmf->previous_lmf = *lmf_addr */
225 ARM_LDR_IMM (buf, ARMREG_R2, ARMREG_R0, G_STRUCT_OFFSET (MonoLMF, previous_lmf));
226 ARM_STR_IMM (buf, ARMREG_R2, ARMREG_R1, G_STRUCT_OFFSET (MonoLMF, previous_lmf));
227 /* *(lmf_addr) = r1 */
/* NOTE(review): this reuses offsetof(MonoLMF, previous_lmf) against
 * r0 == lmf_addr — presumably previous_lmf is the first field (offset 0);
 * confirm against mini-arm.h. */
228 ARM_STR_IMM (buf, ARMREG_R1, ARMREG_R0, G_STRUCT_OFFSET (MonoLMF, previous_lmf));
229 /* save method info (it's in v2) */
230 if ((tramp_type == MONO_TRAMPOLINE_JIT) || (tramp_type == MONO_TRAMPOLINE_JUMP))
231 ARM_STR_IMM (buf, ARMREG_V2, ARMREG_R1, G_STRUCT_OFFSET (MonoLMF, method));
232 ARM_STR_IMM (buf, ARMREG_SP, ARMREG_R1, G_STRUCT_OFFSET (MonoLMF, ebp));
233 /* save the IP (caller ip) */
234 if (tramp_type == MONO_TRAMPOLINE_JUMP) {
/* Jump trampolines have no meaningful caller IP: record 0. */
235 ARM_MOV_REG_IMM8 (buf, ARMREG_R2, 0);
237 /* assumes STACK == sizeof (MonoLMF) */
238 ARM_LDR_IMM (buf, ARMREG_R2, ARMREG_SP, (G_STRUCT_OFFSET (MonoLMF, iregs) + 13*4));
240 ARM_STR_IMM (buf, ARMREG_R2, ARMREG_R1, G_STRUCT_OFFSET (MonoLMF, eip));
243 * Now we're ready to call xxx_trampoline ().
245 /* Arg 1: the saved registers. It was put in v1 */
246 ARM_MOV_REG_REG (buf, ARMREG_R0, ARMREG_V1);
248 /* Arg 2: code (next address to the instruction that called us) */
249 if (tramp_type == MONO_TRAMPOLINE_JUMP) {
250 ARM_MOV_REG_IMM8 (buf, ARMREG_R1, 0);
252 ARM_MOV_REG_REG (buf, ARMREG_R1, ARMREG_V3);
255 /* Arg 3: the specific argument, stored in v2
257 ARM_MOV_REG_REG (buf, ARMREG_R2, ARMREG_V2);
/* AOT: the per-type C trampoline function is resolved via a named patch. */
260 char *icall_name = g_strdup_printf ("trampoline_func_%d", tramp_type);
261 *ji = mono_patch_info_list_prepend (*ji, buf - code, MONO_PATCH_INFO_JIT_ICALL_ADDR, icall_name);
262 ARM_LDR_IMM (buf, ARMREG_IP, ARMREG_PC, 0);
264 *(gpointer*)buf = NULL;
266 ARM_LDR_REG_REG (buf, ARMREG_IP, ARMREG_PC, ARMREG_IP);
/* JIT: backpatched below (line 341), like load_get_lmf_addr. */
268 load_trampoline = buf;
/* Indirect call through ip to the trampoline function. */
272 ARM_MOV_REG_REG (buf, ARMREG_LR, ARMREG_PC);
273 ARM_MOV_REG_REG (buf, ARMREG_PC, ARMREG_IP);
275 /* OK, code address is now on r0. Move it to the place on the stack
276 * where IP was saved (it is now no more useful to us and it can be
277 * clobbered). This way we can just restore all the regs in one inst
280 ARM_STR_IMM (buf, ARMREG_R0, ARMREG_V1, (ARMREG_R12 * 4));
282 /* Check for thread interruption */
283 /* This is not perf critical code so no need to check the interrupt flag */
285 * Have to call the _force_ variant, since there could be a protected wrapper on the top of the stack.
288 *ji = mono_patch_info_list_prepend (*ji, buf - code, MONO_PATCH_INFO_JIT_ICALL_ADDR, "mono_thread_force_interruption_checkpoint");
289 ARM_LDR_IMM (buf, ARMREG_IP, ARMREG_PC, 0);
291 *(gpointer*)buf = NULL;
293 ARM_LDR_REG_REG (buf, ARMREG_IP, ARMREG_PC, ARMREG_IP);
/* JIT path: the checkpoint address is embedded inline after the ldr. */
295 ARM_LDR_IMM (buf, ARMREG_IP, ARMREG_PC, 0);
297 *(gpointer*)buf = mono_thread_force_interruption_checkpoint;
/* Call the interruption checkpoint. */
300 ARM_MOV_REG_REG (buf, ARMREG_LR, ARMREG_PC);
301 ARM_MOV_REG_REG (buf, ARMREG_PC, ARMREG_IP);
304 * Now we restore the MonoLMF (see emit_epilogue in mini-arm.c)
305 * and the rest of the registers, so the method called will see
306 * the same state as before we executed.
307 * The pointer to MonoLMF is in r2.
309 ARM_MOV_REG_REG (buf, ARMREG_R2, ARMREG_SP);
310 /* ip = previous_lmf */
311 ARM_LDR_IMM (buf, ARMREG_IP, ARMREG_R2, G_STRUCT_OFFSET (MonoLMF, previous_lmf));
/* lr = lmf_addr */
313 ARM_LDR_IMM (buf, ARMREG_LR, ARMREG_R2, G_STRUCT_OFFSET (MonoLMF, lmf_addr));
314 /* *(lmf_addr) = previous_lmf */
315 ARM_STR_IMM (buf, ARMREG_IP, ARMREG_LR, G_STRUCT_OFFSET (MonoLMF, previous_lmf));
317 /* Non-standard function epilogue. Instead of doing a proper
318 * return, we just jump to the compiled code.
320 /* Restore the registers and jump to the code:
321 * Note that IP has been conveniently set to the method addr.
/* Undo the frame extension, then pop with mask 0x5fff (r0-r12 and r14). */
323 ARM_ADD_REG_IMM8 (buf, ARMREG_SP, ARMREG_SP, sizeof (MonoLMF) - sizeof (guint) * 14);
324 ARM_POP_NWB (buf, 0x5fff);
/* The rgctx fetch trampoline returns a value: hand it back in r0. */
325 if (tramp_type == MONO_TRAMPOLINE_RGCTX_LAZY_FETCH)
326 ARM_MOV_REG_REG (buf, ARMREG_R0, ARMREG_IP);
327 /* do we need to set sp? */
328 ARM_ADD_REG_IMM8 (buf, ARMREG_SP, ARMREG_SP, (14 * 4));
/* Class-init/rgctx trampolines return to the caller (lr); the others jump
 * straight into the compiled code whose address is in ip. */
329 if ((tramp_type == MONO_TRAMPOLINE_CLASS_INIT) || (tramp_type == MONO_TRAMPOLINE_GENERIC_CLASS_INIT) || (tramp_type == MONO_TRAMPOLINE_RGCTX_LAZY_FETCH))
330 ARM_MOV_REG_REG (buf, ARMREG_PC, ARMREG_LR);
332 ARM_MOV_REG_REG (buf, ARMREG_PC, ARMREG_IP);
/* JIT constant pool, appended after the code. */
334 constants = (gpointer*)buf;
335 constants [0] = mono_get_lmf_addr;
336 constants [1] = (gpointer)mono_get_trampoline_func (tramp_type);
339 /* backpatch by emitting the missing instructions skipped above */
340 ARM_LDR_IMM (load_get_lmf_addr, ARMREG_R0, ARMREG_PC, (buf - load_get_lmf_addr - 8));
341 ARM_LDR_IMM (load_trampoline, ARMREG_IP, ARMREG_PC, (buf + 4 - load_trampoline - 8));
346 /* Flush instruction cache, since we've generated code */
347 mono_arch_flush_icache (code, buf - code);
350 g_assert ((buf - code) <= GEN_TRAMP_SIZE);
352 *code_size = buf - code;
354 if (tramp_type == MONO_TRAMPOLINE_CLASS_INIT) {
357 /* Initialize the nullified class init trampoline used in the AOT case */
358 nullified_class_init_trampoline = mono_arch_get_nullified_class_init_trampoline (&code_len);
/* Create the no-op trampoline used once a class is initialized: a single
 * `mov pc, lr`, i.e. an immediate return. *code_len receives its size. */
365 mono_arch_get_nullified_class_init_trampoline (guint32 *code_len)
369 code = buf = mono_global_codeman_reserve (16);
371 ARM_MOV_REG_REG (buf, ARMREG_PC, ARMREG_LR);
373 mono_arch_flush_icache (code, buf - code);
375 *code_len = buf - code;
/* Bytes reserved for each method-specific trampoline fragment. */
380 #define SPEC_TRAMP_SIZE 24
/* Emit a small per-call-site fragment that pushes the registers, makes arg1
 * reachable through lr, and transfers to the common trampoline for
 * tramp_type. *code_len receives the fragment size. */
383 mono_arch_create_specific_trampoline (gpointer arg1, MonoTrampolineType tramp_type, MonoDomain *domain, guint32 *code_len)
385 guint8 *code, *buf, *tramp;
387 guint32 short_branch, size = SPEC_TRAMP_SIZE;
/* The shared (per-type) trampoline this fragment will branch to. */
389 tramp = mono_get_trampoline_code (tramp_type);
391 mono_domain_lock (domain);
392 code = buf = mono_code_manager_reserve_align (domain->code_mp, size, 4);
/* If tramp is reachable with a single branch, a shorter fragment suffices;
 * code + 8 is where that branch instruction will be placed. */
393 if ((short_branch = branch_for_target_reachable (code + 8, tramp))) {
/* Return the unused tail of the reservation to the code manager. */
395 mono_code_manager_commit (domain->code_mp, code, SPEC_TRAMP_SIZE, size);
397 mono_domain_unlock (domain);
399 /* we could reduce this to 12 bytes if tramp is within reach:
403 * The called code can access method using the lr register
404 * A 20 byte sequence could be:
406 * ARM_MOV_REG_REG (lr, pc)
407 * ARM_LDR_IMM (pc, pc, 0)
411 /* We save all the registers, except PC and SP */
412 ARM_PUSH (buf, 0x5fff);
/* Short form: store the pre-encoded branch plus arg1 as constants.
 * Bit 24 is the ARM branch "link" bit, turning the encoded B into BL —
 * presumably emitted verbatim by elided code; confirm in the full file. */
414 constants = (gpointer*)buf;
415 constants [0] = GUINT_TO_POINTER (short_branch | (1 << 24));
416 constants [1] = arg1;
/* Long form: load the trampoline address from the constant pool and jump;
 * mov lr, pc leaves lr pointing at the constants so the generic trampoline
 * can read arg1 from [lr]. */
419 ARM_LDR_IMM (buf, ARMREG_R1, ARMREG_PC, 8); /* temp reg */
420 ARM_MOV_REG_REG (buf, ARMREG_LR, ARMREG_PC);
421 ARM_MOV_REG_REG (buf, ARMREG_PC, ARMREG_R1);
423 constants = (gpointer*)buf;
424 constants [0] = arg1;
425 constants [1] = tramp;
429 /* Flush instruction cache, since we've generated code */
430 mono_arch_flush_icache (code, buf - code);
432 g_assert ((buf - code) <= size);
435 *code_len = buf - code;
/* True if v fits the signed +/-4095 immediate range of ARM ldr/str
 * word addressing. */
440 #define arm_is_imm12(v) ((int)(v) > -4096 && (int)(v) < 4096)
/* Emit a fast-path trampoline that fetches rgctx slot `slot` by walking the
 * (m)rgctx array chain; if any pointer on the way is NULL it falls back to a
 * specific trampoline that computes the value. Result returned in r0. */
443 mono_arch_create_rgctx_lazy_fetch_trampoline (guint32 slot)
449 guint8 **rgctx_null_jumps;
/* Decode the slot spec: MRGCTX vs per-class rgctx, and the raw index. */
454 mrgctx = MONO_RGCTX_SLOT_IS_MRGCTX (slot);
455 index = MONO_RGCTX_SLOT_INDEX (slot);
/* MRGCTX slots start after the structure's header words. */
457 index += sizeof (MonoMethodRuntimeGenericContext) / sizeof (gpointer);
/* Find how many array levels must be traversed to reach index. */
458 for (depth = 0; ; ++depth) {
459 int size = mono_class_rgctx_get_array_size (depth, mrgctx);
461 if (index < size - 1)
/* Each extra level costs a few instructions. */
466 tramp_size = 64 + 16 * depth;
468 code = buf = mono_global_codeman_reserve (tramp_size);
/* One possible null-check bail-out per level, plus the entry and slot
 * checks (<= depth + 2 total, asserted below). */
470 rgctx_null_jumps = g_malloc (sizeof (guint8*) * (depth + 2));
473 /* The vtable/mrgctx is in R0 */
474 g_assert (MONO_ARCH_VTABLE_REG == ARMREG_R0);
/* MRGCTX case (presumably guarded by an elided `if (mrgctx)`): the context
 * pointer is the argument itself. */
478 ARM_MOV_REG_REG (code, ARMREG_R1, ARMREG_R0);
480 /* load rgctx ptr from vtable */
481 g_assert (arm_is_imm12 (G_STRUCT_OFFSET (MonoVTable, runtime_generic_context)));
482 ARM_LDR_IMM (code, ARMREG_R1, ARMREG_R0, G_STRUCT_OFFSET (MonoVTable, runtime_generic_context));
483 /* is the rgctx ptr null? */
484 ARM_CMP_REG_IMM (code, ARMREG_R1, 0, 0);
485 /* if yes, jump to actual trampoline */
486 rgctx_null_jumps [njumps ++] = code;
/* Placeholder branch; patched to the slow path once it is emitted. */
487 ARM_B_COND (code, ARMCOND_EQ, 0);
490 for (i = 0; i < depth; ++i) {
491 /* load ptr to next array */
492 if (mrgctx && i == 0) {
/* For an MRGCTX the first level hangs off the end of the structure. */
493 g_assert (arm_is_imm12 (sizeof (MonoMethodRuntimeGenericContext)));
494 ARM_LDR_IMM (code, ARMREG_R1, ARMREG_R1, sizeof (MonoMethodRuntimeGenericContext));
496 ARM_LDR_IMM (code, ARMREG_R1, ARMREG_R1, 0);
498 /* is the ptr null? */
499 ARM_CMP_REG_IMM (code, ARMREG_R1, 0, 0);
500 /* if yes, jump to actual trampoline */
501 rgctx_null_jumps [njumps ++] = code;
502 ARM_B_COND (code, ARMCOND_EQ, 0);
/* Load the slot itself; index + 1 presumably skips the level's leading
 * "next array" pointer — confirm against the rgctx layout. */
506 code = mono_arm_emit_load_imm (code, ARMREG_R2, sizeof (gpointer) * (index + 1));
507 ARM_LDR_REG_REG (code, ARMREG_R1, ARMREG_R1, ARMREG_R2);
508 /* is the slot null? */
509 ARM_CMP_REG_IMM (code, ARMREG_R1, 0, 0);
510 /* if yes, jump to actual trampoline */
511 rgctx_null_jumps [njumps ++] = code;
512 ARM_B_COND (code, ARMCOND_EQ, 0);
513 /* otherwise return, result is in R1 */
514 ARM_MOV_REG_REG (code, ARMREG_R0, ARMREG_R1);
515 ARM_MOV_REG_REG (code, ARMREG_PC, ARMREG_LR);
/* Slow path starts here: patch every null-check branch to land on it. */
517 g_assert (njumps <= depth + 2);
518 for (i = 0; i < njumps; ++i)
519 arm_patch (rgctx_null_jumps [i], code);
521 g_free (rgctx_null_jumps);
525 /* The vtable/mrgctx is still in R0 */
527 tramp = mono_arch_create_specific_trampoline (GUINT_TO_POINTER (slot), MONO_TRAMPOLINE_RGCTX_LAZY_FETCH, mono_get_root_domain (), &code_len);
529 /* Jump to the actual trampoline */
530 ARM_LDR_IMM (code, ARMREG_R1, ARMREG_PC, 0); /* temp reg */
531 ARM_MOV_REG_REG (code, ARMREG_PC, ARMREG_R1);
532 *(guint32*)code = tramp;
535 mono_arch_flush_icache (buf, code - buf);
537 g_assert (code - buf <= tramp_size);
/* True if v fits the +/-255 immediate range of ARM ldrsb/ldrh addressing. */
542 #define arm_is_imm8(v) ((v) > -256 && (v) < 256)
/* Emit the generic class-init trampoline: test the vtable's `initialized`
 * bit; if set, return immediately, otherwise fall through to a specific
 * trampoline that runs the class initializer. */
545 mono_arch_create_generic_class_init_trampoline (void)
/* Cached location of the MonoVTable.initialized bitfield, resolved once. */
549 static int byte_offset = -1;
550 static guint8 bitmask;
553 guint32 code_len, imm8;
558 code = buf = mono_global_codeman_reserve (tramp_size);
/* Presumably guarded by an elided `if (byte_offset < 0)` — confirm. */
561 mono_marshal_find_bitfield_offset (MonoVTable, initialized, &byte_offset, &bitmask);
/* Test vtable->initialized: load the byte and mask out the bit. */
563 g_assert (arm_is_imm8 (byte_offset));
564 ARM_LDRSB_IMM (code, ARMREG_IP, MONO_ARCH_VTABLE_REG, byte_offset);
565 imm8 = mono_arm_is_rotated_imm8 (bitmask, &rot_amount);
/* NOTE(review): imm8 is guint32 (declared on line 553), so this assert is
 * always true; if mono_arm_is_rotated_imm8 signals failure with -1 a signed
 * check would be needed — confirm its contract before changing. */
566 g_assert (imm8 >= 0);
567 ARM_AND_REG_IMM (code, ARMREG_IP, ARMREG_IP, imm8, rot_amount);
568 ARM_CMP_REG_IMM (code, ARMREG_IP, 0, 0);
/* Placeholder branch taken when the class is NOT yet initialized. */
570 ARM_B_COND (code, ARMCOND_EQ, 0);
572 /* Initialized case */
/* Fast path: nothing to do, return to the caller. */
573 ARM_MOV_REG_REG (code, ARMREG_PC, ARMREG_LR);
575 /* Uninitialized case */
576 arm_patch (jump, code);
578 tramp = mono_arch_create_specific_trampoline (NULL, MONO_TRAMPOLINE_GENERIC_CLASS_INIT, mono_get_root_domain (), &code_len);
580 /* Jump to the actual trampoline */
581 ARM_LDR_IMM (code, ARMREG_R1, ARMREG_PC, 0); /* temp reg */
582 ARM_MOV_REG_REG (code, ARMREG_PC, ARMREG_R1);
583 *(guint32*)code = tramp;
586 mono_arch_flush_icache (buf, code - buf);
588 g_assert (code - buf <= tramp_size);