2 * tramp-arm.c: JIT trampoline code for ARM
5 * Paolo Molaro (lupus@ximian.com)
7 * (C) 2001-2003 Ximian, Inc.
8 * Copyright 2003-2011 Novell Inc
9 * Copyright 2011 Xamarin Inc
15 #include <mono/metadata/abi-details.h>
16 #include <mono/metadata/appdomain.h>
17 #include <mono/metadata/marshal.h>
18 #include <mono/metadata/tabledefs.h>
19 #include <mono/metadata/profiler-private.h>
20 #include <mono/arch/arm/arm-codegen.h>
21 #include <mono/arch/arm/arm-vfp-codegen.h>
26 #define ALIGN_TO(val,align) ((((guint64)val) + ((align) - 1)) & ~((align) - 1))
28 #ifdef USE_JUMP_TABLES
/*
 * decode_imm16:
 * Reassembles the 16-bit immediate of an ARM MOVW/MOVT instruction
 * (imm4 in bits 16-19, imm12 in bits 0-11).
 * NOTE(review): elided extract — some original lines are missing here.
 */
31 decode_imm16 (guint32 insn)
33 return (((insn >> 16) & 0xf) << 12) | (insn & 0xfff);
/* Opcode masks used to recognize the MOVW/MOVT pair below. */
36 #define INSN_MASK 0xff00000
37 #define MOVW_MASK ((3 << 24) | (0 << 20))
38 #define MOVT_MASK ((3 << 24) | (4 << 20))
/*
 * mono_arch_jumptable_entry_from_code:
 * Given CODE pointing at a MOVW/MOVT pair that materializes a jumptable
 * entry address, decode and return that address; asserts on any other
 * instruction pattern.
 */
41 mono_arch_jumptable_entry_from_code (guint8 *code)
43 guint32 insn1 = ((guint32*)code) [0];
44 guint32 insn2 = ((guint32*)code) [1];
46 if (((insn1 & INSN_MASK) == MOVW_MASK) &&
47 ((insn2 & INSN_MASK) == MOVT_MASK) ) {
48 guint32 imm_lo = decode_imm16 (insn1);
49 guint32 imm_hi = decode_imm16 (insn2);
/* MOVW supplies the low 16 bits of the address, MOVT the high 16. */
50 return (gpointer*) GUINT_TO_POINTER (imm_lo | (imm_hi << 16));
52 g_assert_not_reached ();
/*
 * mono_arch_patch_callsite (jump-table variant):
 * Redirects the call site by rewriting the jumptable entry it loads its
 * target from, instead of patching the instruction stream.
 * NOTE(review): elided extract — the store of ADDR into the entry is not
 * visible here.
 */
62 mono_arch_patch_callsite (guint8 *method_start, guint8 *code_ptr, guint8 *addr)
66 * code_ptr is 4 instructions after MOVW/MOVT used to address
/* 4 insns * 4 bytes = 16 bytes back to the MOVW/MOVT pair. */
69 jte = mono_jumptable_get_entry (code_ptr - 16);
70 g_assert ( jte != NULL);
/*
 * mono_arch_patch_callsite:
 * Patches the branch instruction at CODE_PTR so the call site transfers
 * control to ADDR. Handles a direct B/BL and the BX/'mov pc' form;
 * asserts on anything else.
 */
75 mono_arch_patch_callsite (guint8 *method_start, guint8 *code_ptr, guint8 *addr)
77 guint32 *code = (guint32*)code_ptr;
79 /* This is the 'bl' or the 'mov pc' instruction */
83 * Note that methods are called also with the bl opcode.
/* Bits 25-27 == 101b: the A32 B/BL encoding. */
85 if ((((*code) >> 25) & 7) == 5) {
86 /*g_print ("direct patching\n");*/
87 arm_patch ((guint8*)code, addr);
88 mono_arch_flush_icache ((guint8*)code, 4);
/* Bits 20-27 == 0x12: the BX-style indirect branch encoding. */
92 if ((((*code) >> 20) & 0xFF) == 0x12) {
93 /*g_print ("patching bx\n");*/
94 arm_patch ((guint8*)code, addr);
/* NOTE(review): flush starts two words before 'code' — presumably the
 * constant-load sequence precedes the branch; confirm against arm_patch. */
95 mono_arch_flush_icache ((guint8*)(code - 2), 4);
99 g_assert_not_reached ();
/*
 * mono_arch_patch_plt_entry:
 * Redirects a PLT entry to ADDR by storing ADDR into the jump-table slot
 * the entry loads its target from. Recognizes the ARM entry (ldr ip,
 * [pc, #0]) and the Thumb entry (ldr.w ip, [pc, #8]).
 */
104 mono_arch_patch_plt_entry (guint8 *code, gpointer *got, mgreg_t *regs, guint8 *addr)
108 /* Patch the jump table entry used by the plt entry */
/* 0xe59fc000 is the A32 encoding of 'ldr ip, [pc, #0]'. */
109 if (*(guint32*)code == 0xe59fc000) {
110 /* ARM_LDR_IMM (code, ARMREG_IP, ARMREG_PC, 0); */
111 guint32 offset = ((guint32*)code)[2];
113 jump_entry = code + offset + 12;
114 } else if (*(guint16*)(code - 4) == 0xf8df) {
116 * Thumb PLT entry, begins with ldr.w ip, [pc, #8], code points to entry + 4, see
117 * mono_arm_get_thumb_plt_entry ().
122 offset = *(guint32*)(code + 12);
123 jump_entry = code + offset + 8;
125 g_assert_not_reached ();
/* Store the new target into the resolved jump-table slot. */
128 *(guint8**)jump_entry = addr;
/*
 * mono_arch_nullify_class_init_trampoline:
 * Replaces the class-init call at CODE with a call to the nullified
 * (no-op) trampoline so initialization is not re-run on later passes.
 */
132 mono_arch_nullify_class_init_trampoline (guint8 *code, mgreg_t *regs)
134 mono_arch_patch_callsite (NULL, code, mini_get_nullified_class_init_trampoline ());
/* True when V fits the signed 12-bit immediate of an ARM LDR/STR. */
139 #define arm_is_imm12(v) ((int)(v) > -4096 && (int)(v) < 4096)
141 #ifndef USE_JUMP_TABLES
143 * Return the instruction to jump from code to target, 0 if not
144 * reachable with a single instruction
/*
 * branch_for_target_reachable:
 * Encodes an unconditional A32 B instruction from BRANCH to TARGET when
 * the +/-32MB branch range allows it.
 */
147 branch_for_target_reachable (guint8 *branch, guint8 *target)
/* -8: PC reads as the branch instruction's address + 8 on ARM. */
149 gint diff = target - branch - 8;
150 g_assert ((diff & 3) == 0);
/* 33554431 == 2^25 - 1, the maximum forward displacement in bytes. */
152 if (diff <= 33554431)
153 return (ARMCOND_AL << ARMCOND_SHIFT) | (ARM_BR_TAG) | (diff >> 2);
155 /* diff between 0 and -33554432 */
156 if (diff >= -33554432)
157 return (ARMCOND_AL << ARMCOND_SHIFT) | (ARM_BR_TAG) | ((diff >> 2) & ~0xff000000);
/*
 * emit_bx:
 * Emits an indirect branch to REG: a real BX when Thumb interworking is
 * supported, otherwise a plain 'mov pc, reg'. Returns the advanced code
 * pointer. NOTE(review): the BX-emitting arm of the if is elided here.
 */
163 static inline guint8*
164 emit_bx (guint8* code, int reg)
166 if (mono_arm_thumb_supported ())
169 ARM_MOV_REG_REG (code, ARMREG_PC, reg);
173 /* Stack size for trampoline function
175 #define STACK ALIGN_TO (sizeof (MonoLMF), 8)
177 /* Method-specific trampoline code fragment size */
178 #define METHOD_TRAMPOLINE_SIZE 64
180 /* Jump-specific trampoline code fragment size */
181 #define JUMP_TRAMPOLINE_SIZE 64
/*
 * mono_arch_create_generic_trampoline:
 * Emits the trampoline code shared by all call sites of a given
 * TRAMP_TYPE: builds a MonoLMF on the stack, calls the C trampoline
 * function, then restores state and jumps to the returned code address.
 * NOTE(review): elided extract — many original lines (braces, #else
 * arms, declarations) are missing from this view.
 */
184 mono_arch_create_generic_trampoline (MonoTrampolineType tramp_type, MonoTrampInfo **info, gboolean aot)
187 guint8 *buf, *code = NULL;
188 #ifdef USE_JUMP_TABLES
189 gpointer *load_get_lmf_addr = NULL, *load_trampoline = NULL;
191 guint8 *load_get_lmf_addr = NULL, *load_trampoline = NULL;
195 int cfa_offset, regsave_size, lr_offset;
196 GSList *unwind_ops = NULL;
197 MonoJumpInfo *ji = NULL;
200 #ifdef USE_JUMP_TABLES
204 /* Now we'll create in 'buf' the ARM trampoline code. This
205 is the trampoline code common to all methods */
209 /* Add space for saving/restoring VFP regs. */
210 if (mono_arm_is_hard_float ())
213 code = buf = mono_global_codeman_reserve (buf_len);
216 * At this point lr points to the specific arg and sp points to the saved
217 * regs on the stack (all but PC and SP). The original LR value has been
218 * saved as sp + LR_OFFSET by the push in the specific trampoline
221 /* The size of the area already allocated by the push in the specific trampoline */
222 regsave_size = 14 * sizeof (mgreg_t);
223 /* The offset where lr was saved inside the regsave area */
224 lr_offset = 13 * sizeof (mgreg_t);
226 // FIXME: Finish the unwind info, the current info allows us to unwind
227 // when the trampoline is not in the epilog
229 // CFA = SP + (num registers pushed) * 4
230 cfa_offset = 14 * sizeof (mgreg_t);
231 mono_add_unwind_op_def_cfa (unwind_ops, code, buf, ARMREG_SP, cfa_offset);
232 // PC saved at sp+LR_OFFSET
233 mono_add_unwind_op_offset (unwind_ops, code, buf, ARMREG_LR, -4);
/* Load the trampoline-specific argument into V2. */
235 if (aot && tramp_type != MONO_TRAMPOLINE_GENERIC_CLASS_INIT) {
237 * For page trampolines the data is in r1, so just move it, otherwise use the got slot as below.
238 * The trampoline contains a pc-relative offset to the got slot
239 * preceeding the got slot where the value is stored. The offset can be
243 ARM_MOV_REG_REG (code, ARMREG_V2, ARMREG_R1);
245 ARM_LDR_IMM (code, ARMREG_V2, ARMREG_LR, 0);
246 ARM_ADD_REG_IMM (code, ARMREG_V2, ARMREG_V2, 4, 0);
247 ARM_LDR_REG_REG (code, ARMREG_V2, ARMREG_V2, ARMREG_LR);
250 if (tramp_type != MONO_TRAMPOLINE_GENERIC_CLASS_INIT) {
251 ARM_LDR_IMM (code, ARMREG_V2, ARMREG_LR, 0);
254 ARM_MOV_REG_REG (code, ARMREG_V2, MONO_ARCH_VTABLE_REG);
/* V3 = the caller's saved LR (return address into the caller). */
256 ARM_LDR_IMM (code, ARMREG_V3, ARMREG_SP, lr_offset);
258 /* ok, now we can continue with the MonoLMF setup, mostly untouched
259 * from emit_prolog in mini-arm.c
260 * This is a synthetized call to mono_get_lmf_addr ()
263 ji = mono_patch_info_list_prepend (ji, code - buf, MONO_PATCH_INFO_JIT_ICALL_ADDR, "mono_get_lmf_addr");
264 ARM_LDR_IMM (code, ARMREG_R0, ARMREG_PC, 0);
266 *(gpointer*)code = NULL;
268 ARM_LDR_REG_REG (code, ARMREG_R0, ARMREG_PC, ARMREG_R0);
270 #ifdef USE_JUMP_TABLES
271 load_get_lmf_addr = mono_jumptable_add_entry ();
272 code = mono_arm_load_jumptable_entry (code, load_get_lmf_addr, ARMREG_R0);
274 load_get_lmf_addr = code;
/* Indirect call: lr = pc (next insn), then branch to the icall addr. */
278 ARM_MOV_REG_REG (code, ARMREG_LR, ARMREG_PC);
279 code = emit_bx (code, ARMREG_R0);
281 /* we build the MonoLMF structure on the stack - see mini-arm.h
282 * The pointer to the struct is put in r1.
283 * the iregs array is already allocated on the stack by push.
285 code = mono_arm_emit_load_imm (code, ARMREG_R2, STACK - regsave_size);
286 ARM_SUB_REG_REG (code, ARMREG_SP, ARMREG_SP, ARMREG_R2);
287 cfa_offset += STACK - regsave_size;
288 mono_add_unwind_op_def_cfa_offset (unwind_ops, code, buf, cfa_offset);
290 code = mono_arm_emit_load_imm (code, ARMREG_R2, STACK - sizeof (MonoLMF));
291 ARM_ADD_REG_REG (code, ARMREG_V1, ARMREG_SP, ARMREG_R2);
294 * The stack now looks like:
296 * v1 -> <rest of LMF>
300 /* r0 is the result from mono_get_lmf_addr () */
301 ARM_STR_IMM (code, ARMREG_R0, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, lmf_addr));
302 /* new_lmf->previous_lmf = *lmf_addr */
303 ARM_LDR_IMM (code, ARMREG_R2, ARMREG_R0, MONO_STRUCT_OFFSET (MonoLMF, previous_lmf));
304 ARM_STR_IMM (code, ARMREG_R2, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, previous_lmf));
305 /* *(lmf_addr) = r1 */
306 ARM_STR_IMM (code, ARMREG_V1, ARMREG_R0, MONO_STRUCT_OFFSET (MonoLMF, previous_lmf));
307 /* save method info (it's in v2) */
308 if ((tramp_type == MONO_TRAMPOLINE_JIT) || (tramp_type == MONO_TRAMPOLINE_JUMP))
309 ARM_STR_IMM (code, ARMREG_V2, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, method));
311 ARM_MOV_REG_IMM8 (code, ARMREG_R2, 0);
312 ARM_STR_IMM (code, ARMREG_R2, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, method));
/* lmf->sp = original SP before the trampoline ran (sp + cfa_offset). */
315 code = mono_arm_emit_load_imm (code, ARMREG_R2, cfa_offset);
316 ARM_ADD_REG_REG (code, ARMREG_R2, ARMREG_SP, ARMREG_R2);
317 ARM_STR_IMM (code, ARMREG_R2, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, sp));
/* lmf->fp = the caller's FP, recovered from the saved iregs array. */
319 ARM_LDR_IMM (code, ARMREG_R2, ARMREG_V1, (MONO_STRUCT_OFFSET (MonoLMF, iregs) + ARMREG_FP*4));
320 ARM_STR_IMM (code, ARMREG_R2, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, fp));
321 /* save the IP (caller ip) */
322 if (tramp_type == MONO_TRAMPOLINE_JUMP) {
323 ARM_MOV_REG_IMM8 (code, ARMREG_R2, 0);
325 ARM_LDR_IMM (code, ARMREG_R2, ARMREG_V1, (MONO_STRUCT_OFFSET (MonoLMF, iregs) + 13*4));
327 ARM_STR_IMM (code, ARMREG_R2, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, ip));
329 /* Save VFP registers. */
330 if (mono_arm_is_hard_float ()) {
332 * Strictly speaking, we don't have to save d0-d7 in the LMF, but
333 * it's easier than attempting to store them on the stack since
334 * this trampoline code is pretty messy.
336 ARM_ADD_REG_IMM8 (code, ARMREG_R0, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, fregs));
337 ARM_FSTMD (code, ARM_VFP_D0, 8, ARMREG_R0);
341 * Now we're ready to call xxx_trampoline ().
343 /* Arg 1: the saved registers */
344 ARM_ADD_REG_IMM (code, ARMREG_R0, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, iregs), 0);
346 /* Arg 2: code (next address to the instruction that called us) */
347 if (tramp_type == MONO_TRAMPOLINE_JUMP) {
348 ARM_MOV_REG_IMM8 (code, ARMREG_R1, 0);
350 ARM_MOV_REG_REG (code, ARMREG_R1, ARMREG_V3);
353 /* Arg 3: the specific argument, stored in v2
355 ARM_MOV_REG_REG (code, ARMREG_R2, ARMREG_V2);
358 char *icall_name = g_strdup_printf ("trampoline_func_%d", tramp_type);
359 ji = mono_patch_info_list_prepend (ji, code - buf, MONO_PATCH_INFO_JIT_ICALL_ADDR, icall_name);
360 ARM_LDR_IMM (code, ARMREG_IP, ARMREG_PC, 0);
362 *(gpointer*)code = NULL;
364 ARM_LDR_REG_REG (code, ARMREG_IP, ARMREG_PC, ARMREG_IP);
366 #ifdef USE_JUMP_TABLES
367 load_trampoline = mono_jumptable_add_entry ();
368 code = mono_arm_load_jumptable_entry (code, load_trampoline, ARMREG_IP);
370 load_trampoline = code;
/* Call the trampoline function; its return value lands in r0. */
375 ARM_MOV_REG_REG (code, ARMREG_LR, ARMREG_PC);
376 code = emit_bx (code, ARMREG_IP);
378 /* OK, code address is now on r0. Move it to the place on the stack
379 * where IP was saved (it is now no more useful to us and it can be
380 * clobbered). This way we can just restore all the regs in one inst
383 ARM_STR_IMM (code, ARMREG_R0, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, iregs) + (ARMREG_R12 * sizeof (mgreg_t)));
385 /* Check for thread interruption */
386 /* This is not perf critical code so no need to check the interrupt flag */
388 * Have to call the _force_ variant, since there could be a protected wrapper on the top of the stack.
391 ji = mono_patch_info_list_prepend (ji, code - buf, MONO_PATCH_INFO_JIT_ICALL_ADDR, "mono_thread_force_interruption_checkpoint");
392 ARM_LDR_IMM (code, ARMREG_IP, ARMREG_PC, 0);
394 *(gpointer*)code = NULL;
396 ARM_LDR_REG_REG (code, ARMREG_IP, ARMREG_PC, ARMREG_IP);
398 #ifdef USE_JUMP_TABLES
399 gpointer *jte = mono_jumptable_add_entry ();
400 code = mono_arm_load_jumptable_entry (code, jte, ARMREG_IP);
401 jte [0] = mono_thread_force_interruption_checkpoint;
403 ARM_LDR_IMM (code, ARMREG_IP, ARMREG_PC, 0);
405 *(gpointer*)code = mono_thread_force_interruption_checkpoint;
409 ARM_MOV_REG_REG (code, ARMREG_LR, ARMREG_PC);
410 code = emit_bx (code, ARMREG_IP);
413 * Now we restore the MonoLMF (see emit_epilogue in mini-arm.c)
414 * and the rest of the registers, so the method called will see
415 * the same state as before we executed.
417 /* ip = previous_lmf */
418 ARM_LDR_IMM (code, ARMREG_IP, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, previous_lmf));
420 ARM_LDR_IMM (code, ARMREG_LR, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, lmf_addr));
421 /* *(lmf_addr) = previous_lmf */
422 ARM_STR_IMM (code, ARMREG_IP, ARMREG_LR, MONO_STRUCT_OFFSET (MonoLMF, previous_lmf));
424 /* Restore VFP registers. */
425 if (mono_arm_is_hard_float ()) {
426 ARM_ADD_REG_IMM8 (code, ARMREG_R0, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, fregs));
427 ARM_FLDMD (code, ARM_VFP_D0, 8, ARMREG_R0);
430 /* Non-standard function epilogue. Instead of doing a proper
431 * return, we just jump to the compiled code.
433 /* Restore the registers and jump to the code:
434 * Note that IP has been conveniently set to the method addr.
436 ARM_ADD_REG_IMM8 (code, ARMREG_SP, ARMREG_SP, STACK - regsave_size);
/* 0x5fff = mask for r0-r12 + lr (PC and SP excluded). */
437 ARM_POP_NWB (code, 0x5fff);
438 if (tramp_type == MONO_TRAMPOLINE_RGCTX_LAZY_FETCH)
439 ARM_MOV_REG_REG (code, ARMREG_R0, ARMREG_IP);
440 ARM_ADD_REG_IMM8 (code, ARMREG_SP, ARMREG_SP, regsave_size);
441 if ((tramp_type == MONO_TRAMPOLINE_CLASS_INIT) || (tramp_type == MONO_TRAMPOLINE_GENERIC_CLASS_INIT) || (tramp_type == MONO_TRAMPOLINE_RGCTX_LAZY_FETCH))
442 code = emit_bx (code, ARMREG_LR);
444 code = emit_bx (code, ARMREG_IP);
/* Fill in the constant pool / jumptable entries referenced above. */
446 #ifdef USE_JUMP_TABLES
447 load_get_lmf_addr [0] = mono_get_lmf_addr;
448 load_trampoline [0] = (gpointer)mono_get_trampoline_func (tramp_type);
450 constants = (gpointer*)code;
451 constants [0] = mono_get_lmf_addr;
452 constants [1] = (gpointer)mono_get_trampoline_func (tramp_type);
455 /* backpatch by emitting the missing instructions skipped above */
456 ARM_LDR_IMM (load_get_lmf_addr, ARMREG_R0, ARMREG_PC, (code - load_get_lmf_addr - 8));
457 ARM_LDR_IMM (load_trampoline, ARMREG_IP, ARMREG_PC, (code + 4 - load_trampoline - 8));
463 /* Flush instruction cache, since we've generated code */
464 mono_arch_flush_icache (buf, code - buf);
465 mono_profiler_code_buffer_new (buf, code - buf, MONO_PROFILER_CODE_BUFFER_HELPER, NULL);
468 g_assert ((code - buf) <= buf_len);
471 tramp_name = mono_get_generic_trampoline_name (tramp_type);
472 *info = mono_tramp_info_create (tramp_name, buf, code - buf, ji, unwind_ops);
/*
 * mono_arch_get_nullified_class_init_trampoline:
 * Emits a trampoline that does nothing but return to the caller
 * (a single branch to LR); used to replace completed class-init calls.
 */
480 mono_arch_get_nullified_class_init_trampoline (MonoTrampInfo **info)
484 code = buf = mono_global_codeman_reserve (16);
486 code = emit_bx (code, ARMREG_LR);
488 mono_arch_flush_icache (buf, code - buf);
489 mono_profiler_code_buffer_new (buf, code - buf, MONO_PROFILER_CODE_BUFFER_HELPER, NULL);
492 *info = mono_tramp_info_create ("nullified_class_init_trampoline", buf, code - buf, NULL, NULL);
497 #define SPEC_TRAMP_SIZE 24
/*
 * mono_arch_create_specific_trampoline:
 * Emits a small per-callsite stub that pushes the register state and
 * jumps to the generic trampoline for TRAMP_TYPE, passing ARG1.
 * NOTE(review): elided extract — several original lines (braces, #else
 * arms, declarations) are missing from this view.
 */
500 mono_arch_create_specific_trampoline (gpointer arg1, MonoTrampolineType tramp_type, MonoDomain *domain, guint32 *code_len)
502 guint8 *code, *buf, *tramp;
504 #ifndef USE_JUMP_TABLES
505 guint32 short_branch = FALSE;
507 guint32 size = SPEC_TRAMP_SIZE;
509 tramp = mono_get_trampoline_code (tramp_type);
512 mono_domain_lock (domain);
513 #ifdef USE_JUMP_TABLES
514 code = buf = mono_domain_code_reserve_align (domain, size, 4);
516 code = buf = mono_domain_code_reserve_align (domain, size, 4);
/* If a single B insn can reach the generic trampoline, emit the
 * shorter form and give the unused bytes back to the domain. */
517 if ((short_branch = branch_for_target_reachable (code + 4, tramp))) {
519 mono_domain_code_commit (domain, code, SPEC_TRAMP_SIZE, size);
522 mono_domain_unlock (domain);
524 code = buf = mono_global_codeman_reserve (size);
525 short_branch = FALSE;
528 #ifdef USE_JUMP_TABLES
529 /* For jumptables case we always generate the same code for trampolines,
531 * push {r0, r1, r2, r3, r4, r5, r6, r7, r8, r9, r10, r11, r12, lr}
537 ARM_PUSH (code, 0x5fff);
538 constants = mono_jumptable_add_entries (2);
539 code = mono_arm_load_jumptable_entry_addr (code, constants, ARMREG_LR);
540 ARM_LDR_IMM (code, ARMREG_R1, ARMREG_LR, 4);
541 code = emit_bx (code, ARMREG_R1);
542 constants [0] = arg1;
543 constants [1] = tramp;
545 /* we could reduce this to 12 bytes if tramp is within reach:
549 * The called code can access method using the lr register
550 * A 20 byte sequence could be:
552 * ARM_MOV_REG_REG (lr, pc)
553 * ARM_LDR_IMM (pc, pc, 0)
557 /* We save all the registers, except PC and SP */
558 ARM_PUSH (code, 0x5fff);
560 constants = (gpointer*)code;
/* NOTE(review): the (1 << 24) bit presumably marks the short-branch
 * encoding for mono_arch_get_call_target-style decoding — confirm. */
561 constants [0] = GUINT_TO_POINTER (short_branch | (1 << 24));
562 constants [1] = arg1;
565 ARM_LDR_IMM (code, ARMREG_R1, ARMREG_PC, 8); /* temp reg */
566 ARM_MOV_REG_REG (code, ARMREG_LR, ARMREG_PC);
567 code = emit_bx (code, ARMREG_R1);
569 constants = (gpointer*)code;
570 constants [0] = arg1;
571 constants [1] = tramp;
576 /* Flush instruction cache, since we've generated code */
577 mono_arch_flush_icache (buf, code - buf);
578 mono_profiler_code_buffer_new (buf, code - buf, MONO_PROFILER_CODE_BUFFER_SPECIFIC_TRAMPOLINE, mono_get_generic_trampoline_simple_name (tramp_type));
580 g_assert ((code - buf) <= size);
583 *code_len = code - buf;
589 * mono_arch_get_unbox_trampoline:
591 * @addr: pointer to native code for @m
593 * when value type methods are called through the vtable we need to unbox the
594 * this argument. This method returns a pointer to a trampoline which does
595 * unboxing before calling the method
/* Adjusts 'this' (r0) past the MonoObject header, then tail-jumps to
 * the method body at ADDR. */
598 mono_arch_get_unbox_trampoline (MonoMethod *m, gpointer addr)
600 guint8 *code, *start;
601 MonoDomain *domain = mono_domain_get ();
602 #ifdef USE_JUMP_TABLES
609 start = code = mono_domain_code_reserve (domain, size);
611 #ifdef USE_JUMP_TABLES
612 jte = mono_jumptable_add_entry ();
613 code = mono_arm_load_jumptable_entry (code, jte, ARMREG_IP);
614 ARM_ADD_REG_IMM8 (code, ARMREG_R0, ARMREG_R0, sizeof (MonoObject));
615 code = emit_bx (code, ARMREG_IP);
618 ARM_LDR_IMM (code, ARMREG_IP, ARMREG_PC, 4);
619 ARM_ADD_REG_IMM8 (code, ARMREG_R0, ARMREG_R0, sizeof (MonoObject));
620 code = emit_bx (code, ARMREG_IP);
/* Inline constant pool: the target method address. */
621 *(guint32*)code = (guint32)addr;
624 mono_arch_flush_icache (start, code - start);
625 mono_profiler_code_buffer_new (start, code - start, MONO_PROFILER_CODE_BUFFER_UNBOX_TRAMPOLINE, m);
626 g_assert ((code - start) <= size);
627 /*g_print ("unbox trampoline at %d for %s:%s\n", this_pos, m->klass->name, m->name);
628 g_print ("unbox code is at %p for method at %p\n", start, addr);*/
/*
 * mono_arch_get_static_rgctx_trampoline:
 * Emits a stub that loads MRGCTX into MONO_ARCH_RGCTX_REG and then
 * jumps to ADDR; used to bind a static rgctx to a shared method body.
 */
634 mono_arch_get_static_rgctx_trampoline (MonoMethod *m, MonoMethodRuntimeGenericContext *mrgctx, gpointer addr)
636 guint8 *code, *start;
637 #ifdef USE_JUMP_TABLES
643 MonoDomain *domain = mono_domain_get ();
645 start = code = mono_domain_code_reserve (domain, buf_len);
647 #ifdef USE_JUMP_TABLES
648 jte = mono_jumptable_add_entries (2);
649 code = mono_arm_load_jumptable_entry_addr (code, jte, ARMREG_IP);
650 ARM_LDR_IMM (code, MONO_ARCH_RGCTX_REG, ARMREG_IP, 0);
651 ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, 4);
652 ARM_BX (code, ARMREG_IP);
/* Non-jumptable path: pc-relative loads, constants follow inline. */
656 ARM_LDR_IMM (code, MONO_ARCH_RGCTX_REG, ARMREG_PC, 0);
657 ARM_LDR_IMM (code, ARMREG_PC, ARMREG_PC, 0);
658 *(guint32*)code = (guint32)mrgctx;
660 *(guint32*)code = (guint32)addr;
664 g_assert ((code - start) <= buf_len);
666 mono_arch_flush_icache (start, code - start);
667 mono_profiler_code_buffer_new (start, code - start, MONO_PROFILER_CODE_BUFFER_GENERICS_TRAMPOLINE, NULL);
/*
 * mono_arch_create_rgctx_lazy_fetch_trampoline:
 * Emits the fast path for fetching rgctx SLOT: walk the rgctx array
 * chain in R1, return the slot value in R0 if every link is non-null,
 * otherwise fall through to the slow-path fetch trampoline.
 * NOTE(review): elided extract — some declarations/braces are missing.
 */
673 mono_arch_create_rgctx_lazy_fetch_trampoline (guint32 slot, MonoTrampInfo **info, gboolean aot)
679 guint8 **rgctx_null_jumps;
683 MonoJumpInfo *ji = NULL;
684 GSList *unwind_ops = NULL;
685 #ifdef USE_JUMP_TABLES
689 mrgctx = MONO_RGCTX_SLOT_IS_MRGCTX (slot);
690 index = MONO_RGCTX_SLOT_INDEX (slot);
692 index += MONO_SIZEOF_METHOD_RUNTIME_GENERIC_CONTEXT / sizeof (gpointer);
/* Compute how many array hops are needed to reach the slot. */
693 for (depth = 0; ; ++depth) {
694 int size = mono_class_rgctx_get_array_size (depth, mrgctx);
696 if (index < size - 1)
/* One null-check branch per hop, plus entry and slot checks. */
701 tramp_size = 64 + 16 * depth;
703 code = buf = mono_global_codeman_reserve (tramp_size);
705 mono_add_unwind_op_def_cfa (unwind_ops, code, buf, ARMREG_SP, 0);
707 rgctx_null_jumps = g_malloc (sizeof (guint8*) * (depth + 2));
710 /* The vtable/mrgctx is in R0 */
711 g_assert (MONO_ARCH_VTABLE_REG == ARMREG_R0);
715 ARM_MOV_REG_REG (code, ARMREG_R1, ARMREG_R0);
717 /* load rgctx ptr from vtable */
718 g_assert (arm_is_imm12 (MONO_STRUCT_OFFSET (MonoVTable, runtime_generic_context)));
719 ARM_LDR_IMM (code, ARMREG_R1, ARMREG_R0, MONO_STRUCT_OFFSET (MonoVTable, runtime_generic_context));
720 /* is the rgctx ptr null? */
721 ARM_CMP_REG_IMM (code, ARMREG_R1, 0, 0);
722 /* if yes, jump to actual trampoline */
723 rgctx_null_jumps [njumps ++] = code;
724 ARM_B_COND (code, ARMCOND_EQ, 0);
727 for (i = 0; i < depth; ++i) {
728 /* load ptr to next array */
729 if (mrgctx && i == 0) {
730 g_assert (arm_is_imm12 (MONO_SIZEOF_METHOD_RUNTIME_GENERIC_CONTEXT));
731 ARM_LDR_IMM (code, ARMREG_R1, ARMREG_R1, MONO_SIZEOF_METHOD_RUNTIME_GENERIC_CONTEXT);
733 ARM_LDR_IMM (code, ARMREG_R1, ARMREG_R1, 0);
735 /* is the ptr null? */
736 ARM_CMP_REG_IMM (code, ARMREG_R1, 0, 0);
737 /* if yes, jump to actual trampoline */
738 rgctx_null_jumps [njumps ++] = code;
739 ARM_B_COND (code, ARMCOND_EQ, 0);
/* +1 skips the array's leading length/next word. */
743 code = mono_arm_emit_load_imm (code, ARMREG_R2, sizeof (gpointer) * (index + 1));
744 ARM_LDR_REG_REG (code, ARMREG_R1, ARMREG_R1, ARMREG_R2);
745 /* is the slot null? */
746 ARM_CMP_REG_IMM (code, ARMREG_R1, 0, 0);
747 /* if yes, jump to actual trampoline */
748 rgctx_null_jumps [njumps ++] = code;
749 ARM_B_COND (code, ARMCOND_EQ, 0);
750 /* otherwise return, result is in R1 */
751 ARM_MOV_REG_REG (code, ARMREG_R0, ARMREG_R1);
752 code = emit_bx (code, ARMREG_LR);
/* Patch every null-check branch to land on the slow path below. */
754 g_assert (njumps <= depth + 2);
755 for (i = 0; i < njumps; ++i)
756 arm_patch (rgctx_null_jumps [i], code);
758 g_free (rgctx_null_jumps);
762 /* The vtable/mrgctx is still in R0 */
765 ji = mono_patch_info_list_prepend (ji, code - buf, MONO_PATCH_INFO_JIT_ICALL_ADDR, g_strdup_printf ("specific_trampoline_lazy_fetch_%u", slot));
766 ARM_LDR_IMM (code, ARMREG_R1, ARMREG_PC, 0);
768 *(gpointer*)code = NULL;
770 ARM_LDR_REG_REG (code, ARMREG_PC, ARMREG_PC, ARMREG_R1);
772 tramp = mono_arch_create_specific_trampoline (GUINT_TO_POINTER (slot), MONO_TRAMPOLINE_RGCTX_LAZY_FETCH, mono_get_root_domain (), &code_len);
774 /* Jump to the actual trampoline */
775 #ifdef USE_JUMP_TABLES
776 jte = mono_jumptable_add_entry ();
778 code = mono_arm_load_jumptable_entry (code, jte, ARMREG_R1);
779 code = emit_bx (code, ARMREG_R1);
781 ARM_LDR_IMM (code, ARMREG_R1, ARMREG_PC, 0); /* temp reg */
782 code = emit_bx (code, ARMREG_R1);
783 *(gpointer*)code = tramp;
788 mono_arch_flush_icache (buf, code - buf);
789 mono_profiler_code_buffer_new (buf, code - buf, MONO_PROFILER_CODE_BUFFER_GENERICS_TRAMPOLINE, NULL);
791 g_assert (code - buf <= tramp_size);
794 char *name = mono_get_rgctx_fetch_trampoline_name (slot);
795 *info = mono_tramp_info_create (name, buf, code - buf, ji, unwind_ops);
/*
 * mono_arch_create_general_rgctx_lazy_fetch_trampoline:
 * Slot-independent rgctx fetch stub: loads the slow-path trampoline
 * address from the rgctx arg (second word) and jumps to it.
 */
803 mono_arch_create_general_rgctx_lazy_fetch_trampoline (MonoTrampInfo **info, gboolean aot)
807 MonoJumpInfo *ji = NULL;
808 GSList *unwind_ops = NULL;
814 code = buf = mono_global_codeman_reserve (tramp_size);
816 mono_add_unwind_op_def_cfa (unwind_ops, code, buf, ARMREG_SP, 0);
818 // FIXME: Currently, we always go to the slow path.
819 /* Load trampoline addr */
820 ARM_LDR_IMM (code, ARMREG_R1, MONO_ARCH_RGCTX_REG, 4);
821 /* The vtable/mrgctx is in R0 */
822 g_assert (MONO_ARCH_VTABLE_REG == ARMREG_R0);
823 code = emit_bx (code, ARMREG_R1);
825 mono_arch_flush_icache (buf, code - buf);
826 mono_profiler_code_buffer_new (buf, code - buf, MONO_PROFILER_CODE_BUFFER_GENERICS_TRAMPOLINE, NULL);
828 g_assert (code - buf <= tramp_size);
831 *info = mono_tramp_info_create ("rgctx_fetch_trampoline_general", buf, code - buf, ji, unwind_ops);
/* True when V fits the signed 8-bit immediate of LDRSB and friends. */
836 #define arm_is_imm8(v) ((v) > -256 && (v) < 256)
/*
 * mono_arch_create_generic_class_init_trampoline:
 * Fast path for generic class initialization: test the vtable's
 * 'initialized' bit and return immediately when set; otherwise branch
 * to the class-init trampoline. NOTE(review): elided extract.
 */
839 mono_arch_create_generic_class_init_trampoline (MonoTrampInfo **info, gboolean aot)
843 static int byte_offset = -1;
844 static guint8 bitmask;
847 guint32 code_len, imm8;
849 GSList *unwind_ops = NULL;
850 MonoJumpInfo *ji = NULL;
854 code = buf = mono_global_codeman_reserve (tramp_size);
/* Resolve byte_offset/bitmask of MonoVTable.initialized once. */
857 mono_marshal_find_bitfield_offset (MonoVTable, initialized, &byte_offset, &bitmask);
859 g_assert (arm_is_imm8 (byte_offset));
860 ARM_LDRSB_IMM (code, ARMREG_IP, MONO_ARCH_VTABLE_REG, byte_offset);
861 imm8 = mono_arm_is_rotated_imm8 (bitmask, &rot_amount);
862 g_assert (imm8 >= 0);
863 ARM_AND_REG_IMM (code, ARMREG_IP, ARMREG_IP, imm8, rot_amount);
864 ARM_CMP_REG_IMM (code, ARMREG_IP, 0, 0);
866 ARM_B_COND (code, ARMCOND_EQ, 0);
868 /* Initialized case */
869 ARM_MOV_REG_REG (code, ARMREG_PC, ARMREG_LR);
871 /* Uninitialized case */
872 arm_patch (jump, code);
875 ji = mono_patch_info_list_prepend (ji, code - buf, MONO_PATCH_INFO_JIT_ICALL_ADDR, "specific_trampoline_generic_class_init");
876 ARM_LDR_IMM (code, ARMREG_R1, ARMREG_PC, 0);
878 *(gpointer*)code = NULL;
880 ARM_LDR_REG_REG (code, ARMREG_PC, ARMREG_PC, ARMREG_R1);
882 #ifdef USE_JUMP_TABLES
883 gpointer *jte = mono_jumptable_add_entry ();
885 tramp = mono_arch_create_specific_trampoline (NULL, MONO_TRAMPOLINE_GENERIC_CLASS_INIT, mono_get_root_domain (), &code_len);
887 /* Jump to the actual trampoline */
888 #ifdef USE_JUMP_TABLES
889 code = mono_arm_load_jumptable_entry (code, jte, ARMREG_R1);
891 code = emit_bx (code, ARMREG_R1);
893 ARM_LDR_IMM (code, ARMREG_R1, ARMREG_PC, 0); /* temp reg */
894 code = emit_bx (code, ARMREG_R1);
895 *(gpointer*)code = tramp;
900 mono_arch_flush_icache (buf, code - buf);
901 mono_profiler_code_buffer_new (buf, code - buf, MONO_PROFILER_CODE_BUFFER_HELPER, NULL);
903 g_assert (code - buf <= tramp_size);
906 *info = mono_tramp_info_create ("generic_class_init_trampoline", buf, code - buf, ji, unwind_ops);
/*
 * handler_block_trampoline_helper:
 * Returns the saved handler-block return address from the current
 * thread's JIT TLS data; called from the handler-block trampoline.
 */
912 handler_block_trampoline_helper (gpointer *ptr)
914 MonoJitTlsData *jit_tls = mono_native_tls_get_value (mono_jit_tls_id);
915 return jit_tls->handler_block_return_address;
/*
 * mono_arch_create_handler_block_trampoline:
 * Emits the trampoline that restores the call chain of a handler block
 * and then enters the HANDLER_BLOCK_GUARD trampoline.
 */
919 mono_arch_create_handler_block_trampoline (MonoTrampInfo **info, gboolean aot)
924 MonoJumpInfo *ji = NULL;
925 GSList *unwind_ops = NULL;
929 code = buf = mono_global_codeman_reserve (tramp_size);
931 tramp = mono_arch_create_specific_trampoline (NULL, MONO_TRAMPOLINE_HANDLER_BLOCK_GUARD, NULL, NULL);
934 This trampoline restore the call chain of the handler block then jumps into the code that deals with it.
938 * We are in a method frame after the call emitted by OP_CALL_HANDLER.
940 /* Obtain jit_tls->handler_block_return_address */
941 ARM_LDR_IMM (code, ARMREG_R0, ARMREG_PC, 0);
/* Inline constant: address of the C helper above. */
943 *(gpointer*)code = handler_block_trampoline_helper;
946 /* Set it as the return address so the trampoline will return to it */
947 ARM_MOV_REG_REG (code, ARMREG_LR, ARMREG_R0);
949 /* Call the trampoline */
950 ARM_LDR_IMM (code, ARMREG_R0, ARMREG_PC, 0);
951 code = emit_bx (code, ARMREG_R0);
952 *(gpointer*)code = tramp;
955 mono_arch_flush_icache (buf, code - buf);
956 mono_profiler_code_buffer_new (buf, code - buf, MONO_PROFILER_CODE_BUFFER_HELPER, NULL);
957 g_assert (code - buf <= tramp_size);
960 *info = mono_tramp_info_create ("handler_block_trampoline", buf, code - buf, ji, unwind_ops);
/*
 * DISABLE_JIT stubs: with the JIT compiled out, none of the trampoline
 * factories may be reached at runtime; each aborts via
 * g_assert_not_reached ().
 */
968 mono_arch_create_generic_trampoline (MonoTrampolineType tramp_type, MonoTrampInfo **info, gboolean aot)
970 g_assert_not_reached ();
975 mono_arch_create_specific_trampoline (gpointer arg1, MonoTrampolineType tramp_type, MonoDomain *domain, guint32 *code_len)
977 g_assert_not_reached ();
982 mono_arch_get_unbox_trampoline (MonoMethod *m, gpointer addr)
984 g_assert_not_reached ();
989 mono_arch_get_static_rgctx_trampoline (MonoMethod *m, MonoMethodRuntimeGenericContext *mrgctx, gpointer addr)
991 g_assert_not_reached ();
996 mono_arch_create_rgctx_lazy_fetch_trampoline (guint32 slot, MonoTrampInfo **info, gboolean aot)
998 g_assert_not_reached ();
1003 mono_arch_create_generic_class_init_trampoline (MonoTrampInfo **info, gboolean aot)
1005 g_assert_not_reached ();
1010 mono_arch_get_nullified_class_init_trampoline (MonoTrampInfo **info)
1012 g_assert_not_reached ();
1017 mono_arch_create_handler_block_trampoline (MonoTrampInfo **info, gboolean aot)
1019 g_assert_not_reached ();
1023 #endif /* DISABLE_JIT */
/*
 * mono_arch_get_call_target:
 * Decodes the B/BL instruction immediately before CODE and returns the
 * branch target address it points to.
 */
1026 mono_arch_get_call_target (guint8 *code)
1028 guint32 ins = ((guint32*)(gpointer)code) [-1];
1031 /* Should be a 'bl' or a 'b' */
1032 if (((ins >> 25) & 0x7) == 0x5) {
1034 /* Should be a 'bl' */
1035 if ((((ins >> 25) & 0x7) == 0x5) && (((ins >> 24) & 0x1) == 0x1)) {
/* Sign-extend the 24-bit displacement via shift-left/shift-right. */
1037 gint32 disp = ((((gint32)ins) & 0xffffff) << 8) >> 8;
/* -4: back to the branch insn; +8: ARM PC-read offset. */
1038 guint8 *target = code - 4 + 8 + (disp * 4);
/*
 * mono_arch_get_plt_info_offset:
 * Reads the PLT info offset embedded in PLT_ENTRY.
 */
1047 mono_arch_get_plt_info_offset (guint8 *plt_entry, mgreg_t *regs, guint8 *code)
1049 /* The offset is stored as the 4th word of the plt entry */
1050 return ((guint32*)plt_entry) [3];
1054 * Return the address of the PLT entry called by the thumb code CODE.
/* Decodes the Thumb-2 BL immediately before CODE (T1/T2 halfwords) per
 * the ARM ARM BL encoding T1: imm32 from S, imm10, imm11, J1, J2. */
1057 mono_arm_get_thumb_plt_entry (guint8 *code)
1059 int s, j1, j2, imm10, imm11, i1, i2, imm32;
1064 /* code should be right after a BL */
/* Clear the Thumb bit, then align down to a word boundary. */
1065 code = (guint8*)((mgreg_t)code & ~1);
1066 base = (guint8*)((mgreg_t)code & ~3);
1068 t1 = ((guint16*)bl) [0];
1069 t2 = ((guint16*)bl) [1];
/* 0x1e == the fixed opcode bits of the first BL halfword. */
1071 g_assert ((t1 >> 11) == 0x1e);
1073 s = (t1 >> 10) & 0x1;
1074 imm10 = (t1 >> 0) & 0x3ff;
1075 j1 = (t2 >> 13) & 0x1;
1076 j2 = (t2 >> 11) & 0x1;
/* I1 = NOT(J1 XOR S), I2 = NOT(J2 XOR S), per the architecture manual. */
1079 i1 = (s ^ j1) ? 0 : 1;
1080 i2 = (s ^ j2) ? 0 : 1;
1082 imm32 = (imm11 << 1) | (imm10 << 12) | (i2 << 22) | (i1 << 23);
1086 target = code + imm32;
1088 /* target now points to the thumb plt entry */
1089 /* ldr.w r12, [pc, #8] */
1090 g_assert (((guint16*)target) [0] == 0xf8df);
1091 g_assert (((guint16*)target) [1] == 0xc008);
1094 * The PLT info offset is at offset 16, but mono_arch_get_plt_entry_offset () returns
1095 * the 3rd word, so compensate by returning a different value.
1105 * mono_arch_get_gsharedvt_arg_trampoline:
1107 * See tramp-x86.c for documentation.
/* Emits a stub that loads ARG into LR and jumps to ADDR; both values
 * live in the inline constant pool after the code. */
1110 mono_arch_get_gsharedvt_arg_trampoline (MonoDomain *domain, gpointer arg, gpointer addr)
1114 gpointer *constants;
1118 buf = code = mono_domain_code_reserve (domain, buf_len);
1120 /* Similar to the specialized trampoline code */
1121 ARM_PUSH (code, (1 << ARMREG_R0) | (1 << ARMREG_R1) | (1 << ARMREG_R2) | (1 << ARMREG_R3) | (1 << ARMREG_LR));
1122 ARM_LDR_IMM (code, ARMREG_IP, ARMREG_PC, 8);
1123 /* arg is passed in LR */
1124 ARM_LDR_IMM (code, ARMREG_LR, ARMREG_PC, 0);
1125 code = emit_bx (code, ARMREG_IP);
1126 constants = (gpointer*)code;
1127 constants [0] = arg;
1128 constants [1] = addr;
1131 g_assert ((code - buf) <= buf_len);
1133 nacl_domain_code_validate (domain, &buf, buf_len, &code);
1134 mono_arch_flush_icache (buf, code - buf);
1135 mono_profiler_code_buffer_new (buf, code - buf, MONO_PROFILER_CODE_BUFFER_GENERICS_TRAMPOLINE, NULL);
/* Stub used when gsharedvt support is compiled out. */
1143 mono_arch_get_gsharedvt_arg_trampoline (MonoDomain *domain, gpointer arg, gpointer addr)
1145 g_assert_not_reached ();
/* When enabled, the gsharedvt implementation lives out of tree. */
1151 #if defined(ENABLE_GSHAREDVT)
1153 #include "../../../mono-extensions/mono/mini/tramp-arm-gsharedvt.c"
/* Fallback stubs when the extension file is not available. */
1158 mono_arm_start_gsharedvt_call (GSharedVtCallInfo *info, gpointer *caller, gpointer *callee, gpointer mrgctx_reg)
1160 g_assert_not_reached ();
1165 mono_arch_get_gsharedvt_trampoline (MonoTrampInfo **info, gboolean aot)
1172 #endif /* !MONOTOUCH */