+ return buf;
+}
+
+/*
+ * mono_arch_get_unbox_trampoline:
+ * @m: method pointer
+ * @addr: pointer to native code for @m
+ *
+ * When value-type methods are called through the vtable, the `this` argument
+ * must be unboxed first. This function returns a pointer to a trampoline
+ * which performs the unboxing before jumping to the method.
+ */
+gpointer
+mono_arch_get_unbox_trampoline (MonoMethod *m, gpointer addr)
+{
+ guint8 *code, *start;
+ MonoDomain *domain = mono_domain_get ();
+
+ /* 16 bytes: three 4-byte instructions plus one inline 4-byte literal. */
+ start = code = mono_domain_code_reserve (domain, 16);
+
+ /* Load the target address from the literal pool word emitted below.
+  * On ARM, reading PC yields the current instruction + 8, so the offset
+  * of 4 addresses start + 12 (assumes emit_bx emits a single instruction
+  * here, which the <= 16 assert below also relies on). */
+ ARM_LDR_IMM (code, ARMREG_IP, ARMREG_PC, 4);
+ /* Unbox: advance `this` (R0) past the MonoObject header so it points at
+  * the value-type data. */
+ ARM_ADD_REG_IMM8 (code, ARMREG_R0, ARMREG_R0, sizeof (MonoObject));
+ code = emit_bx (code, ARMREG_IP);
+ /* Inline literal: the native code address of the method. */
+ *(guint32*)code = (guint32)addr;
+ code += 4;
+ mono_arch_flush_icache (start, code - start);
+ g_assert ((code - start) <= 16);
+ /*g_print ("unbox trampoline at %d for %s:%s\n", this_pos, m->klass->name, m->name);
+ g_print ("unbox code is at %p for method at %p\n", start, addr);*/
+
+ return start;
+}
+
+gpointer
+mono_arch_get_static_rgctx_trampoline (MonoMethod *m, MonoMethodRuntimeGenericContext *mrgctx, gpointer addr)
+{
+ guint8 *code, *start;
+ int buf_len;
+
+ MonoDomain *domain = mono_domain_get ();
+
+ /* Two 4-byte instructions followed by two 4-byte literals:
+  *   +0: ldr MONO_ARCH_RGCTX_REG, [pc]  ; PC reads as instr + 8 -> literal at +8
+  *   +4: ldr pc, [pc]                   ; -> literal at +12, jumps to addr
+  *   +8: mrgctx
+  *  +12: addr
+  */
+ buf_len = 16;
+
+ start = code = mono_domain_code_reserve (domain, buf_len);
+
+ /* Load the static rgctx argument into the rgctx register. */
+ ARM_LDR_IMM (code, MONO_ARCH_RGCTX_REG, ARMREG_PC, 0);
+ /* Tail-jump to the method by loading its address straight into PC. */
+ ARM_LDR_IMM (code, ARMREG_PC, ARMREG_PC, 0);
+ *(guint32*)code = (guint32)mrgctx;
+ code += 4;
+ *(guint32*)code = (guint32)addr;
+ code += 4;
+
+ g_assert ((code - start) <= buf_len);
+
+ mono_arch_flush_icache (start, code - start);
+
+ return start;
+}
+
+/*
+ * mono_arch_create_rgctx_lazy_fetch_trampoline:
+ *
+ * Create a trampoline which fetches runtime-generic-context slot SLOT from
+ * the vtable/mrgctx passed in R0. If any link of the rgctx chain (or the
+ * slot itself) is still NULL, it falls through to the generic lazy-fetch
+ * trampoline which fills it in; otherwise the value is returned in R0.
+ */
+gpointer
+mono_arch_create_rgctx_lazy_fetch_trampoline (guint32 slot, MonoTrampInfo **info, gboolean aot)
+{
+ guint8 *tramp;
+ guint8 *code, *buf;
+ int tramp_size;
+ guint32 code_len;
+ guint8 **rgctx_null_jumps;
+ int depth, index;
+ int i, njumps;
+ gboolean mrgctx;
+ MonoJumpInfo *ji = NULL;
+ GSList *unwind_ops = NULL;
+
+ /* Decode the slot encoding and locate the (depth, index) of the slot in
+  * the chain of rgctx arrays. */
+ mrgctx = MONO_RGCTX_SLOT_IS_MRGCTX (slot);
+ index = MONO_RGCTX_SLOT_INDEX (slot);
+ if (mrgctx)
+  /* MRGCTX slots start after the inline mrgctx header. */
+  index += MONO_SIZEOF_METHOD_RUNTIME_GENERIC_CONTEXT / sizeof (gpointer);
+ for (depth = 0; ; ++depth) {
+  int size = mono_class_rgctx_get_array_size (depth, mrgctx);
+
+  /* Each array level provides size - 1 usable slots; one slot per level
+   * is reserved (the fetch below skips it via index + 1). */
+  if (index < size - 1)
+   break;
+  index -= size - 1;
+ }
+
+ /* 64 bytes of fixed code plus 16 bytes (4 instructions) per chain level. */
+ tramp_size = 64 + 16 * depth;
+
+ code = buf = mono_global_codeman_reserve (tramp_size);
+
+ /* The trampoline never touches the stack, so the CFA stays at SP. */
+ mono_add_unwind_op_def_cfa (unwind_ops, code, buf, ARMREG_SP, 0);
+
+ /* Forward branches to the slowpath, patched once its address is known:
+  * at most one per chain level, plus the initial rgctx-null check and the
+  * final slot-null check. */
+ rgctx_null_jumps = g_malloc (sizeof (guint8*) * (depth + 2));
+ njumps = 0;
+
+ /* The vtable/mrgctx is in R0 */
+ g_assert (MONO_ARCH_VTABLE_REG == ARMREG_R0);
+
+ if (mrgctx) {
+  /* get mrgctx ptr */
+  ARM_MOV_REG_REG (code, ARMREG_R1, ARMREG_R0);
+ } else {
+  /* load rgctx ptr from vtable */
+  g_assert (arm_is_imm12 (G_STRUCT_OFFSET (MonoVTable, runtime_generic_context)));
+  ARM_LDR_IMM (code, ARMREG_R1, ARMREG_R0, G_STRUCT_OFFSET (MonoVTable, runtime_generic_context));
+  /* is the rgctx ptr null? */
+  ARM_CMP_REG_IMM (code, ARMREG_R1, 0, 0);
+  /* if yes, jump to actual trampoline */
+  rgctx_null_jumps [njumps ++] = code;
+  ARM_B_COND (code, ARMCOND_EQ, 0);
+ }
+
+ /* Walk down the chain of rgctx arrays to the level holding the slot. */
+ for (i = 0; i < depth; ++i) {
+  /* load ptr to next array */
+  if (mrgctx && i == 0) {
+   /* For an mrgctx, the first array pointer sits after the header. */
+   g_assert (arm_is_imm12 (MONO_SIZEOF_METHOD_RUNTIME_GENERIC_CONTEXT));
+   ARM_LDR_IMM (code, ARMREG_R1, ARMREG_R1, MONO_SIZEOF_METHOD_RUNTIME_GENERIC_CONTEXT);
+  } else {
+   ARM_LDR_IMM (code, ARMREG_R1, ARMREG_R1, 0);
+  }
+  /* is the ptr null? */
+  ARM_CMP_REG_IMM (code, ARMREG_R1, 0, 0);
+  /* if yes, jump to actual trampoline */
+  rgctx_null_jumps [njumps ++] = code;
+  ARM_B_COND (code, ARMCOND_EQ, 0);
+ }
+
+ /* fetch slot */
+ code = mono_arm_emit_load_imm (code, ARMREG_R2, sizeof (gpointer) * (index + 1));
+ ARM_LDR_REG_REG (code, ARMREG_R1, ARMREG_R1, ARMREG_R2);
+ /* is the slot null? */
+ ARM_CMP_REG_IMM (code, ARMREG_R1, 0, 0);
+ /* if yes, jump to actual trampoline */
+ rgctx_null_jumps [njumps ++] = code;
+ ARM_B_COND (code, ARMCOND_EQ, 0);
+ /* otherwise return, result is in R1 */
+ ARM_MOV_REG_REG (code, ARMREG_R0, ARMREG_R1);
+ code = emit_bx (code, ARMREG_LR);
+
+ /* Patch all null-check branches to land on the slowpath below. */
+ g_assert (njumps <= depth + 2);
+ for (i = 0; i < njumps; ++i)
+  arm_patch (rgctx_null_jumps [i], code);
+
+ g_free (rgctx_null_jumps);
+
+ /* Slowpath */
+
+ /* The vtable/mrgctx is still in R0 */
+
+ if (aot) {
+  ji = mono_patch_info_list_prepend (ji, code - buf, MONO_PATCH_INFO_JIT_ICALL_ADDR, g_strdup_printf ("specific_trampoline_lazy_fetch_%u", slot));
+  /* R1 <- the inline literal below (PC reads as instr + 8, offset 0 ->
+   * the word after the B). NOTE(review): presumably patched at load time
+   * to a GOT displacement — confirm against the AOT loader. */
+  ARM_LDR_IMM (code, ARMREG_R1, ARMREG_PC, 0);
+  /* B with offset 0 branches to this instruction + 8, i.e. over the
+   * literal word. */
+  ARM_B (code, 0);
+  *(gpointer*)code = NULL;
+  code += 4;
+  /* Indirect jump: PC <- [PC + R1]. */
+  ARM_LDR_REG_REG (code, ARMREG_PC, ARMREG_PC, ARMREG_R1);
+ } else {
+  tramp = mono_arch_create_specific_trampoline (GUINT_TO_POINTER (slot), MONO_TRAMPOLINE_RGCTX_LAZY_FETCH, mono_get_root_domain (), &code_len);
+
+  /* Jump to the actual trampoline */
+  ARM_LDR_IMM (code, ARMREG_R1, ARMREG_PC, 0); /* temp reg */
+  code = emit_bx (code, ARMREG_R1);
+  *(gpointer*)code = tramp;
+  code += 4;
+ }
+
+ mono_arch_flush_icache (buf, code - buf);
+
+ g_assert (code - buf <= tramp_size);
+
+ if (info)
+  *info = mono_tramp_info_create (mono_get_rgctx_fetch_trampoline_name (slot), buf, code - buf, ji, unwind_ops);
+
+ return buf;
+}
+
+/* True if V fits in the signed +/-255 immediate offset range used by the
+ * LDRSB addressing mode below. */
+#define arm_is_imm8(v) ((v) > -256 && (v) < 256)
+
+/*
+ * mono_arch_create_generic_class_init_trampoline:
+ *
+ * Create a trampoline which checks the "initialized" bit of the vtable in
+ * MONO_ARCH_VTABLE_REG: if the class is already initialized it returns
+ * immediately, otherwise it jumps to the generic-class-init trampoline.
+ */
+gpointer
+mono_arch_create_generic_class_init_trampoline (MonoTrampInfo **info, gboolean aot)
+{
+ guint8 *tramp;
+ guint8 *code, *buf;
+ static int byte_offset = -1;
+ static guint8 bitmask;
+ guint8 *jump;
+ int tramp_size;
+ guint32 code_len;
+ /* Must be signed: mono_arm_is_rotated_imm8 () signals failure with -1,
+  * which the assert below checks for. (As an unsigned guint32 the
+  * `imm8 >= 0` assert was a tautology and encoding failures went
+  * undetected.) */
+ gint imm8;
+ gint rot_amount;
+ GSList *unwind_ops = NULL;
+ MonoJumpInfo *ji = NULL;
+
+ tramp_size = 64;
+
+ code = buf = mono_global_codeman_reserve (tramp_size);
+
+ /* Locate the byte and bit of MonoVTable.initialized (cached across calls). */
+ if (byte_offset < 0)
+  mono_marshal_find_bitfield_offset (MonoVTable, initialized, &byte_offset, &bitmask);
+
+ /* Load the byte containing the initialized bit and test it. */
+ g_assert (arm_is_imm8 (byte_offset));
+ ARM_LDRSB_IMM (code, ARMREG_IP, MONO_ARCH_VTABLE_REG, byte_offset);
+ imm8 = mono_arm_is_rotated_imm8 (bitmask, &rot_amount);
+ g_assert (imm8 >= 0);
+ ARM_AND_REG_IMM (code, ARMREG_IP, ARMREG_IP, imm8, rot_amount);
+ ARM_CMP_REG_IMM (code, ARMREG_IP, 0, 0);
+ jump = code;
+ ARM_B_COND (code, ARMCOND_EQ, 0);
+
+ /* Initialized case: return to the caller. */
+ ARM_MOV_REG_REG (code, ARMREG_PC, ARMREG_LR);
+
+ /* Uninitialized case */
+ arm_patch (jump, code);
+
+ if (aot) {
+  ji = mono_patch_info_list_prepend (ji, code - buf, MONO_PATCH_INFO_JIT_ICALL_ADDR, "specific_trampoline_generic_class_init");
+  /* R1 <- the inline literal below (PC reads as instr + 8); the B with
+   * offset 0 branches to this instruction + 8, skipping the literal. */
+  ARM_LDR_IMM (code, ARMREG_R1, ARMREG_PC, 0);
+  ARM_B (code, 0);
+  *(gpointer*)code = NULL;
+  code += 4;
+  /* Indirect jump: PC <- [PC + R1]. */
+  ARM_LDR_REG_REG (code, ARMREG_PC, ARMREG_PC, ARMREG_R1);
+ } else {
+  tramp = mono_arch_create_specific_trampoline (NULL, MONO_TRAMPOLINE_GENERIC_CLASS_INIT, mono_get_root_domain (), &code_len);
+
+  /* Jump to the actual trampoline */
+  ARM_LDR_IMM (code, ARMREG_R1, ARMREG_PC, 0); /* temp reg */
+  code = emit_bx (code, ARMREG_R1);
+  *(gpointer*)code = tramp;
+  code += 4;
+ }
+
+ mono_arch_flush_icache (buf, code - buf);
+
+ g_assert (code - buf <= tramp_size);
+
+ if (info)
+  /* Constant name: plain g_strdup, no printf formatting needed. */
+  *info = mono_tramp_info_create (g_strdup ("generic_class_init_trampoline"), buf, code - buf, ji, unwind_ops);
+
+ return buf;
+}
+
+#else
+
+guchar*
+mono_arch_create_generic_trampoline (MonoTrampolineType tramp_type, MonoTrampInfo **info, gboolean aot)
+{
+ g_assert_not_reached ();
+ return NULL;