+/*
+ * mono_arch_create_rgctx_lazy_fetch_trampoline:
+ *
+ *   Create an x86 trampoline which looks up rgctx slot SLOT.  The
+ * vtable/mrgctx pointer is passed as the first stack argument; if the slot
+ * is already initialized its value is returned in EAX, otherwise the code
+ * tail-jumps into the RGCTX_LAZY_FETCH specific trampoline with the
+ * vtable/mrgctx as argument.  If INFO is non-NULL, a MonoTrampInfo
+ * describing the emitted code is returned through it.  When AOT is TRUE the
+ * target is loaded through an AOT constant instead of a direct jump.
+ */
+gpointer
+mono_arch_create_rgctx_lazy_fetch_trampoline (guint32 slot, MonoTrampInfo **info, gboolean aot)
+{
+ guint8 *tramp;
+ guint8 *code, *buf;
+ guint8 **rgctx_null_jumps;
+ int tramp_size;
+ int depth, index;
+ int i;
+ gboolean mrgctx;
+ MonoJumpInfo *ji = NULL;
+ GSList *unwind_ops = NULL;
+
+ unwind_ops = mono_arch_get_cie_program ();
+
+ /* Decode the slot encoding: MRGCTX vs. vtable rgctx, and the raw index */
+ mrgctx = MONO_RGCTX_SLOT_IS_MRGCTX (slot);
+ index = MONO_RGCTX_SLOT_INDEX (slot);
+ if (mrgctx)
+ index += MONO_SIZEOF_METHOD_RUNTIME_GENERIC_CONTEXT / sizeof (gpointer);
+ /*
+ * Split INDEX into a chain DEPTH plus a final INDEX within the last level:
+ * each rgctx level is an array whose first entry links to the next level
+ * (see the offset-0 load below), so only size - 1 entries hold slots.
+ */
+ for (depth = 0; ; ++depth) {
+ int size = mono_class_rgctx_get_array_size (depth, mrgctx);
+
+ if (index < size - 1)
+ break;
+ index -= size - 1;
+ }
+
+ /* Conservative upper bound on the emitted code size (asserted below) */
+#if defined(__default_codegen__)
+ tramp_size = (aot ? 64 : 36) + 6 * depth;
+#elif defined(__native_client_codegen__)
+ tramp_size = (aot ? 64 : 36) + 2 * kNaClAlignment +
+ 6 * (depth + kNaClAlignment);
+#endif
+
+ code = buf = mono_global_codeman_reserve (tramp_size);
+
+ /* One forward branch per null check: initial rgctx, each level, the slot */
+ rgctx_null_jumps = g_malloc (sizeof (guint8*) * (depth + 2));
+
+ /* load vtable/mrgctx ptr */
+ x86_mov_reg_membase (code, X86_EAX, X86_ESP, 4, 4);
+ if (!mrgctx) {
+ /* load rgctx ptr from vtable */
+ x86_mov_reg_membase (code, X86_EAX, X86_EAX, G_STRUCT_OFFSET (MonoVTable, runtime_generic_context), 4);
+ /* is the rgctx ptr null? */
+ x86_test_reg_reg (code, X86_EAX, X86_EAX);
+ /* if yes, jump to actual trampoline */
+ rgctx_null_jumps [0] = code;
+ x86_branch8 (code, X86_CC_Z, -1, 1);
+ }
+
+ /* Walk DEPTH levels of the rgctx array chain, null-checking each one */
+ for (i = 0; i < depth; ++i) {
+ /* load ptr to next array */
+ if (mrgctx && i == 0)
+ x86_mov_reg_membase (code, X86_EAX, X86_EAX, MONO_SIZEOF_METHOD_RUNTIME_GENERIC_CONTEXT, 4);
+ else
+ x86_mov_reg_membase (code, X86_EAX, X86_EAX, 0, 4);
+ /* is the ptr null? */
+ x86_test_reg_reg (code, X86_EAX, X86_EAX);
+ /* if yes, jump to actual trampoline */
+ rgctx_null_jumps [i + 1] = code;
+ x86_branch8 (code, X86_CC_Z, -1, 1);
+ }
+
+ /* fetch slot (entry 0 of the final level is the link, hence index + 1) */
+ x86_mov_reg_membase (code, X86_EAX, X86_EAX, sizeof (gpointer) * (index + 1), 4);
+ /* is the slot null? */
+ x86_test_reg_reg (code, X86_EAX, X86_EAX);
+ /* if yes, jump to actual trampoline */
+ rgctx_null_jumps [depth + 1] = code;
+ x86_branch8 (code, X86_CC_Z, -1, 1);
+ /* otherwise return */
+ x86_ret (code);
+
+ /* All null-check branches land here; entry 0 is unused for an mrgctx
+ * (no initial vtable->rgctx load was emitted in that case) */
+ for (i = mrgctx ? 1 : 0; i <= depth + 1; ++i)
+ x86_patch (rgctx_null_jumps [i], code);
+
+ g_free (rgctx_null_jumps);
+
+ /* Reload the vtable/mrgctx argument from the stack into
+ * MONO_ARCH_VTABLE_REG for the specific trampoline (EAX was clobbered
+ * by the lookup above) */
+ x86_mov_reg_membase (code, MONO_ARCH_VTABLE_REG, X86_ESP, 4, 4);
+
+ if (aot) {
+ code = mono_arch_emit_load_aotconst (buf, code, &ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, g_strdup_printf ("specific_trampoline_lazy_fetch_%u", slot));
+ x86_jump_reg (code, X86_EAX);
+ } else {
+ tramp = mono_arch_create_specific_trampoline (GUINT_TO_POINTER (slot), MONO_TRAMPOLINE_RGCTX_LAZY_FETCH, mono_get_root_domain (), NULL);
+
+ /* jump to the actual trampoline */
+ x86_jump_code (code, tramp);
+ }
+
+ nacl_global_codeman_validate (&buf, tramp_size, &code);
+ mono_arch_flush_icache (buf, code - buf);
+
+ g_assert (code - buf <= tramp_size);
+
+ if (info)
+ *info = mono_tramp_info_create (mono_get_rgctx_fetch_trampoline_name (slot), buf, code - buf, ji, unwind_ops);
+
+ return buf;
+}
+
+/*
+ * mono_arch_create_generic_class_init_trampoline:
+ *
+ *   Create a trampoline which tests the MonoVTable "initialized" bit of the
+ * vtable passed in MONO_ARCH_VTABLE_REG and returns immediately when it is
+ * already set; otherwise it pushes the vtable (so the stack matches a
+ * specific trampoline) and jumps to the generic class-init trampoline.
+ * If INFO is non-NULL, a MonoTrampInfo describing the emitted code is
+ * returned through it.  When AOT is TRUE the target is loaded through an
+ * AOT constant instead of a direct jump.
+ */
+gpointer
+mono_arch_create_generic_class_init_trampoline (MonoTrampInfo **info, gboolean aot)
+{
+ guint8 *tramp;
+ guint8 *code, *buf;
+ static int byte_offset = -1;
+ static guint8 bitmask;
+ guint8 *jump;
+ int tramp_size;
+ GSList *unwind_ops = NULL;
+ MonoJumpInfo *ji = NULL;
+
+ tramp_size = 64;
+
+ code = buf = mono_global_codeman_reserve (tramp_size);
+
+ unwind_ops = mono_arch_get_cie_program ();
+
+ /* Cache the byte offset/bitmask of MonoVTable.initialized on first use */
+ if (byte_offset < 0)
+ mono_marshal_find_bitfield_offset (MonoVTable, initialized, &byte_offset, &bitmask);
+
+ /* Fast path: if the vtable is already initialized, just return */
+ x86_test_membase_imm (code, MONO_ARCH_VTABLE_REG, byte_offset, bitmask);
+ jump = code;
+ x86_branch8 (code, X86_CC_Z, -1, 1);
+
+ x86_ret (code);
+
+ /* Slow path: bit not set */
+ x86_patch (jump, code);
+
+ /* Push the vtable so the stack is the same as in a specific trampoline */
+ x86_push_reg (code, MONO_ARCH_VTABLE_REG);
+
+ if (aot) {
+ code = mono_arch_emit_load_aotconst (buf, code, &ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, "generic_trampoline_generic_class_init");
+ x86_jump_reg (code, X86_EAX);
+ } else {
+ tramp = mono_get_trampoline_code (MONO_TRAMPOLINE_GENERIC_CLASS_INIT);
+
+ /* jump to the actual trampoline */
+ x86_jump_code (code, tramp);
+ }
+
+ /* FIX: flush the emitted range starting at BUF (like the other trampoline
+ * creators in this file); the previous code passed CODE — the END of the
+ * emitted code — as the start address, so the written range was never
+ * flushed from the instruction cache. */
+ mono_arch_flush_icache (buf, code - buf);
+
+ g_assert (code - buf <= tramp_size);
+#ifdef __native_client_codegen__
+ g_assert (code - buf <= kNaClAlignment);
+#endif
+
+ nacl_global_codeman_validate (&buf, tramp_size, &code);
+
+ if (info)
+ *info = mono_tramp_info_create (g_strdup ("generic_class_init_trampoline"), buf, code - buf, ji, unwind_ops);
+
+ return buf;
+}
+
+#ifdef MONO_ARCH_MONITOR_OBJECT_REG
+/*
+ * The code produced by this trampoline is equivalent to this:
+ *
+ * if (obj) {
+ * if (obj->synchronisation) {
+ * if (obj->synchronisation->owner == 0) {
+ * if (cmpxch (&obj->synchronisation->owner, TID, 0) == 0)
+ * return;
+ * }
+ * if (obj->synchronisation->owner == TID) {
+ * ++obj->synchronisation->nest;
+ * return;
+ * }
+ * }
+ * }
+ * return full_monitor_enter ();
+ *
+ */
+gpointer
+mono_arch_create_monitor_enter_trampoline (MonoTrampInfo **info, gboolean aot)
+{
+ guint8 *tramp = mono_get_trampoline_code (MONO_TRAMPOLINE_MONITOR_ENTER);
+ guint8 *code, *buf;
+ guint8 *jump_obj_null, *jump_sync_null, *jump_other_owner, *jump_cmpxchg_failed, *jump_tid, *jump_sync_thin_hash = NULL;
+ int tramp_size;
+ int owner_offset, nest_offset, dummy;
+ MonoJumpInfo *ji = NULL;
+ GSList *unwind_ops = NULL;
+
+ /* The object to lock arrives in EAX (MONO_ARCH_MONITOR_OBJECT_REG) */
+ g_assert (MONO_ARCH_MONITOR_OBJECT_REG == X86_EAX);
+
+ /* Resolve MonoThreadsSync member offsets and sanity-check their sizes */
+ mono_monitor_threads_sync_members_offset (&owner_offset, &nest_offset, &dummy);
+ g_assert (MONO_THREADS_SYNC_MEMBER_SIZE (owner_offset) == sizeof (gpointer));
+ g_assert (MONO_THREADS_SYNC_MEMBER_SIZE (nest_offset) == sizeof (guint32));
+ owner_offset = MONO_THREADS_SYNC_MEMBER_OFFSET (owner_offset);
+ nest_offset = MONO_THREADS_SYNC_MEMBER_OFFSET (nest_offset);
+
+ tramp_size = NACL_SIZE (64, 128);
+
+ code = buf = mono_global_codeman_reserve (tramp_size);
+
+ /* The inline fast path needs the current thread's TID, read via TLS; when
+ * no TLS offset is known, fall through to the generic trampoline only. */
+ if (mono_thread_get_tls_offset () != -1) {
+ /* MonoObject* obj is in EAX */
+ /* is obj null? */
+ x86_test_reg_reg (code, X86_EAX, X86_EAX);
+ /* if yes, jump to actual trampoline */
+ jump_obj_null = code;
+ x86_branch8 (code, X86_CC_Z, -1, 1);
+
+ /* load obj->synchronization to ECX */
+ x86_mov_reg_membase (code, X86_ECX, X86_EAX, G_STRUCT_OFFSET (MonoObject, synchronisation), 4);
+
+ if (mono_gc_is_moving ()) {
+ /*if bit zero is set it's a thin hash*/
+ /*FIXME use testb encoding*/
+ x86_test_reg_imm (code, X86_ECX, 0x01);
+ jump_sync_thin_hash = code;
+ x86_branch8 (code, X86_CC_NE, -1, 1);
+
+ /*clear bits used by the gc*/
+ x86_alu_reg_imm (code, X86_AND, X86_ECX, ~0x3);
+ }
+
+ /* is synchronization null? */
+ x86_test_reg_reg (code, X86_ECX, X86_ECX);
+
+ /* if yes, jump to actual trampoline */
+ jump_sync_null = code;
+ x86_branch8 (code, X86_CC_Z, -1, 1);
+
+ /* load MonoInternalThread* into EDX */
+ code = mono_x86_emit_tls_get (code, X86_EDX, mono_thread_get_tls_offset ());
+ /* load TID into EDX */
+ x86_mov_reg_membase (code, X86_EDX, X86_EDX, G_STRUCT_OFFSET (MonoInternalThread, tid), 4);
+
+ /* is synchronization->owner null? */
+ x86_alu_membase_imm (code, X86_CMP, X86_ECX, owner_offset, 0);
+ /* if not, jump to next case */
+ jump_tid = code;
+ x86_branch8 (code, X86_CC_NZ, -1, 1);
+
+ /* if yes, try a compare-exchange with the TID */
+ /* free up register EAX, needed for the zero */
+ /* (this also leaves obj on the stack, which the cmpxchg-failure path
+ * below reuses as the trampoline argument) */
+ x86_push_reg (code, X86_EAX);
+ /* zero EAX */
+ x86_alu_reg_reg (code, X86_XOR, X86_EAX, X86_EAX);
+ /* compare and exchange */
+ x86_prefix (code, X86_LOCK_PREFIX);
+ x86_cmpxchg_membase_reg (code, X86_ECX, owner_offset, X86_EDX);
+ /* if not successful, jump to actual trampoline */
+ jump_cmpxchg_failed = code;
+ x86_branch8 (code, X86_CC_NZ, -1, 1);
+ /* if successful, pop and return */
+ x86_pop_reg (code, X86_EAX);
+ x86_ret (code);
+
+ /* next case: synchronization->owner is not null */
+ x86_patch (jump_tid, code);
+ /* is synchronization->owner == TID? */
+ x86_alu_membase_reg (code, X86_CMP, X86_ECX, owner_offset, X86_EDX);
+ /* if not, jump to actual trampoline */
+ jump_other_owner = code;
+ x86_branch8 (code, X86_CC_NZ, -1, 1);
+ /* if yes, increment nest */
+ x86_inc_membase (code, X86_ECX, nest_offset);
+ /* return */
+ x86_ret (code);
+
+ /* push obj */
+ x86_patch (jump_obj_null, code);
+ if (jump_sync_thin_hash)
+ x86_patch (jump_sync_thin_hash, code);
+ x86_patch (jump_sync_null, code);
+ x86_patch (jump_other_owner, code);
+ x86_push_reg (code, X86_EAX);
+ /* jump to the actual trampoline */
+ /* NB: the cmpxchg-failed path is patched AFTER the push above, because on
+ * that path obj was already pushed before the cmpxchg */
+ x86_patch (jump_cmpxchg_failed, code);
+ if (aot) {
+ /* We are calling the generic trampoline directly, the argument is pushed
+ * on the stack just like a specific trampoline.
+ */
+ code = mono_arch_emit_load_aotconst (buf, code, &ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, "generic_trampoline_monitor_enter");
+ x86_jump_reg (code, X86_EAX);
+ } else {
+ x86_jump_code (code, tramp);
+ }
+ } else {
+ /* push obj and jump to the actual trampoline */
+ x86_push_reg (code, X86_EAX);
+ if (aot) {
+ code = mono_arch_emit_load_aotconst (buf, code, &ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, "generic_trampoline_monitor_enter");
+ x86_jump_reg (code, X86_EAX);
+ } else {
+ x86_jump_code (code, tramp);
+ }
+ }
+
+ mono_arch_flush_icache (buf, code - buf);
+ g_assert (code - buf <= tramp_size);
+
+ nacl_global_codeman_validate (&buf, tramp_size, &code);
+
+ if (info)
+ *info = mono_tramp_info_create (g_strdup_printf ("monitor_enter_trampoline"), buf, code - buf, ji, unwind_ops);
+
+ return buf;
+}
+
+/*
+ * The code produced by this trampoline is equivalent to this:
+ *
+ * if (obj && obj->synchronisation && obj->synchronisation->owner == TID) {
+ *  if (obj->synchronisation->nest == 1) {
+ *   if (obj->synchronisation->entry_count == 0) {
+ *    obj->synchronisation->owner = 0;
+ *    return;
+ *   }
+ *  } else {
+ *   --obj->synchronisation->nest;
+ *   return;
+ *  }
+ * }
+ * return full_monitor_exit ();
+ */
+gpointer
+mono_arch_create_monitor_exit_trampoline (MonoTrampInfo **info, gboolean aot)
+{
+ guint8 *tramp = mono_get_trampoline_code (MONO_TRAMPOLINE_MONITOR_EXIT);
+ guint8 *code, *buf;
+ guint8 *jump_obj_null, *jump_have_waiters, *jump_sync_null, *jump_not_owned, *jump_sync_thin_hash = NULL;
+ guint8 *jump_next;
+ int tramp_size;
+ int owner_offset, nest_offset, entry_count_offset;
+ MonoJumpInfo *ji = NULL;
+ GSList *unwind_ops = NULL;
+
+ /* The object to unlock arrives in EAX (MONO_ARCH_MONITOR_OBJECT_REG) */
+ g_assert (MONO_ARCH_MONITOR_OBJECT_REG == X86_EAX);
+
+ /* Resolve MonoThreadsSync member offsets and sanity-check their sizes */
+ mono_monitor_threads_sync_members_offset (&owner_offset, &nest_offset, &entry_count_offset);
+ g_assert (MONO_THREADS_SYNC_MEMBER_SIZE (owner_offset) == sizeof (gpointer));
+ g_assert (MONO_THREADS_SYNC_MEMBER_SIZE (nest_offset) == sizeof (guint32));
+ g_assert (MONO_THREADS_SYNC_MEMBER_SIZE (entry_count_offset) == sizeof (gint32));
+ owner_offset = MONO_THREADS_SYNC_MEMBER_OFFSET (owner_offset);
+ nest_offset = MONO_THREADS_SYNC_MEMBER_OFFSET (nest_offset);
+ entry_count_offset = MONO_THREADS_SYNC_MEMBER_OFFSET (entry_count_offset);
+
+ tramp_size = NACL_SIZE (96, 128);
+
+ code = buf = mono_global_codeman_reserve (tramp_size);
+
+ /* The inline fast path needs the current thread's TID, read via TLS; when
+ * no TLS offset is known, only the generic-trampoline tail is emitted. */
+ if (mono_thread_get_tls_offset () != -1) {
+ /* MonoObject* obj is in EAX */
+ /* is obj null? */
+ x86_test_reg_reg (code, X86_EAX, X86_EAX);
+ /* if yes, jump to actual trampoline */
+ jump_obj_null = code;
+ x86_branch8 (code, X86_CC_Z, -1, 1);
+
+ /* load obj->synchronization to ECX */
+ x86_mov_reg_membase (code, X86_ECX, X86_EAX, G_STRUCT_OFFSET (MonoObject, synchronisation), 4);
+
+ if (mono_gc_is_moving ()) {
+ /*if bit zero is set it's a thin hash*/
+ /*FIXME use testb encoding*/
+ x86_test_reg_imm (code, X86_ECX, 0x01);
+ jump_sync_thin_hash = code;
+ x86_branch8 (code, X86_CC_NE, -1, 1);
+
+ /*clear bits used by the gc*/
+ x86_alu_reg_imm (code, X86_AND, X86_ECX, ~0x3);
+ }
+
+ /* is synchronization null? */
+ x86_test_reg_reg (code, X86_ECX, X86_ECX);
+ /* if yes, jump to actual trampoline */
+ jump_sync_null = code;
+ x86_branch8 (code, X86_CC_Z, -1, 1);
+
+ /* next case: synchronization is not null */
+ /* load MonoInternalThread* into EDX */
+ code = mono_x86_emit_tls_get (code, X86_EDX, mono_thread_get_tls_offset ());
+ /* load TID into EDX */
+ x86_mov_reg_membase (code, X86_EDX, X86_EDX, G_STRUCT_OFFSET (MonoInternalThread, tid), 4);
+ /* is synchronization->owner == TID */
+ x86_alu_membase_reg (code, X86_CMP, X86_ECX, owner_offset, X86_EDX);
+ /* if no, jump to actual trampoline */
+ jump_not_owned = code;
+ x86_branch8 (code, X86_CC_NZ, -1, 1);
+
+ /* next case: synchronization->owner == TID */
+ /* is synchronization->nest == 1 */
+ x86_alu_membase_imm (code, X86_CMP, X86_ECX, nest_offset, 1);
+ /* if not, jump to next case */
+ jump_next = code;
+ x86_branch8 (code, X86_CC_NZ, -1, 1);
+ /* if yes, is synchronization->entry_count zero? */
+ x86_alu_membase_imm (code, X86_CMP, X86_ECX, entry_count_offset, 0);
+ /* if not, jump to actual trampoline */
+ jump_have_waiters = code;
+ x86_branch8 (code, X86_CC_NZ, -1 , 1);
+ /* if yes, set synchronization->owner to null and return */
+ x86_mov_membase_imm (code, X86_ECX, owner_offset, 0, 4);
+ x86_ret (code);
+
+ /* next case: synchronization->nest is not 1 */
+ x86_patch (jump_next, code);
+ /* decrease synchronization->nest and return */
+ x86_dec_membase (code, X86_ECX, nest_offset);
+ x86_ret (code);
+
+ /* push obj and jump to the actual trampoline */
+ /* (the push itself is shared with the no-TLS case below) */
+ x86_patch (jump_obj_null, code);
+ if (jump_sync_thin_hash)
+ x86_patch (jump_sync_thin_hash, code);
+ x86_patch (jump_have_waiters, code);
+ x86_patch (jump_not_owned, code);
+ x86_patch (jump_sync_null, code);
+ }
+
+ /* push obj and jump to the actual trampoline */
+ x86_push_reg (code, X86_EAX);
+ if (aot) {
+ code = mono_arch_emit_load_aotconst (buf, code, &ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, "generic_trampoline_monitor_exit");
+ x86_jump_reg (code, X86_EAX);
+ } else {
+ x86_jump_code (code, tramp);
+ }
+
+ nacl_global_codeman_validate (&buf, tramp_size, &code);
+
+ mono_arch_flush_icache (buf, code - buf);
+ g_assert (code - buf <= tramp_size);
+
+ if (info)
+ *info = mono_tramp_info_create (g_strdup_printf ("monitor_exit_trampoline"), buf, code - buf, ji, unwind_ops);
+
+ return buf;
+}
+
+#else
+
+/* Stub for configurations without MONO_ARCH_MONITOR_OBJECT_REG (see the
+ * #ifdef above): the runtime must never request a fast monitor-enter
+ * trampoline here. */
+gpointer
+mono_arch_create_monitor_enter_trampoline (MonoTrampInfo **info, gboolean aot)
+{
+ g_assert_not_reached ();
+ return NULL;
+}
+
+/* Stub for configurations without MONO_ARCH_MONITOR_OBJECT_REG (see the
+ * #ifdef above): the runtime must never request a fast monitor-exit
+ * trampoline here. */
+gpointer
+mono_arch_create_monitor_exit_trampoline (MonoTrampInfo **info, gboolean aot)
+{
+ g_assert_not_reached ();
+ return NULL;
+}
+
+#endif
+