X-Git-Url: http://wien.tomnetworks.com/gitweb/?a=blobdiff_plain;f=mono%2Fmini%2Ftramp-x86.c;h=83cdee58228d06aa7ebb79d80c97424309c4b58a;hb=3dcc728b8c675da0289f63f7d3697c61bb01ddad;hp=0b871ff9a4aab1e86ddd66fb9203869293624b89;hpb=872ac7fb67029187c58679b976958eee405f7ea2;p=mono.git diff --git a/mono/mini/tramp-x86.c b/mono/mini/tramp-x86.c index 0b871ff9a4a..83cdee58228 100644 --- a/mono/mini/tramp-x86.c +++ b/mono/mini/tramp-x86.c @@ -17,6 +17,7 @@ #include #include #include +#include #include #include @@ -28,7 +29,6 @@ static guint8* nullified_class_init_trampoline; /* * mono_arch_get_unbox_trampoline: - * @gsctx: the generic sharing context * @m: method pointer * @addr: pointer to native code for @m * @@ -37,7 +37,7 @@ static guint8* nullified_class_init_trampoline; * unboxing before calling the method */ gpointer -mono_arch_get_unbox_trampoline (MonoGenericSharingContext *gsctx, MonoMethod *m, gpointer addr) +mono_arch_get_unbox_trampoline (MonoMethod *m, gpointer addr) { guint8 *code, *start; int this_pos = 4; @@ -49,6 +49,8 @@ mono_arch_get_unbox_trampoline (MonoGenericSharingContext *gsctx, MonoMethod *m, x86_jump_code (code, addr); g_assert ((code - start) < 16); + nacl_domain_code_validate (domain, &start, 16, &code); + return start; } @@ -68,6 +70,7 @@ mono_arch_get_static_rgctx_trampoline (MonoMethod *m, MonoMethodRuntimeGenericCo x86_jump_code (code, addr); g_assert ((code - start) <= buf_len); + nacl_domain_code_validate (domain, &start, buf_len, &code); mono_arch_flush_icache (start, code - start); return start; @@ -96,6 +99,8 @@ mono_arch_get_llvm_imt_trampoline (MonoDomain *domain, MonoMethod *m, int vt_off g_assert ((code - start) < buf_len); + nacl_domain_code_validate (domain, &start, buf_len, &code); + mono_arch_flush_icache (start, code - start); return start; @@ -104,6 +109,7 @@ mono_arch_get_llvm_imt_trampoline (MonoDomain *domain, MonoMethod *m, int vt_off void mono_arch_patch_callsite (guint8 *method_start, guint8 *orig_code, guint8 *addr) { +#if defined(__default_codegen__) guint8 *code; guint8 buf [8]; gboolean can_write = mono_breakpoint_clean_code (method_start, orig_code, 8, buf, sizeof (buf)); @@ -119,7 +125,7 @@ mono_arch_patch_callsite (guint8 *method_start, guint8 *orig_code, guint8 *addr) */ code -= 6; orig_code -= 6; - if ((code [1] == 0xe8)) { + if (code [1] == 0xe8) { if (can_write) { InterlockedExchange ((gint32*)(orig_code + 2), (guint)addr - ((guint)orig_code + 1) - 5); @@ -135,6 +141,23 @@ mono_arch_patch_callsite (guint8 *method_start, guint8 *orig_code, guint8 *addr) code [4], code [5], code [6]); g_assert_not_reached (); } +#elif defined(__native_client__) + /* Target must be bundle-aligned */ + g_assert (((guint32)addr & kNaClAlignmentMask) == 0); + + /* 0xe8 = call , 0xe9 = jump */ + if ((orig_code [-5] == 0xe8) || orig_code [-6] == 0xe9) { + int ret; + gint32 offset = (gint32)addr - (gint32)orig_code; + guint8 buf[sizeof(gint32)]; + *((gint32*)(buf)) = offset; + ret = nacl_dyncode_modify (orig_code - sizeof(gint32), buf, sizeof(gint32)); + g_assert (ret == 0); + } else { + printf ("Invalid trampoline sequence %p: %02x %02x %02x %02x %02x\n", orig_code, orig_code [-5], orig_code [-4], orig_code [-3], orig_code [-2], orig_code[-1]); + g_assert_not_reached (); + } +#endif } void @@ -144,17 +167,73 @@ mono_arch_patch_plt_entry (guint8 *code, gpointer *got, mgreg_t *regs, guint8 *a /* Patch the jump table entry used by the plt entry */ +#if defined(__native_client_codegen__) || defined(__native_client__) + /* for both compiler and runtime */ + /* A 
PLT entry: */ + /* mov (%ebx), %ecx */ + /* and 0xffffffe0, %ecx */ + /* jmp *%ecx */ + g_assert (code [0] == 0x8b); + g_assert (code [1] == 0x8b); + + offset = *(guint32*)(code + 2); +#elif defined(__default_codegen__) /* A PLT entry: jmp *(%ebx) */ g_assert (code [0] == 0xff); g_assert (code [1] == 0xa3); offset = *(guint32*)(code + 2); - +#endif /* __native_client_codegen__ */ if (!got) got = (gpointer*)(gsize) regs [MONO_ARCH_GOT_REG]; *(guint8**)((guint8*)got + offset) = addr; } +static gpointer +get_vcall_slot (guint8 *code, mgreg_t *regs, int *displacement) +{ + const int kBufSize = NACL_SIZE (8, 16); + guint8 buf [64]; + guint8 reg = 0; + gint32 disp = 0; + + mono_breakpoint_clean_code (NULL, code, kBufSize, buf, kBufSize); + code = buf + 8; + + *displacement = 0; + + if ((code [0] == 0xff) && ((code [1] & 0x18) == 0x10) && ((code [1] >> 6) == 2)) { + reg = code [1] & 0x07; + disp = *((gint32*)(code + 2)); +#if defined(__native_client_codegen__) || defined(__native_client__) + } else if ((code[1] == 0x83) && (code[2] == 0xe1) && (code[4] == 0xff) && + (code[5] == 0xd1) && (code[-5] == 0x8b)) { + disp = *((gint32*)(code - 3)); + reg = code[-4] & 0x07; + } else if ((code[-2] == 0x8b) && (code[1] == 0x83) && (code[4] == 0xff)) { + reg = code[-1] & 0x07; + disp = (signed char)code[0]; +#endif + } else { + g_assert_not_reached (); + return NULL; + } + + *displacement = disp; + return (gpointer)regs [reg]; +} + +static gpointer* +get_vcall_slot_addr (guint8* code, mgreg_t *regs) +{ + gpointer vt; + int displacement; + vt = get_vcall_slot (code, regs, &displacement); + if (!vt) + return NULL; + return (gpointer*)((char*)vt + displacement); +} + void mono_arch_nullify_class_init_trampoline (guint8 *code, mgreg_t *regs) { @@ -166,6 +245,7 @@ mono_arch_nullify_class_init_trampoline (guint8 *code, mgreg_t *regs) code -= 5; if (code [0] == 0xe8) { +#if defined(__default_codegen__) if (!mono_running_on_valgrind ()) { guint32 ops; /* @@ -192,6 +272,9 @@ mono_arch_nullify_class_init_trampoline (guint8 *code, mgreg_t *regs) /* Tell valgrind to recompile the patched code */ //VALGRIND_DISCARD_TRANSLATIONS (code, 8); } +#elif defined(__native_client_codegen__) + mono_arch_patch_callsite (code, code + 5, nullified_class_init_trampoline); +#endif } else if (code [0] == 0x90 || code [0] == 0xeb) { /* Already changed by another thread */ ; @@ -199,7 +282,7 @@ mono_arch_nullify_class_init_trampoline (guint8 *code, mgreg_t *regs) /* call *() -> Call made from AOT code */ gpointer *vtable_slot; - vtable_slot = mono_get_vcall_slot_addr (code + 5, regs); + vtable_slot = get_vcall_slot_addr (code + 5, regs); g_assert (vtable_slot); *vtable_slot = nullified_class_init_trampoline; @@ -443,6 +526,7 @@ mono_arch_create_generic_trampoline (MonoTrampolineType tramp_type, MonoTrampInf x86_ret (code); + nacl_global_codeman_validate (&buf, 256, &code); g_assert ((code - buf) <= 256); if (info) @@ -460,15 +544,21 @@ gpointer mono_arch_get_nullified_class_init_trampoline (MonoTrampInfo **info) { guint8 *code, *buf; + int tramp_size = NACL_SIZE (16, kNaClAlignment); - code = buf = mono_global_codeman_reserve (16); + code = buf = mono_global_codeman_reserve (tramp_size); x86_ret (code); + nacl_global_codeman_validate (&buf, tramp_size, &code); + mono_arch_flush_icache (buf, code - buf); if (info) *info = mono_tramp_info_create (g_strdup_printf ("nullified_class_init_trampoline"), buf, code - buf, NULL, NULL); + if (mono_jit_map_is_enabled ()) + mono_emit_jit_tramp (buf, code - buf, "nullified_class_init_trampoline"); + 
return buf; } @@ -481,12 +571,14 @@ mono_arch_create_specific_trampoline (gpointer arg1, MonoTrampolineType tramp_ty tramp = mono_get_trampoline_code (tramp_type); - code = buf = mono_domain_code_reserve_align (domain, TRAMPOLINE_SIZE, 4); + code = buf = mono_domain_code_reserve_align (domain, TRAMPOLINE_SIZE, NACL_SIZE (4, kNaClAlignment)); x86_push_imm (buf, arg1); x86_jump_code (buf, tramp); g_assert ((buf - code) <= TRAMPOLINE_SIZE); + nacl_domain_code_validate (domain, &code, NACL_SIZE (4, kNaClAlignment), &buf); + mono_arch_flush_icache (code, buf - code); if (code_len) @@ -522,7 +614,12 @@ mono_arch_create_rgctx_lazy_fetch_trampoline (guint32 slot, MonoTrampInfo **info index -= size - 1; } +#if defined(__default_codegen__) tramp_size = (aot ? 64 : 36) + 6 * depth; +#elif defined(__native_client_codegen__) + tramp_size = (aot ? 64 : 36) + 2 * kNaClAlignment + + 6 * (depth + kNaClAlignment); +#endif code = buf = mono_global_codeman_reserve (tramp_size); @@ -580,6 +677,7 @@ mono_arch_create_rgctx_lazy_fetch_trampoline (guint32 slot, MonoTrampInfo **info x86_jump_code (code, tramp); } + nacl_global_codeman_validate (&buf, tramp_size, &code); mono_arch_flush_icache (buf, code - buf); g_assert (code - buf <= tramp_size); @@ -590,6 +688,48 @@ mono_arch_create_rgctx_lazy_fetch_trampoline (guint32 slot, MonoTrampInfo **info return buf; } +/* + * mono_arch_create_general_rgctx_lazy_fetch_trampoline: + * + * This is a general variant of the rgctx fetch trampolines. It receives a pointer to gpointer[2] in the rgctx reg. The first entry contains the slot, the second + * the trampoline to call if the slot is not filled. + */ +gpointer +mono_arch_create_general_rgctx_lazy_fetch_trampoline (MonoTrampInfo **info, gboolean aot) +{ + guint8 *code, *buf; + int tramp_size; + MonoJumpInfo *ji = NULL; + GSList *unwind_ops = NULL; + + g_assert (aot); + + unwind_ops = mono_arch_get_cie_program (); + + tramp_size = 64; + + code = buf = mono_global_codeman_reserve (tramp_size); + + // FIXME: Currently, we always go to the slow path. 
+ + /* Load trampoline addr */ + x86_mov_reg_membase (code, X86_EAX, MONO_ARCH_RGCTX_REG, 4, 4); + /* Load mrgctx/vtable */ + x86_mov_reg_membase (code, MONO_ARCH_VTABLE_REG, X86_ESP, 4, 4); + + x86_jump_reg (code, X86_EAX); + + nacl_global_codeman_validate (&buf, tramp_size, &code); + mono_arch_flush_icache (buf, code - buf); + + g_assert (code - buf <= tramp_size); + + if (info) + *info = mono_tramp_info_create ("rgctx_fetch_trampoline_general", buf, code - buf, ji, unwind_ops); + + return buf; +} + gpointer mono_arch_create_generic_class_init_trampoline (MonoTrampInfo **info, gboolean aot) { @@ -635,6 +775,11 @@ mono_arch_create_generic_class_init_trampoline (MonoTrampInfo **info, gboolean a mono_arch_flush_icache (code, code - buf); g_assert (code - buf <= tramp_size); +#ifdef __native_client_codegen__ + g_assert (code - buf <= kNaClAlignment); +#endif + + nacl_global_codeman_validate (&buf, tramp_size, &code); if (info) *info = mono_tramp_info_create (g_strdup_printf ("generic_class_init_trampoline"), buf, code - buf, ji, unwind_ops); @@ -666,7 +811,7 @@ mono_arch_create_monitor_enter_trampoline (MonoTrampInfo **info, gboolean aot) { guint8 *tramp = mono_get_trampoline_code (MONO_TRAMPOLINE_MONITOR_ENTER); guint8 *code, *buf; - guint8 *jump_obj_null, *jump_sync_null, *jump_other_owner, *jump_cmpxchg_failed, *jump_tid; + guint8 *jump_obj_null, *jump_sync_null, *jump_other_owner, *jump_cmpxchg_failed, *jump_tid, *jump_sync_thin_hash = NULL; int tramp_size; int owner_offset, nest_offset, dummy; MonoJumpInfo *ji = NULL; @@ -680,7 +825,7 @@ mono_arch_create_monitor_enter_trampoline (MonoTrampInfo **info, gboolean aot) owner_offset = MONO_THREADS_SYNC_MEMBER_OFFSET (owner_offset); nest_offset = MONO_THREADS_SYNC_MEMBER_OFFSET (nest_offset); - tramp_size = 64; + tramp_size = NACL_SIZE (96, 128); code = buf = mono_global_codeman_reserve (tramp_size); @@ -694,8 +839,21 @@ mono_arch_create_monitor_enter_trampoline (MonoTrampInfo **info, gboolean aot) /* load obj->synchronization to ECX */ x86_mov_reg_membase (code, X86_ECX, X86_EAX, G_STRUCT_OFFSET (MonoObject, synchronisation), 4); + + if (mono_gc_is_moving ()) { + /*if bit zero is set it's a thin hash*/ + /*FIXME use testb encoding*/ + x86_test_reg_imm (code, X86_ECX, 0x01); + jump_sync_thin_hash = code; + x86_branch8 (code, X86_CC_NE, -1, 1); + + /*clear bits used by the gc*/ + x86_alu_reg_imm (code, X86_AND, X86_ECX, ~0x3); + } + /* is synchronization null? 
*/ x86_test_reg_reg (code, X86_ECX, X86_ECX); + /* if yes, jump to actual trampoline */ jump_sync_null = code; x86_branch8 (code, X86_CC_Z, -1, 1); @@ -740,6 +898,8 @@ mono_arch_create_monitor_enter_trampoline (MonoTrampInfo **info, gboolean aot) /* push obj */ x86_patch (jump_obj_null, code); + if (jump_sync_thin_hash) + x86_patch (jump_sync_thin_hash, code); x86_patch (jump_sync_null, code); x86_patch (jump_other_owner, code); x86_push_reg (code, X86_EAX); @@ -768,6 +928,8 @@ mono_arch_create_monitor_enter_trampoline (MonoTrampInfo **info, gboolean aot) mono_arch_flush_icache (buf, code - buf); g_assert (code - buf <= tramp_size); + nacl_global_codeman_validate (&buf, tramp_size, &code); + if (info) *info = mono_tramp_info_create (g_strdup_printf ("monitor_enter_trampoline"), buf, code - buf, ji, unwind_ops); @@ -779,7 +941,7 @@ mono_arch_create_monitor_exit_trampoline (MonoTrampInfo **info, gboolean aot) { guint8 *tramp = mono_get_trampoline_code (MONO_TRAMPOLINE_MONITOR_EXIT); guint8 *code, *buf; - guint8 *jump_obj_null, *jump_have_waiters, *jump_sync_null, *jump_not_owned; + guint8 *jump_obj_null, *jump_have_waiters, *jump_sync_null, *jump_not_owned, *jump_sync_thin_hash = NULL; guint8 *jump_next; int tramp_size; int owner_offset, nest_offset, entry_count_offset; @@ -796,7 +958,7 @@ mono_arch_create_monitor_exit_trampoline (MonoTrampInfo **info, gboolean aot) nest_offset = MONO_THREADS_SYNC_MEMBER_OFFSET (nest_offset); entry_count_offset = MONO_THREADS_SYNC_MEMBER_OFFSET (entry_count_offset); - tramp_size = 64; + tramp_size = NACL_SIZE (96, 128); code = buf = mono_global_codeman_reserve (tramp_size); @@ -810,6 +972,18 @@ mono_arch_create_monitor_exit_trampoline (MonoTrampInfo **info, gboolean aot) /* load obj->synchronization to ECX */ x86_mov_reg_membase (code, X86_ECX, X86_EAX, G_STRUCT_OFFSET (MonoObject, synchronisation), 4); + + if (mono_gc_is_moving ()) { + /*if bit zero is set it's a thin hash*/ + /*FIXME use testb encoding*/ + x86_test_reg_imm (code, X86_ECX, 0x01); + jump_sync_thin_hash = code; + x86_branch8 (code, X86_CC_NE, -1, 1); + + /*clear bits used by the gc*/ + x86_alu_reg_imm (code, X86_AND, X86_ECX, ~0x3); + } + /* is synchronization null? 
*/ x86_test_reg_reg (code, X86_ECX, X86_ECX); /* if yes, jump to actual trampoline */ @@ -850,6 +1024,8 @@ mono_arch_create_monitor_exit_trampoline (MonoTrampInfo **info, gboolean aot) /* push obj and jump to the actual trampoline */ x86_patch (jump_obj_null, code); + if (jump_sync_thin_hash) + x86_patch (jump_sync_thin_hash, code); x86_patch (jump_have_waiters, code); x86_patch (jump_not_owned, code); x86_patch (jump_sync_null, code); @@ -864,6 +1040,8 @@ mono_arch_create_monitor_exit_trampoline (MonoTrampInfo **info, gboolean aot) x86_jump_code (code, tramp); } + nacl_global_codeman_validate (&buf, tramp_size, &code); + mono_arch_flush_icache (buf, code - buf); g_assert (code - buf <= tramp_size); @@ -904,7 +1082,7 @@ mono_arch_invalidate_method (MonoJitInfo *ji, void *func, gpointer func_arg) static void handler_block_trampoline_helper (gpointer *ptr) { - MonoJitTlsData *jit_tls = TlsGetValue (mono_jit_tls_id); + MonoJitTlsData *jit_tls = mono_native_tls_get_value (mono_jit_tls_id); *ptr = jit_tls->handler_block_return_address; } @@ -924,18 +1102,26 @@ mono_arch_create_handler_block_trampoline (void) code = mono_x86_emit_tls_get (code, X86_EAX, mono_get_jit_tls_offset ()); x86_mov_reg_membase (code, X86_EAX, X86_EAX, G_STRUCT_OFFSET (MonoJitTlsData, handler_block_return_address), 4); /*simulate a call*/ + /*Fix stack alignment*/ + x86_alu_reg_imm (code, X86_SUB, X86_ESP, 0x8); x86_push_reg (code, X86_EAX); x86_jump_code (code, tramp); } else { /*Slow path uses a c helper*/ + x86_alu_reg_imm (code, X86_SUB, X86_ESP, 0x8); x86_push_reg (code, X86_ESP); x86_push_imm (code, tramp); x86_jump_code (code, handler_block_trampoline_helper); } + nacl_global_codeman_validate (&buf, tramp_size, &code); + mono_arch_flush_icache (buf, code - buf); g_assert (code - buf <= tramp_size); + if (mono_jit_map_is_enabled ()) + mono_emit_jit_tramp (buf, code - buf, "handler_block_trampoline"); + return buf; } @@ -955,5 +1141,46 @@ mono_arch_get_call_target (guint8 *code) guint32 mono_arch_get_plt_info_offset (guint8 *plt_entry, mgreg_t *regs, guint8 *code) { - return *(guint32*)(plt_entry + 6); + return *(guint32*)(plt_entry + NACL_SIZE (6, 12)); +} + +/* + * mono_arch_get_gsharedvt_arg_trampoline: + * + * Return a trampoline which passes ARG to the gsharedvt in/out trampoline ADDR. + */ +gpointer +mono_arch_get_gsharedvt_arg_trampoline (MonoDomain *domain, gpointer arg, gpointer addr) +{ + guint8 *code, *start; + int buf_len; + + buf_len = 10; + + start = code = mono_domain_code_reserve (domain, buf_len); + + x86_mov_reg_imm (code, X86_EAX, arg); + x86_jump_code (code, addr); + g_assert ((code - start) <= buf_len); + + nacl_domain_code_validate (domain, &start, buf_len, &code); + mono_arch_flush_icache (start, code - start); + + return start; } + +#if defined(MONOTOUCH) || defined(MONO_EXTENSIONS) + +#include "../../../mono-extensions/mono/mini/tramp-x86-gsharedvt.c" + +#else + +gpointer +mono_arch_get_gsharedvt_trampoline (MonoTrampInfo **info, gboolean aot) +{ + if (info) + *info = NULL; + return NULL; +} + +#endif /* !MONOTOUCH */
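For reference, the arithmetic behind the __native_client__ branch of mono_arch_patch_callsite in the hunk above can be sketched in a few lines of standalone C. This is a minimal illustration, not code from the patch: the 32-byte bundle size and the names BUNDLE_SIZE/BUNDLE_MASK/compute_rel32 are assumptions standing in for kNaClAlignment, kNaClAlignmentMask and the surrounding helpers, and the four displacement bytes are written with memcpy here instead of being handed to nacl_dyncode_modify as the real code does.

/*
 * Illustrative sketch only (assumes 32-byte NaCl bundles): shows the rel32
 * displacement computation used when patching a direct call site.
 */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define BUNDLE_SIZE 32u                      /* assumed NaCl bundle size      */
#define BUNDLE_MASK (BUNDLE_SIZE - 1)        /* low bits that must be zero    */

/* orig_code points just past a 5-byte "call rel32" (or "jmp rel32"); the
 * rel32 stored in the instruction's last 4 bytes is target - orig_code. */
static int32_t
compute_rel32 (const uint8_t *orig_code, const uint8_t *target)
{
	/* NaCl only allows patched control transfers to bundle-aligned targets,
	 * mirroring the g_assert on kNaClAlignmentMask in the patch. */
	assert (((uintptr_t) target & BUNDLE_MASK) == 0);
	return (int32_t) (target - orig_code);
}

int
main (void)
{
	/* Fake code region, bundle-aligned so the arithmetic is realistic. */
	static uint8_t code [4 * BUNDLE_SIZE] __attribute__ ((aligned (32)));
	uint8_t *orig_code = code + BUNDLE_SIZE;   /* address right after the call */
	uint8_t *target = code + 3 * BUNDLE_SIZE;  /* bundle-aligned call target   */

	int32_t disp = compute_rel32 (orig_code, target);

	/* The patch passes exactly these 4 bytes to the NaCl dyncode interface;
	 * here they are written directly just to show where they land. */
	memcpy (orig_code - sizeof (disp), &disp, sizeof (disp));

	printf ("rel32 written at %p: %d\n", (void *) (orig_code - 4), disp);
	return 0;
}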