2009-02-09 Zoltan Varga <vargaz@gmail.com>
+ * aot-compiler.c (emit_trampolines): Add throw_pending_exception/
+ generic_class_init trampolines.
+ (add_generic_class): Extract some code from add_generic_instances () into a
+ separate function so it can be called from other places too.
+ (compile_method): Call add_generic_class () for the classes of inflated methods
+ referenced by the method.
+ (can_encode_patch): Allow references to generic parameters.
+
+	* aot-runtime.c: Add support for the patches required by the new trampolines.
+
+ * exceptions-amd64.c (mono_arch_get_throw_pending_exception_full): Add full-aot
+ support.
+
+ * tramp-amd64.c (mono_arch_create_generic_class_init_trampoline_full): Add
+ full-aot support.
+
* exceptions-amd64.c (mono_arch_get_throw_pending_exception_full): Rename
this from get_throw_pending_exception, make the signature full aot compatible.
else
g_assert_not_reached ();
break;
- case MONO_WRAPPER_STATIC_RGCTX_INVOKE: {
+ case MONO_WRAPPER_STATIC_RGCTX_INVOKE:
+ case MONO_WRAPPER_SYNCHRONIZED: {
MonoMethod *m;
m = mono_marshal_method_from_wrapper (method);
return FALSE;
}
+/*
+ * add_generic_class:
+ *
+ *   Add all the methods of the generic class KLASS as extra methods of the
+ * AOT image. Classes which are still open (unbound type variables) are
+ * skipped, since no code can be generated for them.
+ */
+static void
+add_generic_class (MonoAotCompile *acfg, MonoClass *klass)
+{
+	MonoMethod *method;
+	gpointer iter;
+
+	mono_class_init (klass);
+
+	/* Skip open generic instances */
+	if (klass->generic_class && klass->generic_class->context.class_inst->is_open)
+		return;
+
+	if (has_type_vars (klass))
+		return;
+
+	/* Only generic instances and arrays are handled here */
+	if (!klass->generic_class && !klass->rank)
+		return;
+
+	/*
+	 * Add rgctx wrappers for cctors since those are called by the runtime, so
+	 * there is no methodspec for them. This is needed even for shared classes,
+	 * since rgctx wrappers belong to inflated methods.
+	 */
+	method = mono_class_get_cctor (klass);
+	if (method)
+		add_extra_method (acfg, mono_marshal_get_static_rgctx_invoke (method));
+
+	iter = NULL;
+	while ((method = mono_class_get_methods (klass, &iter))) {
+		if (mono_method_is_generic_sharable_impl (method, FALSE))
+			/* Already added as a shared method */
+			continue;
+
+		if (method->is_generic)
+			/* FIXME: */
+			continue;
+
+		/*
+		 * FIXME: Instances which are referenced by these methods are not added,
+		 * for example Array.Resize<int> for List<int>.Add ().
+		 */
+		add_extra_method (acfg, method);
+	}
+}
+
/*
* add_generic_instances:
*
for (i = 0; i < acfg->image->tables [MONO_TABLE_TYPESPEC].rows; ++i) {
MonoClass *klass;
- gpointer iter;
token = MONO_TOKEN_TYPE_SPEC | (i + 1);
klass = mono_class_get (acfg->image, token);
if (!klass)
continue;
- mono_class_init (klass);
-
- if (klass->generic_class && klass->generic_class->context.class_inst->is_open)
- continue;
-
- if (has_type_vars (klass))
- continue;
-
- if (!klass->generic_class && !klass->rank)
- continue;
-
- /*
- * Add rgctx wrappers for cctors since those are called by the runtime, so
- * there is no methodspec for them. This is needed even for shared classes,
- * since rgctx wrappers belong to inflated methods.
- */
- method = mono_class_get_cctor (klass);
- if (method)
- add_extra_method (acfg, mono_marshal_get_static_rgctx_invoke (method));
- iter = NULL;
- while ((method = mono_class_get_methods (klass, &iter))) {
- if (mono_method_is_generic_sharable_impl (method, FALSE))
- /* Already added */
- continue;
-
- if (method->is_generic)
- /* FIXME: */
- continue;
-
- /*
- * FIXME: Instances which are referenced by these methods are not added,
- * for example Array.Resize<int> for List<int>.Add ().
- */
- add_extra_method (acfg, method);
- }
+ add_generic_class (acfg, klass);
}
}
emit_named_code (acfg, "monitor_exit_trampoline", code, code_size, acfg->got_offset, ji);
#endif
+#if defined(__x86_64__)
+ code = mono_arch_create_generic_class_init_trampoline_full (&code_size, &ji, TRUE);
+ emit_named_code (acfg, "generic_class_init_trampoline", code, code_size, acfg->got_offset, ji);
+#endif
+
/* Emit the exception related code pieces */
code = mono_arch_get_restore_context_full (&code_size, &ji, TRUE);
emit_named_code (acfg, "restore_context", code, code_size, acfg->got_offset, ji);
code = mono_arch_get_throw_corlib_exception_full (&code_size, &ji, TRUE);
emit_named_code (acfg, "throw_corlib_exception", code, code_size, acfg->got_offset, ji);
+#if defined(__x86_64__)
+ code = mono_arch_get_throw_pending_exception_full (&code_size, &ji, TRUE);
+ emit_named_code (acfg, "throw_pending_exception", code, code_size, acfg->got_offset, ji);
+#endif
+
#if defined(__x86_64__) || defined(__arm__)
for (i = 0; i < 128; ++i) {
int offset;
case MONO_WRAPPER_UNKNOWN:
break;
default:
- //printf ("Skip (wrapper call): %s %d -> %s\n", mono_method_full_name (method, TRUE), patch_info->type, mono_method_full_name (patch_info->data.method, TRUE));
+ //printf ("Skip (wrapper call): %d -> %s\n", patch_info->type, mono_method_full_name (patch_info->data.method, TRUE));
return FALSE;
}
} else {
case MONO_PATCH_INFO_DELEGATE_TRAMPOLINE:
case MONO_PATCH_INFO_CLASS:
case MONO_PATCH_INFO_IID:
- case MONO_PATCH_INFO_ADJUSTED_IID:
- if (!patch_info->data.klass->type_token)
- if (!patch_info->data.klass->element_class->type_token && !(patch_info->data.klass->element_class->rank && patch_info->data.klass->element_class->element_class->type_token))
+ case MONO_PATCH_INFO_ADJUSTED_IID: {
+ MonoClass *klass = patch_info->data.klass;
+
+ if (!klass->type_token)
+ if (!klass->element_class->type_token && !(klass->element_class->rank && klass->element_class->element_class->type_token) && (klass->byval_arg.type != MONO_TYPE_VAR) && (klass->byval_arg.type != MONO_TYPE_MVAR)) {
+ //printf ("Skip: %s\n", mono_type_full_name (&patch_info->data.klass->byval_arg));
return FALSE;
+ }
break;
+ }
case MONO_PATCH_INFO_RGCTX_FETCH: {
MonoJumpInfoRgctxEntry *entry = patch_info->data.rgctx_entry;
return TRUE;
}
+static void
+add_generic_class (MonoAotCompile *acfg, MonoClass *klass);
+
/*
* compile_method:
*
return;
}
+ //printf ("X: %s\n", mono_method_full_name (method, TRUE));
+
/* Adds generic instances referenced by this method */
for (patch_info = cfg->patch_info; patch_info; patch_info = patch_info->next) {
switch (patch_info->type) {
mono_method_is_generic_sharable_impl (m, FALSE)) &&
!method_has_type_vars (m))
add_extra_method (acfg, m);
+ add_generic_class (acfg, m->klass);
}
break;
}
method = mono_get_method (acfg->image, token, NULL);
+ if (!method) {
+ printf ("Failed to load method 0x%x from '%s'.\n", token, image->name);
+ exit (1);
+ }
+
/* Load all methods eagerly to skip the slower lazy loading code */
mono_class_setup_methods (method->klass);
#include <mono/metadata/marshal.h>
#include <mono/metadata/gc-internal.h>
#include <mono/metadata/monitor.h>
+#include <mono/metadata/threads-types.h>
#include <mono/utils/mono-logger.h>
#include "mono/utils/mono-compiler.h"
} else if (!strcmp (ji->data.name, "mono_amd64_throw_exception")) {
target = mono_amd64_throw_exception;
#endif
+#ifdef __x86_64__
+ } else if (!strcmp (ji->data.name, "mono_amd64_get_original_ip")) {
+ target = mono_amd64_get_original_ip;
+#endif
#ifdef __arm__
} else if (!strcmp (ji->data.name, "mono_arm_throw_exception")) {
target = mono_arm_throw_exception;
target = mono_create_specific_trampoline (NULL, MONO_TRAMPOLINE_MONITOR_ENTER, mono_get_root_domain (), NULL);
} else if (!strcmp (ji->data.name, "specific_trampoline_monitor_exit")) {
target = mono_create_specific_trampoline (NULL, MONO_TRAMPOLINE_MONITOR_EXIT, mono_get_root_domain (), NULL);
+ } else if (!strcmp (ji->data.name, "specific_trampoline_generic_class_init")) {
+ target = mono_create_specific_trampoline (NULL, MONO_TRAMPOLINE_GENERIC_CLASS_INIT, mono_get_root_domain (), NULL);
+ } else if (!strcmp (ji->data.name, "mono_thread_get_and_clear_pending_exception")) {
+ target = mono_thread_get_and_clear_pending_exception;
} else {
fprintf (stderr, "Unknown relocation '%s'\n", ji->data.name);
g_assert_not_reached ();
#endif
}
-static guint64
-get_original_ip (void)
+guint64
+mono_amd64_get_original_ip (void)
{
MonoLMF *lmf = mono_get_lmf ();
*ji = NULL;
- g_assert (!aot);
-
start = code = mono_global_codeman_reserve (128);
/* We are in the frame of a managed method after a call */
amd64_alu_reg_imm (code, X86_SUB, AMD64_RSP, 8);
/* Obtain the pending exception */
- amd64_mov_reg_imm (code, AMD64_R11, mono_thread_get_and_clear_pending_exception);
+ if (aot) {
+ *ji = mono_patch_info_list_prepend (*ji, code - start, MONO_PATCH_INFO_JIT_ICALL_ADDR, "mono_thread_get_and_clear_pending_exception");
+ amd64_mov_reg_membase (code, AMD64_R11, AMD64_RIP, 0, 8);
+ } else {
+ amd64_mov_reg_imm (code, AMD64_R11, mono_thread_get_and_clear_pending_exception);
+ }
amd64_call_reg (code, AMD64_R11);
/* Check if it is NULL, and branch */
amd64_alu_reg_imm (code, X86_SUB, AMD64_RSP, 8);
/* Obtain the original ip and clear the flag in previous_lmf */
- amd64_mov_reg_imm (code, AMD64_R11, get_original_ip);
+ if (aot) {
+ *ji = mono_patch_info_list_prepend (*ji, code - start, MONO_PATCH_INFO_JIT_ICALL_ADDR, "mono_amd64_get_original_ip");
+ amd64_mov_reg_membase (code, AMD64_R11, AMD64_RIP, 0, 8);
+ } else {
+ amd64_mov_reg_imm (code, AMD64_R11, mono_amd64_get_original_ip);
+ }
amd64_call_reg (code, AMD64_R11);
/* Load exc */
amd64_push_reg (code, AMD64_RAX);
/* Call the throw trampoline */
- throw_trampoline = mono_get_throw_exception ();
- amd64_mov_reg_imm (code, AMD64_R11, throw_trampoline);
+ if (aot) {
+ *ji = mono_patch_info_list_prepend (*ji, code - start, MONO_PATCH_INFO_JIT_ICALL_ADDR, "mono_amd64_throw_exception");
+ amd64_mov_reg_membase (code, AMD64_R11, AMD64_RIP, 0, 8);
+ } else {
+ throw_trampoline = mono_get_throw_exception ();
+ amd64_mov_reg_imm (code, AMD64_R11, throw_trampoline);
+ }
/* We use a jump instead of a call so we can push the original ip on the stack */
amd64_jump_reg (code, AMD64_R11);
mono_amd64_patch (br [0], code);
/* Obtain the original ip and clear the flag in previous_lmf */
- amd64_mov_reg_imm (code, AMD64_R11, get_original_ip);
+ if (aot) {
+ *ji = mono_patch_info_list_prepend (*ji, code - start, MONO_PATCH_INFO_JIT_ICALL_ADDR, "mono_amd64_get_original_ip");
+ amd64_mov_reg_membase (code, AMD64_R11, AMD64_RIP, 0, 8);
+ } else {
+ amd64_mov_reg_imm (code, AMD64_R11, mono_amd64_get_original_ip);
+ }
amd64_call_reg (code, AMD64_R11);
amd64_mov_reg_reg (code, AMD64_R11, AMD64_RAX, 8);
guint32 code_size;
MonoJumpInfo *ji;
- /* Call this to avoid initialization races */
- throw_pending_exception = mono_arch_get_throw_pending_exception_full (&code_size, &ji, FALSE);
+ if (mono_aot_only) {
+ throw_pending_exception = mono_aot_get_named_code ("throw_pending_exception");
+ } else {
+ /* Call this to avoid initialization races */
+ throw_pending_exception = mono_arch_get_throw_pending_exception_full (&code_size, &ji, FALSE);
+ }
}
#ifdef PLATFORM_WIN32
guint64 rax, guint64 rcx, guint64 rdx,
guint64 rethrow);
+guint64
+mono_amd64_get_original_ip (void) MONO_INTERNAL;
+
guint8*
mono_amd64_emit_tls_get (guint8* code, int dreg, int tls_offset) MONO_INTERNAL;
mono_trampolines_lock ();
- if (!code)
- code = mono_arch_create_generic_class_init_trampoline ();
+ if (!code) {
+ if (mono_aot_only)
+ code = mono_aot_get_named_code ("generic_class_init_trampoline");
+ else
+ code = mono_arch_create_generic_class_init_trampoline ();
+ }
mono_trampolines_unlock ();
gpointer mono_arch_create_monitor_exit_trampoline (void) MONO_INTERNAL;
gpointer mono_arch_create_monitor_enter_trampoline_full (guint32 *code_size, MonoJumpInfo **ji, gboolean aot) MONO_INTERNAL;
gpointer mono_arch_create_monitor_exit_trampoline_full (guint32 *code_size, MonoJumpInfo **ji, gboolean aot) MONO_INTERNAL;
+gpointer mono_arch_create_generic_class_init_trampoline_full (guint32 *code_size, MonoJumpInfo **ji, gboolean aot) MONO_INTERNAL;
GList *mono_arch_get_allocatable_int_vars (MonoCompile *cfg) MONO_INTERNAL;
GList *mono_arch_get_global_int_regs (MonoCompile *cfg) MONO_INTERNAL;
GList *mono_arch_get_global_fp_regs (MonoCompile *cfg) MONO_INTERNAL;
gpointer
mono_arch_create_generic_class_init_trampoline (void)
+{
+	guint32 code_size;
+	MonoJumpInfo *ji;
+
+	/* JIT (non-AOT) entry point: discard the patch info */
+	return mono_arch_create_generic_class_init_trampoline_full (&code_size, &ji, FALSE);
+}
+
+/*
+ * mono_arch_create_generic_class_init_trampoline_full:
+ *
+ *   Create the generic class init trampoline. If AOT is TRUE, the jump to the
+ * specific trampoline is emitted as an indirect jump through a slot filled in
+ * at load time using the returned patch info in JI. CODE_SIZE is set to the
+ * size of the emitted code.
+ */
+gpointer
+mono_arch_create_generic_class_init_trampoline_full (guint32 *code_size, MonoJumpInfo **ji, gboolean aot)
{
	guint8 *tramp;
	guint8 *code, *buf;
	guint8 *jump;
	int tramp_size;
+	*ji = NULL;
+
	tramp_size = 64;
	code = buf = mono_global_codeman_reserve (tramp_size);
	x86_patch (jump, code);
-	tramp = mono_arch_create_specific_trampoline (NULL, MONO_TRAMPOLINE_GENERIC_CLASS_INIT, mono_get_root_domain (), NULL);
+	if (aot) {
+		/*
+		 * The offset must be relative to the start of the buffer, and the
+		 * instructions must be emitted through the cursor CODE, not BUF,
+		 * so the size/flush computations below include them.
+		 */
+		*ji = mono_patch_info_list_prepend (*ji, code - buf, MONO_PATCH_INFO_JIT_ICALL_ADDR, "specific_trampoline_generic_class_init");
+		amd64_mov_reg_membase (code, AMD64_R11, AMD64_RIP, 0, 8);
+		amd64_jump_reg (code, AMD64_R11);
+	} else {
+		tramp = mono_arch_create_specific_trampoline (NULL, MONO_TRAMPOLINE_GENERIC_CLASS_INIT, mono_get_root_domain (), NULL);
-		/* jump to the actual trampoline */
-		amd64_jump_code (code, tramp);
+		/* jump to the actual trampoline */
+		amd64_jump_code (code, tramp);
+	}
	mono_arch_flush_icache (buf, code - buf);
	g_assert (code - buf <= tramp_size);
+	*code_size = code - buf;
+
	return buf;
}