} else {
tls->unwind_state.unwind_data [MONO_UNWIND_DATA_LMF] = mono_get_lmf ();
if (sigctx) {
+#ifdef MONO_ARCH_HAVE_SIGCTX_TO_MONOCTX
mono_arch_sigctx_to_monoctx (sigctx, &tls->unwind_state.ctx);
tls->unwind_state.valid = TRUE;
+#else
+ tls->unwind_state.valid = FALSE;
+#endif
} else if (ctx) {
memcpy (&tls->unwind_state.ctx, ctx, sizeof (MonoContext));
tls->unwind_state.valid = TRUE;
precise_pass (tls, stack_start, stack_end);
}
+#ifndef DISABLE_JIT
+
static void
mini_gc_init_gc_map (MonoCompile *cfg)
{
* before the liveness pass. We emit OP_GC_LIVENESS_DEF instructions for
* them during VZERO decomposition.
*/
- if (!pc_offsets [vmv->vreg])
- pin = TRUE;
+ if (!is_arg) {
+ if (!pc_offsets [vmv->vreg])
+ pin = TRUE;
- if (ins->backend.is_pinvoke)
- pin = TRUE;
+ if (ins->backend.is_pinvoke)
+ pin = TRUE;
+ }
if (bitmap) {
for (cindex = 0; cindex < gcfg->ncallsites; ++cindex) {
create_map (cfg);
}
+#endif /* DISABLE_JIT */
+
static void
parse_debug_options (void)
{
#else
+/*
+ * mini_gc_enable_gc_maps_for_aot:
+ *
+ *   No-op stub for the #else configuration below, where precise GC
+ * maps are not available; callers can invoke it unconditionally.
+ */
+void
+mini_gc_enable_gc_maps_for_aot (void)
+{
+}
+
/*
 * mini_gc_init:
 *
 *   Empty stub: nothing to initialize in this build configuration
 * (the #else branch where precise GC map support is compiled out).
 */
void
mini_gc_init (void)
{
}
+#ifndef DISABLE_JIT
+
static void
mini_gc_init_gc_map (MonoCompile *cfg)
{
{
}
+#endif /* DISABLE_JIT */
+
#endif
+#ifndef DISABLE_JIT
+
/*
* mini_gc_init_cfg:
*
mini_gc_init_gc_map (cfg);
}
+#endif /* DISABLE_JIT */
+
/*
* Problems with the current code:
* - the stack walk is slow