static gboolean do_dump_nursery_content = FALSE;
static gboolean enable_nursery_canaries = FALSE;
+static gboolean precleaning_enabled = TRUE;
+
#ifdef HEAVY_STATISTICS
guint64 stat_objects_alloced_degraded = 0;
guint64 stat_bytes_alloced_degraded = 0;
char *end_addr = generation == GENERATION_NURSERY ? sgen_get_nursery_end () : (char*)-1;
SgenGrayQueue *queue = ctx.queue;
+ binary_protocol_finish_gray_stack_start (sgen_timestamp (), generation);
/*
* We copied all the reachable objects. Now it's the time to copy
* the objects that were not referenced by the roots, but by the copied objects.
sgen_client_clear_togglerefs (start_addr, end_addr, ctx);
TV_GETTIME (btv);
- SGEN_LOG (2, "Finalize queue handling scan for %s generation: %ld usecs %d ephemeron rounds", generation_name (generation), TV_ELAPSED (atv, btv), ephemeron_rounds);
+ SGEN_LOG (2, "Finalize queue handling scan for %s generation: %lld usecs %d ephemeron rounds", generation_name (generation), TV_ELAPSED (atv, btv), ephemeron_rounds);
/*
* handle disappearing links
g_assert (sgen_gray_object_queue_is_empty (queue));
sgen_gray_object_queue_trim_free_list (queue);
+ binary_protocol_finish_gray_stack_end (sgen_timestamp (), generation);
}
void
ScanCopyContext ctx = CONTEXT_FROM_OBJECT_OPERATIONS (job_data->ops, sgen_workers_get_job_gray_queue (worker_data));
g_assert (concurrent_collection_in_progress);
- major_collector.scan_card_table (TRUE, ctx);
+ major_collector.scan_card_table (CARDTABLE_SCAN_MOD_UNION, ctx);
}
static void
ScanCopyContext ctx = CONTEXT_FROM_OBJECT_OPERATIONS (job_data->ops, sgen_workers_get_job_gray_queue (worker_data));
g_assert (concurrent_collection_in_progress);
- sgen_los_scan_card_table (TRUE, ctx);
+ sgen_los_scan_card_table (CARDTABLE_SCAN_MOD_UNION, ctx);
+}
+
+/*
+ * Worker job that precleans the mod union cardtable before the concurrent
+ * mark finishes: it scans both the major heap and the LOS cardtables in
+ * CARDTABLE_SCAN_MOD_UNION_PRECLEAN mode, draining into the worker's gray
+ * queue.  Enqueued as the "preclean mod union cardtable" job when
+ * precleaning_enabled is set (presumably to shrink the amount of mod-union
+ * scanning left for the finishing pause -- TODO confirm against the
+ * finish-concurrent path).  Must only run while a concurrent collection is
+ * in progress (asserted below).
+ */
+static void
+job_mod_union_preclean (void *worker_data_untyped, SgenThreadPoolJob *job)
+{
+ WorkerData *worker_data = (WorkerData *)worker_data_untyped;
+ ScanJob *job_data = (ScanJob*)job;
+ ScanCopyContext ctx = CONTEXT_FROM_OBJECT_OPERATIONS (job_data->ops, sgen_workers_get_job_gray_queue (worker_data));
+
+ g_assert (concurrent_collection_in_progress);
+
+ /* Preclean both heaps' mod union cards with the same scan context. */
+ major_collector.scan_card_table (CARDTABLE_SCAN_MOD_UNION_PRECLEAN, ctx);
+ sgen_los_scan_card_table (CARDTABLE_SCAN_MOD_UNION_PRECLEAN, ctx);
}
static void
TV_GETTIME (atv);
time_minor_pinning += TV_ELAPSED (btv, atv);
- SGEN_LOG (2, "Finding pinned pointers: %zd in %ld usecs", sgen_get_pinned_count (), TV_ELAPSED (btv, atv));
+ SGEN_LOG (2, "Finding pinned pointers: %zd in %lld usecs", sgen_get_pinned_count (), TV_ELAPSED (btv, atv));
SGEN_LOG (4, "Start scan with %zd pinned objects", sgen_get_pinned_count ());
- /*
- * FIXME: When we finish a concurrent collection we do a nursery collection first,
- * as part of which we scan the card table. Then, later, we scan the mod union
- * cardtable. We should only have to do one.
- */
sj = (ScanJob*)sgen_thread_pool_job_alloc ("scan remset", job_remembered_set_scan, sizeof (ScanJob));
sj->ops = object_ops;
sgen_workers_enqueue_job (&sj->job, FALSE);
/* we don't have complete write barrier yet, so we scan all the old generation sections */
TV_GETTIME (btv);
time_minor_scan_remsets += TV_ELAPSED (atv, btv);
- SGEN_LOG (2, "Old generation scan: %ld usecs", TV_ELAPSED (atv, btv));
+ SGEN_LOG (2, "Old generation scan: %lld usecs", TV_ELAPSED (atv, btv));
sgen_pin_stats_print_class_stats ();
- sgen_drain_gray_stack (ctx);
-
/* FIXME: Why do we do this at this specific, seemingly random, point? */
sgen_client_collecting_minor (&fin_ready_queue, &critical_fin_queue);
sgen_client_binary_protocol_reclaim_end (GENERATION_NURSERY);
TV_GETTIME (btv);
time_minor_fragment_creation += TV_ELAPSED (atv, btv);
- SGEN_LOG (2, "Fragment creation: %ld usecs, %lu bytes available", TV_ELAPSED (atv, btv), (unsigned long)fragment_total);
+ SGEN_LOG (2, "Fragment creation: %lld usecs, %lu bytes available", TV_ELAPSED (atv, btv), (unsigned long)fragment_total);
if (consistency_check_at_minor_collection)
sgen_check_major_refs ();
TV_GETTIME (btv);
time_major_pinning += TV_ELAPSED (atv, btv);
- SGEN_LOG (2, "Finding pinned pointers: %zd in %ld usecs", sgen_get_pinned_count (), TV_ELAPSED (atv, btv));
+ SGEN_LOG (2, "Finding pinned pointers: %zd in %lld usecs", sgen_get_pinned_count (), TV_ELAPSED (atv, btv));
SGEN_LOG (4, "Start scan with %zd pinned objects", sgen_get_pinned_count ());
major_collector.init_to_space ();
+ SGEN_ASSERT (0, sgen_workers_all_done (), "Why are the workers not done when we start or finish a major collection?");
/*
* The concurrent collector doesn't move objects, neither on
* the major heap nor in the nursery, so we can mark even
* collector we start the workers after pinning.
*/
if (mode == COPY_OR_MARK_FROM_ROOTS_START_CONCURRENT) {
- SGEN_ASSERT (0, sgen_workers_all_done (), "Why are the workers not done when we start or finish a major collection?");
- sgen_workers_start_all_workers (object_ops);
+ if (precleaning_enabled) {
+ ScanJob *sj;
+ /* Mod union preclean job */
+ sj = (ScanJob*)sgen_thread_pool_job_alloc ("preclean mod union cardtable", job_mod_union_preclean, sizeof (ScanJob));
+ sj->ops = object_ops;
+ sgen_workers_start_all_workers (object_ops, &sj->job);
+ } else {
+ sgen_workers_start_all_workers (object_ops, NULL);
+ }
gray_queue_enable_redirect (WORKERS_DISTRIBUTE_GRAY_QUEUE);
} else if (mode == COPY_OR_MARK_FROM_ROOTS_FINISH_CONCURRENT) {
if (sgen_workers_have_idle_work ()) {
- sgen_workers_start_all_workers (object_ops);
+ sgen_workers_start_all_workers (object_ops, NULL);
sgen_workers_join ();
}
}
time_major_fragment_creation += TV_ELAPSED (atv, btv);
binary_protocol_sweep_begin (GENERATION_OLD, !major_collector.sweeps_lazily);
+ sgen_memgov_major_pre_sweep ();
TV_GETTIME (atv);
time_major_free_bigobjs += TV_ELAPSED (btv, atv);
* LOCKING: The GC lock MUST be held.
*/
void
-sgen_ensure_free_space (size_t size)
+sgen_ensure_free_space (size_t size, int generation)
{
int generation_to_collect = -1;
const char *reason = NULL;
- if (size > SGEN_MAX_SMALL_OBJ_SIZE) {
+ if (generation == GENERATION_OLD) {
if (sgen_need_major_collection (size)) {
reason = "LOS overflow";
generation_to_collect = GENERATION_OLD;
return;
case -1:
/* being inited by another thread */
- g_usleep (1000);
+ mono_thread_info_usleep (1000);
break;
case 0:
/* we will init it */
continue;
}
+ if (!strcmp (opt, "precleaning")) {
+ precleaning_enabled = TRUE;
+ continue;
+ }
+ if (!strcmp (opt, "no-precleaning")) {
+ precleaning_enabled = FALSE;
+ continue;
+ }
+
if (major_collector.handle_gc_param && major_collector.handle_gc_param (opt))
continue;