g_assert (major_collector.is_concurrent);
concurrent_collection_in_progress = TRUE;
- object_ops = &major_collector.major_concurrent_ops;
+ object_ops = &major_collector.major_ops_concurrent_start;
} else {
- object_ops = &major_collector.major_ops;
+ object_ops = &major_collector.major_ops_serial;
}
reset_pinned_from_failed_allocation ();
TV_GETTIME (btv);
if (concurrent_collection_in_progress) {
- object_ops = &major_collector.major_concurrent_ops;
+ object_ops = &major_collector.major_ops_concurrent_finish;
sgen_workers_signal_start_nursery_collection_and_wait ();
check_nursery_is_clean ();
} else {
SGEN_ASSERT (0, !scan_whole_nursery, "scan_whole_nursery only applies to concurrent collections");
- object_ops = &major_collector.major_ops;
+ object_ops = &major_collector.major_ops_serial;
}
/*
void* (*alloc_small_pinned_obj) (MonoVTable *vtable, size_t size, gboolean has_references);
void* (*alloc_degraded) (MonoVTable *vtable, size_t size);
- SgenObjectOperations major_ops;
- SgenObjectOperations major_concurrent_ops;
+ SgenObjectOperations major_ops_serial;
+ SgenObjectOperations major_ops_concurrent_start;
+ SgenObjectOperations major_ops_concurrent;
+ SgenObjectOperations major_ops_concurrent_finish;
void* (*alloc_object) (MonoVTable *vtable, size_t size, gboolean has_references);
void (*free_pinned_object) (char *obj, size_t size);
binary_protocol_scan_process_reference ((obj), (ptr), __old); \
if (__old && !sgen_ptr_in_nursery (__old)) { \
PREFETCH_READ (__old); \
- major_copy_or_mark_object_with_evacuation_concurrent ((ptr), __old, queue); \
+ major_copy_or_mark_object_concurrent ((ptr), __old, queue); \
} else { \
if (G_UNLIKELY (sgen_ptr_in_nursery (__old) && !sgen_ptr_in_nursery ((ptr)))) \
sgen_add_to_global_remset ((ptr), __old); \
/* FIXME: Unify this with optimized code in sgen-marksweep.c. */
static void
-major_scan_object_no_mark_concurrent (char *start, mword desc, SgenGrayQueue *queue)
+major_scan_object_no_mark_concurrent_anywhere (char *start, mword desc, SgenGrayQueue *queue)
{
SGEN_OBJECT_LAYOUT_STATISTICS_DECLARE_BITMAP;
HEAVY_STAT (++stat_scan_object_called_major);
}
+/*
+ * Scan variant installed in major_ops_concurrent_start/finish, i.e. used
+ * during the stop-the-world pauses that begin and end a concurrent
+ * collection.  It forwards to the _anywhere scanner without the
+ * non-nursery assertion below — presumably because nursery objects can
+ * legitimately be encountered during those pauses (TODO confirm).
+ */
+static void
+major_scan_object_no_mark_concurrent_start_finish (char *start, mword desc, SgenGrayQueue *queue)
+{
+ major_scan_object_no_mark_concurrent_anywhere (start, desc, queue);
+}
+
+/*
+ * Scan variant installed in major_ops_concurrent, i.e. used by worker
+ * threads while the concurrent mark phase runs alongside the mutator.
+ * Asserts that no nursery object reaches the concurrent scanner, then
+ * forwards to the shared _anywhere implementation.
+ */
+static void
+major_scan_object_no_mark_concurrent (char *start, mword desc, SgenGrayQueue *queue)
+{
+ SGEN_ASSERT (0, !sgen_ptr_in_nursery (start), "Why are we scanning nursery objects in the concurrent collector?");
+ major_scan_object_no_mark_concurrent_anywhere (start, desc, queue);
+}
+
+
static void
major_scan_vtype_concurrent (char *start, mword desc, SgenGrayQueue *queue BINARY_PROTOCOL_ARG (size_t size))
{
#include "sgen-major-copy-object.h"
static void
-major_copy_or_mark_object_with_evacuation_concurrent (void **ptr, void *obj, SgenGrayQueue *queue)
+major_copy_or_mark_object_concurrent (void **ptr, void *obj, SgenGrayQueue *queue)
{
SGEN_ASSERT (9, sgen_concurrent_collection_in_progress (), "Why are we scanning concurrently when there's no concurrent collection on?");
SGEN_ASSERT (9, !sgen_workers_are_working () || sgen_thread_pool_is_thread_pool_thread (mono_native_thread_id_get ()), "We must not scan from two threads at the same time!");
+/*
+ * Adapter matching the SgenObjectOperations copy_or_mark_object
+ * signature: loads the referenced object from *ptr and forwards both the
+ * slot and the object to the concurrent copy-or-mark routine.
+ */
static void
major_copy_or_mark_object_concurrent_canonical (void **ptr, SgenGrayQueue *queue)
{
- major_copy_or_mark_object_with_evacuation_concurrent (ptr, *ptr, queue);
+ major_copy_or_mark_object_concurrent (ptr, *ptr, queue);
}
static void
collector->describe_pointer = major_describe_pointer;
collector->count_cards = major_count_cards;
- collector->major_ops.copy_or_mark_object = major_copy_or_mark_object_canonical;
- collector->major_ops.scan_object = major_scan_object_with_evacuation;
+ collector->major_ops_serial.copy_or_mark_object = major_copy_or_mark_object_canonical;
+ collector->major_ops_serial.scan_object = major_scan_object_with_evacuation;
if (is_concurrent) {
- collector->major_concurrent_ops.copy_or_mark_object = major_copy_or_mark_object_concurrent_canonical;
- collector->major_concurrent_ops.scan_object = major_scan_object_no_mark_concurrent;
- collector->major_concurrent_ops.scan_vtype = major_scan_vtype_concurrent;
+ collector->major_ops_concurrent_start.copy_or_mark_object = major_copy_or_mark_object_concurrent_canonical;
+ collector->major_ops_concurrent_start.scan_object = major_scan_object_no_mark_concurrent_start_finish;
+
+ collector->major_ops_concurrent.copy_or_mark_object = major_copy_or_mark_object_concurrent_canonical;
+ collector->major_ops_concurrent.scan_object = major_scan_object_no_mark_concurrent;
+
+ collector->major_ops_concurrent_finish.copy_or_mark_object = major_copy_or_mark_object_concurrent_canonical;
+ collector->major_ops_concurrent_finish.scan_object = major_scan_object_no_mark_concurrent_start_finish;
+ collector->major_ops_concurrent_finish.scan_vtype = major_scan_vtype_concurrent;
}
#if !defined (FIXED_HEAP) && !defined (SGEN_PARALLEL_MARK)
if (!continue_idle_func ())
return;
+ SGEN_ASSERT (0, sgen_concurrent_collection_in_progress (), "The worker should only mark in concurrent collections.");
SGEN_ASSERT (0, sgen_get_current_collection_generation () != GENERATION_NURSERY, "Why are we doing work while there's a nursery collection happening?");
if (workers_state == STATE_WORK_ENQUEUED) {
}
if (!sgen_gray_object_queue_is_empty (&data->private_gray_queue) || workers_get_work (data)) {
- SgenObjectOperations *ops = sgen_concurrent_collection_in_progress ()
- ? &major->major_concurrent_ops
- : &major->major_ops;
+ SgenObjectOperations *ops = &major->major_ops_concurrent;
ScanCopyContext ctx = CONTEXT_FROM_OBJECT_OPERATIONS (ops, &data->private_gray_queue);
SGEN_ASSERT (0, !sgen_gray_object_queue_is_empty (&data->private_gray_queue), "How is our gray queue empty if we just got work?");