[dtrace] GC heap allocation probes for SGen.
diff --git a/mono/metadata/sgen-major-copying.c b/mono/metadata/sgen-major-copying.c
index 830fcb6f810c53e90e070ce61b892a782012a89b..d1737a6b787b6630f0779dcdf7499eafc1463e90 100644
  * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
  */
 
-#define MAJOR_SECTION_SIZE             PINNED_CHUNK_SIZE
-#define BLOCK_FOR_OBJECT(o)            ((Block*)(((mword)(o)) & ~(MAJOR_SECTION_SIZE - 1)))
+#include "config.h"
+
+#ifdef HAVE_SGEN_GC
+
+#include "utils/mono-counters.h"
+
+#include "metadata/gc-internal.h"
+#include "metadata/sgen-gc.h"
+#include "metadata/sgen-protocol.h"
+#include "metadata/mono-gc.h"
+#include "metadata/object-internals.h"
+#include "metadata/profiler-private.h"
+#include "metadata/sgen-memory-governor.h"
+
+#define MAJOR_SECTION_SIZE             SGEN_PINNED_CHUNK_SIZE
+#define BLOCK_FOR_OBJECT(o)            SGEN_PINNED_CHUNK_FOR_PTR ((o))
 #define MAJOR_SECTION_FOR_OBJECT(o)    ((GCMemSection*)BLOCK_FOR_OBJECT ((o)))
-#define MIN_MINOR_COLLECTION_SECTION_ALLOWANCE (DEFAULT_NURSERY_SIZE * 3 / MAJOR_SECTION_SIZE)
 
 #define MAJOR_OBJ_IS_IN_TO_SPACE(o)    (MAJOR_SECTION_FOR_OBJECT ((o))->is_to_space)
 
-static int minor_collection_section_allowance;
-static int minor_collection_sections_alloced = 0;
 static int num_major_sections = 0;
 
 static GCMemSection *section_list = NULL;
 
-/* pinned_chunk_list is used for allocations of objects that are never moved */
-static PinnedChunk *pinned_chunk_list = NULL;
+static SgenPinnedAllocator pinned_allocator;
+
+static gboolean have_swept;
 
 /*
  * used when moving the objects
@@ -68,6 +80,13 @@ static char *to_space_bumper = NULL;
 static char *to_space_top = NULL;
 static GCMemSection *to_space_section = NULL;
 
+/* we get this at init */
+static int nursery_bits;
+static char *nursery_start;
+static char *nursery_end;
+
+#define ptr_in_nursery(p)      (SGEN_PTR_IN_NURSERY ((p), nursery_bits, nursery_start, nursery_end))
+
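A note on the macro above: the point of passing nursery_bits around is that the
nursery membership test can be a mask-and-compare rather than a two-sided range
check. A minimal sketch of that kind of test, assuming the nursery is a single
2^bits-byte block allocated at a 2^bits-aligned address (illustrative code, not
the actual SGEN_PTR_IN_NURSERY definition in sgen-gc.h):

    #include <stdbool.h>
    #include <stdint.h>

    /* Clearing the low `bits` bits of a pointer yields the start of the aligned
     * 2^bits-byte block it lives in; if that equals nursery_start, the pointer
     * is in the nursery. When the nursery is not aligned, a plain
     * start <= p && p < end comparison does the same job more slowly. */
    static inline bool
    ptr_in_nursery_sketch (void *p, int bits, char *nursery_start)
    {
            return ((uintptr_t)p & ~(((uintptr_t)1 << bits) - 1)) == (uintptr_t)nursery_start;
    }
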
 #ifdef HEAVY_STATISTICS
 static long stat_major_copy_object_failed_forwarded = 0;
 static long stat_major_copy_object_failed_pinned = 0;
@@ -75,6 +94,20 @@ static long stat_major_copy_object_failed_large_pinned = 0;
 static long stat_major_copy_object_failed_to_space = 0;
 #endif
 
+static void*
+major_alloc_heap (mword nursery_size, mword nursery_align, int the_nursery_bits)
+{
+       if (nursery_align)
+               nursery_start = sgen_alloc_os_memory_aligned (nursery_size, nursery_align, TRUE, TRUE, "nursery");
+       else
+               nursery_start = sgen_alloc_os_memory (nursery_size, TRUE, TRUE, "nursery");
+
+       nursery_end = nursery_start + nursery_size;
+       nursery_bits = the_nursery_bits;
+
+       return nursery_start;
+}
+
 static gboolean
 obj_is_from_pinned_alloc (char *p)
 {
@@ -84,13 +117,7 @@ obj_is_from_pinned_alloc (char *p)
 static void
 free_pinned_object (char *obj, size_t size)
 {
-       PinnedChunk *chunk = (PinnedChunk*) BLOCK_FOR_OBJECT (obj);
-       void **p = (void**)obj;
-       int slot = slot_for_size (size);
-
-       g_assert (obj >= (char*)chunk->start_data && obj < ((char*)chunk + chunk->num_pages * FREELIST_PAGESIZE));
-       *p = chunk->free_list [slot];
-       chunk->free_list [slot] = p;
+       sgen_free_pinned (&pinned_allocator, obj, size);
 }
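
The open-coded free-list handling deleted here (and its allocation counterpart
removed from major_alloc_small_pinned_obj further down) now lives behind
sgen_free_pinned()/sgen_alloc_pinned() on the shared SgenPinnedAllocator. The
underlying structure, as the deleted lines show, is a per-size-class LIFO free
list threaded through the free slots themselves. A stripped-down sketch with
made-up names (not the SgenPinnedAllocator API):

    #include <stddef.h>

    #define NUM_SIZE_CLASSES 32

    /* One list head per size class; a free slot's first word stores the link
     * to the next free slot, so the lists cost no extra memory. */
    static void *free_lists [NUM_SIZE_CLASSES];

    static void
    free_slot (void *obj, int slot)
    {
            *(void**)obj = free_lists [slot];        /* push onto the class's list */
            free_lists [slot] = obj;
    }

    static void*
    alloc_slot (int slot)
    {
            void *res = free_lists [slot];           /* pop; NULL means "grow the chunk" */
            if (res)
                    free_lists [slot] = *(void**)res;
            return res;
    }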
 
 /*
@@ -102,16 +129,15 @@ alloc_major_section (void)
        GCMemSection *section;
        int scan_starts;
 
-       section = get_os_memory_aligned (MAJOR_SECTION_SIZE, MAJOR_SECTION_SIZE, TRUE);
-       section->next_data = section->data = (char*)section + SIZEOF_GC_MEM_SECTION;
+       section = sgen_alloc_os_memory_aligned (MAJOR_SECTION_SIZE, MAJOR_SECTION_SIZE, TRUE, TRUE, "major heap section");
+       section->next_data = section->data = (char*)section + SGEN_SIZEOF_GC_MEM_SECTION;
        g_assert (!((mword)section->data & 7));
-       section->size = MAJOR_SECTION_SIZE - SIZEOF_GC_MEM_SECTION;
+       section->size = MAJOR_SECTION_SIZE - SGEN_SIZEOF_GC_MEM_SECTION;
        section->end_data = section->data + section->size;
-       UPDATE_HEAP_BOUNDARIES (section->data, section->end_data);
-       total_alloc += section->size;
-       DEBUG (3, fprintf (gc_debug_file, "New major heap section: (%p-%p), total: %zd\n", section->data, section->end_data, total_alloc));
-       scan_starts = (section->size + SCAN_START_SIZE - 1) / SCAN_START_SIZE;
-       section->scan_starts = get_internal_mem (sizeof (char*) * scan_starts, INTERNAL_MEM_SCAN_STARTS);
+       sgen_update_heap_boundaries ((mword)section->data, (mword)section->end_data);
+       DEBUG (3, fprintf (gc_debug_file, "New major heap section: (%p-%p), total: %lld\n", section->data, section->end_data, (long long int)mono_gc_get_heap_size ()));
+       scan_starts = (section->size + SGEN_SCAN_START_SIZE - 1) / SGEN_SCAN_START_SIZE;
+       section->scan_starts = sgen_alloc_internal_dynamic (sizeof (char*) * scan_starts, INTERNAL_MEM_SCAN_STARTS, TRUE);
        section->num_scan_start = scan_starts;
        section->block.role = MEMORY_ROLE_GEN1;
        section->is_to_space = TRUE;
@@ -129,9 +155,9 @@ static void
 free_major_section (GCMemSection *section)
 {
        DEBUG (3, fprintf (gc_debug_file, "Freed major section %p (%p-%p)\n", section, section->data, section->end_data));
-       free_internal_mem (section->scan_starts, INTERNAL_MEM_SCAN_STARTS);
-       free_os_memory (section, MAJOR_SECTION_SIZE);
-       total_alloc -= MAJOR_SECTION_SIZE - SIZEOF_GC_MEM_SECTION;
+       sgen_free_internal_dynamic (section->scan_starts,
+                       (section->size + SGEN_SCAN_START_SIZE - 1) / SGEN_SCAN_START_SIZE * sizeof (char*), INTERNAL_MEM_SCAN_STARTS);
+       sgen_free_os_memory (section, MAJOR_SECTION_SIZE, TRUE);
 
        --num_major_sections;
 }
@@ -165,18 +191,21 @@ to_space_expand (void)
        new_to_space_section ();
 }
 
-#define MAJOR_GET_COPY_OBJECT_SPACE(dest, size) do {                   \
-               (dest) = to_space_bumper;                               \
-               /* Make sure we have enough space available */          \
-               if ((dest) + (size) > to_space_top) {                   \
-                       to_space_expand ();                             \
-                       (dest) = to_space_bumper;                       \
-                       DEBUG (8, g_assert ((dest) + (objsize) <= to_space_top)); \
-               }                                                       \
-               to_space_bumper += objsize;                             \
-               DEBUG (8, g_assert (to_space_bumper <= to_space_top));  \
-               to_space_section->scan_starts [((dest) - (char*)to_space_section->data)/SCAN_START_SIZE] = (dest); \
-       } while (0)
+static void*
+major_alloc_object (int size, gboolean has_references)
+{
+       char *dest = to_space_bumper;
+       /* Make sure we have enough space available */
+       if (dest + size > to_space_top) {
+               to_space_expand ();
+               dest = to_space_bumper;
+               DEBUG (8, g_assert (dest + size <= to_space_top));
+       }
+       to_space_bumper += size;
+       DEBUG (8, g_assert (to_space_bumper <= to_space_top));
+       to_space_section->scan_starts [(dest - (char*)to_space_section->data)/SGEN_SCAN_START_SIZE] = dest;
+       return dest;
+}
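
Besides bumping the pointer, major_alloc_object records the new object in the
section's scan_starts table: each SGEN_SCAN_START_SIZE-byte window of the
section remembers an object start inside it, so later code that has to begin
scanning from an arbitrary address can jump to a nearby known object start and
walk forward by object size instead of walking the whole section. A greatly
simplified sketch of such a lookup (the size callback and names are
illustrative; the real lookup in the SGen core also handles empty windows and
other corner cases):

    #include <stddef.h>

    #define SCAN_START_SIZE_SKETCH 4096

    typedef size_t (*obj_size_func) (char *obj);   /* stand-in for the aligned object-size query */

    static char*
    find_object_near (char *section_data, char **scan_starts, char *addr, obj_size_func size_of)
    {
            size_t idx = (size_t)(addr - section_data) / SCAN_START_SIZE_SKETCH;
            char *obj = scan_starts [idx];
            /* Walk forward object by object until one covers (or passes) addr. */
            while (obj && obj + size_of (obj) <= addr)
                    obj += size_of (obj);
            return obj;
    }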
 
 static void
 unset_to_space (void)
@@ -200,12 +229,10 @@ major_is_object_live (char *obj)
        if (ptr_in_nursery (obj))
                return FALSE;
 
-       objsize = safe_object_get_size ((MonoObject*)obj);
-       objsize += ALLOC_ALIGN - 1;
-       objsize &= ~(ALLOC_ALIGN - 1);
+       objsize = SGEN_ALIGN_UP (sgen_safe_object_get_size ((MonoObject*)obj));
 
        /* LOS */
-       if (objsize > MAX_SMALL_OBJ_SIZE)
+       if (objsize > SGEN_MAX_SMALL_OBJ_SIZE)
                return FALSE;
 
        /* pinned chunk */
@@ -218,47 +245,20 @@ major_is_object_live (char *obj)
 
 /* size is a multiple of ALLOC_ALIGN */
 static void*
-major_alloc_small_pinned_obj (size_t size)
-{
-       int slot;
-       void *res = NULL;
-       PinnedChunk *pchunk;
-       slot = slot_for_size (size);
-       /*g_print ("using slot %d for size %d (slot size: %d)\n", slot, size, freelist_sizes [slot]);*/
-       g_assert (size <= freelist_sizes [slot]);
-       for (pchunk = pinned_chunk_list; pchunk; pchunk = pchunk->block.next) {
-               void **p = pchunk->free_list [slot];
-               if (p) {
-                       /*g_print ("found freelist for slot %d in chunk %p, returning %p, next %p\n", slot, pchunk, p, *p);*/
-                       pchunk->free_list [slot] = *p;
-                       res = p;
-                       goto found;
-               }
-       }
-       for (pchunk = pinned_chunk_list; pchunk; pchunk = pchunk->block.next) {
-               res = get_chunk_freelist (pchunk, slot);
-               if (res)
-                       goto found;
-       }
-       pchunk = alloc_pinned_chunk ();
-       /* FIXME: handle OOM */
-       pchunk->block.next = pinned_chunk_list;
-       pinned_chunk_list = pchunk;
-       res = get_chunk_freelist (pchunk, slot);
- found:
-       memset (res, 0, size);
-       return res;
+major_alloc_small_pinned_obj (size_t size, gboolean has_references)
+{
+       return sgen_alloc_pinned (&pinned_allocator, size);
 }
 
 /*
  * size is already rounded up and we hold the GC lock.
  */
 static void*
-alloc_degraded (MonoVTable *vtable, size_t size)
+major_alloc_degraded (MonoVTable *vtable, size_t size)
 {
        GCMemSection *section;
        void **p = NULL;
-       g_assert (size <= MAX_SMALL_OBJ_SIZE);
+       g_assert (size <= SGEN_MAX_SMALL_OBJ_SIZE);
        HEAVY_STAT (++stat_objects_alloced_degraded);
        HEAVY_STAT (stat_bytes_alloced_degraded += size);
        for (section = section_list; section; section = section->block.next) {
@@ -272,16 +272,24 @@ alloc_degraded (MonoVTable *vtable, size_t size)
                section->is_to_space = FALSE;
                /* FIXME: handle OOM */
                p = (void**)section->next_data;
+               sgen_register_major_sections_alloced (1);
        }
        section->next_data += size;
-       degraded_mode += size;
        DEBUG (3, fprintf (gc_debug_file, "Allocated (degraded) object %p, vtable: %p (%s), size: %zd in section %p\n", p, vtable, vtable->klass->name, size, section));
        *p = vtable;
        return p;
 }
 
+static inline void
+pin_major_object (char *obj, SgenGrayQueue *queue)
+{
+       sgen_pin_object (obj, queue);
+}
+
+#include "sgen-major-copy-object.h"
+
 static void
-major_copy_or_mark_object (void **obj_slot)
+major_copy_or_mark_object (void **obj_slot, SgenGrayQueue *queue)
 {
        char *forwarded;
        char *obj = *obj_slot;
@@ -307,8 +315,8 @@ major_copy_or_mark_object (void **obj_slot)
         *
         * Before we can copy the object we must make sure that we are
         * allowed to, i.e. that the object is not pinned, not already
-        * forwarded and doesn't belong to the LOS, a pinned chunk, or
-        * a to-space section.
+        * forwarded, not in the nursery To Space and doesn't belong
+        * to the LOS, a pinned chunk, or a to-space section.
         *
         * We are usually called for to-space objects (5) when we have
         * two remset entries for the same reference.  The first entry
@@ -318,32 +326,36 @@ major_copy_or_mark_object (void **obj_slot)
         * get to-space objects.
         */
 
-       if ((forwarded = object_is_forwarded (obj))) {
-               DEBUG (9, g_assert (((MonoVTable*)LOAD_VTABLE(obj))->gc_descr));
+       if ((forwarded = SGEN_OBJECT_IS_FORWARDED (obj))) {
+               DEBUG (9, g_assert (((MonoVTable*)SGEN_LOAD_VTABLE(obj))->gc_descr));
                DEBUG (9, fprintf (gc_debug_file, " (already forwarded to %p)\n", forwarded));
                HEAVY_STAT (++stat_major_copy_object_failed_forwarded);
                *obj_slot = forwarded;
                return;
        }
-       if (object_is_pinned (obj)) {
-               DEBUG (9, g_assert (((MonoVTable*)LOAD_VTABLE(obj))->gc_descr));
+       if (SGEN_OBJECT_IS_PINNED (obj)) {
+               DEBUG (9, g_assert (((MonoVTable*)SGEN_LOAD_VTABLE(obj))->gc_descr));
                DEBUG (9, fprintf (gc_debug_file, " (pinned, no change)\n"));
                HEAVY_STAT (++stat_major_copy_object_failed_pinned);
                return;
        }
 
-       if (ptr_in_nursery (obj))
+       if (ptr_in_nursery (obj)) {
+               /* A To Space object is already at its final destination for the current collection. */
+               if (sgen_nursery_is_to_space (obj))
+                       return;
                goto copy;
+       }
 
        /*
         * At this point we know obj is not pinned, not forwarded and
         * belongs to 2, 3, 4, or 5.
         *
         * LOS object (2) are simple, at least until we always follow
-        * the rule: if objsize > MAX_SMALL_OBJ_SIZE, pin the object
-        * and return it.  At the end of major collections, we walk
-        * the los list and if the object is pinned, it is marked,
-        * otherwise it can be freed.
+        * the rule: if objsize > SGEN_MAX_SMALL_OBJ_SIZE, pin the
+        * object and return it.  At the end of major collections, we
+        * walk the los list and if the object is pinned, it is
+        * marked, otherwise it can be freed.
         *
         * Pinned chunks (3) and major heap sections (4, 5) both
         * reside in blocks, which are always aligned, so once we've
@@ -351,17 +363,15 @@ major_copy_or_mark_object (void **obj_slot)
         * see whether it's a pinned chunk or a major heap section.
         */
 
-       objsize = safe_object_get_size ((MonoObject*)obj);
-       objsize += ALLOC_ALIGN - 1;
-       objsize &= ~(ALLOC_ALIGN - 1);
+       objsize = SGEN_ALIGN_UP (sgen_safe_object_get_size ((MonoObject*)obj));
 
-       if (G_UNLIKELY (objsize > MAX_SMALL_OBJ_SIZE || obj_is_from_pinned_alloc (obj))) {
-               if (object_is_pinned (obj))
+       if (G_UNLIKELY (objsize > SGEN_MAX_SMALL_OBJ_SIZE || obj_is_from_pinned_alloc (obj))) {
+               if (SGEN_OBJECT_IS_PINNED (obj))
                        return;
-               DEBUG (9, fprintf (gc_debug_file, " (marked LOS/Pinned %p (%s), size: %zd)\n", obj, safe_name (obj), objsize));
-               binary_protocol_pin (obj, (gpointer)LOAD_VTABLE (obj), safe_object_get_size ((MonoObject*)obj));
-               pin_object (obj);
-               GRAY_OBJECT_ENQUEUE (obj);
+               DEBUG (9, fprintf (gc_debug_file, " (marked LOS/Pinned %p (%s), size: %td)\n", obj, sgen_safe_name (obj), objsize));
+               binary_protocol_pin (obj, (gpointer)SGEN_LOAD_VTABLE (obj), sgen_safe_object_get_size ((MonoObject*)obj));
+               SGEN_PIN_OBJECT (obj);
+               GRAY_OBJECT_ENQUEUE (queue, obj);
                HEAVY_STAT (++stat_major_copy_object_failed_large_pinned);
                return;
        }
@@ -372,7 +382,7 @@ major_copy_or_mark_object (void **obj_slot)
         * not (4).
         */
        if (MAJOR_OBJ_IS_IN_TO_SPACE (obj)) {
-               DEBUG (9, g_assert (objsize <= MAX_SMALL_OBJ_SIZE));
+               DEBUG (9, g_assert (objsize <= SGEN_MAX_SMALL_OBJ_SIZE));
                DEBUG (9, fprintf (gc_debug_file, " (already copied)\n"));
                HEAVY_STAT (++stat_major_copy_object_failed_to_space);
                return;
@@ -381,9 +391,11 @@ major_copy_or_mark_object (void **obj_slot)
  copy:
        HEAVY_STAT (++stat_objects_copied_major);
 
-       *obj_slot = copy_object_no_checks (obj);
+       *obj_slot = copy_object_no_checks (obj, queue);
 }
 
+#include "sgen-major-scan-object.h"
+
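For readers following the checks above: SGEN_OBJECT_IS_FORWARDED and
SGEN_OBJECT_IS_PINNED inspect per-object state that the copying phase maintains
so that the same object is never copied twice and pinned objects are left in
place. A common way to encode that state, and roughly what the SGEN_* macros
abstract, is to borrow the low bits of the object's first (vtable) word, which
are free because vtables are word-aligned. The sketch below uses illustrative
names, not SGen's actual macros:

    #include <stddef.h>
    #include <stdint.h>

    #define FORWARDED_TAG ((uintptr_t)1)

    typedef struct { void *first_word; } ObjHeaderSketch;

    /* Returns the new location if the object was already copied, NULL otherwise. */
    static void*
    get_forwarded (ObjHeaderSketch *obj)
    {
            uintptr_t w = (uintptr_t)obj->first_word;
            return (w & FORWARDED_TAG) ? (void*)(w & ~FORWARDED_TAG) : NULL;
    }

    /* Installed right after the object's bits are copied to new_location, so
     * any later reference to the old copy (e.g. a second remset entry) is
     * redirected instead of copied again. */
    static void
    set_forwarded (ObjHeaderSketch *obj, void *new_location)
    {
            obj->first_word = (void*)((uintptr_t)new_location | FORWARDED_TAG);
    }
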
 /* FIXME: later reduce code duplication here with build_nursery_fragments().
  * We don't keep track of section fragments for non-nursery sections yet, so
  * just memset to 0.
@@ -399,24 +411,22 @@ build_section_fragments (GCMemSection *section)
        memset (section->scan_starts, 0, section->num_scan_start * sizeof (gpointer));
        frag_start = section->data;
        section->next_data = section->data;
-       for (i = section->pin_queue_start; i < section->pin_queue_end; ++i) {
-               frag_end = pin_queue [i];
+       for (i = 0; i < section->pin_queue_num_entries; ++i) {
+               frag_end = section->pin_queue_start [i];
                /* remove the pin bit from pinned objects */
-               unpin_object (frag_end);
+               SGEN_UNPIN_OBJECT (frag_end);
                if (frag_end >= section->data + section->size) {
                        frag_end = section->data + section->size;
                } else {
-                       section->scan_starts [((char*)frag_end - (char*)section->data)/SCAN_START_SIZE] = frag_end;
+                       section->scan_starts [((char*)frag_end - (char*)section->data)/SGEN_SCAN_START_SIZE] = frag_end;
                }
                frag_size = frag_end - frag_start;
                if (frag_size) {
                        binary_protocol_empty (frag_start, frag_size);
                        memset (frag_start, 0, frag_size);
                }
-               frag_size = safe_object_get_size ((MonoObject*)pin_queue [i]);
-               frag_size += ALLOC_ALIGN - 1;
-               frag_size &= ~(ALLOC_ALIGN - 1);
-               frag_start = (char*)pin_queue [i] + frag_size;
+               frag_size = SGEN_ALIGN_UP (sgen_safe_object_get_size ((MonoObject*)section->pin_queue_start [i]));
+               frag_start = (char*)section->pin_queue_start [i] + frag_size;
                section->next_data = MAX (section->next_data, frag_start);
        }
        frag_end = section->end_data;
@@ -427,91 +437,14 @@ build_section_fragments (GCMemSection *section)
        }
 }
 
-static void
-scan_pinned_objects (IterateObjectCallbackFunc callback, void *callback_data)
-{
-       PinnedChunk *chunk;
-       int i, obj_size;
-       char *p, *endp;
-       void **ptr;
-       void *end_chunk;
-       for (chunk = pinned_chunk_list; chunk; chunk = chunk->block.next) {
-               end_chunk = (char*)chunk + chunk->num_pages * FREELIST_PAGESIZE;
-               DEBUG (6, fprintf (gc_debug_file, "Scanning pinned chunk %p (range: %p-%p)\n", chunk, chunk->start_data, end_chunk));
-               for (i = 0; i < chunk->num_pages; ++i) {
-                       obj_size = chunk->page_sizes [i];
-                       if (!obj_size)
-                               continue;
-                       p = i? (char*)chunk + i * FREELIST_PAGESIZE: chunk->start_data;
-                       endp = i? p + FREELIST_PAGESIZE: (char*)chunk + FREELIST_PAGESIZE;
-                       DEBUG (6, fprintf (gc_debug_file, "Page %d (size: %d, range: %p-%p)\n", i, obj_size, p, endp));
-                       while (p + obj_size <= endp) {
-                               ptr = (void**)p;
-                               DEBUG (9, fprintf (gc_debug_file, "Considering %p (vtable: %p)\n", ptr, *ptr));
-                               /* if the first word (the vtable) is outside the chunk we have an object */
-                               if (*ptr && (*ptr < (void*)chunk || *ptr >= end_chunk))
-                                       callback ((char*)ptr, obj_size, callback_data);
-                               p += obj_size;
-                       }
-               }
-       }
-}
-
-/*
- * the array of pointers from @start to @end contains conservative
- * pointers to objects inside @chunk: mark each referenced object
- * with the PIN bit.
- */
-static void
-mark_pinned_from_addresses (PinnedChunk *chunk, void **start, void **end)
-{
-       for (; start < end; start++) {
-               char *addr = *start;
-               int offset = (char*)addr - (char*)chunk;
-               int page = offset / FREELIST_PAGESIZE;
-               int obj_offset = page == 0? offset - ((char*)chunk->start_data - (char*)chunk): offset % FREELIST_PAGESIZE;
-               int slot_size = chunk->page_sizes [page];
-               void **ptr;
-               /* the page is not allocated */
-               if (!slot_size)
-                       continue;
-               /* would be faster if we restrict the sizes to power of two,
-                * but that's a waste of memory: need to measure. it could reduce
-                * fragmentation since there are less pages needed, if for example
-                * someone interns strings of each size we end up with one page per
-                * interned string (still this is just ~40 KB): with more fine-grained sizes
-                * this increases the number of used pages.
-                */
-               if (page == 0) {
-                       obj_offset /= slot_size;
-                       obj_offset *= slot_size;
-                       addr = (char*)chunk->start_data + obj_offset;
-               } else {
-                       obj_offset /= slot_size;
-                       obj_offset *= slot_size;
-                       addr = (char*)chunk + page * FREELIST_PAGESIZE + obj_offset;
-               }
-               ptr = (void**)addr;
-               /* if the vtable is inside the chunk it's on the freelist, so skip */
-               if (*ptr && (*ptr < (void*)chunk->start_data || *ptr > (void*)((char*)chunk + chunk->num_pages * FREELIST_PAGESIZE))) {
-                       binary_protocol_pin (addr, (gpointer)LOAD_VTABLE (addr), safe_object_get_size ((MonoObject*)addr));
-                       if (heap_dump_file && !object_is_pinned (addr))
-                               pin_stats_register_object ((char*) addr, safe_object_get_size ((MonoObject*) addr));
-                       pin_object (addr);
-                       GRAY_OBJECT_ENQUEUE (addr);
-                       DEBUG (6, fprintf (gc_debug_file, "Marked pinned object %p (%s) from roots\n", addr, safe_name (addr)));
-               }
-       }
-}
-
 static void
 sweep_pinned_objects_callback (char *ptr, size_t size, void *data)
 {
-       if (object_is_pinned (ptr)) {
-               unpin_object (ptr);
-               DEBUG (6, fprintf (gc_debug_file, "Unmarked pinned object %p (%s)\n", ptr, safe_name (ptr)));
+       if (SGEN_OBJECT_IS_PINNED (ptr)) {
+               SGEN_UNPIN_OBJECT (ptr);
+               DEBUG (6, fprintf (gc_debug_file, "Unmarked pinned object %p (%s)\n", ptr, sgen_safe_name (ptr)));
        } else {
-               DEBUG (6, fprintf (gc_debug_file, "Freeing unmarked pinned object %p (%s)\n", ptr, safe_name (ptr)));
+               DEBUG (6, fprintf (gc_debug_file, "Freeing unmarked pinned object %p (%s)\n", ptr, sgen_safe_name (ptr)));
                free_pinned_object (ptr, size);
        }
 }
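
The removed mark_pinned_from_addresses() above contained the one non-obvious
step in pinning from conservative roots into a pinned chunk: an interior
pointer has to be rounded down to the start of the fixed-size slot it falls in
before the pin bit can be set. That rounding (now presumably handled inside the
shared SgenPinnedAllocator code) boils down to the following, shown here with
toy names:

    #include <stddef.h>

    /* Round addr down to the start of its slot within a page that is carved
     * into slot_size-byte slots starting at page_start. */
    static char*
    slot_start_for_interior_ptr (char *page_start, char *addr, size_t slot_size)
    {
            size_t offset = (size_t)(addr - page_start);
            return page_start + (offset / slot_size) * slot_size;
    }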
@@ -519,7 +452,7 @@ sweep_pinned_objects_callback (char *ptr, size_t size, void *data)
 static void
 sweep_pinned_objects (void)
 {
-       scan_pinned_objects (sweep_pinned_objects_callback, NULL);
+       sgen_pinned_scan_objects (&pinned_allocator, sweep_pinned_objects_callback, NULL);
 }
 
 static void
@@ -528,10 +461,10 @@ major_iterate_objects (gboolean non_pinned, gboolean pinned, IterateObjectCallba
        if (non_pinned) {
                GCMemSection *section;
                for (section = section_list; section; section = section->block.next)
-                       scan_area_with_callback (section->data, section->end_data, callback, data);
+                       sgen_scan_area_with_callback (section->data, section->end_data, callback, data, FALSE);
        }
        if (pinned)
-               scan_pinned_objects (callback, data);
+               sgen_pinned_scan_objects (&pinned_allocator, callback, data);
 }
 
 static void
@@ -541,187 +474,70 @@ major_free_non_pinned_object (char *obj, size_t size)
 }
 
 static void
-major_do_collection (const char *reason)
+pin_pinned_object_callback (void *addr, size_t slot_size, SgenGrayQueue *queue)
 {
-       GCMemSection *section, *prev_section;
-       LOSObject *bigobj, *prevbo;
-       PinnedChunk *chunk;
-       TV_DECLARE (all_atv);
-       TV_DECLARE (all_btv);
-       TV_DECLARE (atv);
-       TV_DECLARE (btv);
-       /* FIXME: only use these values for the precise scan
-        * note that to_space pointers should be excluded anyway...
-        */
-       char *heap_start = NULL;
-       char *heap_end = (char*)-1;
-       int old_num_major_sections = num_major_sections;
-       int num_major_sections_saved, save_target, allowance_target;
-
-       init_stats ();
-       binary_protocol_collection (GENERATION_OLD);
-       check_scan_starts ();
-       gray_object_queue_init ();
-
-       degraded_mode = 0;
-       DEBUG (1, fprintf (gc_debug_file, "Start major collection %d\n", num_major_gcs));
-       num_major_gcs++;
-       mono_stats.major_gc_count ++;
-
-       /* world must be stopped already */
-       TV_GETTIME (all_atv);
-       TV_GETTIME (atv);
-
-       /* Pinning depends on this */
-       clear_nursery_fragments (nursery_next);
-
-       TV_GETTIME (btv);
-       time_major_pre_collection_fragment_clear += TV_ELAPSED_MS (atv, btv);
+       binary_protocol_pin (addr, (gpointer)SGEN_LOAD_VTABLE (addr), sgen_safe_object_get_size ((MonoObject*)addr));
+       if (!SGEN_OBJECT_IS_PINNED (addr))
+               sgen_pin_stats_register_object ((char*) addr, sgen_safe_object_get_size ((MonoObject*) addr));
+       SGEN_PIN_OBJECT (addr);
+       GRAY_OBJECT_ENQUEUE (queue, addr);
+       DEBUG (6, fprintf (gc_debug_file, "Marked pinned object %p (%s) from roots\n", addr, sgen_safe_name (addr)));
+}
 
-       if (xdomain_checks)
-               check_for_xdomain_refs ();
+static void
+major_find_pin_queue_start_ends (SgenGrayQueue *queue)
+{
+       GCMemSection *section;
 
-       nursery_section->next_data = nursery_real_end;
-       /* we should also coalesce scanning from sections close to each other
-        * and deal with pointers outside of the sections later.
-        */
-       /* The remsets are not useful for a major collection */
-       clear_remsets ();
+       for (section = section_list; section; section = section->block.next)
+               sgen_find_section_pin_queue_start_end (section);
+       sgen_pinned_scan_pinned_objects (&pinned_allocator, (IterateObjectCallbackFunc)pin_pinned_object_callback, queue);
+}
 
-       TV_GETTIME (atv);
-       init_pinning ();
-       DEBUG (6, fprintf (gc_debug_file, "Collecting pinned addresses\n"));
-       pin_from_roots ((void*)lowest_heap_address, (void*)highest_heap_address);
-       optimize_pin_queue (0);
+static void
+major_pin_objects (SgenGrayQueue *queue)
+{
+       GCMemSection *section;
 
-       /*
-        * pin_queue now contains all candidate pointers, sorted and
-        * uniqued.  We must do two passes now to figure out which
-        * objects are pinned.
-        *
-        * The first is to find within the pin_queue the area for each
-        * section.  This requires that the pin_queue be sorted.  We
-        * also process the LOS objects and pinned chunks here.
-        *
-        * The second, destructive, pass is to reduce the section
-        * areas to pointers to the actually pinned objects.
-        */
-       DEBUG (6, fprintf (gc_debug_file, "Pinning from sections\n"));
-       /* first pass for the sections */
-       find_section_pin_queue_start_end (nursery_section);
        for (section = section_list; section; section = section->block.next)
-               find_section_pin_queue_start_end (section);
-       /* identify possible pointers to the insize of large objects */
-       DEBUG (6, fprintf (gc_debug_file, "Pinning from large objects\n"));
-       for (bigobj = los_object_list; bigobj; bigobj = bigobj->next) {
-               int start, end;
-               find_optimized_pin_queue_area (bigobj->data, (char*)bigobj->data + bigobj->size, &start, &end);
-               if (start != end) {
-                       pin_object (bigobj->data);
-                       GRAY_OBJECT_ENQUEUE (bigobj->data);
-                       if (heap_dump_file)
-                               pin_stats_register_object ((char*) bigobj->data, safe_object_get_size ((MonoObject*) bigobj->data));
-                       DEBUG (6, fprintf (gc_debug_file, "Marked large object %p (%s) size: %zd from roots\n", bigobj->data, safe_name (bigobj->data), bigobj->size));
-               }
-       }
-       /* look for pinned addresses for pinned-alloc objects */
-       DEBUG (6, fprintf (gc_debug_file, "Pinning from pinned-alloc objects\n"));
-       for (chunk = pinned_chunk_list; chunk; chunk = chunk->block.next) {
-               int start, end;
-               find_optimized_pin_queue_area (chunk->start_data, (char*)chunk + chunk->num_pages * FREELIST_PAGESIZE, &start, &end);
-               if (start != end)
-                       mark_pinned_from_addresses (chunk, pin_queue + start, pin_queue + end);
-       }
-       /* second pass for the sections */
-       pin_objects_in_section (nursery_section);
-       for (section = section_list; section; section = section->block.next)
-               pin_objects_in_section (section);
-
-       TV_GETTIME (btv);
-       time_major_pinning += TV_ELAPSED_MS (atv, btv);
-       DEBUG (2, fprintf (gc_debug_file, "Finding pinned pointers: %d in %d usecs\n", next_pin_slot, TV_ELAPSED (atv, btv)));
-       DEBUG (4, fprintf (gc_debug_file, "Start scan with %d pinned objects\n", next_pin_slot));
+               sgen_pin_objects_in_section (section, queue);
+}
 
+static void
+major_init_to_space (void)
+{
        new_to_space_section ();
+}
 
-       drain_gray_stack ();
-
-       TV_GETTIME (atv);
-       time_major_scan_pinned += TV_ELAPSED_MS (btv, atv);
-
-       /* registered roots, this includes static fields */
-       scan_from_registered_roots (major_copy_or_mark_object, heap_start, heap_end, ROOT_TYPE_NORMAL);
-       scan_from_registered_roots (major_copy_or_mark_object, heap_start, heap_end, ROOT_TYPE_WBARRIER);
-       TV_GETTIME (btv);
-       time_major_scan_registered_roots += TV_ELAPSED_MS (atv, btv);
-
-       /* Threads */
-       /* FIXME: this is the wrong place for this, because it does
-          pinning */
-       scan_thread_data (heap_start, heap_end, TRUE);
-       TV_GETTIME (atv);
-       time_major_scan_thread_data += TV_ELAPSED_MS (btv, atv);
-
-       TV_GETTIME (btv);
-       time_major_scan_alloc_pinned += TV_ELAPSED_MS (atv, btv);
-
-       /* scan the list of objects ready for finalization */
-       scan_finalizer_entries (major_copy_or_mark_object, fin_ready_list);
-       scan_finalizer_entries (major_copy_or_mark_object, critical_fin_list);
-       TV_GETTIME (atv);
-       time_major_scan_finalized += TV_ELAPSED_MS (btv, atv);
-       DEBUG (2, fprintf (gc_debug_file, "Root scan: %d usecs\n", TV_ELAPSED (btv, atv)));
-
-       TV_GETTIME (btv);
-       time_major_scan_big_objects += TV_ELAPSED_MS (atv, btv);
-
-       /* all the objects in the heap */
-       finish_gray_stack (heap_start, heap_end, GENERATION_OLD);
-       TV_GETTIME (atv);
-       time_major_finish_gray_stack += TV_ELAPSED_MS (btv, atv);
+static void
+major_sweep (void)
+{
+       GCMemSection *section, *prev_section;
 
        to_space_set_next_data ();
        unset_to_space ();
 
-       /* sweep the big objects list */
-       prevbo = NULL;
-       for (bigobj = los_object_list; bigobj;) {
-               if (object_is_pinned (bigobj->data)) {
-                       unpin_object (bigobj->data);
-               } else {
-                       LOSObject *to_free;
-                       /* not referenced anywhere, so we can free it */
-                       if (prevbo)
-                               prevbo->next = bigobj->next;
-                       else
-                               los_object_list = bigobj->next;
-                       to_free = bigobj;
-                       bigobj = bigobj->next;
-                       free_large_object (to_free);
-                       continue;
-               }
-               prevbo = bigobj;
-               bigobj = bigobj->next;
-       }
        /* unpin objects from the pinned chunks and free the unmarked ones */
        sweep_pinned_objects ();
 
-       TV_GETTIME (btv);
-       time_major_sweep += TV_ELAPSED_MS (atv, btv);
+       sgen_pinned_update_heap_boundaries (&pinned_allocator);
 
        /* free the unused sections */
        prev_section = NULL;
        for (section = section_list; section;) {
+               GCMemSection *this_section = section;
+
                /* to_space doesn't need handling here */
                if (section->is_to_space) {
                        section->is_to_space = FALSE;
                        prev_section = section;
                        section = section->block.next;
-                       continue;
+                       goto update;
                }
                /* no pinning object, so the section is free */
-               if (section->pin_queue_start == section->pin_queue_end) {
+               if (!section->pin_queue_num_entries) {
                        GCMemSection *to_free;
+                       g_assert (!section->pin_queue_start);
                        if (prev_section)
                                prev_section->block.next = section->block.next;
                        else
@@ -731,63 +547,17 @@ major_do_collection (const char *reason)
                        free_major_section (to_free);
                        continue;
                } else {
-                       DEBUG (6, fprintf (gc_debug_file, "Section %p has still pinned objects (%d)\n", section, section->pin_queue_end - section->pin_queue_start));
+                       DEBUG (6, fprintf (gc_debug_file, "Section %p has still pinned objects (%d)\n", section, section->pin_queue_num_entries));
                        build_section_fragments (section);
                }
                prev_section = section;
                section = section->block.next;
-       }
 
-       /* walk the pin_queue, build up the fragment list of free memory, unmark
-        * pinned objects as we go, memzero() the empty fragments so they are ready for the
-        * next allocations.
-        */
-       build_nursery_fragments (nursery_section->pin_queue_start, nursery_section->pin_queue_end);
-
-       TV_GETTIME (atv);
-       time_major_fragment_creation += TV_ELAPSED_MS (btv, atv);
-
-       TV_GETTIME (all_btv);
-       mono_stats.major_gc_time_usecs += TV_ELAPSED (all_atv, all_btv);
-
-       if (heap_dump_file)
-               dump_heap ("major", num_major_gcs - 1, reason);
-
-       /* prepare the pin queue for the next collection */
-       next_pin_slot = 0;
-       if (fin_ready_list || critical_fin_list) {
-               DEBUG (4, fprintf (gc_debug_file, "Finalizer-thread wakeup: ready %d\n", num_ready_finalizers));
-               mono_gc_finalize_notify ();
+       update:
+               sgen_update_heap_boundaries ((mword)this_section->data, (mword)this_section->data + this_section->size);
        }
-       pin_stats_reset ();
 
-       g_assert (gray_object_queue_is_empty ());
-
-       num_major_sections_saved = MAX (old_num_major_sections - num_major_sections, 1);
-
-       save_target = num_major_sections / 2;
-       /*
-        * We aim to allow the allocation of as many sections as is
-        * necessary to reclaim save_target sections in the next
-        * collection.  We assume the collection pattern won't change.
-        * In the last cycle, we had num_major_sections_saved for
-        * minor_collection_sections_alloced.  Assuming things won't
-        * change, this must be the same ratio as save_target for
-        * allowance_target, i.e.
-        *
-        *    num_major_sections_saved            save_target
-        * --------------------------------- == ----------------
-        * minor_collection_sections_alloced    allowance_target
-        *
-        * hence:
-        */
-       allowance_target = save_target * minor_collection_sections_alloced / num_major_sections_saved;
-
-       minor_collection_section_allowance = MAX (MIN (allowance_target, num_major_sections), MIN_MINOR_COLLECTION_SECTION_ALLOWANCE);
-
-       minor_collection_sections_alloced = 0;
-
-       check_scan_starts ();
+       have_swept = TRUE;
 }
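
The section-allowance arithmetic removed above is not gone; it just no longer
lives in this file, as the new calls to sgen_register_major_sections_alloced()
suggest the policy moved behind the memory governor. To make the removed
formula concrete, here is a small sketch restating it with a worked example
(the function and parameter names are mine, not the governor's API):

    /* saved:    sections reclaimed by the last major collection (at least 1)
     * alloced:  sections allocated by minor collections since the previous major
     * sections: number of major sections currently in the heap
     * minimum:  MIN_MINOR_COLLECTION_SECTION_ALLOWANCE in the old code */
    static int
    next_section_allowance (int saved, int alloced, int sections, int minimum)
    {
            int save_target = sections / 2;
            int target = save_target * alloced / saved;
            /* Example: saved = 10, alloced = 40, sections = 100
             *   => save_target = 50, target = 50 * 40 / 10 = 200,
             *      clamped to MIN (200, 100) = 100 (assuming minimum <= 100),
             * i.e. minor collections may allocate up to 100 more sections
             * before the next major collection is triggered. */
            int allowance = target < sections ? target : sections;
            return allowance > minimum ? allowance : minimum;
    }
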
 
 static void
@@ -795,15 +565,15 @@ major_check_scan_starts (void)
 {
        GCMemSection *section;
        for (section = section_list; section; section = section->block.next)
-               check_section_scan_starts (section);
+               sgen_check_section_scan_starts (section);
 }
 
 static void
-major_dump_heap (void)
+major_dump_heap (FILE *heap_dump_file)
 {
        GCMemSection *section;
        for (section = section_list; section; section = section->block.next)
-               dump_section (section, "old");
+               sgen_dump_section (section, "old");
        /* FIXME: dump pinned sections, too */
 }
 
@@ -819,19 +589,6 @@ major_get_used_size (void)
        return tot;
 }
 
-static void
-major_init (void)
-{
-       minor_collection_section_allowance = MIN_MINOR_COLLECTION_SECTION_ALLOWANCE;
-
-#ifdef HEAVY_STATISTICS
-       mono_counters_register ("# major copy_object() failed forwarded", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_major_copy_object_failed_forwarded);
-       mono_counters_register ("# major copy_object() failed pinned", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_major_copy_object_failed_pinned);
-       mono_counters_register ("# major copy_object() failed large or pinned chunk", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_major_copy_object_failed_large_pinned);
-       mono_counters_register ("# major copy_object() failed to space", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_major_copy_object_failed_to_space);
-#endif
-}
-
 /* only valid during minor collections */
 static int old_num_major_sections;
 
@@ -864,13 +621,12 @@ major_finish_nursery_collection (void)
                section->is_to_space = FALSE;
 
        sections_alloced = num_major_sections - old_num_major_sections;
-       minor_collection_sections_alloced += sections_alloced;
+       sgen_register_major_sections_alloced (sections_alloced);
 }
 
-static gboolean
-major_need_major_collection (void)
+static void
+major_finish_major_collection (void)
 {
-       return minor_collection_sections_alloced > minor_collection_section_allowance;
 }
 
 static gboolean
@@ -888,8 +644,59 @@ major_ptr_is_in_non_pinned_space (char *ptr)
 static void
 major_report_pinned_memory_usage (void)
 {
-       PinnedChunk *chunk;
-       int i = 0;
-       for (chunk = pinned_chunk_list; chunk; chunk = chunk->block.next)
-               report_pinned_chunk (chunk, i++);
+       sgen_report_pinned_mem_usage (&pinned_allocator);
+}
+
+static int
+get_num_major_sections (void)
+{
+       return num_major_sections;
+}
+
+void
+sgen_copying_init (SgenMajorCollector *collector)
+{
+#ifdef HEAVY_STATISTICS
+       mono_counters_register ("# major copy_object() failed forwarded", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_major_copy_object_failed_forwarded);
+       mono_counters_register ("# major copy_object() failed pinned", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_major_copy_object_failed_pinned);
+       mono_counters_register ("# major copy_object() failed large or pinned chunk", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_major_copy_object_failed_large_pinned);
+       mono_counters_register ("# major copy_object() failed to space", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_major_copy_object_failed_to_space);
+#endif
+
+       collector->section_size = MAJOR_SECTION_SIZE;
+       collector->supports_cardtable = FALSE;
+       collector->is_parallel = FALSE;
+
+       collector->have_swept = &have_swept;
+
+       collector->alloc_heap = major_alloc_heap;
+       collector->is_object_live = major_is_object_live;
+       collector->alloc_small_pinned_obj = major_alloc_small_pinned_obj;
+       collector->alloc_degraded = major_alloc_degraded;
+       collector->alloc_object = major_alloc_object;
+       collector->free_pinned_object = free_pinned_object;
+       collector->iterate_objects = major_iterate_objects;
+       collector->free_non_pinned_object = major_free_non_pinned_object;
+       collector->find_pin_queue_start_ends = major_find_pin_queue_start_ends;
+       collector->pin_objects = major_pin_objects;
+       collector->pin_major_object = pin_major_object;
+       collector->init_to_space = major_init_to_space;
+       collector->sweep = major_sweep;
+       collector->check_scan_starts = major_check_scan_starts;
+       collector->dump_heap = major_dump_heap;
+       collector->get_used_size = major_get_used_size;
+       collector->start_nursery_collection = major_start_nursery_collection;
+       collector->finish_nursery_collection = major_finish_nursery_collection;
+       collector->finish_major_collection = major_finish_major_collection;
+       collector->ptr_is_in_non_pinned_space = major_ptr_is_in_non_pinned_space;
+       collector->obj_is_from_pinned_alloc = obj_is_from_pinned_alloc;
+       collector->report_pinned_memory_usage = major_report_pinned_memory_usage;
+       collector->get_num_major_sections = get_num_major_sections;
+       collector->handle_gc_param = NULL;
+       collector->print_gc_param_usage = NULL;
+
+       collector->major_ops.copy_or_mark_object = major_copy_or_mark_object;
+       collector->major_ops.scan_object = major_scan_object;
 }
+
+#endif
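
Finally, sgen_copying_init() is the shape of the new collector interface: the
file no longer drives a major collection itself (the old major_do_collection()
is gone), it just fills in an SgenMajorCollector vtable and the SGen core calls
through it. A toy, self-contained illustration of that dispatch pattern (the
struct and call sites below are stand-ins, not sgen-gc.c's actual code):

    #include <stdio.h>
    #include <stdlib.h>
    #include <stddef.h>

    /* Cut-down stand-in for SgenMajorCollector: just enough to show the
     * function-pointer dispatch that sgen_copying_init() sets up. */
    typedef struct {
            size_t section_size;
            void* (*alloc_object) (size_t size);
            void  (*sweep) (void);
    } MajorCollectorSketch;

    static void* toy_alloc_object (size_t size) { return calloc (1, size); }
    static void  toy_sweep (void)               { printf ("sweep\n"); }

    /* Analogous to sgen_copying_init (collector): the collector module fills
     * in its entry points once, at GC initialization time. */
    static void
    toy_collector_init (MajorCollectorSketch *c)
    {
            c->section_size = 128 * 1024;
            c->alloc_object = toy_alloc_object;
            c->sweep = toy_sweep;
    }

    int
    main (void)
    {
            MajorCollectorSketch major;
            toy_collector_init (&major);
            void *obj = major.alloc_object (64);   /* core code only sees the vtable */
            major.sweep ();
            free (obj);
            return 0;
    }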