 * Nursery allocation code.
 * Copyright 2009-2010 Novell, Inc.
 * Copyright 2011 Xamarin Inc (http://www.xamarin.com)
 * Copyright (C) 2012 Xamarin Inc
 * Licensed under the MIT license. See LICENSE file in the project root for full license information.
 * The young generation is divided into fragments. This is because
 * we can hand one fragment to a thread for lock-less fast alloc and
 * because the young generation ends up fragmented anyway by pinned objects.
 * Once a collection is done, a list of fragments is created. When doing
 * thread local alloc we use smallish nurseries so we allow new threads to
 * allocate memory from gen0 without triggering a collection. Threads that
 * are found to allocate lots of memory are given bigger fragments. This
 * should make the finalizer thread use little nursery memory after a while.
 * We should start assigning threads very small fragments: if there are many
 * threads, the nursery will be full of reserved space that the threads may not
 * use at all, slowing down allocation speed.
 * Thread local allocation is done from areas of memory HotSpot calls Thread Local
 * Allocation Buffers (TLABs).
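
/*
 * A minimal sketch of the TLAB fast path, assuming a hypothetical per-thread
 * cursor `tlab_next' and limit `tlab_real_end' (the real client-side TLAB code
 * lives elsewhere): allocating is just bumping the cursor inside the fragment
 * handed to the thread, and NULL means "get a new fragment or collect".
 */
#if 0
static inline void*
tlab_bump_alloc_sketch (char **tlab_next, char *tlab_real_end, size_t size)
{
	char *p = *tlab_next;
	char *new_next = p + size;
	if (new_next > tlab_real_end)
		return NULL;	/* TLAB exhausted: fall back to the slow path */
	*tlab_next = new_next;
	return p;
}
#endif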
#ifdef HAVE_SEMAPHORE_H
#include <semaphore.h>

#include "mono/sgen/sgen-gc.h"
#include "mono/sgen/sgen-cardtable.h"
#include "mono/sgen/sgen-protocol.h"
#include "mono/sgen/sgen-memory-governor.h"
#include "mono/sgen/sgen-pinning.h"
#include "mono/sgen/sgen-client.h"
#include "mono/utils/mono-membar.h"
/* Enable this to collect nursery allocation diagnostic data */
//#define NALLOC_DEBUG 1

/* The mutator allocs from here. */
static SgenFragmentAllocator mutator_allocator;

/* free list of fragment structures */
static SgenFragment *fragment_freelist = NULL;

/* Allocator cursors */
static char *nursery_last_pinned_end = NULL;

char *sgen_nursery_start;
char *sgen_nursery_end;

/* good sizes are 512KB-1MB: larger ones considerably increase memzeroing time */
size_t sgen_nursery_size = (1 << 22);
/* The number of trailing 0 bits in sgen_nursery_size */
int sgen_nursery_bits = 22;

char *sgen_space_bitmap;
size_t sgen_space_bitmap_size;
#ifdef HEAVY_STATISTICS

static mword stat_wasted_bytes_trailer = 0;
static mword stat_wasted_bytes_small_areas = 0;
static mword stat_wasted_bytes_discarded_fragments = 0;
static guint64 stat_nursery_alloc_requests = 0;
static guint64 stat_alloc_iterations = 0;
static guint64 stat_alloc_retries = 0;

static guint64 stat_nursery_alloc_range_requests = 0;
static guint64 stat_alloc_range_iterations = 0;
static guint64 stat_alloc_range_retries = 0;
/************************************ Nursery allocation debugging *********************************************/

	MonoNativeThreadId tid;

#define ALLOC_RECORD_COUNT 128000

static AllocRecord *alloc_records;
static volatile int next_record;
static volatile int alloc_count;

void dump_alloc_records (void);
void verify_alloc_records (void);
get_reason_name (AllocRecord *rec)

	switch (rec->reason) {
	case FIXED_ALLOC: return "fixed-alloc";
	case RANGE_ALLOC: return "range-alloc";
	case PINNING: return "pinning";
	case BLOCK_ZEROING: return "block-zeroing";
	case CLEAR_NURSERY_FRAGS: return "clear-nursery-frag";
	default: return "invalid";
reset_alloc_records (void)

add_alloc_record (char *addr, size_t size, int reason)

	int idx = InterlockedIncrement (&next_record) - 1;
	alloc_records [idx].address = addr;
	alloc_records [idx].size = size;
	alloc_records [idx].reason = reason;
	alloc_records [idx].seq = idx;
	alloc_records [idx].tid = mono_native_thread_id_get ();
comp_alloc_record (const void *_a, const void *_b)

	const AllocRecord *a = _a;
	const AllocRecord *b = _b;
	if (a->address == b->address)
		return a->seq - b->seq;
	return a->address - b->address;

#define rec_end(REC) ((REC)->address + (REC)->size)
dump_alloc_records (void)

	sgen_qsort (alloc_records, next_record, sizeof (AllocRecord), comp_alloc_record);

	printf ("------------------------------------DUMP RECORDS----------------------------\n");
	for (i = 0; i < next_record; ++i) {
		AllocRecord *rec = alloc_records + i;
		printf ("obj [%p, %p] size %d reason %s seq %d tid %zx\n", rec->address, rec_end (rec), (int)rec->size, get_reason_name (rec), rec->seq, (size_t)rec->tid);

verify_alloc_records (void)

	AllocRecord *prev = NULL;

	sgen_qsort (alloc_records, next_record, sizeof (AllocRecord), comp_alloc_record);
	printf ("------------------------------------DUMP RECORDS- %d %d---------------------------\n", next_record, alloc_count);
	for (i = 0; i < next_record; ++i) {
		AllocRecord *rec = alloc_records + i;
		if (rec_end (prev) > rec->address)
			printf ("WE GOT OVERLAPPING objects %p and %p\n", prev->address, rec->address);
		if ((rec->address - rec_end (prev)) >= 8)
			hole_size = rec->address - rec_end (prev);
		max_hole = MAX (max_hole, hole_size);
		printf ("obj [%p, %p] size %d hole to prev %d reason %s seq %d tid %zx\n", rec->address, rec_end (rec), (int)rec->size, hole_size, get_reason_name (rec), rec->seq, (size_t)rec->tid);
	printf ("SUMMARY total alloc'd %d holes %d max_hole %d\n", total, holes, max_hole);

/*********************************************************************************/
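
/*
 * The low bit of a fragment's `next' pointer doubles as a removal mark: a
 * fragment whose `next' has the mark set is logically deleted and gets unlinked
 * lazily with a CAS (Michael-style lock-free list removal, see
 * find_previous_pointer_fragment and par_alloc_from_fragment below).
 */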
static inline gpointer
mask (gpointer n, uintptr_t bit)

	return (gpointer)(((uintptr_t)n) | bit);

static inline gpointer
unmask (gpointer p)

	return (gpointer)((uintptr_t)p & ~(uintptr_t)0x3);

static inline uintptr_t
get_mark (gpointer n)

	return (uintptr_t)n & 0x1;
/* MUST be called with world stopped */

sgen_fragment_allocator_alloc (void)

	SgenFragment *frag = fragment_freelist;
		fragment_freelist = frag->next_in_order;
		frag->next = frag->next_in_order = NULL;

	frag = (SgenFragment *)sgen_alloc_internal (INTERNAL_MEM_FRAGMENT);
	frag->next = frag->next_in_order = NULL;
sgen_fragment_allocator_add (SgenFragmentAllocator *allocator, char *start, char *end)

	SgenFragment *fragment;

	fragment = sgen_fragment_allocator_alloc ();
	fragment->fragment_start = start;
	fragment->fragment_next = start;
	fragment->fragment_end = end;
	fragment->next_in_order = fragment->next = (SgenFragment *)unmask (allocator->region_head);

	allocator->region_head = allocator->alloc_head = fragment;
	g_assert (fragment->fragment_end > fragment->fragment_start);
sgen_fragment_allocator_release (SgenFragmentAllocator *allocator)

	SgenFragment *last = allocator->region_head;

	/* Find the last fragment in insertion order */
	for (; last->next_in_order; last = last->next_in_order) ;

	last->next_in_order = fragment_freelist;
	fragment_freelist = allocator->region_head;
	allocator->alloc_head = allocator->region_head = NULL;
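
/*
 * Find the link that points at `frag' in the lock-free alloc list, restarting
 * the walk when the list changes underneath us. Along the way, nodes whose
 * `next' pointer is marked (see get_mark above) are unlinked with a CAS, in the
 * style of Michael's lock-free linked list.
 */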
static SgenFragment**
find_previous_pointer_fragment (SgenFragmentAllocator *allocator, SgenFragment *frag)

	SgenFragment *cur, *next;

	prev = &allocator->alloc_head;

	printf ("retry count for fppf is %d\n", count);

	cur = (SgenFragment *)unmask (*prev);

	/*
	 * We need to make sure that we dereference prev below
	 * after reading cur->next above, so we need a read
	 */
	mono_memory_read_barrier ();

	if (!get_mark (next)) {

	next = (SgenFragment *)unmask (next);
	if (InterlockedCompareExchangePointer ((volatile gpointer*)prev, next, cur) != cur)

	/* we must make sure that the next from cur->next happens after */
	mono_memory_write_barrier ();

	cur = (SgenFragment *)unmask (next);
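
/*
 * Atomically claim everything between the current allocation cursor and the end
 * of the fragment, so that no concurrent allocation can land in the trailer we
 * are about to clear or discard.
 */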
claim_remaining_size (SgenFragment *frag, char *alloc_end)

	/* All space used, nothing to claim. */
	if (frag->fragment_end <= alloc_end)

	/* Try to alloc all the remaining space. */
	return InterlockedCompareExchangePointer ((volatile gpointer*)&frag->fragment_next, frag->fragment_end, alloc_end) == alloc_end;
par_alloc_from_fragment (SgenFragmentAllocator *allocator, SgenFragment *frag, size_t size)

	char *p = frag->fragment_next;
	char *end = p + size;

	if (end > frag->fragment_end)

	/* p = frag->fragment_next must happen before */
	mono_memory_barrier ();

	if (InterlockedCompareExchangePointer ((volatile gpointer*)&frag->fragment_next, end, p) != p)

	if (frag->fragment_end - end < SGEN_MAX_NURSERY_WASTE) {
		SgenFragment *next, **prev_ptr;

		/*
		 * Before we clean the remaining nursery, we must claim the remaining space
		 * as it could end up being used by the range allocator, since it can end up
		 * allocating from this dying fragment as it doesn't respect SGEN_MAX_NURSERY_WASTE
		 * when doing second-chance allocation.
		 */
		if ((sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION || sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION_DEBUG) && claim_remaining_size (frag, end)) {
			sgen_clear_range (end, frag->fragment_end);
			HEAVY_STAT (stat_wasted_bytes_trailer += frag->fragment_end - end);
			add_alloc_record (end, frag->fragment_end - end, BLOCK_ZEROING);

		prev_ptr = find_previous_pointer_fragment (allocator, frag);

		/* Use Michael's lock-free linked-list removal */

		/* prev_ptr will be NULL if the fragment was removed concurrently */

		if (!get_mark (next)) {
			/* frag->next read must happen before the first CAS */
			mono_memory_write_barrier ();

			/* Fail if the next node is removed concurrently and its CAS wins */
			if (InterlockedCompareExchangePointer ((volatile gpointer*)&frag->next, mask (next, 1), next) != next) {

		/* The second CAS must happen after the first CAS or frag->next. */
		mono_memory_write_barrier ();

		/* Fail if the previous node was deleted and its CAS wins */
		if (InterlockedCompareExchangePointer ((volatile gpointer*)prev_ptr, unmask (next), frag) != frag) {
			prev_ptr = find_previous_pointer_fragment (allocator, frag);
serial_alloc_from_fragment (SgenFragment **previous, SgenFragment *frag, size_t size)

	char *p = frag->fragment_next;
	char *end = p + size;

	if (end > frag->fragment_end)

	frag->fragment_next = end;

	if (frag->fragment_end - end < SGEN_MAX_NURSERY_WASTE) {
		*previous = frag->next;

		/* Clear the remaining space, pinning depends on this. FIXME move this to use phony arrays */
		memset (end, 0, frag->fragment_end - end);

		*previous = frag->next;
sgen_fragment_allocator_par_alloc (SgenFragmentAllocator *allocator, size_t size)

	InterlockedIncrement (&alloc_count);

	for (frag = (SgenFragment *)unmask (allocator->alloc_head); unmask (frag); frag = (SgenFragment *)unmask (frag->next)) {
		HEAVY_STAT (++stat_alloc_iterations);

		if (size <= (size_t)(frag->fragment_end - frag->fragment_next)) {
			void *p = par_alloc_from_fragment (allocator, frag, size);

			HEAVY_STAT (++stat_alloc_retries);

			add_alloc_record (p, size, FIXED_ALLOC);

sgen_fragment_allocator_serial_alloc (SgenFragmentAllocator *allocator, size_t size)

	SgenFragment **previous;

	InterlockedIncrement (&alloc_count);

	previous = &allocator->alloc_head;

	for (frag = *previous; frag; frag = *previous) {
		char *p = (char *)serial_alloc_from_fragment (previous, frag, size);

		HEAVY_STAT (++stat_alloc_iterations);

		add_alloc_record (p, size, FIXED_ALLOC);

		previous = &frag->next;
sgen_fragment_allocator_serial_range_alloc (SgenFragmentAllocator *allocator, size_t desired_size, size_t minimum_size, size_t *out_alloc_size)

	SgenFragment *frag, **previous, *min_frag = NULL, **prev_min_frag = NULL;
	size_t current_minimum = minimum_size;

	InterlockedIncrement (&alloc_count);

	previous = &allocator->alloc_head;

	for (frag = *previous; frag; frag = *previous) {
		size_t frag_size = frag->fragment_end - frag->fragment_next;

		HEAVY_STAT (++stat_alloc_range_iterations);

		if (desired_size <= frag_size) {
			*out_alloc_size = desired_size;

			p = serial_alloc_from_fragment (previous, frag, desired_size);

			add_alloc_record (p, desired_size, RANGE_ALLOC);

		if (current_minimum <= frag_size) {
			prev_min_frag = previous;
			current_minimum = frag_size;

		previous = &frag->next;

		size_t frag_size = min_frag->fragment_end - min_frag->fragment_next;
		*out_alloc_size = frag_size;

		p = serial_alloc_from_fragment (prev_min_frag, min_frag, frag_size);

		add_alloc_record (p, frag_size, RANGE_ALLOC);
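
/*
 * Illustrative usage sketch (the caller below is hypothetical, not taken from
 * this file): a range request asks for `desired' bytes but will accept anything
 * down to `minimum'; the size actually granted is returned through the out
 * parameter, and the caller then sub-allocates from the returned block.
 */
#if 0
static void
range_alloc_usage_sketch (SgenFragmentAllocator *allocator)
{
	size_t granted = 0;
	char *block = (char *)sgen_fragment_allocator_serial_range_alloc (allocator, 32 * 1024, 4 * 1024, &granted);
	if (block) {
		/* granted is somewhere in [4 KB, 32 KB]; carve allocations out of [block, block + granted). */
	}
}
#endif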
sgen_fragment_allocator_par_range_alloc (SgenFragmentAllocator *allocator, size_t desired_size, size_t minimum_size, size_t *out_alloc_size)

	SgenFragment *frag, *min_frag;
	size_t current_minimum;

	current_minimum = minimum_size;

	InterlockedIncrement (&alloc_count);

	for (frag = (SgenFragment *)unmask (allocator->alloc_head); frag; frag = (SgenFragment *)unmask (frag->next)) {
		size_t frag_size = frag->fragment_end - frag->fragment_next;

		HEAVY_STAT (++stat_alloc_range_iterations);

		if (desired_size <= frag_size) {
			*out_alloc_size = desired_size;

			p = par_alloc_from_fragment (allocator, frag, desired_size);

			HEAVY_STAT (++stat_alloc_range_retries);

			add_alloc_record (p, desired_size, RANGE_ALLOC);

		if (current_minimum <= frag_size) {
			current_minimum = frag_size;

	/* The second fragment_next read should be ordered with respect to the first code block */
	mono_memory_barrier ();

		frag_size = min_frag->fragment_end - min_frag->fragment_next;
		if (frag_size < minimum_size)

		*out_alloc_size = frag_size;

		mono_memory_barrier ();
		p = par_alloc_from_fragment (allocator, min_frag, frag_size);

		/* XXX restarting here is quite dubious given this is already second-chance allocation. */

		HEAVY_STAT (++stat_alloc_retries);

		add_alloc_record (p, frag_size, RANGE_ALLOC);
sgen_clear_allocator_fragments (SgenFragmentAllocator *allocator)

	for (frag = (SgenFragment *)unmask (allocator->alloc_head); frag; frag = (SgenFragment *)unmask (frag->next)) {
		SGEN_LOG (4, "Clear nursery frag %p-%p", frag->fragment_next, frag->fragment_end);
		sgen_clear_range (frag->fragment_next, frag->fragment_end);
		add_alloc_record (frag->fragment_next, frag->fragment_end - frag->fragment_next, CLEAR_NURSERY_FRAGS);

/* Clear all remaining nursery fragments */

sgen_clear_nursery_fragments (void)

	if (sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION || sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION_DEBUG) {
		sgen_clear_allocator_fragments (&mutator_allocator);
		sgen_minor_collector.clear_fragments ();
/*
 * Mark a given range of memory as invalid.
 *
 * This can be done either by zeroing memory or by placing
 * a phony byte[] array. This keeps the heap forward walkable.
 *
 * This function ignores calls with a zero range, even if
 * both start and end are NULL.
 */

sgen_clear_range (char *start, char *end)

	size_t size = end - start;

	if ((start && !end) || (start > end))
		g_error ("Invalid range [%p %p]", start, end);

	if (sgen_client_array_fill_range (start, size)) {
		sgen_set_nursery_scan_start (start);
		SGEN_ASSERT (0, start + sgen_safe_object_get_size ((GCObject*)start) == end, "Array fill produced wrong size");
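
/*
 * A sketch of what "forward walkable" buys us; the walker below is illustrative,
 * not this file's code. A scan can step from one object to the next using only
 * the object size, so a dead range that has been turned into a phony array is
 * simply stepped over like any other object.
 */
#if 0
static void
walk_heap_range_sketch (char *start, char *end, void (*callback) (GCObject *obj))
{
	char *p = start;
	while (p < end) {
		GCObject *obj = (GCObject *)p;
		callback (obj);
		p += SGEN_ALIGN_UP (sgen_safe_object_get_size (obj));
	}
}
#endif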
sgen_nursery_allocator_prepare_for_pinning (void)

	sgen_clear_allocator_fragments (&mutator_allocator);
	sgen_minor_collector.clear_fragments ();

static mword fragment_total = 0;

/*
 * We found a fragment of free memory in the nursery: memzero it and if
 * it is big enough, add it to the list of fragments that can be used for
 * allocation.
 */
add_nursery_frag (SgenFragmentAllocator *allocator, size_t frag_size, char* frag_start, char* frag_end)

	SGEN_LOG (4, "Found empty fragment: %p-%p, size: %zd", frag_start, frag_end, frag_size);
	binary_protocol_empty (frag_start, frag_size);
	/* Not worth dealing with smaller fragments: need to tune */
	if (frag_size >= SGEN_MAX_NURSERY_WASTE) {
		/* memsetting just the first chunk start is bound to provide better cache locality */
		if (sgen_get_nursery_clear_policy () == CLEAR_AT_GC)
			memset (frag_start, 0, frag_size);
		else if (sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION_DEBUG)
			memset (frag_start, 0xff, frag_size);

		/* XXX convert this into a flight record entry
		printf ("\tfragment [%p %p] size %zd\n", frag_start, frag_end, frag_size);
		*/
		sgen_fragment_allocator_add (allocator, frag_start, frag_end);
		fragment_total += frag_size;

	/* Clear unused fragments, pinning depends on this */
	sgen_clear_range (frag_start, frag_end);
	HEAVY_STAT (stat_wasted_bytes_small_areas += frag_size);
fragment_list_reverse (SgenFragmentAllocator *allocator)

	SgenFragment *prev = NULL, *list = allocator->region_head;

	SgenFragment *next = list->next;

	list->next_in_order = prev;

	allocator->region_head = allocator->alloc_head = prev;
sgen_build_nursery_fragments (GCMemSection *nursery_section, SgenGrayQueue *unpin_queue)

	char *frag_start, *frag_end;

	SgenFragment *frags_ranges;
	void **pin_start, **pin_entry, **pin_end;

	reset_alloc_records ();

	/* The mutator fragments are done. We no longer need them. */
	sgen_fragment_allocator_release (&mutator_allocator);

	frag_start = sgen_nursery_start;

	/* The current nursery might give us a fragment list to exclude [start, next[ */
	frags_ranges = sgen_minor_collector.build_fragments_get_exclude_head ();

	/* clear scan starts */
	memset (nursery_section->scan_starts, 0, nursery_section->num_scan_start * sizeof (gpointer));

	pin_start = pin_entry = sgen_pinning_get_entry (nursery_section->pin_queue_first_entry);
	pin_end = sgen_pinning_get_entry (nursery_section->pin_queue_last_entry);

	while (pin_entry < pin_end || frags_ranges) {

		addr0 = addr1 = sgen_nursery_end;
		if (pin_entry < pin_end)
			addr0 = (char *)*pin_entry;

		addr1 = frags_ranges->fragment_start;

		GRAY_OBJECT_ENQUEUE_SERIAL (unpin_queue, (GCObject*)addr0, sgen_obj_get_descriptor_safe ((GCObject*)addr0));

		SGEN_UNPIN_OBJECT (addr0);
		size = SGEN_ALIGN_UP (sgen_safe_object_get_size ((GCObject*)addr0));
		CANARIFY_SIZE (size);
		sgen_set_nursery_scan_start (addr0);

		size = frags_ranges->fragment_next - addr1;
		frags_ranges = frags_ranges->next_in_order;

		frag_size = frag_end - frag_start;

		g_assert (frag_size >= 0);

		if (frag_size && size)
			add_nursery_frag (&mutator_allocator, frag_size, frag_start, frag_end);

		add_alloc_record (*pin_entry, frag_size, PINNING);

		frag_start = frag_end + frag_size;

	nursery_last_pinned_end = frag_start;
	frag_end = sgen_nursery_end;
	frag_size = frag_end - frag_start;

	add_nursery_frag (&mutator_allocator, frag_size, frag_start, frag_end);

	/* Now it's safe to release the fragments exclude list. */
	sgen_minor_collector.build_fragments_release_exclude_head ();

	/* First we reorder the fragment list to be in ascending address order. This makes H/W prefetchers happier. */
	fragment_list_reverse (&mutator_allocator);

	/* The collector might want to do something with the final nursery fragment list. */
	sgen_minor_collector.build_fragments_finish (&mutator_allocator);

	if (!unmask (mutator_allocator.alloc_head)) {
		SGEN_LOG (1, "Nursery fully pinned");
		for (pin_entry = pin_start; pin_entry < pin_end; ++pin_entry) {
			GCObject *p = (GCObject *)*pin_entry;
			SGEN_LOG (3, "Bastard pinning obj %p (%s), size: %zd", p, sgen_client_vtable_get_name (SGEN_LOAD_VTABLE (p)), sgen_safe_object_get_size (p));

	return fragment_total;
sgen_nursery_alloc_get_upper_alloc_bound (void)

	/* FIXME we need to calculate the collector upper bound as well, but this must be done in the previous GC. */
	return sgen_nursery_end;

/*** Nursery memory allocation ***/

sgen_nursery_retire_region (void *address, ptrdiff_t size)

	HEAVY_STAT (stat_wasted_bytes_discarded_fragments += size);

sgen_can_alloc_size (size_t size)

	if (!SGEN_CAN_ALIGN_UP (size))

	size = SGEN_ALIGN_UP (size);

	for (frag = (SgenFragment *)unmask (mutator_allocator.alloc_head); frag; frag = (SgenFragment *)unmask (frag->next)) {
		if ((size_t)(frag->fragment_end - frag->fragment_next) >= size)

sgen_nursery_alloc (size_t size)

	SGEN_ASSERT (1, size >= (SGEN_CLIENT_MINIMUM_OBJECT_SIZE + CANARY_SIZE) && size <= (SGEN_MAX_SMALL_OBJ_SIZE + CANARY_SIZE), "Invalid nursery object size");

	SGEN_LOG (4, "Searching nursery for size: %zd", size);
	size = SGEN_ALIGN_UP (size);

	HEAVY_STAT (++stat_nursery_alloc_requests);

	return sgen_fragment_allocator_par_alloc (&mutator_allocator, size);

sgen_nursery_alloc_range (size_t desired_size, size_t minimum_size, size_t *out_alloc_size)

	SGEN_LOG (4, "Searching for byte range desired size: %zd minimum size %zd", desired_size, minimum_size);

	HEAVY_STAT (++stat_nursery_alloc_range_requests);

	return sgen_fragment_allocator_par_range_alloc (&mutator_allocator, desired_size, minimum_size, out_alloc_size);
/*** Initialization ***/

#ifdef HEAVY_STATISTICS

sgen_nursery_allocator_init_heavy_stats (void)

	mono_counters_register ("bytes wasted trailer fragments", MONO_COUNTER_GC | MONO_COUNTER_WORD | MONO_COUNTER_BYTES, &stat_wasted_bytes_trailer);
	mono_counters_register ("bytes wasted small areas", MONO_COUNTER_GC | MONO_COUNTER_WORD | MONO_COUNTER_BYTES, &stat_wasted_bytes_small_areas);
	mono_counters_register ("bytes wasted discarded fragments", MONO_COUNTER_GC | MONO_COUNTER_WORD | MONO_COUNTER_BYTES, &stat_wasted_bytes_discarded_fragments);

	mono_counters_register ("# nursery alloc requests", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_nursery_alloc_requests);
	mono_counters_register ("# nursery alloc iterations", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_alloc_iterations);
	mono_counters_register ("# nursery alloc retries", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_alloc_retries);

	mono_counters_register ("# nursery alloc range requests", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_nursery_alloc_range_requests);
	mono_counters_register ("# nursery alloc range iterations", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_alloc_range_iterations);
	mono_counters_register ("# nursery alloc range retries", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_alloc_range_retries);
sgen_init_nursery_allocator (void)

	sgen_register_fixed_internal_mem_type (INTERNAL_MEM_FRAGMENT, sizeof (SgenFragment));

	alloc_records = sgen_alloc_os_memory (sizeof (AllocRecord) * ALLOC_RECORD_COUNT, SGEN_ALLOC_INTERNAL | SGEN_ALLOC_ACTIVATE, "debugging memory");

sgen_nursery_alloc_prepare_for_minor (void)

	sgen_minor_collector.prepare_to_space (sgen_space_bitmap, sgen_space_bitmap_size);

sgen_nursery_alloc_prepare_for_major (void)

	sgen_minor_collector.prepare_to_space (sgen_space_bitmap, sgen_space_bitmap_size);
sgen_nursery_allocator_set_nursery_bounds (char *start, char *end)

	sgen_nursery_start = start;
	sgen_nursery_end = end;

	/*
	 * This will not divide evenly for tiny nurseries (<4 KB), so we make sure to be on
	 * the right side of things and round up. We could just do a MAX (1, x) instead,
	 * since the nursery size must be a power of 2.
	 */
	sgen_space_bitmap_size = (end - start + SGEN_TO_SPACE_GRANULE_IN_BYTES * 8 - 1) / (SGEN_TO_SPACE_GRANULE_IN_BYTES * 8);
	sgen_space_bitmap = (char *)g_malloc0 (sgen_space_bitmap_size);
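
	/*
	 * Worked example of the rounding above (the granule value is hypothetical;
	 * only the formula comes from the line above): with a 4 MB nursery and a
	 * 512-byte to-space granule, one bitmap bit covers 512 bytes and one bitmap
	 * byte covers 4096 bytes, so
	 * sgen_space_bitmap_size = (4194304 + 4095) / 4096 = 1024 bytes.
	 */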
	/* Setup the single first large fragment */
	sgen_minor_collector.init_nursery (&mutator_allocator, start, end);