/*
 * sgen-nursery-allocator.c: Nursery allocation code.
 *
 * Copyright 2009-2010 Novell, Inc.
 * Copyright 2011 Xamarin Inc (http://www.xamarin.com)
 * Copyright (C) 2012 Xamarin Inc
 *
 * Licensed under the MIT license. See LICENSE file in the project root for full license information.
 */

/*
 * The young generation is divided into fragments. This is because
 * we can hand one fragment to a thread for lock-less fast alloc and
 * because the young generation ends up fragmented anyway by pinned objects.
 * Once a collection is done, a list of fragments is created. When doing
 * thread local alloc we use smallish nurseries so we allow new threads to
 * allocate memory from gen0 without triggering a collection. Threads that
 * are found to allocate lots of memory are given bigger fragments. This
 * should make the finalizer thread use little nursery memory after a while.
 * We should start assigning threads very small fragments: if there are many
 * threads the nursery will be full of reserved space that the threads may not
 * use at all, slowing down allocation speed.
 * Thread local allocation is done from areas of memory Hotspot calls Thread Local
 * Allocation Buffers (TLABs).
 */
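
/*
 * A minimal sketch of the fast path a TLAB enables, using hypothetical
 * names (the real TLAB code lives in the client allocator, not in this
 * file): each thread bump-allocates from a private [tlab_next, tlab_end)
 * range carved out of a fragment and only takes a slow path on overflow.
 *
 *	static __thread char *tlab_next, *tlab_end;
 *
 *	static void*
 *	tlab_alloc (size_t size)
 *	{
 *		char *p = tlab_next;
 *		if (size > (size_t)(tlab_end - p))
 *			return NULL;	// slow path: refill from a new fragment
 *		tlab_next = p + size;
 *		return p;
 *	}
 */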

#ifdef HAVE_SEMAPHORE_H
#include <semaphore.h>
#endif

#include "mono/sgen/sgen-gc.h"
#include "mono/sgen/sgen-cardtable.h"
#include "mono/sgen/sgen-protocol.h"
#include "mono/sgen/sgen-memory-governor.h"
#include "mono/sgen/sgen-pinning.h"
#include "mono/sgen/sgen-client.h"
#include "mono/utils/mono-membar.h"

/* Enable it so nursery allocation diagnostic data is collected */
//#define NALLOC_DEBUG 1

/* The mutator allocs from here. */
static SgenFragmentAllocator mutator_allocator;

/* freelist of fragment structures */
static SgenFragment *fragment_freelist = NULL;

/* Allocator cursors */
static char *nursery_last_pinned_end = NULL;

char *sgen_nursery_start;
char *sgen_nursery_end;

size_t sgen_nursery_size = (1 << 22);
int sgen_nursery_bits = 22;

char *sgen_space_bitmap;
size_t sgen_space_bitmap_size;

#ifdef HEAVY_STATISTICS

static mword stat_wasted_bytes_trailer = 0;
static mword stat_wasted_bytes_small_areas = 0;
static mword stat_wasted_bytes_discarded_fragments = 0;
static guint64 stat_nursery_alloc_requests = 0;
static guint64 stat_alloc_iterations = 0;
static guint64 stat_alloc_retries = 0;

static guint64 stat_nursery_alloc_range_requests = 0;
static guint64 stat_alloc_range_iterations = 0;
static guint64 stat_alloc_range_retries = 0;

#endif

/************************************ Nursery allocation debugging *********************************************/

#ifdef NALLOC_DEBUG

enum {
	FIXED_ALLOC = 1,
	RANGE_ALLOC,
	PINNING,
	BLOCK_ZEROING,
	CLEAR_NURSERY_FRAGS
};

typedef struct {
	char *address;
	size_t size;
	int reason;
	int seq;
	MonoNativeThreadId tid;
} AllocRecord;

#define ALLOC_RECORD_COUNT 128000

static AllocRecord *alloc_records;
static volatile int next_record;
static volatile int alloc_count;

void dump_alloc_records (void);
void verify_alloc_records (void);

static const char*
get_reason_name (AllocRecord *rec)
{
	switch (rec->reason) {
	case FIXED_ALLOC: return "fixed-alloc";
	case RANGE_ALLOC: return "range-alloc";
	case PINNING: return "pinning";
	case BLOCK_ZEROING: return "block-zeroing";
	case CLEAR_NURSERY_FRAGS: return "clear-nursery-frag";
	default: return "invalid";
	}
}

static void
reset_alloc_records (void)
{
	next_record = 0;
	alloc_count = 0;
}

static void
add_alloc_record (char *addr, size_t size, int reason)
{
	int idx = InterlockedIncrement (&next_record) - 1;
	alloc_records [idx].address = addr;
	alloc_records [idx].size = size;
	alloc_records [idx].reason = reason;
	alloc_records [idx].seq = idx;
	alloc_records [idx].tid = mono_native_thread_id_get ();
}

static int
comp_alloc_record (const void *_a, const void *_b)
{
	const AllocRecord *a = _a;
	const AllocRecord *b = _b;
	if (a->address == b->address)
		return a->seq - b->seq;
	return a->address - b->address;
}

#define rec_end(REC) ((REC)->address + (REC)->size)

void
dump_alloc_records (void)
{
	int i;
	sgen_qsort (alloc_records, next_record, sizeof (AllocRecord), comp_alloc_record);

	printf ("------------------------------------DUMP RECORDS----------------------------\n");
	for (i = 0; i < next_record; ++i) {
		AllocRecord *rec = alloc_records + i;
		printf ("obj [%p, %p] size %d reason %s seq %d tid %zx\n", rec->address, rec_end (rec), (int)rec->size, get_reason_name (rec), rec->seq, (size_t)rec->tid);
	}
}

void
verify_alloc_records (void)
{
	int i;
	int total = 0;
	int holes = 0;
	int max_hole = 0;
	AllocRecord *prev = NULL;

	sgen_qsort (alloc_records, next_record, sizeof (AllocRecord), comp_alloc_record);
	printf ("------------------------------------DUMP RECORDS- %d %d---------------------------\n", next_record, alloc_count);
	for (i = 0; i < next_record; ++i) {
		AllocRecord *rec = alloc_records + i;
		int hole_size = 0;
		total += rec->size;
		if (prev) {
			if (rec_end (prev) > rec->address)
				printf ("WE GOT OVERLAPPING objects %p and %p\n", prev->address, rec->address);
			if ((rec->address - rec_end (prev)) >= 8)
				++holes;
			hole_size = rec->address - rec_end (prev);
			max_hole = MAX (max_hole, hole_size);
		}
		printf ("obj [%p, %p] size %d hole to prev %d reason %s seq %d tid %zx\n", rec->address, rec_end (rec), (int)rec->size, hole_size, get_reason_name (rec), rec->seq, (size_t)rec->tid);
		prev = rec;
	}
	printf ("SUMMARY total alloc'd %d holes %d max_hole %d\n", total, holes, max_hole);
}

#endif

/*********************************************************************************/

static inline gpointer
mask (gpointer n, uintptr_t bit)
{
	return (gpointer)(((uintptr_t)n) | bit);
}

static inline gpointer
unmask (gpointer p)
{
	return (gpointer)((uintptr_t)p & ~(uintptr_t)0x3);
}

static inline uintptr_t
get_mark (gpointer n)
{
	return (uintptr_t)n & 0x1;
}
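
/*
 * The helpers above implement pointer tagging for the lock-free fragment
 * list: the low bit of a fragment's next pointer marks it as logically
 * deleted (Harris/Michael style list). Illustrative sketch only, not the
 * actual removal code further below:
 *
 *	SgenFragment *next = frag->next;
 *	if (!get_mark (next)) {
 *		// First CAS: logically delete frag by tagging its link.
 *		// A second CAS on the predecessor's link then unlinks it.
 *		InterlockedCompareExchangePointer (
 *			(volatile gpointer*)&frag->next, mask (next, 1), next);
 *	}
 */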

/* MUST be called with world stopped */
SgenFragment*
sgen_fragment_allocator_alloc (void)
{
	SgenFragment *frag = fragment_freelist;
	if (frag) {
		fragment_freelist = frag->next_in_order;
		frag->next = frag->next_in_order = NULL;
		return frag;
	}
	frag = (SgenFragment *)sgen_alloc_internal (INTERNAL_MEM_FRAGMENT);
	frag->next = frag->next_in_order = NULL;
	return frag;
}

void
sgen_fragment_allocator_add (SgenFragmentAllocator *allocator, char *start, char *end)
{
	SgenFragment *fragment;

	fragment = sgen_fragment_allocator_alloc ();
	fragment->fragment_start = start;
	fragment->fragment_next = start;
	fragment->fragment_end = end;
	fragment->next_in_order = fragment->next = (SgenFragment *)unmask (allocator->region_head);

	allocator->region_head = allocator->alloc_head = fragment;
	g_assert (fragment->fragment_end > fragment->fragment_start);
}

void
sgen_fragment_allocator_release (SgenFragmentAllocator *allocator)
{
	SgenFragment *last = allocator->region_head;
	if (!last)
		return;

	/* Find the last fragment in insert order */
	for (; last->next_in_order; last = last->next_in_order) ;

	last->next_in_order = fragment_freelist;
	fragment_freelist = allocator->region_head;
	allocator->alloc_head = allocator->region_head = NULL;
}

static SgenFragment**
find_previous_pointer_fragment (SgenFragmentAllocator *allocator, SgenFragment *frag)
{
	SgenFragment **prev;
	SgenFragment *cur, *next;
#ifdef NALLOC_DEBUG
	int count = 0;
#endif

try_again:
	prev = &allocator->alloc_head;
#ifdef NALLOC_DEBUG
	if (count++ > 5)
		printf ("retry count for fppf is %d\n", count);
#endif

	cur = (SgenFragment *)unmask (*prev);

	while (1) {
		if (cur == NULL)
			return NULL;
		next = cur->next;

		/*
		 * We need to make sure that we dereference prev below
		 * after reading cur->next above, so we need a read
		 * barrier.
		 */
		mono_memory_read_barrier ();

		if (*prev != cur)
			goto try_again;

		if (!get_mark (next)) {
			if (cur == frag)
				return prev;
			prev = &cur->next;
		} else {
			next = (SgenFragment *)unmask (next);
			if (InterlockedCompareExchangePointer ((volatile gpointer*)prev, next, cur) != cur)
				goto try_again;
			/* we must make sure that the next from cur->next happens after */
			mono_memory_write_barrier ();
		}

		cur = (SgenFragment *)unmask (next);
	}
	return NULL;
}

static gboolean
claim_remaining_size (SgenFragment *frag, char *alloc_end)
{
	/* All space used, nothing to claim. */
	if (frag->fragment_end <= alloc_end)
		return FALSE;

	/* Try to alloc all the remaining space. */
	return InterlockedCompareExchangePointer ((volatile gpointer*)&frag->fragment_next, frag->fragment_end, alloc_end) == alloc_end;
}

static void*
par_alloc_from_fragment (SgenFragmentAllocator *allocator, SgenFragment *frag, size_t size)
{
	char *p = frag->fragment_next;
	char *end = p + size;

	if (end > frag->fragment_end)
		return NULL;

	/* p = frag->fragment_next must happen before */
	mono_memory_barrier ();

	if (InterlockedCompareExchangePointer ((volatile gpointer*)&frag->fragment_next, end, p) != p)
		return NULL;

	if (frag->fragment_end - end < SGEN_MAX_NURSERY_WASTE) {
		SgenFragment *next, **prev_ptr;

		/*
		 * Before we clean the remaining nursery, we must claim the remaining space
		 * as it could end up being used by the range allocator, since it can end up
		 * allocating from this dying fragment as it doesn't respect SGEN_MAX_NURSERY_WASTE
		 * when doing second chance allocation.
		 */
		if ((sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION || sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION_DEBUG) && claim_remaining_size (frag, end)) {
			sgen_clear_range (end, frag->fragment_end);
			HEAVY_STAT (stat_wasted_bytes_trailer += frag->fragment_end - end);
#ifdef NALLOC_DEBUG
			add_alloc_record (end, frag->fragment_end - end, BLOCK_ZEROING);
#endif
		}

		prev_ptr = find_previous_pointer_fragment (allocator, frag);

		/* Use Michael's linked list remove. */

		/* prev_ptr will be null if the fragment was removed concurrently */
		while (prev_ptr) {
			next = frag->next;

			/* already deleted */
			if (!get_mark (next)) {
				/* frag->next read must happen before the first CAS */
				mono_memory_write_barrier ();

				/* Fail if the next node is removed concurrently and its CAS wins */
				if (InterlockedCompareExchangePointer ((volatile gpointer*)&frag->next, mask (next, 1), next) != next) {
					continue;
				}
			}

			/* The second CAS must happen after the first CAS or frag->next. */
			mono_memory_write_barrier ();

			/* Fail if the previous node was deleted and its CAS wins */
			if (InterlockedCompareExchangePointer ((volatile gpointer*)prev_ptr, unmask (next), frag) != frag) {
				prev_ptr = find_previous_pointer_fragment (allocator, frag);
				continue;
			}
			break;
		}
	}

	return p;
}

static void*
serial_alloc_from_fragment (SgenFragment **previous, SgenFragment *frag, size_t size)
{
	char *p = frag->fragment_next;
	char *end = p + size;

	if (end > frag->fragment_end)
		return NULL;

	frag->fragment_next = end;

	if (frag->fragment_end - end < SGEN_MAX_NURSERY_WASTE) {
		*previous = frag->next;

		/* Clear the remaining space, pinning depends on this. FIXME move this to use phony arrays */
		memset (end, 0, frag->fragment_end - end);

		*previous = frag->next;
	}

	return p;
}

void*
sgen_fragment_allocator_par_alloc (SgenFragmentAllocator *allocator, size_t size)
{
	SgenFragment *frag;

#ifdef NALLOC_DEBUG
	InterlockedIncrement (&alloc_count);
#endif

restart:
	for (frag = (SgenFragment *)unmask (allocator->alloc_head); unmask (frag); frag = (SgenFragment *)unmask (frag->next)) {
		HEAVY_STAT (++stat_alloc_iterations);

		if (size <= (size_t)(frag->fragment_end - frag->fragment_next)) {
			void *p = par_alloc_from_fragment (allocator, frag, size);
			if (!p) {
				HEAVY_STAT (++stat_alloc_retries);
				goto restart;
			}
#ifdef NALLOC_DEBUG
			add_alloc_record (p, size, FIXED_ALLOC);
#endif
			return p;
		}
	}
	return NULL;
}

void*
sgen_fragment_allocator_serial_alloc (SgenFragmentAllocator *allocator, size_t size)
{
	SgenFragment *frag;
	SgenFragment **previous;
#ifdef NALLOC_DEBUG
	InterlockedIncrement (&alloc_count);
#endif

	previous = &allocator->alloc_head;

	for (frag = *previous; frag; frag = *previous) {
		char *p = (char *)serial_alloc_from_fragment (previous, frag, size);

		HEAVY_STAT (++stat_alloc_iterations);

		if (p) {
#ifdef NALLOC_DEBUG
			add_alloc_record (p, size, FIXED_ALLOC);
#endif
			return p;
		}
		previous = &frag->next;
	}
	return NULL;
}

char*
sgen_fragment_allocator_serial_range_alloc (SgenFragmentAllocator *allocator, size_t desired_size, size_t minimum_size, size_t *out_alloc_size)
{
	SgenFragment *frag, **previous, *min_frag = NULL, **prev_min_frag = NULL;
	size_t current_minimum = minimum_size;

#ifdef NALLOC_DEBUG
	InterlockedIncrement (&alloc_count);
#endif

	previous = &allocator->alloc_head;

	for (frag = *previous; frag; frag = *previous) {
		size_t frag_size = frag->fragment_end - frag->fragment_next;

		HEAVY_STAT (++stat_alloc_range_iterations);

		if (desired_size <= frag_size) {
			void *p;
			*out_alloc_size = desired_size;

			p = serial_alloc_from_fragment (previous, frag, desired_size);
#ifdef NALLOC_DEBUG
			add_alloc_record (p, desired_size, RANGE_ALLOC);
#endif
			return (char *)p;
		}
		if (current_minimum <= frag_size) {
			min_frag = frag;
			prev_min_frag = previous;
			current_minimum = frag_size;
		}
		previous = &frag->next;
	}

	if (min_frag) {
		void *p;
		size_t frag_size = min_frag->fragment_end - min_frag->fragment_next;
		*out_alloc_size = frag_size;

		p = serial_alloc_from_fragment (prev_min_frag, min_frag, frag_size);
#ifdef NALLOC_DEBUG
		add_alloc_record (p, frag_size, RANGE_ALLOC);
#endif
		return (char *)p;
	}

	return NULL;
}

char*
sgen_fragment_allocator_par_range_alloc (SgenFragmentAllocator *allocator, size_t desired_size, size_t minimum_size, size_t *out_alloc_size)
{
	SgenFragment *frag, *min_frag;
	size_t current_minimum;

restart:
	min_frag = NULL;
	current_minimum = minimum_size;

#ifdef NALLOC_DEBUG
	InterlockedIncrement (&alloc_count);
#endif

	for (frag = (SgenFragment *)unmask (allocator->alloc_head); frag; frag = (SgenFragment *)unmask (frag->next)) {
		size_t frag_size = frag->fragment_end - frag->fragment_next;

		HEAVY_STAT (++stat_alloc_range_iterations);

		if (desired_size <= frag_size) {
			void *p;
			*out_alloc_size = desired_size;

			p = par_alloc_from_fragment (allocator, frag, desired_size);
			if (!p) {
				HEAVY_STAT (++stat_alloc_range_retries);
				continue;
			}
#ifdef NALLOC_DEBUG
			add_alloc_record (p, desired_size, RANGE_ALLOC);
#endif
			return (char *)p;
		}
		if (current_minimum <= frag_size) {
			min_frag = frag;
			current_minimum = frag_size;
		}
	}

	/* The second fragment_next read should be ordered with respect to the first code block */
	mono_memory_barrier ();

	if (min_frag) {
		void *p;
		size_t frag_size;

		frag_size = min_frag->fragment_end - min_frag->fragment_next;
		if (frag_size < minimum_size)
			goto restart;

		*out_alloc_size = frag_size;

		mono_memory_barrier ();
		p = par_alloc_from_fragment (allocator, min_frag, frag_size);

		/* XXX restarting here is quite dubious given this is already second chance allocation. */
		if (!p) {
			HEAVY_STAT (++stat_alloc_retries);
			goto restart;
		}
#ifdef NALLOC_DEBUG
		add_alloc_record (p, frag_size, RANGE_ALLOC);
#endif
		return (char *)p;
	}

	return NULL;
}

static void
sgen_clear_allocator_fragments (SgenFragmentAllocator *allocator)
{
	SgenFragment *frag;

	for (frag = (SgenFragment *)unmask (allocator->alloc_head); frag; frag = (SgenFragment *)unmask (frag->next)) {
		SGEN_LOG (4, "Clear nursery frag %p-%p", frag->fragment_next, frag->fragment_end);
		sgen_clear_range (frag->fragment_next, frag->fragment_end);
#ifdef NALLOC_DEBUG
		add_alloc_record (frag->fragment_next, frag->fragment_end - frag->fragment_next, CLEAR_NURSERY_FRAGS);
#endif
	}
}

/* Clear all remaining nursery fragments */
void
sgen_clear_nursery_fragments (void)
{
	if (sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION || sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION_DEBUG) {
		sgen_clear_allocator_fragments (&mutator_allocator);
		sgen_minor_collector.clear_fragments ();
	}
}

/*
 * Mark a given range of memory as invalid.
 *
 * This can be done either by zeroing memory or by placing
 * a phony byte[] array. This keeps the heap forward walkable.
 *
 * This function ignores calls with a zero range, even if
 * both start and end are NULL.
 */
void
sgen_clear_range (char *start, char *end)
{
	size_t size = end - start;

	if ((start && !end) || (start > end))
		g_error ("Invalid range [%p %p]", start, end);

	if (sgen_client_array_fill_range (start, size)) {
		sgen_set_nursery_scan_start (start);
		SGEN_ASSERT (0, start + sgen_safe_object_get_size ((GCObject*)start) == end, "Array fill produced wrong size");
	}
}
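
/*
 * "Forward walkable" means a scan can start at any object and reach the
 * next one by adding the object's size; a cleared range must therefore
 * still parse as an object, which is why a phony array is preferred over
 * plain zeroing. Conceptual caller-side sketch (the memset fallback is an
 * assumption for illustration, not this file's behavior):
 *
 *	// invalidate [start, end) but keep the heap parseable
 *	if (!sgen_client_array_fill_range (start, end - start))
 *		memset (start, 0, end - start);
 */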

void
sgen_nursery_allocator_prepare_for_pinning (void)
{
	sgen_clear_allocator_fragments (&mutator_allocator);
	sgen_minor_collector.clear_fragments ();
}

static mword fragment_total = 0;

/*
 * We found a fragment of free memory in the nursery: memzero it and if
 * it is big enough, add it to the list of fragments that can be used for
 * allocation.
 */
static void
add_nursery_frag (SgenFragmentAllocator *allocator, size_t frag_size, char* frag_start, char* frag_end)
{
	SGEN_LOG (4, "Found empty fragment: %p-%p, size: %zd", frag_start, frag_end, frag_size);
	binary_protocol_empty (frag_start, frag_size);
	/* Not worth dealing with smaller fragments: need to tune */
	if (frag_size >= SGEN_MAX_NURSERY_WASTE) {
		/* memsetting just the first chunk start is bound to provide better cache locality */
		if (sgen_get_nursery_clear_policy () == CLEAR_AT_GC)
			memset (frag_start, 0, frag_size);
		else if (sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION_DEBUG)
			memset (frag_start, 0xff, frag_size);

#ifdef NALLOC_DEBUG
		/* XXX convert this into a flight record entry
		printf ("\tfragment [%p %p] size %zd\n", frag_start, frag_end, frag_size);
		*/
#endif
		sgen_fragment_allocator_add (allocator, frag_start, frag_end);
		fragment_total += frag_size;
	} else {
		/* Clear unused fragments, pinning depends on this */
		sgen_clear_range (frag_start, frag_end);
		HEAVY_STAT (stat_wasted_bytes_small_areas += frag_size);
	}
}

static void
fragment_list_reverse (SgenFragmentAllocator *allocator)
{
	SgenFragment *prev = NULL, *list = allocator->region_head;
	while (list) {
		SgenFragment *next = list->next;
		list->next = prev;
		list->next_in_order = prev;
		prev = list;
		list = next;
	}

	allocator->region_head = allocator->alloc_head = prev;
}

mword
sgen_build_nursery_fragments (GCMemSection *nursery_section, SgenGrayQueue *unpin_queue)
{
	char *frag_start, *frag_end;
	size_t frag_size;
	SgenFragment *frags_ranges;
	void **pin_start, **pin_entry, **pin_end;

#ifdef NALLOC_DEBUG
	reset_alloc_records ();
#endif
	/* The mutator fragments are done. We no longer need them. */
	sgen_fragment_allocator_release (&mutator_allocator);

	frag_start = sgen_nursery_start;
	fragment_total = 0;

	/* The current nursery might give us a fragment list to exclude [start, next[ */
	frags_ranges = sgen_minor_collector.build_fragments_get_exclude_head ();

	/* clear scan starts */
	memset (nursery_section->scan_starts, 0, nursery_section->num_scan_start * sizeof (gpointer));

	pin_start = pin_entry = sgen_pinning_get_entry (nursery_section->pin_queue_first_entry);
	pin_end = sgen_pinning_get_entry (nursery_section->pin_queue_last_entry);

	while (pin_entry < pin_end || frags_ranges) {
		char *addr0, *addr1;
		size_t size;

		addr0 = addr1 = sgen_nursery_end;
		if (pin_entry < pin_end)
			addr0 = (char *)*pin_entry;
		if (frags_ranges)
			addr1 = frags_ranges->fragment_start;

		if (addr0 < addr1) {
			if (unpin_queue)
				GRAY_OBJECT_ENQUEUE (unpin_queue, (GCObject*)addr0, sgen_obj_get_descriptor_safe ((GCObject*)addr0));
			else
				SGEN_UNPIN_OBJECT (addr0);
			size = SGEN_ALIGN_UP (sgen_safe_object_get_size ((GCObject*)addr0));
			CANARIFY_SIZE (size);
			sgen_set_nursery_scan_start (addr0);
			frag_end = addr0;
			++pin_entry;
		} else {
			frag_end = addr1;
			size = frags_ranges->fragment_next - addr1;
			frags_ranges = frags_ranges->next_in_order;
		}

		frag_size = frag_end - frag_start;

		g_assert (frag_size >= 0);
		if (frag_size && size)
			add_nursery_frag (&mutator_allocator, frag_size, frag_start, frag_end);

		frag_size = size;
#ifdef NALLOC_DEBUG
		add_alloc_record (*pin_entry, frag_size, PINNING);
#endif
		frag_start = frag_end + frag_size;
	}

	nursery_last_pinned_end = frag_start;
	frag_end = sgen_nursery_end;
	frag_size = frag_end - frag_start;
	if (frag_size)
		add_nursery_frag (&mutator_allocator, frag_size, frag_start, frag_end);

	/* Now it's safe to release the fragments exclude list. */
	sgen_minor_collector.build_fragments_release_exclude_head ();

	/*
	 * First we reorder the fragment list to be in ascending address order.
	 * Fragments were prepended as they were found, so the list is currently
	 * descending; reversing it makes H/W prefetchers happier.
	 */
	fragment_list_reverse (&mutator_allocator);

	/* The collector might want to do something with the final nursery fragment list. */
	sgen_minor_collector.build_fragments_finish (&mutator_allocator);

	if (!unmask (mutator_allocator.alloc_head)) {
		SGEN_LOG (1, "Nursery fully pinned");
		for (pin_entry = pin_start; pin_entry < pin_end; ++pin_entry) {
			GCObject *p = (GCObject *)*pin_entry;
			SGEN_LOG (3, "Bastard pinning obj %p (%s), size: %zd", p, sgen_client_vtable_get_name (SGEN_LOAD_VTABLE (p)), sgen_safe_object_get_size (p));
		}
	}
	return fragment_total;
}
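
/*
 * After this runs, the nursery is partitioned by the surviving pinned
 * objects and the excluded ranges (illustrative layout, not to scale):
 *
 *	|--frag--|pin|------frag------|pin|pin|--frag--|
 *
 * Only gaps of at least SGEN_MAX_NURSERY_WASTE bytes become allocation
 * fragments; smaller holes are cleared and accounted as waste in
 * add_nursery_frag ().
 */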

char*
sgen_nursery_alloc_get_upper_alloc_bound (void)
{
	/* FIXME we need to calculate the collector upper bound as well, but this must be done in the previous GC. */
	return sgen_nursery_end;
}

/*** Nursery memory allocation ***/
void
sgen_nursery_retire_region (void *address, ptrdiff_t size)
{
	HEAVY_STAT (stat_wasted_bytes_discarded_fragments += size);
}

gboolean
sgen_can_alloc_size (size_t size)
{
	SgenFragment *frag;

	if (!SGEN_CAN_ALIGN_UP (size))
		return FALSE;

	size = SGEN_ALIGN_UP (size);

	for (frag = (SgenFragment *)unmask (mutator_allocator.alloc_head); frag; frag = (SgenFragment *)unmask (frag->next)) {
		if ((size_t)(frag->fragment_end - frag->fragment_next) >= size)
			return TRUE;
	}
	return FALSE;
}

void*
sgen_nursery_alloc (size_t size)
{
	SGEN_ASSERT (1, size >= (SGEN_CLIENT_MINIMUM_OBJECT_SIZE + CANARY_SIZE) && size <= (SGEN_MAX_SMALL_OBJ_SIZE + CANARY_SIZE), "Invalid nursery object size");

	SGEN_LOG (4, "Searching nursery for size: %zd", size);
	size = SGEN_ALIGN_UP (size);

	HEAVY_STAT (++stat_nursery_alloc_requests);

	return sgen_fragment_allocator_par_alloc (&mutator_allocator, size);
}

void*
sgen_nursery_alloc_range (size_t desired_size, size_t minimum_size, size_t *out_alloc_size)
{
	SGEN_LOG (4, "Searching for byte range desired size: %zd minimum size %zd", desired_size, minimum_size);

	HEAVY_STAT (++stat_nursery_alloc_range_requests);

	return sgen_fragment_allocator_par_range_alloc (&mutator_allocator, desired_size, minimum_size, out_alloc_size);
}
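
/*
 * Hypothetical usage sketch (names local to this example): a caller that
 * would like a 64K chunk for sequential allocation but can make do with
 * as little as one page; *out_alloc_size reports what was actually handed out.
 *
 *	size_t alloc_size;
 *	void *chunk = sgen_nursery_alloc_range (64 * 1024, 4096, &alloc_size);
 *	if (chunk)
 *		; // use [chunk, (char *)chunk + alloc_size)
 */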

/*** Initialization ***/

#ifdef HEAVY_STATISTICS

void
sgen_nursery_allocator_init_heavy_stats (void)
{
	mono_counters_register ("bytes wasted trailer fragments", MONO_COUNTER_GC | MONO_COUNTER_WORD | MONO_COUNTER_BYTES, &stat_wasted_bytes_trailer);
	mono_counters_register ("bytes wasted small areas", MONO_COUNTER_GC | MONO_COUNTER_WORD | MONO_COUNTER_BYTES, &stat_wasted_bytes_small_areas);
	mono_counters_register ("bytes wasted discarded fragments", MONO_COUNTER_GC | MONO_COUNTER_WORD | MONO_COUNTER_BYTES, &stat_wasted_bytes_discarded_fragments);

	mono_counters_register ("# nursery alloc requests", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_nursery_alloc_requests);
	mono_counters_register ("# nursery alloc iterations", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_alloc_iterations);
	mono_counters_register ("# nursery alloc retries", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_alloc_retries);

	mono_counters_register ("# nursery alloc range requests", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_nursery_alloc_range_requests);
	mono_counters_register ("# nursery alloc range iterations", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_alloc_range_iterations);
	mono_counters_register ("# nursery alloc range retries", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_alloc_range_retries);
}

#endif

void
sgen_init_nursery_allocator (void)
{
	sgen_register_fixed_internal_mem_type (INTERNAL_MEM_FRAGMENT, sizeof (SgenFragment));
#ifdef NALLOC_DEBUG
	alloc_records = sgen_alloc_os_memory (sizeof (AllocRecord) * ALLOC_RECORD_COUNT, SGEN_ALLOC_INTERNAL | SGEN_ALLOC_ACTIVATE, "debugging memory");
#endif
}

void
sgen_nursery_alloc_prepare_for_minor (void)
{
	sgen_minor_collector.prepare_to_space (sgen_space_bitmap, sgen_space_bitmap_size);
}

void
sgen_nursery_alloc_prepare_for_major (void)
{
	sgen_minor_collector.prepare_to_space (sgen_space_bitmap, sgen_space_bitmap_size);
}

void
sgen_nursery_allocator_set_nursery_bounds (char *start, char *end)
{
	sgen_nursery_start = start;
	sgen_nursery_end = end;

	/*
	 * This will not divide evenly for tiny nurseries (<4kb), so we make sure to be on
	 * the right side of things and round up. We could just do a MAX(1,x) instead,
	 * since the nursery size must be a power of 2.
	 */
	sgen_space_bitmap_size = (end - start + SGEN_TO_SPACE_GRANULE_IN_BYTES * 8 - 1) / (SGEN_TO_SPACE_GRANULE_IN_BYTES * 8);
	sgen_space_bitmap = (char *)g_malloc0 (sgen_space_bitmap_size);
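
	/*
	 * Worked example, assuming a hypothetical 4KB granule: a 4MB nursery
	 * needs (4MiB + 32KiB - 1) / 32KiB = 128 bitmap bytes, i.e. one bit
	 * per granule, and the rounding keeps tiny nurseries at >= 1 byte.
	 */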

	/* Setup the single first large fragment */
	sgen_minor_collector.init_nursery (&mutator_allocator, start, end);
}