2 * sgen-nursery-allocator.c: Nursery allocation code.
4 * Copyright 2009-2010 Novell, Inc.
7 * Copyright 2011 Xamarin Inc (http://www.xamarin.com)
8 * Copyright (C) 2012 Xamarin Inc
10 * This library is free software; you can redistribute it and/or
11 * modify it under the terms of the GNU Library General Public
12 * License 2.0 as published by the Free Software Foundation;
14 * This library is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17 * Library General Public License for more details.
19 * You should have received a copy of the GNU Library General Public
20 * License 2.0 along with this library; if not, write to the Free
21 * Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
25 * The young generation is divided into fragments. This is because
26 * we can hand one fragment to a thread for lock-less fast alloc and
27 * because the young generation ends up fragmented anyway by pinned objects.
28 * Once a collection is done, a list of fragments is created. When doing
29 * thread local alloc we use smallish nurseries so we allow new threads to
30 * allocate memory from gen0 without triggering a collection. Threads that
31 * are found to allocate lots of memory are given bigger fragments. This
32 * should make the finalizer thread use little nursery memory after a while.
33 * We should start assigning threads very small fragments: if there are many
34 * threads the nursery will be full of reserved space that the threads may not
35 * use at all, slowing down allocation speed.
36 * Thread local allocation is done from areas of memory Hotspot calls Thread Local
37 * Allocation Buffers (TLABs).
48 #ifdef HAVE_SEMAPHORE_H
49 #include <semaphore.h>
63 #include "metadata/sgen-gc.h"
64 #include "metadata/metadata-internals.h"
65 #include "metadata/class-internals.h"
66 #include "metadata/gc-internal.h"
67 #include "metadata/object-internals.h"
68 #include "metadata/threads.h"
69 #include "metadata/sgen-cardtable.h"
70 #include "metadata/sgen-protocol.h"
71 #include "metadata/sgen-archdep.h"
72 #include "metadata/sgen-bridge.h"
73 #include "metadata/sgen-memory-governor.h"
74 #include "metadata/sgen-pinning.h"
75 #include "metadata/mono-gc.h"
76 #include "metadata/method-builder.h"
77 #include "metadata/profiler-private.h"
78 #include "metadata/monitor.h"
79 #include "metadata/threadpool-internals.h"
80 #include "metadata/mempool-internals.h"
81 #include "metadata/marshal.h"
82 #include "utils/mono-mmap.h"
83 #include "utils/mono-time.h"
84 #include "utils/mono-semaphore.h"
85 #include "utils/mono-counters.h"
86 #include "utils/mono-proclib.h"
87 #include "utils/mono-threads.h"
89 /* Enable it so nursery allocation diagnostic data is collected */
90 //#define NALLOC_DEBUG 1
92 /* The mutator allocs from here. */
93 SgenFragmentAllocator mutator_allocator;
95 /* freeelist of fragment structures */
96 static SgenFragment *fragment_freelist = NULL;
98 /* Allocator cursors */
99 static char *nursery_last_pinned_end = NULL;
/* Bounds of the nursery region; set by sgen_nursery_allocator_set_nursery_bounds (). */
101 char *sgen_nursery_start;
102 char *sgen_nursery_end;
/* Default nursery size: 4 MB (1 << 22). */
105 size_t sgen_nursery_size = (1 << 22);
106 #ifdef SGEN_ALIGN_NURSERY
/* log2 of the nursery size when alignment is enabled; must match sgen_nursery_size.
 * NOTE(review): the matching #endif is not visible in this listing. */
107 int sgen_nursery_bits = 22;
/* To-space bitmap handed to the minor collector (see prepare_to_space calls below). */
111 char *sgen_space_bitmap MONO_INTERNAL;
112 size_t sgen_space_bitmap_size MONO_INTERNAL;
114 #ifdef HEAVY_STATISTICS
/* Diagnostic counters, registered in sgen_nursery_allocator_init_heavy_stats ().
 * NOTE(review): the matching #endif is not visible in this listing. */
116 static gint32 stat_wasted_bytes_trailer = 0;
117 static gint32 stat_wasted_bytes_small_areas = 0;
118 static gint32 stat_wasted_bytes_discarded_fragments = 0;
119 static gint32 stat_nursery_alloc_requests = 0;
120 static gint32 stat_alloc_iterations = 0;
121 static gint32 stat_alloc_retries = 0;
123 static gint32 stat_nursery_alloc_range_requests = 0;
124 static gint32 stat_alloc_range_iterations = 0;
125 static gint32 stat_alloc_range_retries = 0;
129 /************************************Nursery allocation debugging *********************************************/
/* NOTE(review): this listing is incomplete here — the AllocRecord struct definition
 * (address/size/reason/seq fields used below) is missing; `tid` is its last field. */
146 MonoNativeThreadId tid;
/* Fixed capacity of the alloc_records ring; allocated in sgen_init_nursery_allocator (). */
149 #define ALLOC_RECORD_COUNT 128000
152 static AllocRecord *alloc_records;
/* Next free slot in alloc_records; advanced atomically by add_alloc_record (). */
153 static volatile int next_record;
154 static volatile int alloc_count;
/* Forward declarations so these can be called from a debugger. */
156 void dump_alloc_records (void);
157 void verify_alloc_records (void);
/* Map an AllocRecord reason code to a human-readable string for the dump/verify
 * routines below. NOTE(review): return-type line and braces are missing from this listing. */
160 get_reason_name (AllocRecord *rec)
162 switch (rec->reason) {
163 case FIXED_ALLOC: return "fixed-alloc";
164 case RANGE_ALLOC: return "range-alloc";
165 case PINNING: return "pinning";
166 case BLOCK_ZEROING: return "block-zeroing";
167 case CLEAR_NURSERY_FRAGS: return "clear-nursery-frag";
168 default: return "invalid";
/* Reset the diagnostic record buffer. NOTE(review): body not visible in this listing. */
173 reset_alloc_records (void)
/* Append one allocation record (address, size, reason, sequence number, thread id).
 * The slot index is claimed atomically so concurrent allocators can record safely.
 * NOTE(review): no bounds check against ALLOC_RECORD_COUNT is visible here. */
180 add_alloc_record (char *addr, size_t size, int reason)
182 int idx = InterlockedIncrement (&next_record) - 1;
183 alloc_records [idx].address = addr;
184 alloc_records [idx].size = size;
185 alloc_records [idx].reason = reason;
186 alloc_records [idx].seq = idx;
187 alloc_records [idx].tid = mono_native_thread_id_get ();
/* qsort comparator: order records by address, breaking ties by allocation sequence.
 * NOTE(review): pointer/sequence differences are returned directly; on LP64 the
 * pointer difference may be truncated to int — acceptable for debug-only code. */
191 comp_alloc_record (const void *_a, const void *_b)
193 const AllocRecord *a = _a;
194 const AllocRecord *b = _b;
195 if (a->address == b->address)
196 return a->seq - b->seq;
197 return a->address - b->address;
/* One-past-the-end address of the memory covered by a record. */
200 #define rec_end(REC) ((REC)->address + (REC)->size)
/* Debug helper: sort all recorded allocations by address and print them.
 * Intended to be invoked manually (e.g. from a debugger). */
203 dump_alloc_records (void)
206 sgen_qsort (alloc_records, next_record, sizeof (AllocRecord), comp_alloc_record);
208 printf ("------------------------------------DUMP RECORDS----------------------------\n");
209 for (i = 0; i < next_record; ++i) {
210 AllocRecord *rec = alloc_records + i;
211 printf ("obj [%p, %p] size %zd reason %s seq %d tid %zx\n", rec->address, rec_end (rec), rec->size, get_reason_name (rec), rec->seq, (size_t)rec->tid);
/* Debug helper: sort records by address and report overlapping allocations and
 * holes (>= 8 bytes) between consecutive records, plus summary totals.
 * NOTE(review): declarations of i/total/holes/hole_size/max_hole and several
 * statements are missing from this listing. */
216 verify_alloc_records (void)
222 AllocRecord *prev = NULL;
224 sgen_qsort (alloc_records, next_record, sizeof (AllocRecord), comp_alloc_record);
225 printf ("------------------------------------DUMP RECORDS- %d %d---------------------------\n", next_record, alloc_count);
226 for (i = 0; i < next_record; ++i) {
227 AllocRecord *rec = alloc_records + i;
/* Overlap check: previous record must end at or before this one starts. */
231 if (rec_end (prev) > rec->address)
232 printf ("WE GOT OVERLAPPING objects %p and %p\n", prev->address, rec->address);
233 if ((rec->address - rec_end (prev)) >= 8)
235 hole_size = rec->address - rec_end (prev);
236 max_hole = MAX (max_hole, hole_size);
238 printf ("obj [%p, %p] size %zd hole to prev %d reason %s seq %d tid %zx\n", rec->address, rec_end (rec), rec->size, hole_size, get_reason_name (rec), rec->seq, (size_t)rec->tid);
241 printf ("SUMMARY total alloc'd %d holes %d max_hole %d\n", total, holes, max_hole);
246 /*********************************************************************************/
249 static inline gpointer
250 mask (gpointer n, uintptr_t bit)
252 return (gpointer)(((uintptr_t)n) | bit);
255 static inline gpointer
258 return (gpointer)((uintptr_t)p & ~(uintptr_t)0x3);
261 static inline uintptr_t
262 get_mark (gpointer n)
264 return (uintptr_t)n & 0x1;
267 /*MUST be called with world stopped*/
/* Pop a SgenFragment descriptor from the global freelist, or allocate a fresh
 * one from internal GC memory when the freelist is empty. The world-stopped
 * requirement is what makes the unsynchronized freelist access safe.
 * NOTE(review): the `if (frag)` guard, return statements and braces are
 * missing from this listing. */
269 sgen_fragment_allocator_alloc (void)
271 SgenFragment *frag = fragment_freelist;
273 fragment_freelist = frag->next_in_order;
274 frag->next = frag->next_in_order = NULL;
277 frag = sgen_alloc_internal (INTERNAL_MEM_FRAGMENT);
278 frag->next = frag->next_in_order = NULL;
/* Prepend a new fragment covering [start, end) to the allocator's lists.
 * The fragment is linked onto both region_head (insertion order) and
 * alloc_head (allocation list); fragment_next starts at @start since the
 * whole range is free. */
283 sgen_fragment_allocator_add (SgenFragmentAllocator *allocator, char *start, char *end)
285 SgenFragment *fragment;
287 fragment = sgen_fragment_allocator_alloc ();
288 fragment->fragment_start = start;
289 fragment->fragment_next = start;
290 fragment->fragment_end = end;
/* region_head may carry a deletion mark in its low bits; strip it before linking. */
291 fragment->next_in_order = fragment->next = unmask (allocator->region_head);
293 allocator->region_head = allocator->alloc_head = fragment;
294 g_assert (fragment->fragment_end > fragment->fragment_start);
/* Return all of the allocator's fragment descriptors to the global freelist
 * in one splice, then empty the allocator. NOTE(review): an early-return for
 * an empty region_head is presumably present in the original but missing here. */
298 sgen_fragment_allocator_release (SgenFragmentAllocator *allocator)
300 SgenFragment *last = allocator->region_head;
304 /* Find the last fragment in insert order */
305 for (; last->next_in_order; last = last->next_in_order) ;
/* Splice the whole region list onto the front of the freelist. */
307 last->next_in_order = fragment_freelist;
308 fragment_freelist = allocator->region_head;
309 allocator->alloc_head = allocator->region_head = NULL;
/* Lock-free search for the link that points at @frag in the alloc list,
 * unlinking marked (logically deleted) nodes along the way — the "find"
 * phase of Michael's lock-free linked-list algorithm. Returns the address
 * of the previous node's next-pointer, or (per callers) NULL if @frag was
 * removed concurrently. NOTE(review): this listing is missing many lines
 * (restart labels, loop structure, several conditionals); do not reason
 * about the algorithm from this fragment alone. */
312 static SgenFragment**
313 find_previous_pointer_fragment (SgenFragmentAllocator *allocator, SgenFragment *frag)
316 SgenFragment *cur, *next;
322 prev = &allocator->alloc_head;
325 printf ("retry count for fppf is %d\n", count);
328 cur = unmask (*prev);
336 * We need to make sure that we dereference prev below
337 * after reading cur->next above, so we need a read
340 mono_memory_read_barrier ();
345 if (!get_mark (next)) {
/* cur is marked: try to physically unlink it with a CAS on the previous link. */
350 next = unmask (next);
351 if (InterlockedCompareExchangePointer ((volatile gpointer*)prev, next, cur) != cur)
353 /*we must make sure that the next from cur->next happens after*/
354 mono_memory_write_barrier ();
357 cur = mono_lls_pointer_unmask (next);
/* Attempt to atomically claim everything from @alloc_end to the fragment's end,
 * so a dying fragment cannot be raced by the range allocator while its tail is
 * being zeroed. Succeeds only if fragment_next still equals @alloc_end.
 * NOTE(review): return type (gboolean) and the early `return FALSE` body are
 * missing from this listing. */
363 claim_remaining_size (SgenFragment *frag, char *alloc_end)
365 /* All space used, nothing to claim. */
366 if (frag->fragment_end <= alloc_end)
369 /* Try to alloc all the remaining space. */
370 return InterlockedCompareExchangePointer ((volatile gpointer*)&frag->fragment_next, frag->fragment_end, alloc_end) == alloc_end;
/* Lock-free bump allocation of @size bytes from @frag. Claims [p, p+size) by
 * CASing fragment_next; on contention the caller retries. If the fragment's
 * leftover drops below SGEN_MAX_NURSERY_WASTE the fragment is retired: its tail
 * is claimed and zeroed, then the node is removed from the alloc list using
 * Michael's two-CAS (mark, then unlink) removal. NOTE(review): several lines
 * (failure returns, loop labels, the final return of p) are missing from this
 * listing. */
374 par_alloc_from_fragment (SgenFragmentAllocator *allocator, SgenFragment *frag, size_t size)
376 char *p = frag->fragment_next;
377 char *end = p + size;
379 if (end > frag->fragment_end)
382 /* p = frag->fragment_next must happen before */
383 mono_memory_barrier ();
385 if (InterlockedCompareExchangePointer ((volatile gpointer*)&frag->fragment_next, end, p) != p)
388 if (frag->fragment_end - end < SGEN_MAX_NURSERY_WASTE) {
389 SgenFragment *next, **prev_ptr;
392 * Before we clean the remaining nursery, we must claim the remaining space
393 * as it could end up been used by the range allocator since it can end up
394 * allocating from this dying fragment as it doesn't respect SGEN_MAX_NURSERY_WASTE
395 * when doing second chance allocation.
397 if ((sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION || sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION_DEBUG) && claim_remaining_size (frag, end)) {
398 sgen_clear_range (end, frag->fragment_end);
399 HEAVY_STAT (InterlockedExchangeAdd (&stat_wasted_bytes_trailer, frag->fragment_end - end));
401 add_alloc_record (end, frag->fragment_end - end, BLOCK_ZEROING);
405 prev_ptr = find_previous_pointer_fragment (allocator, frag);
407 /*Use Michaels linked list remove*/
409 /*prev_ptr will be null if the fragment was removed concurrently */
/* First CAS: mark frag->next (set low bit) to logically delete the node. */
414 if (!get_mark (next)) {
415 /*frag->next read must happen before the first CAS*/
416 mono_memory_write_barrier ();
418 /*Fail if the next node is removed concurrently and its CAS wins */
419 if (InterlockedCompareExchangePointer ((volatile gpointer*)&frag->next, mask (next, 1), next) != next) {
424 /* The second CAS must happen after the first CAS or frag->next. */
425 mono_memory_write_barrier ();
427 /* Fail if the previous node was deleted and its CAS wins */
428 if (InterlockedCompareExchangePointer ((volatile gpointer*)prev_ptr, unmask (next), frag) != frag) {
429 prev_ptr = find_previous_pointer_fragment (allocator, frag);
/* Serial (single-threaded, world-stopped) bump allocation of @size bytes from
 * @frag. No CAS needed: fragment_next is advanced directly. If the leftover
 * falls below SGEN_MAX_NURSERY_WASTE, the fragment is unlinked via *previous
 * and its tail zeroed so pinning can still walk the space. NOTE(review):
 * failure return and the final `return p` are missing from this listing. */
440 serial_alloc_from_fragment (SgenFragment **previous, SgenFragment *frag, size_t size)
442 char *p = frag->fragment_next;
443 char *end = p + size;
445 if (end > frag->fragment_end)
448 frag->fragment_next = end;
450 if (frag->fragment_end - end < SGEN_MAX_NURSERY_WASTE) {
451 *previous = frag->next;
453 /* Clear the remaining space, pinning depends on this. FIXME move this to use phony arrays */
454 memset (end, 0, frag->fragment_end - end);
456 *previous = frag->next;
/* Parallel allocation entry point: first-fit scan of the lock-free fragment
 * list; retries (via par_alloc_from_fragment failing under contention) are
 * counted under HEAVY_STATISTICS. Returns NULL when no fragment fits.
 * NOTE(review): restart label and final return are missing from this listing. */
463 sgen_fragment_allocator_par_alloc (SgenFragmentAllocator *allocator, size_t size)
468 InterlockedIncrement (&alloc_count);
472 for (frag = unmask (allocator->alloc_head); unmask (frag); frag = unmask (frag->next)) {
473 HEAVY_STAT (InterlockedIncrement (&stat_alloc_iterations));
475 if (size <= (size_t)(frag->fragment_end - frag->fragment_next)) {
476 void *p = par_alloc_from_fragment (allocator, frag, size);
478 HEAVY_STAT (InterlockedIncrement (&stat_alloc_retries));
482 add_alloc_record (p, size, FIXED_ALLOC);
/* Serial allocation entry point: first-fit walk over the fragment list,
 * tracking the previous link so serial_alloc_from_fragment can unlink a
 * dying fragment in place. Returns NULL when no fragment fits.
 * NOTE(review): the success return path is missing from this listing. */
491 sgen_fragment_allocator_serial_alloc (SgenFragmentAllocator *allocator, size_t size)
494 SgenFragment **previous;
496 InterlockedIncrement (&alloc_count);
499 previous = &allocator->alloc_head;
501 for (frag = *previous; frag; frag = *previous) {
502 char *p = serial_alloc_from_fragment (previous, frag, size);
504 HEAVY_STAT (InterlockedIncrement (&stat_alloc_iterations));
508 add_alloc_record (p, size, FIXED_ALLOC);
/* Only advance `previous` when the fragment was not unlinked above. */
512 previous = &frag->next;
/* Serial range allocation: try to find a fragment holding @desired_size bytes;
 * while scanning, remember the largest fragment of at least @minimum_size as a
 * fallback (min_frag). On success *out_alloc_size receives the actual size
 * handed out. NOTE(review): returns and some braces are missing from this
 * listing. */
518 sgen_fragment_allocator_serial_range_alloc (SgenFragmentAllocator *allocator, size_t desired_size, size_t minimum_size, size_t *out_alloc_size)
520 SgenFragment *frag, **previous, *min_frag = NULL, **prev_min_frag = NULL;
521 size_t current_minimum = minimum_size;
524 InterlockedIncrement (&alloc_count);
527 previous = &allocator->alloc_head;
529 for (frag = *previous; frag; frag = *previous) {
530 size_t frag_size = frag->fragment_end - frag->fragment_next;
532 HEAVY_STAT (InterlockedIncrement (&stat_alloc_range_iterations));
534 if (desired_size <= frag_size) {
536 *out_alloc_size = desired_size;
538 p = serial_alloc_from_fragment (previous, frag, desired_size);
540 add_alloc_record (p, desired_size, RANGE_ALLOC);
/* Track the best (largest) fallback fragment seen so far. */
544 if (current_minimum <= frag_size) {
546 prev_min_frag = previous;
547 current_minimum = frag_size;
549 previous = &frag->next;
/* Second chance: drain the best fallback fragment entirely. */
554 size_t frag_size = min_frag->fragment_end - min_frag->fragment_next;
555 *out_alloc_size = frag_size;
557 p = serial_alloc_from_fragment (prev_min_frag, min_frag, frag_size);
560 add_alloc_record (p, frag_size, RANGE_ALLOC);
/* Parallel range allocation: lock-free scan for a fragment with @desired_size
 * bytes, remembering the largest fragment >= @minimum_size as a second-chance
 * candidate (min_frag). The fallback re-reads the candidate's size after a
 * barrier and drains it fully. NOTE(review): restart labels, min_frag update
 * and the failure/success returns are missing from this listing. */
569 sgen_fragment_allocator_par_range_alloc (SgenFragmentAllocator *allocator, size_t desired_size, size_t minimum_size, size_t *out_alloc_size)
571 SgenFragment *frag, *min_frag;
572 size_t current_minimum;
576 current_minimum = minimum_size;
579 InterlockedIncrement (&alloc_count);
582 for (frag = unmask (allocator->alloc_head); frag; frag = unmask (frag->next)) {
583 size_t frag_size = frag->fragment_end - frag->fragment_next;
585 HEAVY_STAT (InterlockedIncrement (&stat_alloc_range_iterations));
587 if (desired_size <= frag_size) {
589 *out_alloc_size = desired_size;
591 p = par_alloc_from_fragment (allocator, frag, desired_size);
593 HEAVY_STAT (InterlockedIncrement (&stat_alloc_range_retries));
597 add_alloc_record (p, desired_size, RANGE_ALLOC);
601 if (current_minimum <= frag_size) {
603 current_minimum = frag_size;
607 /* The second fragment_next read should be ordered in respect to the first code block */
608 mono_memory_barrier ();
/* Second chance: the candidate may have shrunk concurrently; re-check its size. */
614 frag_size = min_frag->fragment_end - min_frag->fragment_next;
615 if (frag_size < minimum_size)
618 *out_alloc_size = frag_size;
620 mono_memory_barrier ();
621 p = par_alloc_from_fragment (allocator, min_frag, frag_size);
623 /*XXX restarting here is quite dubious given this is already second chance allocation. */
625 HEAVY_STAT (InterlockedIncrement (&stat_alloc_retries));
629 add_alloc_record (p, frag_size, RANGE_ALLOC);
/* Walk the allocator's fragment list and invalidate each fragment's unused
 * tail [fragment_next, fragment_end) via sgen_clear_range, keeping the heap
 * forward-walkable. */
638 sgen_clear_allocator_fragments (SgenFragmentAllocator *allocator)
642 for (frag = unmask (allocator->alloc_head); frag; frag = unmask (frag->next)) {
643 SGEN_LOG (4, "Clear nursery frag %p-%p", frag->fragment_next, frag->fragment_end);
644 sgen_clear_range (frag->fragment_next, frag->fragment_end);
646 add_alloc_record (frag->fragment_next, frag->fragment_end - frag->fragment_next, CLEAR_NURSERY_FRAGS);
651 /* Clear all remaining nursery fragments */
/* Only needed when the clear-at-TLAB-creation policies are active; other
 * policies clear at GC time instead (see add_nursery_frag). */
653 sgen_clear_nursery_fragments (void)
655 if (sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION || sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION_DEBUG) {
656 sgen_clear_allocator_fragments (&mutator_allocator);
657 sgen_minor_collector.clear_fragments ();
662 * Mark a given range of memory as invalid.
664 * This can be done either by zeroing memory or by placing
665 * a phony byte[] array. This keeps the heap forward walkable.
667 * This function ignores calls with a zero range, even if
668 * both start and end are NULL.
671 sgen_clear_range (char *start, char *end)
674 size_t size = end - start;
676 if ((start && !end) || (start > end))
677 g_error ("Invalid range [%p %p]", start, end);
/* Ranges too small for an array header are plain-zeroed instead. */
679 if (size < sizeof (MonoArray)) {
680 memset (start, 0, size);
/* Otherwise install a fake byte[] covering the range so heap walkers skip it. */
684 o = (MonoArray*)start;
685 o->obj.vtable = sgen_get_array_fill_vtable ();
686 /* Mark this as not a real object */
687 o->obj.synchronisation = GINT_TO_POINTER (-1);
689 o->max_length = (mono_array_size_t)(size - sizeof (MonoArray));
690 sgen_set_nursery_scan_start (start);
/* Sanity: the phony array must exactly cover [start, end). */
691 g_assert (start + sgen_safe_object_get_size ((MonoObject*)o) == end);
/* Invalidate all unused fragment tails (mutator and collector) so the pinning
 * phase can safely walk the whole nursery. */
695 sgen_nursery_allocator_prepare_for_pinning (void)
697 sgen_clear_allocator_fragments (&mutator_allocator);
698 sgen_minor_collector.clear_fragments ();
/* Running total of free nursery bytes found during fragment rebuilding. */
701 static mword fragment_total = 0;
703 * We found a fragment of free memory in the nursery: memzero it and if
704 * it is big enough, add it to the list of fragments that can be used for
708 add_nursery_frag (SgenFragmentAllocator *allocator, size_t frag_size, char* frag_start, char* frag_end)
710 SGEN_LOG (4, "Found empty fragment: %p-%p, size: %zd", frag_start, frag_end, frag_size);
711 binary_protocol_empty (frag_start, frag_size);
712 MONO_GC_NURSERY_SWEPT ((mword)frag_start, frag_end - frag_start);
713 /* Not worth dealing with smaller fragments: need to tune */
714 if (frag_size >= SGEN_MAX_NURSERY_WASTE) {
715 /* memsetting just the first chunk start is bound to provide better cache locality */
716 if (sgen_get_nursery_clear_policy () == CLEAR_AT_GC)
717 memset (frag_start, 0, frag_size);
718 else if (sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION_DEBUG)
719 memset (frag_start, 0xff, frag_size);
722 /* XXX convert this into a flight record entry
723 printf ("\tfragment [%p %p] size %zd\n", frag_start, frag_end, frag_size);
726 sgen_fragment_allocator_add (allocator, frag_start, frag_end);
727 fragment_total += frag_size;
/* Fragment too small to be worth tracking: just invalidate it. */
729 /* Clear unused fragments, pinning depends on this */
730 sgen_clear_range (frag_start, frag_end);
731 HEAVY_STAT (InterlockedExchangeAdd (&stat_wasted_bytes_small_areas, frag_size));
/* Reverse the allocator's fragment list in place. Fragments are built in
 * descending address order (each new one is prepended), so reversing yields
 * ascending address order, which is friendlier to hardware prefetchers.
 * NOTE(review): the loop's list-advance and next-pointer update lines are
 * missing from this listing. */
736 fragment_list_reverse (SgenFragmentAllocator *allocator)
738 SgenFragment *prev = NULL, *list = allocator->region_head;
740 SgenFragment *next = list->next;
742 list->next_in_order = prev;
747 allocator->region_head = allocator->alloc_head = prev;
/* Rebuild the nursery free-fragment list after a collection. Walks the sorted
 * pin queue and the minor collector's exclude ranges in address order; every
 * gap between pinned objects / excluded ranges becomes a candidate fragment
 * (add_nursery_frag). Returns the total free bytes found (fragment_total).
 * When @unpin_queue is non-NULL, pinned objects are enqueued there and unpinned
 * instead of staying pinned. NOTE(review): this listing is missing many lines
 * (local declarations, several branches and loop advances); treat the visible
 * statements as an outline only. */
751 sgen_build_nursery_fragments (GCMemSection *nursery_section, SgenGrayQueue *unpin_queue)
753 char *frag_start, *frag_end;
755 SgenFragment *frags_ranges;
756 void **pin_start, **pin_entry, **pin_end;
759 reset_alloc_records ();
761 /*The mutator fragments are done. We no longer need them. */
762 sgen_fragment_allocator_release (&mutator_allocator);
764 frag_start = sgen_nursery_start;
767 /* The current nursery might give us a fragment list to exclude [start, next[*/
768 frags_ranges = sgen_minor_collector.build_fragments_get_exclude_head ();
770 /* clear scan starts */
771 memset (nursery_section->scan_starts, 0, nursery_section->num_scan_start * sizeof (gpointer));
773 pin_start = pin_entry = sgen_pinning_get_entry (nursery_section->pin_queue_first_entry);
774 pin_end = sgen_pinning_get_entry (nursery_section->pin_queue_last_entry);
/* Merge-walk pinned entries and exclude ranges in ascending address order. */
776 while (pin_entry < pin_end || frags_ranges) {
779 SgenFragment *last_frag = NULL;
781 addr0 = addr1 = sgen_nursery_end;
782 if (pin_entry < pin_end)
785 addr1 = frags_ranges->fragment_start;
/* Case: next obstacle is a pinned object — optionally defer-unpin it. */
789 GRAY_OBJECT_ENQUEUE (unpin_queue, addr0, sgen_obj_get_descriptor_safe (addr0));
791 SGEN_UNPIN_OBJECT (addr0);
792 size = SGEN_ALIGN_UP (sgen_safe_object_get_size ((MonoObject*)addr0));
793 CANARIFY_SIZE (size);
794 sgen_set_nursery_scan_start (addr0);
/* Case: next obstacle is an excluded range from the minor collector. */
799 size = frags_ranges->fragment_next - addr1;
800 last_frag = frags_ranges;
801 frags_ranges = frags_ranges->next_in_order;
804 frag_size = frag_end - frag_start;
809 g_assert (frag_size >= 0);
811 if (frag_size && size)
812 add_nursery_frag (&mutator_allocator, frag_size, frag_start, frag_end);
816 add_alloc_record (*pin_entry, frag_size, PINNING);
818 frag_start = frag_end + frag_size;
/* Trailing fragment between the last obstacle and the nursery end. */
821 nursery_last_pinned_end = frag_start;
822 frag_end = sgen_nursery_end;
823 frag_size = frag_end - frag_start;
825 add_nursery_frag (&mutator_allocator, frag_size, frag_start, frag_end);
827 /* Now it's safe to release the fragments exclude list. */
828 sgen_minor_collector.build_fragments_release_exclude_head ();
830 /* First we reorder the fragment list to be in ascending address order. This makes H/W prefetchers happier. */
831 fragment_list_reverse (&mutator_allocator);
833 /*The collector might want to do something with the final nursery fragment list.*/
834 sgen_minor_collector.build_fragments_finish (&mutator_allocator);
836 if (!unmask (mutator_allocator.alloc_head)) {
837 SGEN_LOG (1, "Nursery fully pinned");
838 for (pin_entry = pin_start; pin_entry < pin_end; ++pin_entry) {
839 void *p = *pin_entry;
840 SGEN_LOG (3, "Bastard pinning obj %p (%s), size: %zd", p, sgen_safe_name (p), sgen_safe_object_get_size (p));
843 return fragment_total;
/* Upper bound of nursery allocation — currently just the nursery end address. */
847 sgen_nursery_alloc_get_upper_alloc_bound (void)
849 /*FIXME we need to calculate the collector upper bound as well, but this must be done in the previous GC. */
850 return sgen_nursery_end;
853 /*** Nursery memory allocation ***/
/* Account for a nursery region being discarded (e.g. a TLAB tail);
 * only updates statistics in the code visible here. */
855 sgen_nursery_retire_region (void *address, ptrdiff_t size)
857 HEAVY_STAT (InterlockedExchangeAdd (&stat_wasted_bytes_discarded_fragments, size));
/* Non-allocating probe: TRUE iff some mutator fragment can currently satisfy
 * an allocation of @size (after alignment). Fails for sizes that cannot be
 * aligned up without overflow. */
861 sgen_can_alloc_size (size_t size)
865 if (!SGEN_CAN_ALIGN_UP (size))
868 size = SGEN_ALIGN_UP (size);
870 for (frag = unmask (mutator_allocator.alloc_head); frag; frag = unmask (frag->next)) {
871 if ((size_t)(frag->fragment_end - frag->fragment_next) >= size)
/* Allocate @size bytes (aligned up) for one object from the mutator allocator,
 * delegating to the parallel fragment allocator. Size must be within the
 * small-object range asserted below. */
878 sgen_nursery_alloc (size_t size)
880 SGEN_ASSERT (1, size >= sizeof (MonoObject) && size <= (SGEN_MAX_SMALL_OBJ_SIZE + CANARY_SIZE), "Invalid nursery object size");
882 SGEN_LOG (4, "Searching nursery for size: %zd", size);
883 size = SGEN_ALIGN_UP (size);
885 HEAVY_STAT (InterlockedIncrement (&stat_nursery_alloc_requests));
887 return sgen_fragment_allocator_par_alloc (&mutator_allocator, size);
/* Allocate a byte range (e.g. for a TLAB): prefer @desired_size, accept down
 * to @minimum_size; the actual granted size is stored in *out_alloc_size. */
891 sgen_nursery_alloc_range (size_t desired_size, size_t minimum_size, size_t *out_alloc_size)
893 SGEN_LOG (4, "Searching for byte range desired size: %zd minimum size %zd", desired_size, minimum_size);
895 HEAVY_STAT (InterlockedIncrement (&stat_nursery_alloc_range_requests));
897 return sgen_fragment_allocator_par_range_alloc (&mutator_allocator, desired_size, minimum_size, out_alloc_size);
900 /*** Initialization ***/
902 #ifdef HEAVY_STATISTICS
/* Register the HEAVY_STATISTICS counters with the Mono counters subsystem.
 * NOTE(review): the matching #endif is not visible in this listing; also note
 * the typo "restries" in the last counter name is preserved as-is (it is a
 * runtime-visible string). */
905 sgen_nursery_allocator_init_heavy_stats (void)
907 mono_counters_register ("bytes wasted trailer fragments", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_wasted_bytes_trailer);
908 mono_counters_register ("bytes wasted small areas", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_wasted_bytes_small_areas);
909 mono_counters_register ("bytes wasted discarded fragments", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_wasted_bytes_discarded_fragments);
911 mono_counters_register ("# nursery alloc requests", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_nursery_alloc_requests);
912 mono_counters_register ("# nursery alloc iterations", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_alloc_iterations);
913 mono_counters_register ("# nursery alloc retries", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_alloc_retries);
915 mono_counters_register ("# nursery alloc range requests", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_nursery_alloc_range_requests);
916 mono_counters_register ("# nursery alloc range iterations", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_alloc_range_iterations);
917 mono_counters_register ("# nursery alloc range restries", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_alloc_range_retries);
/* One-time init: register the fixed-size internal allocator for SgenFragment
 * descriptors, and (when NALLOC_DEBUG is enabled) reserve the alloc_records
 * buffer. */
923 sgen_init_nursery_allocator (void)
925 sgen_register_fixed_internal_mem_type (INTERNAL_MEM_FRAGMENT, sizeof (SgenFragment));
927 alloc_records = sgen_alloc_os_memory (sizeof (AllocRecord) * ALLOC_RECORD_COUNT, SGEN_ALLOC_INTERNAL | SGEN_ALLOC_ACTIVATE, "debugging memory");
/* Hand the to-space bitmap to the minor collector ahead of a minor collection. */
932 sgen_nursery_alloc_prepare_for_minor (void)
934 sgen_minor_collector.prepare_to_space (sgen_space_bitmap, sgen_space_bitmap_size);
/* Same preparation ahead of a major collection. */
938 sgen_nursery_alloc_prepare_for_major (void)
940 sgen_minor_collector.prepare_to_space (sgen_space_bitmap, sgen_space_bitmap_size);
/* Record the nursery [start, end) bounds, size the to-space bitmap (one bit
 * per SGEN_TO_SPACE_GRANULE_IN_BYTES granule, rounded up), and let the minor
 * collector set up the initial single large fragment. */
944 sgen_nursery_allocator_set_nursery_bounds (char *start, char *end)
946 sgen_nursery_start = start;
947 sgen_nursery_end = end;
950 * This will not divide evenly for tiny nurseries (<4kb), so we make sure to be on
951 * the right side of things and round up. We could just do a MIN(1,x) instead,
952 * since the nursery size must be a power of 2.
954 sgen_space_bitmap_size = (end - start + SGEN_TO_SPACE_GRANULE_IN_BYTES * 8 - 1) / (SGEN_TO_SPACE_GRANULE_IN_BYTES * 8);
955 sgen_space_bitmap = g_malloc0 (sgen_space_bitmap_size);
957 /* Setup the single first large fragment */
958 sgen_minor_collector.init_nursery (&mutator_allocator, start, end);