/*
 * sgen-nursery-allocator.c: Nursery allocation code.
 *
 * Copyright 2009-2010 Novell, Inc.
 * Copyright 2011 Xamarin Inc (http://www.xamarin.com)
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
/*
 * The young generation is divided into fragments. This is because
 * we can hand one fragment to a thread for lock-less fast alloc and
 * because the young generation ends up fragmented anyway by pinned objects.
 * Once a collection is done, a list of fragments is created. When doing
 * thread-local alloc we use smallish fragments so that new threads can
 * allocate memory from gen0 without triggering a collection. Threads that
 * are found to allocate lots of memory are given bigger fragments. This
 * should make the finalizer thread use little nursery memory after a while.
 * We should start by assigning threads very small fragments: if there are many
 * threads, the nursery will otherwise be full of reserved space that the threads
 * may never use, slowing down allocation speed.
 * Thread-local allocation is done from areas of memory HotSpot calls Thread Local
 * Allocation Buffers (TLABs).
 */
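/*
 * Illustrative sketch only, not part of this allocator: the TLAB fast path
 * described above reduces to a bump-pointer allocation from a thread-owned
 * buffer. The names below (ExampleTlab, example_tlab_alloc) are hypothetical.
 */
#if 0
typedef struct {
	char *pos;	/* next free byte in the thread's buffer */
	char *end;	/* one past the last usable byte */
} ExampleTlab;

static inline void*
example_tlab_alloc (ExampleTlab *tlab, size_t size)
{
	char *p = tlab->pos;
	if ((size_t)(tlab->end - p) < size)
		return NULL;	/* buffer exhausted: fall back and grab a new fragment */
	tlab->pos = p + size;	/* no locking needed: the buffer is owned by one thread */
	return p;
}
#endif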
#ifdef HAVE_SEMAPHORE_H
#include <semaphore.h>
#endif
#include "metadata/sgen-gc.h"
#include "metadata/metadata-internals.h"
#include "metadata/class-internals.h"
#include "metadata/gc-internal.h"
#include "metadata/object-internals.h"
#include "metadata/threads.h"
#include "metadata/sgen-cardtable.h"
#include "metadata/sgen-protocol.h"
#include "metadata/sgen-archdep.h"
#include "metadata/sgen-bridge.h"
#include "metadata/mono-gc.h"
#include "metadata/method-builder.h"
#include "metadata/profiler-private.h"
#include "metadata/monitor.h"
#include "metadata/threadpool-internals.h"
#include "metadata/mempool-internals.h"
#include "metadata/marshal.h"
#include "utils/mono-mmap.h"
#include "utils/mono-time.h"
#include "utils/mono-semaphore.h"
#include "utils/mono-counters.h"
#include "utils/mono-proclib.h"
#include "utils/mono-threads.h"
/* Enable this to collect nursery allocation diagnostic data. */
//#define NALLOC_DEBUG 1
/* The mutator allocs from here. */
SgenFragmentAllocator mutator_allocator;

/* freelist of fragment structures */
static SgenFragment *fragment_freelist = NULL;

/* Allocator cursors */
static char *nursery_last_pinned_end = NULL;

char *sgen_nursery_start;
char *sgen_nursery_end;
int sgen_nursery_size = (1 << 22);
#ifdef SGEN_ALIGN_NURSERY
int sgen_nursery_bits = 22;
#endif

char *sgen_space_bitmap MONO_INTERNAL;
int sgen_space_bitmap_size MONO_INTERNAL;
#ifdef HEAVY_STATISTICS

static gint32 stat_wasted_bytes_trailer = 0;
static gint32 stat_wasted_bytes_small_areas = 0;
static gint32 stat_wasted_bytes_discarded_fragments = 0;
static gint32 stat_nursery_alloc_requests = 0;
static gint32 stat_alloc_iterations = 0;
static gint32 stat_alloc_retries = 0;

static gint32 stat_nursery_alloc_range_requests = 0;
static gint32 stat_alloc_range_iterations = 0;
static gint32 stat_alloc_range_retries = 0;

#endif
/************************************ Nursery allocation debugging ************************************/

#ifdef NALLOC_DEBUG

enum {
	FIXED_ALLOC = 1,
	RANGE_ALLOC,
	PINNING,
	BLOCK_ZEROING,
	CLEAR_NURSERY_FRAGS
};

typedef struct {
	char *address;
	size_t size;
	int reason;
	int seq;
	MonoNativeThreadId tid;
} AllocRecord;

#define ALLOC_RECORD_COUNT 128000

static AllocRecord *alloc_records;
static volatile int next_record;
static volatile int alloc_count;
static const char*
get_reason_name (AllocRecord *rec)
{
	switch (rec->reason) {
	case FIXED_ALLOC: return "fixed-alloc";
	case RANGE_ALLOC: return "range-alloc";
	case PINNING: return "pinning";
	case BLOCK_ZEROING: return "block-zeroing";
	case CLEAR_NURSERY_FRAGS: return "clear-nursery-frag";
	default: return "invalid";
	}
}
static void
reset_alloc_records (void)
{
	next_record = 0;
	alloc_count = 0;
}
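/*
 * Lock-free recording: InterlockedIncrement hands each caller a unique slot
 * in alloc_records, so racing mutator threads can log allocations without
 * further synchronization.
 */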
static void
add_alloc_record (char *addr, size_t size, int reason)
{
	int idx = InterlockedIncrement (&next_record) - 1;
	alloc_records [idx].address = addr;
	alloc_records [idx].size = size;
	alloc_records [idx].reason = reason;
	alloc_records [idx].seq = idx;
	alloc_records [idx].tid = mono_native_thread_id_get ();
}
static int
comp_alloc_record (const void *_a, const void *_b)
{
	const AllocRecord *a = _a;
	const AllocRecord *b = _b;
	if (a->address == b->address)
		return a->seq - b->seq;
	return a->address - b->address;
}
#define rec_end(REC) ((REC)->address + (REC)->size)
void
dump_alloc_records (void)
{
	int i;
	qsort (alloc_records, next_record, sizeof (AllocRecord), comp_alloc_record);

	printf ("------------------------------------DUMP RECORDS----------------------------\n");
	for (i = 0; i < next_record; ++i) {
		AllocRecord *rec = alloc_records + i;
		printf ("obj [%p, %p] size %zd reason %s seq %d tid %zx\n", rec->address, rec_end (rec), rec->size, get_reason_name (rec), rec->seq, (size_t)rec->tid);
	}
}
void
verify_alloc_records (void)
{
	int i;
	int total = 0;
	int holes = 0;
	int max_hole = 0;
	AllocRecord *prev = NULL;

	qsort (alloc_records, next_record, sizeof (AllocRecord), comp_alloc_record);
	printf ("------------------------------------DUMP RECORDS- %d %d---------------------------\n", next_record, alloc_count);
	for (i = 0; i < next_record; ++i) {
		AllocRecord *rec = alloc_records + i;
		int hole_size = 0;
		total += rec->size;
		if (prev) {
			if (rec_end (prev) > rec->address)
				printf ("WE GOT OVERLAPPING objects %p and %p\n", prev->address, rec->address);
			if ((rec->address - rec_end (prev)) >= 8)
				++holes;
			hole_size = rec->address - rec_end (prev);
			max_hole = MAX (max_hole, hole_size);
		}
		printf ("obj [%p, %p] size %zd hole to prev %d reason %s seq %d tid %zx\n", rec->address, rec_end (rec), rec->size, hole_size, get_reason_name (rec), rec->seq, (size_t)rec->tid);
		prev = rec;
	}
	printf ("SUMMARY total alloc'd %d holes %d max_hole %d\n", total, holes, max_hole);
}

#endif /* NALLOC_DEBUG */
/*********************************************************************************/
static inline gpointer
mask (gpointer n, uintptr_t bit)
{
	return (gpointer)(((uintptr_t)n) | bit);
}

static inline gpointer
unmask (gpointer p)
{
	return (gpointer)((uintptr_t)p & ~(uintptr_t)0x3);
}

static inline uintptr_t
get_mark (gpointer n)
{
	return (uintptr_t)n & 0x1;
}
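/*
 * The helpers above implement the pointer tagging used by the lock-free
 * fragment list: fragments are at least 4-byte aligned, so the low two bits
 * of a next pointer are free. Setting bit 0 of a fragment's next pointer
 * marks that fragment as logically deleted; unmask () strips the tag bits to
 * recover the real address.
 */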
/* MUST be called with the world stopped */
SgenFragment*
sgen_fragment_allocator_alloc (void)
{
	SgenFragment *frag = fragment_freelist;
	if (frag) {
		fragment_freelist = frag->next_in_order;
		frag->next = frag->next_in_order = NULL;
		return frag;
	}
	frag = sgen_alloc_internal (INTERNAL_MEM_FRAGMENT);
	frag->next = frag->next_in_order = NULL;
	return frag;
}
void
sgen_fragment_allocator_add (SgenFragmentAllocator *allocator, char *start, char *end)
{
	SgenFragment *fragment;

	fragment = sgen_fragment_allocator_alloc ();
	fragment->fragment_start = start;
	fragment->fragment_next = start;
	fragment->fragment_end = end;
	fragment->next_in_order = fragment->next = unmask (allocator->region_head);

	allocator->region_head = allocator->alloc_head = fragment;
	g_assert (fragment->fragment_end > fragment->fragment_start);
}
void
sgen_fragment_allocator_release (SgenFragmentAllocator *allocator)
{
	SgenFragment *last = allocator->region_head;
	if (!last)
		return;

	/* Find the last fragment in insert order */
	for (; last->next_in_order; last = last->next_in_order) ;

	last->next_in_order = fragment_freelist;
	fragment_freelist = allocator->region_head;
	allocator->alloc_head = allocator->region_head = NULL;
}
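/*
 * Find the list cell whose pointer currently targets frag, so the caller can
 * CAS frag out of the list. While walking, any node already marked deleted is
 * helped out of the list (physically unlinked), as in Michael's lock-free
 * linked-list algorithm. Returns NULL if frag was removed concurrently.
 */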
static SgenFragment**
find_previous_pointer_fragment (SgenFragmentAllocator *allocator, SgenFragment *frag)
{
	SgenFragment **prev;
	SgenFragment *cur, *next;
#ifdef NALLOC_DEBUG
	int count = 0;
#endif

try_again:
	prev = &allocator->alloc_head;
#ifdef NALLOC_DEBUG
	if (count++ > 5)
		printf ("retry count for fppf is %d\n", count);
#endif

	cur = unmask (*prev);

	while (1) {
		if (cur == NULL)
			return NULL;
		next = cur->next;

		/*
		 * We need to make sure that we dereference prev below
		 * after reading cur->next above, so we need a read
		 * barrier.
		 */
		mono_memory_read_barrier ();

		if (*prev != cur)
			goto try_again;

		if (!get_mark (next)) {
			if (cur == frag)
				return prev;
			prev = &cur->next;
		} else {
			next = unmask (next);
			if (InterlockedCompareExchangePointer ((volatile gpointer*)prev, next, cur) != cur)
				goto try_again;
			/* we must make sure that the next from cur->next happens after */
			mono_memory_write_barrier ();
		}

		cur = mono_lls_pointer_unmask (next);
	}
}
static gboolean
claim_remaining_size (SgenFragment *frag, char *alloc_end)
{
	/* All space used, nothing to claim. */
	if (frag->fragment_end <= alloc_end)
		return FALSE;

	/* Try to alloc all the remaining space. */
	return InterlockedCompareExchangePointer ((volatile gpointer*)&frag->fragment_next, frag->fragment_end, alloc_end) == alloc_end;
}
static void*
par_alloc_from_fragment (SgenFragmentAllocator *allocator, SgenFragment *frag, size_t size)
{
	char *p = frag->fragment_next;
	char *end = p + size;

	if (end > frag->fragment_end)
		return NULL;

	/* p = frag->fragment_next must happen before */
	mono_memory_barrier ();

	if (InterlockedCompareExchangePointer ((volatile gpointer*)&frag->fragment_next, end, p) != p)
		return NULL;

	if (frag->fragment_end - end < SGEN_MAX_NURSERY_WASTE) {
		SgenFragment *next, **prev_ptr;

		/*
		 * Before we clean the remaining nursery, we must claim the remaining space
		 * as it could end up being used by the range allocator, since it can end up
		 * allocating from this dying fragment as it doesn't respect SGEN_MAX_NURSERY_WASTE
		 * when doing second-chance allocation.
		 */
		if (sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION && claim_remaining_size (frag, end)) {
			sgen_clear_range (end, frag->fragment_end);
			HEAVY_STAT (InterlockedExchangeAdd (&stat_wasted_bytes_trailer, frag->fragment_end - end));
#ifdef NALLOC_DEBUG
			add_alloc_record (end, frag->fragment_end - end, BLOCK_ZEROING);
#endif
		}

		prev_ptr = find_previous_pointer_fragment (allocator, frag);

		/* Use Michael's linked list remove */

		/* prev_ptr will be null if the fragment was removed concurrently */
		while (prev_ptr) {
			next = frag->next;

			if (!get_mark (next)) {
				/* frag->next read must happen before the first CAS */
				mono_memory_write_barrier ();

				/* Fail if the next node is removed concurrently and its CAS wins */
				if (InterlockedCompareExchangePointer ((volatile gpointer*)&frag->next, mask (next, 1), next) != next)
					continue;
			}

			/* The second CAS must happen after the first CAS or frag->next. */
			mono_memory_write_barrier ();

			/* Fail if the previous node was deleted and its CAS wins */
			if (InterlockedCompareExchangePointer ((volatile gpointer*)prev_ptr, next, frag) != frag) {
				prev_ptr = find_previous_pointer_fragment (allocator, frag);
				continue;
			}
			break;
		}
	}

	return p;
}
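/*
 * Note on the unlink protocol above: removal is the two-phase delete from
 * Michael's lock-free linked list. The first CAS sets the mark bit in
 * frag->next (logical deletion), the second CAS swings the predecessor's
 * pointer past the fragment (physical unlink). A thread that loses either
 * race re-locates the predecessor and retries, so dying fragments are removed
 * exactly once without any lock. The serial variant below runs only in
 * single-threaded contexts and needs none of this machinery.
 */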
static char*
serial_alloc_from_fragment (SgenFragment **previous, SgenFragment *frag, size_t size)
{
	char *p = frag->fragment_next;
	char *end = p + size;

	if (end > frag->fragment_end)
		return NULL;

	frag->fragment_next = end;

	if (frag->fragment_end - end < SGEN_MAX_NURSERY_WASTE) {
		*previous = frag->next;

		/* Clear the remaining space, pinning depends on this. FIXME move this to use phony arrays */
		memset (end, 0, frag->fragment_end - end);

		*previous = frag->next;
	}

	return p;
}
void*
sgen_fragment_allocator_par_alloc (SgenFragmentAllocator *allocator, size_t size)
{
	SgenFragment *frag;
#ifdef NALLOC_DEBUG
	InterlockedIncrement (&alloc_count);
#endif

restart:
	for (frag = unmask (allocator->alloc_head); unmask (frag); frag = unmask (frag->next)) {
		HEAVY_STAT (InterlockedIncrement (&stat_alloc_iterations));

		if (size <= (frag->fragment_end - frag->fragment_next)) {
			void *p = par_alloc_from_fragment (allocator, frag, size);
			if (!p) {
				HEAVY_STAT (InterlockedIncrement (&stat_alloc_retries));
				goto restart;
			}
#ifdef NALLOC_DEBUG
			add_alloc_record (p, size, FIXED_ALLOC);
#endif
			return p;
		}
	}
	return NULL;
}
void*
sgen_fragment_allocator_serial_alloc (SgenFragmentAllocator *allocator, size_t size)
{
	SgenFragment *frag;
	SgenFragment **previous;
#ifdef NALLOC_DEBUG
	InterlockedIncrement (&alloc_count);
#endif

	previous = &allocator->alloc_head;

	for (frag = *previous; frag; frag = *previous) {
		char *p = serial_alloc_from_fragment (previous, frag, size);

		HEAVY_STAT (InterlockedIncrement (&stat_alloc_iterations));

		if (p) {
#ifdef NALLOC_DEBUG
			add_alloc_record (p, size, FIXED_ALLOC);
#endif
			return p;
		}
		previous = &frag->next;
	}
	return NULL;
}
void*
sgen_fragment_allocator_serial_range_alloc (SgenFragmentAllocator *allocator, size_t desired_size, size_t minimum_size, size_t *out_alloc_size)
{
	SgenFragment *frag, **previous, *min_frag = NULL, **prev_min_frag = NULL;
	size_t current_minimum = minimum_size;

#ifdef NALLOC_DEBUG
	InterlockedIncrement (&alloc_count);
#endif

	previous = &allocator->alloc_head;

	for (frag = *previous; frag; frag = *previous) {
		size_t frag_size = frag->fragment_end - frag->fragment_next;

		HEAVY_STAT (InterlockedIncrement (&stat_alloc_range_iterations));

		if (desired_size <= frag_size) {
			void *p;
			*out_alloc_size = desired_size;

			p = serial_alloc_from_fragment (previous, frag, desired_size);
#ifdef NALLOC_DEBUG
			add_alloc_record (p, desired_size, RANGE_ALLOC);
#endif
			return p;
		}
		if (current_minimum <= frag_size) {
			min_frag = frag;
			prev_min_frag = previous;
			current_minimum = frag_size;
		}
		previous = &frag->next;
	}

	if (min_frag) {
		void *p;
		size_t frag_size = min_frag->fragment_end - min_frag->fragment_next;
		*out_alloc_size = frag_size;

		p = serial_alloc_from_fragment (prev_min_frag, min_frag, frag_size);

#ifdef NALLOC_DEBUG
		add_alloc_record (p, frag_size, RANGE_ALLOC);
#endif
		return p;
	}

	return NULL;
}
void*
sgen_fragment_allocator_par_range_alloc (SgenFragmentAllocator *allocator, size_t desired_size, size_t minimum_size, size_t *out_alloc_size)
{
	SgenFragment *frag, *min_frag;
	size_t current_minimum;

restart:
	min_frag = NULL;
	current_minimum = minimum_size;

#ifdef NALLOC_DEBUG
	InterlockedIncrement (&alloc_count);
#endif

	for (frag = unmask (allocator->alloc_head); frag; frag = unmask (frag->next)) {
		int frag_size = frag->fragment_end - frag->fragment_next;

		HEAVY_STAT (InterlockedIncrement (&stat_alloc_range_iterations));

		if (desired_size <= frag_size) {
			void *p;
			*out_alloc_size = desired_size;

			p = par_alloc_from_fragment (allocator, frag, desired_size);
			if (!p) {
				HEAVY_STAT (InterlockedIncrement (&stat_alloc_range_retries));
				goto restart;
			}
#ifdef NALLOC_DEBUG
			add_alloc_record (p, desired_size, RANGE_ALLOC);
#endif
			return p;
		}
		if (current_minimum <= frag_size) {
			min_frag = frag;
			current_minimum = frag_size;
		}
	}

	/* The second fragment_next read should be ordered in respect to the first code block */
	mono_memory_barrier ();

	if (min_frag) {
		void *p;
		int frag_size;

		frag_size = min_frag->fragment_end - min_frag->fragment_next;
		if (frag_size < minimum_size)
			goto restart;

		*out_alloc_size = frag_size;

		mono_memory_barrier ();
		p = par_alloc_from_fragment (allocator, min_frag, frag_size);

		/* XXX restarting here is quite dubious given this is already second-chance allocation. */
		if (!p) {
			HEAVY_STAT (InterlockedIncrement (&stat_alloc_retries));
			goto restart;
		}
#ifdef NALLOC_DEBUG
		add_alloc_record (p, frag_size, RANGE_ALLOC);
#endif
		return p;
	}

	return NULL;
}
static void
sgen_clear_allocator_fragments (SgenFragmentAllocator *allocator)
{
	SgenFragment *frag;

	for (frag = unmask (allocator->alloc_head); frag; frag = unmask (frag->next)) {
		DEBUG (4, fprintf (gc_debug_file, "Clear nursery frag %p-%p\n", frag->fragment_next, frag->fragment_end));
		sgen_clear_range (frag->fragment_next, frag->fragment_end);
#ifdef NALLOC_DEBUG
		add_alloc_record (frag->fragment_next, frag->fragment_end - frag->fragment_next, CLEAR_NURSERY_FRAGS);
#endif
	}
}
/* Clear all remaining nursery fragments */
void
sgen_clear_nursery_fragments (void)
{
	if (sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION) {
		sgen_clear_allocator_fragments (&mutator_allocator);
		sgen_minor_collector.clear_fragments ();
	}
}
/*
 * Mark a given range of memory as invalid.
 *
 * This can be done either by zeroing memory or by placing
 * a phony byte[] array. This keeps the heap forward walkable.
 *
 * This function ignores calls with a zero range, even if
 * both start and end are NULL.
 */
void
sgen_clear_range (char *start, char *end)
{
	MonoArray *o;
	size_t size = end - start;

	if ((start && !end) || (start > end))
		g_error ("Invalid range [%p %p]", start, end);

	if (size < sizeof (MonoArray)) {
		memset (start, 0, size);
		return;
	}

	o = (MonoArray*)start;
	o->obj.vtable = sgen_get_array_fill_vtable ();
	/* Mark this as not a real object */
	o->obj.synchronisation = GINT_TO_POINTER (-1);
	o->bounds = NULL;
	o->max_length = size - sizeof (MonoArray);
	sgen_set_nursery_scan_start (start);
	g_assert (start + sgen_safe_object_get_size ((MonoObject*)o) == end);
}
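/*
 * Concretely: clearing a 64-byte hole writes a byte[] header whose max_length
 * is 64 - sizeof (MonoArray). A linear heap walk reads that size, skips
 * exactly to the next real object, and never has to parse the dead space.
 */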
void
sgen_nursery_allocator_prepare_for_pinning (void)
{
	sgen_clear_allocator_fragments (&mutator_allocator);
	sgen_minor_collector.clear_fragments ();
}
static mword fragment_total = 0;
/*
 * We found a fragment of free memory in the nursery: memzero it and if
 * it is big enough, add it to the list of fragments that can be used for
 * allocation.
 */
static void
add_nursery_frag (SgenFragmentAllocator *allocator, size_t frag_size, char* frag_start, char* frag_end)
{
	DEBUG (4, fprintf (gc_debug_file, "Found empty fragment: %p-%p, size: %zd\n", frag_start, frag_end, frag_size));
	binary_protocol_empty (frag_start, frag_size);
	MONO_GC_NURSERY_SWEEPED (frag_start, frag_end - frag_start);
	/* Not worth dealing with smaller fragments: need to tune */
	if (frag_size >= SGEN_MAX_NURSERY_WASTE) {
		/* memsetting just the first chunk start is bound to provide better cache locality */
		if (sgen_get_nursery_clear_policy () == CLEAR_AT_GC)
			memset (frag_start, 0, frag_size);

#ifdef NALLOC_DEBUG
		/* XXX convert this into a flight record entry
		printf ("\tfragment [%p %p] size %zd\n", frag_start, frag_end, frag_size);
		*/
#endif
		sgen_fragment_allocator_add (allocator, frag_start, frag_end);
		fragment_total += frag_size;
	} else {
		/* Clear unused fragments, pinning depends on this */
		sgen_clear_range (frag_start, frag_end);
		HEAVY_STAT (InterlockedExchangeAdd (&stat_wasted_bytes_small_areas, frag_size));
	}
}
static void
fragment_list_reverse (SgenFragmentAllocator *allocator)
{
	SgenFragment *prev = NULL, *list = allocator->region_head;
	while (list) {
		SgenFragment *next = list->next;
		list->next = prev;
		list->next_in_order = prev;
		prev = list;
		list = next;
	}

	allocator->region_head = allocator->alloc_head = prev;
}
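/*
 * Build the post-collection fragment list. Pinned objects in [start, start +
 * num_entries[ arrive sorted by address; the hole between the end of one
 * pinned or excluded range and the start of the next becomes a candidate
 * fragment. Fragments are prepended as we walk upward through the nursery, so
 * the finished list is reversed (see fragment_list_reverse above) to get
 * ascending address order.
 */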
mword
sgen_build_nursery_fragments (GCMemSection *nursery_section, void **start, int num_entries)
{
	char *frag_start, *frag_end;
	size_t frag_size;
	int i = 0;
	SgenFragment *frags_ranges;

#ifdef NALLOC_DEBUG
	reset_alloc_records ();
#endif
	/*The mutator fragments are done. We no longer need them. */
	sgen_fragment_allocator_release (&mutator_allocator);

	frag_start = sgen_nursery_start;
	fragment_total = 0;

	/* The current nursery might give us a fragment list to exclude [start, next[*/
	frags_ranges = sgen_minor_collector.build_fragments_get_exclude_head ();

	/* clear scan starts */
	memset (nursery_section->scan_starts, 0, nursery_section->num_scan_start * sizeof (gpointer));

	while (i < num_entries || frags_ranges) {
		char *addr0, *addr1;
		size_t size;
		SgenFragment *last_frag = NULL;

		addr0 = addr1 = sgen_nursery_end;
		if (i < num_entries)
			addr0 = start [i];
		if (frags_ranges)
			addr1 = frags_ranges->fragment_start;

		if (addr0 < addr1) {
			SGEN_UNPIN_OBJECT (addr0);
			sgen_set_nursery_scan_start (addr0);
			frag_end = addr0;
			size = SGEN_ALIGN_UP (sgen_safe_object_get_size ((MonoObject*)addr0));
			++i;
		} else {
			frag_end = addr1;
			size = frags_ranges->fragment_next - addr1;
			last_frag = frags_ranges;
			frags_ranges = frags_ranges->next_in_order;
		}

		frag_size = frag_end - frag_start;

		g_assert (frag_size >= 0);
		if (frag_size && size)
			add_nursery_frag (&mutator_allocator, frag_size, frag_start, frag_end);

		frag_size = size;
#ifdef NALLOC_DEBUG
		add_alloc_record (start [i], frag_size, PINNING);
#endif
		frag_start = frag_end + frag_size;
	}

	nursery_last_pinned_end = frag_start;
	frag_end = sgen_nursery_end;
	frag_size = frag_end - frag_start;
	if (frag_size)
		add_nursery_frag (&mutator_allocator, frag_size, frag_start, frag_end);

	/* Now it's safe to release the fragments exclude list. */
	sgen_minor_collector.build_fragments_release_exclude_head ();

	/* First we reorder the fragment list to be in ascending address order. This makes H/W prefetchers happier. */
	fragment_list_reverse (&mutator_allocator);

	/*The collector might want to do something with the final nursery fragment list.*/
	sgen_minor_collector.build_fragments_finish (&mutator_allocator);

	if (!unmask (mutator_allocator.alloc_head)) {
		DEBUG (1, fprintf (gc_debug_file, "Nursery fully pinned (%d)\n", num_entries));
		for (i = 0; i < num_entries; ++i)
			DEBUG (3, fprintf (gc_debug_file, "Bastard pinning obj %p (%s), size: %d\n", start [i], sgen_safe_name (start [i]), sgen_safe_object_get_size (start [i])));
	}

	return fragment_total;
}
char*
sgen_nursery_alloc_get_upper_alloc_bound (void)
{
	/*FIXME we need to calculate the collector upper bound as well, but this must be done in the previous GC. */
	return sgen_nursery_end;
}
/*** Nursery memory allocation ***/
void
sgen_nursery_retire_region (void *address, ptrdiff_t size)
{
	HEAVY_STAT (InterlockedExchangeAdd (&stat_wasted_bytes_discarded_fragments, size));
}
gboolean
sgen_can_alloc_size (size_t size)
{
	SgenFragment *frag;
	size = SGEN_ALIGN_UP (size);

	for (frag = unmask (mutator_allocator.alloc_head); frag; frag = unmask (frag->next)) {
		if ((frag->fragment_end - frag->fragment_next) >= size)
			return TRUE;
	}
	return FALSE;
}
void*
sgen_nursery_alloc (size_t size)
{
	DEBUG (4, fprintf (gc_debug_file, "Searching nursery for size: %zd\n", size));
	size = SGEN_ALIGN_UP (size);

	HEAVY_STAT (InterlockedIncrement (&stat_nursery_alloc_requests));

	return sgen_fragment_allocator_par_alloc (&mutator_allocator, size);
}
void*
sgen_nursery_alloc_range (size_t desired_size, size_t minimum_size, size_t *out_alloc_size)
{
	DEBUG (4, fprintf (gc_debug_file, "Searching for byte range desired size: %zd minimum size %zd\n", desired_size, minimum_size));

	HEAVY_STAT (InterlockedIncrement (&stat_nursery_alloc_range_requests));

	return sgen_fragment_allocator_par_range_alloc (&mutator_allocator, desired_size, minimum_size, out_alloc_size);
}
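/*
 * Illustrative usage sketch only (hypothetical caller, not part of this
 * file): a TLAB refill prefers a full-sized buffer but accepts any fragment
 * that at least fits the current allocation; the size actually granted comes
 * back through the out parameter. The names below are made up.
 */
#if 0
static void*
example_refill_tlab (size_t tlab_size, size_t object_size, size_t *granted)
{
	return sgen_nursery_alloc_range (tlab_size, object_size, granted);
}
#endif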
/*** Initialization ***/

#ifdef HEAVY_STATISTICS

void
sgen_nursery_allocator_init_heavy_stats (void)
{
	mono_counters_register ("bytes wasted trailer fragments", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_wasted_bytes_trailer);
	mono_counters_register ("bytes wasted small areas", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_wasted_bytes_small_areas);
	mono_counters_register ("bytes wasted discarded fragments", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_wasted_bytes_discarded_fragments);

	mono_counters_register ("# nursery alloc requests", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_nursery_alloc_requests);
	mono_counters_register ("# nursery alloc iterations", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_alloc_iterations);
	mono_counters_register ("# nursery alloc retries", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_alloc_retries);

	mono_counters_register ("# nursery alloc range requests", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_nursery_alloc_range_requests);
	mono_counters_register ("# nursery alloc range iterations", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_alloc_range_iterations);
	mono_counters_register ("# nursery alloc range retries", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_alloc_range_retries);
}

#endif
void
sgen_init_nursery_allocator (void)
{
	sgen_register_fixed_internal_mem_type (INTERNAL_MEM_FRAGMENT, sizeof (SgenFragment));
#ifdef NALLOC_DEBUG
	alloc_records = sgen_alloc_os_memory (sizeof (AllocRecord) * ALLOC_RECORD_COUNT, TRUE, "debugging memory");
#endif
}
void
sgen_nursery_alloc_prepare_for_minor (void)
{
	sgen_minor_collector.prepare_to_space (sgen_space_bitmap, sgen_space_bitmap_size);
}

void
sgen_nursery_alloc_prepare_for_major (void)
{
	sgen_minor_collector.prepare_to_space (sgen_space_bitmap, sgen_space_bitmap_size);
}
void
sgen_nursery_allocator_set_nursery_bounds (char *start, char *end)
{
	sgen_nursery_start = start;
	sgen_nursery_end = end;

	sgen_space_bitmap_size = (end - start) / (SGEN_TO_SPACE_GRANULE_IN_BYTES * 8);
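	/*
	 * One bit per to-space granule: e.g. a 4 MB nursery with 512-byte
	 * granules needs 4 MB / (512 * 8) = 1 KB of bitmap (the actual granule
	 * size is whatever SGEN_TO_SPACE_GRANULE_IN_BYTES is defined to).
	 */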
	sgen_space_bitmap = g_malloc0 (sgen_space_bitmap_size);

	/* Setup the single first large fragment */
	sgen_minor_collector.init_nursery (&mutator_allocator, start, end);
}