/*
 * sgen-nursery-allocator.c: Nursery allocation code.
 *
 * Copyright 2009-2010 Novell, Inc.
 * Copyright 2011 Xamarin Inc (http://www.xamarin.com)
 * Copyright (C) 2012 Xamarin Inc
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License 2.0 as published by the Free Software Foundation;
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License 2.0 along with this library; if not, write to the Free
 * Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */
/*
 * The young generation is divided into fragments. This is because
 * we can hand one fragment to a thread for lock-less fast alloc and
 * because the young generation ends up fragmented anyway by pinned objects.
 * Once a collection is done, a list of fragments is created. When doing
 * thread local alloc we use smallish nurseries so we allow new threads to
 * allocate memory from gen0 without triggering a collection. Threads that
 * are found to allocate lots of memory are given bigger fragments. This
 * should make the finalizer thread use little nursery memory after a while.
 * We should start assigning threads very small fragments: if there are many
 * threads the nursery will be full of reserved space that the threads may not
 * use at all, slowing down allocation speed.
 * Thread local allocation is done from areas of memory Hotspot calls Thread Local
 * Allocation Buffers (TLABs).
 */
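
/*
 * Illustrative sketch (not part of this allocator): the TLAB fast path
 * described above is just bump-pointer allocation inside a thread-private
 * fragment.  The cursor names below (tlab_next, tlab_temp_end) are
 * placeholders for whatever per-thread state the client keeps; when the
 * fast path fails, the thread asks this file for a fresh fragment.
 *
 *   static __thread char *tlab_next;      // next free byte in the thread's fragment
 *   static __thread char *tlab_temp_end;  // end of the thread's fragment
 *
 *   static void*
 *   tlab_try_alloc (size_t size)
 *   {
 *           char *p = tlab_next;
 *           // Fast path: bump the cursor; no locks, no atomics.
 *           if (size <= (size_t)(tlab_temp_end - p)) {
 *                   tlab_next = p + size;
 *                   return p;
 *           }
 *           // Slow path: grab a new fragment, e.g. via sgen_nursery_alloc_range (),
 *           // install it as the new TLAB and retry.
 *           return NULL;
 *   }
 */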
#ifdef HAVE_SEMAPHORE_H
#include <semaphore.h>
#endif

#include "metadata/sgen-gc.h"
#include "metadata/metadata-internals.h"
#include "metadata/class-internals.h"
#include "metadata/gc-internal.h"
#include "metadata/object-internals.h"
#include "metadata/threads.h"
#include "metadata/sgen-cardtable.h"
#include "metadata/sgen-protocol.h"
#include "metadata/sgen-archdep.h"
#include "metadata/sgen-bridge.h"
#include "metadata/sgen-memory-governor.h"
#include "metadata/sgen-pinning.h"
#include "metadata/sgen-client.h"
#include "metadata/mono-gc.h"
#include "metadata/method-builder.h"
#include "metadata/profiler-private.h"
#include "metadata/monitor.h"
#include "metadata/threadpool-internals.h"
#include "metadata/mempool-internals.h"
#include "metadata/marshal.h"
#include "utils/mono-mmap.h"
#include "utils/mono-time.h"
#include "utils/mono-semaphore.h"
#include "utils/mono-counters.h"
#include "utils/mono-proclib.h"
#include "utils/mono-threads.h"
/* Enable this so nursery allocation diagnostic data is collected */
//#define NALLOC_DEBUG 1

/* The mutator allocs from here. */
SgenFragmentAllocator mutator_allocator;

/* free list of fragment structures */
static SgenFragment *fragment_freelist = NULL;

/* Allocator cursors */
static char *nursery_last_pinned_end = NULL;

char *sgen_nursery_start;
char *sgen_nursery_end;

size_t sgen_nursery_size = (1 << 22);
int sgen_nursery_bits = 22;

char *sgen_space_bitmap;
size_t sgen_space_bitmap_size;

#ifdef HEAVY_STATISTICS

static gint32 stat_wasted_bytes_trailer = 0;
static gint32 stat_wasted_bytes_small_areas = 0;
static gint32 stat_wasted_bytes_discarded_fragments = 0;
static gint32 stat_nursery_alloc_requests = 0;
static gint32 stat_alloc_iterations = 0;
static gint32 stat_alloc_retries = 0;

static gint32 stat_nursery_alloc_range_requests = 0;
static gint32 stat_alloc_range_iterations = 0;
static gint32 stat_alloc_range_retries = 0;

#endif

/************************************ Nursery allocation debugging *********************************************/
MonoNativeThreadId tid;

#define ALLOC_RECORD_COUNT 128000

static AllocRecord *alloc_records;
static volatile int next_record;
static volatile int alloc_count;

void dump_alloc_records (void);
void verify_alloc_records (void);

get_reason_name (AllocRecord *rec)

        switch (rec->reason) {
        case FIXED_ALLOC: return "fixed-alloc";
        case RANGE_ALLOC: return "range-alloc";
        case PINNING: return "pinning";
        case BLOCK_ZEROING: return "block-zeroing";
        case CLEAR_NURSERY_FRAGS: return "clear-nursery-frag";
        default: return "invalid";

reset_alloc_records (void)

add_alloc_record (char *addr, size_t size, int reason)

        int idx = InterlockedIncrement (&next_record) - 1;
        alloc_records [idx].address = addr;
        alloc_records [idx].size = size;
        alloc_records [idx].reason = reason;
        alloc_records [idx].seq = idx;
        alloc_records [idx].tid = mono_native_thread_id_get ();

comp_alloc_record (const void *_a, const void *_b)

        const AllocRecord *a = _a;
        const AllocRecord *b = _b;
        if (a->address == b->address)
                return a->seq - b->seq;
        return a->address - b->address;

#define rec_end(REC) ((REC)->address + (REC)->size)

dump_alloc_records (void)

        sgen_qsort (alloc_records, next_record, sizeof (AllocRecord), comp_alloc_record);

        printf ("------------------------------------DUMP RECORDS----------------------------\n");
        for (i = 0; i < next_record; ++i) {
                AllocRecord *rec = alloc_records + i;
                printf ("obj [%p, %p] size %d reason %s seq %d tid %zx\n", rec->address, rec_end (rec), (int)rec->size, get_reason_name (rec), rec->seq, (size_t)rec->tid);

verify_alloc_records (void)

        AllocRecord *prev = NULL;

        sgen_qsort (alloc_records, next_record, sizeof (AllocRecord), comp_alloc_record);
        printf ("------------------------------------DUMP RECORDS- %d %d---------------------------\n", next_record, alloc_count);
        for (i = 0; i < next_record; ++i) {
                AllocRecord *rec = alloc_records + i;
                if (rec_end (prev) > rec->address)
                        printf ("WE GOT OVERLAPPING objects %p and %p\n", prev->address, rec->address);
                if ((rec->address - rec_end (prev)) >= 8)
                hole_size = rec->address - rec_end (prev);
                max_hole = MAX (max_hole, hole_size);
                printf ("obj [%p, %p] size %d hole to prev %d reason %s seq %d tid %zx\n", rec->address, rec_end (rec), (int)rec->size, hole_size, get_reason_name (rec), rec->seq, (size_t)rec->tid);
        printf ("SUMMARY total alloc'd %d holes %d max_hole %d\n", total, holes, max_hole);

/*********************************************************************************/
static inline gpointer
mask (gpointer n, uintptr_t bit)
{
        return (gpointer)(((uintptr_t)n) | bit);
}

static inline gpointer
unmask (gpointer p)
{
        return (gpointer)((uintptr_t)p & ~(uintptr_t)0x3);
}

static inline uintptr_t
get_mark (gpointer n)
{
        return (uintptr_t)n & 0x1;
}
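
/*
 * The helpers above implement low-bit pointer tagging for the lock-free
 * fragment list: SgenFragment nodes are at least 4-byte aligned, so the two
 * low bits of a next pointer are normally zero and the lowest bit can carry
 * a "logically deleted" mark (Harris/Michael style list deletion).  A small
 * usage sketch, assuming `frag' is a pointer-aligned node:
 *
 *   gpointer tagged = mask (frag, 1);              // set the delete mark
 *   g_assert (get_mark (tagged) == 1);             // the mark is visible
 *   g_assert (unmask (tagged) == (gpointer)frag);  // the pointer is recoverable
 */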
/* MUST be called with world stopped */
sgen_fragment_allocator_alloc (void)

        SgenFragment *frag = fragment_freelist;
        fragment_freelist = frag->next_in_order;
        frag->next = frag->next_in_order = NULL;

        frag = sgen_alloc_internal (INTERNAL_MEM_FRAGMENT);
        frag->next = frag->next_in_order = NULL;

sgen_fragment_allocator_add (SgenFragmentAllocator *allocator, char *start, char *end)

        SgenFragment *fragment;

        fragment = sgen_fragment_allocator_alloc ();
        fragment->fragment_start = start;
        fragment->fragment_next = start;
        fragment->fragment_end = end;
        fragment->next_in_order = fragment->next = unmask (allocator->region_head);

        allocator->region_head = allocator->alloc_head = fragment;
        g_assert (fragment->fragment_end > fragment->fragment_start);

sgen_fragment_allocator_release (SgenFragmentAllocator *allocator)

        SgenFragment *last = allocator->region_head;

        /* Find the last fragment in insert order */
        for (; last->next_in_order; last = last->next_in_order) ;

        last->next_in_order = fragment_freelist;
        fragment_freelist = allocator->region_head;
        allocator->alloc_head = allocator->region_head = NULL;

static SgenFragment**
find_previous_pointer_fragment (SgenFragmentAllocator *allocator, SgenFragment *frag)

        SgenFragment *cur, *next;

        prev = &allocator->alloc_head;

        printf ("retry count for fppf is %d\n", count);

        cur = unmask (*prev);

        /*
         * We need to make sure that we dereference prev below
         * after reading cur->next above, so we need a read
         * barrier.
         */
        mono_memory_read_barrier ();

        if (!get_mark (next)) {

                next = unmask (next);
                if (InterlockedCompareExchangePointer ((volatile gpointer*)prev, next, cur) != cur)

                /* we must make sure that the next from cur->next happens after */
                mono_memory_write_barrier ();

        cur = mono_lls_pointer_unmask (next);

claim_remaining_size (SgenFragment *frag, char *alloc_end)

        /* All space used, nothing to claim. */
        if (frag->fragment_end <= alloc_end)

        /* Try to alloc all the remaining space. */
        return InterlockedCompareExchangePointer ((volatile gpointer*)&frag->fragment_next, frag->fragment_end, alloc_end) == alloc_end;

par_alloc_from_fragment (SgenFragmentAllocator *allocator, SgenFragment *frag, size_t size)

        char *p = frag->fragment_next;
        char *end = p + size;

        if (end > frag->fragment_end)

        /* p = frag->fragment_next must happen before */
        mono_memory_barrier ();

        if (InterlockedCompareExchangePointer ((volatile gpointer*)&frag->fragment_next, end, p) != p)

        if (frag->fragment_end - end < SGEN_MAX_NURSERY_WASTE) {
                SgenFragment *next, **prev_ptr;

                /*
                 * Before we clean the remaining nursery, we must claim the remaining space
                 * as it could end up being used by the range allocator, since it can end up
                 * allocating from this dying fragment as it doesn't respect SGEN_MAX_NURSERY_WASTE
                 * when doing second chance allocation.
                 */
                if ((sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION || sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION_DEBUG) && claim_remaining_size (frag, end)) {
                        sgen_clear_range (end, frag->fragment_end);
                        HEAVY_STAT (InterlockedExchangeAdd (&stat_wasted_bytes_trailer, frag->fragment_end - end));
                        add_alloc_record (end, frag->fragment_end - end, BLOCK_ZEROING);

                prev_ptr = find_previous_pointer_fragment (allocator, frag);

                /* Use Michael's linked list remove */

                /* prev_ptr will be null if the fragment was removed concurrently */

                        if (!get_mark (next)) {
                                /* frag->next read must happen before the first CAS */
                                mono_memory_write_barrier ();

                                /* Fail if the next node is removed concurrently and its CAS wins */
                                if (InterlockedCompareExchangePointer ((volatile gpointer*)&frag->next, mask (next, 1), next) != next) {

                        /* The second CAS must happen after the first CAS or frag->next. */
                        mono_memory_write_barrier ();

                        /* Fail if the previous node was deleted and its CAS wins */
                        if (InterlockedCompareExchangePointer ((volatile gpointer*)prev_ptr, unmask (next), frag) != frag) {
                                prev_ptr = find_previous_pointer_fragment (allocator, frag);
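
/*
 * The removal above is Michael's lock-free list delete, condensed.  A hedged
 * pseudo-C sketch of the two-CAS protocol (names taken from this file; the
 * memory barriers and the NULL prev_ptr case are omitted for brevity):
 *
 *   while (prev_ptr) {                       // NULL means someone else removed it
 *           next = frag->next;
 *           // 1) logical delete: tag our next pointer (skip if already tagged)
 *           if (!get_mark (next) &&
 *               InterlockedCompareExchangePointer ((volatile gpointer*)&frag->next,
 *                                                  mask (next, 1), next) != next)
 *                   continue;                // raced with a concurrent removal, retry
 *           // 2) physical unlink: swing the predecessor past this node
 *           if (InterlockedCompareExchangePointer ((volatile gpointer*)prev_ptr,
 *                                                  unmask (next), frag) == frag)
 *                   break;                   // done
 *           // the predecessor itself was removed: re-find it, which also
 *           // helps unlink marked nodes along the way
 *           prev_ptr = find_previous_pointer_fragment (allocator, frag);
 *   }
 */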
serial_alloc_from_fragment (SgenFragment **previous, SgenFragment *frag, size_t size)

        char *p = frag->fragment_next;
        char *end = p + size;

        if (end > frag->fragment_end)

        frag->fragment_next = end;

        if (frag->fragment_end - end < SGEN_MAX_NURSERY_WASTE) {
                *previous = frag->next;

                /* Clear the remaining space, pinning depends on this. FIXME move this to use phony arrays */
                memset (end, 0, frag->fragment_end - end);

                *previous = frag->next;

sgen_fragment_allocator_par_alloc (SgenFragmentAllocator *allocator, size_t size)

        InterlockedIncrement (&alloc_count);

        for (frag = unmask (allocator->alloc_head); unmask (frag); frag = unmask (frag->next)) {
                HEAVY_STAT (InterlockedIncrement (&stat_alloc_iterations));

                if (size <= (size_t)(frag->fragment_end - frag->fragment_next)) {
                        void *p = par_alloc_from_fragment (allocator, frag, size);

                        HEAVY_STAT (InterlockedIncrement (&stat_alloc_retries));

                        add_alloc_record (p, size, FIXED_ALLOC);

sgen_fragment_allocator_serial_alloc (SgenFragmentAllocator *allocator, size_t size)

        SgenFragment **previous;

        InterlockedIncrement (&alloc_count);

        previous = &allocator->alloc_head;

        for (frag = *previous; frag; frag = *previous) {
                char *p = serial_alloc_from_fragment (previous, frag, size);

                HEAVY_STAT (InterlockedIncrement (&stat_alloc_iterations));

                add_alloc_record (p, size, FIXED_ALLOC);

                previous = &frag->next;

sgen_fragment_allocator_serial_range_alloc (SgenFragmentAllocator *allocator, size_t desired_size, size_t minimum_size, size_t *out_alloc_size)

        SgenFragment *frag, **previous, *min_frag = NULL, **prev_min_frag = NULL;
        size_t current_minimum = minimum_size;

        InterlockedIncrement (&alloc_count);

        previous = &allocator->alloc_head;

        for (frag = *previous; frag; frag = *previous) {
                size_t frag_size = frag->fragment_end - frag->fragment_next;

                HEAVY_STAT (InterlockedIncrement (&stat_alloc_range_iterations));

                if (desired_size <= frag_size) {
                        *out_alloc_size = desired_size;

                        p = serial_alloc_from_fragment (previous, frag, desired_size);

                        add_alloc_record (p, desired_size, RANGE_ALLOC);

                if (current_minimum <= frag_size) {
                        prev_min_frag = previous;
                        current_minimum = frag_size;

                previous = &frag->next;

        size_t frag_size = min_frag->fragment_end - min_frag->fragment_next;
        *out_alloc_size = frag_size;

        p = serial_alloc_from_fragment (prev_min_frag, min_frag, frag_size);

        add_alloc_record (p, frag_size, RANGE_ALLOC);

sgen_fragment_allocator_par_range_alloc (SgenFragmentAllocator *allocator, size_t desired_size, size_t minimum_size, size_t *out_alloc_size)

        SgenFragment *frag, *min_frag;
        size_t current_minimum;

        current_minimum = minimum_size;

        InterlockedIncrement (&alloc_count);

        for (frag = unmask (allocator->alloc_head); frag; frag = unmask (frag->next)) {
                size_t frag_size = frag->fragment_end - frag->fragment_next;

                HEAVY_STAT (InterlockedIncrement (&stat_alloc_range_iterations));

                if (desired_size <= frag_size) {
                        *out_alloc_size = desired_size;

                        p = par_alloc_from_fragment (allocator, frag, desired_size);

                        HEAVY_STAT (InterlockedIncrement (&stat_alloc_range_retries));

                        add_alloc_record (p, desired_size, RANGE_ALLOC);

                if (current_minimum <= frag_size) {
                        current_minimum = frag_size;

        /* The second fragment_next read should be ordered with respect to the first code block */
        mono_memory_barrier ();

        frag_size = min_frag->fragment_end - min_frag->fragment_next;
        if (frag_size < minimum_size)

        *out_alloc_size = frag_size;

        mono_memory_barrier ();
        p = par_alloc_from_fragment (allocator, min_frag, frag_size);

        /* XXX restarting here is quite dubious given this is already second chance allocation. */

        HEAVY_STAT (InterlockedIncrement (&stat_alloc_retries));

        add_alloc_record (p, frag_size, RANGE_ALLOC);

sgen_clear_allocator_fragments (SgenFragmentAllocator *allocator)

        for (frag = unmask (allocator->alloc_head); frag; frag = unmask (frag->next)) {
                SGEN_LOG (4, "Clear nursery frag %p-%p", frag->fragment_next, frag->fragment_end);
                sgen_clear_range (frag->fragment_next, frag->fragment_end);

                add_alloc_record (frag->fragment_next, frag->fragment_end - frag->fragment_next, CLEAR_NURSERY_FRAGS);

/* Clear all remaining nursery fragments */
sgen_clear_nursery_fragments (void)

        if (sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION || sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION_DEBUG) {
                sgen_clear_allocator_fragments (&mutator_allocator);
                sgen_minor_collector.clear_fragments ();

/*
 * Mark a given range of memory as invalid.
 *
 * This can be done either by zeroing memory or by placing
 * a phony byte[] array. This keeps the heap forward walkable.
 *
 * This function ignores calls with a zero range, even if
 * both start and end are NULL.
 */
sgen_clear_range (char *start, char *end)

        size_t size = end - start;

        if ((start && !end) || (start > end))
                g_error ("Invalid range [%p %p]", start, end);

        if (sgen_client_array_fill_range (start, size)) {
                sgen_set_nursery_scan_start (start);
                SGEN_ASSERT (0, start + sgen_safe_object_get_size ((MonoObject*)start) == end, "Array fill produced wrong size");
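
/*
 * Hedged sketch of what "placing a phony byte[] array" means in practice.
 * The real work happens in sgen_client_array_fill_range (); the field and
 * vtable names below are illustrative assumptions, not the actual API:
 *
 *   // Turn [start, start + size) into a plausible byte[] object so that a
 *   // linear walk of the nursery can step over the dead region:
 *   MonoArray *filler = (MonoArray*)start;
 *   filler->obj.vtable = byte_array_fill_vtable;     // assumed cached vtable
 *   filler->max_length = size - array_header_size;   // payload after the header
 *   // sgen_safe_object_get_size () on the filler now reports exactly `size',
 *   // which is what the SGEN_ASSERT above verifies.
 */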
sgen_nursery_allocator_prepare_for_pinning (void)

        sgen_clear_allocator_fragments (&mutator_allocator);
        sgen_minor_collector.clear_fragments ();

static mword fragment_total = 0;

/*
 * We found a fragment of free memory in the nursery: memzero it and if
 * it is big enough, add it to the list of fragments that can be used for
 * allocation.
 */
add_nursery_frag (SgenFragmentAllocator *allocator, size_t frag_size, char* frag_start, char* frag_end)

        SGEN_LOG (4, "Found empty fragment: %p-%p, size: %zd", frag_start, frag_end, frag_size);
        binary_protocol_empty (frag_start, frag_size);
        MONO_GC_NURSERY_SWEPT ((mword)frag_start, frag_end - frag_start);
        /* Not worth dealing with smaller fragments: need to tune */
        if (frag_size >= SGEN_MAX_NURSERY_WASTE) {
                /* memsetting just the first chunk start is bound to provide better cache locality */
                if (sgen_get_nursery_clear_policy () == CLEAR_AT_GC)
                        memset (frag_start, 0, frag_size);
                else if (sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION_DEBUG)
                        memset (frag_start, 0xff, frag_size);

                /* XXX convert this into a flight record entry
                printf ("\tfragment [%p %p] size %zd\n", frag_start, frag_end, frag_size);
                */
                sgen_fragment_allocator_add (allocator, frag_start, frag_end);
                fragment_total += frag_size;
        } else {
                /* Clear unused fragments, pinning depends on this */
                sgen_clear_range (frag_start, frag_end);
                HEAVY_STAT (InterlockedExchangeAdd (&stat_wasted_bytes_small_areas, frag_size));
        }

fragment_list_reverse (SgenFragmentAllocator *allocator)

        SgenFragment *prev = NULL, *list = allocator->region_head;

                SgenFragment *next = list->next;
                list->next_in_order = prev;

        allocator->region_head = allocator->alloc_head = prev;

sgen_build_nursery_fragments (GCMemSection *nursery_section, SgenGrayQueue *unpin_queue)

        char *frag_start, *frag_end;
        SgenFragment *frags_ranges;
        void **pin_start, **pin_entry, **pin_end;

        reset_alloc_records ();

        /* The mutator fragments are done. We no longer need them. */
        sgen_fragment_allocator_release (&mutator_allocator);

        frag_start = sgen_nursery_start;

        /* The current nursery might give us a fragment list to exclude [start, next[ */
        frags_ranges = sgen_minor_collector.build_fragments_get_exclude_head ();

        /* clear scan starts */
        memset (nursery_section->scan_starts, 0, nursery_section->num_scan_start * sizeof (gpointer));

        pin_start = pin_entry = sgen_pinning_get_entry (nursery_section->pin_queue_first_entry);
        pin_end = sgen_pinning_get_entry (nursery_section->pin_queue_last_entry);

        while (pin_entry < pin_end || frags_ranges) {

                addr0 = addr1 = sgen_nursery_end;
                if (pin_entry < pin_end)

                        addr1 = frags_ranges->fragment_start;

                        GRAY_OBJECT_ENQUEUE (unpin_queue, addr0, sgen_obj_get_descriptor_safe (addr0));

                        SGEN_UNPIN_OBJECT (addr0);
                        size = SGEN_ALIGN_UP (sgen_safe_object_get_size ((MonoObject*)addr0));
                        CANARIFY_SIZE (size);
                        sgen_set_nursery_scan_start (addr0);

                        size = frags_ranges->fragment_next - addr1;
                        frags_ranges = frags_ranges->next_in_order;

                frag_size = frag_end - frag_start;

                g_assert (frag_size >= 0);

                if (frag_size && size)
                        add_nursery_frag (&mutator_allocator, frag_size, frag_start, frag_end);

                add_alloc_record (*pin_entry, frag_size, PINNING);

                frag_start = frag_end + frag_size;

        nursery_last_pinned_end = frag_start;
        frag_end = sgen_nursery_end;
        frag_size = frag_end - frag_start;

        add_nursery_frag (&mutator_allocator, frag_size, frag_start, frag_end);

        /* Now it's safe to release the fragments exclude list. */
        sgen_minor_collector.build_fragments_release_exclude_head ();

        /* First we reorder the fragment list to be in ascending address order. This makes H/W prefetchers happier. */
        fragment_list_reverse (&mutator_allocator);

        /* The collector might want to do something with the final nursery fragment list. */
        sgen_minor_collector.build_fragments_finish (&mutator_allocator);

        if (!unmask (mutator_allocator.alloc_head)) {
                SGEN_LOG (1, "Nursery fully pinned");
                for (pin_entry = pin_start; pin_entry < pin_end; ++pin_entry) {
                        void *p = *pin_entry;
                        SGEN_LOG (3, "Bastard pinning obj %p (%s), size: %zd", p, sgen_safe_name (p), sgen_safe_object_get_size (p));

        return fragment_total;
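
/*
 * Worked example of the fragment construction above (addresses made up):
 * with a nursery [0x1000, 0x9000) and pinned objects at 0x2000 (size 0x100)
 * and 0x5000 (size 0x40), the loop hands add_nursery_frag () the free ranges
 *
 *   [0x1000, 0x2000)   before the first pinned object
 *   [0x2100, 0x5000)   between the two pinned objects
 *   [0x5040, 0x9000)   the trailing space up to sgen_nursery_end
 *
 * modulo SGEN_ALIGN_UP/CANARIFY_SIZE rounding of the pinned sizes and the
 * SGEN_MAX_NURSERY_WASTE threshold applied inside add_nursery_frag ().
 */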
sgen_nursery_alloc_get_upper_alloc_bound (void)

        /* FIXME we need to calculate the collector upper bound as well, but this must be done in the previous GC. */
        return sgen_nursery_end;

/*** Nursery memory allocation ***/

sgen_nursery_retire_region (void *address, ptrdiff_t size)

        HEAVY_STAT (InterlockedExchangeAdd (&stat_wasted_bytes_discarded_fragments, size));

sgen_can_alloc_size (size_t size)

        if (!SGEN_CAN_ALIGN_UP (size))

        size = SGEN_ALIGN_UP (size);

        for (frag = unmask (mutator_allocator.alloc_head); frag; frag = unmask (frag->next)) {
                if ((size_t)(frag->fragment_end - frag->fragment_next) >= size)

sgen_nursery_alloc (size_t size)

        SGEN_ASSERT (1, size >= sizeof (MonoObject) && size <= (SGEN_MAX_SMALL_OBJ_SIZE + CANARY_SIZE), "Invalid nursery object size");

        SGEN_LOG (4, "Searching nursery for size: %zd", size);
        size = SGEN_ALIGN_UP (size);

        HEAVY_STAT (InterlockedIncrement (&stat_nursery_alloc_requests));

        return sgen_fragment_allocator_par_alloc (&mutator_allocator, size);

sgen_nursery_alloc_range (size_t desired_size, size_t minimum_size, size_t *out_alloc_size)

        SGEN_LOG (4, "Searching for byte range desired size: %zd minimum size %zd", desired_size, minimum_size);

        HEAVY_STAT (InterlockedIncrement (&stat_nursery_alloc_range_requests));

        return sgen_fragment_allocator_par_range_alloc (&mutator_allocator, desired_size, minimum_size, out_alloc_size);
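
/*
 * Hedged caller-side sketch for the range API above (e.g. when refilling a
 * TLAB): ask for the preferred TLAB size but accept anything down to the
 * object size that triggered the refill.  `tlab_size' and `obj_size' are
 * illustrative names, not variables defined in this file.
 *
 *   size_t alloc_size;
 *   void *tlab = sgen_nursery_alloc_range (tlab_size, obj_size, &alloc_size);
 *   if (!tlab) {
 *           // nursery exhausted: request a minor collection and retry
 *   } else {
 *           // [tlab, (char*)tlab + alloc_size) now belongs to this thread
 *   }
 */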
/*** Initialization ***/

#ifdef HEAVY_STATISTICS

sgen_nursery_allocator_init_heavy_stats (void)

        mono_counters_register ("bytes wasted trailer fragments", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_wasted_bytes_trailer);
        mono_counters_register ("bytes wasted small areas", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_wasted_bytes_small_areas);
        mono_counters_register ("bytes wasted discarded fragments", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_wasted_bytes_discarded_fragments);

        mono_counters_register ("# nursery alloc requests", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_nursery_alloc_requests);
        mono_counters_register ("# nursery alloc iterations", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_alloc_iterations);
        mono_counters_register ("# nursery alloc retries", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_alloc_retries);

        mono_counters_register ("# nursery alloc range requests", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_nursery_alloc_range_requests);
        mono_counters_register ("# nursery alloc range iterations", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_alloc_range_iterations);
        mono_counters_register ("# nursery alloc range retries", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_alloc_range_retries);

#endif

sgen_init_nursery_allocator (void)

        sgen_register_fixed_internal_mem_type (INTERNAL_MEM_FRAGMENT, sizeof (SgenFragment));

        alloc_records = sgen_alloc_os_memory (sizeof (AllocRecord) * ALLOC_RECORD_COUNT, SGEN_ALLOC_INTERNAL | SGEN_ALLOC_ACTIVATE, "debugging memory");

sgen_nursery_alloc_prepare_for_minor (void)

        sgen_minor_collector.prepare_to_space (sgen_space_bitmap, sgen_space_bitmap_size);

sgen_nursery_alloc_prepare_for_major (void)

        sgen_minor_collector.prepare_to_space (sgen_space_bitmap, sgen_space_bitmap_size);

sgen_nursery_allocator_set_nursery_bounds (char *start, char *end)

        sgen_nursery_start = start;
        sgen_nursery_end = end;

        /*
         * This will not divide evenly for tiny nurseries (<4kb), so we make sure to be on
         * the right side of things and round up. We could just do a MAX(1,x) instead,
         * since the nursery size must be a power of 2.
         */
        sgen_space_bitmap_size = (end - start + SGEN_TO_SPACE_GRANULE_IN_BYTES * 8 - 1) / (SGEN_TO_SPACE_GRANULE_IN_BYTES * 8);
        sgen_space_bitmap = g_malloc0 (sgen_space_bitmap_size);
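
        /*
         * Worked example of the rounding above (the granule size is an assumption
         * here; substitute the real SGEN_TO_SPACE_GRANULE_IN_BYTES): with a
         * 512-byte granule each bitmap byte covers 512 * 8 = 4096 nursery bytes,
         * so a 4MB nursery needs (4194304 + 4095) / 4096 = 1024 bitmap bytes,
         * while a tiny 1KB nursery still gets (1024 + 4095) / 4096 = 1 byte
         * instead of rounding down to 0.
         */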
        /* Setup the single first large fragment */
        sgen_minor_collector.init_nursery (&mutator_allocator, start, end);