process_fin_stage_entries ();
process_dislink_stage_entries ();
- clear_nursery_fragments (nursery_next);
+ mono_sgen_clear_nursery_fragments (nursery_next);
if (xdomain_checks && domain != mono_get_root_domain ()) {
scan_for_registered_roots_in_domain (domain, ROOT_TYPE_NORMAL);
nursery_section = section;
- /* Setup the single first large fragment */
- add_fragment (nursery_start, nursery_end);
+ mono_sgen_nursery_allocator_set_nursery_bounds (nursery_start, nursery_end);
}
void*
ScanFromRemsetsJobData sfrjd;
ScanFromRegisteredRootsJobData scrrjd_normal, scrrjd_wbarrier;
ScanThreadDataJobData stdjd;
+ mword fragment_total;
TV_DECLARE (all_atv);
TV_DECLARE (all_btv);
TV_DECLARE (atv);
atv = all_atv;
/* Pinning no longer depends on clearing all nursery fragments */
- clear_current_nursery_fragment (orig_nursery_next);
+ mono_sgen_clear_current_nursery_fragment (orig_nursery_next);
TV_GETTIME (btv);
time_minor_pre_collection_fragment_clear += TV_ELAPSED_MS (atv, btv);
* next allocations.
*/
mono_profiler_gc_event (MONO_GC_EVENT_RECLAIM_START, 0);
- build_nursery_fragments (pin_queue, next_pin_slot);
+ fragment_total = mono_sgen_build_nursery_fragments (pin_queue, next_pin_slot);
+ /* Clear TLABs for all threads */
+ clear_tlabs ();
+
mono_profiler_gc_event (MONO_GC_EVENT_RECLAIM_END, 0);
TV_GETTIME (btv);
time_minor_fragment_creation += TV_ELAPSED_MS (atv, btv);
atv = all_atv;
/* Pinning depends on this */
- clear_nursery_fragments (nursery_next);
+ mono_sgen_clear_nursery_fragments (nursery_next);
TV_GETTIME (btv);
time_major_pre_collection_fragment_clear += TV_ELAPSED_MS (atv, btv);
* pinned objects as we go, memzero() the empty fragments so they are ready for the
* next allocations.
*/
- build_nursery_fragments (nursery_section->pin_queue_start, nursery_section->pin_queue_num_entries);
+ mono_sgen_build_nursery_fragments (nursery_section->pin_queue_start, nursery_section->pin_queue_num_entries);
+ /* Clear TLABs for all threads */
+ clear_tlabs ();
TV_GETTIME (atv);
time_major_fragment_creation += TV_ELAPSED_MS (btv, atv);
DEBUG (2, fprintf (gc_debug_file, "Heap size: %lu, LOS size: %lu\n", (unsigned long)total_alloc, (unsigned long)los_memory_usage));
restart_world (0);
/* this also sets the proper pointers for the next allocation */
- if (!alloc_fragment_for_size (size)) {
+ if (!mono_sgen_alloc_fragment_for_size (size)) {
int i;
/* TypeBuilder and MonoMethod are killing mcs with fragmentation */
DEBUG (1, fprintf (gc_debug_file, "nursery collection didn't find enough room for %zd alloc (%d pinned)\n", size, last_num_pinned));
collect_nursery (0);
restart_world (0);
mono_profiler_gc_event (MONO_GC_EVENT_END, 0);
- if (!degraded_mode && !alloc_fragment_for_size (size) && size <= MAX_SMALL_OBJ_SIZE) {
+ if (!degraded_mode && !mono_sgen_alloc_fragment_for_size (size) && size <= MAX_SMALL_OBJ_SIZE) {
// FIXME:
g_assert_not_reached ();
}
if (size > tlab_size) {
/* Allocate directly from the nursery */
if (nursery_next + size >= nursery_frag_real_end) {
- if (!alloc_fragment_for_size (size)) {
+ if (!mono_sgen_alloc_fragment_for_size (size)) {
minor_collect_or_expand_inner (size);
if (degraded_mode) {
p = alloc_degraded (vtable, size);
if (available_in_nursery > MAX_NURSERY_TLAB_WASTE && available_in_nursery > size) {
alloc_size = available_in_nursery;
} else {
- alloc_size = alloc_fragment_for_size_range (tlab_size, size);
+ alloc_size = mono_sgen_alloc_fragment_for_size_range (tlab_size, size);
if (!alloc_size) {
alloc_size = tlab_size;
minor_collect_or_expand_inner (tlab_size);
hwi.callback = callback;
hwi.data = data;
- clear_nursery_fragments (nursery_next);
+ mono_sgen_clear_nursery_fragments (nursery_next);
mono_sgen_scan_area_with_callback (nursery_section->data, nursery_section->end_data, walk_references, &hwi, FALSE);
major_collector.iterate_objects (TRUE, TRUE, walk_references, &hwi);
void mono_sgen_los_scan_card_table (SgenGrayQueue *queue) MONO_INTERNAL;
FILE *mono_sgen_get_logfile (void) MONO_INTERNAL;
+/* nursery allocator */
+
+/* Clear (memzero) all remaining nursery fragments; @next is the current allocation pointer */
+void mono_sgen_clear_nursery_fragments (char *next) MONO_INTERNAL;
+void mono_sgen_nursery_allocator_prepare_for_pinning (void) MONO_INTERNAL;
+/* Clear only the fragment currently being allocated from, up to @next */
+void mono_sgen_clear_current_nursery_fragment (char *next) MONO_INTERNAL;
+/* Register [nursery_start, nursery_end) with the allocator as one initial large fragment */
+void mono_sgen_nursery_allocator_set_nursery_bounds (char *nursery_start, char *nursery_end) MONO_INTERNAL;
+/* Rebuild the fragment list around the @num_entries pinned objects in @start;
+ * returns the total fragment space found */
+mword mono_sgen_build_nursery_fragments (void **start, int num_entries) MONO_INTERNAL;
+void mono_sgen_init_nursery_allocator (void) MONO_INTERNAL;
+void mono_sgen_nursery_allocator_init_heavy_stats (void) MONO_INTERNAL;
+/* As mono_sgen_alloc_fragment_for_size, but falls back to @minimum_size when
+ * @desired_size cannot be satisfied; returns the size obtained, or 0 on failure */
+int mono_sgen_alloc_fragment_for_size_range (size_t desired_size, size_t minimum_size) MONO_INTERNAL;
+/* Find a fragment of at least @size bytes and make it current;
+ * returns TRUE if found, FALSE otherwise */
+gboolean mono_sgen_alloc_fragment_for_size (size_t size) MONO_INTERNAL;
+
+
#endif /* HAVE_SGEN_GC */
#endif /* __MONO_SGENGC_H__ */
* Thread local allocation is done from areas of memory Hotspot calls Thread Local
* Allocation Buffers (TLABs).
*/
+
typedef struct _Fragment Fragment;
struct _Fragment {
#endif
-static gboolean alloc_fragment_for_size (size_t size);
-static int alloc_fragment_for_size_range (size_t desired_size, size_t minimum_size);
-static void clear_nursery_fragments (char *next);
-
static Fragment*
alloc_fragment (void)
{
fragment_freelist = frag;
}
-static void
-clear_current_nursery_fragment (char *next)
+void
+mono_sgen_clear_current_nursery_fragment (char *next)
{
if (nursery_clear_policy == CLEAR_AT_TLAB_CREATION) {
g_assert (next <= nursery_frag_real_end);
}
/* Clear all remaining nursery fragments */
-static void
-clear_nursery_fragments (char *next)
+void
+mono_sgen_clear_nursery_fragments (char *next)
{
Fragment *frag;
if (nursery_clear_policy == CLEAR_AT_TLAB_CREATION) {
}
}
-static void
+void
mono_sgen_nursery_allocator_prepare_for_pinning (void)
{
Fragment *frag;
}
-static void
-build_nursery_fragments (void **start, int num_entries)
+mword
+mono_sgen_build_nursery_fragments (void **start, int num_entries)
{
char *frag_start, *frag_end;
size_t frag_size;
}
nursery_next = nursery_frag_real_end = NULL;
-
- /* Clear TLABs for all threads */
- clear_tlabs ();
+ return fragment_total;
}
/*** Nursery memory allocation ***/
* nursery_next and nursery_frag_real_end are set to the boundaries of the fragment.
* Return TRUE if found, FALSE otherwise.
*/
-static gboolean
-alloc_fragment_for_size (size_t size)
+gboolean
+mono_sgen_alloc_fragment_for_size (size_t size)
{
Fragment *frag, *prev;
DEBUG (4, fprintf (gc_debug_file, "Searching nursery fragment %p, size: %zd\n", nursery_frag_real_end, size));
* Same as alloc_fragment_for_size but if search for @desired_size fails, try to satisfy @minimum_size.
* This improves nursery usage.
*/
-static int
-alloc_fragment_for_size_range (size_t desired_size, size_t minimum_size)
+int
+mono_sgen_alloc_fragment_for_size_range (size_t desired_size, size_t minimum_size)
{
Fragment *frag, *prev, *min_prev;
DEBUG (4, fprintf (gc_debug_file, "Searching nursery fragment %p, desired size: %zd minimum size %zd\n", nursery_frag_real_end, desired_size, minimum_size));
#ifdef HEAVY_STATISTICS
-static void
+void
mono_sgen_nursery_allocator_init_heavy_stats (void)
{
mono_counters_register ("# wasted fragments used", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_wasted_fragments_used);
#endif
-static void
+void
mono_sgen_init_nursery_allocator (void)
{
mono_sgen_register_fixed_internal_mem_type (INTERNAL_MEM_FRAGMENT, sizeof (Fragment));
}
+
+/*
+ * Publish the nursery address range to the allocator by registering
+ * [nursery_start, nursery_end) as the single initial large fragment
+ * from which all nursery allocation proceeds.
+ */
+void
+mono_sgen_nursery_allocator_set_nursery_bounds (char *nursery_start, char *nursery_end)
+{
+ /* Setup the single first large fragment */
+ add_fragment (nursery_start, nursery_end);
+}