static LOCK_DECLARE (interruption_mutex);
static LOCK_DECLARE (global_remset_mutex);
+static LOCK_DECLARE (pin_queue_mutex);
#define LOCK_GLOBAL_REMSET pthread_mutex_lock (&global_remset_mutex)
#define UNLOCK_GLOBAL_REMSET pthread_mutex_unlock (&global_remset_mutex)
+#define LOCK_PIN_QUEUE pthread_mutex_lock (&pin_queue_mutex)
+#define UNLOCK_PIN_QUEUE pthread_mutex_unlock (&pin_queue_mutex)
+
typedef struct _FinalizeEntry FinalizeEntry;
struct _FinalizeEntry {
FinalizeEntry *next;
static mword roots_size = 0; /* amount of memory in the root set */
static int num_roots_entries [ROOT_TYPE_NUM] = { 0, 0, 0 };
+#define GC_ROOT_NUM 32
+typedef struct {
+ int count;
+ void *objects [GC_ROOT_NUM];
+ int root_types [GC_ROOT_NUM];
+ uintptr_t extra_info [GC_ROOT_NUM];
+} GCRootReport;
+
+/*
+ * Flush any buffered root entries in REPORT to the profiler and reset
+ * the buffer so it can be refilled. A no-op when the buffer is empty.
+ */
+static void
+notify_gc_roots (GCRootReport *report)
+{
+	if (report->count > 0) {
+		mono_profiler_gc_roots (report->count, report->objects, report->root_types, report->extra_info);
+		report->count = 0;
+	}
+}
+
+/*
+ * Buffer one root (OBJECT, RTYPE) in REPORT for profiler reporting,
+ * flushing the buffer via notify_gc_roots () first if it is full.
+ * NOTE(review): the extra_info argument is ignored — the object's class
+ * pointer is reported as the extra info instead; confirm with callers
+ * that this is intentional.
+ */
+static void
+add_profile_gc_root (GCRootReport *report, void *object, int rtype, uintptr_t extra_info)
+{
+	if (report->count == GC_ROOT_NUM)
+		notify_gc_roots (report);
+	report->objects [report->count] = object;
+	report->root_types [report->count] = rtype;
+	/* explicit cast: extra_info[] holds uintptr_t, klass is a pointer */
+	report->extra_info [report->count++] = (uintptr_t)((MonoVTable*)LOAD_VTABLE (object))->klass;
+}
+
/*
* The current allocation cursors
* We allocate objects in the nursery.
/* Vtable of the objects used to fill out nursery fragments before a collection */
static MonoVTable *array_fill_vtable;
+/*
+ * ######################################################################
+ * ######## Heap size accounting
+ * ######################################################################
+ */
+/*heap limits*/
+static mword max_heap_size = ((mword)0)- ((mword)1);
+static mword allocated_heap;
+
+/*Object was pinned during the current collection*/
+static mword objects_pinned;
+
+/*
+ * Return SIZE bytes to the heap budget tracked in allocated_heap.
+ * SPACE identifies the caller's memory space; it is currently unused.
+ */
+void
+mono_sgen_release_space (mword size, int space)
+{
+	allocated_heap = allocated_heap - size;
+}
+
+/*
+ * Bytes still available under max_heap_size, or 0 when the budget is
+ * already used up (guards against unsigned underflow).
+ */
+static size_t
+available_free_space (void)
+{
+	if (allocated_heap >= max_heap_size)
+		return 0;
+	return max_heap_size - allocated_heap;
+}
+
+/*
+ * Try to reserve SIZE bytes of the heap budget. Returns TRUE and bumps
+ * allocated_heap on success, FALSE if the budget would be exceeded.
+ * SPACE identifies the caller's memory space; it is currently unused.
+ */
+gboolean
+mono_sgen_try_alloc_space (mword size, int space)
+{
+	if (size <= available_free_space ()) {
+		allocated_heap += size;
+		return TRUE;
+	}
+	return FALSE;
+}
+
+/*
+ * Configure the heap limit from the max-heap-size GC parameter.
+ * MAX_HEAP of 0 means "unlimited" (max_heap_size keeps its default).
+ * The limit must be at least 4x the nursery size; the nursery is
+ * excluded from the tracked budget. Exits the process on bad input.
+ */
+static void
+init_heap_size_limits (glong max_heap)
+{
+	if (!max_heap)
+		return;
+
+	if (max_heap >= nursery_size * 4) {
+		max_heap_size = max_heap - nursery_size;
+		return;
+	}
+
+	fprintf (stderr, "max-heap-size must be at least 4 times larger than nursery size.\n");
+	exit (1);
+}
+
/*
* ######################################################################
* ######## Macros and function declarations.
static void scan_from_remsets (void *start_nursery, void *end_nursery, GrayQueue *queue);
static void scan_from_registered_roots (CopyOrMarkObjectFunc copy_func, char *addr_start, char *addr_end, int root_type, GrayQueue *queue);
static void scan_finalizer_entries (CopyOrMarkObjectFunc copy_func, FinalizeEntry *list, GrayQueue *queue);
+static void report_finalizer_roots (void);
+static void report_registered_roots (void);
static void find_pinning_ref_from_thread (char *obj, size_t size);
static void update_current_thread_stack (void *start);
static void finalize_in_range (CopyOrMarkObjectFunc copy_func, char *start, char *end, int generation, GrayQueue *queue);
static void sort_addresses (void **array, int size);
static void drain_gray_stack (GrayQueue *queue);
static void finish_gray_stack (char *start_addr, char *end_addr, int generation, GrayQueue *queue);
-static gboolean need_major_collection (void);
+static gboolean need_major_collection (mword space_needed);
static void major_collection (const char *reason);
static void mono_gc_register_disappearing_link (MonoObject *obj, void **link, gboolean track);
start++;
}
//printf ("effective pinned: %d (at the end: %d)\n", count, (char*)end_nursery - (char*)last);
+ if (mono_profiler_get_events () & MONO_PROFILE_GC_ROOTS) {
+ GCRootReport report;
+ report.count = 0;
+ for (idx = 0; idx < count; ++idx)
+ add_profile_gc_root (&report, definitely_pinned [idx], MONO_PROFILE_GC_ROOT_PINNING, 0);
+ notify_gc_roots (&report);
+ }
return count;
}
}
}
+
+/*
+ * Pin OBJECT, stage it on the pin queue and push it on the gray queue.
+ * In the parallel collector the pin queue is shared, so staging is done
+ * under pin_queue_mutex and the object is expected to arrive already
+ * pinned; the serial path pins it here itself.
+ */
+void
+mono_sgen_pin_object (void *object, GrayQueue *queue)
+{
+	if (!major_collector.is_parallel) {
+		SGEN_PIN_OBJECT (object);
+		pin_stage_ptr (object);
+		++objects_pinned;
+	} else {
+		LOCK_PIN_QUEUE;
+		/*object arrives pinned*/
+		pin_stage_ptr (object);
+		++objects_pinned;
+		UNLOCK_PIN_QUEUE;
+	}
+	GRAY_OBJECT_ENQUEUE (queue, object);
+}
+
/* Sort the addresses in array in increasing order.
* Done using a by-the book heap sort. Which has decent and stable performance, is pretty cache efficient.
*/
return nursery_start;
}
+/*
+ * Report every live object on the finalizer list LIST to the profiler
+ * as a MONO_PROFILE_GC_ROOT_FINALIZER root, skipping cleared entries.
+ */
+static void
+report_finalizer_roots_list (FinalizeEntry *list)
+{
+	GCRootReport report;
+	FinalizeEntry *entry;
+
+	report.count = 0;
+	for (entry = list; entry; entry = entry->next) {
+		if (entry->object)
+			add_profile_gc_root (&report, entry->object, MONO_PROFILE_GC_ROOT_FINALIZER, 0);
+	}
+	notify_gc_roots (&report);
+}
+
+/* Report the objects on both finalizer queues (ready and critical) as GC roots. */
+static void
+report_finalizer_roots (void)
+{
+ report_finalizer_roots_list (fin_ready_list);
+ report_finalizer_roots_list (critical_fin_list);
+}
+
+static GCRootReport *root_report;
+
+/*
+ * MonoGCRootMarkFunc callback used by the ROOT_DESC_USER case: forwards
+ * a single non-NULL root slot to the report stashed in root_report.
+ */
+static void
+single_arg_report_root (void **obj)
+{
+	void *o = *obj;
+	if (!o)
+		return;
+	add_profile_gc_root (root_report, o, MONO_PROFILE_GC_ROOT_OTHER, 0);
+}
+
+/*
+ * Walk the registered root range [start_root, end_root) according to
+ * its descriptor DESC and report every non-NULL slot to the profiler
+ * via REPORT. Mirrors the precise-scan logic, but only reports.
+ * ROOT_DESC_RUN_LEN descriptors are not expected here (assert).
+ */
+static void
+precisely_report_roots_from (GCRootReport *report, void** start_root, void** end_root, mword desc)
+{
+ switch (desc & ROOT_DESC_TYPE_MASK) {
+ case ROOT_DESC_BITMAP:
+ /* inline bitmap: one bit per pointer-sized slot, LSB first */
+ desc >>= ROOT_DESC_TYPE_SHIFT;
+ while (desc) {
+ if ((desc & 1) && *start_root) {
+ add_profile_gc_root (report, *start_root, MONO_PROFILE_GC_ROOT_OTHER, 0);
+ }
+ desc >>= 1;
+ start_root++;
+ }
+ return;
+ case ROOT_DESC_COMPLEX: {
+ /* out-of-line bitmap: first word is the word count + 1, the rest
+ * are bitmap words, each covering GC_BITS_PER_WORD slots */
+ gsize *bitmap_data = complex_descriptors + (desc >> ROOT_DESC_TYPE_SHIFT);
+ int bwords = (*bitmap_data) - 1;
+ void **start_run = start_root;
+ bitmap_data++;
+ while (bwords-- > 0) {
+ gsize bmap = *bitmap_data++;
+ void **objptr = start_run;
+ while (bmap) {
+ if ((bmap & 1) && *objptr) {
+ add_profile_gc_root (report, *objptr, MONO_PROFILE_GC_ROOT_OTHER, 0);
+ }
+ bmap >>= 1;
+ ++objptr;
+ }
+ start_run += GC_BITS_PER_WORD;
+ }
+ break;
+ }
+ case ROOT_DESC_USER: {
+ /* user-supplied marker callback; root_report passes the report to
+ * single_arg_report_root since the callback takes no user data */
+ MonoGCRootMarkFunc marker = user_descriptors [desc >> ROOT_DESC_TYPE_SHIFT];
+ root_report = report;
+ marker (start_root, single_arg_report_root);
+ break;
+ }
+ case ROOT_DESC_RUN_LEN:
+ g_assert_not_reached ();
+ default:
+ g_assert_not_reached ();
+ }
+}
+
+/*
+ * Report every registered root of kind ROOT_TYPE to the profiler by
+ * walking its hash table and decoding each record's root descriptor.
+ */
+static void
+report_registered_roots_by_type (int root_type)
+{
+	GCRootReport report;
+	RootRecord *root;
+	int i;
+
+	report.count = 0;
+	for (i = 0; i < roots_hash_size [root_type]; ++i) {
+		for (root = roots_hash [root_type][i]; root; root = root->next) {
+			DEBUG (6, fprintf (gc_debug_file, "Precise root scan %p-%p (desc: %p)\n", root->start_root, root->end_root, (void*)root->root_desc));
+			precisely_report_roots_from (&report, (void**)root->start_root, (void**)root->end_root, root->root_desc);
+		}
+	}
+	notify_gc_roots (&report);
+}
+
+/* Report all precisely-scanned registered roots (normal and write-barrier). */
+static void
+report_registered_roots (void)
+{
+ report_registered_roots_by_type (ROOT_TYPE_NORMAL);
+ report_registered_roots_by_type (ROOT_TYPE_WBARRIER);
+}
+
static void
scan_finalizer_entries (CopyOrMarkObjectFunc copy_func, FinalizeEntry *list, GrayQueue *queue)
{
}
static gboolean
-need_major_collection (void)
+need_major_collection (mword space_needed)
{
mword los_alloced = los_memory_usage - MIN (last_los_memory_usage, los_memory_usage);
- return minor_collection_sections_alloced * major_collector.section_size + los_alloced > minor_collection_allowance;
+ return (space_needed > available_free_space ()) ||
+ minor_collection_sections_alloced * major_collector.section_size + los_alloced > minor_collection_allowance;
}
/*
static gboolean
collect_nursery (size_t requested_size)
{
+ gboolean needs_major;
size_t max_garbage_amount;
char *orig_nursery_next;
TV_DECLARE (all_atv);
check_scan_starts ();
degraded_mode = 0;
+ objects_pinned = 0;
orig_nursery_next = nursery_next;
nursery_next = MAX (nursery_next, nursery_last_pinned_end);
/* FIXME: optimize later to use the higher address where an object can be present */
drain_gray_stack (&gray_queue);
+ if (mono_profiler_get_events () & MONO_PROFILE_GC_ROOTS)
+ report_registered_roots ();
+ if (mono_profiler_get_events () & MONO_PROFILE_GC_ROOTS)
+ report_finalizer_roots ();
TV_GETTIME (atv);
time_minor_scan_pinned += TV_ELAPSED_MS (btv, atv);
/* registered roots, this includes static fields */
time_minor_finish_gray_stack += TV_ELAPSED_MS (btv, atv);
mono_profiler_gc_event (MONO_GC_EVENT_MARK_END, 0);
+ if (objects_pinned) {
+ evacuate_pin_staging_area ();
+ optimize_pin_queue (0);
+ nursery_section->pin_queue_start = pin_queue;
+ nursery_section->pin_queue_num_entries = next_pin_slot;
+ }
+
/* walk the pin_queue, build up the fragment list of free memory, unmark
* pinned objects as we go, memzero() the empty fragments so they are ready for the
* next allocations.
binary_protocol_flush_buffers (FALSE);
+ /*objects are late pinned because of lack of memory, so a major is a good call*/
+ needs_major = need_major_collection (0) || objects_pinned;
current_collection_generation = -1;
+ objects_pinned = 0;
- return need_major_collection ();
+ return needs_major;
}
static void
char *heap_end = (char*)-1;
int old_num_major_sections = major_collector.get_num_major_sections ();
int num_major_sections, num_major_sections_saved, save_target, allowance_target;
+ int old_next_pin_slot;
mword los_memory_saved, los_memory_alloced, old_los_memory_usage;
mono_perfcounters->gc_collections1++;
*/
los_memory_alloced = los_memory_usage - MIN (last_los_memory_usage, los_memory_usage);
old_los_memory_usage = los_memory_usage;
+ objects_pinned = 0;
//count_ref_nonref_objs ();
//consistency_check ();
/* we should also coalesce scanning from sections close to each other
* and deal with pointers outside of the sections later.
*/
+
+ if (major_collector.start_major_collection)
+ major_collector.start_major_collection ();
+
/* The remsets are not useful for a major collection */
clear_remsets ();
global_remset_cache_clear ();
/* second pass for the sections */
mono_sgen_pin_objects_in_section (nursery_section, WORKERS_DISTRIBUTE_GRAY_QUEUE);
major_collector.pin_objects (WORKERS_DISTRIBUTE_GRAY_QUEUE);
+ old_next_pin_slot = next_pin_slot;
TV_GETTIME (btv);
time_major_pinning += TV_ELAPSED_MS (atv, btv);
workers_start_all_workers (1);
+ if (mono_profiler_get_events () & MONO_PROFILE_GC_ROOTS)
+ report_registered_roots ();
TV_GETTIME (atv);
time_major_scan_pinned += TV_ELAPSED_MS (btv, atv);
TV_GETTIME (btv);
time_major_scan_alloc_pinned += TV_ELAPSED_MS (atv, btv);
+ if (mono_profiler_get_events () & MONO_PROFILE_GC_ROOTS)
+ report_finalizer_roots ();
/* scan the list of objects ready for finalization */
scan_finalizer_entries (major_collector.copy_or_mark_object, fin_ready_list, WORKERS_DISTRIBUTE_GRAY_QUEUE);
scan_finalizer_entries (major_collector.copy_or_mark_object, critical_fin_list, WORKERS_DISTRIBUTE_GRAY_QUEUE);
TV_GETTIME (atv);
time_major_finish_gray_stack += TV_ELAPSED_MS (btv, atv);
+ if (objects_pinned) {
+ /*This is slow, but we just OOM'd*/
+ mono_sgen_pin_queue_clear_discarded_entries (nursery_section, old_next_pin_slot);
+ evacuate_pin_staging_area ();
+ optimize_pin_queue (0);
+ mono_sgen_find_section_pin_queue_start_end (nursery_section);
+ objects_pinned = 0;
+ }
+
/* sweep the big objects list */
prevbo = NULL;
for (bigobj = los_object_list; bigobj;) {
current_collection_generation = -1;
}
+/*
+ * Force a major collection; the GC lock must already be held by the
+ * caller ("no_lock"). Stops the world, runs the major collector with
+ * REASON, restarts the world, and brackets the work with profiler
+ * start/end events for generation 1.
+ */
+void
+sgen_collect_major_no_lock (const char *reason)
+{
+ mono_profiler_gc_event (MONO_GC_EVENT_START, 1);
+ stop_world (1);
+ major_collection (reason);
+ restart_world (1);
+ mono_profiler_gc_event (MONO_GC_EVENT_END, 1);
+}
+
/*
* When deciding if it's better to collect or to expand, keep track
* of how much garbage was reclaimed with the last collection: if it's too
static void*
alloc_degraded (MonoVTable *vtable, size_t size)
{
- if (need_major_collection ()) {
+ if (need_major_collection (0)) {
mono_profiler_gc_event (MONO_GC_EVENT_START, 1);
stop_world (1);
major_collection ("degraded overflow");
}
}
- DEBUG (6, fprintf (gc_debug_file, "Allocated object %p, vtable: %p (%s), size: %zd\n", p, vtable, vtable->klass->name, size));
- binary_protocol_alloc (p, vtable, size);
- *p = vtable;
+ if (G_LIKELY (p)) {
+ DEBUG (6, fprintf (gc_debug_file, "Allocated object %p, vtable: %p (%s), size: %zd\n", p, vtable, vtable->klass->name, size));
+ binary_protocol_alloc (p, vtable, size);
+ *p = vtable;
+ }
return p;
}
LOCK_GC;
res = mono_gc_alloc_obj_nolock (vtable, size);
UNLOCK_GC;
+ if (G_UNLIKELY (!res))
+ return mono_gc_out_of_memory (size);
return res;
}
LOCK_GC;
arr = mono_gc_alloc_obj_nolock (vtable, size);
+ if (G_UNLIKELY (!arr)) {
+ UNLOCK_GC;
+ return mono_gc_out_of_memory (size);
+ }
+
arr->max_length = max_length;
UNLOCK_GC;
LOCK_GC;
arr = mono_gc_alloc_obj_nolock (vtable, size);
+ if (G_UNLIKELY (!arr)) {
+ UNLOCK_GC;
+ return mono_gc_out_of_memory (size);
+ }
+
arr->max_length = max_length;
bounds = (MonoArrayBounds*)((char*)arr + size - bounds_size);
LOCK_GC;
str = mono_gc_alloc_obj_nolock (vtable, size);
+ if (G_UNLIKELY (!str)) {
+ UNLOCK_GC;
+ return mono_gc_out_of_memory (size);
+ }
+
str->length = len;
UNLOCK_GC;
void*
mono_gc_alloc_pinned_obj (MonoVTable *vtable, size_t size)
{
- /* FIXME: handle OOM */
void **p;
size = ALIGN_UP (size);
LOCK_GC;
+
if (size > MAX_SMALL_OBJ_SIZE) {
/* large objects are always pinned anyway */
p = alloc_large_inner (vtable, size);
DEBUG (9, g_assert (vtable->klass->inited));
p = major_collector.alloc_small_pinned_obj (size, vtable->klass->has_references);
}
- DEBUG (6, fprintf (gc_debug_file, "Allocated pinned object %p, vtable: %p (%s), size: %zd\n", p, vtable, vtable->klass->name, size));
- binary_protocol_alloc_pinned (p, vtable, size);
- *p = vtable;
+ if (G_LIKELY (p)) {
+ DEBUG (6, fprintf (gc_debug_file, "Allocated pinned object %p, vtable: %p (%s), size: %zd\n", p, vtable, vtable->klass->name, size));
+ binary_protocol_alloc_pinned (p, vtable, size);
+ *p = vtable;
+ }
UNLOCK_GC;
return p;
}
int count;
int called;
MonoObject *refs [REFS_SIZE];
+ uintptr_t offsets [REFS_SIZE];
} HeapWalkInfo;
#undef HANDLE_PTR
#define HANDLE_PTR(ptr,obj) do { \
if (*(ptr)) { \
if (hwi->count == REFS_SIZE) { \
- hwi->callback ((MonoObject*)start, mono_object_class (start), hwi->called? 0: size, hwi->count, hwi->refs, hwi->data); \
+ hwi->callback ((MonoObject*)start, mono_object_class (start), hwi->called? 0: size, hwi->count, hwi->refs, hwi->offsets, hwi->data); \
hwi->count = 0; \
hwi->called = 1; \
} \
+ hwi->offsets [hwi->count] = (char*)(ptr)-(char*)start; \
hwi->refs [hwi->count++] = *(ptr); \
} \
} while (0)
hwi->count = 0;
collect_references (hwi, start, size);
if (hwi->count || !hwi->called)
- hwi->callback ((MonoObject*)start, mono_object_class (start), hwi->called? 0: size, hwi->count, hwi->refs, hwi->data);
+ hwi->callback ((MonoObject*)start, mono_object_class (start), hwi->called? 0: size, hwi->count, hwi->refs, hwi->offsets, hwi->data);
}
/**
* This function can be used to iterate over all the live objects in the heap:
* for each object, @callback is invoked, providing info about the object's
* location in memory, its class, its size and the objects it references.
+ * For each referenced object its offset from the object address is
+ * reported in the offsets array.
* The object references may be buffered, so the callback may be invoked
* multiple times for the same object: in all but the first call, the size
* argument will be zero.
return result;
}
-/* Tries to extract a number from the passed string, taking in to account m, k
- * and g suffixes */
-gboolean
-mono_sgen_parse_environment_string_extract_number (const char *str, glong *out)
-{
- char *endptr;
- int len = strlen (str), shift = 0;
- glong val;
- gboolean is_suffix = FALSE;
- char suffix;
-
- switch (str [len - 1]) {
- case 'g':
- case 'G':
- shift += 10;
- case 'm':
- case 'M':
- shift += 10;
- case 'k':
- case 'K':
- shift += 10;
- is_suffix = TRUE;
- suffix = str [len - 1];
- break;
- }
-
- errno = 0;
- val = strtol (str, &endptr, 10);
-
- if ((errno == ERANGE && (val == LONG_MAX || val == LONG_MIN))
- || (errno != 0 && val == 0) || (endptr == str))
- return FALSE;
-
- if (is_suffix) {
- if (*(endptr + 1)) /* Invalid string. */
- return FALSE;
- val <<= shift;
- }
-
- *out = val;
- return TRUE;
-}
-
void
mono_gc_base_init (void)
{
char **opts, **ptr;
char *major_collector_opt = NULL;
struct sigaction sinfo;
+ glong max_heap = 0;
#ifdef PLATFORM_ANDROID
g_assert_not_reached ();
LOCK_INIT (interruption_mutex);
LOCK_INIT (global_remset_mutex);
+ LOCK_INIT (pin_queue_mutex);
if ((env = getenv ("MONO_GC_PARAMS"))) {
opts = g_strsplit (env, ",", -1);
}
continue;
}
+ if (g_str_has_prefix (opt, "max-heap-size=")) {
+ opt = strchr (opt, '=') + 1;
+ if (*opt && mono_gc_parse_environment_string_extract_number (opt, &max_heap)) {
+ if ((max_heap & (mono_pagesize () - 1))) {
+ fprintf (stderr, "max-heap-size size must be a multiple of %d.\n", mono_pagesize ());
+ exit (1);
+ }
+ } else {
+ fprintf (stderr, "max-heap-size must be an integer.\n");
+ exit (1);
+ }
+ continue;
+ }
#ifdef USER_CONFIG
if (g_str_has_prefix (opt, "nursery-size=")) {
long val;
opt = strchr (opt, '=') + 1;
- if (*opt && mono_sgen_parse_environment_string_extract_number (opt, &val)) {
+ if (*opt && mono_gc_parse_environment_string_extract_number (opt, &val)) {
default_nursery_size = val;
#ifdef SGEN_ALIGN_NURSERY
if ((val & (val - 1))) {
#endif
if (!(major_collector.handle_gc_param && major_collector.handle_gc_param (opt))) {
fprintf (stderr, "MONO_GC_PARAMS must be a comma-delimited list of one or more of the following:\n");
+ fprintf (stderr, " max-heap-size=N (where N is an integer, possibly with a k, m or a g suffix)\n");
fprintf (stderr, " nursery-size=N (where N is an integer, possibly with a k, m or a g suffix)\n");
fprintf (stderr, " major=COLLECTOR (where COLLECTOR is `marksweep', `marksweep-par' or `copying')\n");
fprintf (stderr, " wbarrier=WBARRIER (where WBARRIER is `remset' or `cardtable')\n");
nursery_size = DEFAULT_NURSERY_SIZE;
minor_collection_allowance = MIN_MINOR_COLLECTION_ALLOWANCE;
+ init_heap_size_limits (max_heap);
alloc_nursery ();