#include <unistd.h>
#include <stdio.h>
#include <string.h>
-#include <pthread.h>
#include <semaphore.h>
#include <signal.h>
#include <errno.h>
#include "metadata/threads.h"
#include "metadata/sgen-gc.h"
#include "metadata/mono-gc.h"
+#include "utils/mono-mmap.h"
+#ifdef HAVE_VALGRIND_MEMCHECK_H
+#include <valgrind/memcheck.h>
+#endif
/*
* ######################################################################
LOSObject *next;
mword size; /* this is the object size */
int dummy; /* to have a sizeof (LOSObject) a multiple of ALLOC_ALIGN and data starting at same alignment */
- int role;
+ guint16 role;
+ guint16 scanned;
char data [MONO_ZERO_LEN_ARRAY];
};
};
static __thread RememberedSet *remembered_set MONO_TLS_FAST;
+static pthread_key_t remembered_set_key;
static RememberedSet *global_remset;
static int store_to_global_remset = 0;
/* these bits are set in the object vtable: we could merge them since an object can be
* either pinned or forwarded but not both.
* We store them in the vtable slot because the bits are used in the sync block for
- * other purpouses: if we merge them and alloc the sync blocks aligned to 8 bytes, we can change
+ * other purposes: if we merge them and alloc the sync blocks aligned to 8 bytes, we can change
* this and use bit 3 in the syncblock (with the lower two bits both set for forwarded, that
* would be an invalid combination for the monitor and hash code).
* The values are already shifted.
}
}
+/*
+ * Returns TRUE when O looks like a half-constructed string or vector:
+ * the vtable is already installed but the length field still reads as
+ * zero.  NOTE(review): a genuinely empty string or zero-length array
+ * matches this test too, so callers must treat the result as
+ * "possibly half-constructed", not a certainty.
+ */
+static inline gboolean
+is_half_constructed (MonoObject *o)
+{
+	MonoClass *klass;
+
+	klass = ((MonoVTable*)LOAD_VTABLE (o))->klass;
+	if ((klass == mono_defaults.string_class && mono_string_length ((MonoString*)o) == 0) ||
+		(klass->rank && mono_array_length ((MonoArray*)o) == 0))
+		return TRUE;
+	else
+		return FALSE;
+}
+
/*
* ######################################################################
* ######## Global data.
* ######################################################################
*/
-static pthread_mutex_t gc_mutex = PTHREAD_MUTEX_INITIALIZER;
+static LOCK_DECLARE (gc_mutex);
static int gc_disabled = 0;
static int num_minor_gcs = 0;
static int num_major_gcs = 0;
#define DEFAULT_NURSERY_SIZE (1024*512*2)
#define DEFAULT_MAX_SECTION (DEFAULT_NURSERY_SIZE * 16)
#define DEFAULT_LOS_COLLECTION_TARGET (DEFAULT_NURSERY_SIZE * 2)
-/* to quickly find the heard of an object pinned by a conservative address
+/* to quickly find the head of an object pinned by a conservative address
* we keep track of the objects allocated for each SCAN_START_SIZE memory
* chunk in the nursery or other memory sections. Larger values have less
* memory overhead and bigger runtime cost. 4-8 KB are reasonable values.
/* This is a fixed value used for pinned chunks, not the system pagesize */
#define FREELIST_PAGESIZE 4096
-static mword pagesize = 4096; /* FIXME */
+static mword pagesize = 4096;
static mword nursery_size = DEFAULT_NURSERY_SIZE;
static mword next_section_size = DEFAULT_NURSERY_SIZE * 4;
static mword max_section_size = DEFAULT_MAX_SECTION;
static mword disappearing_link_hash_size = 0;
static mword finalizable_hash_size = 0;
-static mword num_registered_finalizers = 0;
-static mword num_ready_finalizers = 0;
-static mword num_disappearing_links = 0;
+static int num_registered_finalizers = 0;
+static int num_ready_finalizers = 0;
+static int num_disappearing_links = 0;
static int no_finalize = 0;
/* keep each size a multiple of ALLOC_ALIGN */
static RootRecord **roots_hash = NULL;
static int roots_hash_size = 0;
static mword roots_size = 0; /* amount of memory in the root set */
-static mword num_roots_entries = 0;
+static int num_roots_entries = 0;
/*
* The current allocation cursors
* ######################################################################
*/
-/*
- * Recursion is not allowed for the thread lock.
- */
-#define LOCK_GC pthread_mutex_lock (&gc_mutex)
-#define UNLOCK_GC pthread_mutex_unlock (&gc_mutex)
-
#define UPDATE_HEAP_BOUNDARIES(low,high) do { \
if ((mword)(low) < lowest_heap_address) \
lowest_heap_address = (mword)(low); \
static void find_pinning_ref_from_thread (char *obj, size_t size);
static void update_current_thread_stack (void *start);
static GCMemSection* alloc_section (size_t size);
-static void finalize_in_range (void **start, void **end);
-static void null_link_in_range (void **start, void **end);
+static void finalize_in_range (char *start, char *end);
+static void null_link_in_range (char *start, char *end);
static gboolean search_fragment_for_size (size_t size);
static void mark_pinned_from_addresses (PinnedChunk *chunk, void **start, void **end);
static void clear_remsets (void);
static void free_large_object (LOSObject *obj);
static void free_mem_section (GCMemSection *section);
+void check_consistency (void);
+
/*
* ######################################################################
* ######## GC descriptors
* Descriptor builders.
*/
void*
-mono_gc_make_descr_for_string (void)
+mono_gc_make_descr_for_string (gsize *bitmap, int numbits)
{
return (void*) DESC_TYPE_STRING;
}
*/
if (first_set < 0) {
desc = DESC_TYPE_RUN_LENGTH | stored_size;
- DEBUG (6, fprintf (gc_debug_file, "Ptrfree descriptor %p, size: %d\n", (void*)desc, stored_size));
+ DEBUG (6, fprintf (gc_debug_file, "Ptrfree descriptor %p, size: %zd\n", (void*)desc, stored_size));
return (void*) desc;
} else if (first_set < 256 && num_set < 256 && (first_set + num_set == last_set + 1)) {
desc = DESC_TYPE_RUN_LENGTH | stored_size | (first_set << 16) | (num_set << 24);
- DEBUG (6, fprintf (gc_debug_file, "Runlen descriptor %p, size: %d, first set: %d, num set: %d\n", (void*)desc, stored_size, first_set, num_set));
+ DEBUG (6, fprintf (gc_debug_file, "Runlen descriptor %p, size: %zd, first set: %d, num set: %d\n", (void*)desc, stored_size, first_set, num_set));
return (void*) desc;
}
/* we know the 2-word header is ptr-free */
if (last_set < SMALL_BITMAP_SIZE + OBJECT_HEADER_WORDS) {
desc = DESC_TYPE_SMALL_BITMAP | stored_size | ((*bitmap >> OBJECT_HEADER_WORDS) << SMALL_BITMAP_SHIFT);
- DEBUG (6, fprintf (gc_debug_file, "Smallbitmap descriptor %p, size: %d, last set: %d\n", (void*)desc, stored_size, last_set));
+ DEBUG (6, fprintf (gc_debug_file, "Smallbitmap descriptor %p, size: %zd, last set: %d\n", (void*)desc, stored_size, last_set));
return (void*) desc;
}
}
/* we know the 2-word header is ptr-free */
if (last_set < LARGE_BITMAP_SIZE + OBJECT_HEADER_WORDS) {
desc = DESC_TYPE_LARGE_BITMAP | ((*bitmap >> OBJECT_HEADER_WORDS) << LOW_TYPE_BITS);
- DEBUG (6, fprintf (gc_debug_file, "Largebitmap descriptor %p, size: %d, last set: %d\n", (void*)desc, stored_size, last_set));
+ DEBUG (6, fprintf (gc_debug_file, "Largebitmap descriptor %p, size: %zd, last set: %d\n", (void*)desc, stored_size, last_set));
return (void*) desc;
}
/* it's a complex object ... */
}
/* Note: we also handle structs with just ref fields */
if (num_set * sizeof (gpointer) == elem_size) {
- return (void*)(desc | VECTOR_SUBTYPE_REFS | ((-1LL) << 16));
+ return (void*)(desc | VECTOR_SUBTYPE_REFS | ((gssize)(-1) << 16));
}
/* FIXME: try run-len first */
/* Note: we can't skip the object header here, because it's not present */
bitmap_data++; \
if (0) { \
MonoObject *myobj = (MonoObject*)obj; \
- g_print ("found %d at %p (0x%x): %s.%s\n", bwords, (obj), (vt)->desc, myobj->vtable->klass->name_space, myobj->vtable->klass->name); \
+ g_print ("found %d at %p (0x%zx): %s.%s\n", bwords, (obj), (vt)->desc, myobj->vtable->klass->name_space, myobj->vtable->klass->name); \
} \
while (bwords-- > 0) { \
gsize _bmap = *bitmap_data++; \
char *e_end = e_start + el_size * mono_array_length ((MonoArray*)(obj)); \
if (0) { \
MonoObject *myobj = (MonoObject*)start; \
- g_print ("found %d at %p (0x%x): %s.%s\n", mbwords, (obj), (vt)->desc, myobj->vtable->klass->name_space, myobj->vtable->klass->name); \
+ g_print ("found %d at %p (0x%zx): %s.%s\n", mbwords, (obj), (vt)->desc, myobj->vtable->klass->name_space, myobj->vtable->klass->name); \
} \
while (e_start < e_end) { \
void **_objptr = (void**)e_start; \
DEBUG (8, fprintf (gc_debug_file, "Scanning object %p, vtable: %p (%s)\n", start, vt, vt->klass->name));
if (0) {
MonoObject *obj = (MonoObject*)start;
- g_print ("found at %p (0x%x): %s.%s\n", start, vt->desc, obj->vtable->klass->name_space, obj->vtable->klass->name);
+ g_print ("found at %p (0x%zx): %s.%s\n", start, vt->desc, obj->vtable->klass->name_space, obj->vtable->klass->name);
}
type = vt->desc & 0x7;
if (type == DESC_TYPE_STRING) {
static char* __attribute__((noinline))
copy_object (char *obj, char *from_space_start, char *from_space_end)
{
- if (obj >= from_space_start && obj < from_space_end) {
+ if (obj >= from_space_start && obj < from_space_end && (obj < to_space || obj >= to_space_end)) {
MonoVTable *vt;
char *forwarded;
mword objsize;
objsize = safe_object_get_size ((MonoObject*)obj);
objsize += ALLOC_ALIGN - 1;
objsize &= ~(ALLOC_ALIGN - 1);
- DEBUG (9, fprintf (gc_debug_file, " (to %p, %s size: %d)\n", gray_objects, ((MonoObject*)obj)->vtable->klass->name, objsize));
+ DEBUG (9, fprintf (gc_debug_file, " (to %p, %s size: %zd)\n", gray_objects, ((MonoObject*)obj)->vtable->klass->name, objsize));
/* FIXME: handle pinned allocs:
* Large objects are simple, at least until we always follow the rule:
* if objsize >= MAX_SMALL_OBJ_SIZE, pin the object and return it.
* the object is pinned, it is marked, otherwise it can be freed.
*/
if (objsize >= MAX_SMALL_OBJ_SIZE || (obj >= min_pinned_chunk_addr && obj < max_pinned_chunk_addr && obj_is_from_pinned_alloc (obj))) {
- DEBUG (9, fprintf (gc_debug_file, "Marked LOS/Pinned %p (%s), size: %d\n", obj, safe_name (obj), objsize));
+ DEBUG (9, fprintf (gc_debug_file, "Marked LOS/Pinned %p (%s), size: %zd\n", obj, safe_name (obj), objsize));
pin_object (obj);
return obj;
}
if (vt->rank && ((MonoArray*)obj)->bounds) {
MonoArray *array = (MonoArray*)gray_objects;
array->bounds = (MonoArrayBounds*)((char*)gray_objects + ((char*)((MonoArray*)obj)->bounds - (char*)obj));
- DEBUG (9, fprintf (gc_debug_file, "Array instance %p: size: %d, rank: %d, length: %d\n", array, objsize, vt->rank, mono_array_length (array)));
+ DEBUG (9, fprintf (gc_debug_file, "Array instance %p: size: %zd, rank: %d, length: %d\n", array, objsize, vt->rank, mono_array_length (array)));
}
/* set the forwarding pointer */
forward_object (obj, gray_objects);
last_obj_size = safe_object_get_size ((MonoObject*)search_start);
last_obj_size += ALLOC_ALIGN - 1;
last_obj_size &= ~(ALLOC_ALIGN - 1);
- DEBUG (8, fprintf (gc_debug_file, "Pinned try match %p (%s), size %d\n", last_obj, safe_name (last_obj), last_obj_size));
+ DEBUG (8, fprintf (gc_debug_file, "Pinned try match %p (%s), size %zd\n", last_obj, safe_name (last_obj), last_obj_size));
if (addr >= search_start && (char*)addr < (char*)last_obj + last_obj_size) {
DEBUG (4, fprintf (gc_debug_file, "Pinned object %p, vtable %p (%s), count %d\n", search_start, *(void**)search_start, safe_name (search_start), count));
pin_object (search_start);
gpointer next;
for (i = 0; i < next_pin_slot; ++i) {
next = pin_queue [i];
- fprintf (gc_debug_file, "Nursery range: %p-%p, size: %d\n", first, next, (char*)next-(char*)first);
+ fprintf (gc_debug_file, "Nursery range: %p-%p, size: %zd\n", first, next, (char*)next-(char*)first);
first = next;
}
next = end_nursery;
- fprintf (gc_debug_file, "Nursery range: %p-%p, size: %d\n", first, next, (char*)next-(char*)first);
+ fprintf (gc_debug_file, "Nursery range: %p-%p, size: %zd\n", first, next, (char*)next-(char*)first);
}
/* reduce the info in the pin queue, removing duplicate pointers and sorting them */
start++;
}
DEBUG (7, if (count) fprintf (gc_debug_file, "found %d potential pinned heap pointers\n", count));
+
+#ifdef HAVE_VALGRIND_MEMCHECK_H
+ /*
+ * The pinning addresses might come from undefined memory; this is normal. Since they
+ * are used in lots of functions, we make the memory defined here instead of having
+ * to add a suppression for those functions.
+ */
+ VALGRIND_MAKE_MEM_DEFINED (pin_queue, next_pin_slot * sizeof (pin_queue [0]));
+#endif
}
/*
if (nursery_section)
return;
- DEBUG (2, fprintf (gc_debug_file, "Allocating nursery size: %d\n", nursery_size));
+ DEBUG (2, fprintf (gc_debug_file, "Allocating nursery size: %zd\n", nursery_size));
/* later we will alloc a larger area for the nursery but only activate
* what we need. The rest will be used as expansion if we have too many pinned
* objects in the existing nursery.
nursery_temp_end = data + SCAN_START_SIZE;
UPDATE_HEAP_BOUNDARIES (nursery_start, nursery_real_end);
total_alloc += nursery_size;
- DEBUG (4, fprintf (gc_debug_file, "Expanding heap size: %d, total: %d\n", nursery_size, total_alloc));
+ DEBUG (4, fprintf (gc_debug_file, "Expanding heap size: %zd, total: %zd\n", nursery_size, total_alloc));
section->data = section->next_data = data;
section->size = nursery_size;
section->end_data = nursery_real_end;
}
/* scan the old object space, too */
for (big_object = los_object_list; big_object; big_object = big_object->next) {
- DEBUG (5, fprintf (gc_debug_file, "Scan of big object: %p (%s), size: %d\n", big_object->data, safe_name (big_object->data), big_object->size));
+ DEBUG (5, fprintf (gc_debug_file, "Scan of big object: %p (%s), size: %zd\n", big_object->data, safe_name (big_object->data), big_object->size));
scan_object (big_object->data, start, end);
}
/* scan the list of objects ready for finalization */
add_nursery_frag (size_t frag_size, char* frag_start, char* frag_end)
{
Fragment *fragment;
- DEBUG (4, fprintf (gc_debug_file, "Found empty fragment: %p-%p, size: %d\n", frag_start, frag_end, frag_size));
+ DEBUG (4, fprintf (gc_debug_file, "Found empty fragment: %p-%p, size: %zd\n", frag_start, frag_end, frag_size));
/* memsetting just the first chunk start is bound to provide better cache locality */
memset (frag_start, 0, frag_size);
/* Not worth dealing with smaller fragments: need to tune */
}
}
+/*
+ * Walk the large-object list and precisely scan every object that is
+ * pinned (i.e. reached during this collection) but not yet scanned,
+ * marking each one as scanned afterwards.  Returns the number of
+ * objects scanned, so the caller can repeat until no newly reachable
+ * large objects remain (see the loop in drain_gray_stack ()).
+ */
+static int
+scan_needed_big_objects (char *start_addr, char *end_addr)
+{
+	LOSObject *big_object;
+	int count = 0;
+	for (big_object = los_object_list; big_object; big_object = big_object->next) {
+		if (!big_object->scanned && object_is_pinned (big_object->data)) {
+			DEBUG (5, fprintf (gc_debug_file, "Scan of big object: %p (%s), size: %zd\n", big_object->data, safe_name (big_object->data), big_object->size));
+			scan_object (big_object->data, start_addr, end_addr);
+			big_object->scanned = TRUE;
+			count++;
+		}
+	}
+	return count;
+}
+
static void
drain_gray_stack (char *start_addr, char *end_addr)
{
TV_DECLARE (atv);
TV_DECLARE (btv);
- int fin_ready;
+ int fin_ready, bigo_scanned_num;
char *gray_start;
/*
*/
do {
fin_ready = num_ready_finalizers;
- finalize_in_range ((void**)start_addr, (void**)end_addr);
+ finalize_in_range (start_addr, end_addr);
+ bigo_scanned_num = scan_needed_big_objects (start_addr, end_addr);
/* drain the new stack that might have been created */
DEBUG (6, fprintf (gc_debug_file, "Precise scan of gray area post fin: %p-%p, size: %d\n", gray_start, gray_objects, (int)(gray_objects - gray_start)));
DEBUG (9, fprintf (gc_debug_file, "Precise gray object scan %p (%s)\n", gray_start, safe_name (gray_start)));
gray_start = scan_object (gray_start, start_addr, end_addr);
}
- } while (fin_ready != num_ready_finalizers);
+ } while (fin_ready != num_ready_finalizers || bigo_scanned_num);
DEBUG (2, fprintf (gc_debug_file, "Copied to old space: %d bytes\n", (int)(gray_objects - to_space)));
to_space = gray_start;
* GC a finalized object my lose the monitor because it is cleared before the finalizer is
* called.
*/
- null_link_in_range ((void**)start_addr, (void**)end_addr);
+ null_link_in_range (start_addr, end_addr);
TV_GETTIME (btv);
DEBUG (2, fprintf (gc_debug_file, "Finalize queue handling scan: %d usecs\n", TV_ELAPSED (atv, btv)));
}
static int last_num_pinned = 0;
static void
-build_nursery_fragments (int start_pin, int end_pin)
+build_nursery_fragments (int start_pin, int end_pin, char *nursery_last_allocated)
{
char *frag_start, *frag_end;
size_t frag_size;
frag_size += ALLOC_ALIGN - 1;
frag_size &= ~(ALLOC_ALIGN - 1);
frag_start = (char*)pin_queue [i] + frag_size;
+ /*
+ * pin_queue [i] might point to a half-constructed string or vector whose
+ * length field is not set. In that case, frag_start points inside the
+ * (zero initialized) object. Find the end of the object by scanning forward.
+ *
+ */
+ if (is_half_constructed (pin_queue [i])) {
+ /* Can't use nursery_next as the limit as it is modified in collect_nursery () */
+ while ((frag_start < nursery_last_allocated) && *(mword*)frag_start == 0)
+ frag_start += sizeof (mword);
+ }
}
nursery_last_pinned_end = frag_start;
frag_end = nursery_real_end;
frag_end = pin_queue [i];
/* remove the pin bit from pinned objects */
unpin_object (frag_end);
- section->scan_starts [((char*)frag_end - (char*)section->data)/SCAN_START_SIZE] = frag_end;
+ if (frag_end >= section->data + section->size) {
+ frag_end = section->data + section->size;
+ } else {
+ section->scan_starts [((char*)frag_end - (char*)section->data)/SCAN_START_SIZE] = frag_end;
+ }
frag_size = frag_end - frag_start;
if (frag_size)
memset (frag_start, 0, frag_size);
memset (frag_start, 0, frag_size);
}
+/*
+ * Precisely scan all registered roots (this includes static fields)
+ * whose addresses fall in the roots hash.  Only roots carrying a
+ * non-NULL descriptor are scanned here: a NULL root_desc means no
+ * precise layout info is available, so such roots are skipped by this
+ * pass.  ADDR_START/ADDR_END bound the from-space being collected.
+ */
+static void
+scan_from_registered_roots (char *addr_start, char *addr_end)
+{
+	int i;
+	RootRecord *root;
+	for (i = 0; i < roots_hash_size; ++i) {
+		for (root = roots_hash [i]; root; root = root->next) {
+			/* if desc is non-null it has precise info */
+			if (!root->root_desc)
+				continue;
+			DEBUG (6, fprintf (gc_debug_file, "Precise root scan %p-%p (desc: %p)\n", root->start_root, root->end_root, (void*)root->root_desc));
+			precisely_scan_objects_from ((void**)root->start_root, (void**)root->end_root, addr_start, addr_end, root->root_desc);
+		}
+	}
+}
+
/*
* Collect objects in the nursery.
*/
GCMemSection *section;
size_t max_garbage_amount;
int i;
- RootRecord *root;
+ char *nursery_last_allocated;
+ TV_DECLARE (all_atv);
+ TV_DECLARE (all_btv);
TV_DECLARE (atv);
TV_DECLARE (btv);
degraded_mode = 0;
+ nursery_last_allocated = nursery_next;
nursery_next = MAX (nursery_next, nursery_last_pinned_end);
/* FIXME: optimize later to use the higher address where an object can be present */
nursery_next = MAX (nursery_next, nursery_real_end);
nursery_section->next_data = nursery_next;
num_minor_gcs++;
+ mono_stats.minor_gc_count ++;
/* world must be stopped already */
+ TV_GETTIME (all_atv);
TV_GETTIME (atv);
/* pin from pinned handles */
pin_from_roots (nursery_start, nursery_next);
scan_object (pin_queue [i], nursery_start, nursery_next);
}
/* registered roots, this includes static fields */
- for (i = 0; i < roots_hash_size; ++i) {
- for (root = roots_hash [i]; root; root = root->next) {
- /* if desc is non-null it has precise info */
- if (!root->root_desc)
- continue;
- DEBUG (6, fprintf (gc_debug_file, "Precise root scan %p-%p (desc: %p)\n", root->start_root, root->end_root, (void*)root->root_desc));
- precisely_scan_objects_from ((void**)root->start_root, root->end_root, nursery_start, nursery_next, root->root_desc);
- }
- }
+ scan_from_registered_roots (nursery_start, nursery_next);
TV_GETTIME (btv);
DEBUG (2, fprintf (gc_debug_file, "Root scan: %d usecs\n", TV_ELAPSED (atv, btv)));
* pinned objects as we go, memzero() the empty fragments so they are ready for the
* next allocations.
*/
- build_nursery_fragments (0, next_pin_slot);
+ build_nursery_fragments (0, next_pin_slot, nursery_last_allocated);
TV_GETTIME (atv);
- DEBUG (2, fprintf (gc_debug_file, "Fragment creation: %d usecs, %d bytes available\n", TV_ELAPSED (btv, atv), fragment_total));
+ DEBUG (2, fprintf (gc_debug_file, "Fragment creation: %d usecs, %zd bytes available\n", TV_ELAPSED (btv, atv), fragment_total));
+
+ TV_GETTIME (all_btv);
+ mono_stats.minor_gc_time_usecs += TV_ELAPSED (all_atv, all_btv);
/* prepare the pin queue for the next collection */
last_num_pinned = next_pin_slot;
GCMemSection *section, *prev_section;
LOSObject *bigobj, *prevbo;
int i;
- RootRecord *root;
PinnedChunk *chunk;
FinalizeEntry *fin;
int count;
+ TV_DECLARE (all_atv);
+ TV_DECLARE (all_btv);
TV_DECLARE (atv);
TV_DECLARE (btv);
/* FIXME: only use these values for the precise scan
degraded_mode = 0;
DEBUG (1, fprintf (gc_debug_file, "Start major collection %d\n", num_major_gcs));
num_major_gcs++;
+ mono_stats.major_gc_count ++;
/*
* FIXME: implement Mark/Compact
* Until that is done, we can just apply mostly the same alg as for the nursery:
collect_nursery (0);
return;
}
+ TV_GETTIME (all_atv);
/* FIXME: make sure the nursery next_data ptr is updated */
nursery_section->next_data = nursery_real_end;
/* we should also coalesce scanning from sections close to each other
TV_GETTIME (atv);
DEBUG (6, fprintf (gc_debug_file, "Pinning from sections\n"));
for (section = section_list; section; section = section->next) {
- section->pin_queue_start = count = next_pin_slot;
+ section->pin_queue_start = count = section->pin_queue_end = next_pin_slot;
pin_from_roots (section->data, section->next_data);
if (count != next_pin_slot) {
int reduced_to;
if (next_pin_slot != count) {
next_pin_slot = count;
pin_object (bigobj->data);
- DEBUG (6, fprintf (gc_debug_file, "Marked large object %p (%s) size: %d from roots\n", bigobj->data, safe_name (bigobj->data), bigobj->size));
+ DEBUG (6, fprintf (gc_debug_file, "Marked large object %p (%s) size: %zd from roots\n", bigobj->data, safe_name (bigobj->data), bigobj->size));
}
}
/* look for pinned addresses for pinned-alloc objects */
DEBUG (4, fprintf (gc_debug_file, "Start scan with %d pinned objects\n", next_pin_slot));
/* allocate the big to space */
- DEBUG (4, fprintf (gc_debug_file, "Allocate tospace for size: %d\n", copy_space_required));
+ DEBUG (4, fprintf (gc_debug_file, "Allocate tospace for size: %zd\n", copy_space_required));
section = alloc_section (copy_space_required);
to_space = gray_objects = section->next_data;
to_space_end = section->end_data;
* mark any section without pinned objects, so we can free it since we will be able to
* move all the objects.
*/
- /* the pinned objects are roots */
+ /* the pinned objects are roots (big objects are included in this list, too) */
for (i = 0; i < next_pin_slot; ++i) {
DEBUG (6, fprintf (gc_debug_file, "Precise object scan %d of pinned %p (%s)\n", i, pin_queue [i], safe_name (pin_queue [i])));
scan_object (pin_queue [i], heap_start, heap_end);
}
/* registered roots, this includes static fields */
- for (i = 0; i < roots_hash_size; ++i) {
- for (root = roots_hash [i]; root; root = root->next) {
- /* if desc is non-null it has precise info */
- if (!root->root_desc)
- continue;
- DEBUG (6, fprintf (gc_debug_file, "Precise root scan %p-%p (desc: %p)\n", root->start_root, root->end_root, (void*)root->root_desc));
- precisely_scan_objects_from ((void**)root->start_root, root->end_root, heap_start, heap_end, root->root_desc);
- }
- }
+ scan_from_registered_roots (heap_start, heap_end);
+
/* scan the list of objects ready for finalization */
for (fin = fin_ready_list; fin; fin = fin->next) {
DEBUG (5, fprintf (gc_debug_file, "Scan of fin ready object: %p (%s)\n", fin->object, safe_name (fin->object)));
TV_GETTIME (atv);
DEBUG (2, fprintf (gc_debug_file, "Root scan: %d usecs\n", TV_ELAPSED (btv, atv)));
+ /* we need to go over the big object list to see if any was marked and scan it
+ * We need to do this in a loop, since objects referenced by finalizable
+ * objects could reference big objects (this happens in drain_gray_stack ())
+ */
+ scan_needed_big_objects (heap_start, heap_end);
/* all the objects in the heap */
drain_gray_stack (heap_start, heap_end);
for (bigobj = los_object_list; bigobj;) {
if (object_is_pinned (bigobj->data)) {
unpin_object (bigobj->data);
+ bigobj->scanned = FALSE;
} else {
LOSObject *to_free;
/* not referenced anywhere, so we can free it */
* pinned objects as we go, memzero() the empty fragments so they are ready for the
* next allocations.
*/
- build_nursery_fragments (nursery_section->pin_queue_start, nursery_section->pin_queue_end);
+ build_nursery_fragments (nursery_section->pin_queue_start, nursery_section->pin_queue_end, nursery_next);
+ TV_GETTIME (all_btv);
+ mono_stats.major_gc_time_usecs += TV_ELAPSED (all_atv, all_btv);
/* prepare the pin queue for the next collection */
next_pin_slot = 0;
if (fin_ready_list) {
section->end_data = data + new_size;
UPDATE_HEAP_BOUNDARIES (data, section->end_data);
total_alloc += new_size;
- DEBUG (2, fprintf (gc_debug_file, "Expanding heap size: %d, total: %d\n", new_size, total_alloc));
+ DEBUG (2, fprintf (gc_debug_file, "Expanding heap size: %zd, total: %zd\n", new_size, total_alloc));
section->data = data;
section->size = new_size;
scan_starts = new_size / SCAN_START_SIZE;
{
char *data = section->data;
size_t size = section->size;
- DEBUG (2, fprintf (gc_debug_file, "Freed section %p, size %d\n", data, size));
+ DEBUG (2, fprintf (gc_debug_file, "Freed section %p, size %zd\n", data, size));
free_os_memory (data, size);
free_internal_mem (section);
total_alloc -= size;
if (do_minor_collection) {
stop_world ();
collect_nursery (size);
- DEBUG (2, fprintf (gc_debug_file, "Heap size: %d, LOS size: %d\n", total_alloc, los_memory_usage));
+ DEBUG (2, fprintf (gc_debug_file, "Heap size: %zd, LOS size: %zd\n", total_alloc, los_memory_usage));
restart_world ();
/* this also sets the proper pointers for the next allocation */
if (!search_fragment_for_size (size)) {
int i;
/* TypeBuilder and MonoMethod are killing mcs with fragmentation */
- DEBUG (1, fprintf (gc_debug_file, "nursery collection didn't find enough room for %d alloc (%d pinned)", size, last_num_pinned));
+ DEBUG (1, fprintf (gc_debug_file, "nursery collection didn't find enough room for %zd alloc (%d pinned)", size, last_num_pinned));
for (i = 0; i < last_num_pinned; ++i) {
DEBUG (3, fprintf (gc_debug_file, "Bastard pinning obj %p (%s), size: %d\n", pin_queue [i], safe_name (pin_queue [i]), safe_object_get_size (pin_queue [i])));
}
* Internal memory can be handled with a freelist for small objects.
*/
-#ifndef MAP_ANONYMOUS
-#define MAP_ANONYMOUS MAP_ANON
-#endif
-
/*
* Allocate a big chunk of memory from the OS (usually 64KB to several megabytes).
* This must not require any lock.
get_os_memory (size_t size, int activate)
{
void *ptr;
- unsigned long prot_flags = activate? PROT_READ|PROT_WRITE: PROT_NONE;
+ unsigned long prot_flags = activate? MONO_MMAP_READ|MONO_MMAP_WRITE: MONO_MMAP_NONE;
+ prot_flags |= MONO_MMAP_PRIVATE | MONO_MMAP_ANON;
size += pagesize - 1;
size &= ~(pagesize - 1);
- ptr = mmap (0, size, prot_flags, MAP_PRIVATE|MAP_ANONYMOUS, -1, 0);
- if (ptr == (void*)-1) {
- int fd = open ("/dev/zero", O_RDONLY);
- if (fd != -1) {
- ptr = mmap (0, size, prot_flags, MAP_PRIVATE, fd, 0);
- close (fd);
- }
- if (ptr == (void*)-1) {
- return NULL;
- }
- }
+ ptr = mono_valloc (0, size, prot_flags);
return ptr;
}
void **p, **end;
int count = 0;
/*g_print ("building freelist for slot %d, size %d in %p\n", slot, size, chunk);*/
- p = start_page;
+ p = (void**)start_page;
end = (void**)(end_page - size);
g_assert (!chunk->free_list [slot]);
chunk->free_list [slot] = p;
- while ((char*)p + size <= end) {
+ while ((char*)p + size <= (char*)end) {
count++;
*p = (void*)((char*)p + size);
p = *p;
/* allocate the first page to the freelist */
chunk->page_sizes [0] = PINNED_FIRST_SLOT_SIZE;
build_freelist (chunk, slot_for_size (PINNED_FIRST_SLOT_SIZE), PINNED_FIRST_SLOT_SIZE, chunk->start_data, ((char*)chunk + FREELIST_PAGESIZE));
- DEBUG (4, fprintf (gc_debug_file, "Allocated pinned chunk %p, size: %d\n", chunk, size));
+ DEBUG (4, fprintf (gc_debug_file, "Allocated pinned chunk %p, size: %zd\n", chunk, size));
min_pinned_chunk_addr = MIN (min_pinned_chunk_addr, (char*)chunk->start_data);
max_pinned_chunk_addr = MAX (max_pinned_chunk_addr, ((char*)chunk + size));
return chunk;
get_internal_mem (size_t size)
{
return calloc (1, size);
+#if 0
int slot;
void *res = NULL;
PinnedChunk *pchunk;
internal_chunk_list = pchunk;
res = get_chunk_freelist (pchunk, slot);
return res;
+#endif
}
static void
free_internal_mem (void *addr)
{
free (addr);
- return;
+#if 0
PinnedChunk *pchunk;
for (pchunk = internal_chunk_list; pchunk; pchunk = pchunk->next) {
/*printf ("trying to free %p in %p (pages: %d)\n", addr, pchunk, pchunk->num_pages);*/
}
printf ("free of %p failed\n", addr);
g_assert_not_reached ();
+#endif
}
/*
free_large_object (LOSObject *obj)
{
size_t size = obj->size;
- DEBUG (4, fprintf (gc_debug_file, "Freed large object %p, size %d\n", obj->data, obj->size));
+ DEBUG (4, fprintf (gc_debug_file, "Freed large object %p, size %zd\n", obj->data, obj->size));
los_memory_usage -= size;
size += sizeof (LOSObject);
int just_did_major_gc = FALSE;
if (los_memory_usage > next_los_collection) {
- DEBUG (4, fprintf (gc_debug_file, "Should trigger major collection: req size %d (los already: %u, limit: %u)\n", size, los_memory_usage, next_los_collection));
+ DEBUG (4, fprintf (gc_debug_file, "Should trigger major collection: req size %zd (los already: %zu, limit: %zu)\n", size, los_memory_usage, next_los_collection));
just_did_major_gc = TRUE;
stop_world ();
major_collection ();
los_object_list = obj;
los_memory_usage += size;
los_num_objects++;
- DEBUG (4, fprintf (gc_debug_file, "Allocated large object %p, vtable: %p (%s), size: %d\n", obj->data, vtable, vtable->klass->name, size));
+ DEBUG (4, fprintf (gc_debug_file, "Allocated large object %p, vtable: %p (%s), size: %zd\n", obj->data, vtable, vtable->klass->name, size));
return obj->data;
}
search_fragment_for_size (size_t size)
{
Fragment *frag, *prev;
- DEBUG (4, fprintf (gc_debug_file, "Searching nursery fragment %p, size: %d\n", nursery_frag_real_end, size));
+ DEBUG (4, fprintf (gc_debug_file, "Searching nursery fragment %p, size: %zd\n", nursery_frag_real_end, size));
prev = NULL;
for (frag = nursery_fragments; frag; frag = frag->next) {
if (size <= (frag->fragment_end - frag->fragment_start)) {
nursery_frag_real_end = frag->fragment_end;
nursery_temp_end = MIN (nursery_frag_real_end, nursery_next + size + SCAN_START_SIZE);
- DEBUG (4, fprintf (gc_debug_file, "Using nursery fragment %p-%p, size: %d (req: %d)\n", nursery_next, nursery_frag_real_end, nursery_frag_real_end - nursery_next, size));
+ DEBUG (4, fprintf (gc_debug_file, "Using nursery fragment %p-%p, size: %zd (req: %zd)\n", nursery_next, nursery_frag_real_end, nursery_frag_real_end - nursery_next, size));
frag->next = fragment_freelist;
fragment_freelist = frag;
return TRUE;
}
/*
- * size is already rounded up.
+ * size is already rounded up and we hold the GC lock.
*/
static void*
alloc_degraded (MonoVTable *vtable, size_t size)
void **p = NULL;
for (section = section_list; section; section = section->next) {
if (section != nursery_section && (section->end_data - section->next_data) >= size) {
- p = section->next_data;
+ p = (void**)section->next_data;
break;
}
}
if (!p) {
section = alloc_section (nursery_section->size * 4);
/* FIXME: handle OOM */
- p = section->next_data;
+ p = (void**)section->next_data;
}
section->next_data += size;
degraded_mode += size;
- DEBUG (3, fprintf (gc_debug_file, "Allocated (degraded) object %p, vtable: %p (%s), size: %d in section %p\n", p, vtable, vtable->klass->name, size, section));
+ DEBUG (3, fprintf (gc_debug_file, "Allocated (degraded) object %p, vtable: %p (%s), size: %zd in section %p\n", p, vtable, vtable->klass->name, size, section));
*p = vtable;
return p;
}
}
} else {
/* record the scan start so we can find pinned objects more easily */
- nursery_section->scan_starts [((char*)p - (char*)nursery_section->data)/SCAN_START_SIZE] = p;
+ nursery_section->scan_starts [((char*)p - (char*)nursery_section->data)/SCAN_START_SIZE] = (char*)p;
/* we just bump nursery_temp_end as well */
nursery_temp_end = MIN (nursery_frag_real_end, nursery_next + SCAN_START_SIZE);
DEBUG (5, fprintf (gc_debug_file, "Expanding local alloc: %p-%p\n", nursery_next, nursery_temp_end));
}
}
}
- DEBUG (6, fprintf (gc_debug_file, "Allocated object %p, vtable: %p (%s), size: %d\n", p, vtable, vtable->klass->name, size));
+ DEBUG (6, fprintf (gc_debug_file, "Allocated object %p, vtable: %p (%s), size: %zd\n", p, vtable, vtable->klass->name, size));
*p = vtable;
UNLOCK_GC;
p = alloc_from_freelist (size);
memset (p, 0, size);
}
- DEBUG (6, fprintf (gc_debug_file, "Allocated pinned object %p, vtable: %p (%s), size: %d\n", p, vtable, vtable->klass->name, size));
+ DEBUG (6, fprintf (gc_debug_file, "Allocated pinned object %p, vtable: %p (%s), size: %zd\n", p, vtable, vtable->klass->name, size));
*p = vtable;
UNLOCK_GC;
return p;
#define object_is_fin_ready(obj) (!object_is_pinned (obj) && !object_is_forwarded (obj))
static void
-finalize_in_range (void **start, void **end)
+finalize_in_range (char *start, char *end)
{
FinalizeEntry *entry, *prev;
int i;
for (i = 0; i < finalizable_hash_size; ++i) {
prev = NULL;
for (entry = finalizable_hash [i]; entry;) {
- if (entry->object >= start && entry->object < end) {
+ if ((char*)entry->object >= start && (char*)entry->object < end && ((char*)entry->object < to_space || (char*)entry->object >= to_space_end)) {
if (object_is_fin_ready (entry->object)) {
char *from;
FinalizeEntry *next;
}
static void
-null_link_in_range (void **start, void **end)
+null_link_in_range (char *start, char *end)
{
FinalizeEntry *entry, *prev;
int i;
for (i = 0; i < disappearing_link_hash_size; ++i) {
prev = NULL;
for (entry = disappearing_link_hash [i]; entry;) {
- if (entry->object >= start && entry->object < end) {
+ if ((char*)entry->object >= start && (char*)entry->object < end && ((char*)entry->object < to_space || (char*)entry->object >= to_space_end)) {
if (object_is_fin_ready (entry->object)) {
void **p = entry->data;
FinalizeEntry *old;
old = entry->next;
free_internal_mem (entry);
entry = old;
+ num_disappearing_links--;
continue;
} else {
void **link;
* ######################################################################
*/
-#undef pthread_create
-#undef pthread_join
-#undef pthread_detach
-
-typedef struct {
- void *(*start_routine) (void *);
- void *arg;
- int flags;
- sem_t registered;
-} SgenThreadStartInfo;
-
/* eventually share with MonoThread? */
typedef struct _SgenThreadInfo SgenThreadInfo;
struct _SgenThreadInfo {
SgenThreadInfo *next;
- pthread_t id;
+ ARCH_THREAD_TYPE id;
unsigned int stop_count; /* to catch duplicate signals */
int signal;
int skip;
void *stack_end;
void *stack_start;
- RememberedSet **remset;
+ RememberedSet *remset;
};
/* FIXME: handle large/small config */
#define HASH_PTHREAD_T(id) (((unsigned int)(id) >> 4) * 2654435761u)
static SgenThreadInfo* thread_table [THREAD_HASH_SIZE];
+
+#if USE_SIGNAL_BASED_START_STOP_WORLD
+
static sem_t suspend_ack_semaphore;
static unsigned int global_stop_count = 0;
static int suspend_signal_num = SIGPWR;
/* LOCKING: assumes the GC lock is held */
static SgenThreadInfo*
-thread_info_lookup (pthread_t id)
+thread_info_lookup (ARCH_THREAD_TYPE id)
{
unsigned int hash = HASH_PTHREAD_T (id) % THREAD_HASH_SIZE;
SgenThreadInfo *info;
info = thread_table [hash];
- while (info && !pthread_equal (info->id, id)) {
+ while (info && !ARCH_THREAD_EQUALS (info->id, id)) {
info = info->next;
}
return info;
update_current_thread_stack (void *start)
{
void *ptr = cur_thread_regs;
- SgenThreadInfo *info = thread_info_lookup (pthread_self ());
+ SgenThreadInfo *info = thread_info_lookup (ARCH_GET_THREAD ());
info->stack_start = align_pointer (&ptr);
ARCH_STORE_REGS (ptr);
}
for (i = 0; i < THREAD_HASH_SIZE; ++i) {
for (info = thread_table [i]; info; info = info->next) {
DEBUG (4, fprintf (gc_debug_file, "considering thread %p for signal %d (%s)\n", info, signum, signal_desc (signum)));
- if (pthread_equal (info->id, me)) {
+ if (ARCH_THREAD_EQUALS (info->id, me)) {
DEBUG (4, fprintf (gc_debug_file, "Skip (equal): %p, %p\n", (void*)me, (void*)info->id));
continue;
}
int count;
global_stop_count++;
- DEBUG (3, fprintf (gc_debug_file, "stopping world n %d from %p %p\n", global_stop_count, thread_info_lookup (pthread_self ()), (gpointer)pthread_self ()));
+ DEBUG (3, fprintf (gc_debug_file, "stopping world n %d from %p %p\n", global_stop_count, thread_info_lookup (ARCH_GET_THREAD ()), (gpointer)ARCH_GET_THREAD ()));
TV_GETTIME (stop_world_time);
count = thread_handshake (suspend_signal_num);
DEBUG (3, fprintf (gc_debug_file, "world stopped %d thread(s)\n", count));
return count;
}
+#endif /* USE_SIGNAL_BASED_START_STOP_WORLD */
+
/*
* Identify objects pinned in a thread stack and its registers.
*/
for (i = 0; i < THREAD_HASH_SIZE; ++i) {
for (info = thread_table [i]; info; info = info->next) {
if (info->skip) {
- DEBUG (2, fprintf (gc_debug_file, "Skipping dead thread %p, range: %p-%p, size: %d\n", info, info->stack_start, info->stack_end, (char*)info->stack_end - (char*)info->stack_start));
+ DEBUG (2, fprintf (gc_debug_file, "Skipping dead thread %p, range: %p-%p, size: %zd\n", info, info->stack_start, info->stack_end, (char*)info->stack_end - (char*)info->stack_start));
continue;
}
- DEBUG (2, fprintf (gc_debug_file, "Scanning thread %p, range: %p-%p, size: %d\n", info, info->stack_start, info->stack_end, (char*)info->stack_end - (char*)info->stack_start));
+ DEBUG (2, fprintf (gc_debug_file, "Scanning thread %p, range: %p-%p, size: %zd\n", info, info->stack_start, info->stack_end, (char*)info->stack_end - (char*)info->stack_start));
conservatively_pin_objects_from (info->stack_start, info->stack_end, start_nursery, end_nursery);
}
}
DEBUG (2, fprintf (gc_debug_file, "Scanning current thread registers\n"));
- conservatively_pin_objects_from (cur_thread_regs, cur_thread_regs + ARCH_NUM_REGS, start_nursery, end_nursery);
+ conservatively_pin_objects_from ((void*)cur_thread_regs, (void*)(cur_thread_regs + ARCH_NUM_REGS), start_nursery, end_nursery);
}
static void
continue;
while (start < (char**)info->stack_end) {
if (*start >= obj && *start < endobj) {
- DEBUG (0, fprintf (gc_debug_file, "Object %p referenced in thread %p (id %p) at %p, stack: %p-%p\n", obj, info, info->id, start, info->stack_start, info->stack_end));
+ DEBUG (0, fprintf (gc_debug_file, "Object %p referenced in thread %p (id %p) at %p, stack: %p-%p\n", obj, info, (gpointer)info->id, start, info->stack_start, info->stack_end));
}
start++;
}
switch ((*p) & REMSET_TYPE_MASK) {
case REMSET_LOCATION:
ptr = (void**)(*p);
- if ((ptr < start_nursery || ptr >= end_nursery) && ptr_in_heap (ptr)) {
+ if (((void*)ptr < start_nursery || (void*)ptr >= end_nursery) && ptr_in_heap (ptr)) {
*ptr = copy_object (*ptr, start_nursery, end_nursery);
DEBUG (9, fprintf (gc_debug_file, "Overwrote remset at %p with %p\n", ptr, *ptr));
if (!global && *ptr >= start_nursery && *ptr < end_nursery)
return p + 1;
case REMSET_RANGE:
ptr = (void**)(*p & ~REMSET_TYPE_MASK);
- if ((ptr >= start_nursery && ptr < end_nursery) || !ptr_in_heap (ptr))
+ if (((void*)ptr >= start_nursery && (void*)ptr < end_nursery) || !ptr_in_heap (ptr))
return p + 2;
count = p [1];
while (count-- > 0) {
return p + 2;
case REMSET_OBJECT:
ptr = (void**)(*p & ~REMSET_TYPE_MASK);
- if ((ptr >= start_nursery && ptr < end_nursery) || !ptr_in_heap (ptr))
+ if (((void*)ptr >= start_nursery && (void*)ptr < end_nursery) || !ptr_in_heap (ptr))
return p + 1;
scan_object (*ptr, start_nursery, end_nursery);
return p + 1;
/* the global one */
for (remset = global_remset; remset; remset = remset->next) {
- DEBUG (4, fprintf (gc_debug_file, "Scanning global remset range: %p-%p, size: %d\n", remset->data, remset->store_next, remset->store_next - remset->data));
+ DEBUG (4, fprintf (gc_debug_file, "Scanning global remset range: %p-%p, size: %zd\n", remset->data, remset->store_next, remset->store_next - remset->data));
for (p = remset->data; p < remset->store_next;) {
p = handle_remset (p, start_nursery, end_nursery, TRUE);
}
for (i = 0; i < THREAD_HASH_SIZE; ++i) {
for (info = thread_table [i]; info; info = info->next) {
for (remset = info->remset; remset; remset = next) {
- DEBUG (4, fprintf (gc_debug_file, "Scanning remset for thread %p, range: %p-%p, size: %d\n", info, remset->data, remset->store_next, remset->store_next - remset->data));
+ DEBUG (4, fprintf (gc_debug_file, "Scanning remset for thread %p, range: %p-%p, size: %zd\n", info, remset->data, remset->store_next, remset->store_next - remset->data));
for (p = remset->data; p < remset->store_next;) {
p = handle_remset (p, start_nursery, end_nursery, FALSE);
}
SgenThreadInfo* info = malloc (sizeof (SgenThreadInfo));
if (!info)
return NULL;
- info->id = pthread_self ();
+ info->id = ARCH_GET_THREAD ();
info->stop_count = -1;
info->skip = 0;
info->signal = 0;
pthread_getattr_np (pthread_self (), &attr);
pthread_attr_getstack (&attr, &sstart, &size);
info->stack_end = (char*)sstart + size;
+ pthread_attr_destroy (&attr);
}
#elif defined(HAVE_PTHREAD_GET_STACKSIZE_NP) && defined(HAVE_PTHREAD_GET_STACKADDR_NP)
info->stack_end = (char*)pthread_get_stackaddr_np (pthread_self ());
thread_table [hash] = info;
remembered_set = info->remset = alloc_remset (DEFAULT_REMSET_SIZE, info);
+ pthread_setspecific (remembered_set_key, remembered_set);
DEBUG (3, fprintf (gc_debug_file, "registered thread %p (%p) (hash: %d)\n", info, (gpointer)info->id, hash));
return info;
}
int hash;
SgenThreadInfo *prev = NULL;
SgenThreadInfo *p;
- pthread_t id = pthread_self ();
+ RememberedSet *rset;
+ ARCH_THREAD_TYPE id = ARCH_GET_THREAD ();
hash = HASH_PTHREAD_T (id) % THREAD_HASH_SIZE;
p = thread_table [hash];
assert (p);
DEBUG (3, fprintf (gc_debug_file, "unregister thread %p (%p)\n", p, (gpointer)p->id));
- while (!pthread_equal (p->id, id)) {
+ while (!ARCH_THREAD_EQUALS (p->id, id)) {
prev = p;
p = p->next;
}
} else {
prev->next = p->next;
}
+ rset = p->remset;
/* FIXME: transfer remsets if any */
+ while (rset) {
+ RememberedSet *next = rset->next;
+ free_internal_mem (rset);
+ rset = next;
+ }
free (p);
}
+/*
+ * pthread_key destructor run when a registered thread exits: removes
+ * the thread's GC bookkeeping under the GC lock.  The key value K (the
+ * thread's remembered set) is unused; unregister_current_thread ()
+ * looks the calling thread up itself.
+ */
+static void
+unregister_thread (void *k)
+{
+ LOCK_GC;
+ unregister_current_thread ();
+ UNLOCK_GC;
+}
+
+/*
+ * Ensure the calling thread is known to the collector, registering it
+ * (BASEPTR is used as a stack hint) on first contact.  Returns TRUE
+ * if the thread is registered on return.
+ */
+gboolean
+mono_gc_register_thread (void *baseptr)
+{
+ SgenThreadInfo *info;
+ LOCK_GC;
+ info = thread_info_lookup (ARCH_GET_THREAD ());
+ if (info == NULL)
+ info = gc_register_current_thread (baseptr);
+ UNLOCK_GC;
+ return info != NULL;
+}
+
+#if USE_PTHREAD_INTERCEPT
+
+#undef pthread_create
+#undef pthread_join
+#undef pthread_detach
+
+typedef struct {
+ void *(*start_routine) (void *);
+ void *arg;
+ int flags;
+ sem_t registered;
+} SgenThreadStartInfo;
+
static void*
gc_start_thread (void *arg)
{
UNLOCK_GC;
sem_post (&(start_info->registered));
result = start_func (t_arg);
+ /*
+ * this is done by the pthread key dtor
LOCK_GC;
unregister_current_thread ();
UNLOCK_GC;
+ */
return result;
}
return pthread_detach (thread);
}
-gboolean
-mono_gc_register_thread (void *baseptr)
-{
- SgenThreadInfo *info;
- LOCK_GC;
- info = thread_info_lookup (pthread_self ());
- if (info == NULL)
- info = gc_register_current_thread (baseptr);
- UNLOCK_GC;
- return info != NULL;
-}
+#endif /* USE_PTHREAD_INTERCEPT */
/*
* ######################################################################
mono_gc_wbarrier_set_field (MonoObject *obj, gpointer field_ptr, MonoObject* value)
{
RememberedSet *rs;
- if (field_ptr >= nursery_start && field_ptr < nursery_real_end) {
+ if ((char*)field_ptr >= nursery_start && (char*)field_ptr < nursery_real_end) {
*(void**)field_ptr = value;
return;
}
rs = alloc_remset (rs->end_set - rs->data, (void*)1);
rs->next = remembered_set;
remembered_set = rs;
- thread_info_lookup (pthread_self())->remset = rs;
+ thread_info_lookup (ARCH_GET_THREAD ())->remset = rs;
*(rs->store_next++) = (mword)field_ptr;
*(void**)field_ptr = value;
}
mono_gc_wbarrier_set_arrayref (MonoArray *arr, gpointer slot_ptr, MonoObject* value)
{
RememberedSet *rs = remembered_set;
- if (slot_ptr >= nursery_start && slot_ptr < nursery_real_end) {
+ if ((char*)slot_ptr >= nursery_start && (char*)slot_ptr < nursery_real_end) {
*(void**)slot_ptr = value;
return;
}
rs = alloc_remset (rs->end_set - rs->data, (void*)1);
rs->next = remembered_set;
remembered_set = rs;
- thread_info_lookup (pthread_self())->remset = rs;
+ thread_info_lookup (ARCH_GET_THREAD ())->remset = rs;
*(rs->store_next++) = (mword)slot_ptr;
*(void**)slot_ptr = value;
}
mono_gc_wbarrier_arrayref_copy (MonoArray *arr, gpointer slot_ptr, int count)
{
RememberedSet *rs = remembered_set;
- if (slot_ptr >= nursery_start && slot_ptr < nursery_real_end)
+ if ((char*)slot_ptr >= nursery_start && (char*)slot_ptr < nursery_real_end)
return;
DEBUG (8, fprintf (gc_debug_file, "Adding remset at %p, %d\n", slot_ptr, count));
if (rs->store_next + 1 < rs->end_set) {
rs = alloc_remset (rs->end_set - rs->data, (void*)1);
rs->next = remembered_set;
remembered_set = rs;
- thread_info_lookup (pthread_self())->remset = rs;
+ thread_info_lookup (ARCH_GET_THREAD ())->remset = rs;
*(rs->store_next++) = (mword)slot_ptr | REMSET_RANGE;
*(rs->store_next++) = count;
}
mono_gc_wbarrier_generic_store (gpointer ptr, MonoObject* value)
{
RememberedSet *rs = remembered_set;
- if (ptr >= nursery_start && ptr < nursery_real_end) {
+ if ((char*)ptr >= nursery_start && (char*)ptr < nursery_real_end) {
DEBUG (8, fprintf (gc_debug_file, "Skipping remset at %p\n", ptr));
*(void**)ptr = value;
return;
rs = alloc_remset (rs->end_set - rs->data, (void*)1);
rs->next = remembered_set;
remembered_set = rs;
- thread_info_lookup (pthread_self())->remset = rs;
+ thread_info_lookup (ARCH_GET_THREAD ())->remset = rs;
*(rs->store_next++) = (mword)ptr;
*(void**)ptr = value;
}
void
mono_gc_wbarrier_value_copy (gpointer dest, gpointer src, int count, MonoClass *klass)
{
- if (dest >= nursery_start && dest < nursery_real_end) {
+ if ((char*)dest >= nursery_start && (char*)dest < nursery_real_end) {
return;
}
DEBUG (1, fprintf (gc_debug_file, "Adding value remset at %p, count %d for class %s\n", dest, count, klass->name));
rs = alloc_remset (rs->end_set - rs->data, (void*)1);
rs->next = remembered_set;
remembered_set = rs;
- thread_info_lookup (pthread_self())->remset = rs;
+ thread_info_lookup (ARCH_GET_THREAD ())->remset = rs;
*(rs->store_next++) = (mword)obj | REMSET_OBJECT;
}
+/*
+ * ######################################################################
+ * ######## Collector debugging
+ * ######################################################################
+ */
+
+/*
+ * Examine the remset slot at P, setting *FOUND to TRUE if it covers
+ * the address ADDR, and return a pointer to the next slot.  The slot
+ * layout mirrors handle_remset (): a single location, an address
+ * range, or a whole object whose size is rounded up to ALLOC_ALIGN.
+ */
+static mword*
+find_in_remset_loc (mword *p, char *addr, gboolean *found)
+{
+ void **ptr;
+ mword count;
+
+ switch ((*p) & REMSET_TYPE_MASK) {
+ case REMSET_LOCATION:
+ if (*p == (mword)addr)
+ *found = TRUE;
+ return p + 1;
+ case REMSET_RANGE:
+ ptr = (void**)(*p & ~REMSET_TYPE_MASK);
+ count = p [1];
+ if ((void**)addr >= ptr && (void**)addr < ptr + count)
+ *found = TRUE;
+ return p + 2;
+ case REMSET_OBJECT:
+ ptr = (void**)(*p & ~REMSET_TYPE_MASK);
+ count = safe_object_get_size ((MonoObject*)ptr);
+ /* round the object size up to ALLOC_ALIGN; masking with
+ * (ALLOC_ALIGN - 1) instead of its complement would keep only
+ * the low bits and miss almost every address in the object */
+ count += (ALLOC_ALIGN - 1);
+ count &= ~(ALLOC_ALIGN - 1);
+ count /= sizeof (mword);
+ if ((void**)addr >= ptr && (void**)addr < ptr + count)
+ *found = TRUE;
+ return p + 1;
+ default:
+ g_assert_not_reached ();
+ }
+ return NULL;
+}
+
+/*
+ * Return whether ADDR occurs in any of the remembered sets: the
+ * global set is searched first, then each registered thread's list.
+ * Used only by the heap consistency checker.
+ */
+static gboolean
+find_in_remsets (char *addr)
+{
+ int i;
+ SgenThreadInfo *info;
+ RememberedSet *remset;
+ mword *p;
+ gboolean found = FALSE;
+
+ /* the global one */
+ for (remset = global_remset; remset; remset = remset->next) {
+ DEBUG (4, fprintf (gc_debug_file, "Scanning global remset range: %p-%p, size: %zd\n", remset->data, remset->store_next, remset->store_next - remset->data));
+ for (p = remset->data; p < remset->store_next;) {
+ p = find_in_remset_loc (p, addr, &found);
+ if (found)
+ return TRUE;
+ }
+ }
+ /* the per-thread ones */
+ for (i = 0; i < THREAD_HASH_SIZE; ++i) {
+ for (info = thread_table [i]; info; info = info->next) {
+ for (remset = info->remset; remset; remset = remset->next) {
+ DEBUG (4, fprintf (gc_debug_file, "Scanning remset for thread %p, range: %p-%p, size: %zd\n", info, remset->data, remset->store_next, remset->store_next - remset->data));
+ for (p = remset->data; p < remset->store_next;) {
+ p = find_in_remset_loc (p, addr, &found);
+ if (found)
+ return TRUE;
+ }
+ }
+ }
+ }
+
+ return FALSE;
+}
+
+/*
+ * Redefine HANDLE_PTR for the consistency check: for every pointer
+ * slot PTR of OBJ that points into the nursery, assert that the slot
+ * is covered by some remembered-set entry; otherwise report the
+ * unrecorded oldspace->newspace reference and abort.
+ */
+#undef HANDLE_PTR
+#define HANDLE_PTR(ptr,obj) do { \
+ if (*(ptr) && (char*)*(ptr) >= nursery_start && (char*)*(ptr) < nursery_next) { \
+ if (!find_in_remsets ((char*)(ptr))) { \
+ fprintf (gc_debug_file, "Oldspace->newspace reference %p at offset %zd in object %p (%s.%s) not found in remsets.\n", *(ptr), (char*)(ptr) - (char*)(obj), (obj), ((MonoObject*)(obj))->vtable->klass->name_space, ((MonoObject*)(obj))->vtable->klass->name); \
+ g_assert_not_reached (); \
+ } \
+ } \
+ } while (0)
+
+/*
+ * Check that each object reference inside the area which points into the nursery
+ * can be found in the remembered sets.
+ */
+static void __attribute__((noinline))
+check_remsets_for_area (char *start, char *end)
+{
+ GCVTable *vt;
+ size_t skip_size;
+ int type;
+ /* NOTE(review): the per-type counters below are only incremented and
+ * never read here; presumably kept for inspection under a debugger */
+ int type_str = 0, type_rlen = 0, type_bitmap = 0, type_vector = 0, type_lbit = 0, type_complex = 0;
+ new_obj_references = 0;
+ obj_references_checked = 0;
+ /* walk the area object by object, dispatching on the low GC
+ * descriptor bits to compute each object's size and to visit its
+ * pointer fields; each visited field goes through the checking
+ * HANDLE_PTR defined above */
+ while (start < end) {
+ if (!*(void**)start) {
+ start += sizeof (void*); /* should be ALLOC_ALIGN, really */
+ continue;
+ }
+ vt = (GCVTable*)LOAD_VTABLE (start);
+ DEBUG (8, fprintf (gc_debug_file, "Scanning object %p, vtable: %p (%s)\n", start, vt, vt->klass->name));
+ if (0) {
+ /* disabled debug dump of every object encountered */
+ MonoObject *obj = (MonoObject*)start;
+ g_print ("found at %p (0x%lx): %s.%s\n", start, (long)vt->desc, obj->vtable->klass->name_space, obj->vtable->klass->name);
+ }
+ type = vt->desc & 0x7;
+ if (type == DESC_TYPE_STRING) {
+ STRING_SIZE (skip_size, start);
+ start += skip_size;
+ type_str++;
+ continue;
+ } else if (type == DESC_TYPE_RUN_LENGTH) {
+ OBJ_RUN_LEN_SIZE (skip_size, vt, start);
+ g_assert (skip_size);
+ OBJ_RUN_LEN_FOREACH_PTR (vt,start);
+ start += skip_size;
+ type_rlen++;
+ continue;
+ } else if (type == DESC_TYPE_VECTOR) { // includes ARRAY, too
+ skip_size = (vt->desc >> LOW_TYPE_BITS) & MAX_ELEMENT_SIZE;
+ skip_size *= mono_array_length ((MonoArray*)start);
+ skip_size += sizeof (MonoArray);
+ skip_size += (ALLOC_ALIGN - 1);
+ skip_size &= ~(ALLOC_ALIGN - 1);
+ OBJ_VECTOR_FOREACH_PTR (vt, start);
+ if (((MonoArray*)start)->bounds) {
+ /* account for the bounds */
+ skip_size += sizeof (MonoArrayBounds) * vt->klass->rank;
+ }
+ start += skip_size;
+ type_vector++;
+ continue;
+ } else if (type == DESC_TYPE_SMALL_BITMAP) {
+ OBJ_BITMAP_SIZE (skip_size, vt, start);
+ g_assert (skip_size);
+ OBJ_BITMAP_FOREACH_PTR (vt,start);
+ start += skip_size;
+ type_bitmap++;
+ continue;
+ } else if (type == DESC_TYPE_LARGE_BITMAP) {
+ skip_size = safe_object_get_size ((MonoObject*)start);
+ skip_size += (ALLOC_ALIGN - 1);
+ skip_size &= ~(ALLOC_ALIGN - 1);
+ OBJ_LARGE_BITMAP_FOREACH_PTR (vt,start);
+ start += skip_size;
+ type_lbit++;
+ continue;
+ } else if (type == DESC_TYPE_COMPLEX) {
+ /* this is a complex object */
+ skip_size = safe_object_get_size ((MonoObject*)start);
+ skip_size += (ALLOC_ALIGN - 1);
+ skip_size &= ~(ALLOC_ALIGN - 1);
+ OBJ_COMPLEX_FOREACH_PTR (vt, start);
+ start += skip_size;
+ type_complex++;
+ continue;
+ } else if (type == DESC_TYPE_COMPLEX_ARR) {
+ /* this is an array of complex structs */
+ skip_size = mono_array_element_size (((MonoVTable*)vt)->klass);
+ skip_size *= mono_array_length ((MonoArray*)start);
+ skip_size += sizeof (MonoArray);
+ skip_size += (ALLOC_ALIGN - 1);
+ skip_size &= ~(ALLOC_ALIGN - 1);
+ OBJ_COMPLEX_ARR_FOREACH_PTR (vt, start);
+ if (((MonoArray*)start)->bounds) {
+ /* account for the bounds */
+ skip_size += sizeof (MonoArrayBounds) * vt->klass->rank;
+ }
+ start += skip_size;
+ type_complex++;
+ continue;
+ } else {
+ g_assert (0);
+ }
+ }
+}
+
+/*
+ * Heap consistency check: verify that every recorded old->new space
+ * reference is covered by a remembered set.  The world must already
+ * be stopped when this runs.
+ */
+void
+check_consistency (void)
+{
+ GCMemSection *sect;
+
+ /* More checks could be added here; ideally this would use a general
+ * heap-enumeration helper once one exists (FIXME). */
+
+ DEBUG (1, fprintf (gc_debug_file, "Begin heap consistency check...\n"));
+
+ /* walk every non-nursery section and validate its references */
+ for (sect = section_list; sect; sect = sect->next) {
+ if (sect->role != MEMORY_ROLE_GEN0) {
+ DEBUG (2, fprintf (gc_debug_file, "Scan of old section: %p-%p, size: %d\n", sect->data, sect->next_data, (int)(sect->next_data - sect->data)));
+ check_remsets_for_area (sect->data, sect->next_data);
+ }
+ }
+
+ DEBUG (1, fprintf (gc_debug_file, "Heap consistency check done.\n"));
+}
+
/*
* ######################################################################
* ######## Other mono public interface functions.
return num_major_gcs;
}
-int
-mono_gc_get_generation (MonoObject *object)
-{
- /* FIXME */
- return 0;
-}
-
gint64
mono_gc_get_used_size (void)
{
{
gboolean result;
LOCK_GC;
- result = thread_info_lookup (pthread_self ()) != NULL;
+ result = thread_info_lookup (ARCH_GET_THREAD ()) != NULL;
UNLOCK_GC;
return result;
}
char *env;
struct sigaction sinfo;
+ LOCK_INIT (gc_mutex);
LOCK_GC;
if (gc_initialized) {
UNLOCK_GC;
return;
}
- gc_initialized = TRUE;
+ pagesize = mono_pagesize ();
gc_debug_file = stderr;
/* format: MONO_GC_DEBUG=l[,filename] where l is a debug level 0-9 */
if ((env = getenv ("MONO_GC_DEBUG"))) {
global_remset = alloc_remset (1024, NULL);
global_remset->next = NULL;
+ pthread_key_create (&remembered_set_key, unregister_thread);
+ gc_initialized = TRUE;
UNLOCK_GC;
mono_gc_register_thread (&sinfo);
}
+/*
+ * SGen provides no managed (inlined) allocator yet: return NULL so
+ * callers fall back to the C allocation path.
+ */
+MonoMethod*
+mono_gc_get_managed_allocator (MonoVTable *vtable, gboolean for_box)
+{
+ return NULL;
+}
+
+/*
+ * No managed allocators exist for SGen yet, so no method can be one:
+ * -1 means "not a managed allocator".
+ */
+int
+mono_gc_get_managed_allocator_type (MonoMethod *managed_alloc)
+{
+ return -1;
+}
+
+/*
+ * Lookup by allocator type: always NULL while SGen has no managed
+ * allocator support.
+ */
+MonoMethod*
+mono_gc_get_managed_allocator_by_type (int atype)
+{
+ return NULL;
+}
+
#endif /* HAVE_SGEN_GC */