p = sgen_los_alloc_large_inner (vtable, size);
} else {
SGEN_ASSERT (9, vtable->klass->inited, "class %s:%s is not initialized", vtable->klass->name_space, vtable->klass->name);
- p = major_collector.alloc_small_pinned_obj (size, SGEN_VTABLE_HAS_REFERENCES (vtable));
+ p = major_collector.alloc_small_pinned_obj (vtable, size, SGEN_VTABLE_HAS_REFERENCES (vtable));
}
if (G_LIKELY (p)) {
SGEN_LOG (6, "Allocated pinned object %p, vtable: %p (%s), size: %zd", p, vtable, vtable->klass->name, size);
else
MONO_GC_MAJOR_OBJ_ALLOC_PINNED ((mword)p, size, vtable->klass->name_space, vtable->klass->name);
binary_protocol_alloc_pinned (p, vtable, size);
- mono_atomic_store_seq (p, vtable);
}
UNLOCK_GC;
return p;
size_t size = ALIGN_UP (vtable->klass->instance_size);
LOCK_GC;
res = alloc_degraded (vtable, size, TRUE);
- mono_atomic_store_seq (res, vtable);
UNLOCK_GC;
if (G_UNLIKELY (vtable->klass->has_finalize))
mono_object_register_finalizer ((MonoObject*)res);
MonoVTable *vt = ((MonoObject*)obj)->vtable;
gboolean has_references = SGEN_VTABLE_HAS_REFERENCES (vt);
mword objsize = SGEN_ALIGN_UP (sgen_par_object_get_size (vt, (MonoObject*)obj));
- char *destination = COLLECTOR_SERIAL_ALLOC_FOR_PROMOTION (obj, objsize, has_references);
+ char *destination = COLLECTOR_SERIAL_ALLOC_FOR_PROMOTION (vt, obj, objsize, has_references);
if (G_UNLIKELY (!destination)) {
collector_pin_object (obj, queue);
return obj;
}
- *(MonoVTable**)destination = vt;
par_copy_object_no_checks (destination, vt, obj, objsize, has_references ? queue : NULL);
+ /* FIXME: mark mod union cards if necessary */
/* set the forwarding pointer */
SGEN_FORWARD_OBJECT (obj, destination);
}
typedef struct {
- char* (*alloc_for_promotion) (char *obj, size_t objsize, gboolean has_references);
- char* (*par_alloc_for_promotion) (char *obj, size_t objsize, gboolean has_references);
+ char* (*alloc_for_promotion) (MonoVTable *vtable, char *obj, size_t objsize, gboolean has_references);
+ char* (*par_alloc_for_promotion) (MonoVTable *vtable, char *obj, size_t objsize, gboolean has_references);
SgenObjectOperations serial_ops;
SgenObjectOperations parallel_ops;
void* (*alloc_heap) (mword nursery_size, mword nursery_align, int nursery_bits);
gboolean (*is_object_live) (char *obj);
- void* (*alloc_small_pinned_obj) (size_t size, gboolean has_references);
+ void* (*alloc_small_pinned_obj) (MonoVTable *vtable, size_t size, gboolean has_references);
void* (*alloc_degraded) (MonoVTable *vtable, size_t size);
SgenObjectOperations major_ops;
- void* (*alloc_object) (int size, gboolean has_references);
- void* (*par_alloc_object) (int size, gboolean has_references);
+ void* (*alloc_object) (MonoVTable *vtable, int size, gboolean has_references);
+ void* (*par_alloc_object) (MonoVTable *vtable, int size, gboolean has_references);
void (*free_pinned_object) (char *obj, size_t size);
void (*iterate_objects) (gboolean non_pinned, gboolean pinned, IterateObjectCallbackFunc callback, void *data);
void (*free_non_pinned_object) (char *obj, size_t size);
}
static void*
-major_alloc_object (int size, gboolean has_references)
+major_alloc_object (MonoVTable *vtable, int size, gboolean has_references)
{
char *dest = to_space_bumper;
/* Make sure we have enough space available */
to_space_bumper += size;
SGEN_ASSERT (8, to_space_bumper <= to_space_top, "to-space-bumper %p overflow to-space-top %p", to_space_bumper, to_space_top);
to_space_section->scan_starts [(dest - (char*)to_space_section->data)/SGEN_SCAN_START_SIZE] = dest;
+ /* FIXME: write vtable */
+ g_assert_not_reached ();
return dest;
}
/* size is a multiple of ALLOC_ALIGN */
static void*
-major_alloc_small_pinned_obj (size_t size, gboolean has_references)
+major_alloc_small_pinned_obj (MonoVTable *vtable, size_t size, gboolean has_references)
{
+ /* FIXME: write vtable */
+ g_assert_not_reached ();
return sgen_alloc_pinned (&pinned_allocator, size);
}
}
static void*
-alloc_obj_par (int size, gboolean pinned, gboolean has_references)
+alloc_obj_par (MonoVTable *vtable, int size, gboolean pinned, gboolean has_references)
{
int size_index = MS_BLOCK_OBJ_SIZE_INDEX (size);
MSBlockInfo **free_blocks_local = FREE_BLOCKS_LOCAL (pinned, has_references);
}
}
- /*
- * FIXME: This should not be necessary because it'll be
- * overwritten by the vtable immediately.
- */
- *(void**)obj = NULL;
+ *(MonoVTable**)obj = vtable;
#ifdef SGEN_CONCURRENT_MARK
g_assert_not_reached ();
}
static void*
-major_par_alloc_object (int size, gboolean has_references)
+major_par_alloc_object (MonoVTable *vtable, int size, gboolean has_references)
{
- return alloc_obj_par (size, FALSE, has_references);
+ return alloc_obj_par (vtable, size, FALSE, has_references);
}
#endif
static void*
-alloc_obj (int size, gboolean pinned, gboolean has_references)
+alloc_obj (MonoVTable *vtable, int size, gboolean pinned, gboolean has_references)
{
int size_index = MS_BLOCK_OBJ_SIZE_INDEX (size);
MSBlockInfo **free_blocks = FREE_BLOCKS (pinned, has_references);
obj = unlink_slot_from_free_list_uncontested (free_blocks, size_index);
- /*
- * FIXME: This should not be necessary because it'll be
- * overwritten by the vtable immediately.
- */
- *(void**)obj = NULL;
+ *(MonoVTable**)obj = vtable;
#ifdef SGEN_CONCURRENT_MARK
if (obj && sgen_remember_major_object_for_concurrent_mark (obj)) {
}
static void*
-major_alloc_object (int size, gboolean has_references)
+major_alloc_object (MonoVTable *vtable, int size, gboolean has_references)
{
- return alloc_obj (size, FALSE, has_references);
+ return alloc_obj (vtable, size, FALSE, has_references);
}
/*
/* size is a multiple of SGEN_ALLOC_ALIGN */
static void*
-major_alloc_small_pinned_obj (size_t size, gboolean has_references)
+major_alloc_small_pinned_obj (MonoVTable *vtable, size_t size, gboolean has_references)
{
void *res;
ms_wait_for_sweep_done ();
- res = alloc_obj (size, TRUE, has_references);
+ res = alloc_obj (vtable, size, TRUE, has_references);
/* If we failed to allocate memory, try releasing some,
 * since a pinned allocation was requested by the runtime.
 */
if (!res) {
sgen_perform_collection (0, GENERATION_OLD, "pinned alloc failure", TRUE);
- res = alloc_obj (size, TRUE, has_references);
+ res = alloc_obj (vtable, size, TRUE, has_references);
}
return res;
}
old_num_sections = num_major_sections;
- obj = alloc_obj (size, FALSE, SGEN_VTABLE_HAS_REFERENCES (vtable));
+ obj = alloc_obj (vtable, size, FALSE, SGEN_VTABLE_HAS_REFERENCES (vtable));
if (G_LIKELY (obj)) {
- *(MonoVTable**)obj = vtable;
HEAVY_STAT (++stat_objects_alloced_degraded);
HEAVY_STAT (stat_bytes_alloced_degraded += size);
g_assert (num_major_sections >= old_num_sections);
objsize = SGEN_ALIGN_UP (sgen_par_object_get_size (vt, (MonoObject*)obj));
has_references = SGEN_VTABLE_HAS_REFERENCES (vt);
- destination = sgen_minor_collector.par_alloc_for_promotion (obj, objsize, has_references);
+ destination = sgen_minor_collector.par_alloc_for_promotion (vt, obj, objsize, has_references);
if (G_UNLIKELY (!destination)) {
if (!sgen_ptr_in_nursery (obj)) {
int size_index;
return;
}
- /*
- * We do this before the CAS because we want to make
- * sure that if another thread sees the destination
- * pointer the VTable is already in place. Not doing
- * this can crash binary protocols.
- */
- *(MonoVTable**)destination = vt;
-
if (SGEN_CAS_PTR (obj, (void*)((mword)destination | SGEN_FORWARDED_BIT), vt) == vt) {
gboolean was_marked;
objsize = SGEN_ALIGN_UP (sgen_par_object_get_size (vt, (MonoObject*)obj));
has_references = SGEN_VTABLE_HAS_REFERENCES (vt);
- destination = COLLECTOR_PARALLEL_ALLOC_FOR_PROMOTION (obj, objsize, has_references);
+ destination = COLLECTOR_PARALLEL_ALLOC_FOR_PROMOTION (vt, obj, objsize, has_references);
if (G_UNLIKELY (!destination)) {
sgen_parallel_pin_or_update (obj_slot, obj, vt, queue);
#include "metadata/sgen-protocol.h"
static inline char*
-alloc_for_promotion (char *obj, size_t objsize, gboolean has_references)
+alloc_for_promotion (MonoVTable *vtable, char *obj, size_t objsize, gboolean has_references)
{
- return major_collector.alloc_object (objsize, has_references);
+ return major_collector.alloc_object (vtable, objsize, has_references);
}
static inline char*
-par_alloc_for_promotion (char *obj, size_t objsize, gboolean has_references)
+par_alloc_for_promotion (MonoVTable *vtable, char *obj, size_t objsize, gboolean has_references)
{
- return major_collector.par_alloc_object (objsize, has_references);
+ return major_collector.par_alloc_object (vtable, objsize, has_references);
}
static SgenFragment*
}
static inline char*
-alloc_for_promotion (char *obj, size_t objsize, gboolean has_references)
+alloc_for_promotion (MonoVTable *vtable, char *obj, size_t objsize, gboolean has_references)
{
char *p = NULL;
int age;
age = get_object_age (obj);
if (age >= promote_age)
- return major_collector.alloc_object (objsize, has_references);
+ return major_collector.alloc_object (vtable, objsize, has_references);
/* Promote! */
++age;
} else {
p = alloc_for_promotion_slow_path (age, objsize);
if (!p)
- p = major_collector.alloc_object (objsize, has_references);
+ return major_collector.alloc_object (vtable, objsize, has_references);
}
+ *(MonoVTable**)p = vtable;
+
return p;
}
}
static inline char*
-par_alloc_for_promotion (char *obj, size_t objsize, gboolean has_references)
+par_alloc_for_promotion (MonoVTable *vtable, char *obj, size_t objsize, gboolean has_references)
{
char *p;
int age;
age = get_object_age (obj);
if (age >= promote_age)
- return major_collector.par_alloc_object (objsize, has_references);
+ return major_collector.par_alloc_object (vtable, objsize, has_references);
restart:
p = age_alloc_buffers [age].next;
/* Have we failed to promote to the nursery, lets just evacuate it to old gen. */
if (!p)
- p = major_collector.par_alloc_object (objsize, has_references);
+ return major_collector.par_alloc_object (vtable, objsize, has_references);
}
+ *(MonoVTable**)p = vtable;
+
return p;
}
static char*
-minor_alloc_for_promotion (char *obj, size_t objsize, gboolean has_references)
+minor_alloc_for_promotion (MonoVTable *vtable, char *obj, size_t objsize, gboolean has_references)
{
/*
We only need to check for a non-nursery object if we're doing a major collection.
*/
if (!sgen_ptr_in_nursery (obj))
- return major_collector.alloc_object (objsize, has_references);
+ return major_collector.alloc_object (vtable, objsize, has_references);
- return alloc_for_promotion (obj, objsize, has_references);
+ return alloc_for_promotion (vtable, obj, objsize, has_references);
}
static char*
-minor_par_alloc_for_promotion (char *obj, size_t objsize, gboolean has_references)
+minor_par_alloc_for_promotion (MonoVTable *vtable, char *obj, size_t objsize, gboolean has_references)
{
/*
We only need to check for a non-nursery object if we're doing a major collection.
*/
if (!sgen_ptr_in_nursery (obj))
- return major_collector.par_alloc_object (objsize, has_references);
+ return major_collector.par_alloc_object (vtable, objsize, has_references);
- return par_alloc_for_promotion (obj, objsize, has_references);
+ return par_alloc_for_promotion (vtable, obj, objsize, has_references);
}
static SgenFragment*
concurrent_enqueue_check (SgenGrayQueue *queue, char *obj)
{
g_assert (!sgen_ptr_in_nursery (obj));
+ g_assert (SGEN_LOAD_VTABLE (obj));
}
static void