#define ALIGN_UP SGEN_ALIGN_UP
#define ALLOC_ALIGN SGEN_ALLOC_ALIGN
-#define ALLOC_ALIGN_BITS SGEN_ALLOC_ALIGN_BITS
#define MAX_SMALL_OBJ_SIZE SGEN_MAX_SMALL_OBJ_SIZE
#define ALIGN_TO(val,align) ((((guint64)val) + ((align) - 1)) & ~((align) - 1))
static gboolean use_managed_allocator = TRUE;
#ifdef HEAVY_STATISTICS
-static long long stat_objects_alloced = 0;
-static long long stat_bytes_alloced = 0;
-static long long stat_bytes_alloced_los = 0;
+static guint64 stat_objects_alloced = 0;
+static guint64 stat_bytes_alloced = 0;
+static guint64 stat_bytes_alloced_los = 0;
#endif
static __thread char *tlab_temp_end;
static __thread char *tlab_real_end;
/* Used by the managed allocator/wbarrier */
-static __thread char **tlab_next_addr;
+static __thread char **tlab_next_addr MONO_ATTR_USED;
#endif
#ifdef HAVE_KW_THREAD
/* FIXME: handle OOM */
void **p;
char *new_next;
- TLAB_ACCESS_INIT;
size_t real_size = size;
+ TLAB_ACCESS_INIT;
CANARIFY_SIZE(size);
available_in_tlab = (int)(TLAB_REAL_END - TLAB_NEXT);//We'll never have tlabs > 2Gb
if (size > tlab_size || available_in_tlab > SGEN_MAX_NURSERY_WASTE) {
/* Allocate directly from the nursery */
- do {
- p = sgen_nursery_alloc (size);
- if (!p) {
- sgen_ensure_free_space (real_size);
- if (degraded_mode)
- return alloc_degraded (vtable, size, FALSE);
- else
- p = sgen_nursery_alloc (size);
- }
- } while (!p);
+ p = sgen_nursery_alloc (size);
if (!p) {
- // no space left
- g_assert (0);
+ /*
+ * We couldn't allocate from the nursery, so we try
+ * collecting. Even after the collection, we might
+ * still not have enough memory to allocate the
+ * object. The reason will most likely be that we've
+ * run out of memory, but there is the theoretical
+ * possibility that other threads might have consumed
+ * the freed up memory ahead of us.
+ *
+ * What we do in this case is allocate degraded, i.e.,
+ * from the major heap.
+ *
+ * Ideally we'd like to detect the case of other
+ * threads allocating ahead of us and loop (if we
+ * always loop we will loop endlessly in the case of
+ * OOM).
+ */
+ sgen_ensure_free_space (real_size);
+ if (!degraded_mode)
+ p = sgen_nursery_alloc (size);
}
+ if (!p)
+ return alloc_degraded (vtable, size, FALSE);
zero_tlab_if_necessary (p, size);
} else {
SGEN_LOG (3, "Retire TLAB: %p-%p [%ld]", TLAB_START, TLAB_REAL_END, (long)(TLAB_REAL_END - TLAB_NEXT - size));
sgen_nursery_retire_region (p, available_in_tlab);
- do {
- p = sgen_nursery_alloc_range (tlab_size, size, &alloc_size);
- if (!p) {
- sgen_ensure_free_space (tlab_size);
- if (degraded_mode)
- return alloc_degraded (vtable, size, FALSE);
- else
- p = sgen_nursery_alloc_range (tlab_size, size, &alloc_size);
- }
- } while (!p);
-
+ p = sgen_nursery_alloc_range (tlab_size, size, &alloc_size);
if (!p) {
- // no space left
- g_assert (0);
+ /* See comment above in similar case. */
+ sgen_ensure_free_space (tlab_size);
+ if (!degraded_mode)
+ p = sgen_nursery_alloc_range (tlab_size, size, &alloc_size);
}
+ if (!p)
+ return alloc_degraded (vtable, size, FALSE);
/* Allocate a new TLAB from the current nursery fragment */
TLAB_START = (char*)p;
{
void **p;
char *new_next;
- TLAB_ACCESS_INIT;
size_t real_size = size;
+ TLAB_ACCESS_INIT;
CANARIFY_SIZE(size);
mono_mb_emit_byte (mb, CEE_STIND_I);
/*The tlab store must be visible before the vtable store. This could be replaced with a DDS but doing it with IL would be tricky. */
- mono_mb_emit_byte ((mb), MONO_CUSTOM_PREFIX);
- mono_mb_emit_op (mb, CEE_MONO_MEMORY_BARRIER, (gpointer)StoreStoreBarrier);
+ mono_mb_emit_byte (mb, MONO_CUSTOM_PREFIX);
+ mono_mb_emit_byte (mb, CEE_MONO_MEMORY_BARRIER);
+ mono_mb_emit_i4 (mb, MONO_MEMORY_BARRIER_REL);
/* *p = vtable; */
mono_mb_emit_ldloc (mb, p_var);
	/*
	We must make sure both vtable and max_length are globally visible before returning to managed land.
	*/
- mono_mb_emit_byte ((mb), MONO_CUSTOM_PREFIX);
- mono_mb_emit_op (mb, CEE_MONO_MEMORY_BARRIER, (gpointer)StoreStoreBarrier);
+ mono_mb_emit_byte (mb, MONO_CUSTOM_PREFIX);
+ mono_mb_emit_byte (mb, CEE_MONO_MEMORY_BARRIER);
+ mono_mb_emit_i4 (mb, MONO_MEMORY_BARRIER_REL);
/* return p */
mono_mb_emit_ldloc (mb, p_var);
void
sgen_alloc_init_heavy_stats (void)
{
- mono_counters_register ("# objects allocated", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_objects_alloced);
- mono_counters_register ("bytes allocated", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_bytes_alloced);
- mono_counters_register ("bytes allocated in LOS", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_bytes_alloced_los);
+ mono_counters_register ("# objects allocated", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_objects_alloced);
+ mono_counters_register ("bytes allocated", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_bytes_alloced);
+ mono_counters_register ("bytes allocated in LOS", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_bytes_alloced_los);
}
#endif