#include "../callargs.h"
#include "../threads/thread.h"
#include "../threads/locks.h"
+#include "../sysdep/threads.h"
#include <assert.h>
+#include "lifespan.h"
#include "mm.h"
#undef ALIGN
#undef OFFSET
+/* Tunable policy for when the next garbage collection is triggered.
+   HEURISTIC_SEL selects one of the next_collection_heuristic() variants
+   below; HEURISTIC_PARAM is the shift amount used by variants 0 and 1,
+   or a fixed byte count for variant 2. */
+#define HEURISTIC_SEL 0
+#define HEURISTIC_PARAM 2UL
+
+
+/* First collection threshold (used by heap_init): heap_top plus 1/16
+   of the remaining free space (heap_limit - heap_top). */
+#define next_collection_heuristic_init() \
+ (void*)((long)heap_top + (((long)heap_limit - (long)heap_top) >> 4))
+
+#if HEURISTIC_SEL == 0
+/* variant 0: advance by free space (heap_limit - heap_top) >> HEURISTIC_PARAM */
+#define next_collection_heuristic() \
+ (void*)((long)heap_top + (((long)heap_limit - (long)heap_top) >> HEURISTIC_PARAM))
+#elif HEURISTIC_SEL == 1
+/* variant 1: advance by used space (heap_top - heap_base) << HEURISTIC_PARAM */
+#define next_collection_heuristic() \
+ (void*)((long)heap_top + (((long)heap_top - (long)heap_base) << HEURISTIC_PARAM))
+#elif HEURISTIC_SEL == 2
+/* variant 2: advance by a fixed amount of HEURISTIC_PARAM bytes */
+#define next_collection_heuristic() \
+ (void*)((long)heap_top + HEURISTIC_PARAM)
+#endif
+
+/* Optional instrumentation / experimental features, all off by default:
+   PSEUDO_GENERATIONAL  -- treat the area below generation_start as an older
+                           generation during gc_reclaim
+   COLLECT_LIFESPAN / NEW_COLLECT_LIFESPAN -- emit object lifespan trace data
+   COLLECT_FRAGMENTATION -- log fragmentation statistics after each collection
+   GC_COLLECT_STATISTICS -- count allocations, collections and mark-phase events
+   FINALIZER_COUNTING    -- count detected and executed finalizers */
+//#define PSEUDO_GENERATIONAL
+//#define COLLECT_LIFESPAN
+//#define NEW_COLLECT_LIFESPAN
+//#define COLLECT_FRAGMENTATION
+//#define GC_COLLECT_STATISTICS
+//#define FINALIZER_COUNTING
+
+/* keep the GC's own bookkeeping (reference/finalizer lists) off the
+   collected heap unless explicitly enabled */
+#undef STRUCTURES_ON_HEAP
+//#define STRUCTURES_ON_HEAP
+
#define false 0
#define true 1
#define align_size(size) ((size) & ~((1 << ALIGN) - 1))
#define MAP_ADDRESS (void*) 0x10000000
-#define VERBOSITY_MESSAGE 1
-#define VERBOSITY_DEBUG 2
-#define VERBOSITY_MISTRUST 3
-#define VERBOSITY_TRACE 4
-#define VERBOSITY_PARANOIA 5
-#define VERBOSITY_LIFESPAN 6
-
-//#define VERBOSITY VERBOSITY_MESSAGE
-//#define VERBOSITY VERBOSITY_PARANOIA
-#define VERBOSITY VERBOSITY_LIFESPAN
-
/* --- file-wide variables */
static void* heap_base = NULL;
typedef struct address_list_node {
void* address;
+ struct address_list_node* prev;
struct address_list_node* next;
} address_list_node;
static address_list_node* references = NULL;
static address_list_node* finalizers = NULL;
+#ifdef GC_COLLECT_STATISTICS
+
+/* number of garbage collections performed so far (incremented in gc_run) */
+static unsigned long gc_collections_count = 0;
+
+/* total bytes and number of objects handed out by heap_allocate */
+static unsigned long gc_alloc_total = 0;
+static unsigned long gc_alloc_count = 0;
+
+/* mark-phase event counters, maintained by gc_mark_object_at */
+static unsigned long gc_mark_heapblocks_visited = 0;
+static unsigned long gc_mark_not_aligned = 0;
+static unsigned long gc_mark_not_inheap = 0;
+static unsigned long gc_mark_not_object = 0;
+static unsigned long gc_mark_objects_marked = 0;
+static unsigned long gc_mark_already_marked = 0;
+
+#endif
+
+#ifdef FINALIZER_COUNTING
+
+/* finalizers actually run vs. objects registered with a finalizer */
+static unsigned long gc_finalizers_executed = 0;
+static unsigned long gc_finalizers_detected = 0;
+
+#endif
+
+#ifdef USE_THREADS
+/* serializes heap_allocate when running with threads */
+static iMux alloc_mutex;
+#endif
+
+#ifdef COLLECT_LIFESPAN
+/* lifespan trace output stream (plain file or gzip pipe, see heap_init) */
+static FILE* tracefile;
+#endif
+
+#ifdef COLLECT_FRAGMENTATION
+/* per-collection fragmentation log and free-block size dump streams */
+static FILE* fragfile;
+static FILE* fragsizefile;
+#endif
+
/* --- implementation */
void
SIZE startsize, /* when should we collect for the first time ? */
void **in_stackbottom)
{
-#if VERBOSITY == VERBOSITY_MESSAGE
- fprintf(stderr, "The heap is verbose.\n");
-#elif VERBOSITY == VERBOSITY_DEBUG
- fprintf(stderr, "The heap is in debug mode.\n");
-#elif VERBOSITY == VERBOSITY_MISTRUST
- fprintf(stderr, "The heap is mistrusting us.\n");
-#elif VERBOSITY == VERBOSITY_TRACE
- fprintf(stderr, "The heap is outputting trace information.\n");
-#elif VERBOSITY == VERBOSITY_PARANOIA
- fprintf(stderr, "The heap is paranoid.\n");
-#elif VERBOSITY == VERBOSITY_LIFESPAN
- fprintf(stderr, "The heap is collecting lifespan information.\n");
-#endif
-
/* 1. Initialise the freelists & reset the allocator's state */
allocator_init();
/* 2. Allocate at least (alignment!) size bytes of memory for the heap */
heap_size = align_size(size + ((1 << ALIGN) - 1));
-#if 1
- /* For now, we'll try to map in the stack at a fixed address...
- ...this will change soon. -- phil. */
- heap_base = (void*) mmap (MAP_ADDRESS,
+#ifdef DONT_MMAP
+ heap_base = malloc(heap_size);
+#else
+ heap_base = (void*) mmap (NULL,
((size_t)heap_size + PAGESIZE_MINUS_ONE) & ~PAGESIZE_MINUS_ONE,
PROT_READ | PROT_WRITE,
- MAP_PRIVATE | MAP_ANONYMOUS | MAP_FIXED,
+ MAP_PRIVATE | MAP_ANONYMOUS,
-1,
(off_t) 0);
-#else
- heap_base = malloc(heap_size);
#endif
if (heap_base == (void*)MAP_FAILED) {
/* unable to allocate the requested amount of memory */
- fprintf(stderr, "The queen, mylord, is dead!\n");
+ fprintf(stderr, "heap2.c: The queen, mylord, is dead! (mmap failed)\n");
exit(-1);
}
/* 6. calculate a useful first collection limit */
/* This is extremly primitive at this point...
we should replace it with something more useful -- phil. */
- heap_next_collection = (void*)((long)heap_base + (heap_size / 8));
+ heap_next_collection = next_collection_heuristic_init();
/* 7. Init the global reference lists & finalizer addresses */
references = NULL;
finalizers = NULL;
+
+#ifdef STRUCTURES_ON_HEAP
+ heap_addreference(&references);
+ heap_addreference(&finalizers);
+#endif
+
+#ifdef USE_THREADS
+ /* 8. Init the mutexes for synchronization */
+ alloc_mutex.holder = 0;
+#endif
+
+ /* 9. Set up collection of lifespan data */
+#ifdef COLLECT_LIFESPAN
+#if 0
+ tracefile = fopen("heap.trace", "w");
+#else
+ tracefile = popen("gzip -9 >heap.trace.gz", "w");
+#endif
+ if (!tracefile) {
+ fprintf(stderr, "heap2.c: Radio Ga Ga! (fopen failed)\n");
+ exit(-2);
+ }
+
+ fprintf(tracefile, "heap_base\t0x%lx\n", heap_base);
+ fprintf(tracefile, "heap_limit\t0x%lx\n", heap_limit);
+ fprintf(tracefile, "heap_top\t0x%lx\n", heap_top);
+#endif
+
+#ifdef NEW_COLLECT_LIFESPAN
+ lifespan_init(heap_base, heap_size);
+#endif
+
+ /* 10. Set up collection of fragmentation data */
+#ifdef COLLECT_FRAGMENTATION
+ fragfile = popen("gzip -9 >fragmentation.gz", "w");
+ fragsizefile = popen("gzip -9 >freeblocks.gz", "w");
+#endif
+}
+
+/* Run the Java finalizer of the object at object_addr by invoking its
+   class's finalizer method through the asm_calljavamethod bridge.
+   NOTE(review): assumes object_addr, its vftbl, class and finalizer are
+   valid -- the caller in heap_close checks start_bits before calling;
+   confirm any new callers do the same. */
+__inline__
+static
+void
+heap_call_finalizer_for_object_at(java_objectheader* object_addr)
+{
+ asm_calljavamethod(object_addr->vftbl->class->finalizer, object_addr, NULL, NULL, NULL);
+#ifdef FINALIZER_COUNTING
+ /* statistics only: count every finalizer actually executed */
+ ++gc_finalizers_executed;
+#endif
}
void
heap_close (void)
{
- /* 1. Clean up on the heap... finalize all remaining objects */
-#if 0
address_list_node* curr = finalizers;
- while (curr) {
- address_list_node* prev = curr;
- if (bitmap_testbit(start_bits, curr->address))
- asm_calljavamethod(((java_objectheader*)(curr->address))->vftbl->class->finalizer, curr->address, NULL, NULL, NULL);
+ /* 0. clean up lifespan module */
+#ifdef COLLECT_LIFESPAN
+#if 0
+ fclose(tracefile);
+#else
+ pclose(tracefile);
+#endif
+#endif
+
+#ifdef NEW_COLLECT_LIFESPAN
+ lifespan_close();
+#endif
+
+#ifdef COLLECT_FRAGMENTATION
+ pclose(fragfile);
+ pclose(fragsizefile);
+#endif
+ /* 1. Clean up on the heap... finalize all remaining objects */
+#if 1
+ while (curr) {
+ address_list_node* prev = curr;
+ java_objectheader* addr = (java_objectheader*)(curr->address);
+
+ if (addr && bitmap_testbit(start_bits, addr))
+ heap_call_finalizer_for_object_at(addr);
+
curr = curr->next;
free(prev);
}
/* 3. Release the memory allocated to the heap */
if (heap_base)
munmap(heap_base, heap_size);
+
+ /* 4. emit statistical data */
+#ifdef GC_COLLECT_STATISTICS
+ sprintf(logtext, "%ld bytes for %ld objects allocated.",
+ gc_alloc_total, gc_alloc_count);
+ dolog();
+ sprintf(logtext, "%ld garbage collections performed.", gc_collections_count);
+ dolog();
+ sprintf(logtext, "%ld heapblocks visited, %ld objects marked",
+ gc_mark_heapblocks_visited, gc_mark_objects_marked);
+ dolog();
+ sprintf(logtext, " %ld out of heap.", gc_mark_not_inheap);
+ dolog();
+ sprintf(logtext, " %ld visits to objects already marked.", gc_mark_already_marked);
+ dolog();
+ sprintf(logtext, " %ld not an object.", gc_mark_not_object);
+ dolog();
+ sprintf(logtext, " %ld potential references not aligned.", gc_mark_not_aligned);
+ dolog();
+#endif
+
+#ifdef FINALIZER_COUNTING
+ sprintf(logtext, "%ld objects with a finalizer", gc_finalizers_detected);
+ dolog();
+
+ if (gc_finalizers_detected == gc_finalizers_executed)
+ sprintf(logtext, " all finalizers executed.");
+ else
+ sprintf(logtext, " only %ld finalizers executed.", gc_finalizers_executed);
+ dolog();
+#endif
+
+#ifdef NEW_COLLECT_LIFESPAN
+ lifespan_emit();
+#endif
}
__inline__
new_node->next = NULL;
while (*list && (*list)->next) {
-#if VERBOSITY >= VERBOSITY_PARANOIA
- if ((*list)->address == address)
- fprintf(stderr,
- "Attempt to add a duplicate adress to an adress list.\n");
-#endif
-
if ((*list)->next->address < address)
list = &(*list)->next;
else {
objects will need to be added first. -- phil. */
heap_add_address_to_address_list(&finalizers, addr);
+
+#ifdef COLLECT_LIFESPAN
+ fprintf(tracefile, "finalizer\t0x%lx\n", addr);
+#endif
}
void*
SIZE length = align_size(in_length + ((1 << ALIGN) - 1));
void* free_chunk = NULL;
-#if VERBOSITY >= VERBOSITY_MISTRUST && 0
+#if 0
/* check for misaligned in_length parameter */
if (length != in_length)
fprintf(stderr,
in_length, length);
#endif
- // fprintf(stderr, "heap_allocate: 0x%lx (%ld) requested, 0x%lx (%ld) aligned\n", in_length, in_length, length, length);
-
+#ifdef FINALIZER_COUNTING
if (finalizer)
- fprintf(stderr, "finalizer detected\n");
+ ++gc_finalizers_detected;
+#endif
-#if VERBOSITY >= VERBOSITY_LIFESPAN && 0
+#if defined(COLLECT_LIFESPAN) || defined(NEW_COLLECT_LIFESPAN)
/* perform garbage collection to collect data for lifespan analysis */
if (heap_top > heap_base)
gc_call();
#endif
- intsDisable();
-
+#ifdef USE_THREADS
+ lock_mutex(&alloc_mutex);
+#endif
retry:
/* 1. attempt to get a free block with size >= length from the freelists */
free_chunk = allocator_alloc(length);
}
success:
- /* 3.a. mark all necessary bits, store the finalizer & return the newly allocate block */
-#if 0
+ /* 3.a. mark all necessary bits, store the finalizer & return the newly allocated block */
+
/* I don't mark the object-start anymore, as it always is at the beginning of a free-block,
which already is marked (Note: The first free-block gets marked in heap_init). -- phil. */
bitmap_setbit(start_bits, free_chunk); /* mark the new object */
-#endif
+
+#ifndef SIZE_FROM_CLASSINFO
bitmap_setbit(start_bits, (void*)((long)free_chunk + (long)length)); /* mark the freespace behind the new object */
+#endif
+
if (references)
bitmap_setbit(reference_bits, free_chunk);
else
bitmap_clearbit(reference_bits, free_chunk);
-#if 1
- /* store a hint, that there's a finalizer */
+ /* store a hint, that there's a finalizer for this address */
if (finalizer)
heap_add_finalizer_for_object_at(free_chunk);
-#else
-# warning "finalizers disabled."
-#endif
-#if VERBOSITY >= VERBOSITY_TRACE && 0
- fprintf(stderr, "heap_allocate: returning free block of 0x%lx bytes at 0x%lx\n", length, free_chunk);
-#endif
-#if VERBOSITY >= VERBOSITY_PARANOIA && 0
- fprintf(stderr, "heap_allocate: reporting heap state variables:\n");
- fprintf(stderr, "\theap top 0x%lx\n", heap_top);
- fprintf(stderr, "\theap size 0x%lx (%ld)\n", heap_size, heap_size);
- fprintf(stderr, "\theap_limit 0x%lx\n", heap_limit);
- fprintf(stderr, "\theap next collection 0x%lx\n", heap_next_collection);
+#ifdef GC_COLLECT_STATISTICS
+ gc_alloc_total += length;
+ ++gc_alloc_count;
#endif
- return free_chunk;
-
- failure:
- /* 3.b. failure to allocate enough memory... fail gracefully */
-#if VERBOSITY >= VERBOSITY_MESSAGE && 0
- fprintf(stderr,
- "heap2.c: heap_allocate was unable to allocate 0x%lx bytes on the VM heap.\n",
- length);
+#ifdef COLLECT_LIFESPAN
+ fprintf(tracefile, "alloc\t0x%lx\t0x%lx\n",
+ free_chunk, (long)free_chunk + length);
#endif
-#if VERBOSITY >= VERBOSITY_TRACE
- fprintf(stderr, "heap_allocate: returning NULL\n");
+#ifdef NEW_COLLECT_LIFESPAN
+ lifespan_alloc(free_chunk, length);
#endif
- return NULL;
-
- intsRestore();
+ failure:
+#ifdef USE_THREADS
+ unlock_mutex(&alloc_mutex);
+#endif
+ return free_chunk;
}
void
address_list_node* prev;
#if 0
- while (curr) {
- if (!bitmap_testbit(mark_bits, curr->address)) {
- /* I need a way to save derefs. Any suggestions? -- phil. */
- asm_calljavamethod(((java_objectheader*)curr->address)->vftbl->class->finalizer,
- curr->address, NULL, NULL, NULL);
-
- prev->next = curr->next;
- free(curr);
- } else {
- prev = curr;
- curr = curr->next;
- }
- }
+ /* FIXME: new code, please! */
#else
while (curr) {
if (curr->address) {
if (!bitmap_testbit(mark_bits, curr->address)) {
- fprintf(stderr, "executing finalizer\n");
+
+#ifdef FINALIZER_COUNTING
+ ++gc_finalizers_executed;
+#endif
asm_calljavamethod(((java_objectheader*)curr->address)->vftbl->class->finalizer,
curr->address, NULL, NULL, NULL);
curr->address = 0;
static
void gc_reclaim (void)
{
+#ifdef PSEUDO_GENERATIONAL
+ static void* generation_start = 0;
+ static int generation_num = 0;
+ void* addr = heap_base;
+#endif
void* free_start;
void* free_end = heap_base;
BITBLOCK* temp_bits;
bitmap_t* temp_bitmap;
- /* 1. reset the freelists */
- allocator_reset();
+#ifdef COLLECT_FRAGMENTATION
+ unsigned long free_size = 0;
+ unsigned long free_fragments = 0;
+#endif
- /* 2. reclaim unmarked objects */
-#if 0 && SIZE_FROM_CLASSINFO
- free_start = bitmap_find_next_combination_set_unset(start_bitmap,
- mark_bitmap,
- free_end);
- while (free_start < heap_top) {
- if (!bitmap_testbit(start_bits, free_start) ||
- bitmap_testbit(mark_bits, free_start)) {
- fprintf(stderr, "gc_reclaim: inconsistent bit info for 0x%lx\n", free_start);
- }
+#ifdef PSEUDO_GENERATIONAL
+ if (!generation_start || !(generation_num % 5))
+ generation_start = heap_base;
- free_end = free_start;
- while((free_end < heap_top) &&
- (!bitmap_testbit(mark_bits, free_end)) {
- free_end +=
- }
+ ++generation_num;
+#endif
+ /* 1. reset the freelists */
+#if 0
+ allocator_mark_free_kludge(start_bits); /* this line will be kicked out, when
+ the SIZE_FROM_CLASSINFO reclaim
+ is implemented (very soon!!) */
+#endif
-bitmap_find_next_setbit(mark_bitmap, free_start + 8); /* FIXME: constant used */
+#ifdef PSEUDO_GENERATIONAL
+ for (addr = heap_base; addr <= generation_start; ++addr) {
+ if (bitmap_testbit(start_bits, addr))
+ bitmap_setbit(mark_bits, addr);
+ }
- // fprintf(stderr, "%lx -- %lx\n", free_start, free_end);
-
- if (free_end < heap_top) {
- allocator_free(free_start, free_end - free_start);
+ allocator_mark_free_kludge(start_bits); /* this line will be kicked out, when
+ the SIZE_FROM_CLASSINFO reclaim
+ is implemented (very soon!!) */
+#endif
- // fprintf(stderr, "gc_reclaim: freed 0x%lx bytes at 0x%lx\n", free_end - free_start, free_start);
+ allocator_reset();
+
+ /* 2. reclaim unmarked objects */
+#if 0
+ if (!testbit(start_bits, heap_base))
+ free_start = heap_base;
+ else
+ free_start = bitmap_find_next_combination_set_unset(start_bitmap,
+ mark_bitmap,
+ free_start);
-#if !defined(JIT_MARKER_SUPPORT)
- /* might make trouble with JIT-Marker support. The Marker for unused blocks
- might be called, leading to a bad dereference. -- phil. */
- bitmap_setbit(mark_bits, free_start);
-#endif
- }
- } else {
- // fprintf(stderr, "hmmm... found freeable area of 0 bytes at heaptop ?!\n");
- free_end = heap_top;
- }
- }
#else
while (free_end < heap_top) {
free_start = bitmap_find_next_combination_set_unset(start_bitmap,
mark_bitmap,
free_end);
- if (!bitmap_testbit(start_bits, free_start) ||
- bitmap_testbit(mark_bits, free_start))
- fprintf(stderr, "gc_reclaim: inconsistent bit info for 0x%lx\n", free_start);
-
if (free_start < heap_top) {
- free_end = bitmap_find_next_setbit(mark_bitmap, (void*)((long)free_start + 8)); /* FIXME: constant used */
+ free_end = bitmap_find_next_setbit(mark_bitmap, (void*)((long)free_start + 8)); /* FIXME: constant used */
- // fprintf(stderr, "%lx -- %lx\n", free_start, free_end);
-
if (free_end < heap_top) {
allocator_free(free_start, (long)free_end - (long)free_start);
- // fprintf(stderr, "gc_reclaim: freed 0x%lx bytes at 0x%lx\n", free_end - free_start, free_start);
+#ifdef COLLECT_FRAGMENTATION
+ free_size += (long)free_end - (long)free_start;
+ free_fragments++;
+#endif
+
+#ifdef COLLECT_LIFESPAN
+ fprintf(tracefile,
+ "free\t0x%lx\t0x%lx\n",
+ free_start,
+ (long)free_end);
+#endif
-#if !defined(JIT_MARKER_SUPPORT)
- /* might make trouble with JIT-Marker support. The Marker for unused blocks
+#ifdef NEW_COLLECT_LIFESPAN
+ lifespan_free(free_start, free_end);
+#endif
+
+#ifndef SIZE_FROM_CLASSINFO
+ /* would make trouble with JIT-Marker support. The Marker for unused blocks
might be called, leading to a bad dereference. -- phil. */
- bitmap_setbit(mark_bits, free_start);
+ bitmap_setbit(mark_bits, free_start); /* necessary to calculate obj-size bitmap based. */
#endif
}
} else {
- // fprintf(stderr, "hmmm... found freeable area of 0 bytes at heaptop ?!\n");
- free_end = heap_top;
- }
+ free_end = heap_top;
+ }
}
#endif
mark_bitmap = start_bitmap;
start_bitmap = temp_bitmap;
+#if 0 /* operation already handled in allocate */
/* 3.2. mask reference bitmap */
-#warning "bitmap masking unimplemented --- references will not be cleared"
+ bitmap_mask_with_bitmap(reference_bitmap, start_bitmap);
+#endif
/* 3.3. update heap_top */
- if (free_start < heap_top)
+ if (free_start < heap_top) {
heap_top = free_start;
+#ifdef NEW_COLLECT_LIFESPAN
+ lifespan_free(free_start, free_end);
+#endif
+ }
+#if 0
if (heap_top < heap_limit)
bitmap_setbit(start_bits, heap_top);
+#endif
+
+ /* 3.4. emit fragmentation info */
+#ifdef COLLECT_FRAGMENTATION
+ {
+ unsigned long heap_full = (unsigned long)heap_top - (unsigned long)heap_base;
+ unsigned long heap_life = (unsigned long)heap_top - (unsigned long)heap_base - free_size;
+
+ fprintf(fragfile,
+ "%ld\t%ld\t%ld\t%ld\t%f\t%f\t%f\n",
+ heap_full,
+ heap_life,
+ free_size,
+ free_fragments,
+ 100*(float)free_size/free_fragments,
+ 100*(float)heap_life/heap_full,
+ 100*(float)free_size/heap_full
+ );
+ }
+ fflush(fragfile);
+
+ allocator_dump_to_file(fragsizefile);
+#endif
/* 4. adjust the collection threshold */
- heap_next_collection = (void*)((long)heap_top + ((long)heap_limit - (long)heap_top) / 8);
+ heap_next_collection = next_collection_heuristic();
if (heap_next_collection > heap_limit)
heap_next_collection = heap_limit;
-#if VERBOSITY >= VERBOSITY_PARANOIA && 0
- fprintf(stderr, "gc_reclaim: reporting heap state variables:\n");
- fprintf(stderr, "\theap top 0x%lx\n", heap_top);
- fprintf(stderr, "\theap size 0x%lx (%ld)\n", heap_size, heap_size);
- fprintf(stderr, "\theap_limit 0x%lx\n", heap_limit);
- fprintf(stderr, "\theap next collection 0x%lx\n", heap_next_collection);
+#ifdef COLLECT_LIFESPAN
+ fprintf(tracefile, "heap_top\t0x%lx\n", heap_top);
+#endif
- allocator_dump();
+#ifdef PSEUDO_GENERATIONAL
+ generation_start = heap_top;
#endif
}
void
gc_mark_object_at (void** addr)
{
-#if 0
- // fprintf(stderr, "gc_mark_object_at: called for address 0x%lx\n", addr);
-
- if (!((void*)addr >= heap_base &&
- (void*)addr < heap_top)) {
- // fprintf(stderr, "not an address on the heap.\n");
- } else {
- if (!bitmap_testbit(start_bits, (void*)addr)) {
- // fprintf(stderr, "not an object.\n");
- }
- else {
- if (bitmap_testbit(mark_bits, (void*)addr)) {
- // fprintf(stderr, "gc_mark_object_at: called for address 0x%lx: ", addr);
- // fprintf(stderr, "already marked.\n");
- }
- }
+ /*
+ * A note concerning the order of the tests:
+ *
+ * Statistics collected during a test run, where alignment
+ * was tested before checking whether the addr points into
+ * the heap:
+ * >> LOG: 9301464 bytes for 196724 objects allocated.
+ * >> LOG: 15 garbage collections performed.
+ * >> LOG: 6568440 heapblocks visited, 469249 objects marked
+ * >> LOG: 1064447 visits to objects already marked.
+ * >> LOG: 988270 potential references not aligned.
+ * >> LOG: 4049446 out of heap.
+ * >> LOG: 5236 not an object.
+ *
+ * These results show that only about 1/4 of all heapblocks
+ * point to objects; the single most important reason why a
+ * heapblock cannot point at an object is that its value
+ * doesn't fall within the heap area (this test was performed
+ * with a 3MB heap).
+ *
+ * From the results, the various tests have to be conducted
+ * in the following order for maximum efficiency:
+ * 1. addr in heap?
+ * 2. already marked ?
+ * 3. aligned ?
+ * 4. object ?
+ *
+ * The results after reordering:
+ * >> LOG: 9301464 bytes for 196724 objects allocated.
+ * >> LOG: 15 garbage collections performed.
+ * >> LOG: 6568440 heapblocks visited, 469249 objects marked
+ * >> LOG: 1064447 visits to objects already marked.
+ * >> LOG: 350 potential references not aligned.
+ * >> LOG: 5037366 out of heap.
+ * >> LOG: 5236 not an object.
+ *
+ * And using:
+ * 1. addr in heap?
+ * 2. already marked ?
+ * 3. object ?
+ * 4. aligned ?
+ *
+ * >> LOG: 9301464 bytes for 196724 objects allocated.
+ * >> LOG: 15 garbage collections performed.
+ * >> LOG: 6568440 heapblocks visited, 469249 objects marked
+ * >> LOG: 5037366 out of heap.
+ * >> LOG: 1064456 visits to objects already marked.
+ * >> LOG: 5539 not an object.
+ * >> LOG: 38 potential references not aligned.
+ *
+ * Apparently, most unaligned values will already be eliminated
+ * when checking against the bounds of the heap. Checking this
+ * property first, should thus improve collection times.
+ */
+
+
+ /* 1.a. if addr doesn't point into the heap, return. */
+ if ((unsigned long)addr - (unsigned long)heap_base >=
+ ((long)heap_top - (long)heap_base)) {
+#ifdef GC_COLLECT_STATISTICS
+ ++gc_mark_not_inheap;
+#endif
+ return;
+ }
+
+ /* 1.b. if align(addr) has already been marked during this collection, return. */
+ if (bitmap_testbit(mark_bits, (void*)addr)) {
+#ifdef GC_COLLECT_STATISTICS
+ ++gc_mark_already_marked;
+#endif
+ return;
}
+
+ /* 1.c. if align(addr) doesn't point to the start of an object, return. */
+ if (!bitmap_testbit(start_bits, (void*)addr)) {
+#ifdef GC_COLLECT_STATISTICS
+ ++gc_mark_not_object;
#endif
+ return;
+ }
- /* 1. is the addr aligned, on the heap && the start of an object */
- if (!((long)addr & ((1 << ALIGN) - 1)) &&
- (void*)addr >= heap_base &&
- (void*)addr < heap_top &&
- bitmap_testbit(start_bits, (void*)addr) &&
- !bitmap_testbit(mark_bits, (void*)addr)) {
- bitmap_setbit(mark_bits, (void*)addr);
+ /* 1.d. if addr is not properly aligned, return. */
+ if ((long)addr & ((1 << ALIGN) - 1)) {
+#ifdef GC_COLLECT_STATISTICS
+ ++gc_mark_not_aligned;
+#endif
+ return;
+ }
- // fprintf(stderr, "gc_mark_object_at: called for address 0x%lx: ", addr);
- // fprintf(stderr, "marking object.\n");
+ /* 2. Mark the object at addr */
+ bitmap_setbit(mark_bits, (void*)addr);
+#ifdef GC_COLLECT_STATISTICS
+ ++gc_mark_objects_marked;
+#endif
#ifdef JIT_MARKER_SUPPORT
- /* 1.a. invoke the JIT-marker method */
- asm_calljavamethod(addr->vftbl->class->marker, addr, NULL, NULL, NULL);
+ asm_calljavamethod(addr->vftbl->class->marker, addr, NULL, NULL, NULL);
#else
- /* 1.b. mark the references contained */
- if (bitmap_testbit(reference_bits, addr)) {
- void** end;
-#ifdef SIZE_FROM_CLASSINFO
- void** old_end;
- if (((java_objectheader*)addr)->vftbl == class_array->vftbl) {
- end = (void**)((long)addr + (long)((java_arrayheader*)addr)->alignedsize);
-// fprintf(stderr, "size found for array at 0x%lx: 0x%lx\n", addr, ((java_arrayheader*)addr)->alignedsize);
- }
- else {
- end = (void**)((long)addr + (long)((java_objectheader*)addr)->vftbl->class->alignedsize);
-// fprintf(stderr, "size found for 0x%lx: 0x%lx\n", addr, ((java_objectheader*)addr)->vftbl->class->alignedsize);
- }
+ /* 3. mark the references contained within the extents of the object at addr */
+ if (bitmap_testbit(reference_bits, addr)) {
+ /* 3.1. find the end of the object */
+ void** end;
- old_end = (void**)bitmap_find_next_setbit(start_bitmap, (void*)addr + 8);
- if (end != old_end) {
- fprintf(stderr, "inconsistent object length for object at 0x%lx:", addr);
- fprintf(stderr, " old = 0x%lx ; new = 0x%lx\n", old_end, end);
- }
+#ifdef SIZE_FROM_CLASSINFO
+ if (((java_objectheader*)addr)->vftbl == class_array->vftbl)
+ end = (void**)((long)addr + (long)((java_arrayheader*)addr)->alignedsize);
+ else
+ end = (void**)((long)addr + (long)((java_objectheader*)addr)->vftbl->class->alignedsize);
#else
- end = (void**)bitmap_find_next_setbit(start_bitmap, addr + 8); /* points just behind the object */
+ end = (void**)bitmap_find_next_setbit(start_bitmap, addr + 1); /* points just behind the object */
#endif
- while (addr < end)
- gc_mark_object_at(*(addr++));
- }
+ /* 3.2. mark the references within the object at addr */
+#ifdef GC_COLLECT_STATISTICS
+ gc_mark_heapblocks_visited += ((long)end - (long)addr) >> ALIGN;
+#endif
+ while (addr < end)
+ gc_mark_object_at(*(addr++));
#endif
}
-
+
return;
}
{
address_list_node* curr = references;
- // fprintf(stderr, "gc_mark_references\n");
-
while (curr) {
gc_mark_object_at(*((void**)(curr->address)));
curr = curr->next;
else {
for (aThread = liveThreads; aThread != 0;
aThread = CONTEXT(aThread).nextlive) {
- mark_object_at((void*)aThread);
- if (aThread == currentThread) {
- void **top_of_stack = &dummy;
-
- if (top_of_stack > (void**)CONTEXT(aThread).stackEnd)
- markreferences((void**)CONTEXT(aThread).stackEnd, top_of_stack);
- else
- markreferences(top_of_stack, (void**)CONTEXT(aThread).stackEnd);
- }
- else {
- if (CONTEXT(aThread).usedStackTop > CONTEXT(aThread).stackEnd)
- markreferences((void**)CONTEXT(aThread).stackEnd,
- (void**)CONTEXT(aThread).usedStackTop);
- else
- markreferences((void**)CONTEXT(aThread).usedStackTop,
- (void**)CONTEXT(aThread).stackEnd);
- }
+ gc_mark_object_at((void*)aThread);
+ if (CONTEXT(aThread).usedStackTop > CONTEXT(aThread).stackEnd)
+ markreferences((void**)CONTEXT(aThread).stackEnd,
+ (void**)CONTEXT(aThread).usedStackTop);
+ else
+ markreferences((void**)CONTEXT(aThread).usedStackTop,
+ (void**)CONTEXT(aThread).stackEnd);
}
markreferences((void**)&threadQhead[0],
#else
void **top_of_stack = &dummy;
- // fprintf(stderr, "gc_mark_stack\n");
-
- if (top_of_stack > stackbottom) {
- // fprintf(stderr, "stack growing upward\n");
+ if (top_of_stack > stackbottom)
markreferences(stackbottom, top_of_stack);
- } else {
- // fprintf(stderr, "stack growing downward\n");
+ else
markreferences(top_of_stack, stackbottom);
- }
#endif
}
gc_reclaim();
armageddon_is_near = false;
+
+#ifdef GC_COLLECT_STATISTICS
+ ++gc_collections_count;
+#endif
}
gc_call (void)
{
#ifdef USE_THREADS
+ u1 dummy;
+
assert(blockInts == 0);
intsDisable();
- if (currentThread == NULL || currentThread == mainThread)
+ if (currentThread == NULL || currentThread == mainThread) {
+ CONTEXT(mainThread).usedStackTop = &dummy;
gc_run();
+ }
else
- asm_switchstackandcall(CONTEXT(mainThread).usedStackTop, gc_run);
+ asm_switchstackandcall(CONTEXT(mainThread).usedStackTop, gc_run,
+ (void**)&(CONTEXT(currentThread).usedStackTop));
intsRestore();
#else
gc_run();
* tab-width: 4
* End:
*/
+