#include "../callargs.h"
#include "../threads/thread.h"
#include "../threads/locks.h"
+#include "../sysdep/threads.h"
#include <assert.h>
#include "lifespan.h"
#undef ALIGN
#undef OFFSET
+#define HEURISTIC_SEL 0
+#define HEURISTIC_PARAM 2UL
+
+
+#define next_collection_heuristic_init() \
+ (void*)((long)heap_top + (((long)heap_limit - (long)heap_top) >> 4))
+
+#if HEURISTIC_SEL == 0
+#define next_collection_heuristic() \
+ (void*)((long)heap_top + (((long)heap_limit - (long)heap_top) >> HEURISTIC_PARAM))
+#elif HEURISTIC_SEL == 1
+#define next_collection_heuristic() \
+ (void*)((long)heap_top + (((long)heap_top - (long)heap_base) << HEURISTIC_PARAM))
+#elif HEURISTIC_SEL == 2
+#define next_collection_heuristic() \
+ (void*)((long)heap_top + HEURISTIC_PARAM)
+#endif
+
+//#define PSEUDO_GENERATIONAL
//#define COLLECT_LIFESPAN
//#define NEW_COLLECT_LIFESPAN
-#define GC_COLLECT_STATISTICS
-#define FINALIZER_COUNTING
+//#define COLLECT_FRAGMENTATION
+//#define GC_COLLECT_STATISTICS
+//#define FINALIZER_COUNTING
#undef STRUCTURES_ON_HEAP
//#define STRUCTURES_ON_HEAP
#define align_size(size) ((size) & ~((1 << ALIGN) - 1))
#define MAP_ADDRESS (void*) 0x10000000
-#define VERBOSITY_MESSAGE 1
-#define VERBOSITY_DEBUG 2
-#define VERBOSITY_MISTRUST 3
-#define VERBOSITY_TRACE 4
-#define VERBOSITY_PARANOIA 5
-#define VERBOSITY_LIFESPAN 6
-
-//#define VERBOSITY VERBOSITY_MESSAGE
-//#define VERBOSITY VERBOSITY_PARANOIA
-#define VERBOSITY VERBOSITY_LIFESPAN
-
/* --- file-wide variables */
static void* heap_base = NULL;
static FILE* tracefile;
#endif
+#ifdef COLLECT_FRAGMENTATION
+static FILE* fragfile;
+static FILE* fragsizefile;
+#endif
+
/* --- implementation */
void
/* 6. calculate a useful first collection limit */
/* This is extremely primitive at this point...
we should replace it with something more useful -- phil. */
- heap_next_collection = (void*)((long)heap_base + (heap_size / 4));
+ heap_next_collection = next_collection_heuristic_init();
/* 7. Init the global reference lists & finalizer addresses */
references = NULL;
#ifdef NEW_COLLECT_LIFESPAN
lifespan_init(heap_base, heap_size);
#endif
+
+ /* 10. Set up collection of fragmentation data */
+#ifdef COLLECT_FRAGMENTATION
+ fragfile = popen("gzip -9 >fragmentation.gz", "w");
+ fragsizefile = popen("gzip -9 >freeblocks.gz", "w");
+#endif
}
__inline__
lifespan_close();
#endif
+#ifdef COLLECT_FRAGMENTATION
+ pclose(fragfile);
+ pclose(fragsizefile);
+#endif
+
/* 1. Clean up on the heap... finalize all remaining objects */
#if 1
while (curr) {
new_node->next = NULL;
while (*list && (*list)->next) {
-#if VERBOSITY >= VERBOSITY_PARANOIA
- if ((*list)->address == address)
- fprintf(stderr,
- "Attempt to add a duplicate adress to an adress list.\n");
-#endif
-
if ((*list)->next->address < address)
list = &(*list)->next;
else {
}
success:
- /* 3.a. mark all necessary bits, store the finalizer & return the newly allocate block */
+ /* 3.a. mark all necessary bits, store the finalizer & return the newly allocated block */
/* I don't mark the object-start anymore, as it always is at the beginning of a free-block,
which already is marked (Note: The first free-block gets marked in heap_init). -- phil. */
bitmap_setbit(start_bits, free_chunk); /* mark the new object */
-#if 1 /* FIXME: will become unecessary soon */
+#ifndef SIZE_FROM_CLASSINFO
bitmap_setbit(start_bits, (void*)((long)free_chunk + (long)length)); /* mark the freespace behind the new object */
#endif
++gc_alloc_count;
#endif
-#if 0
- if (free_chunk == 0x20000430228)
- fprintf(stderr, "yell!\n");
-#endif
-
#ifdef COLLECT_LIFESPAN
fprintf(tracefile, "alloc\t0x%lx\t0x%lx\n",
free_chunk, (long)free_chunk + length);
static
void gc_reclaim (void)
{
+#ifdef PSEUDO_GENERATIONAL
+ static void* generation_start = 0;
+ static int generation_num = 0;
+ void* addr = heap_base;
+#endif
void* free_start;
void* free_end = heap_base;
BITBLOCK* temp_bits;
bitmap_t* temp_bitmap;
+#ifdef COLLECT_FRAGMENTATION
+ unsigned long free_size = 0;
+ unsigned long free_fragments = 0;
+#endif
+
+#ifdef PSEUDO_GENERATIONAL
+ if (!generation_start || !(generation_num % 5))
+ generation_start = heap_base;
+
+ ++generation_num;
+#endif
+
/* 1. reset the freelists */
+
+#if 0
+ allocator_mark_free_kludge(start_bits); /* this line will be kicked out, when
+ the SIZE_FROM_CLASSINFO reclaim
+ is implemented (very soon!!) */
+#endif
+
+#ifdef PSEUDO_GENERATIONAL
+ for (addr = heap_base; addr <= generation_start; ++addr) {
+ if (bitmap_testbit(start_bits, addr))
+ bitmap_setbit(mark_bits, addr);
+ }
+
+ allocator_mark_free_kludge(start_bits); /* this line will be kicked out, when
+ the SIZE_FROM_CLASSINFO reclaim
+ is implemented (very soon!!) */
+#endif
+
allocator_reset();
/* 2. reclaim unmarked objects */
#if 0
- /* FIXME: add new code, please! */
+ if (!testbit(start_bits, heap_base))
+ free_start = heap_base;
+ else
+ free_start = bitmap_find_next_combination_set_unset(start_bitmap,
+ mark_bitmap,
+ free_start);
+
#else
while (free_end < heap_top) {
free_start = bitmap_find_next_combination_set_unset(start_bitmap,
if (free_end < heap_top) {
allocator_free(free_start, (long)free_end - (long)free_start);
+#ifdef COLLECT_FRAGMENTATION
+ free_size += (long)free_end - (long)free_start;
+ free_fragments++;
+#endif
+
#ifdef COLLECT_LIFESPAN
fprintf(tracefile,
"free\t0x%lx\t0x%lx\n",
bitmap_setbit(mark_bits, free_start); /* necessary to calculate obj-size bitmap based. */
#endif
}
- } else
+ } else {
free_end = heap_top;
+ }
}
#endif
mark_bitmap = start_bitmap;
start_bitmap = temp_bitmap;
-#if 0 /* already heandled in allocate */
+#if 0 /* operation already handled in allocate */
/* 3.2. mask reference bitmap */
bitmap_mask_with_bitmap(reference_bitmap, start_bitmap);
#endif
/* 3.3. update heap_top */
- if (free_start < heap_top)
+ if (free_start < heap_top) {
heap_top = free_start;
+#ifdef NEW_COLLECT_LIFESPAN
+ lifespan_free(free_start, free_end);
+#endif
+ }
+#if 0
if (heap_top < heap_limit)
bitmap_setbit(start_bits, heap_top);
+#endif
+
+ /* 3.4. emit fragmentation info */
+#ifdef COLLECT_FRAGMENTATION
+ {
+ unsigned long heap_full = (unsigned long)heap_top - (unsigned long)heap_base;
+ unsigned long heap_life = (unsigned long)heap_top - (unsigned long)heap_base - free_size;
+
+ fprintf(fragfile,
+ "%ld\t%ld\t%ld\t%ld\t%f\t%f\t%f\n",
+ heap_full,
+ heap_life,
+ free_size,
+ free_fragments,
+ 100*(float)free_size/free_fragments,
+ 100*(float)heap_life/heap_full,
+ 100*(float)free_size/heap_full
+ );
+ }
+ fflush(fragfile);
+
+ allocator_dump_to_file(fragsizefile);
+#endif
/* 4. adjust the collection threshold */
- heap_next_collection = (void*)((long)heap_top + ((long)heap_limit - (long)heap_top) / 4);
+ heap_next_collection = next_collection_heuristic();
if (heap_next_collection > heap_limit)
heap_next_collection = heap_limit;
#ifdef COLLECT_LIFESPAN
fprintf(tracefile, "heap_top\t0x%lx\n", heap_top);
#endif
+
+#ifdef PSEUDO_GENERATIONAL
+ generation_start = heap_top;
+#endif
}
__inline__
/* 1.a. if addr doesn't point into the heap, return. */
- if (!((void*)addr >= heap_base && (void*)addr < heap_top)) {
+ if ((unsigned long)addr - (unsigned long)heap_base >=
+ ((long)heap_top - (long)heap_base)) {
#ifdef GC_COLLECT_STATISTICS
++gc_mark_not_inheap;
#endif
void** end;
#ifdef SIZE_FROM_CLASSINFO
-#if 0
- if (addr == 0x20000430228)
- fprintf(stderr, "stop me!\n");
-#endif
-
if (((java_objectheader*)addr)->vftbl == class_array->vftbl)
end = (void**)((long)addr + (long)((java_arrayheader*)addr)->alignedsize);
else
for (aThread = liveThreads; aThread != 0;
aThread = CONTEXT(aThread).nextlive) {
gc_mark_object_at((void*)aThread);
- if (aThread == currentThread) {
- void **top_of_stack = &dummy;
-
- if (top_of_stack > (void**)CONTEXT(aThread).stackEnd)
- markreferences((void**)CONTEXT(aThread).stackEnd, top_of_stack);
- else
- markreferences(top_of_stack, (void**)CONTEXT(aThread).stackEnd);
- }
- else {
- if (CONTEXT(aThread).usedStackTop > CONTEXT(aThread).stackEnd)
- markreferences((void**)CONTEXT(aThread).stackEnd,
- (void**)CONTEXT(aThread).usedStackTop);
- else
- markreferences((void**)CONTEXT(aThread).usedStackTop,
- (void**)CONTEXT(aThread).stackEnd);
- }
+ if (CONTEXT(aThread).usedStackTop > CONTEXT(aThread).stackEnd)
+ markreferences((void**)CONTEXT(aThread).stackEnd,
+ (void**)CONTEXT(aThread).usedStackTop);
+ else
+ markreferences((void**)CONTEXT(aThread).usedStackTop,
+ (void**)CONTEXT(aThread).stackEnd);
}
markreferences((void**)&threadQhead[0],
gc_call (void)
{
#ifdef USE_THREADS
+ u1 dummy;
+
assert(blockInts == 0);
intsDisable();
- if (currentThread == NULL || currentThread == mainThread)
+ if (currentThread == NULL || currentThread == mainThread) {
+ CONTEXT(mainThread).usedStackTop = &dummy;
gc_run();
+ }
else
- asm_switchstackandcall(CONTEXT(mainThread).usedStackTop, gc_run);
+ asm_switchstackandcall(CONTEXT(mainThread).usedStackTop, gc_run,
+ (void**)&(CONTEXT(currentThread).usedStackTop));
intsRestore();
#else
gc_run();