2 #include <unistd.h> /* getpagesize, mmap, ... */
5 #define MAP_FAILED ((void*) -1)
9 #include "../asmpart.h"
10 #include "../callargs.h"
11 #include "../threads/thread.h"
12 #include "../threads/locks.h"
17 #define PAGESIZE_MINUS_ONE (getpagesize() - 1)
22 #define GC_COLLECT_STATISTICS
23 //#define FINALIZER_COUNTING
25 #undef STRUCTURES_ON_HEAP
26 //#define STRUCTURES_ON_HEAP
31 #include "allocator.h" /* rev. 1 allocator */
32 #include "bitmap2.h" /* rev. 2 bitmap management */
40 #define align_size(size) ((size) & ~((1 << ALIGN) - 1))
41 #define MAP_ADDRESS (void*) 0x10000000
43 #define VERBOSITY_MESSAGE 1
44 #define VERBOSITY_DEBUG 2
45 #define VERBOSITY_MISTRUST 3
46 #define VERBOSITY_TRACE 4
47 #define VERBOSITY_PARANOIA 5
48 #define VERBOSITY_LIFESPAN 6
50 //#define VERBOSITY VERBOSITY_MESSAGE
51 //#define VERBOSITY VERBOSITY_PARANOIA
52 #define VERBOSITY VERBOSITY_LIFESPAN
54 /* --- file-wide variables */
/* --- GC heap globals (file scope).
   NOTE(review): this listing has lines elided; some declarations (e.g. the
   'address' field of address_list_node and the closing typedef line) are
   not visible here. */

/* Bounds and state of the contiguous heap region. */
56 static void* heap_base = NULL;
57 static SIZE heap_size = 0;
58 static void* heap_top = NULL;
59 static void* heap_limit = NULL;
60 static void* heap_next_collection = NULL;

/* Three parallel bitmaps over the heap: object-start bits, "object contains
   references" bits, and mark bits for the current collection.  The *_bits
   pointers cache each bitmap's raw bit array for fast access. */
62 static bitmap_t* start_bitmap = NULL;
63 static BITBLOCK* start_bits = NULL;
64 static bitmap_t* reference_bitmap = NULL;
65 static BITBLOCK* reference_bits = NULL;
66 static bitmap_t* mark_bitmap = NULL;
67 static BITBLOCK* mark_bits = NULL;

/* Bottom of the stack, used as the boundary for conservative root scanning
   (set from the in_stackbottom argument of heap_init). */
69 static void** stackbottom = NULL;

/* Doubly-linked node of a sorted address list; used for both the global
   reference locations and the finalizable-object addresses. */
71 typedef struct address_list_node {
73 struct address_list_node* prev;
74 struct address_list_node* next;

/* Heads of the global reference list and the finalizer list. */
77 static address_list_node* references = NULL;
78 static address_list_node* finalizers = NULL;

80 #ifdef GC_COLLECT_STATISTICS

/* Allocation / collection counters (compiled in via GC_COLLECT_STATISTICS). */
82 static unsigned long gc_collections_count = 0;
84 static unsigned long gc_alloc_total = 0;
85 static unsigned long gc_alloc_count = 0;

/* Mark-phase counters: classify every candidate heap word visited. */
87 static unsigned long gc_mark_heapblocks_visited = 0;
88 static unsigned long gc_mark_not_aligned = 0;
89 static unsigned long gc_mark_not_inheap = 0;
90 static unsigned long gc_mark_not_object = 0;
91 static unsigned long gc_mark_objects_marked = 0;
92 static unsigned long gc_mark_already_marked = 0;

96 #ifdef FINALIZER_COUNTING

/* Finalizer statistics (compiled in via FINALIZER_COUNTING). */
98 static unsigned long gc_finalizers_executed = 0;
99 static unsigned long gc_finalizers_detected = 0;

/* Mutex serializing heap_allocate (see lock_mutex/unlock_mutex calls). */
104 static iMux alloc_mutex;
107 /* --- implementation */
/* heap_init: set up the collector's heap.
 *
 * size           requested heap size in bytes (rounded up to alignment)
 * startsize      unused here as far as visible; intended first-collection size
 * in_stackbottom bottom of the caller's stack, kept for root scanning
 *
 * Allocates the heap (malloc or mmap, build-dependent), allocates the three
 * bitmaps, marks the initial free area as an object start, and initializes
 * heap_top/heap_limit/heap_next_collection and the allocation mutex.
 * NOTE(review): return type, braces and several statements are elided in
 * this listing. */
110 heap_init (SIZE size,
111 SIZE startsize, /* when should we collect for the first time ? */
112 void **in_stackbottom)
114 /* 1. Initialise the freelists & reset the allocator's state */
117 /* 2. Allocate at least (alignment!) size bytes of memory for the heap */
/* Round the request up: add (alignment - 1) then truncate via align_size. */
118 heap_size = align_size(size + ((1 << ALIGN) - 1));
121 heap_base = malloc(heap_size);
/* mmap path: length is additionally rounded up to a whole number of pages. */
123 heap_base = (void*) mmap (NULL,
124 ((size_t)heap_size + PAGESIZE_MINUS_ONE) & ~PAGESIZE_MINUS_ONE,
125 PROT_READ | PROT_WRITE,
126 MAP_PRIVATE | MAP_ANONYMOUS,
131 if (heap_base == (void*)MAP_FAILED) {
132 /* unable to allocate the requested amount of memory */
133 fprintf(stderr, "heap2.c: The queen, mylord, is dead! (mmap failed)\n");
137 /* 3. Allocate the bitmaps */
138 start_bitmap = bitmap_allocate(heap_base, heap_size);
139 reference_bitmap = bitmap_allocate(heap_base, heap_size);
140 mark_bitmap = bitmap_allocate(heap_base, heap_size);
/* Cache the raw bit arrays so the hot paths avoid an extra indirection. */
142 start_bits = start_bitmap->bitmap;
143 reference_bits = reference_bitmap->bitmap;
144 mark_bits = mark_bitmap->bitmap;
146 /* 4. Mark the first free-area as an object-start */
147 bitmap_setbit(start_bits, heap_base);
149 /* 5. Initialise the heap's state (heap_top, etc.) */
150 stackbottom = in_stackbottom; /* copy the stackbottom */
152 heap_top = heap_base; /* the current end of the heap (just behind the last allocated object) */
153 heap_limit = (void*)((long)heap_base + heap_size); /* points just behind the last accessible block of the heap */
155 /* 6. calculate a useful first collection limit */
156 /* This is extremly primitive at this point...
157 we should replace it with something more useful -- phil. */
/* First collection fires once a quarter of the heap has been consumed. */
158 heap_next_collection = (void*)((long)heap_base + (heap_size / 4));
160 /* 7. Init the global reference lists & finalizer addresses */
164 #ifdef STRUCTURES_ON_HEAP
/* If GC bookkeeping itself lives on the heap, register the list heads as
   global references so the collector keeps them alive. */
165 heap_addreference(&references);
166 heap_addreference(&finalizers);
170 /* 8. Init the mutexes for synchronization */
171 alloc_mutex.holder = 0;
/* heap_call_finalizer_for_object_at: invoke the Java finalizer of the object
 * at object_addr by calling compiled code via asm_calljavamethod, looking the
 * finalizer method up through the object's vftbl->class.
 * NOTE(review): return type and braces are elided in this listing. */
178 heap_call_finalizer_for_object_at(java_objectheader* object_addr)
180 asm_calljavamethod(object_addr->vftbl->class->finalizer, object_addr, NULL, NULL, NULL);
181 #ifdef FINALIZER_COUNTING
182 ++gc_finalizers_executed;
/* Heap shutdown (function header elided in this listing — presumably
 * heap_close): run the finalizers of all objects still live on the heap,
 * release the three bitmaps and the heap memory itself, then (optionally)
 * emit the collected statistics through logtext. */
189 address_list_node* curr = finalizers;
191 /* 1. Clean up on the heap... finalize all remaining objects */
/* Walk the finalizer list; only finalize addresses that still carry an
   object-start bit (i.e. are still allocated objects). */
194 address_list_node* prev = curr;
195 java_objectheader* addr = (java_objectheader*)(curr->address);
197 if (addr && bitmap_testbit(start_bits, addr))
198 heap_call_finalizer_for_object_at(addr);
205 /* 2. Release the bitmaps */
206 bitmap_release(start_bitmap);
207 bitmap_release(reference_bitmap);
208 bitmap_release(mark_bitmap);
210 /* 3. Release the memory allocated to the heap */
212 munmap(heap_base, heap_size);
214 #ifdef GC_COLLECT_STATISTICS
215 /* 4. emit statistical data */
/* Each sprintf fills the (externally declared) logtext buffer; the log
   output calls between these lines are elided in this listing. */
216 sprintf(logtext, "%ld bytes for %ld objects allocated.",
217 gc_alloc_total, gc_alloc_count);
219 sprintf(logtext, "%ld garbage collections performed.", gc_collections_count);
221 sprintf(logtext, "%ld heapblocks visited, %ld objects marked",
222 gc_mark_heapblocks_visited, gc_mark_objects_marked);
224 sprintf(logtext, " %ld out of heap.", gc_mark_not_inheap);
226 sprintf(logtext, " %ld visits to objects already marked.", gc_mark_already_marked);
228 sprintf(logtext, " %ld not an object.", gc_mark_not_object);
230 sprintf(logtext, " %ld potential references not aligned.", gc_mark_not_aligned);
234 #ifdef FINALIZER_COUNTING
235 sprintf(logtext, "%ld objects with a finalizer", gc_finalizers_detected);
238 if (gc_finalizers_detected == gc_finalizers_executed)
239 sprintf(logtext, " all finalizers executed.");
241 sprintf(logtext, " only %ld finalizers executed.", gc_finalizers_executed);
/* heap_add_address_to_address_list: insert 'address' into the sorted,
 * singly-visible linked list headed at *list, keeping ascending address
 * order so that finalization can walk the list in heap order.
 * NOTE(review): braces, the duplicate-handling branch and the list-head
 * update are partially elided in this listing. */
250 heap_add_address_to_address_list(address_list_node** list, void* address)
252 /* Note: address lists are kept sorted to simplify finalization */
254 address_list_node* new_node = malloc(sizeof(address_list_node));
255 new_node->address = address;
256 new_node->next = NULL;
/* Advance 'list' until the next node's address is >= the new address. */
258 while (*list && (*list)->next) {
259 #if VERBOSITY >= VERBOSITY_PARANOIA
/* Paranoia check only: duplicates indicate a caller bug. */
260 if ((*list)->address == address)
262 "Attempt to add a duplicate adress to an adress list.\n");
265 if ((*list)->next->address < address)
266 list = &(*list)->next;
/* Splice the new node in front of the found position. */
268 new_node->next = *list;
274 new_node->next = *list;
/* heap_add_finalizer_for_object_at: remember that the object at 'addr' has
 * a finalizer, by inserting its address into the sorted finalizer list. */
282 heap_add_finalizer_for_object_at(void* addr)
284 /* Finalizers seem to be very rare... for this reason, I keep a linked
285 list of object addresses, which have a finalizer attached. This list
286 is kept in ascending order according to the order garbage is freed.
287 This list is currently kept separate from the heap, but should be
288 moved onto it, but some JIT-marker code to handle these special
289 objects will need to be added first. -- phil. */
291 heap_add_address_to_address_list(&finalizers, addr);
/* heap_allocate: allocate an object of in_length bytes on the GC heap.
 *
 * in_length  requested size in bytes (rounded up to the GC alignment)
 * finalizer  non-NULL if the new object's class has a finalizer; its address
 *            is then recorded for later finalization
 *
 * Strategy: (1) try the allocator freelists; (2) if crossing the collection
 * threshold, collect and retry; (3) otherwise grow the heap by bumping
 * heap_top.  On success the object-start bit (and, depending on elided
 * conditions, the reference bit) is set for the new chunk.  The whole
 * routine runs under alloc_mutex.
 * NOTE(review): return type, several conditionals and the return statement
 * are elided in this listing. */
295 heap_allocate (SIZE in_length,
297 methodinfo *finalizer)
/* Round the request up to the next aligned size. */
299 SIZE length = align_size(in_length + ((1 << ALIGN) - 1));
300 void* free_chunk = NULL;
303 /* check for misaligned in_length parameter */
304 if (length != in_length)
306 "heap2.c: heap_allocate was passed unaligned in_length parameter: %ld, \n aligned to %ld. (mistrust)\n",
310 #ifdef FINALIZER_COUNTING
312 ++gc_finalizers_detected;
315 #if VERBOSITY >= VERBOSITY_LIFESPAN && 0
316 /* perform garbage collection to collect data for lifespan analysis */
317 if (heap_top > heap_base)
/* --- critical section: all allocation state is guarded by alloc_mutex. */
322 lock_mutex(&alloc_mutex);
325 /* 1. attempt to get a free block with size >= length from the freelists */
326 free_chunk = allocator_alloc(length);
328 /* 2. if unsuccessful, try alternative allocation strategies */
330 /* 2.a if the collection threshold would be exceeded, collect the heap */
331 if ((long)heap_top + length > (long)heap_next_collection) {
332 /* 2.a.1. collect if the next_collection threshold would be exceeded */
335 /* 2.a.2. we just ran a collection, recheck the freelists */
336 free_chunk = allocator_alloc(length);
340 /* 2.a.3. we can't satisfy the request from the freelists, check
341 against the heap_limit whether growing the heap is possible */
342 if ((long)heap_top + length > (long)heap_limit)
346 /* 2.b. grow the heap */
/* Bump-pointer allocation: take the block at heap_top and advance it. */
347 free_chunk = heap_top;
348 heap_top = (void*)((long)heap_top + length);
352 /* 3.a. mark all necessary bits, store the finalizer & return the newly allocate block */
354 /* I don't mark the object-start anymore, as it always is at the beginning of a free-block,
355 which already is marked (Note: The first free-block gets marked in heap_init). -- phil. */
356 bitmap_setbit(start_bits, free_chunk); /* mark the new object */
358 #if 1 /* FIXME: will become unecessary soon */
359 bitmap_setbit(start_bits, (void*)((long)free_chunk + (long)length)); /* mark the freespace behind the new object */
/* Record whether the new object may contain references (the selecting
   condition between these two calls is elided in this listing). */
363 bitmap_setbit(reference_bits, free_chunk);
365 bitmap_clearbit(reference_bits, free_chunk);
367 /* store a hint, that there's a finalizer for this address */
369 heap_add_finalizer_for_object_at(free_chunk);
372 fprintf(stderr, "alloc 0x%lx bytes @ 0x%lx\n", length, free_chunk);
375 #ifdef GC_COLLECT_STATISTICS
376 gc_alloc_total += length;
382 unlock_mutex(&alloc_mutex);
/* heap_addreference: register a global reference location (a memory cell
 * holding a pointer into the heap) so the collector scans it as a root. */
388 heap_addreference (void **reflocation)
390 /* I currently use a separate linked list (as in the original code) to hold
391 the global reference locations, but I'll change this to allocate these
392 in blocks on the heap; we'll have to add JIT-Marker code for those Java
393 objects then. -- phil. */
395 heap_add_address_to_address_list(&references, reflocation);
/* gc_finalize: after marking, run the finalizers of all registered objects
 * that were NOT marked (i.e. are about to be reclaimed).
 * NOTE(review): the loop structure and list maintenance around these lines
 * are elided in this listing. */
400 void gc_finalize (void)
402 /* This will have to be slightly rewritten as soon the JIT-marked heap-based lists are used. -- phil. */
404 address_list_node* curr = finalizers;
405 address_list_node* prev;
408 /* FIXME: new code, please! */
/* Unmarked mark-bit => the object is dead this cycle; finalize it now. */
412 if (!bitmap_testbit(mark_bits, curr->address)) {
414 #ifdef FINALIZER_COUNTING
415 ++gc_finalizers_executed;
/* Call the finalizer method directly through compiled code. */
417 asm_calljavamethod(((java_objectheader*)curr->address)->vftbl->class->finalizer,
418 curr->address, NULL, NULL, NULL);
/* gc_reclaim: sweep phase.  Scan the heap for runs that are object-starts
 * but unmarked, hand them back to the allocator freelists, then swap the
 * mark and start bitmaps (this cycle's marks become next cycle's object
 * starts) and recompute heap_top and the next collection threshold.
 * NOTE(review): braces, the free_start declaration and several loop lines
 * are elided in this listing. */
431 void gc_reclaim (void)
434 void* free_end = heap_base;
436 bitmap_t* temp_bitmap;
438 /* 1. reset the freelists */
441 /* 2. reclaim unmarked objects */
443 /* FIXME: add new code, please! */
445 while (free_end < heap_top) {
/* Find the next address that is an object start but carries no mark:
   the beginning of a dead (freeable) run. */
446 free_start = bitmap_find_next_combination_set_unset(start_bitmap,
450 if (free_start < heap_top) {
/* The free run ends at the next marked (live) object. */
451 free_end = bitmap_find_next_setbit(mark_bitmap, (void*)((long)free_start + 8)); /* FIXME: constant used */
453 if (free_end < heap_top) {
454 allocator_free(free_start, (long)free_end - (long)free_start);
456 #if !defined(JIT_MARKER_SUPPORT)
457 /* would make trouble with JIT-Marker support. The Marker for unused blocks
458 might be called, leading to a bad dereference. -- phil. */
459 bitmap_setbit(mark_bits, free_start);
467 /* 3.1. swap mark & start bitmaps */
/* After the swap, the set of marked (live) objects becomes the new set of
   object starts, and the old start bitmap is recycled as next cycle's
   (cleared-on-use) mark bitmap. */
468 temp_bits = mark_bits;
469 mark_bits = start_bits;
470 start_bits = temp_bits;
472 temp_bitmap = mark_bitmap;
473 mark_bitmap = start_bitmap;
474 start_bitmap = temp_bitmap;
476 #if 0 /* already heandled in allocate */
477 /* 3.2. mask reference bitmap */
478 bitmap_mask_with_bitmap(reference_bitmap, start_bitmap);
481 /* 3.3. update heap_top */
/* If the last free run reached the end of allocated space, shrink heap_top
   back to its start so bump allocation can reuse it. */
482 if (free_start < heap_top)
483 heap_top = free_start;
485 if (heap_top < heap_limit)
486 bitmap_setbit(start_bits, heap_top);
488 /* 4. adjust the collection threshold */
/* Next collection when a quarter of the remaining free space is used up. */
489 heap_next_collection = (void*)((long)heap_top + ((long)heap_limit - (long)heap_top) / 4);
490 if (heap_next_collection > heap_limit)
491 heap_next_collection = heap_limit;
/* gc_mark_object_at: conservatively mark the (potential) object at addr and
 * recursively mark every heap word contained in its extents.
 *
 * The candidate is rejected (cheapest test first, per the measurements in
 * the long comment below) if it: (a) lies outside [heap_base, heap_top);
 * (b) is already marked this cycle; (c) does not carry an object-start bit;
 * (d) is not aligned to the GC alignment.  Otherwise it is marked and, if
 * its reference bit is set, its interior words are scanned recursively.
 * NOTE(review): return type, braces, 'return' statements and the inner scan
 * loop header are elided in this listing. */
497 gc_mark_object_at (void** addr)
500 * A note concerning the order of the tests:
502 * Statistics collected during a test run, where alignment
503 * was tested before checking whether the addr points into
505 * >> LOG: 9301464 bytes for 196724 objects allocated.
506 * >> LOG: 15 garbage collections performed.
507 * >> LOG: 6568440 heapblocks visited, 469249 objects marked
508 * >> LOG: 1064447 visits to objects already marked.
509 * >> LOG: 988270 potential references not aligned.
510 * >> LOG: 4049446 out of heap.
511 * >> LOG: 5236 not an object.
513 * These results show, that only about 1/4 of all heapblocks
514 * point to objects; The single most important reason why a
515 * heapblock can not point at an object is, that it's value
516 * doesn't fall within the heap area (this test was performed
519 * From the results, the various tests have to be conducted
520 * in the following order for maximum efficiency:
522 * 2. already marked ?
526 * The results after reordering:
527 * >> LOG: 9301464 bytes for 196724 objects allocated.
528 * >> LOG: 15 garbage collections performed.
529 * >> LOG: 6568440 heapblocks visited, 469249 objects marked
530 * >> LOG: 1064447 visits to objects already marked.
531 * >> LOG: 350 potential references not aligned.
532 * >> LOG: 5037366 out of heap.
533 * >> LOG: 5236 not an object.
537 * 2. already marked ?
541 * >> LOG: 9301464 bytes for 196724 objects allocated.
542 * >> LOG: 15 garbage collections performed.
543 * >> LOG: 6568440 heapblocks visited, 469249 objects marked
544 * >> LOG: 5037366 out of heap.
545 * >> LOG: 1064456 visits to objects already marked.
546 * >> LOG: 5539 not an object.
547 * >> LOG: 38 potential references not aligned.
549 * Apparently, most unaligned values will already be eliminated
550 * when checking against the bounds of the heap. Checking this
551 * property first, should thus improve collection times.
555 /* 1.a. if addr doesn't point into the heap, return. */
556 if (!((void*)addr >= heap_base && (void*)addr < heap_top)) {
557 #ifdef GC_COLLECT_STATISTICS
558 ++gc_mark_not_inheap;
563 /* 1.b. if align(addr) has already been marked during this collection, return. */
564 if (bitmap_testbit(mark_bits, (void*)addr)) {
565 #ifdef GC_COLLECT_STATISTICS
566 ++gc_mark_already_marked;
571 /* 1.c. if align(addr) doesn't point to the start of an object, return. */
572 if (!bitmap_testbit(start_bits, (void*)addr)) {
573 #ifdef GC_COLLECT_STATISTICS
574 ++gc_mark_not_object;
579 /* 1.d. if addr is not properly aligned, return. */
580 if ((long)addr & ((1 << ALIGN) - 1)) {
581 #ifdef GC_COLLECT_STATISTICS
582 ++gc_mark_not_aligned;
587 /* 2. Mark the object at addr */
588 bitmap_setbit(mark_bits, (void*)addr);
589 #ifdef GC_COLLECT_STATISTICS
590 ++gc_mark_objects_marked;
593 #ifdef JIT_MARKER_SUPPORT
/* With JIT marker support, delegate the interior scan to compiled code. */
594 asm_calljavamethod(addr->vftbl->class->marker, addr, NULL, NULL, NULL);
597 /* 3. mark the references contained within the extents of the object at addr */
598 if (bitmap_testbit(reference_bits, addr)) {
599 /* 3.1. find the end of the object */
602 #ifdef SIZE_FROM_CLASSINFO
/* Object size comes from class info: arrays carry their own aligned size,
   plain objects take it from their class. */
603 if (((java_objectheader*)addr)->vftbl == class_array->vftbl)
604 end = (void**)((long)addr + (long)((java_arrayheader*)addr)->alignedsize);
606 end = (void**)((long)addr + (long)((java_objectheader*)addr)->vftbl->class->alignedsize);
/* Without class info, the object ends where the next object starts. */
608 end = (void**)bitmap_find_next_setbit(start_bitmap, addr + 1); /* points just behind the object */
611 /* 3.2. mark the references within the object at addr */
612 #ifdef GC_COLLECT_STATISTICS
613 gc_mark_heapblocks_visited += ((long)end - (long)addr) >> ALIGN;
/* Recurse on every word of the object (loop header elided in listing). */
616 gc_mark_object_at(*(addr++));
/* gc_mark_references: mark the objects pointed to by all registered global
 * reference locations (the 'references' list built by heap_addreference).
 * NOTE(review): the list-walk loop around the call is elided in this
 * listing. */
626 void gc_mark_references (void)
628 address_list_node* curr = references;
/* curr->address is a void** cell; mark whatever heap object it points at. */
631 gc_mark_object_at(*((void**)(curr->address)));
/* markreferences: conservatively treat every word in [start, end) as a
 * potential heap reference and hand it to gc_mark_object_at.
 * NOTE(review): return type, braces and the loop header are elided in this
 * listing. */
639 markreferences(void** start, void** end)
642 gc_mark_object_at(*(start++));
/* gc_mark_stack: scan thread stacks as conservative GC roots.
 *
 * Without a threading system (currentThread == NULL) only the region
 * between a local dummy (current top of stack) and the saved stackbottom is
 * scanned.  With threads, every live thread's object, its stack span and
 * the thread priority queue heads are scanned.  Each markreferences call is
 * ordered so the lower address is always passed first, since stack growth
 * direction is checked explicitly.
 * NOTE(review): the 'dummy' local, 'aThread' declaration, braces and some
 * else-branches are elided in this listing. */
647 void gc_mark_stack (void)
654 if (currentThread == NULL) {
655 void **top_of_stack = &dummy;
/* Order the span by address regardless of stack growth direction. */
657 if (top_of_stack > stackbottom)
658 markreferences(stackbottom, top_of_stack);
660 markreferences(top_of_stack, stackbottom);
/* Threaded case: walk the live-thread list. */
663 for (aThread = liveThreads; aThread != 0;
664 aThread = CONTEXT(aThread).nextlive) {
/* The thread object itself is a root. */
665 gc_mark_object_at((void*)aThread);
666 if (aThread == currentThread) {
/* The running thread's true top of stack is a local address. */
667 void **top_of_stack = &dummy;
669 if (top_of_stack > (void**)CONTEXT(aThread).stackEnd)
670 markreferences((void**)CONTEXT(aThread).stackEnd, top_of_stack);
672 markreferences(top_of_stack, (void**)CONTEXT(aThread).stackEnd);
/* Suspended threads: use the saved usedStackTop instead. */
675 if (CONTEXT(aThread).usedStackTop > CONTEXT(aThread).stackEnd)
676 markreferences((void**)CONTEXT(aThread).stackEnd,
677 (void**)CONTEXT(aThread).usedStackTop);
679 markreferences((void**)CONTEXT(aThread).usedStackTop,
680 (void**)CONTEXT(aThread).stackEnd);
/* The scheduler's priority-queue heads may also hold thread references. */
684 markreferences((void**)&threadQhead[0],
685 (void**)&threadQhead[MAX_THREAD_PRIO]);
/* Collection driver (function header elided in this listing — presumably
 * gc_run): clear the mark bitmap, mark roots from registers/stack and the
 * global reference list, then finalize and reclaim (those calls are elided
 * here).  A function-static reentrancy guard detects recursive invocation,
 * e.g. a collection triggered from within a finalizer. */
701 static int armageddon_is_near = 0;
703 if (armageddon_is_near) {
704 /* armageddon_is_here! */
705 fprintf(stderr, "Oops, seems like there's a slight problem here: gc_run() called while still running?!\n");
709 armageddon_is_near = true;
710 heap_next_collection = heap_limit; /* try to avoid finalizer-induced collections */
/* Fresh mark bits for this cycle. */
712 bitmap_clear(mark_bitmap);
/* Spill registers to the stack, then scan stacks + globals as roots. */
714 asm_dumpregistersandcall(gc_mark_stack);
715 gc_mark_references();
719 armageddon_is_near = false;
721 #ifdef GC_COLLECT_STATISTICS
722 ++gc_collections_count;
727 /************************* Function: gc_init **********************************
729 Initializes anything that must be initialized to call the gc on the right
732 ******************************************************************************/
739 /************************** Function: gc_call ********************************
741 Calls the garbage collector. The garbage collector should always be called
742 using this function since it ensures that enough stack space is available.
744 ******************************************************************************/
/* NOTE(review): the function header and braces are elided in this listing.
   Collections must not run inside a blockInts critical section. */
750 assert(blockInts == 0);
/* On the main thread (or with no threading) gc_run can be called directly;
   otherwise switch to the main thread's stack first so enough stack space
   is guaranteed (the direct-call line is elided in this listing). */
753 if (currentThread == NULL || currentThread == mainThread)
756 asm_switchstackandcall(CONTEXT(mainThread).usedStackTop, gc_run);
766 * These are local overrides for various environment variables in Emacs.
767 * Please do not remove this and leave it at the end of the file, where
768 * Emacs will automagically detect them.
769 * ---------------------------------------------------------------------
772 * indent-tabs-mode: t