/* mm/heap2.c -- added lifespan analyser & fixed the "very last" bug */
#include <stddef.h>
#include <unistd.h>		/* getpagesize, mmap, ... */
#include <sys/mman.h>
#ifndef MAP_FAILED
#define MAP_FAILED ((void*) -1)
#endif
#include <sys/types.h>
#include <stdio.h>
#include "../asmpart.h"
#include "../callargs.h"
#include "../threads/thread.h"
#include "../threads/locks.h"
#include <assert.h>
#include "lifespan.h"

#include "mm.h"

#define PAGESIZE_MINUS_ONE	(getpagesize() - 1)

#undef ALIGN
#undef OFFSET

//#define COLLECT_LIFESPAN
#define NEW_COLLECT_LIFESPAN
#define GC_COLLECT_STATISTICS
#define FINALIZER_COUNTING

#undef STRUCTURES_ON_HEAP
//#define STRUCTURES_ON_HEAP

#define false 0
#define true 1

#include "allocator.h" /* rev. 1 allocator */
#include "bitmap2.h"   /* rev. 2 bitmap management */

bool collectverbose;

void gc_call (void);

/* --- macros */

#define align_size(size)	((size) & ~((1 << ALIGN) - 1))
#define MAP_ADDRESS			(void*) 0x10000000
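
/* Note: align_size() rounds DOWN to a multiple of (1 << ALIGN); callers add
   (1 << ALIGN) - 1 beforehand to round up.  Worked example (assuming ALIGN == 3,
   i.e. 8-byte alignment -- the actual value is defined elsewhere):
   align_size(13 + 7) == 20 & ~7 == 16. */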

#define VERBOSITY_MESSAGE	1
#define VERBOSITY_DEBUG		2
#define VERBOSITY_MISTRUST	3
#define VERBOSITY_TRACE		4
#define VERBOSITY_PARANOIA	5
#define VERBOSITY_LIFESPAN	6

//#define VERBOSITY		VERBOSITY_MESSAGE
//#define VERBOSITY		VERBOSITY_PARANOIA
#define VERBOSITY		VERBOSITY_LIFESPAN

/* --- file-wide variables */

static void*	heap_base = NULL;
static SIZE		heap_size = 0;
static void*	heap_top = NULL;
static void*	heap_limit = NULL;
static void*	heap_next_collection = NULL;

static bitmap_t* start_bitmap = NULL;
static BITBLOCK* start_bits = NULL;
static bitmap_t* reference_bitmap = NULL;
static BITBLOCK* reference_bits = NULL;
static bitmap_t* mark_bitmap = NULL;
static BITBLOCK* mark_bits = NULL;

static void**	stackbottom = NULL;

typedef struct address_list_node {
	void* address;
	struct address_list_node* prev;
	struct address_list_node* next;
} address_list_node;

static address_list_node* references = NULL;
static address_list_node* finalizers = NULL;

#ifdef GC_COLLECT_STATISTICS

static unsigned long gc_collections_count = 0;

static unsigned long gc_alloc_total = 0;
static unsigned long gc_alloc_count = 0;

static unsigned long gc_mark_heapblocks_visited = 0;
static unsigned long gc_mark_not_aligned = 0;
static unsigned long gc_mark_not_inheap = 0;
static unsigned long gc_mark_not_object = 0;
static unsigned long gc_mark_objects_marked = 0;
static unsigned long gc_mark_already_marked = 0;

#endif

#ifdef FINALIZER_COUNTING

static unsigned long gc_finalizers_executed = 0;
static unsigned long gc_finalizers_detected = 0;

#endif

#ifdef USE_THREADS
static iMux  alloc_mutex;
#endif

#ifdef COLLECT_LIFESPAN
static FILE* tracefile;
#endif

/* --- implementation */

void 
heap_init (SIZE size, 
		   SIZE startsize, /* when should we collect for the first time ? */
		   void **in_stackbottom)
{
	/* 1. Initialise the freelists & reset the allocator's state */
	allocator_init(); 

	/* 2. Allocate at least (alignment!) size bytes of memory for the heap */
	heap_size = align_size(size + ((1 << ALIGN) - 1));

#ifdef DONT_MMAP
	heap_base = malloc(heap_size);
#else
	heap_base = (void*) mmap (NULL, 
							  ((size_t)heap_size + PAGESIZE_MINUS_ONE) & ~PAGESIZE_MINUS_ONE,
							  PROT_READ | PROT_WRITE, 
							  MAP_PRIVATE | MAP_ANONYMOUS, 
							  -1, 
							  (off_t) 0);
#endif
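
	/* Note: the mmap length above is rounded up to a whole number of pages;
	   with a typical 4 KB page size PAGESIZE_MINUS_ONE == 0xfff, so the
	   effective length is (heap_size + 0xfff) & ~0xfff. */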

	if (heap_base == (void*)MAP_FAILED) {
		/* unable to allocate the requested amount of memory */
		fprintf(stderr, "heap2.c: The queen, mylord, is dead! (mmap failed)\n");
		exit(-1);
	}

	/* 3. Allocate the bitmaps */
	start_bitmap = bitmap_allocate(heap_base, heap_size);
	reference_bitmap = bitmap_allocate(heap_base, heap_size);
	mark_bitmap = bitmap_allocate(heap_base, heap_size);

	start_bits = start_bitmap->bitmap;
	reference_bits = reference_bitmap->bitmap;
	mark_bits = mark_bitmap->bitmap;

	/* 4. Mark the first free-area as an object-start */
	bitmap_setbit(start_bits, heap_base);

	/* 5. Initialise the heap's state (heap_top, etc.) */
	stackbottom = in_stackbottom; /* copy the stackbottom */

	heap_top = heap_base; /* the current end of the heap (just behind the last allocated object) */
	heap_limit = (void*)((long)heap_base + heap_size); /* points just behind the last accessible block of the heap */

	/* 6. Calculate a useful first collection limit */
	/* This is extremely primitive at this point... 
	   we should replace it with something more useful -- phil. */
	heap_next_collection = (void*)((long)heap_base + (heap_size / 4));

	/* 7. Init the global reference lists & finalizer addresses */
	references = NULL;
	finalizers = NULL;

#ifdef STRUCTURES_ON_HEAP
	heap_addreference(&references);
	heap_addreference(&finalizers);
#endif

#ifdef USE_THREADS
	/* 8. Init the mutexes for synchronization */
	alloc_mutex.holder = 0;
#endif

	/* 9. Set up collection of lifespan data */
#ifdef COLLECT_LIFESPAN
#if 0
	tracefile = fopen("heap.trace", "w");
#else
	tracefile = popen("gzip -9 >heap.trace.gz", "w");
#endif
	if (!tracefile) {
		fprintf(stderr, "heap2.c: Radio Ga Ga! (fopen/popen failed)\n");
		exit(-2);
	}

	fprintf(tracefile, "heap_base\t0x%lx\n", (unsigned long) heap_base);	
	fprintf(tracefile, "heap_limit\t0x%lx\n", (unsigned long) heap_limit);
	fprintf(tracefile, "heap_top\t0x%lx\n", (unsigned long) heap_top);
#endif

#ifdef NEW_COLLECT_LIFESPAN
	lifespan_init(heap_base, heap_size);
#endif
}
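
/* Usage sketch (hypothetical caller, example sizes only): the VM start-up code
   is expected to do something like

       void *dummy;
       heap_init(64 * 1024 * 1024, 16 * 1024 * 1024, (void**) &dummy);

   so that the address of a local variable near the bottom of the main stack
   serves as the stackbottom for the conservative scan in gc_mark_stack(). */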

__inline__
static
void
heap_call_finalizer_for_object_at(java_objectheader* object_addr)
{
	asm_calljavamethod(object_addr->vftbl->class->finalizer, object_addr, NULL, NULL, NULL);
#ifdef FINALIZER_COUNTING
	++gc_finalizers_executed;
#endif
}

void 
heap_close (void)
{
	address_list_node* curr = finalizers;

	/* 0. Clean up the lifespan module */
#ifdef COLLECT_LIFESPAN
#if 0
	fclose(tracefile);
#else
	pclose(tracefile);
#endif
#endif

#ifdef NEW_COLLECT_LIFESPAN
	lifespan_close();
#endif

	/* 1. Clean up on the heap... finalize all remaining objects */
#if 1
	while (curr) {
		address_list_node* prev = curr;
		java_objectheader* addr = (java_objectheader*)(curr->address);
		
		if (addr && bitmap_testbit(start_bits, addr))
			heap_call_finalizer_for_object_at(addr);
		
		curr = curr->next;
		free(prev);
	}
#endif

	/* 2. Release the bitmaps */
	bitmap_release(start_bitmap);
	bitmap_release(reference_bitmap);
	bitmap_release(mark_bitmap);

	/* 3. Release the memory allocated to the heap */
	if (heap_base)
		munmap(heap_base, heap_size);

	/* 4. Emit statistical data */
#ifdef GC_COLLECT_STATISTICS
	sprintf(logtext, "%lu bytes for %lu objects allocated.", 
			gc_alloc_total, gc_alloc_count); 
	dolog();
	sprintf(logtext, "%lu garbage collections performed.", gc_collections_count);
	dolog();
	sprintf(logtext, "%lu heapblocks visited, %lu objects marked", 
			gc_mark_heapblocks_visited, gc_mark_objects_marked);
	dolog();
	sprintf(logtext, "    %lu out of heap.", gc_mark_not_inheap);
	dolog();
	sprintf(logtext, "    %lu visits to objects already marked.", gc_mark_already_marked);
	dolog();
	sprintf(logtext, "    %lu not an object.", gc_mark_not_object);
	dolog();
	sprintf(logtext, "    %lu potential references not aligned.", gc_mark_not_aligned);
	dolog();
#endif

#ifdef FINALIZER_COUNTING
	sprintf(logtext, "%lu objects with a finalizer", gc_finalizers_detected);
	dolog();

	if (gc_finalizers_detected == gc_finalizers_executed)
		sprintf(logtext, "    all finalizers executed.");
	else
		sprintf(logtext, "    only %lu finalizers executed.", gc_finalizers_executed);
	dolog();
#endif

#ifdef NEW_COLLECT_LIFESPAN
	lifespan_emit();
#endif
}

__inline__
static
void 
heap_add_address_to_address_list(address_list_node** list, void* address)
{
	/* Note: address lists are kept sorted to simplify finalization */

	address_list_node* new_node = malloc(sizeof(address_list_node));
	new_node->address = address;
	new_node->next = NULL;

	while (*list && (*list)->next) {
#if VERBOSITY >= VERBOSITY_PARANOIA
		if ((*list)->address == address)
			fprintf(stderr,
					"Attempt to add a duplicate address to an address list.\n");
#endif

		if ((*list)->next->address < address)
			list = &(*list)->next;
		else {
			new_node->next = *list;
			*list = new_node;
			return;
		}			
	}

	new_node->next = *list;
	*list = new_node;
}


__inline__
static
void
heap_add_finalizer_for_object_at(void* addr)
{
	/* Finalizers seem to be very rare... for this reason, I keep a linked
	   list of object addresses which have a finalizer attached. This list
	   is kept in ascending address order, matching the order in which garbage
	   is freed. It is currently kept separate from the heap; it should be
	   moved onto the heap eventually, but some JIT-marker code to handle
	   these special objects will need to be added first. -- phil. */

	heap_add_address_to_address_list(&finalizers, addr);

#ifdef COLLECT_LIFESPAN
	fprintf(tracefile, "finalizer\t0x%lx\n", (unsigned long) addr);
#endif
}

void* 
heap_allocate (SIZE		  in_length, 
			   bool		  references, 
			   methodinfo *finalizer)
{
	SIZE	length = align_size(in_length + ((1 << ALIGN) - 1)); 
	void*	free_chunk = NULL;

#if 0
	/* check for misaligned in_length parameter */
	if (length != in_length)
		fprintf(stderr,
				"heap2.c: heap_allocate was passed unaligned in_length parameter: %ld, \n         aligned to %ld. (mistrust)\n",
				in_length, length);
#endif

#ifdef FINALIZER_COUNTING
	if (finalizer)
		++gc_finalizers_detected;
#endif

#if defined(COLLECT_LIFESPAN) || defined(NEW_COLLECT_LIFESPAN)
	/* perform garbage collection to collect data for lifespan analysis */
	if (heap_top > heap_base)
		gc_call();
#endif

#ifdef USE_THREADS
	lock_mutex(&alloc_mutex);
#endif	
 retry:
	/* 1. attempt to get a free block with size >= length from the freelists */
	free_chunk = allocator_alloc(length);

	/* 2. if unsuccessful, try alternative allocation strategies */
	if (!free_chunk) {
		/* 2.a. if the collection threshold would be exceeded, collect the heap */
		if ((long)heap_top + length > (long)heap_next_collection) {
			/* 2.a.1. collect if the next_collection threshold would be exceeded */
			gc_call();

			/* 2.a.2. we just ran a collection, recheck the freelists */
			free_chunk = allocator_alloc(length);
			if (free_chunk)
				goto success;

			/* 2.a.3. we can't satisfy the request from the freelists, check
			          against the heap_limit whether growing the heap is possible */
			if ((long)heap_top + length > (long)heap_limit)
				goto failure;
		}

		/* 2.b. grow the heap */
		free_chunk = heap_top;
		heap_top = (void*)((long)heap_top + length);
	}

 success:
	/* 3.a. mark all necessary bits, store the finalizer & return the newly allocated block */

	/* I don't mark the object-start anymore, as it always is at the beginning of a free-block,
	   which already is marked (Note: The first free-block gets marked in heap_init). -- phil. */
	bitmap_setbit(start_bits, free_chunk); /* mark the new object */

#if 1 /* FIXME: will become unnecessary soon */
	bitmap_setbit(start_bits, (void*)((long)free_chunk + (long)length)); /* mark the freespace behind the new object */
#endif

	if (references)
		bitmap_setbit(reference_bits, free_chunk);
	else 
		bitmap_clearbit(reference_bits, free_chunk);

	/* store a hint that there's a finalizer for this address */
	if (finalizer)
		heap_add_finalizer_for_object_at(free_chunk);

#ifdef GC_COLLECT_STATISTICS
	gc_alloc_total += length;
	++gc_alloc_count;
#endif

#if 0
	if (free_chunk == 0x20000430228)
		fprintf(stderr, "yell!\n");
#endif

#ifdef COLLECT_LIFESPAN
	fprintf(tracefile, "alloc\t0x%lx\t0x%lx\n", 
			(unsigned long) free_chunk, (unsigned long) free_chunk + length);
#endif

#ifdef NEW_COLLECT_LIFESPAN
	lifespan_alloc(free_chunk, length);
#endif

 failure:
#ifdef USE_THREADS
	unlock_mutex(&alloc_mutex);
#endif	
	return free_chunk;
}
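
/* Usage sketch (hypothetical caller; the classinfo field names are assumptions,
   not part of this file): allocating an instance would look roughly like

       java_objectheader *obj = heap_allocate(c->instancesize, true,
                                              c->finalizer);

   with references == false only for storage that can never contain heap
   pointers (e.g. arrays of primitive type). */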

void 
heap_addreference (void **reflocation)
{
	/* I currently use a separate linked list (as in the original code) to hold
	   the global reference locations, but I'll change this to allocate these
	   in blocks on the heap; we'll have to add JIT-Marker code for those Java
	   objects then. -- phil. */

	heap_add_address_to_address_list(&references, reflocation);
}

static
__inline__
void gc_finalize (void)
{
	/* This will have to be slightly rewritten as soon as the JIT-marked heap-based lists are used. -- phil. */

	address_list_node*  curr = finalizers;
	address_list_node*  prev;

#if 0
	/* FIXME: new code, please! */
#else
	while (curr) {
		if (curr->address) {
			if (!bitmap_testbit(mark_bits, curr->address)) {

#ifdef FINALIZER_COUNTING
				++gc_finalizers_executed;
#endif
				asm_calljavamethod(((java_objectheader*)curr->address)->vftbl->class->finalizer, 
								   curr->address, NULL, NULL, NULL);
				curr->address = 0;
			}
		}

		curr = curr->next;
	}
#endif
}
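
/* Note: gc_run() calls gc_finalize() before gc_reclaim(), so a finalizer runs
   before the object's storage is recycled; zeroing curr->address above ensures
   each finalizer is executed at most once. */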


__inline__
static 
void gc_reclaim (void)
{
	void* free_start = heap_top; /* initialised so that heap_top is left unchanged
									below if the sweep loop never runs */
	void* free_end = heap_base;
	BITBLOCK* temp_bits;
	bitmap_t* temp_bitmap;

	/* 1. reset the freelists */
	allocator_reset();

	/* 2. reclaim unmarked objects */
#if 0
	/* FIXME: add new code, please! */
#else
	while (free_end < heap_top) {
		free_start = bitmap_find_next_combination_set_unset(start_bitmap,
															 mark_bitmap,
															 free_end);

		if (free_start < heap_top) {
			free_end = bitmap_find_next_setbit(mark_bitmap, (void*)((long)free_start + 8)); /* FIXME: constant used */

			if (free_end < heap_top) {
				allocator_free(free_start, (long)free_end - (long)free_start);

#ifdef COLLECT_LIFESPAN
				fprintf(tracefile, 
						"free\t0x%lx\t0x%lx\n", 
						(unsigned long) free_start,
						(unsigned long) free_end);
#endif

#ifdef NEW_COLLECT_LIFESPAN
				lifespan_free(free_start, free_end);
#endif

#ifndef SIZE_FROM_CLASSINFO
				/* would make trouble with JIT-Marker support. The Marker for unused blocks 
				   might be called, leading to a bad dereference. -- phil. */
				bitmap_setbit(mark_bits, free_start); /* necessary to calculate object sizes from the bitmaps */
#endif
			}
		} else
			free_end = heap_top;	
	}
#endif

	/* 3.1. swap mark & start bitmaps */
	temp_bits = mark_bits;
	mark_bits = start_bits;
	start_bits = temp_bits;

	temp_bitmap = mark_bitmap;
	mark_bitmap = start_bitmap;
	start_bitmap = temp_bitmap;

#if 0 /* already handled in allocate */
	/* 3.2. mask reference bitmap */
	bitmap_mask_with_bitmap(reference_bitmap, start_bitmap);
#endif

	/* 3.3. update heap_top */
	if (free_start < heap_top)
		heap_top = free_start;

	if (heap_top < heap_limit)
		bitmap_setbit(start_bits, heap_top);

	/* 4. adjust the collection threshold */
	heap_next_collection = (void*)((long)heap_top + ((long)heap_limit - (long)heap_top) / 4);
	if (heap_next_collection > heap_limit)
		heap_next_collection = heap_limit;
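
	/* Worked example: with a 4 MB heap and heap_top 1 MB above heap_base, the
	   next collection is triggered once a further (3 MB / 4) = 768 KB of
	   allocations have been made. */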

#ifdef COLLECT_LIFESPAN
	fprintf(tracefile, "heap_top\t0x%lx\n", (unsigned long) heap_top);
#endif
}

__inline__
static
void 
gc_mark_object_at (void** addr)
{
	/*
	 * A note concerning the order of the tests:
	 *
	 * Statistics collected during a test run, where alignment
	 * was tested before checking whether the addr points into 
	 * the heap:
	 * >> LOG: 9301464 bytes for 196724 objects allocated.
	 * >> LOG: 15 garbage collections performed.
	 * >> LOG: 6568440 heapblocks visited, 469249 objects marked
	 * >> LOG:     1064447 visits to objects already marked.
	 * >> LOG:     988270 potential references not aligned.
	 * >> LOG:     4049446 out of heap.
	 * >> LOG:     5236 not an object.
	 *
	 * These results show that only about 1/4 of all heapblocks 
	 * point to objects; the single most important reason why a 
	 * heapblock cannot point at an object is that its value 
	 * doesn't fall within the heap area (this test was performed
	 * with a 3MB heap).
	 *
	 * From the results, the various tests have to be conducted 
	 * in the following order for maximum efficiency:
	 *    1. addr in heap?
	 *    2. already marked ?
	 *    3. aligned ?
	 *    4. object ?
	 *
	 * The results after reordering:
	 * >> LOG: 9301464 bytes for 196724 objects allocated.
	 * >> LOG: 15 garbage collections performed.
	 * >> LOG: 6568440 heapblocks visited, 469249 objects marked
	 * >> LOG:     1064447 visits to objects already marked.
	 * >> LOG:     350 potential references not aligned.
	 * >> LOG:     5037366 out of heap.
	 * >> LOG:     5236 not an object.
	 *
	 * And using:
	 *    1. addr in heap?
	 *    2. already marked ?
	 *    3. object ?
	 *    4. aligned ?
	 * 
	 * >> LOG: 9301464 bytes for 196724 objects allocated.
	 * >> LOG: 15 garbage collections performed.
	 * >> LOG: 6568440 heapblocks visited, 469249 objects marked
	 * >> LOG:     5037366 out of heap.
	 * >> LOG:     1064456 visits to objects already marked.
	 * >> LOG:     5539 not an object.
	 * >> LOG:     38 potential references not aligned.
	 *
	 * Apparently, most unaligned values will already be eliminated
	 * when checking against the bounds of the heap. Checking this
	 * property first should thus improve collection times.
	 */


	/* 1.a. if addr doesn't point into the heap, return. */
	if (!((void*)addr >= heap_base && (void*)addr < heap_top)) {
#ifdef GC_COLLECT_STATISTICS
		++gc_mark_not_inheap;
#endif
		return;
	}

	/* 1.b. if align(addr) has already been marked during this collection, return. */
	if (bitmap_testbit(mark_bits, (void*)addr)) {
#ifdef GC_COLLECT_STATISTICS
		++gc_mark_already_marked;
#endif
		return;
	}

	/* 1.c. if align(addr) doesn't point to the start of an object, return. */
	if (!bitmap_testbit(start_bits, (void*)addr)) {
#ifdef GC_COLLECT_STATISTICS
		++gc_mark_not_object;
#endif
		return;
	}

	/* 1.d. if addr is not properly aligned, return. */
	if ((long)addr & ((1 << ALIGN) - 1)) {
#ifdef GC_COLLECT_STATISTICS
		++gc_mark_not_aligned;
#endif
		return;
	}

	/* 2. Mark the object at addr */
	bitmap_setbit(mark_bits, (void*)addr); 
#ifdef GC_COLLECT_STATISTICS
	++gc_mark_objects_marked;
#endif

#ifdef JIT_MARKER_SUPPORT
	asm_calljavamethod(((java_objectheader*)addr)->vftbl->class->marker, addr, NULL, NULL, NULL);
#else

	/* 3. mark the references contained within the extents of the object at addr */
	if (bitmap_testbit(reference_bits, addr)) {
		/* 3.1. find the end of the object */
		void** end;

#ifdef SIZE_FROM_CLASSINFO
#if 0
		if (addr == 0x20000430228)
			fprintf(stderr, "stop me!\n");
#endif

		if (((java_objectheader*)addr)->vftbl == class_array->vftbl)
			end = (void**)((long)addr + (long)((java_arrayheader*)addr)->alignedsize);
		else
			end = (void**)((long)addr + (long)((java_objectheader*)addr)->vftbl->class->alignedsize);
#else
		end = (void**)bitmap_find_next_setbit(start_bitmap, addr + 1); /* points just behind the object */
#endif

		/* 3.2. mark the references within the object at addr */
#ifdef GC_COLLECT_STATISTICS
		gc_mark_heapblocks_visited += ((long)end - (long)addr) >> ALIGN;
#endif
		while (addr < end)
			gc_mark_object_at(*(addr++));
	}
#endif

	return;
}


__inline__
static
void gc_mark_references (void)
{
	address_list_node* curr = references;

	while (curr) {
		gc_mark_object_at(*((void**)(curr->address)));
		curr = curr->next;
	}
}

__inline__
static
void 
markreferences(void** start, void** end)
{
	while (start < end)
		gc_mark_object_at(*(start++));
}

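/* Note: gc_mark_stack() below scans stacks conservatively: every word between
   the stack bottom and the current stack pointer is treated as a potential
   reference and handed to gc_mark_object_at(). */
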
__inline__
static
void gc_mark_stack (void)
{
	void *dummy;

#ifdef USE_THREADS 
	thread *aThread;
	
	if (currentThread == NULL) {
		void **top_of_stack = &dummy;
		
		if (top_of_stack > stackbottom)
			markreferences(stackbottom, top_of_stack);
		else
			markreferences(top_of_stack, stackbottom);
	}
	else {
		for (aThread = liveThreads; aThread != 0;
			 aThread = CONTEXT(aThread).nextlive) {
			gc_mark_object_at((void*)aThread);
			if (aThread == currentThread) {
				void **top_of_stack = &dummy;
				
				if (top_of_stack > (void**)CONTEXT(aThread).stackEnd)
					markreferences((void**)CONTEXT(aThread).stackEnd, top_of_stack);
				else	
					markreferences(top_of_stack, (void**)CONTEXT(aThread).stackEnd);
			}
			else {
				if (CONTEXT(aThread).usedStackTop > CONTEXT(aThread).stackEnd)
					markreferences((void**)CONTEXT(aThread).stackEnd,
								   (void**)CONTEXT(aThread).usedStackTop);
				else	
					markreferences((void**)CONTEXT(aThread).usedStackTop,
								   (void**)CONTEXT(aThread).stackEnd);
			}
		}

		markreferences((void**)&threadQhead[0],
					   (void**)&threadQhead[MAX_THREAD_PRIO]);
	}
#else
	void **top_of_stack = &dummy;

	if (top_of_stack > stackbottom)
		markreferences(stackbottom, top_of_stack);
	else
		markreferences(top_of_stack, stackbottom);
#endif
}


static 
void gc_run (void)
{
	static int armageddon_is_near = 0;

	if (armageddon_is_near) {
		/* armageddon_is_here! */
		fprintf(stderr, "Oops, seems like there's a slight problem here: gc_run() called while still running?!\n");
		return;
	}

	armageddon_is_near = true;
	heap_next_collection = heap_limit;  /* try to avoid finalizer-induced collections */

	bitmap_clear(mark_bitmap);

	asm_dumpregistersandcall(gc_mark_stack);
	gc_mark_references();
	gc_finalize();
	gc_reclaim();

	armageddon_is_near = false;

#ifdef GC_COLLECT_STATISTICS
	++gc_collections_count;
#endif
}


/************************* Function: gc_init **********************************

  Initializes anything that must be initialized to call the gc on the right
  stack.

******************************************************************************/

void
gc_init (void)
{
}

/************************** Function: gc_call ********************************

  Calls the garbage collector. The garbage collector should always be called
  using this function since it ensures that enough stack space is available.

******************************************************************************/

void
gc_call (void)
{
#ifdef USE_THREADS
	assert(blockInts == 0);

	intsDisable();
	if (currentThread == NULL || currentThread == mainThread)
		gc_run();
	else
		asm_switchstackandcall(CONTEXT(mainThread).usedStackTop, gc_run);
	intsRestore();
#else
	gc_run();
#endif
}



/*
 * These are local overrides for various environment variables in Emacs.
 * Please do not remove this and leave it at the end of the file, where
 * Emacs will automagically detect them.
 * ---------------------------------------------------------------------
 * Local variables:
 * mode: c
 * indent-tabs-mode: t
 * c-basic-offset: 4
 * tab-width: 4
 * End:
 */