[cacao.git] / mm / heap2.c
#include <stddef.h>
#include <unistd.h>             /* getpagesize, mmap, ... */
#include <sys/mman.h>
#ifndef MAP_FAILED
#define MAP_FAILED ((void*) -1)
#endif
#include <sys/types.h>
#include <stdio.h>
#include "../asmpart.h"
#include "../callargs.h"
#include "../threads/thread.h"
#include "../threads/locks.h"
#include <assert.h>

#include "mm.h"

#define PAGESIZE_MINUS_ONE      (getpagesize() - 1)

#undef ALIGN
#undef OFFSET

#define GC_COLLECT_STATISTICS
//#define FINALIZER_COUNTING

#undef STRUCTURES_ON_HEAP
//#define STRUCTURES_ON_HEAP

#define false 0
#define true 1

#include "allocator.h" /* rev. 1 allocator */
#include "bitmap2.h"   /* rev. 2 bitmap management */

bool collectverbose;

void gc_call (void);

/* --- macros */

#define align_size(size)        ((size) & ~((1 << ALIGN) - 1))
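/* Note: align_size() rounds DOWN to the next multiple of (1 << ALIGN); callers
   that need to round UP add (1 << ALIGN) - 1 to the size first. For example,
   assuming the allocator headers define ALIGN as 3 (8-byte alignment):
   align_size(13 + 7) == 16. */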
#define MAP_ADDRESS             (void*) 0x10000000

#define VERBOSITY_MESSAGE       1
#define VERBOSITY_DEBUG         2
#define VERBOSITY_MISTRUST      3
#define VERBOSITY_TRACE         4
#define VERBOSITY_PARANOIA      5
#define VERBOSITY_LIFESPAN      6

//#define VERBOSITY             VERBOSITY_MESSAGE
//#define VERBOSITY             VERBOSITY_PARANOIA
#define VERBOSITY               VERBOSITY_LIFESPAN

/* --- file-wide variables */

static void*    heap_base = NULL;
static SIZE     heap_size = 0;
static void*    heap_top = NULL;
static void*    heap_limit = NULL;
static void*    heap_next_collection = NULL;

static bitmap_t* start_bitmap = NULL;
static BITBLOCK* start_bits = NULL;
static bitmap_t* reference_bitmap = NULL;
static BITBLOCK* reference_bits = NULL;
static bitmap_t* mark_bitmap = NULL;
static BITBLOCK* mark_bits = NULL;

static void**   stackbottom = NULL;

typedef struct address_list_node {
    void* address;
    struct address_list_node* prev;
    struct address_list_node* next;
} address_list_node;
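/* Note: the prev field above is never set by the list code in this file; the
   reference and finalizer lists below are only ever walked forward via next. */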

static address_list_node* references = NULL;
static address_list_node* finalizers = NULL;

#ifdef GC_COLLECT_STATISTICS

static unsigned long gc_collections_count = 0;

static unsigned long gc_alloc_total = 0;
static unsigned long gc_alloc_count = 0;

static unsigned long gc_mark_heapblocks_visited = 0;
static unsigned long gc_mark_not_aligned = 0;
static unsigned long gc_mark_not_inheap = 0;
static unsigned long gc_mark_not_object = 0;
static unsigned long gc_mark_objects_marked = 0;
static unsigned long gc_mark_already_marked = 0;

#endif

#ifdef FINALIZER_COUNTING

static unsigned long gc_finalizers_executed = 0;
static unsigned long gc_finalizers_detected = 0;

#endif

#ifdef USE_THREADS
static iMux  alloc_mutex;
#endif

/* --- implementation */

void
heap_init (SIZE size,
           SIZE startsize, /* when should we collect for the first time ? */
           void **in_stackbottom)
{
    /* 1. Initialise the freelists & reset the allocator's state */
    allocator_init();

    /* 2. Allocate at least (alignment!) size bytes of memory for the heap */
    heap_size = align_size(size + ((1 << ALIGN) - 1));

#ifdef DONT_MMAP
    heap_base = malloc(heap_size);
#else
    heap_base = (void*) mmap (NULL,
                              ((size_t)heap_size + PAGESIZE_MINUS_ONE) & ~PAGESIZE_MINUS_ONE,
                              PROT_READ | PROT_WRITE,
                              MAP_PRIVATE | MAP_ANONYMOUS,
                              -1,
                              (off_t) 0);
#endif
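    /* The mapped length is heap_size rounded up to a whole number of pages:
       (heap_size + pagesize - 1) & ~(pagesize - 1). E.g. assuming 4 KB pages,
       a 10000 byte heap maps 12288 bytes; the actual page size comes from
       getpagesize() and is platform dependent. */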

    if (heap_base == NULL || heap_base == (void*)MAP_FAILED) {
        /* unable to allocate the requested amount of memory
           (NULL covers the malloc path under DONT_MMAP) */
        fprintf(stderr, "heap2.c: The queen, mylord, is dead! (heap allocation failed)\n");
        exit(-1);
    }

    /* 3. Allocate the bitmaps */
    start_bitmap = bitmap_allocate(heap_base, heap_size);
    reference_bitmap = bitmap_allocate(heap_base, heap_size);
    mark_bitmap = bitmap_allocate(heap_base, heap_size);

    start_bits = start_bitmap->bitmap;
    reference_bits = reference_bitmap->bitmap;
    mark_bits = mark_bitmap->bitmap;

    /* 4. Mark the first free-area as an object-start */
    bitmap_setbit(start_bits, heap_base);

    /* 5. Initialise the heap's state (heap_top, etc.) */
    stackbottom = in_stackbottom; /* copy the stackbottom */

    heap_top = heap_base; /* the current end of the heap (just behind the last allocated object) */
    heap_limit = (void*)((long)heap_base + heap_size); /* points just behind the last accessible block of the heap */

    /* 6. calculate a useful first collection limit */
    /* This is extremely primitive at this point...
       we should replace it with something more useful -- phil. */
    heap_next_collection = (void*)((long)heap_base + (heap_size / 4));
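    /* e.g. for a 3 MB heap the first collection is triggered once roughly
       768 KB (a quarter of the heap) have been handed out from the heap top */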

    /* 7. Init the global reference lists & finalizer addresses */
    references = NULL;
    finalizers = NULL;

#ifdef STRUCTURES_ON_HEAP
    heap_addreference(&references);
    heap_addreference(&finalizers);
#endif

#ifdef USE_THREADS
    /* 8. Init the mutexes for synchronization */
    alloc_mutex.holder = 0;
#endif
}

__inline__
static
void
heap_call_finalizer_for_object_at(java_objectheader* object_addr)
{
    asm_calljavamethod(object_addr->vftbl->class->finalizer, object_addr, NULL, NULL, NULL);
#ifdef FINALIZER_COUNTING
    ++gc_finalizers_executed;
#endif
}

void
heap_close (void)
{
    address_list_node* curr = finalizers;

    /* 1. Clean up on the heap... finalize all remaining objects */
#if 1
    while (curr) {
        address_list_node* prev = curr;
        java_objectheader* addr = (java_objectheader*)(curr->address);

        if (addr && bitmap_testbit(start_bits, addr))
            heap_call_finalizer_for_object_at(addr);

        curr = curr->next;
        free(prev);
    }
#endif

    /* 2. Release the bitmaps */
    bitmap_release(start_bitmap);
    bitmap_release(reference_bitmap);
    bitmap_release(mark_bitmap);

    /* 3. Release the memory allocated to the heap */
    if (heap_base) {
#ifdef DONT_MMAP
        free(heap_base);      /* match the malloc in heap_init */
#else
        munmap(heap_base, heap_size);
#endif
    }

#ifdef GC_COLLECT_STATISTICS
    /* 4. emit statistical data */
    sprintf(logtext, "%lu bytes for %lu objects allocated.",
            gc_alloc_total, gc_alloc_count);
    dolog();
    sprintf(logtext, "%lu garbage collections performed.", gc_collections_count);
    dolog();
    sprintf(logtext, "%lu heapblocks visited, %lu objects marked",
            gc_mark_heapblocks_visited, gc_mark_objects_marked);
    dolog();
    sprintf(logtext, "    %lu out of heap.", gc_mark_not_inheap);
    dolog();
    sprintf(logtext, "    %lu visits to objects already marked.", gc_mark_already_marked);
    dolog();
    sprintf(logtext, "    %lu not an object.", gc_mark_not_object);
    dolog();
    sprintf(logtext, "    %lu potential references not aligned.", gc_mark_not_aligned);
    dolog();
#endif

#ifdef FINALIZER_COUNTING
    sprintf(logtext, "%lu objects with a finalizer", gc_finalizers_detected);
    dolog();

    if (gc_finalizers_detected == gc_finalizers_executed)
        sprintf(logtext, "    all finalizers executed.");
    else
        sprintf(logtext, "    only %lu finalizers executed.", gc_finalizers_executed);
    dolog();
#endif

}

__inline__
static
void
heap_add_address_to_address_list(address_list_node** list, void* address)
{
    /* Note: address lists are kept sorted (ascending) to simplify finalization */

    address_list_node* new_node = malloc(sizeof(address_list_node));
    new_node->address = address;
    new_node->next = NULL;

    /* walk the pointer-to-pointer chain until we reach the insertion point */
    while (*list && (*list)->address < address)
        list = &(*list)->next;

#if VERBOSITY >= VERBOSITY_PARANOIA
    if (*list && (*list)->address == address)
        fprintf(stderr,
                "Attempt to add a duplicate address to an address list.\n");
#endif

    new_node->next = *list;
    *list = new_node;
}
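/* Both heap_addreference() and heap_add_finalizer_for_object_at() below funnel
   through this helper, so global reference locations and finalizable object
   addresses end up in the same kind of sorted, singly-walked list. */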


__inline__
static
void
heap_add_finalizer_for_object_at(void* addr)
{
    /* Finalizers seem to be very rare... for this reason, I keep a linked
       list of the addresses of objects that have a finalizer attached. The
       list is sorted by ascending address, which matches the order in which
       the garbage is reclaimed. It is currently kept separate from the heap;
       it should eventually be moved onto it, but some JIT-marker code to
       handle these special objects will need to be added first. -- phil. */

    heap_add_address_to_address_list(&finalizers, addr);
}

void*
heap_allocate (SIZE           in_length,
               bool           references,
               methodinfo    *finalizer)
{
    SIZE    length = align_size(in_length + ((1 << ALIGN) - 1));
    void*   free_chunk = NULL;

#if 0
    /* check for misaligned in_length parameter */
    if (length != in_length)
        fprintf(stderr,
                "heap2.c: heap_allocate was passed unaligned in_length parameter: %ld, \n         aligned to %ld. (mistrust)\n",
                in_length, length);
#endif

#ifdef FINALIZER_COUNTING
    if (finalizer)
        ++gc_finalizers_detected;
#endif

#if VERBOSITY >= VERBOSITY_LIFESPAN && 0
    /* perform garbage collection to collect data for lifespan analysis */
    if (heap_top > heap_base)
        gc_call();
#endif

#ifdef USE_THREADS
    lock_mutex(&alloc_mutex);
#endif
 retry:
    /* 1. attempt to get a free block with size >= length from the freelists */
    free_chunk = allocator_alloc(length);

    /* 2. if unsuccessful, try alternative allocation strategies */
    if (!free_chunk) {
        /* 2.a if the collection threshold would be exceeded, collect the heap */
        if ((long)heap_top + length > (long)heap_next_collection) {
            /* 2.a.1. collect if the next_collection threshold would be exceeded */
            gc_call();

            /* 2.a.2. we just ran a collection, recheck the freelists */
            free_chunk = allocator_alloc(length);
            if (free_chunk)
                goto success;

            /* 2.a.3. we can't satisfy the request from the freelists, check
                      against the heap_limit whether growing the heap is possible */
            if ((long)heap_top + length > (long)heap_limit)
                goto failure;
        }

        /* 2.b. grow the heap */
        free_chunk = heap_top;
        heap_top = (void*)((long)heap_top + length);
    }

 success:
    /* 3.a. mark all necessary bits, store the finalizer & return the newly allocated block */

    /* I don't mark the object-start anymore, as it always is at the beginning of a free-block,
       which already is marked (Note: The first free-block gets marked in heap_init). -- phil. */
    bitmap_setbit(start_bits, free_chunk); /* mark the new object */

#if 1 /* FIXME: will become unnecessary soon */
    bitmap_setbit(start_bits, (void*)((long)free_chunk + (long)length)); /* mark the freespace behind the new object */
#endif

    if (references)
        bitmap_setbit(reference_bits, free_chunk);
    else
        bitmap_clearbit(reference_bits, free_chunk);

    /* store a hint that there's a finalizer for this address */
    if (finalizer)
        heap_add_finalizer_for_object_at(free_chunk);

#if 0 /* lifespan */
    fprintf(stderr, "alloc 0x%lx bytes @ 0x%lx\n", length, free_chunk);
#endif

#ifdef GC_COLLECT_STATISTICS
    gc_alloc_total += length;
    ++gc_alloc_count;
#endif

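    /* note: the success path falls through to this point as well, so the
       allocation mutex (under USE_THREADS) is released on every exit;
       free_chunk is NULL only if the request could not be satisfied */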
 failure:
#ifdef USE_THREADS
    unlock_mutex(&alloc_mutex);
#endif
    return free_chunk;
}

void
heap_addreference (void **reflocation)
{
    /* I currently use a separate linked list (as in the original code) to hold
       the global reference locations, but I'll change this to allocate these
       in blocks on the heap; we'll have to add JIT-Marker code for those Java
       objects then. -- phil. */

    heap_add_address_to_address_list(&references, reflocation);
}

static
__inline__
void gc_finalize (void)
{
    /* This will have to be slightly rewritten as soon as the JIT-marked heap-based lists are used. -- phil. */

    address_list_node*  curr = finalizers;

#if 0
    /* FIXME: new code, please! */
#else
    while (curr) {
        if (curr->address) {
            if (!bitmap_testbit(mark_bits, curr->address)) {

#ifdef FINALIZER_COUNTING
                ++gc_finalizers_executed;
#endif
                asm_calljavamethod(((java_objectheader*)curr->address)->vftbl->class->finalizer,
                                   curr->address, NULL, NULL, NULL);
                curr->address = 0;
            }
        }

        curr = curr->next;
    }
#endif
}


__inline__
static
void gc_reclaim (void)
{
    void* free_start = heap_top; /* initialised in case the loop below never runs */
    void* free_end = heap_base;
    BITBLOCK* temp_bits;
    bitmap_t* temp_bitmap;

    /* 1. reset the freelists */
    allocator_reset();

    /* 2. reclaim unmarked objects */
#if 0
    /* FIXME: add new code, please! */
#else
    while (free_end < heap_top) {
        free_start = bitmap_find_next_combination_set_unset(start_bitmap,
                                                            mark_bitmap,
                                                            free_end);

        if (free_start < heap_top) {
            free_end = bitmap_find_next_setbit(mark_bitmap, (void*)((long)free_start + 8)); /* FIXME: constant used */

            if (free_end < heap_top) {
                allocator_free(free_start, (long)free_end - (long)free_start);

#if !defined(JIT_MARKER_SUPPORT)
                /* would make trouble with JIT-Marker support: the marker for unused blocks
                   might be called, leading to a bad dereference. -- phil. */
                bitmap_setbit(mark_bits, free_start);
#endif
            }
        } else
            free_end = heap_top;
    }
#endif

    /* 3.1. swap mark & start bitmaps */
    temp_bits = mark_bits;
    mark_bits = start_bits;
    start_bits = temp_bits;

    temp_bitmap = mark_bitmap;
    mark_bitmap = start_bitmap;
    start_bitmap = temp_bitmap;
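    /* The swap works because, after a collection, the mark bitmap has a set
       bit at the start of every surviving object (set during marking) and of
       every reclaimed free chunk (set in the loop above), which is exactly
       what the next cycle expects of the start bitmap; the old start bitmap
       is recycled and cleared before the next marking phase in gc_run(). */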

#if 0 /* already handled in allocate */
    /* 3.2. mask reference bitmap */
    bitmap_mask_with_bitmap(reference_bitmap, start_bitmap);
#endif

    /* 3.3. update heap_top */
    if (free_start < heap_top)
        heap_top = free_start;

    if (heap_top < heap_limit)
        bitmap_setbit(start_bits, heap_top);

    /* 4. adjust the collection threshold */
    heap_next_collection = (void*)((long)heap_top + ((long)heap_limit - (long)heap_top) / 4);
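    /* i.e. collect again once a quarter of the space remaining between
       heap_top and heap_limit has been used; e.g. with 2 MB still free, the
       next collection is triggered after a further 512 KB of heap growth */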
    if (heap_next_collection > heap_limit)
        heap_next_collection = heap_limit;
}

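/* gc_mark_object_at() treats addr conservatively as a potential reference:
   after the validity tests below it marks the block and, if the object was
   allocated with references == true (see heap_allocate), recursively marks
   every word within the object's extent. Termination relies on the in-heap
   and already-marked tests. */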
__inline__
static
void
gc_mark_object_at (void** addr)
{
    /*
     * A note concerning the order of the tests:
     *
     * Statistics collected during a test run, where alignment
     * was tested before checking whether the addr points into
     * the heap:
     * >> LOG: 9301464 bytes for 196724 objects allocated.
     * >> LOG: 15 garbage collections performed.
     * >> LOG: 6568440 heapblocks visited, 469249 objects marked
     * >> LOG:     1064447 visits to objects already marked.
     * >> LOG:     988270 potential references not aligned.
     * >> LOG:     4049446 out of heap.
     * >> LOG:     5236 not an object.
     *
     * These results show that only about 1/4 of all heapblocks
     * point to objects; the single most important reason why a
     * heapblock cannot point at an object is that its value
     * doesn't fall within the heap area (this test was performed
     * with a 3MB heap).
     *
     * From the results, the various tests have to be conducted
     * in the following order for maximum efficiency:
     *    1. addr in heap?
     *    2. already marked?
     *    3. aligned?
     *    4. object?
     *
     * The results after reordering:
     * >> LOG: 9301464 bytes for 196724 objects allocated.
     * >> LOG: 15 garbage collections performed.
     * >> LOG: 6568440 heapblocks visited, 469249 objects marked
     * >> LOG:     1064447 visits to objects already marked.
     * >> LOG:     350 potential references not aligned.
     * >> LOG:     5037366 out of heap.
     * >> LOG:     5236 not an object.
     *
     * And using:
     *    1. addr in heap?
     *    2. already marked?
     *    3. object?
     *    4. aligned?
     *
     * >> LOG: 9301464 bytes for 196724 objects allocated.
     * >> LOG: 15 garbage collections performed.
     * >> LOG: 6568440 heapblocks visited, 469249 objects marked
     * >> LOG:     5037366 out of heap.
     * >> LOG:     1064456 visits to objects already marked.
     * >> LOG:     5539 not an object.
     * >> LOG:     38 potential references not aligned.
     *
     * Apparently, most unaligned values are already eliminated
     * when checking against the bounds of the heap. Checking this
     * property first should thus improve collection times.
     */


    /* 1.a. if addr doesn't point into the heap, return. */
    if (!((void*)addr >= heap_base && (void*)addr < heap_top)) {
#ifdef GC_COLLECT_STATISTICS
        ++gc_mark_not_inheap;
#endif
        return;
    }

    /* 1.b. if align(addr) has already been marked during this collection, return. */
    if (bitmap_testbit(mark_bits, (void*)addr)) {
#ifdef GC_COLLECT_STATISTICS
        ++gc_mark_already_marked;
#endif
        return;
    }

    /* 1.c. if align(addr) doesn't point to the start of an object, return. */
    if (!bitmap_testbit(start_bits, (void*)addr)) {
#ifdef GC_COLLECT_STATISTICS
        ++gc_mark_not_object;
#endif
        return;
    }

    /* 1.d. if addr is not properly aligned, return. */
    if ((long)addr & ((1 << ALIGN) - 1)) {
#ifdef GC_COLLECT_STATISTICS
        ++gc_mark_not_aligned;
#endif
        return;
    }

    /* 2. Mark the object at addr */
    bitmap_setbit(mark_bits, (void*)addr);
#ifdef GC_COLLECT_STATISTICS
    ++gc_mark_objects_marked;
#endif

#ifdef JIT_MARKER_SUPPORT
    asm_calljavamethod(((java_objectheader*)addr)->vftbl->class->marker, addr, NULL, NULL, NULL);
#else

    /* 3. mark the references contained within the extents of the object at addr */
    if (bitmap_testbit(reference_bits, addr)) {
        /* 3.1. find the end of the object */
        void** end;

#ifdef SIZE_FROM_CLASSINFO
        if (((java_objectheader*)addr)->vftbl == class_array->vftbl)
            end = (void**)((long)addr + (long)((java_arrayheader*)addr)->alignedsize);
        else
            end = (void**)((long)addr + (long)((java_objectheader*)addr)->vftbl->class->alignedsize);
#else
        end = (void**)bitmap_find_next_setbit(start_bitmap, addr + 1); /* points just behind the object */
#endif

        /* 3.2. mark the references within the object at addr */
#ifdef GC_COLLECT_STATISTICS
        gc_mark_heapblocks_visited += ((long)end - (long)addr) >> ALIGN;
#endif
        while (addr < end)
            gc_mark_object_at(*(addr++));
    }
#endif

    return;
}


__inline__
static
void gc_mark_references (void)
{
    address_list_node* curr = references;

    while (curr) {
        gc_mark_object_at(*((void**)(curr->address)));
        curr = curr->next;
    }
}

__inline__
static
void
markreferences(void** start, void** end)
{
    while (start < end)
        gc_mark_object_at(*(start++));
}

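/* gc_mark_stack() scans the C stack conservatively: every word between the
   recorded stack bottom and the current top of stack (approximated by the
   address of a local dummy variable) is handed to gc_mark_object_at() as a
   potential reference; with USE_THREADS this is done for every live thread. */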
__inline__
static
void gc_mark_stack (void)
{
    void *dummy;

#ifdef USE_THREADS
    thread *aThread;

    if (currentThread == NULL) {
        void **top_of_stack = &dummy;

        if (top_of_stack > stackbottom)
            markreferences(stackbottom, top_of_stack);
        else
            markreferences(top_of_stack, stackbottom);
    }
    else {
        for (aThread = liveThreads; aThread != 0;
             aThread = CONTEXT(aThread).nextlive) {
            gc_mark_object_at((void*)aThread);
            if (aThread == currentThread) {
                void **top_of_stack = &dummy;

                if (top_of_stack > (void**)CONTEXT(aThread).stackEnd)
                    markreferences((void**)CONTEXT(aThread).stackEnd, top_of_stack);
                else
                    markreferences(top_of_stack, (void**)CONTEXT(aThread).stackEnd);
            }
            else {
                if (CONTEXT(aThread).usedStackTop > CONTEXT(aThread).stackEnd)
                    markreferences((void**)CONTEXT(aThread).stackEnd,
                                   (void**)CONTEXT(aThread).usedStackTop);
                else
                    markreferences((void**)CONTEXT(aThread).usedStackTop,
                                   (void**)CONTEXT(aThread).stackEnd);
            }
        }

        markreferences((void**)&threadQhead[0],
                       (void**)&threadQhead[MAX_THREAD_PRIO]);
    }
#else
    void **top_of_stack = &dummy;

    if (top_of_stack > stackbottom)
        markreferences(stackbottom, top_of_stack);
    else
        markreferences(top_of_stack, stackbottom);
#endif
}


static
void gc_run (void)
{
    static int armageddon_is_near = 0;

    if (armageddon_is_near) {
        /* armageddon_is_here! */
        fprintf(stderr, "Oops, seems like there's a slight problem here: gc_run() called while still running?!\n");
        return;
    }

    armageddon_is_near = true;
    heap_next_collection = heap_limit;  /* try to avoid finalizer-induced collections */

    bitmap_clear(mark_bitmap);

    asm_dumpregistersandcall(gc_mark_stack);
    gc_mark_references();
    gc_finalize();
    gc_reclaim();

    armageddon_is_near = false;

#ifdef GC_COLLECT_STATISTICS
    ++gc_collections_count;
#endif
}

/************************* Function: gc_init **********************************

  Initializes anything that must be initialized to call the gc on the right
  stack.

******************************************************************************/

void
gc_init (void)
{
}

/************************** Function: gc_call ********************************

  Calls the garbage collector. The garbage collector should always be called
  using this function since it ensures that enough stack space is available.

******************************************************************************/

void
gc_call (void)
{
#ifdef USE_THREADS
    assert(blockInts == 0);

    intsDisable();
    if (currentThread == NULL || currentThread == mainThread)
        gc_run();
    else
        asm_switchstackandcall(CONTEXT(mainThread).usedStackTop, gc_run);
    intsRestore();
#else
    gc_run();
#endif
}



/*
 * These are local overrides for various environment variables in Emacs.
 * Please do not remove this and leave it at the end of the file, where
 * Emacs will automagically detect them.
 * ---------------------------------------------------------------------
 * Local variables:
 * mode: c
 * indent-tabs-mode: t
 * c-basic-offset: 4
 * tab-width: 4
 * End:
 */