#include <unistd.h>            /* getpagesize, mmap, ... */
#include "threads/thread.h"
#include "threads/locks.h"

#if !defined(HAVE_MAP_FAILED)
#define MAP_FAILED ((void*) -1)

#define PAGESIZE_MINUS_ONE (getpagesize() - 1)

#define HEURISTIC_SEL    0
#define HEURISTIC_PARAM  2UL
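/* Collection-threshold heuristics, selected via HEURISTIC_SEL (descriptions
 * inferred from the macros below):
 *   0: trigger the next collection after 1/2^HEURISTIC_PARAM of the space
 *      remaining between heap_top and heap_limit has been used,
 *   1: let the threshold grow with the amount already allocated
 *      ((heap_top - heap_base) << HEURISTIC_PARAM beyond heap_top),
 *   2: trigger after a fixed increment of HEURISTIC_PARAM bytes.
 * The _init variant below starts with 1/16 of the initially free space. */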
#define next_collection_heuristic_init() \
    (void*)((long)heap_top + (((long)heap_limit - (long)heap_top) >> 4))

#if HEURISTIC_SEL == 0
#define next_collection_heuristic() \
    (void*)((long)heap_top + (((long)heap_limit - (long)heap_top) >> HEURISTIC_PARAM))
#elif HEURISTIC_SEL == 1
#define next_collection_heuristic() \
    (void*)((long)heap_top + (((long)heap_top - (long)heap_base) << HEURISTIC_PARAM))
#elif HEURISTIC_SEL == 2
#define next_collection_heuristic() \
    (void*)((long)heap_top + HEURISTIC_PARAM)

//#define PSEUDO_GENERATIONAL
//#define COLLECT_LIFESPAN
//#define NEW_COLLECT_LIFESPAN
//#define COLLECT_FRAGMENTATION
//#define COLLECT_SIZES

//#define GC_COLLECT_STATISTICS
//#define FINALIZER_COUNTING

#undef STRUCTURES_ON_HEAP
//#define STRUCTURES_ON_HEAP

#include "allocator.h"    /* rev. 1 allocator */
#include "bitmap2.h"      /* rev. 2 bitmap management */
#define align_size(size)  ((size) & ~((1 << ALIGN) - 1))
#define MAP_ADDRESS       (void*) 0x10000000

/* --- file-wide variables */

static void* heap_base = NULL;
static SIZE  heap_size = 0;
static void* heap_top = NULL;
static void* heap_limit = NULL;
static void* heap_next_collection = NULL;
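/* Three bitmaps, one bit per heap block (cf. bitmap2.h):
   - start bits:     set for blocks that begin an object (and for the free
                     area behind the last allocated object),
   - reference bits: set for objects that may contain references and thus
                     have to be scanned by the marker,
   - mark bits:      set during a collection for every object found live. */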
static bitmap_t* start_bitmap = NULL;
static BITBLOCK* start_bits = NULL;
static bitmap_t* reference_bitmap = NULL;
static BITBLOCK* reference_bits = NULL;
static bitmap_t* mark_bitmap = NULL;
static BITBLOCK* mark_bits = NULL;

static void** stackbottom = NULL;

typedef struct address_list_node {
    struct address_list_node* prev;
    struct address_list_node* next;

static address_list_node* references = NULL;
static address_list_node* finalizers = NULL;
#ifdef GC_COLLECT_STATISTICS

static unsigned long gc_collections_count = 0;

static unsigned long gc_alloc_total = 0;
static unsigned long gc_alloc_count = 0;

static unsigned long gc_mark_heapblocks_visited = 0;
static unsigned long gc_mark_not_aligned = 0;
static unsigned long gc_mark_not_inheap = 0;
static unsigned long gc_mark_not_object = 0;
static unsigned long gc_mark_objects_marked = 0;
static unsigned long gc_mark_already_marked = 0;

static unsigned long gc_mark_null_pointer = 0;

#ifdef FINALIZER_COUNTING

static unsigned long gc_finalizers_executed = 0;
static unsigned long gc_finalizers_detected = 0;

static iMux alloc_mutex;

#ifdef COLLECT_LIFESPAN
static FILE* tracefile;

#ifdef COLLECT_FRAGMENTATION
static FILE* fragfile;
static FILE* fragsizefile;
/* --- implementation */

heap_init (SIZE size,
           SIZE startsize,        /* when should we collect for the first time? */
           void **in_stackbottom)

    /* 1. Initialise the freelists & reset the allocator's state */

    /* 2. Allocate at least (alignment!) size bytes of memory for the heap */
    heap_size = align_size(size + ((1 << ALIGN) - 1));

#if !(defined(HAVE_MAP_ANONYMOUS))
    heap_base = malloc(heap_size);

    heap_base = (void*) mmap (NULL,
                              ((size_t)heap_size + PAGESIZE_MINUS_ONE) & ~PAGESIZE_MINUS_ONE,
                              PROT_READ | PROT_WRITE,
                              MAP_PRIVATE | MAP_ANONYMOUS,

    if (heap_base == (void*)MAP_FAILED) {
        /* unable to allocate the requested amount of memory */
        fprintf(stderr, "heap2.c: The queen, mylord, is dead! (mmap failed)\n");

    /* 3. Allocate the bitmaps */
    start_bitmap = bitmap_allocate(heap_base, heap_size);
    reference_bitmap = bitmap_allocate(heap_base, heap_size);
    mark_bitmap = bitmap_allocate(heap_base, heap_size);

    start_bits = start_bitmap->bitmap;
    reference_bits = reference_bitmap->bitmap;
    mark_bits = mark_bitmap->bitmap;

    /* 4. Mark the first free-area as an object-start */
    bitmap_setbit(start_bits, heap_base);

    /* 5. Initialise the heap's state (heap_top, etc.) */
    stackbottom = in_stackbottom;    /* copy the stackbottom */

    heap_top = heap_base;            /* the current end of the heap (just behind the last allocated object) */
    heap_limit = (void*)((long)heap_base + heap_size);    /* points just behind the last accessible block of the heap */

    /* 6. calculate a useful first collection limit */
    /* This is extremely primitive at this point...
       we should replace it with something more useful -- phil. */
    heap_next_collection = next_collection_heuristic_init();

    /* 7. Init the global reference lists & finalizer addresses */

#ifdef STRUCTURES_ON_HEAP
    heap_addreference(&references);
    heap_addreference(&finalizers);

    /* 8. Init the mutexes for synchronization */
    alloc_mutex.holder = 0;

    /* 9. Set up collection of lifespan data */
#ifdef COLLECT_LIFESPAN
    tracefile = fopen("heap.trace", "w");

    tracefile = popen("gzip -9 >heap.trace.gz", "w");

        fprintf(stderr, "heap2.c: Radio Ga Ga! (fopen failed)\n");

    fprintf(tracefile, "heap_base\t0x%lx\n", heap_base);
    fprintf(tracefile, "heap_limit\t0x%lx\n", heap_limit);
    fprintf(tracefile, "heap_top\t0x%lx\n", heap_top);

#if defined(NEW_COLLECT_LIFESPAN) || defined(COLLECT_SIZES)
    lifespan_init(heap_base, heap_size);

    /* 10. Set up collection of fragmentation data */
#ifdef COLLECT_FRAGMENTATION
    fragfile = popen("gzip -9 >fragmentation.gz", "w");
    fragsizefile = popen("gzip -9 >freeblocks.gz", "w");

heap_call_finalizer_for_object_at(java_objectheader* object_addr)

    asm_calljavamethod(object_addr->vftbl->class->finalizer, object_addr, NULL, NULL, NULL);
#ifdef FINALIZER_COUNTING
    ++gc_finalizers_executed;

    address_list_node* curr = finalizers;

    /* 0. clean up lifespan module */
#ifdef COLLECT_LIFESPAN

#if defined(NEW_COLLECT_LIFESPAN)

#ifdef COLLECT_FRAGMENTATION
    pclose(fragsizefile);

    /* 1. Clean up on the heap... finalize all remaining objects */

        address_list_node* prev = curr;
        java_objectheader* addr = (java_objectheader*)(curr->address);

        if (addr && bitmap_testbit(start_bits, addr))
            heap_call_finalizer_for_object_at(addr);

    /* 2. Release the bitmaps */
    bitmap_release(start_bitmap);
    bitmap_release(reference_bitmap);
    bitmap_release(mark_bitmap);

    /* 3. Release the memory allocated to the heap */
    munmap(heap_base, heap_size);

    /* 4. emit statistical data */
#ifdef GC_COLLECT_STATISTICS
    sprintf(logtext, "%ld bytes for %ld objects allocated.",
            gc_alloc_total, gc_alloc_count);

    sprintf(logtext, "%ld garbage collections performed.", gc_collections_count);

    sprintf(logtext, "%ld heapblocks visited, %ld objects marked",
            gc_mark_heapblocks_visited, gc_mark_objects_marked);

    sprintf(logtext, " %ld null pointers.", gc_mark_null_pointer);

    sprintf(logtext, " %ld out of heap.", gc_mark_not_inheap);

    sprintf(logtext, " %ld visits to objects already marked.", gc_mark_already_marked);

    sprintf(logtext, " %ld not an object.", gc_mark_not_object);

    sprintf(logtext, " %ld potential references not aligned.", gc_mark_not_aligned);

#ifdef FINALIZER_COUNTING
    sprintf(logtext, "%ld objects with a finalizer", gc_finalizers_detected);

    if (gc_finalizers_detected == gc_finalizers_executed)
        sprintf(logtext, " all finalizers executed.");

        sprintf(logtext, " only %ld finalizers executed.", gc_finalizers_executed);

#if defined(NEW_COLLECT_LIFESPAN) || defined(COLLECT_SIZES)

heap_add_address_to_address_list(address_list_node** list, void* address)

    /* Note: address lists are kept sorted to simplify finalization */

    address_list_node* new_node = malloc(sizeof(address_list_node));
    new_node->address = address;
    new_node->next = NULL;
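    /* walk the list to find the insertion point that keeps the addresses
       in ascending order (cf. the note above) */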
    while (*list && (*list)->next) {
        if ((*list)->next->address < address)
            list = &(*list)->next;

            new_node->next = *list;

    new_node->next = *list;

heap_add_finalizer_for_object_at(void* addr)
    /* Finalizers seem to be very rare... for this reason, I keep a linked
       list of the addresses of objects that have a finalizer attached. This
       list is kept sorted in ascending address order, which is also the
       order in which garbage is reclaimed. The list is currently kept
       separate from the heap, but should be moved onto it; some JIT-marker
       code to handle these special objects will need to be added first.
       -- phil. */
    heap_add_address_to_address_list(&finalizers, addr);

#ifdef COLLECT_LIFESPAN
    fprintf(tracefile, "finalizer\t0x%lx\n", addr);
heap_allocate (SIZE in_length,
               methodinfo *finalizer)

    SIZE length = align_size(in_length + ((1 << ALIGN) - 1));
    void* free_chunk = NULL;

    /* check for misaligned in_length parameter */
    if (length != in_length)
                "heap2.c: heap_allocate was passed unaligned in_length parameter: %ld, \n aligned to %ld. (mistrust)\n",

#ifdef FINALIZER_COUNTING
        ++gc_finalizers_detected;

#if defined(COLLECT_LIFESPAN) || defined(NEW_COLLECT_LIFESPAN)
    /* perform garbage collection to collect data for lifespan analysis */
    if (heap_top > heap_base)

    lock_mutex(&alloc_mutex);

    /* 1. attempt to get a free block with size >= length from the freelists */
    free_chunk = allocator_alloc(length);

    /* 2. if unsuccessful, try alternative allocation strategies */

        /* 2.a if the collection threshold would be exceeded, collect the heap */
        if ((long)heap_top + length > (long)heap_next_collection) {
            /* 2.a.1. collect if the next_collection threshold would be exceeded */

            /* 2.a.2. we just ran a collection, recheck the freelists */
            free_chunk = allocator_alloc(length);

            /* 2.a.3. we can't satisfy the request from the freelists, check
               against the heap_limit whether growing the heap is possible */
            if ((long)heap_top + length > (long)heap_limit)

        /* 2.b. grow the heap */
        free_chunk = heap_top;
        heap_top = (void*)((long)heap_top + length);

    /* 3.a. mark all necessary bits, store the finalizer & return the newly allocated block */

    /* I don't mark the object-start anymore, as it always is at the beginning of a free-block,
       which already is marked (Note: The first free-block gets marked in heap_init). -- phil. */
    bitmap_setbit(start_bits, free_chunk);    /* mark the new object */

#ifndef SIZE_FROM_CLASSINFO
    bitmap_setbit(start_bits, (void*)((long)free_chunk + (long)length));    /* mark the freespace behind the new object */
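    /* presumably the two arms of a conditional: the reference bit records
       whether the new object may contain references, so the marker knows
       whether to scan its contents (see gc_mark_object_at, step 3) */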
    bitmap_setbit(reference_bits, free_chunk);

    bitmap_clearbit(reference_bits, free_chunk);

    /* store a hint that there's a finalizer for this address */
        heap_add_finalizer_for_object_at(free_chunk);

#ifdef GC_COLLECT_STATISTICS
    gc_alloc_total += length;

#ifdef COLLECT_LIFESPAN
    fprintf(tracefile, "alloc\t0x%lx\t0x%lx\n",
            free_chunk, (long)free_chunk + length);

#if defined(NEW_COLLECT_LIFESPAN) || defined(COLLECT_SIZES)
    lifespan_alloc(free_chunk, length);

    unlock_mutex(&alloc_mutex);

heap_addreference (void **reflocation)

    /* I currently use a separate linked list (as in the original code) to hold
       the global reference locations, but I'll change this to allocate these
       in blocks on the heap; we'll have to add JIT-Marker code for those Java
       objects then. -- phil. */

    heap_add_address_to_address_list(&references, reflocation);

void gc_finalize (void)

    /* This will have to be slightly rewritten as soon as the JIT-marked heap-based lists are used. -- phil. */

    address_list_node* curr = finalizers;
    address_list_node* prev;

    /* FIXME: new code, please! */
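    /* an address in the finalizer list whose mark bit is clear belongs to an
       object that is about to be reclaimed, so its finalizer is run now */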
            if (!bitmap_testbit(mark_bits, curr->address)) {
#ifdef FINALIZER_COUNTING
                ++gc_finalizers_executed;

                asm_calljavamethod(((java_objectheader*)curr->address)->vftbl->class->finalizer,
                                   curr->address, NULL, NULL, NULL);

void gc_reclaim (void)

#ifdef PSEUDO_GENERATIONAL
    static void* generation_start = 0;
    static int generation_num = 0;
    void* addr = heap_base;

    void* free_end = heap_base;

    bitmap_t* temp_bitmap;

#ifdef COLLECT_FRAGMENTATION
    unsigned long free_size = 0;
    unsigned long free_fragments = 0;

#ifdef PSEUDO_GENERATIONAL
    if (!generation_start || !(generation_num % 5))
        generation_start = heap_base;

    /* 1. reset the freelists */

    allocator_mark_free_kludge(start_bits);    /* this line will be kicked out, when
                                                  the SIZE_FROM_CLASSINFO reclaim
                                                  is implemented (very soon!!) */

#ifdef PSEUDO_GENERATIONAL
    for (addr = heap_base; addr <= generation_start; ++addr) {
        if (bitmap_testbit(start_bits, addr))
            bitmap_setbit(mark_bits, addr);

    allocator_mark_free_kludge(start_bits);    /* this line will be kicked out, when
                                                  the SIZE_FROM_CLASSINFO reclaim
                                                  is implemented (very soon!!) */

    /* 2. reclaim unmarked objects */
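    /* The sweep walks the bitmaps rather than the objects themselves: a block
       whose start bit is set but whose mark bit is clear begins a dead
       (reclaimable) region, the next set mark bit ends it, and each such
       region is handed back to the allocator's freelists. */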
    if (!bitmap_testbit(start_bits, heap_base))
        free_start = heap_base;

        free_start = bitmap_find_next_combination_set_unset(start_bitmap,

    while (free_end < heap_top) {
        free_start = bitmap_find_next_combination_set_unset(start_bitmap,

        if (free_start < heap_top) {
            free_end = bitmap_find_next_setbit(mark_bitmap, (void*)((long)free_start + 8));    /* FIXME: constant used */

            if (free_end < heap_top) {
                allocator_free(free_start, (long)free_end - (long)free_start);

#ifdef COLLECT_FRAGMENTATION
                free_size += (long)free_end - (long)free_start;

#ifdef COLLECT_LIFESPAN
                        "free\t0x%lx\t0x%lx\n",

#ifdef NEW_COLLECT_LIFESPAN
                lifespan_free(free_start, free_end);

#ifndef SIZE_FROM_CLASSINFO
                /* would make trouble with JIT-Marker support. The Marker for unused blocks
                   might be called, leading to a bad dereference. -- phil. */
                bitmap_setbit(mark_bits, free_start);    /* necessary to calculate obj-size bitmap based. */

    /* 3.1. swap mark & start bitmaps */
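    /* After the sweep the mark bitmap has a bit set for every live object
       (and, without SIZE_FROM_CLASSINFO, for the start of every free region),
       which is exactly what the start bitmap must describe for the new heap;
       swapping the two avoids a copy, and the old start bitmap becomes the
       mark bitmap, which is cleared again at the start of the next run. */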
    temp_bits = mark_bits;
    mark_bits = start_bits;
    start_bits = temp_bits;

    temp_bitmap = mark_bitmap;
    mark_bitmap = start_bitmap;
    start_bitmap = temp_bitmap;

#if 0 /* operation already handled in allocate */
    /* 3.2. mask reference bitmap */
    bitmap_mask_with_bitmap(reference_bitmap, start_bitmap);

    /* 3.3. update heap_top */
    if (free_start < heap_top) {
        heap_top = free_start;
#ifdef NEW_COLLECT_LIFESPAN
        lifespan_free(free_start, free_end);

    if (heap_top < heap_limit)
        bitmap_setbit(start_bits, heap_top);

    /* 3.4. emit fragmentation info */
#ifdef COLLECT_FRAGMENTATION

        unsigned long heap_full = (unsigned long)heap_top - (unsigned long)heap_base;
        unsigned long heap_life = (unsigned long)heap_top - (unsigned long)heap_base - free_size;

                "%ld\t%ld\t%ld\t%ld\t%f\t%f\t%f\n",

                100*(float)free_size/(free_fragments ? free_fragments : 1),
                100*(float)heap_life/(heap_full ? heap_full : 1),
                100*(float)free_size/(heap_full ? heap_full : 1)

        allocator_dump_to_file(fragsizefile);

    /* 4. adjust the collection threshold */
    heap_next_collection = next_collection_heuristic();
    if (heap_next_collection > heap_limit)
        heap_next_collection = heap_limit;

#ifdef COLLECT_LIFESPAN
    fprintf(tracefile, "heap_top\t0x%lx\n", heap_top);

#ifdef PSEUDO_GENERATIONAL
    generation_start = heap_top;
gc_mark_object_at (void** addr)

     * A note concerning the order of the tests:
     *
     * Statistics collected during a test run, where alignment
     * was tested before checking whether the addr points into
     *
     * >> LOG: 9301464 bytes for 196724 objects allocated.
     * >> LOG: 15 garbage collections performed.
     * >> LOG: 6568440 heapblocks visited, 469249 objects marked
     * >> LOG: 1064447 visits to objects already marked.
     * >> LOG: 988270 potential references not aligned.
     * >> LOG: 4049446 out of heap.
     * >> LOG: 5236 not an object.
     * These results show that only about 1/4 of all heapblocks
     * point to objects; the single most important reason why a
     * heapblock cannot point at an object is that its value
     * doesn't fall within the heap area (this test was performed
     * From the results, the various tests have to be conducted
     * in the following order for maximum efficiency:
     * 2. already marked ?
     *
     * The results after reordering:
     * >> LOG: 9301464 bytes for 196724 objects allocated.
     * >> LOG: 15 garbage collections performed.
     * >> LOG: 6568440 heapblocks visited, 469249 objects marked
     * >> LOG: 1064447 visits to objects already marked.
     * >> LOG: 350 potential references not aligned.
     * >> LOG: 5037366 out of heap.
     * >> LOG: 5236 not an object.
     *
     * 2. already marked ?
     *
     * >> LOG: 9301464 bytes for 196724 objects allocated.
     * >> LOG: 15 garbage collections performed.
     * >> LOG: 6568440 heapblocks visited, 469249 objects marked
     * >> LOG: 5037366 out of heap.
     * >> LOG: 1064456 visits to objects already marked.
     * >> LOG: 5539 not an object.
     * >> LOG: 38 potential references not aligned.
     * Apparently, most unaligned values will already be eliminated
     * when checking against the bounds of the heap. Checking this
     * property first should thus improve collection times.
    /* 1.a. if addr doesn't point into the heap, return. */
    if ((unsigned long)addr - (unsigned long)heap_base >=
        ((long)heap_top - (long)heap_base)) {
#ifdef GC_COLLECT_STATISTICS
            ++gc_mark_null_pointer;

            ++gc_mark_not_inheap;

    /* 1.b. if align(addr) has already been marked during this collection, return. */
    if (bitmap_testbit(mark_bits, (void*)addr)) {
#ifdef GC_COLLECT_STATISTICS
        ++gc_mark_already_marked;

    /* 1.c. if align(addr) doesn't point to the start of an object, return. */
    if (!bitmap_testbit(start_bits, (void*)addr)) {
#ifdef GC_COLLECT_STATISTICS
        ++gc_mark_not_object;

    /* 1.d. if addr is not properly aligned, return. */
    if ((long)addr & ((1 << ALIGN) - 1)) {
#ifdef GC_COLLECT_STATISTICS
        ++gc_mark_not_aligned;

    /* 2. Mark the object at addr */
    bitmap_setbit(mark_bits, (void*)addr);
#ifdef GC_COLLECT_STATISTICS
    ++gc_mark_objects_marked;

#ifdef JIT_MARKER_SUPPORT
    asm_calljavamethod(addr->vftbl->class->marker, addr, NULL, NULL, NULL);

    /* 3. mark the references contained within the extents of the object at addr */
    if (bitmap_testbit(reference_bits, addr)) {
        /* 3.1. find the end of the object */

#ifdef SIZE_FROM_CLASSINFO
        if (((java_objectheader*)addr)->vftbl == class_array->vftbl)
            end = (void**)((long)addr + (long)((java_arrayheader*)addr)->alignedsize);

            end = (void**)((long)addr + (long)((java_objectheader*)addr)->vftbl->class->alignedsize);

        end = (void**)bitmap_find_next_setbit(start_bitmap, addr + 1);    /* points just behind the object */

        /* 3.2. mark the references within the object at addr */
#ifdef GC_COLLECT_STATISTICS
        gc_mark_heapblocks_visited += ((long)end - (long)addr) >> ALIGN;

            gc_mark_object_at(*(addr++));

void gc_mark_references (void)

    address_list_node* curr = references;

#ifdef GC_COLLECT_STATISTICS
        ++gc_mark_heapblocks_visited;

        gc_mark_object_at(*((void**)(curr->address)));
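
/* conservatively treat every word in [start, end) as a potential reference
   and try to mark whatever it points to (used for scanning thread stacks) */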
markreferences(void** start, void** end)

    while (start < end) {
#ifdef GC_COLLECT_STATISTICS
        ++gc_mark_heapblocks_visited;

        gc_mark_object_at(*(start++));

void gc_mark_stack (void)

    if (currentThread == NULL) {
        void **top_of_stack = &dummy;
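        /* &dummy approximates the current top of the stack; the comparison
           below copes with stacks growing in either direction */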
        if (top_of_stack > stackbottom)
            markreferences(stackbottom, top_of_stack);

            markreferences(top_of_stack, stackbottom);

        for (aThread = liveThreads; aThread != 0;
             aThread = CONTEXT(aThread).nextlive) {
            gc_mark_object_at((void*)aThread);
            if (CONTEXT(aThread).usedStackTop > CONTEXT(aThread).stackEnd)
                markreferences((void**)CONTEXT(aThread).stackEnd,
                               (void**)CONTEXT(aThread).usedStackTop);

                markreferences((void**)CONTEXT(aThread).usedStackTop,
                               (void**)CONTEXT(aThread).stackEnd);

        markreferences((void**)&threadQhead[0],
                       (void**)&threadQhead[MAX_THREAD_PRIO]);

    void **top_of_stack = &dummy;

    if (top_of_stack > stackbottom)
        markreferences(stackbottom, top_of_stack);

        markreferences(top_of_stack, stackbottom);

    static int armageddon_is_near = 0;

    if (armageddon_is_near) {
        /* armageddon_is_here! */
        fprintf(stderr, "Oops, seems like there's a slight problem here: gc_run() called while still running?!\n");

    armageddon_is_near = true;
    heap_next_collection = heap_limit;    /* try to avoid finalizer-induced collections */
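    /* a collection: clear the mark bits, then mark conservatively from the
       registers and the stack(s), then from the registered global references */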
    bitmap_clear(mark_bitmap);

    asm_dumpregistersandcall(gc_mark_stack);
    gc_mark_references();

    armageddon_is_near = false;

#ifdef GC_COLLECT_STATISTICS
    ++gc_collections_count;


/************************* Function: gc_init **********************************

    Initializes anything that must be initialized to call the gc on the right

******************************************************************************/


/************************** Function: gc_call ********************************

    Calls the garbage collector. The garbage collector should always be called
    using this function since it ensures that enough stack space is available.

******************************************************************************/

    assert(blockInts == 0);

    if (currentThread == NULL || currentThread == mainThread) {
        CONTEXT(mainThread).usedStackTop = &dummy;

        asm_switchstackandcall(CONTEXT(mainThread).usedStackTop, gc_run,
                               (void**)&(CONTEXT(currentThread).usedStackTop));
 * These are local overrides for various environment variables in Emacs.
 * Please do not remove this and leave it at the end of the file, where
 * Emacs will automagically detect them.
 * ---------------------------------------------------------------------
 * indent-tabs-mode: t