/*
 * Copyright 1988, 1989 Hans-J. Boehm, Alan J. Demers
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED.  ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 */
/* Boehm, August 9, 1995 6:09 pm PDT */
18 # include "private/gc_priv.h"
/*
 * We maintain several hash tables of hblks that have had false hits.
 * Each contains one bit per hash bucket; if any page in the bucket
 * has had a false hit, we assume that all of them have.
 * See the definition of page_hash_table in gc_private.h.
 * False hits from the stack(s) are much more dangerous than false hits
 * from elsewhere, since the former can pin a large object that spans the
 * block, even though it does not start on the dangerous block.
 *
 * Externally callable routines are:
 *
 *	GC_add_to_black_list_normal
 *	GC_add_to_black_list_stack
 *	GC_promote_black_lists
 *
 * All require that the allocator lock is held.
 */
41 /* Pointers to individual tables. We replace one table by another by */
42 /* switching these pointers. */
43 word * GC_old_normal_bl;
44 /* Nonstack false references seen at last full */
46 word * GC_incomplete_normal_bl;
47 /* Nonstack false references seen since last */
48 /* full collection. */
49 word * GC_old_stack_bl;
50 word * GC_incomplete_stack_bl;
52 word GC_total_stack_black_listed;
54 word GC_black_list_spacing = MINHINCR*HBLKSIZE; /* Initial rough guess */
58 # if defined(__STDC__) || defined(__cplusplus)
59 void GC_default_print_heap_obj_proc(ptr_t p)
61 void GC_default_print_heap_obj_proc(p)
65 ptr_t base = GC_base(p);
67 GC_err_printf2("start: 0x%lx, appr. length: %ld", base, GC_size(base));
70 void (*GC_print_heap_obj) GC_PROTO((ptr_t p)) =
71 GC_default_print_heap_obj_proc;
#ifdef PRINT_BLACK_LIST
/* Describe where the false reference p appears to come from: a		*/
/* register (p == 0), the root set, or the interior of a heap object.	*/
void GC_print_source_ptr(p)
ptr_t p;
{
    ptr_t base = GC_base(p);

    if (0 == base) {
	if (0 == p) {
	    GC_err_printf0("in register");
	} else {
	    GC_err_printf0("in root set");
	}
    } else {
	GC_err_printf0("in object at ");
	(*GC_print_heap_obj)(base);
    }
}
#endif /* PRINT_BLACK_LIST */
91 if (!GC_all_interior_pointers) {
92 GC_old_normal_bl = (word *)
93 GC_scratch_alloc((word)(sizeof (page_hash_table)));
94 GC_incomplete_normal_bl = (word *)GC_scratch_alloc
95 ((word)(sizeof(page_hash_table)));
96 if (GC_old_normal_bl == 0 || GC_incomplete_normal_bl == 0) {
97 GC_err_printf0("Insufficient memory for black list\n");
100 GC_clear_bl(GC_old_normal_bl);
101 GC_clear_bl(GC_incomplete_normal_bl);
103 GC_old_stack_bl = (word *)GC_scratch_alloc((word)(sizeof(page_hash_table)));
104 GC_incomplete_stack_bl = (word *)GC_scratch_alloc
105 ((word)(sizeof(page_hash_table)));
106 if (GC_old_stack_bl == 0 || GC_incomplete_stack_bl == 0) {
107 GC_err_printf0("Insufficient memory for black list\n");
110 GC_clear_bl(GC_old_stack_bl);
111 GC_clear_bl(GC_incomplete_stack_bl);
114 void GC_clear_bl(doomed)
117 BZERO(doomed, sizeof(page_hash_table));
120 void GC_copy_bl(old, new)
123 BCOPY(old, new, sizeof(page_hash_table));
126 static word total_stack_black_listed();
128 /* Signal the completion of a collection. Turn the incomplete black */
129 /* lists into new black lists, etc. */
130 void GC_promote_black_lists()
132 word * very_old_normal_bl = GC_old_normal_bl;
133 word * very_old_stack_bl = GC_old_stack_bl;
135 GC_old_normal_bl = GC_incomplete_normal_bl;
136 GC_old_stack_bl = GC_incomplete_stack_bl;
137 if (!GC_all_interior_pointers) {
138 GC_clear_bl(very_old_normal_bl);
140 GC_clear_bl(very_old_stack_bl);
141 GC_incomplete_normal_bl = very_old_normal_bl;
142 GC_incomplete_stack_bl = very_old_stack_bl;
143 GC_total_stack_black_listed = total_stack_black_listed();
145 GC_printf1("%ld bytes in heap blacklisted for interior pointers\n",
146 (unsigned long)GC_total_stack_black_listed);
148 if (GC_total_stack_black_listed != 0) {
149 GC_black_list_spacing =
150 HBLKSIZE*(GC_heapsize/GC_total_stack_black_listed);
152 if (GC_black_list_spacing < 3 * HBLKSIZE) {
153 GC_black_list_spacing = 3 * HBLKSIZE;
155 if (GC_black_list_spacing > MAXHINCR * HBLKSIZE) {
156 GC_black_list_spacing = MAXHINCR * HBLKSIZE;
157 /* Makes it easier to allocate really huge blocks, which otherwise */
158 /* may have problems with nonuniform blacklist distributions. */
159 /* This way we should always succeed immediately after growing the */
164 void GC_unpromote_black_lists()
166 if (!GC_all_interior_pointers) {
167 GC_copy_bl(GC_old_normal_bl, GC_incomplete_normal_bl);
169 GC_copy_bl(GC_old_stack_bl, GC_incomplete_stack_bl);
172 /* P is not a valid pointer reference, but it falls inside */
173 /* the plausible heap bounds. */
174 /* Add it to the normal incomplete black list if appropriate. */
175 #ifdef PRINT_BLACK_LIST
176 void GC_add_to_black_list_normal(p, source)
179 void GC_add_to_black_list_normal(p)
183 if (!(GC_modws_valid_offsets[p & (sizeof(word)-1)])) return;
185 register int index = PHT_HASH(p);
187 if (HDR(p) == 0 || get_pht_entry_from_index(GC_old_normal_bl, index)) {
188 # ifdef PRINT_BLACK_LIST
189 if (!get_pht_entry_from_index(GC_incomplete_normal_bl, index)) {
191 "Black listing (normal) 0x%lx referenced from 0x%lx ",
192 (unsigned long) p, (unsigned long) source);
193 GC_print_source_ptr(source);
197 set_pht_entry_from_index(GC_incomplete_normal_bl, index);
198 } /* else this is probably just an interior pointer to an allocated */
199 /* object, and isn't worth black listing. */
203 /* And the same for false pointers from the stack. */
204 #ifdef PRINT_BLACK_LIST
205 void GC_add_to_black_list_stack(p, source)
208 void GC_add_to_black_list_stack(p)
212 register int index = PHT_HASH(p);
214 if (HDR(p) == 0 || get_pht_entry_from_index(GC_old_stack_bl, index)) {
215 # ifdef PRINT_BLACK_LIST
216 if (!get_pht_entry_from_index(GC_incomplete_stack_bl, index)) {
218 "Black listing (stack) 0x%lx referenced from 0x%lx ",
219 (unsigned long)p, (unsigned long)source);
220 GC_print_source_ptr(source);
224 set_pht_entry_from_index(GC_incomplete_stack_bl, index);
229 * Is the block starting at h of size len bytes black listed? If so,
230 * return the address of the next plausible r such that (r, len) might not
231 * be black listed. (R may not actually be in the heap. We guarantee only
232 * that every smaller value of r after h is also black listed.)
233 * If (h,len) is not black listed, return 0.
234 * Knows about the structure of the black list hash tables.
236 struct hblk * GC_is_black_listed(h, len)
240 register int index = PHT_HASH((word)h);
242 word nblocks = divHBLKSZ(len);
244 if (!GC_all_interior_pointers) {
245 if (get_pht_entry_from_index(GC_old_normal_bl, index)
246 || get_pht_entry_from_index(GC_incomplete_normal_bl, index)) {
252 if (GC_old_stack_bl[divWORDSZ(index)] == 0
253 && GC_incomplete_stack_bl[divWORDSZ(index)] == 0) {
255 i += WORDSZ - modWORDSZ(index);
257 if (get_pht_entry_from_index(GC_old_stack_bl, index)
258 || get_pht_entry_from_index(GC_incomplete_stack_bl, index)) {
263 if (i >= nblocks) break;
264 index = PHT_HASH((word)(h+i));
270 /* Return the number of blacklisted blocks in a given range. */
271 /* Used only for statistical purposes. */
272 /* Looks only at the GC_incomplete_stack_bl. */
273 word GC_number_stack_black_listed(start, endp1)
274 struct hblk *start, *endp1;
276 register struct hblk * h;
279 for (h = start; h < endp1; h++) {
280 register int index = PHT_HASH((word)h);
282 if (get_pht_entry_from_index(GC_old_stack_bl, index)) result++;
288 /* Return the total number of (stack) black-listed bytes. */
289 static word total_stack_black_listed()
294 for (i = 0; i < GC_n_heap_sects; i++) {
295 struct hblk * start = (struct hblk *) GC_heap_sects[i].hs_start;
296 word len = (word) GC_heap_sects[i].hs_bytes;
297 struct hblk * endp1 = start + len/HBLKSIZE;
299 total += GC_number_stack_black_listed(start, endp1);
301 return(total * HBLKSIZE);