2 * sgen-ssb.c: Remembered sets - sequential store buffer
5 * Rodrigo Kumpera (rkumpera@novell.com)
7 * Copyright 2001-2003 Ximian, Inc
8 * Copyright 2003-2010 Novell, Inc.
9 * Copyright 2011 Xamarin Inc (http://www.xamarin.com)
10 * Copyright (C) 2012 Xamarin Inc
12 * This library is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU Library General Public
14 * License 2.0 as published by the Free Software Foundation;
16 * This library is distributed in the hope that it will be useful,
17 * but WITHOUT ANY WARRANTY; without even the implied warranty of
18 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
19 * Library General Public License for more details.
21 * You should have received a copy of the GNU Library General Public
22 * License 2.0 along with this library; if not, write to the Free
23 * Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
29 #include "metadata/sgen-gc.h"
30 #include "metadata/sgen-ssb.h"
31 #include "metadata/sgen-protocol.h"
32 #include "utils/mono-counters.h"
34 #ifndef DISABLE_SGEN_REMSET
/* A two-slot LRU cache of recently inserted global remset locations. */
static gpointer global_remset_cache [2];

/* Serializes additions to the global remembered set (taken during parallel collection). */
static LOCK_DECLARE (global_remset_mutex);

#define LOCK_GLOBAL_REMSET mono_mutex_lock (&global_remset_mutex)
#define UNLOCK_GLOBAL_REMSET mono_mutex_unlock (&global_remset_mutex)

/* Fast thread-local pointer to the current thread's remembered set (HAVE_KW_THREAD path). */
static __thread RememberedSet *remembered_set MONO_TLS_FAST;

/* TLS key used to reach the per-thread remembered set where fast TLS is unavailable. */
static MonoNativeTlsKey remembered_set_key;
/* Remembered set of old-generation locations that point into the nursery. */
static RememberedSet *global_remset;
/* Remset chains handed over by threads that unregistered before being processed. */
static RememberedSet *freed_thread_remsets;
/* Evacuated per-thread store buffers, queued for the next remset scan. */
static GenericStoreRememberedSet *generic_store_remsets = NULL;

#ifdef HEAVY_STATISTICS
static int stat_wbarrier_generic_store_remset = 0;

static long long stat_store_remsets = 0;
static long long stat_store_remsets_unique = 0;
static long long stat_saved_remsets_1 = 0;
static long long stat_saved_remsets_2 = 0;
static long long stat_local_remsets_processed = 0;
static long long stat_global_remsets_added = 0;
static long long stat_global_remsets_readded = 0;
static long long stat_global_remsets_processed = 0;
static long long stat_global_remsets_discarded = 0;

static gboolean global_remset_location_was_not_added (gpointer ptr);
/*
 * Reset INFO's store remset buffer: zero the index and, if the buffer is
 * still allocated, clear its contents as well.
 */
clear_thread_store_remset_buffer (SgenThreadInfo *info)
	*info->store_remset_buffer_index_addr = 0;
	/* See the comment at the end of sgen_thread_unregister() */
	if (*info->store_remset_buffer_addr)
		memset (*info->store_remset_buffer_addr, 0, sizeof (gpointer) * STORE_REMSET_BUFFER_SIZE);
/* Total allocation size, in bytes, of REMSET: the header plus its data slots. */
remset_byte_size (RememberedSet *remset)
	return sizeof (RememberedSet) + (remset->end_set - remset->data) * sizeof (gpointer);
/*
 * Snapshot BUFFER (a per-thread store buffer) into a freshly allocated
 * GenericStoreRememberedSet and prepend it to the global list.
 * Only STORE_REMSET_BUFFER_SIZE - 1 entries starting at index 1 are copied;
 * NOTE(review): slot 0 appears reserved (the scan code also indexes j + 1) — confirm.
 */
add_generic_store_remset_from_buffer (gpointer *buffer)
	GenericStoreRememberedSet *remset = sgen_alloc_internal (INTERNAL_MEM_STORE_REMSET);
	memcpy (remset->data, buffer + 1, sizeof (gpointer) * (STORE_REMSET_BUFFER_SIZE - 1));
	remset->next = generic_store_remsets;
	generic_store_remsets = remset;
/*
 * Flush the current thread's store buffer into a generic store remset,
 * then clear the buffer and reset STORE_REMSET_BUFFER_INDEX to 0.
 */
evacuate_remset_buffer (void)
	buffer = STORE_REMSET_BUFFER;

	add_generic_store_remset_from_buffer (buffer);
	memset (buffer, 0, sizeof (gpointer) * STORE_REMSET_BUFFER_SIZE);

	STORE_REMSET_BUFFER_INDEX = 0;
/* FIXME: later choose a size that takes into account the RememberedSet struct
 * and doesn't waste any alloc padding space.
 */
/*
 * Allocate a RememberedSet with room for SIZE gpointer entries.
 * ID and GLOBAL are used for logging purposes only.
 */
static RememberedSet*
sgen_alloc_remset (int size, gpointer id, gboolean global)
	RememberedSet* res = sgen_alloc_internal_dynamic (sizeof (RememberedSet) + (size * sizeof (gpointer)), INTERNAL_MEM_REMSET, TRUE);
	/* store_next is the bump pointer; end_set marks the capacity limit. */
	res->store_next = res->data;
	res->end_set = res->data + size;
	SGEN_LOG (4, "Allocated%s remset size %d at %p for %p", global ? " global" : "", size, res->data, id);
/*
 * Write barrier for object field stores: record FIELD_PTR in the current
 * thread's remembered set, then perform the store of VALUE into the field.
 */
sgen_ssb_wbarrier_set_field (MonoObject *obj, gpointer field_ptr, MonoObject* value)
	if (rs->store_next < rs->end_set) {
		/* Fast path: the current remset has a free slot. */
		*(rs->store_next++) = (mword)field_ptr;
		*(void**)field_ptr = value;
	/* Slow path: chain a fresh remset of equal capacity in front, then record. */
	rs = sgen_alloc_remset (rs->end_set - rs->data, (void*)1, FALSE);
	rs->next = REMEMBERED_SET;
#ifdef HAVE_KW_THREAD
	mono_thread_info_current ()->remset = rs;
	*(rs->store_next++) = (mword)field_ptr;
	*(void**)field_ptr = value;
/*
 * Write barrier for array element stores: record SLOT_PTR in the current
 * thread's remembered set, then store VALUE into the slot.
 */
sgen_ssb_wbarrier_set_arrayref (MonoArray *arr, gpointer slot_ptr, MonoObject* value)
	if (rs->store_next < rs->end_set) {
		/* Fast path: the current remset has a free slot. */
		*(rs->store_next++) = (mword)slot_ptr;
		*(void**)slot_ptr = value;
	/* Slow path: chain a fresh remset of equal capacity in front, then record. */
	rs = sgen_alloc_remset (rs->end_set - rs->data, (void*)1, FALSE);
	rs->next = REMEMBERED_SET;
#ifdef HAVE_KW_THREAD
	mono_thread_info_current ()->remset = rs;
	*(rs->store_next++) = (mword)slot_ptr;
	*(void**)slot_ptr = value;
/*
 * Write barrier for bulk reference copies: move COUNT pointers from
 * SRC_PTR to DEST_PTR, then record a two-word REMSET_RANGE entry
 * (tagged destination address, element count).
 */
sgen_ssb_wbarrier_arrayref_copy (gpointer dest_ptr, gpointer src_ptr, int count)
	mono_gc_memmove (dest_ptr, src_ptr, count * sizeof (gpointer));

	SGEN_LOG (8, "Adding remset at %p, %d", dest_ptr, count);
	if (rs->store_next + 1 < rs->end_set) {
		/* Fast path: two free slots available. */
		*(rs->store_next++) = (mword)dest_ptr | REMSET_RANGE;
		*(rs->store_next++) = count;
	/* Slow path: chain a fresh remset in front, then record the entry there. */
	rs = sgen_alloc_remset (rs->end_set - rs->data, (void*)1, FALSE);
	rs->next = REMEMBERED_SET;
#ifdef HAVE_KW_THREAD
	mono_thread_info_current ()->remset = rs;
	*(rs->store_next++) = (mword)dest_ptr | REMSET_RANGE;
	*(rs->store_next++) = count;
/*
 * Write barrier for copying COUNT value-type instances of KLASS from SRC
 * to DEST: move the raw bytes, then record a four-word REMSET_VTYPE entry
 * (tagged destination, GC descriptor, count, element size).
 */
sgen_ssb_wbarrier_value_copy (gpointer dest, gpointer src, int count, MonoClass *klass)
	size_t element_size = mono_class_value_size (klass, NULL);
	size_t size = count * element_size;

	/* The class's GC descriptor must be ready before the copies can be scanned. */
	g_assert (klass->gc_descr_inited);

	mono_gc_memmove (dest, src, size);

	if (rs->store_next + 4 < rs->end_set) {
		/* Fast path: four free slots available. */
		*(rs->store_next++) = (mword)dest | REMSET_VTYPE;
		*(rs->store_next++) = (mword)klass->gc_descr;
		*(rs->store_next++) = (mword)count;
		*(rs->store_next++) = (mword)element_size;
	/* Slow path: chain a fresh remset in front, then record the entry there. */
	rs = sgen_alloc_remset (rs->end_set - rs->data, (void*)1, FALSE);
	rs->next = REMEMBERED_SET;
#ifdef HAVE_KW_THREAD
	mono_thread_info_current ()->remset = rs;
	*(rs->store_next++) = (mword)dest | REMSET_VTYPE;
	*(rs->store_next++) = (mword)klass->gc_descr;
	*(rs->store_next++) = (mword)count;
	*(rs->store_next++) = (mword)element_size;
/*
 * Write barrier for whole-object copies: copy SRC's body into OBJ while
 * skipping the MonoObject header (so the sync state is preserved), then
 * record a REMSET_OBJECT entry for OBJ.
 */
sgen_ssb_wbarrier_object_copy (MonoObject* obj, MonoObject *src)
	size = mono_object_class (obj)->instance_size;

	SGEN_LOG (6, "Adding object remset for %p", obj);

	/* do not copy the sync state */
	mono_gc_memmove ((char*)obj + sizeof (MonoObject), (char*)src + sizeof (MonoObject),
			size - sizeof (MonoObject));

	if (rs->store_next < rs->end_set) {
		/* Fast path: record the tagged object pointer. */
		*(rs->store_next++) = (mword)obj | REMSET_OBJECT;
	/* Slow path: chain a fresh remset in front, then record the entry there. */
	rs = sgen_alloc_remset (rs->end_set - rs->data, (void*)1, FALSE);
	rs->next = REMEMBERED_SET;
#ifdef HAVE_KW_THREAD
	mono_thread_info_current ()->remset = rs;
	*(rs->store_next++) = (mword)obj | REMSET_OBJECT;
/*
 * Write barrier for a generic pointer store that already happened: append
 * PTR to the per-thread store buffer, deduplicating against the most
 * recently stored entry and evacuating the buffer when it fills up.
 */
sgen_ssb_wbarrier_generic_nostore (gpointer ptr)
	buffer = STORE_REMSET_BUFFER;
	index = STORE_REMSET_BUFFER_INDEX;
	/* This simple optimization eliminates a sizable portion of
	   entries. Comparing it to the last but one entry as well
	   doesn't eliminate significantly more entries. */
	if (buffer [index] == ptr) {

	HEAVY_STAT (++stat_wbarrier_generic_store_remset);

	if (index >= STORE_REMSET_BUFFER_SIZE) {
		/* Buffer full: flush to a generic store remset and restart at 0. */
		evacuate_remset_buffer ();
		index = STORE_REMSET_BUFFER_INDEX;
		g_assert (index == 0);

	buffer [index] = ptr;
	STORE_REMSET_BUFFER_INDEX = index;
#ifdef HEAVY_STATISTICS
/*
 * Copy the location entries of REMSET into the array at BUMPER while
 * accumulating duplicate-detection statistics; returns the advanced
 * bumper pointer (callers reassign: bumper = collect_store_remsets (...)).
 */
collect_store_remsets (RememberedSet *remset, mword *bumper)
	mword *p = remset->data;

	while (p < remset->store_next) {
		switch ((*p) & REMSET_TYPE_MASK) {
		case REMSET_LOCATION:
			++stat_saved_remsets_1;
			/* Cheap dedup against the last two locations seen. */
			if (*p == last1 || *p == last2) {
				++stat_saved_remsets_2;
			g_assert_not_reached ();
	/*
	 * NOTE(review): the enclosing function header lies outside this view;
	 * these locals belong to the heavy-statistics aggregation routine that
	 * sizes a scratch array, collects every recorded address from all
	 * remsets (per-thread, freed-thread, global), sorts them and counts
	 * the unique entries.
	 */
	RememberedSet *remset;

	SgenThreadInfo *info;
	mword *addresses, *bumper, *p, *r;

	/* First pass: total entry count, to size the scratch array. */
	FOREACH_THREAD (info) {
		for (remset = info->remset; remset; remset = remset->next)
			size += remset->store_next - remset->data;
	for (remset = freed_thread_remsets; remset; remset = remset->next)
		size += remset->store_next - remset->data;
	for (remset = global_remset; remset; remset = remset->next)
		size += remset->store_next - remset->data;

	bumper = addresses = sgen_alloc_internal_dynamic (sizeof (mword) * size, INTERNAL_MEM_STATISTICS, TRUE);

	/* Second pass: collect the recorded addresses into the array. */
	FOREACH_THREAD (info) {
		for (remset = info->remset; remset; remset = remset->next)
			bumper = collect_store_remsets (remset, bumper);
	for (remset = global_remset; remset; remset = remset->next)
		bumper = collect_store_remsets (remset, bumper);
	for (remset = freed_thread_remsets; remset; remset = remset->next)
		bumper = collect_store_remsets (remset, bumper);

	g_assert (bumper <= addresses + size);

	stat_store_remsets += bumper - addresses;

	/* Sort so duplicates become adjacent, then count unique addresses. */
	sgen_sort_addresses ((void**)addresses, bumper - addresses);

	stat_store_remsets_unique += p - addresses;

	sgen_free_internal_dynamic (addresses, sizeof (mword) * size, INTERNAL_MEM_STATISTICS);
/*
 * Process a single remset entry at P: copy/mark or scan whatever the entry
 * refers to so references into the nursery get updated, and return the
 * address of the next entry.  GLOBAL says whether the entry comes from the
 * global remset (affects statistics and the re-add-to-global policy).
 */
handle_remset (mword *p, void *start_nursery, void *end_nursery, gboolean global, SgenGrayQueue *queue)
	HEAVY_STAT (++stat_global_remsets_processed);
	HEAVY_STAT (++stat_local_remsets_processed);

	/* FIXME: exclude stack locations */
	switch ((*p) & REMSET_TYPE_MASK) {
	case REMSET_LOCATION:
		//__builtin_prefetch (ptr);
		/* Only interesting when the location itself lies outside the nursery. */
		if (((void*)ptr < start_nursery || (void*)ptr >= end_nursery)) {
			sgen_get_current_object_ops ()->copy_or_mark_object (ptr, queue);
			SGEN_LOG (9, "Overwrote remset at %p with %p", ptr, copy);
			binary_protocol_ptr_update (ptr, old, copy, (gpointer)SGEN_LOAD_VTABLE (copy), sgen_safe_object_get_size (copy));
			if (!global && copy >= start_nursery && copy < end_nursery) {
				/*
				 * If the object is pinned, each reference to it from nonpinned objects
				 * becomes part of the global remset, which can grow very large.
				 */
				SGEN_LOG (9, "Add to global remset because of pinning %p (%p %s)", ptr, copy, sgen_safe_name (copy));
				sgen_add_to_global_remset (ptr, copy);
		SGEN_LOG (9, "Skipping remset at %p holding %p", ptr, *ptr);
		/* Range entry: copy/mark COUNT consecutive reference slots. */
		CopyOrMarkObjectFunc copy_func = sgen_get_current_object_ops ()->copy_or_mark_object;

		ptr = (void**)(*p & ~REMSET_TYPE_MASK);
		if (((void*)ptr >= start_nursery && (void*)ptr < end_nursery))
		while (count-- > 0) {
			copy_func (ptr, queue);
			SGEN_LOG (9, "Overwrote remset at %p with %p (count: %d)", ptr, copy, (int)count);
			if (!global && copy >= start_nursery && copy < end_nursery)
				sgen_add_to_global_remset (ptr, copy);
		/* Object entry: scan the whole object for nursery references. */
		ptr = (void**)(*p & ~REMSET_TYPE_MASK);
		if (((void*)ptr >= start_nursery && (void*)ptr < end_nursery))
		sgen_get_current_object_ops ()->scan_object ((char*)ptr, queue);
		/* Value-type entry: scan COUNT instances via their GC descriptor. */
		ptr = (void**)(*p & ~REMSET_TYPE_MASK);
		if (((void*)ptr >= start_nursery && (void*)ptr < end_nursery))
		while (count-- > 0) {
			sgen_get_current_object_ops ()->scan_vtype ((char*)ptr, desc, queue);
			ptr = (void**)((char*)ptr + skip_size);
		g_assert_not_reached ();
/*
 * First phase of remset scanning: walk the global remset, processing each
 * entry and compacting the set in place — only locations that still point
 * into the nursery are kept (re-added), the rest are dropped.
 */
sgen_ssb_begin_scan_remsets (void *start_nursery, void *end_nursery, SgenGrayQueue *queue)
	RememberedSet *remset;
	mword *p, *next_p, *store_pos;

	for (remset = global_remset; remset; remset = remset->next) {
		SGEN_LOG (4, "Scanning global remset range: %p-%p, size: %td", remset->data, remset->store_next, remset->store_next - remset->data);
		store_pos = remset->data;
		for (p = remset->data; p < remset->store_next; p = next_p) {
			void **ptr = (void**)p [0];

			/*Ignore previously processed remset.*/
			if (!global_remset_location_was_not_added (ptr)) {

			next_p = handle_remset (p, start_nursery, end_nursery, TRUE, queue);

			/*
			 * Clear global remsets of locations which no longer point to the
			 * nursery. Otherwise, they could grow indefinitely between major
			 * collections.
			 *
			 * Since all global remsets are location remsets, we don't need to unmask the pointer.
			 */
			if (sgen_ptr_in_nursery (*ptr)) {
				*store_pos ++ = p [0];
				HEAVY_STAT (++stat_global_remsets_readded);

		/* Truncate the remset */
		remset->store_next = store_pos;
/*
 * Second phase of remset scanning: process the generic store remsets,
 * every live thread's remset chain and store buffer, and the remsets left
 * behind by freed threads.  Processed sets are reset or freed.
 */
sgen_ssb_finish_scan_remsets (void *start_nursery, void *end_nursery, SgenGrayQueue *queue)
	SgenThreadInfo *info;
	RememberedSet *remset;
	GenericStoreRememberedSet *store_remset;

#ifdef HEAVY_STATISTICS

	/* the generic store ones */
	store_remset = generic_store_remsets;
	while (store_remset) {
		GenericStoreRememberedSet *next = store_remset->next;

		for (i = 0; i < STORE_REMSET_BUFFER_SIZE - 1; ++i) {
			gpointer addr = store_remset->data [i];
			handle_remset ((mword*)&addr, start_nursery, end_nursery, FALSE, queue);

		sgen_free_internal (store_remset, INTERNAL_MEM_STORE_REMSET);

	generic_store_remsets = NULL;

	/* the per-thread ones */
	FOREACH_THREAD (info) {
		for (remset = info->remset; remset; remset = next) {
			SGEN_LOG (4, "Scanning remset for thread %p, range: %p-%p, size: %td", info, remset->data, remset->store_next, remset->store_next - remset->data);
			for (p = remset->data; p < remset->store_next;)
				p = handle_remset (p, start_nursery, end_nursery, FALSE, queue);
			/* Reset for reuse; chained (non-head) remsets are freed below. */
			remset->store_next = remset->data;

			if (remset != info->remset) {
				SGEN_LOG (4, "Freed remset at %p", remset->data);
				sgen_free_internal_dynamic (remset, remset_byte_size (remset), INTERNAL_MEM_REMSET);
		/* Store-buffer entries are read starting at index 1. */
		for (j = 0; j < *info->store_remset_buffer_index_addr; ++j)
			handle_remset ((mword*)*info->store_remset_buffer_addr + j + 1, start_nursery, end_nursery, FALSE, queue);
		clear_thread_store_remset_buffer (info);

	/* the freed thread ones */
	while (freed_thread_remsets) {
		remset = freed_thread_remsets;
		SGEN_LOG (4, "Scanning remset for freed thread, range: %p-%p, size: %td", remset->data, remset->store_next, remset->store_next - remset->data);
		for (p = remset->data; p < remset->store_next;)
			p = handle_remset (p, start_nursery, end_nursery, FALSE, queue);

		SGEN_LOG (4, "Freed remset at %p", remset->data);
		sgen_free_internal_dynamic (remset, remset_byte_size (remset), INTERNAL_MEM_REMSET);
		freed_thread_remsets = next;
/*
 * Thread-unregister hook: hand thread P's remset chain over to
 * freed_thread_remsets, flush its store buffer, and free/NULL the buffer.
 */
sgen_ssb_cleanup_thread (SgenThreadInfo *p)
	if (freed_thread_remsets) {
		/* Walk to the tail of P's chain and append the existing freed list. */
		for (rset = p->remset; rset->next; rset = rset->next)
		rset->next = freed_thread_remsets;
		freed_thread_remsets = p->remset;
	freed_thread_remsets = p->remset;

	/* Preserve unprocessed store-buffer entries before freeing the buffer. */
	if (*p->store_remset_buffer_index_addr)
		add_generic_store_remset_from_buffer (*p->store_remset_buffer_addr);
	sgen_free_internal (*p->store_remset_buffer_addr, INTERNAL_MEM_STORE_REMSET);

	/*
	 * This is currently not strictly required, but we do it
	 * anyway in case we change thread unregistering:
	 *
	 * If the thread is removed from the thread list after
	 * unregistering (this is currently not the case), and a
	 * collection occurs, clear_remsets() would want to memset
	 * this buffer, which would either clobber memory or crash.
	 */
	*p->store_remset_buffer_addr = NULL;
/*
 * Thread-register hook: allocate INFO's remembered set and store buffer
 * and publish the remset through TLS for fast write-barrier access.
 */
sgen_ssb_register_thread (SgenThreadInfo *info)
#ifndef HAVE_KW_THREAD
	SgenThreadInfo *__thread_info__ = info;

	info->remset = sgen_alloc_remset (DEFAULT_REMSET_SIZE, info, FALSE);
	mono_native_tls_set_value (remembered_set_key, info->remset);
#ifdef HAVE_KW_THREAD
	remembered_set = info->remset;

	STORE_REMSET_BUFFER = sgen_alloc_internal (INTERNAL_MEM_STORE_REMSET);
	STORE_REMSET_BUFFER_INDEX = 0;
#ifdef HAVE_KW_THREAD
/* Copy the fast-TLS remset pointer back into INFO before the thread suspends. */
sgen_ssb_fill_thread_info_for_suspend (SgenThreadInfo *info)
	/* update the remset info in the thread data structure */
	info->remset = remembered_set;
/* Minor-collection prep: invalidate the two-slot global remset cache. */
sgen_ssb_prepare_for_minor_collection (void)
	memset (global_remset_cache, 0, sizeof (global_remset_cache));
/*
 * Clear the info in the remembered sets: we're doing a major collection, so
 * the per-thread ones are not needed and the global ones will be reconstructed
 * during the collection.
 */
sgen_ssb_prepare_for_major_collection (void)
	SgenThreadInfo *info;
	RememberedSet *remset, *next;

	sgen_ssb_prepare_for_minor_collection ();

	/* the global list */
	for (remset = global_remset; remset; remset = next) {
		remset->store_next = remset->data;

		/* Keep the head set for reuse; free the chained ones. */
		if (remset != global_remset) {
			SGEN_LOG (4, "Freed remset at %p", remset->data);
			sgen_free_internal_dynamic (remset, remset_byte_size (remset), INTERNAL_MEM_REMSET);
	/* the generic store ones */
	while (generic_store_remsets) {
		GenericStoreRememberedSet *gs_next = generic_store_remsets->next;
		sgen_free_internal (generic_store_remsets, INTERNAL_MEM_STORE_REMSET);
		generic_store_remsets = gs_next;
	/* the per-thread ones */
	FOREACH_THREAD (info) {
		for (remset = info->remset; remset; remset = next) {
			remset->store_next = remset->data;

			if (remset != info->remset) {
				SGEN_LOG (3, "Freed remset at %p", remset->data);
				sgen_free_internal_dynamic (remset, remset_byte_size (remset), INTERNAL_MEM_REMSET);
		clear_thread_store_remset_buffer (info);

	/* the freed thread ones */
	while (freed_thread_remsets) {
		next = freed_thread_remsets->next;
		SGEN_LOG (4, "Freed remset at %p", freed_thread_remsets->data);
		sgen_free_internal_dynamic (freed_thread_remsets, remset_byte_size (freed_thread_remsets), INTERNAL_MEM_REMSET);
		freed_thread_remsets = next;
/*
 * Tries to check if a given remset location was already added to the global remset.
 *
 * A 2 entry, LRU cache of recently seen location remsets.
 *
 * It's hand-coded instead of done using loops to reduce the number of memory references on cache hit.
 *
 * Returns TRUE if PTR was not cached yet (it is then inserted), FALSE on a
 * cache hit.
 */
global_remset_location_was_not_added (gpointer ptr)
	gpointer first = global_remset_cache [0], second;
	/* Hit on the most-recently-used slot. */
	HEAVY_STAT (++stat_global_remsets_discarded);

	second = global_remset_cache [1];

	/*Move the second to the front*/
	global_remset_cache [0] = second;
	global_remset_cache [1] = first;

	HEAVY_STAT (++stat_global_remsets_discarded);

	/* Miss: cache PTR. */
	global_remset_cache [0] = second;
	global_remset_cache [1] = ptr;
/*
 * Record PTR — an old-generation location currently holding a nursery
 * object — in the global remembered set.  The global remset lock is taken
 * when the collection runs in parallel; recently seen locations are
 * filtered out via the two-slot LRU cache.
 */
sgen_ssb_record_pointer (gpointer ptr)
	gboolean lock = sgen_collection_is_parallel ();
	gpointer obj = *(gpointer*)ptr;

	/* Only old -> nursery pointers belong in the global remset. */
	g_assert (!sgen_ptr_in_nursery (ptr) && sgen_ptr_in_nursery (obj));

	if (!global_remset_location_was_not_added (ptr))

	if (G_UNLIKELY (do_pin_stats))
		sgen_pin_stats_register_global_remset (obj);

	SGEN_LOG (8, "Adding global remset for %p", ptr);
	binary_protocol_global_remset (ptr, *(gpointer*)ptr, (gpointer)SGEN_LOAD_VTABLE (obj));

	HEAVY_STAT (++stat_global_remsets_added);

	/*
	 * FIXME: If an object remains pinned, we need to add it at every minor collection.
	 * To avoid uncontrolled growth of the global remset, only add each pointer once.
	 */
	if (global_remset->store_next + 3 < global_remset->end_set) {
		*(global_remset->store_next++) = (mword)ptr;
	/* Grow: chain a new global remset in front and record the entry there. */
	rs = sgen_alloc_remset (global_remset->end_set - global_remset->data, NULL, TRUE);
	rs->next = global_remset;

	*(global_remset->store_next++) = (mword)ptr;

#if SGEN_MAX_DEBUG_LEVEL >= 4
	int global_rs_size = 0;

	for (rs = global_remset; rs; rs = rs->next) {
		global_rs_size += rs->store_next - rs->data;
	SGEN_LOG (4, "Global remset now has size %d", global_rs_size);

	UNLOCK_GLOBAL_REMSET;
 * ######################################################################
 * ######## Debug support
 * ######################################################################

/*
 * Check whether the remset entry at P covers ADDR; sets *FOUND on a match
 * and returns the address of the next entry.
 */
find_in_remset_loc (mword *p, char *addr, gboolean *found)
	switch ((*p) & REMSET_TYPE_MASK) {
	case REMSET_LOCATION:
		if (*p == (mword)addr)
		/* Range entry: match if ADDR falls within the recorded slot run. */
		ptr = (void**)(*p & ~REMSET_TYPE_MASK);

		if ((void**)addr >= ptr && (void**)addr < ptr + count)
		/* Object entry: match if ADDR lies anywhere inside the object. */
		ptr = (void**)(*p & ~REMSET_TYPE_MASK);
		count = sgen_safe_object_get_size ((MonoObject*)ptr);
		count = SGEN_ALIGN_UP (count);
		count /= sizeof (mword);
		if ((void**)addr >= ptr && (void**)addr < ptr + count)
		/* Value-type entry: match against the covered byte range. */
		ptr = (void**)(*p & ~REMSET_TYPE_MASK);

		/* The descriptor includes the size of MonoObject */
		skip_size -= sizeof (MonoObject);

		if ((void**)addr >= ptr && (void**)addr < ptr + (skip_size / sizeof (gpointer)))
		g_assert_not_reached ();
 * Return whether ADDR occurs in the remembered sets (debug helper):
 * global, generic store, per-thread and freed-thread sets are all checked.

sgen_ssb_find_address (char *addr)
	SgenThreadInfo *info;
	RememberedSet *remset;
	GenericStoreRememberedSet *store_remset;

	gboolean found = FALSE;

	/* the global remsets */
	for (remset = global_remset; remset; remset = remset->next) {
		SGEN_LOG (4, "Scanning global remset range: %p-%p, size: %td", remset->data, remset->store_next, remset->store_next - remset->data);
		for (p = remset->data; p < remset->store_next;) {
			p = find_in_remset_loc (p, addr, &found);

	/* the generic store ones */
	for (store_remset = generic_store_remsets; store_remset; store_remset = store_remset->next) {
		for (i = 0; i < STORE_REMSET_BUFFER_SIZE - 1; ++i) {
			if (store_remset->data [i] == addr)

	/* the per-thread ones */
	FOREACH_THREAD (info) {
		for (remset = info->remset; remset; remset = remset->next) {
			SGEN_LOG (4, "Scanning remset for thread %p, range: %p-%p, size: %td", info, remset->data, remset->store_next, remset->store_next - remset->data);
			for (p = remset->data; p < remset->store_next;) {
				p = find_in_remset_loc (p, addr, &found);

		/* Store-buffer entries are read starting at index 1. */
		for (j = 0; j < *info->store_remset_buffer_index_addr; ++j) {
			if ((*info->store_remset_buffer_addr) [j + 1] == addr)

	/* the freed thread ones */
	for (remset = freed_thread_remsets; remset; remset = remset->next) {
		SGEN_LOG (4, "Scanning remset for freed thread, range: %p-%p, size: %td", remset->data, remset->store_next, remset->store_next - remset->data);
		for (p = remset->data; p < remset->store_next;) {
			p = find_in_remset_loc (p, addr, &found);
/*
 * Install the SSB implementation: initialize the global remset lock, the
 * initial global remset and the TLS key, register the heavy-statistics
 * counters, and fill in REMSET's function table.
 */
sgen_ssb_init (SgenRemeberedSet *remset)
	LOCK_INIT (global_remset_mutex);

	global_remset = sgen_alloc_remset (1024, NULL, FALSE);
	global_remset->next = NULL;

	mono_native_tls_alloc (&remembered_set_key, NULL);

#ifdef HEAVY_STATISTICS
	mono_counters_register ("WBarrier generic store stored", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_wbarrier_generic_store_remset);

	mono_counters_register ("Store remsets", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_store_remsets);
	mono_counters_register ("Unique store remsets", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_store_remsets_unique);
	mono_counters_register ("Saved remsets 1", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_saved_remsets_1);
	mono_counters_register ("Saved remsets 2", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_saved_remsets_2);
	mono_counters_register ("Non-global remsets processed", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_local_remsets_processed);
	mono_counters_register ("Global remsets added", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_global_remsets_added);
	mono_counters_register ("Global remsets re-added", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_global_remsets_readded);
	mono_counters_register ("Global remsets processed", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_global_remsets_processed);
	mono_counters_register ("Global remsets discarded", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_global_remsets_discarded);

	/* Wire up the SSB callbacks consumed by the rest of SGen. */
	remset->wbarrier_set_field = sgen_ssb_wbarrier_set_field;
	remset->wbarrier_set_arrayref = sgen_ssb_wbarrier_set_arrayref;
	remset->wbarrier_arrayref_copy = sgen_ssb_wbarrier_arrayref_copy;
	remset->wbarrier_value_copy = sgen_ssb_wbarrier_value_copy;
	remset->wbarrier_object_copy = sgen_ssb_wbarrier_object_copy;
	remset->wbarrier_generic_nostore = sgen_ssb_wbarrier_generic_nostore;
	remset->record_pointer = sgen_ssb_record_pointer;

	remset->begin_scan_remsets = sgen_ssb_begin_scan_remsets;
	remset->finish_scan_remsets = sgen_ssb_finish_scan_remsets;

	remset->register_thread = sgen_ssb_register_thread;
	remset->cleanup_thread = sgen_ssb_cleanup_thread;
#ifdef HAVE_KW_THREAD
	remset->fill_thread_info_for_suspend = sgen_ssb_fill_thread_info_for_suspend;

	remset->prepare_for_minor_collection = sgen_ssb_prepare_for_minor_collection;
	remset->prepare_for_major_collection = sgen_ssb_prepare_for_major_collection;

	remset->find_address = sgen_ssb_find_address;
/* Stub installed when SGen write barriers were disabled at configure time. */
sgen_ssb_init (SgenRemeberedSet *remset)
	fprintf (stderr, "Error: Mono was configured using --enable-minimal=sgen_wbarrier.\n");
954 #endif /* DISABLE_SGEN_REMSET */
956 #endif /* HAVE_SGEN_GC */