2 * sgen-ssb.c: Remembered sets - sequential store buffer
5 * Rodrigo Kumpera (rkumpera@novell.com)
7 * Copyright 2001-2003 Ximian, Inc
8 * Copyright 2003-2010 Novell, Inc.
9 * Copyright 2011 Xamarin Inc (http://www.xamarin.com)
10 * Copyright (C) 2012 Xamarin Inc
12 * This library is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU Library General Public
14 * License 2.0 as published by the Free Software Foundation;
16 * This library is distributed in the hope that it will be useful,
17 * but WITHOUT ANY WARRANTY; without even the implied warranty of
18 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
19 * Library General Public License for more details.
21 * You should have received a copy of the GNU Library General Public
22 * License 2.0 along with this library; if not, write to the Free
23 * Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
29 #include "metadata/sgen-gc.h"
30 #include "metadata/sgen-ssb.h"
31 #include "metadata/sgen-protocol.h"
32 #include "utils/mono-counters.h"
34 #ifndef DISABLE_SGEN_REMSET
36 /*A two slots cache for recently inserted remsets */
37 static gpointer global_remset_cache [2];
/* Protects mutation of the global remembered set (see LOCK_GLOBAL_REMSET below). */
39 static LOCK_DECLARE (global_remset_mutex);
41 #define LOCK_GLOBAL_REMSET mono_mutex_lock (&global_remset_mutex)
42 #define UNLOCK_GLOBAL_REMSET mono_mutex_unlock (&global_remset_mutex)
/* Per-thread remembered set, reached through fast TLS where the compiler supports it. */
45 static __thread RememberedSet *remembered_set MONO_TLS_FAST;
/* Fallback TLS key used when __thread fast access is unavailable. */
47 static MonoNativeTlsKey remembered_set_key;
/* Head of the global remset list: old-generation locations that point into the nursery. */
48 static RememberedSet *global_remset;
/* Remsets left behind by unregistered threads; scanned and freed during collection. */
49 static RememberedSet *freed_thread_remsets;
/* Linked list of evacuated sequential-store-buffer contents. */
50 static GenericStoreRememberedSet *generic_store_remsets = NULL;
/* Statistics counters; registered with mono_counters in sgen_ssb_init. */
52 #ifdef HEAVY_STATISTICS
53 static int stat_wbarrier_generic_store_remset = 0;
55 static long long stat_store_remsets = 0;
56 static long long stat_store_remsets_unique = 0;
57 static long long stat_saved_remsets_1 = 0;
58 static long long stat_saved_remsets_2 = 0;
59 static long long stat_local_remsets_processed = 0;
60 static long long stat_global_remsets_added = 0;
61 static long long stat_global_remsets_readded = 0;
62 static long long stat_global_remsets_processed = 0;
63 static long long stat_global_remsets_discarded = 0;
67 static gboolean global_remset_location_was_not_added (gpointer ptr);
/*
 * Reset a thread's sequential store buffer: zero the index and, if the
 * buffer is still allocated, clear its contents so stale entries are not
 * reprocessed on the next scan.
 */
71 clear_thread_store_remset_buffer (SgenThreadInfo *info)
73 *info->store_remset_buffer_index_addr = 0;
74 /* See the comment at the end of sgen_thread_unregister() */
/* The buffer pointer may be NULL for an unregistering thread, hence the guard. */
75 if (*info->store_remset_buffer_addr)
76 memset (*info->store_remset_buffer_addr, 0, sizeof (gpointer) * STORE_REMSET_BUFFER_SIZE);
/*
 * Total allocation size of REMSET in bytes: the RememberedSet header plus
 * its data area of (end_set - data) pointer-sized slots. Used when freeing
 * remsets allocated via sgen_alloc_remset.
 */
80 remset_byte_size (RememberedSet *remset)
82 return sizeof (RememberedSet) + (remset->end_set - remset->data) * sizeof (gpointer);
/*
 * Snapshot a thread's store buffer into a GenericStoreRememberedSet and
 * prepend it to the global generic_store_remsets list.
 * Copies BUFFER starting at slot 1 — slot 0 appears to be reserved
 * (elsewhere the code indexes the buffer with j + 1); TODO confirm.
 */
86 add_generic_store_remset_from_buffer (gpointer *buffer)
88 GenericStoreRememberedSet *remset = sgen_alloc_internal (INTERNAL_MEM_STORE_REMSET);
89 memcpy (remset->data, buffer + 1, sizeof (gpointer) * (STORE_REMSET_BUFFER_SIZE - 1));
90 remset->next = generic_store_remsets;
91 generic_store_remsets = remset;
/*
 * Flush the current thread's full store buffer into the generic store
 * remset list, then clear the buffer and reset its index so new stores
 * can be recorded.
 */
95 evacuate_remset_buffer (void)
100 buffer = STORE_REMSET_BUFFER;
102 add_generic_store_remset_from_buffer (buffer);
103 memset (buffer, 0, sizeof (gpointer) * STORE_REMSET_BUFFER_SIZE);
105 STORE_REMSET_BUFFER_INDEX = 0;
108 /* FIXME: later choose a size that takes into account the RememberedSet struct
109 * and doesn't waste any alloc padding space.
/*
 * Allocate a RememberedSet with room for SIZE pointer-sized entries.
 * store_next starts at the beginning of the data area; end_set marks its
 * limit. ID and GLOBAL are used only for logging here.
 */
111 static RememberedSet*
112 sgen_alloc_remset (int size, gpointer id, gboolean global)
114 RememberedSet* res = sgen_alloc_internal_dynamic (sizeof (RememberedSet) + (size * sizeof (gpointer)), INTERNAL_MEM_REMSET, TRUE);
115 res->store_next = res->data;
116 res->end_set = res->data + size;
118 SGEN_LOG (4, "Allocated%s remset size %d at %p for %p", global ? " global" : "", size, res->data, id);
/*
 * Write barrier for an object field store: record FIELD_PTR in the
 * thread's remembered set, then perform the store of VALUE. When the
 * current remset is full, a new one of the same size is chained in front
 * and becomes the thread's current remset.
 */
125 sgen_ssb_wbarrier_set_field (MonoObject *obj, gpointer field_ptr, MonoObject* value)
/* Fast path: room left in the current remset. */
132 if (rs->store_next < rs->end_set) {
133 *(rs->store_next++) = (mword)field_ptr;
134 *(void**)field_ptr = value;
/* Slow path: allocate a fresh remset and link the old one behind it. */
138 rs = sgen_alloc_remset (rs->end_set - rs->data, (void*)1, FALSE);
139 rs->next = REMEMBERED_SET;
141 #ifdef HAVE_KW_THREAD
/* Keep the thread-info copy in sync with the TLS fast path. */
142 mono_thread_info_current ()->remset = rs;
144 *(rs->store_next++) = (mword)field_ptr;
145 *(void**)field_ptr = value;
/*
 * Write barrier for an array element store: record SLOT_PTR in the
 * thread's remembered set, then perform the store. Mirrors
 * sgen_ssb_wbarrier_set_field, including the grow-and-relink slow path.
 */
150 sgen_ssb_wbarrier_set_arrayref (MonoArray *arr, gpointer slot_ptr, MonoObject* value)
/* Fast path: room left in the current remset. */
157 if (rs->store_next < rs->end_set) {
158 *(rs->store_next++) = (mword)slot_ptr;
159 *(void**)slot_ptr = value;
/* Slow path: allocate a fresh remset and link the old one behind it. */
163 rs = sgen_alloc_remset (rs->end_set - rs->data, (void*)1, FALSE);
164 rs->next = REMEMBERED_SET;
166 #ifdef HAVE_KW_THREAD
167 mono_thread_info_current ()->remset = rs;
169 *(rs->store_next++) = (mword)slot_ptr;
170 *(void**)slot_ptr = value;
/*
 * Write barrier for copying COUNT object references: perform the move,
 * then record a REMSET_RANGE entry (tagged destination pointer followed by
 * the element count) in the thread's remembered set.
 */
175 sgen_ssb_wbarrier_arrayref_copy (gpointer dest_ptr, gpointer src_ptr, int count)
180 mono_gc_memmove (dest_ptr, src_ptr, count * sizeof (gpointer));
183 SGEN_LOG (8, "Adding remset at %p, %d", dest_ptr, count);
/* A range entry needs two slots, hence the +1 headroom check. */
184 if (rs->store_next + 1 < rs->end_set) {
185 *(rs->store_next++) = (mword)dest_ptr | REMSET_RANGE;
186 *(rs->store_next++) = count;
/* Slow path: allocate a fresh remset and link the old one behind it. */
190 rs = sgen_alloc_remset (rs->end_set - rs->data, (void*)1, FALSE);
191 rs->next = REMEMBERED_SET;
193 #ifdef HAVE_KW_THREAD
194 mono_thread_info_current ()->remset = rs;
196 *(rs->store_next++) = (mword)dest_ptr | REMSET_RANGE;
197 *(rs->store_next++) = count;
/*
 * Write barrier for copying COUNT value-type instances of KLASS: perform
 * the move, then record a REMSET_VTYPE entry — tagged destination, the
 * class's GC descriptor, the count and the element size (four slots).
 */
203 sgen_ssb_wbarrier_value_copy (gpointer dest, gpointer src, int count, MonoClass *klass)
206 size_t element_size = mono_class_value_size (klass, NULL);
207 size_t size = count * element_size;
/* The GC descriptor must already be initialized to be stored in the remset. */
210 g_assert (klass->gc_descr_inited);
213 mono_gc_memmove (dest, src, size);
/* A vtype entry needs four slots, hence the +4 headroom check. */
216 if (rs->store_next + 4 < rs->end_set) {
217 *(rs->store_next++) = (mword)dest | REMSET_VTYPE;
218 *(rs->store_next++) = (mword)klass->gc_descr;
219 *(rs->store_next++) = (mword)count;
220 *(rs->store_next++) = (mword)element_size;
/* Slow path: allocate a fresh remset and link the old one behind it. */
224 rs = sgen_alloc_remset (rs->end_set - rs->data, (void*)1, FALSE);
225 rs->next = REMEMBERED_SET;
227 #ifdef HAVE_KW_THREAD
228 mono_thread_info_current ()->remset = rs;
230 *(rs->store_next++) = (mword)dest | REMSET_VTYPE;
231 *(rs->store_next++) = (mword)klass->gc_descr;
232 *(rs->store_next++) = (mword)count;
233 *(rs->store_next++) = (mword)element_size;
/*
 * Write barrier for a whole-object copy: copy SRC's payload into OBJ
 * (skipping the MonoObject header so the sync state is preserved), then
 * record a REMSET_OBJECT entry so OBJ is rescanned.
 */
238 sgen_ssb_wbarrier_object_copy (MonoObject* obj, MonoObject *src)
244 size = mono_object_class (obj)->instance_size;
247 SGEN_LOG (6, "Adding object remset for %p", obj);
250 /* do not copy the sync state */
251 mono_gc_memmove ((char*)obj + sizeof (MonoObject), (char*)src + sizeof (MonoObject),
252 size - sizeof (MonoObject));
/* Fast path: room left in the current remset. */
254 if (rs->store_next < rs->end_set) {
255 *(rs->store_next++) = (mword)obj | REMSET_OBJECT;
/* Slow path: allocate a fresh remset and link the old one behind it. */
259 rs = sgen_alloc_remset (rs->end_set - rs->data, (void*)1, FALSE);
260 rs->next = REMEMBERED_SET;
263 #ifdef HAVE_KW_THREAD
264 mono_thread_info_current ()->remset = rs;
266 *(rs->store_next++) = (mword)obj | REMSET_OBJECT;
/*
 * Generic write barrier that records PTR in the thread's sequential store
 * buffer without performing the store itself. Duplicate consecutive
 * stores to the same location are filtered; a full buffer is evacuated
 * into the generic store remset list first.
 */
271 sgen_ssb_wbarrier_generic_nostore (gpointer ptr)
279 buffer = STORE_REMSET_BUFFER;
280 index = STORE_REMSET_BUFFER_INDEX;
281 /* This simple optimization eliminates a sizable portion of
282 entries. Comparing it to the last but one entry as well
283 doesn't eliminate significantly more entries. */
284 if (buffer [index] == ptr) {
289 HEAVY_STAT (++stat_wbarrier_generic_store_remset);
/* Buffer full: flush it, which resets the index to 0. */
292 if (index >= STORE_REMSET_BUFFER_SIZE) {
293 evacuate_remset_buffer ();
294 index = STORE_REMSET_BUFFER_INDEX;
295 g_assert (index == 0);
298 buffer [index] = ptr;
299 STORE_REMSET_BUFFER_INDEX = index;
305 #ifdef HEAVY_STATISTICS
/*
 * Statistics helper: walk REMSET's entries, copying addresses into the
 * array at BUMPER while counting repeated locations (last1/last2 track the
 * two most recent entries). Presumably returns the advanced bumper —
 * the return statement is not visible here; TODO confirm.
 */
307 collect_store_remsets (RememberedSet *remset, mword *bumper)
309 mword *p = remset->data;
314 while (p < remset->store_next) {
315 switch ((*p) & REMSET_TYPE_MASK) {
316 case REMSET_LOCATION:
319 ++stat_saved_remsets_1;
/* Entry matches one of the last two seen: would have been saved by a 2-deep filter. */
321 if (*p == last1 || *p == last2) {
322 ++stat_saved_remsets_2;
/* Unknown remset entry type. */
339 g_assert_not_reached ();
/*
 * NOTE(review): this is the body of a statistics-gathering function whose
 * signature line is not visible in this view. It sizes a scratch array
 * from all remsets (per-thread, freed-thread and global), collects every
 * stored address into it, sorts, and updates the total/unique counters.
 */
349 RememberedSet *remset;
351 SgenThreadInfo *info;
352 mword *addresses, *bumper, *p, *r;
/* First pass: compute the total number of entries across all remset lists. */
354 FOREACH_THREAD (info) {
355 for (remset = info->remset; remset; remset = remset->next)
356 size += remset->store_next - remset->data;
358 for (remset = freed_thread_remsets; remset; remset = remset->next)
359 size += remset->store_next - remset->data;
360 for (remset = global_remset; remset; remset = remset->next)
361 size += remset->store_next - remset->data;
363 bumper = addresses = sgen_alloc_internal_dynamic (sizeof (mword) * size, INTERNAL_MEM_STATISTICS, TRUE);
/* Second pass: copy the entries into the scratch array. */
365 FOREACH_THREAD (info) {
366 for (remset = info->remset; remset; remset = remset->next)
367 bumper = collect_store_remsets (remset, bumper);
369 for (remset = global_remset; remset; remset = remset->next)
370 bumper = collect_store_remsets (remset, bumper);
371 for (remset = freed_thread_remsets; remset; remset = remset->next)
372 bumper = collect_store_remsets (remset, bumper);
374 g_assert (bumper <= addresses + size);
376 stat_store_remsets += bumper - addresses;
/* Sort so duplicates are adjacent; the unique count is derived below. */
378 sgen_sort_addresses ((void**)addresses, bumper - addresses);
387 stat_store_remsets_unique += p - addresses;
389 sgen_free_internal_dynamic (addresses, sizeof (mword) * size, INTERNAL_MEM_STATISTICS);
/*
 * Process one remset entry at P, copying/marking or scanning the objects
 * it refers to into QUEUE. GLOBAL distinguishes global from per-thread
 * remsets for statistics and re-registration. Presumably returns a
 * pointer just past the consumed entry (callers advance with the return
 * value) — the return statements are not visible here; TODO confirm.
 */
395 handle_remset (mword *p, void *start_nursery, void *end_nursery, gboolean global, SgenGrayQueue *queue)
402 HEAVY_STAT (++stat_global_remsets_processed);
404 HEAVY_STAT (++stat_local_remsets_processed);
406 /* FIXME: exclude stack locations */
407 switch ((*p) & REMSET_TYPE_MASK) {
/* Single location holding a possibly-nursery pointer. */
408 case REMSET_LOCATION:
410 //__builtin_prefetch (ptr);
/* Only locations outside the nursery are interesting. */
411 if (((void*)ptr < start_nursery || (void*)ptr >= end_nursery)) {
414 sgen_get_current_object_ops ()->copy_or_mark_object (ptr, queue);
415 SGEN_LOG (9, "Overwrote remset at %p with %p", ptr, *ptr);
417 binary_protocol_ptr_update (ptr, old, *ptr, (gpointer)SGEN_LOAD_VTABLE (*ptr), sgen_safe_object_get_size (*ptr));
/* Target still in the nursery (e.g. pinned): keep it in the global remset. */
418 if (!global && *ptr >= start_nursery && *ptr < end_nursery) {
420 * If the object is pinned, each reference to it from nonpinned objects
421 * becomes part of the global remset, which can grow very large.
423 SGEN_LOG (9, "Add to global remset because of pinning %p (%p %s)", ptr, *ptr, sgen_safe_name (*ptr));
424 sgen_add_to_global_remset (ptr);
427 SGEN_LOG (9, "Skipping remset at %p holding %p", ptr, *ptr);
/* Range entry: COUNT consecutive pointer slots starting at the untagged address. */
431 CopyOrMarkObjectFunc copy_func = sgen_get_current_object_ops ()->copy_or_mark_object;
433 ptr = (void**)(*p & ~REMSET_TYPE_MASK);
/* Ranges inside the nursery are skipped. */
434 if (((void*)ptr >= start_nursery && (void*)ptr < end_nursery))
437 while (count-- > 0) {
438 copy_func (ptr, queue);
439 SGEN_LOG (9, "Overwrote remset at %p with %p (count: %d)", ptr, *ptr, (int)count);
440 if (!global && *ptr >= start_nursery && *ptr < end_nursery)
441 sgen_add_to_global_remset (ptr);
/* Object entry: rescan the whole object. */
447 ptr = (void**)(*p & ~REMSET_TYPE_MASK);
448 if (((void*)ptr >= start_nursery && (void*)ptr < end_nursery))
450 sgen_get_current_object_ops ()->scan_object ((char*)ptr, queue);
/* Vtype entry: scan COUNT value-type instances using the stored descriptor. */
455 ptr = (void**)(*p & ~REMSET_TYPE_MASK);
456 if (((void*)ptr >= start_nursery && (void*)ptr < end_nursery))
461 while (count-- > 0) {
462 sgen_get_current_object_ops ()->scan_vtype ((char*)ptr, desc, queue);
463 ptr = (void**)((char*)ptr + skip_size);
/* Unknown remset entry type. */
468 g_assert_not_reached ();
/*
 * Scan the global remembered sets at the start of a nursery collection.
 * Each entry is processed through handle_remset; entries whose target is
 * still in the nursery are compacted in place (store_pos) so the remset
 * only retains locations that must be revisited next time.
 */
474 sgen_ssb_begin_scan_remsets (void *start_nursery, void *end_nursery, SgenGrayQueue *queue)
476 RememberedSet *remset;
477 mword *p, *next_p, *store_pos;
480 for (remset = global_remset; remset; remset = remset->next) {
481 SGEN_LOG (4, "Scanning global remset range: %p-%p, size: %td", remset->data, remset->store_next, remset->store_next - remset->data);
482 store_pos = remset->data;
483 for (p = remset->data; p < remset->store_next; p = next_p) {
484 void **ptr = (void**)p [0];
486 /*Ignore previously processed remset.*/
/* The two-entry cache filters locations already handled this collection. */
487 if (!global_remset_location_was_not_added (ptr)) {
492 next_p = handle_remset (p, start_nursery, end_nursery, TRUE, queue);
495 * Clear global remsets of locations which no longer point to the
496 * nursery. Otherwise, they could grow indefinitely between major
499 * Since all global remsets are location remsets, we don't need to unmask the pointer.
501 if (sgen_ptr_in_nursery (*ptr)) {
502 *store_pos ++ = p [0];
503 HEAVY_STAT (++stat_global_remsets_readded);
507 /* Truncate the remset */
508 remset->store_next = store_pos;
/*
 * Finish remset scanning for a nursery collection: process and free the
 * generic store remsets, every live thread's remsets and store buffer,
 * and finally the remsets inherited from dead threads. Per-thread remsets
 * are reset (store_next = data); only extra chained ones are freed.
 */
513 sgen_ssb_finish_scan_remsets (void *start_nursery, void *end_nursery, SgenGrayQueue *queue)
516 SgenThreadInfo *info;
517 RememberedSet *remset;
518 GenericStoreRememberedSet *store_remset;
521 #ifdef HEAVY_STATISTICS
525 /* the generic store ones */
526 store_remset = generic_store_remsets;
527 while (store_remset) {
528 GenericStoreRememberedSet *next = store_remset->next;
530 for (i = 0; i < STORE_REMSET_BUFFER_SIZE - 1; ++i) {
531 gpointer addr = store_remset->data [i];
/* Each stored address is handled as a single-location entry. */
533 handle_remset ((mword*)&addr, start_nursery, end_nursery, FALSE, queue);
536 sgen_free_internal (store_remset, INTERNAL_MEM_STORE_REMSET);
540 generic_store_remsets = NULL;
542 /* the per-thread ones */
543 FOREACH_THREAD (info) {
546 for (remset = info->remset; remset; remset = next) {
547 SGEN_LOG (4, "Scanning remset for thread %p, range: %p-%p, size: %td", info, remset->data, remset->store_next, remset->store_next - remset->data);
548 for (p = remset->data; p < remset->store_next;)
549 p = handle_remset (p, start_nursery, end_nursery, FALSE, queue);
/* Empty the remset; the head remset is kept for reuse, the rest are freed. */
550 remset->store_next = remset->data;
553 if (remset != info->remset) {
554 SGEN_LOG (4, "Freed remset at %p", remset->data);
555 sgen_free_internal_dynamic (remset, remset_byte_size (remset), INTERNAL_MEM_REMSET);
/* Note the + 1: slot 0 of the store buffer is skipped (see add_generic_store_remset_from_buffer). */
558 for (j = 0; j < *info->store_remset_buffer_index_addr; ++j)
559 handle_remset ((mword*)*info->store_remset_buffer_addr + j + 1, start_nursery, end_nursery, FALSE, queue);
560 clear_thread_store_remset_buffer (info);
563 /* the freed thread ones */
564 while (freed_thread_remsets) {
566 remset = freed_thread_remsets;
567 SGEN_LOG (4, "Scanning remset for freed thread, range: %p-%p, size: %td", remset->data, remset->store_next, remset->store_next - remset->data);
568 for (p = remset->data; p < remset->store_next;)
569 p = handle_remset (p, start_nursery, end_nursery, FALSE, queue);
571 SGEN_LOG (4, "Freed remset at %p", remset->data);
572 sgen_free_internal_dynamic (remset, remset_byte_size (remset), INTERNAL_MEM_REMSET);
573 freed_thread_remsets = next;
/*
 * Hand over an unregistering thread's remset data: its remset list is
 * appended to freed_thread_remsets (to be scanned and freed during the
 * next collection), any pending store-buffer entries are evacuated, and
 * the buffer itself is freed and nulled.
 */
579 sgen_ssb_cleanup_thread (SgenThreadInfo *p)
/* Append p's remset chain in front of the existing freed list. */
584 if (freed_thread_remsets) {
585 for (rset = p->remset; rset->next; rset = rset->next)
587 rset->next = freed_thread_remsets;
588 freed_thread_remsets = p->remset;
590 freed_thread_remsets = p->remset;
/* Preserve any not-yet-flushed store-buffer entries before freeing the buffer. */
594 if (*p->store_remset_buffer_index_addr)
595 add_generic_store_remset_from_buffer (*p->store_remset_buffer_addr);
596 sgen_free_internal (*p->store_remset_buffer_addr, INTERNAL_MEM_STORE_REMSET);
599 * This is currently not strictly required, but we do it
600 * anyway in case we change thread unregistering:
602 * If the thread is removed from the thread list after
603 * unregistering (this is currently not the case), and a
604 * collection occurs, clear_remsets() would want to memset
605 * this buffer, which would either clobber memory or crash.
607 *p->store_remset_buffer_addr = NULL;
/*
 * Set up per-thread remset state for a newly registered thread: allocate
 * its initial remembered set, publish it through native TLS (and the
 * __thread fast path when available), and allocate an empty store buffer.
 */
611 sgen_ssb_register_thread (SgenThreadInfo *info)
613 #ifndef HAVE_KW_THREAD
/* Without __thread support the TLS macros below go through __thread_info__. */
614 SgenThreadInfo *__thread_info__ = info;
617 info->remset = sgen_alloc_remset (DEFAULT_REMSET_SIZE, info, FALSE);
618 mono_native_tls_set_value (remembered_set_key, info->remset);
619 #ifdef HAVE_KW_THREAD
620 remembered_set = info->remset;
623 STORE_REMSET_BUFFER = sgen_alloc_internal (INTERNAL_MEM_STORE_REMSET);
624 STORE_REMSET_BUFFER_INDEX = 0;
627 #ifdef HAVE_KW_THREAD
/*
 * Before suspending a thread, copy the TLS fast-path remset pointer back
 * into the thread-info structure so the collector sees the current head.
 */
629 sgen_ssb_fill_thread_info_for_suspend (SgenThreadInfo *info)
631 /* update the remset info in the thread data structure */
632 info->remset = remembered_set;
/* Invalidate the two-entry global remset cache before a minor collection. */
637 sgen_ssb_prepare_for_minor_collection (void)
639 memset (global_remset_cache, 0, sizeof (global_remset_cache));
643 * Clear the info in the remembered sets: we're doing a major collection, so
644 * the per-thread ones are not needed and the global ones will be reconstructed
648 sgen_ssb_prepare_for_major_collection (void)
650 SgenThreadInfo *info;
651 RememberedSet *remset, *next;
653 sgen_ssb_prepare_for_minor_collection ();
655 /* the global list */
/* Empty every global remset; keep only the head allocation, free the rest. */
656 for (remset = global_remset; remset; remset = next) {
657 remset->store_next = remset->data;
660 if (remset != global_remset) {
661 SGEN_LOG (4, "Freed remset at %p", remset->data);
662 sgen_free_internal_dynamic (remset, remset_byte_size (remset), INTERNAL_MEM_REMSET);
665 /* the generic store ones */
666 while (generic_store_remsets) {
667 GenericStoreRememberedSet *gs_next = generic_store_remsets->next;
668 sgen_free_internal (generic_store_remsets, INTERNAL_MEM_STORE_REMSET);
669 generic_store_remsets = gs_next;
671 /* the per-thread ones */
/* Same pattern per thread: reset the head remset, free chained extras, clear the store buffer. */
672 FOREACH_THREAD (info) {
673 for (remset = info->remset; remset; remset = next) {
674 remset->store_next = remset->data;
677 if (remset != info->remset) {
678 SGEN_LOG (3, "Freed remset at %p", remset->data);
679 sgen_free_internal_dynamic (remset, remset_byte_size (remset), INTERNAL_MEM_REMSET);
682 clear_thread_store_remset_buffer (info);
685 /* the freed thread ones */
686 while (freed_thread_remsets) {
687 next = freed_thread_remsets->next;
688 SGEN_LOG (4, "Freed remset at %p", freed_thread_remsets->data);
689 sgen_free_internal_dynamic (freed_thread_remsets, remset_byte_size (freed_thread_remsets), INTERNAL_MEM_REMSET);
690 freed_thread_remsets = next;
696 * Tries to check if a given remset location was already added to the global remset.
699 * A 2 entry, LRU cache of recently seen location remsets.
701 * It's hand-coded instead of done using loops to reduce the number of memory references on cache hit.
703 * Returns TRUE if the element was added (i.e. it was not already cached).
706 global_remset_location_was_not_added (gpointer ptr)
/* Check the most-recently-used slot first. */
709 gpointer first = global_remset_cache [0], second;
711 HEAVY_STAT (++stat_global_remsets_discarded);
715 second = global_remset_cache [1];
718 /*Move the second to the front*/
719 global_remset_cache [0] = second;
720 global_remset_cache [1] = first;
722 HEAVY_STAT (++stat_global_remsets_discarded);
/* Miss: insert ptr, evicting the older entry. */
726 global_remset_cache [0] = second;
727 global_remset_cache [1] = ptr;
/*
 * Record PTR (an old-generation location holding a nursery pointer) in
 * the global remembered set. Duplicates are filtered by the two-entry
 * cache; the global remset grows by chaining a new set when full. The
 * global remset lock is taken when the collection runs in parallel.
 */
732 sgen_ssb_record_pointer (gpointer ptr)
735 gboolean lock = sgen_collection_is_parallel ();
736 gpointer obj = *(gpointer*)ptr;
/* By contract, ptr is outside the nursery and points at a nursery object. */
738 g_assert (!sgen_ptr_in_nursery (ptr) && sgen_ptr_in_nursery (obj));
/* Already recorded recently: nothing to do. */
743 if (!global_remset_location_was_not_added (ptr))
746 if (G_UNLIKELY (do_pin_stats))
747 sgen_pin_stats_register_global_remset (obj);
749 SGEN_LOG (8, "Adding global remset for %p", ptr);
750 binary_protocol_global_remset (ptr, *(gpointer*)ptr, (gpointer)SGEN_LOAD_VTABLE (obj));
752 HEAVY_STAT (++stat_global_remsets_added);
755 * FIXME: If an object remains pinned, we need to add it at every minor collection.
756 * To avoid uncontrolled growth of the global remset, only add each pointer once.
/* +3 headroom check — NOTE(review): a location entry uses one slot; reason for 3 not visible here. */
758 if (global_remset->store_next + 3 < global_remset->end_set) {
759 *(global_remset->store_next++) = (mword)ptr;
/* Full: chain a new same-sized remset in front of the global list. */
762 rs = sgen_alloc_remset (global_remset->end_set - global_remset->data, NULL, TRUE);
763 rs->next = global_remset;
765 *(global_remset->store_next++) = (mword)ptr;
/* Debug accounting of the total global remset size. */
768 int global_rs_size = 0;
770 for (rs = global_remset; rs; rs = rs->next) {
771 global_rs_size += rs->store_next - rs->data;
773 SGEN_LOG (4, "Global remset now has size %d", global_rs_size);
778 UNLOCK_GLOBAL_REMSET;
782 * ######################################################################
783 * ######## Debug support
784 * ######################################################################
/*
 * Debug helper: check whether ADDR is covered by the remset entry at P,
 * setting *FOUND accordingly, and advance past the entry. Mirrors the
 * entry formats consumed by handle_remset.
 */
788 find_in_remset_loc (mword *p, char *addr, gboolean *found)
794 switch ((*p) & REMSET_TYPE_MASK) {
795 case REMSET_LOCATION:
796 if (*p == (mword)addr)
/* Range entry: addr must fall within [ptr, ptr + count). */
800 ptr = (void**)(*p & ~REMSET_TYPE_MASK);
802 if ((void**)addr >= ptr && (void**)addr < ptr + count)
/* Object entry: addr must fall within the object's (aligned) extent. */
806 ptr = (void**)(*p & ~REMSET_TYPE_MASK);
807 count = sgen_safe_object_get_size ((MonoObject*)ptr);
808 count = SGEN_ALIGN_UP (count);
809 count /= sizeof (mword);
810 if ((void**)addr >= ptr && (void**)addr < ptr + count)
/* Vtype entry: extent derived from the stored descriptor's size. */
814 ptr = (void**)(*p & ~REMSET_TYPE_MASK);
819 /* The descriptor includes the size of MonoObject */
820 skip_size -= sizeof (MonoObject);
822 if ((void**)addr >= ptr && (void**)addr < ptr + (skip_size / sizeof (gpointer)))
827 g_assert_not_reached ();
832 * Return whether ADDR occurs in any of the remembered sets: global,
 * generic-store, per-thread (remsets and store buffers), and freed-thread.
 * Debug-only exhaustive search.
835 sgen_ssb_find_address (char *addr)
838 SgenThreadInfo *info;
839 RememberedSet *remset;
840 GenericStoreRememberedSet *store_remset;
842 gboolean found = FALSE;
845 for (remset = global_remset; remset; remset = remset->next) {
846 SGEN_LOG (4, "Scanning global remset range: %p-%p, size: %td", remset->data, remset->store_next, remset->store_next - remset->data);
847 for (p = remset->data; p < remset->store_next;) {
848 p = find_in_remset_loc (p, addr, &found);
854 /* the generic store ones */
855 for (store_remset = generic_store_remsets; store_remset; store_remset = store_remset->next) {
856 for (i = 0; i < STORE_REMSET_BUFFER_SIZE - 1; ++i) {
857 if (store_remset->data [i] == addr)
862 /* the per-thread ones */
863 FOREACH_THREAD (info) {
865 for (remset = info->remset; remset; remset = remset->next) {
866 SGEN_LOG (4, "Scanning remset for thread %p, range: %p-%p, size: %td", info, remset->data, remset->store_next, remset->store_next - remset->data);
867 for (p = remset->data; p < remset->store_next;) {
868 p = find_in_remset_loc (p, addr, &found);
/* Store buffers skip slot 0, hence the j + 1 indexing. */
873 for (j = 0; j < *info->store_remset_buffer_index_addr; ++j) {
874 if ((*info->store_remset_buffer_addr) [j + 1] == addr)
879 /* the freed thread ones */
880 for (remset = freed_thread_remsets; remset; remset = remset->next) {
881 SGEN_LOG (4, "Scanning remset for freed thread, range: %p-%p, size: %td", remset->data, remset->store_next, remset->store_next - remset->data);
882 for (p = remset->data; p < remset->store_next;) {
883 p = find_in_remset_loc (p, addr, &found);
/*
 * Initialize the SSB remembered-set implementation: create the global
 * remset and its lock, allocate the TLS key, register the statistics
 * counters (HEAVY_STATISTICS builds), and fill REMSET's function table
 * with the sgen_ssb_* entry points.
 */
893 sgen_ssb_init (SgenRemeberedSet *remset)
895 LOCK_INIT (global_remset_mutex);
897 global_remset = sgen_alloc_remset (1024, NULL, FALSE);
898 global_remset->next = NULL;
900 mono_native_tls_alloc (&remembered_set_key, NULL);
902 #ifdef HEAVY_STATISTICS
903 mono_counters_register ("WBarrier generic store stored", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_wbarrier_generic_store_remset);
905 mono_counters_register ("Store remsets", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_store_remsets);
906 mono_counters_register ("Unique store remsets", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_store_remsets_unique);
907 mono_counters_register ("Saved remsets 1", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_saved_remsets_1);
908 mono_counters_register ("Saved remsets 2", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_saved_remsets_2);
909 mono_counters_register ("Non-global remsets processed", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_local_remsets_processed);
910 mono_counters_register ("Global remsets added", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_global_remsets_added);
911 mono_counters_register ("Global remsets re-added", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_global_remsets_readded);
912 mono_counters_register ("Global remsets processed", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_global_remsets_processed);
913 mono_counters_register ("Global remsets discarded", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_global_remsets_discarded);
/* Wire up the vtable consumed by the rest of the collector. */
916 remset->wbarrier_set_field = sgen_ssb_wbarrier_set_field;
917 remset->wbarrier_set_arrayref = sgen_ssb_wbarrier_set_arrayref;
918 remset->wbarrier_arrayref_copy = sgen_ssb_wbarrier_arrayref_copy;
919 remset->wbarrier_value_copy = sgen_ssb_wbarrier_value_copy;
920 remset->wbarrier_object_copy = sgen_ssb_wbarrier_object_copy;
921 remset->wbarrier_generic_nostore = sgen_ssb_wbarrier_generic_nostore;
922 remset->record_pointer = sgen_ssb_record_pointer;
924 remset->begin_scan_remsets = sgen_ssb_begin_scan_remsets;
925 remset->finish_scan_remsets = sgen_ssb_finish_scan_remsets;
927 remset->register_thread = sgen_ssb_register_thread;
928 remset->cleanup_thread = sgen_ssb_cleanup_thread;
929 #ifdef HAVE_KW_THREAD
930 remset->fill_thread_info_for_suspend = sgen_ssb_fill_thread_info_for_suspend;
933 remset->prepare_for_minor_collection = sgen_ssb_prepare_for_minor_collection;
934 remset->prepare_for_major_collection = sgen_ssb_prepare_for_major_collection;
936 remset->find_address = sgen_ssb_find_address;
/* Stub used when SGen was built with --enable-minimal=sgen_wbarrier: report and fail. */
942 sgen_ssb_init (SgenRemeberedSet *remset)
944 fprintf (stderr, "Error: Mono was configured using --enable-minimal=sgen_wbarrier.\n");
948 #endif /* DISABLE_SGEN_REMSET */
950 #endif /* HAVE_SGEN_GC */