/*
 * sgen-major-copying.c: Simple generational GC.
 *
 * Author:
 *	Paolo Molaro (lupus@ximian.com)
 *
 * Copyright 2005-2010 Novell, Inc (http://www.novell.com)
 *
 * Thread start/stop adapted from Boehm's GC:
 * Copyright (c) 1994 by Xerox Corporation.  All rights reserved.
 * Copyright (c) 1996 by Silicon Graphics.  All rights reserved.
 * Copyright (c) 1998 by Fergus Henderson.  All rights reserved.
 * Copyright (c) 2000-2004 by Hewlett-Packard Company.  All rights reserved.
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED.  ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose,  provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 *
 * Copyright 2001-2003 Ximian, Inc
 * Copyright 2003-2010 Novell, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#include "config.h"

#ifdef HAVE_SGEN_GC

#include "utils/mono-counters.h"

#include "metadata/gc-internal.h"
#include "metadata/sgen-gc.h"
#include "metadata/sgen-protocol.h"
#include "metadata/mono-gc.h"
#include "metadata/object-internals.h"
#include "metadata/profiler-private.h"
#include "metadata/sgen-memory-governor.h"

#define MAJOR_SECTION_SIZE		SGEN_PINNED_CHUNK_SIZE
#define BLOCK_FOR_OBJECT(o)		SGEN_PINNED_CHUNK_FOR_PTR ((o))
#define MAJOR_SECTION_FOR_OBJECT(o)	((GCMemSection*)BLOCK_FOR_OBJECT ((o)))

#define MAJOR_OBJ_IS_IN_TO_SPACE(o)	(MAJOR_SECTION_FOR_OBJECT ((o))->is_to_space)

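/*
 * Major sections and pinned chunks are blocks of SGEN_PINNED_CHUNK_SIZE
 * bytes, allocated aligned to that same size (see alloc_major_section),
 * so BLOCK_FOR_OBJECT can recover the block header from an interior
 * pointer alone.
 */
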
static int num_major_sections = 0;

static GCMemSection *section_list = NULL;

static SgenPinnedAllocator pinned_allocator;

static gboolean have_swept;

/*
 * Bump-pointer allocation state for the current to-space section,
 * used when copying objects.
 */
static char *to_space_bumper = NULL;
static char *to_space_top = NULL;
static GCMemSection *to_space_section = NULL;

/* set at initialization in major_alloc_heap () */
static int nursery_bits;
static char *nursery_start;
static char *nursery_end;

#define ptr_in_nursery(p)	(SGEN_PTR_IN_NURSERY ((p), nursery_bits, nursery_start, nursery_end))

#ifdef HEAVY_STATISTICS
static long stat_major_copy_object_failed_forwarded = 0;
static long stat_major_copy_object_failed_pinned = 0;
static long stat_major_copy_object_failed_large_pinned = 0;
static long stat_major_copy_object_failed_to_space = 0;
#endif

static void*
major_alloc_heap (mword nursery_size, mword nursery_align, int the_nursery_bits)
{
	if (nursery_align)
		nursery_start = sgen_alloc_os_memory_aligned (nursery_size, nursery_align, TRUE, TRUE, "nursery");
	else
		nursery_start = sgen_alloc_os_memory (nursery_size, TRUE, TRUE, "nursery");

	nursery_end = nursery_start + nursery_size;
	nursery_bits = the_nursery_bits;

	return nursery_start;
}

static gboolean
obj_is_from_pinned_alloc (char *p)
{
	return BLOCK_FOR_OBJECT (p)->role == MEMORY_ROLE_PINNED;
}

static void
free_pinned_object (char *obj, size_t size)
{
	sgen_free_pinned (&pinned_allocator, obj, size);
}

/*
 * Allocate a new section of memory to be used for the old generation.
 */
static GCMemSection*
alloc_major_section (void)
{
	GCMemSection *section;
	int scan_starts;

	section = sgen_alloc_os_memory_aligned (MAJOR_SECTION_SIZE, MAJOR_SECTION_SIZE, TRUE, TRUE, "major heap section");
	section->next_data = section->data = (char*)section + SGEN_SIZEOF_GC_MEM_SECTION;
	g_assert (!((mword)section->data & 7));
	section->size = MAJOR_SECTION_SIZE - SGEN_SIZEOF_GC_MEM_SECTION;
	section->end_data = section->data + section->size;
	sgen_update_heap_boundaries ((mword)section->data, (mword)section->end_data);
	DEBUG (3, fprintf (gc_debug_file, "New major heap section: (%p-%p), total: %lld\n", section->data, section->end_data, (long long int)mono_gc_get_heap_size ()));
	scan_starts = (section->size + SGEN_SCAN_START_SIZE - 1) / SGEN_SCAN_START_SIZE;
	section->scan_starts = sgen_alloc_internal_dynamic (sizeof (char*) * scan_starts, INTERNAL_MEM_SCAN_STARTS, TRUE);
	section->num_scan_start = scan_starts;
	section->block.role = MEMORY_ROLE_GEN1;
	section->is_to_space = TRUE;

	/* add to the section list */
	section->block.next = section_list;
	section_list = section;

	++num_major_sections;

	return section;
}

static void
free_major_section (GCMemSection *section)
{
	DEBUG (3, fprintf (gc_debug_file, "Freed major section %p (%p-%p)\n", section, section->data, section->end_data));
	sgen_free_internal_dynamic (section->scan_starts,
			(section->size + SGEN_SCAN_START_SIZE - 1) / SGEN_SCAN_START_SIZE * sizeof (char*), INTERNAL_MEM_SCAN_STARTS);
	sgen_free_os_memory (section, MAJOR_SECTION_SIZE, TRUE);

	--num_major_sections;
}

static void
new_to_space_section (void)
{
	/* FIXME: if the current to_space_section is empty, we don't
	   have to allocate a new one */

	to_space_section = alloc_major_section ();
	to_space_bumper = to_space_section->next_data;
	to_space_top = to_space_section->end_data;
}

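/*
 * Commit the current bumper position back into the section, so that
 * next_data reflects how much of the section has actually been used.
 */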
static void
to_space_set_next_data (void)
{
	g_assert (to_space_bumper >= to_space_section->next_data && to_space_bumper <= to_space_section->end_data);
	to_space_section->next_data = to_space_bumper;
}

static void
to_space_expand (void)
{
	if (to_space_section) {
		g_assert (to_space_top == to_space_section->end_data);
		to_space_set_next_data ();
	}

	new_to_space_section ();
}

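/*
 * Bump-pointer allocation into the current to-space section: advance
 * to_space_bumper by the requested size, expanding into a fresh section
 * if the current one is exhausted, and record the new object in the
 * section's scan_starts table.
 */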
static void*
major_alloc_object (int size, gboolean has_references)
{
	char *dest = to_space_bumper;
	/* Make sure we have enough space available */
	if (dest + size > to_space_top) {
		to_space_expand ();
		dest = to_space_bumper;
		DEBUG (8, g_assert (dest + size <= to_space_top));
	}
	to_space_bumper += size;
	DEBUG (8, g_assert (to_space_bumper <= to_space_top));
	to_space_section->scan_starts [(dest - (char*)to_space_section->data)/SGEN_SCAN_START_SIZE] = dest;
	return dest;
}

static void
unset_to_space (void)
{
	/* between collections the to_space_bumper is invalidated
	   because degraded allocations might occur, so we set it to
	   NULL, just to make it explicit */
	to_space_bumper = NULL;

	/* don't unset to_space_section if we implement the FIXME in
	   new_to_space_section */
	to_space_section = NULL;
}

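/*
 * An object counts as live here only if it sits in a to-space major
 * section, i.e. it was copied during the current collection.  Nursery,
 * LOS and pinned-chunk objects always report FALSE; their liveness is
 * handled by other parts of the collector.
 */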
static gboolean
major_is_object_live (char *obj)
{
	mword objsize;

	/* nursery */
	if (ptr_in_nursery (obj))
		return FALSE;

	objsize = SGEN_ALIGN_UP (sgen_safe_object_get_size ((MonoObject*)obj));

	/* LOS */
	if (objsize > SGEN_MAX_SMALL_OBJ_SIZE)
		return FALSE;

	/* pinned chunk */
	if (obj_is_from_pinned_alloc (obj))
		return FALSE;

	/* now we know it's in a major heap section */
	return MAJOR_SECTION_FOR_OBJECT (obj)->is_to_space;
}

/* size is a multiple of ALLOC_ALIGN */
static void*
major_alloc_small_pinned_obj (size_t size, gboolean has_references)
{
	return sgen_alloc_pinned (&pinned_allocator, size);
}

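/*
 * Degraded allocation places an object directly in the old generation:
 * the section list is scanned first-fit for a section with enough free
 * space, and a new (non-to-space) section is allocated if none has
 * room.
 */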
/*
 * size is already rounded up and we hold the GC lock.
 */
static void*
major_alloc_degraded (MonoVTable *vtable, size_t size)
{
	GCMemSection *section;
	void **p = NULL;
	g_assert (size <= SGEN_MAX_SMALL_OBJ_SIZE);
	HEAVY_STAT (++stat_objects_alloced_degraded);
	HEAVY_STAT (stat_bytes_alloced_degraded += size);
	for (section = section_list; section; section = section->block.next) {
		if ((section->end_data - section->next_data) >= size) {
			p = (void**)section->next_data;
			break;
		}
	}
	if (!p) {
		section = alloc_major_section ();
		section->is_to_space = FALSE;
		/* FIXME: handle OOM */
		p = (void**)section->next_data;
		sgen_register_major_sections_alloced (1);
	}
	section->next_data += size;
	DEBUG (3, fprintf (gc_debug_file, "Allocated (degraded) object %p, vtable: %p (%s), size: %zd in section %p\n", p, vtable, vtable->klass->name, size, section));
	*p = vtable;
	return p;
}

static inline void
pin_major_object (char *obj, SgenGrayQueue *queue)
{
	sgen_pin_object (obj, queue);
}

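/*
 * This header provides copy_object_no_checks (), used by
 * major_copy_or_mark_object () below, specialized for this collector
 * through the definitions above.
 */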
#include "sgen-major-copy-object.h"

static void
major_copy_or_mark_object (void **obj_slot, SgenGrayQueue *queue)
{
	char *forwarded;
	char *obj = *obj_slot;
	mword objsize;

	DEBUG (9, g_assert (current_collection_generation == GENERATION_OLD));

	HEAVY_STAT (++stat_copy_object_called_major);

	DEBUG (9, fprintf (gc_debug_file, "Precise copy of %p from %p", obj, obj_slot));

	/*
	 * obj must belong to one of:
	 *
	 * 1. the nursery
	 * 2. the LOS
	 * 3. a pinned chunk
	 * 4. a non-to-space section of the major heap
	 * 5. a to-space section of the major heap
	 *
	 * In addition, objects in 1, 2 and 4 might also be pinned.
	 * Objects in 1 and 4 might be forwarded.
	 *
	 * Before we can copy the object we must make sure that we are
	 * allowed to, i.e. that the object is not pinned, not already
	 * forwarded, not in the nursery To Space and doesn't belong
	 * to the LOS, a pinned chunk, or a to-space section.
	 *
	 * We are usually called for to-space objects (5) when we have
	 * two remset entries for the same reference.  The first entry
	 * copies the object and updates the reference and the second
	 * calls us with the updated reference that points into
	 * to-space.  There might also be other circumstances where we
	 * get to-space objects.
	 */

	if ((forwarded = SGEN_OBJECT_IS_FORWARDED (obj))) {
		DEBUG (9, g_assert (((MonoVTable*)SGEN_LOAD_VTABLE(obj))->gc_descr));
		DEBUG (9, fprintf (gc_debug_file, " (already forwarded to %p)\n", forwarded));
		HEAVY_STAT (++stat_major_copy_object_failed_forwarded);
		*obj_slot = forwarded;
		return;
	}
	if (SGEN_OBJECT_IS_PINNED (obj)) {
		DEBUG (9, g_assert (((MonoVTable*)SGEN_LOAD_VTABLE(obj))->gc_descr));
		DEBUG (9, fprintf (gc_debug_file, " (pinned, no change)\n"));
		HEAVY_STAT (++stat_major_copy_object_failed_pinned);
		return;
	}

	if (ptr_in_nursery (obj)) {
		/* A To Space object is already on its final destination for the current collection. */
		if (sgen_nursery_is_to_space (obj))
			return;
		goto copy;
	}

	/*
	 * At this point we know obj is not pinned, not forwarded and
	 * belongs to 2, 3, 4, or 5.
	 *
	 * LOS objects (2) are simple, at least until we always follow
	 * the rule: if objsize > SGEN_MAX_SMALL_OBJ_SIZE, pin the
	 * object and return it.  At the end of major collections, we
	 * walk the los list and if the object is pinned, it is
	 * marked, otherwise it can be freed.
	 *
	 * Pinned chunks (3) and major heap sections (4, 5) both
	 * reside in blocks, which are always aligned, so once we've
	 * eliminated LOS objects, we can just access the block and
	 * see whether it's a pinned chunk or a major heap section.
	 */

	objsize = SGEN_ALIGN_UP (sgen_safe_object_get_size ((MonoObject*)obj));

	if (G_UNLIKELY (objsize > SGEN_MAX_SMALL_OBJ_SIZE || obj_is_from_pinned_alloc (obj))) {
		if (SGEN_OBJECT_IS_PINNED (obj))
			return;
		DEBUG (9, fprintf (gc_debug_file, " (marked LOS/Pinned %p (%s), size: %td)\n", obj, sgen_safe_name (obj), objsize));
		binary_protocol_pin (obj, (gpointer)SGEN_LOAD_VTABLE (obj), sgen_safe_object_get_size ((MonoObject*)obj));
		SGEN_PIN_OBJECT (obj);
		GRAY_OBJECT_ENQUEUE (queue, obj);
		HEAVY_STAT (++stat_major_copy_object_failed_large_pinned);
		return;
	}

	/*
	 * Now we know the object is in a major heap section.  All we
	 * need to do is check whether it's already in to-space (5) or
	 * not (4).
	 */
	if (MAJOR_OBJ_IS_IN_TO_SPACE (obj)) {
		DEBUG (9, g_assert (objsize <= SGEN_MAX_SMALL_OBJ_SIZE));
		DEBUG (9, fprintf (gc_debug_file, " (already copied)\n"));
		HEAVY_STAT (++stat_major_copy_object_failed_to_space);
		return;
	}

 copy:
	HEAVY_STAT (++stat_objects_copied_major);

	*obj_slot = copy_object_no_checks (obj, queue);
}

#include "sgen-major-scan-object.h"

/* FIXME: later reduce code duplication here with build_nursery_fragments().
 * We don't keep track of section fragments for non-nursery sections yet, so
 * just memset to 0.
 */
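/*
 * Walks the section's pinned objects (in address order), unpins each
 * one, zeroes the gaps between them, and advances next_data past the
 * last pinned object.
 */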
static void
build_section_fragments (GCMemSection *section)
{
	int i;
	char *frag_start, *frag_end;
	size_t frag_size;

	/* clear scan starts */
	memset (section->scan_starts, 0, section->num_scan_start * sizeof (gpointer));
	frag_start = section->data;
	section->next_data = section->data;
	for (i = 0; i < section->pin_queue_num_entries; ++i) {
		frag_end = section->pin_queue_start [i];
		/* remove the pin bit from pinned objects */
		SGEN_UNPIN_OBJECT (frag_end);
		if (frag_end >= section->data + section->size) {
			frag_end = section->data + section->size;
		} else {
			section->scan_starts [((char*)frag_end - (char*)section->data)/SGEN_SCAN_START_SIZE] = frag_end;
		}
		frag_size = frag_end - frag_start;
		if (frag_size) {
			binary_protocol_empty (frag_start, frag_size);
			memset (frag_start, 0, frag_size);
		}
		frag_size = SGEN_ALIGN_UP (sgen_safe_object_get_size ((MonoObject*)section->pin_queue_start [i]));
		frag_start = (char*)section->pin_queue_start [i] + frag_size;
		section->next_data = MAX (section->next_data, frag_start);
	}
	frag_end = section->end_data;
	frag_size = frag_end - frag_start;
	if (frag_size) {
		binary_protocol_empty (frag_start, frag_size);
		memset (frag_start, 0, frag_size);
	}
}

static void
sweep_pinned_objects_callback (char *ptr, size_t size, void *data)
{
	if (SGEN_OBJECT_IS_PINNED (ptr)) {
		SGEN_UNPIN_OBJECT (ptr);
		DEBUG (6, fprintf (gc_debug_file, "Unmarked pinned object %p (%s)\n", ptr, sgen_safe_name (ptr)));
	} else {
		DEBUG (6, fprintf (gc_debug_file, "Freeing unmarked pinned object %p (%s)\n", ptr, sgen_safe_name (ptr)));
		free_pinned_object (ptr, size);
	}
}

static void
sweep_pinned_objects (void)
{
	sgen_pinned_scan_objects (&pinned_allocator, sweep_pinned_objects_callback, NULL);
}

static void
major_iterate_objects (gboolean non_pinned, gboolean pinned, IterateObjectCallbackFunc callback, void *data)
{
	if (non_pinned) {
		GCMemSection *section;
		for (section = section_list; section; section = section->block.next)
			sgen_scan_area_with_callback (section->data, section->end_data, callback, data, FALSE);
	}
	if (pinned)
		sgen_pinned_scan_objects (&pinned_allocator, callback, data);
}

static void
major_free_non_pinned_object (char *obj, size_t size)
{
	memset (obj, 0, size);
}

static void
pin_pinned_object_callback (void *addr, size_t slot_size, SgenGrayQueue *queue)
{
	binary_protocol_pin (addr, (gpointer)SGEN_LOAD_VTABLE (addr), sgen_safe_object_get_size ((MonoObject*)addr));
	if (!SGEN_OBJECT_IS_PINNED (addr))
		sgen_pin_stats_register_object ((char*) addr, sgen_safe_object_get_size ((MonoObject*) addr));
	SGEN_PIN_OBJECT (addr);
	GRAY_OBJECT_ENQUEUE (queue, addr);
	DEBUG (6, fprintf (gc_debug_file, "Marked pinned object %p (%s) from roots\n", addr, sgen_safe_name (addr)));
}

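/*
 * Locate, for each major section, the slice of the address-sorted pin
 * queue that falls inside it.  Objects in the pinned chunks are pinned
 * unconditionally and enqueued for scanning.
 */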
static void
major_find_pin_queue_start_ends (SgenGrayQueue *queue)
{
	GCMemSection *section;

	for (section = section_list; section; section = section->block.next)
		sgen_find_section_pin_queue_start_end (section);
	sgen_pinned_scan_pinned_objects (&pinned_allocator, (IterateObjectCallbackFunc)pin_pinned_object_callback, queue);
}

static void
major_pin_objects (SgenGrayQueue *queue)
{
	GCMemSection *section;

	for (section = section_list; section; section = section->block.next)
		sgen_pin_objects_in_section (section, queue);
}

static void
major_init_to_space (void)
{
	new_to_space_section ();
}

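/*
 * Sweep after a major collection: unpin the surviving pinned-chunk
 * objects and free the dead ones, then walk the section list.  To-space
 * sections simply become regular sections for the next collection;
 * sections with no pinned objects are freed outright; sections that
 * still hold pinned objects have their dead space zeroed by
 * build_section_fragments ().
 */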
static void
major_sweep (void)
{
	GCMemSection *section, *prev_section;

	to_space_set_next_data ();
	unset_to_space ();

	/* unpin objects from the pinned chunks and free the unmarked ones */
	sweep_pinned_objects ();

	sgen_pinned_update_heap_boundaries (&pinned_allocator);

	/* free the unused sections */
	prev_section = NULL;
	for (section = section_list; section;) {
		GCMemSection *this_section = section;

		/* to_space doesn't need handling here */
		if (section->is_to_space) {
			section->is_to_space = FALSE;
			prev_section = section;
			section = section->block.next;
			goto update;
		}
		/* no pinned objects, so the section is free */
		if (!section->pin_queue_num_entries) {
			GCMemSection *to_free;
			g_assert (!section->pin_queue_start);
			if (prev_section)
				prev_section->block.next = section->block.next;
			else
				section_list = section->block.next;
			to_free = section;
			section = section->block.next;
			free_major_section (to_free);
			continue;
		} else {
			DEBUG (6, fprintf (gc_debug_file, "Section %p still has pinned objects (%d)\n", section, section->pin_queue_num_entries));
			build_section_fragments (section);
		}
		prev_section = section;
		section = section->block.next;

	update:
		sgen_update_heap_boundaries ((mword)this_section->data, (mword)this_section->data + this_section->size);
	}

	have_swept = TRUE;
}

static void
major_check_scan_starts (void)
{
	GCMemSection *section;
	for (section = section_list; section; section = section->block.next)
		sgen_check_section_scan_starts (section);
}

static void
major_dump_heap (FILE *heap_dump_file)
{
	GCMemSection *section;
	for (section = section_list; section; section = section->block.next)
		sgen_dump_section (section, "old");
	/* FIXME: dump pinned sections, too */
}

static gint64
major_get_used_size (void)
{
	gint64 tot = 0;
	GCMemSection *section;
	for (section = section_list; section; section = section->block.next) {
		/* this is approximate... */
		tot += section->next_data - section->data;
	}
	return tot;
}

/* only valid during minor collections */
static int old_num_major_sections;

static void
major_start_nursery_collection (void)
{
	old_num_major_sections = num_major_sections;

	if (!to_space_section) {
		new_to_space_section ();
	} else {
		/* we might have done degraded allocation since the
		   last collection */
		g_assert (to_space_bumper <= to_space_section->next_data);
		to_space_bumper = to_space_section->next_data;

		to_space_section->is_to_space = TRUE;
	}
}

static void
major_finish_nursery_collection (void)
{
	GCMemSection *section;
	int sections_alloced;

	to_space_set_next_data ();

	for (section = section_list; section; section = section->block.next)
		section->is_to_space = FALSE;

	sections_alloced = num_major_sections - old_num_major_sections;
	sgen_register_major_sections_alloced (sections_alloced);
}

static void
major_finish_major_collection (void)
{
}

static gboolean
major_ptr_is_in_non_pinned_space (char *ptr)
{
	GCMemSection *section;
	for (section = section_list; section; section = section->block.next) {
		if (ptr >= section->data && ptr < section->data + section->size)
			return TRUE;
	}
	return FALSE;
}

static void
major_report_pinned_memory_usage (void)
{
	sgen_report_pinned_mem_usage (&pinned_allocator);
}

static int
get_num_major_sections (void)
{
	return num_major_sections;
}

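/*
 * Entry point: fills in the SgenMajorCollector vtable with this
 * collector's implementations.  A caller is expected to do roughly
 * (hypothetical sketch):
 *
 *	SgenMajorCollector collector;
 *	sgen_copying_init (&collector);
 *	... use collector.alloc_object (), collector.sweep (), etc. ...
 */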
void
sgen_copying_init (SgenMajorCollector *collector)
{
#ifdef HEAVY_STATISTICS
	mono_counters_register ("# major copy_object() failed forwarded", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_major_copy_object_failed_forwarded);
	mono_counters_register ("# major copy_object() failed pinned", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_major_copy_object_failed_pinned);
	mono_counters_register ("# major copy_object() failed large or pinned chunk", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_major_copy_object_failed_large_pinned);
	mono_counters_register ("# major copy_object() failed to space", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_major_copy_object_failed_to_space);
#endif

	collector->section_size = MAJOR_SECTION_SIZE;
	collector->supports_cardtable = FALSE;
	collector->is_parallel = FALSE;

	collector->have_swept = &have_swept;

	collector->alloc_heap = major_alloc_heap;
	collector->is_object_live = major_is_object_live;
	collector->alloc_small_pinned_obj = major_alloc_small_pinned_obj;
	collector->alloc_degraded = major_alloc_degraded;
	collector->alloc_object = major_alloc_object;
	collector->free_pinned_object = free_pinned_object;
	collector->iterate_objects = major_iterate_objects;
	collector->free_non_pinned_object = major_free_non_pinned_object;
	collector->find_pin_queue_start_ends = major_find_pin_queue_start_ends;
	collector->pin_objects = major_pin_objects;
	collector->pin_major_object = pin_major_object;
	collector->init_to_space = major_init_to_space;
	collector->sweep = major_sweep;
	collector->check_scan_starts = major_check_scan_starts;
	collector->dump_heap = major_dump_heap;
	collector->get_used_size = major_get_used_size;
	collector->start_nursery_collection = major_start_nursery_collection;
	collector->finish_nursery_collection = major_finish_nursery_collection;
	collector->finish_major_collection = major_finish_major_collection;
	collector->ptr_is_in_non_pinned_space = major_ptr_is_in_non_pinned_space;
	collector->obj_is_from_pinned_alloc = obj_is_from_pinned_alloc;
	collector->report_pinned_memory_usage = major_report_pinned_memory_usage;
	collector->get_num_major_sections = get_num_major_sections;
	collector->handle_gc_param = NULL;
	collector->print_gc_param_usage = NULL;

	collector->major_ops.copy_or_mark_object = major_copy_or_mark_object;
	collector->major_ops.scan_object = major_scan_object;
}

#endif /* HAVE_SGEN_GC */