/*
 * Imported from mono.git: mono/metadata/sgen-marksweep.c
 * (merge of pull request #1219 from panzone/los_partial_marking)
 */
1 /*
2  * sgen-marksweep.c: The Mark & Sweep major collector.
3  *
4  * Author:
5  *      Mark Probst <mark.probst@gmail.com>
6  *
7  * Copyright 2009-2010 Novell, Inc.
8  * Copyright (C) 2012 Xamarin Inc
9  *
10  * This library is free software; you can redistribute it and/or
11  * modify it under the terms of the GNU Library General Public
12  * License 2.0 as published by the Free Software Foundation;
13  *
14  * This library is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
17  * Library General Public License for more details.
18  *
19  * You should have received a copy of the GNU Library General Public
20  * License 2.0 along with this library; if not, write to the Free
21  * Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
22  */
23
24 #include "config.h"
25
26 #ifdef HAVE_SGEN_GC
27
28 #include <math.h>
29 #include <errno.h>
30
31 #include "utils/mono-counters.h"
32 #include "utils/mono-semaphore.h"
33 #include "utils/mono-time.h"
34 #include "metadata/object-internals.h"
35 #include "metadata/profiler-private.h"
36
37 #include "metadata/sgen-gc.h"
38 #include "metadata/sgen-protocol.h"
39 #include "metadata/sgen-cardtable.h"
40 #include "metadata/sgen-memory-governor.h"
41 #include "metadata/sgen-layout-stats.h"
42 #include "metadata/gc-internal.h"
43
44 #if !defined(SGEN_PARALLEL_MARK) && !defined(FIXED_HEAP)
45 #define SGEN_HAVE_CONCURRENT_MARK
46 #endif
47
48 #define MS_BLOCK_SIZE   (16*1024)
49 #define MS_BLOCK_SIZE_SHIFT     14
50 #define MAJOR_SECTION_SIZE      MS_BLOCK_SIZE
51 #define CARDS_PER_BLOCK (MS_BLOCK_SIZE / CARD_SIZE_IN_BYTES)
52
53 #ifdef FIXED_HEAP
54 #define MS_DEFAULT_HEAP_NUM_BLOCKS      (32 * 1024) /* 512 MB */
55 #endif
56
57 /*
58  * Don't allocate single blocks, but alloc a contingent of this many
59  * blocks in one swoop.  This must be a power of two.
60  */
61 #define MS_BLOCK_ALLOC_NUM      32
62
63 /*
64  * Number of bytes before the first object in a block.  At the start
65  * of a block is the MSBlockHeader, then opional padding, then come
66  * the objects, so this must be >= sizeof (MSBlockHeader).
67  */
68 #ifdef FIXED_HEAP
69 #define MS_BLOCK_SKIP   0
70 #else
71 #define MS_BLOCK_SKIP   16
72 #endif
73
74 #define MS_BLOCK_FREE   (MS_BLOCK_SIZE - MS_BLOCK_SKIP)
75
76 #define MS_NUM_MARK_WORDS       ((MS_BLOCK_SIZE / SGEN_ALLOC_ALIGN + sizeof (mword) * 8 - 1) / (sizeof (mword) * 8))
77
78 #if SGEN_MAX_SMALL_OBJ_SIZE > MS_BLOCK_FREE / 2
79 #error MAX_SMALL_OBJ_SIZE must be at most MS_BLOCK_FREE / 2
80 #endif
81
82 typedef struct _MSBlockInfo MSBlockInfo;
83 struct _MSBlockInfo {
84         int obj_size;
85         int obj_size_index;
86         size_t pin_queue_num_entries;
87         unsigned int pinned : 1;
88         unsigned int has_references : 1;
89         unsigned int has_pinned : 1;    /* means cannot evacuate */
90         unsigned int is_to_space : 1;
91         unsigned int swept : 1;
92 #ifdef FIXED_HEAP
93         unsigned int used : 1;
94         unsigned int zeroed : 1;
95 #endif
96         MSBlockInfo *next;
97         char *block;
98         void **free_list;
99         MSBlockInfo *next_free;
100         void **pin_queue_start;
101 #ifdef SGEN_HAVE_CONCURRENT_MARK
102         guint8 *cardtable_mod_union;
103 #endif
104         mword mark_words [MS_NUM_MARK_WORDS];
105 };
106
107 #ifdef FIXED_HEAP
108 static mword ms_heap_num_blocks = MS_DEFAULT_HEAP_NUM_BLOCKS;
109
110 static char *ms_heap_start;
111 static char *ms_heap_end;
112
113 #define MS_PTR_IN_SMALL_MAJOR_HEAP(p)   ((char*)(p) >= ms_heap_start && (char*)(p) < ms_heap_end)
114
115 /* array of all all block infos in the system */
116 static MSBlockInfo *block_infos;
117 #endif
118
119 #define MS_BLOCK_OBJ(b,i)               ((b)->block + MS_BLOCK_SKIP + (b)->obj_size * (i))
120 #define MS_BLOCK_OBJ_FOR_SIZE(b,i,obj_size)             ((b)->block + MS_BLOCK_SKIP + (obj_size) * (i))
121 #define MS_BLOCK_DATA_FOR_OBJ(o)        ((char*)((mword)(o) & ~(mword)(MS_BLOCK_SIZE - 1)))
122
123 #ifdef FIXED_HEAP
124 #define MS_BLOCK_FOR_OBJ(o)             (&block_infos [(mword)((char*)(o) - ms_heap_start) >> MS_BLOCK_SIZE_SHIFT])
125 #else
126 typedef struct {
127         MSBlockInfo *info;
128 } MSBlockHeader;
129
130 #define MS_BLOCK_FOR_OBJ(o)             (((MSBlockHeader*)MS_BLOCK_DATA_FOR_OBJ ((o)))->info)
131 #endif
132
133 /* object index will always be small */
134 #define MS_BLOCK_OBJ_INDEX(o,b) ((int)(((char*)(o) - ((b)->block + MS_BLOCK_SKIP)) / (b)->obj_size))
135
136 //casting to int is fine since blocks are 32k
137 #define MS_CALC_MARK_BIT(w,b,o)         do {                            \
138                 int i = ((int)((char*)(o) - MS_BLOCK_DATA_FOR_OBJ ((o)))) >> SGEN_ALLOC_ALIGN_BITS; \
139                 if (sizeof (mword) == 4) {                              \
140                         (w) = i >> 5;                                   \
141                         (b) = i & 31;                                   \
142                 } else {                                                \
143                         (w) = i >> 6;                                   \
144                         (b) = i & 63;                                   \
145                 }                                                       \
146         } while (0)
147
148 #define MS_MARK_BIT(bl,w,b)     ((bl)->mark_words [(w)] & (ONE_P << (b)))
149 #define MS_SET_MARK_BIT(bl,w,b) ((bl)->mark_words [(w)] |= (ONE_P << (b)))
150 #define MS_PAR_SET_MARK_BIT(was_marked,bl,w,b)  do {                    \
151                 mword __old = (bl)->mark_words [(w)];                   \
152                 mword __bitmask = ONE_P << (b);                         \
153                 if (__old & __bitmask) {                                \
154                         was_marked = TRUE;                              \
155                         break;                                          \
156                 }                                                       \
157                 if (SGEN_CAS_PTR ((gpointer*)&(bl)->mark_words [(w)],   \
158                                                 (gpointer)(__old | __bitmask), \
159                                                 (gpointer)__old) ==     \
160                                 (gpointer)__old) {                      \
161                         was_marked = FALSE;                             \
162                         break;                                          \
163                 }                                                       \
164         } while (1)
165
166 #define MS_OBJ_ALLOCED(o,b)     (*(void**)(o) && (*(char**)(o) < (b)->block || *(char**)(o) >= (b)->block + MS_BLOCK_SIZE))
167
168 #define MS_BLOCK_OBJ_SIZE_FACTOR        (sqrt (2.0))
169
170 /*
171  * This way we can lookup block object size indexes for sizes up to
172  * 256 bytes with a single load.
173  */
174 #define MS_NUM_FAST_BLOCK_OBJ_SIZE_INDEXES      32
175
176 static int *block_obj_sizes;
177 static int num_block_obj_sizes;
178 static int fast_block_obj_size_indexes [MS_NUM_FAST_BLOCK_OBJ_SIZE_INDEXES];
179
180 #define MS_BLOCK_FLAG_PINNED    1
181 #define MS_BLOCK_FLAG_REFS      2
182
183 #define MS_BLOCK_TYPE_MAX       4
184
185 #ifdef SGEN_PARALLEL_MARK
186 static LOCK_DECLARE (ms_block_list_mutex);
187 #define LOCK_MS_BLOCK_LIST mono_mutex_lock (&ms_block_list_mutex)
188 #define UNLOCK_MS_BLOCK_LIST mono_mutex_unlock (&ms_block_list_mutex)
189 #endif
190
191 static gboolean *evacuate_block_obj_sizes;
192 static float evacuation_threshold = 0.666f;
193 #ifdef SGEN_HAVE_CONCURRENT_MARK
194 static float concurrent_evacuation_threshold = 0.666f;
195 static gboolean want_evacuation = FALSE;
196 #endif
197
198 static gboolean lazy_sweep = TRUE;
199 static gboolean have_swept;
200
201 #ifdef SGEN_HAVE_CONCURRENT_MARK
202 static gboolean concurrent_mark;
203 #endif
204
205 /* all allocated blocks in the system */
206 static MSBlockInfo *all_blocks;
207
208 #ifdef FIXED_HEAP
209 /* non-allocated block free-list */
210 static MSBlockInfo *empty_blocks = NULL;
211 #else
212 /* non-allocated block free-list */
213 static void *empty_blocks = NULL;
214 static size_t num_empty_blocks = 0;
215 #endif
216
217 #define FOREACH_BLOCK(bl)       for ((bl) = all_blocks; (bl); (bl) = (bl)->next) {
218 #define END_FOREACH_BLOCK       }
219
220 static size_t num_major_sections = 0;
221 /* one free block list for each block object size */
222 static MSBlockInfo **free_block_lists [MS_BLOCK_TYPE_MAX];
223
224 #ifdef SGEN_PARALLEL_MARK
225 #ifdef HAVE_KW_THREAD
226 static __thread MSBlockInfo ***workers_free_block_lists;
227 #else
228 static MonoNativeTlsKey workers_free_block_lists_key;
229 #endif
230 #endif
231
232 static long long stat_major_blocks_alloced = 0;
233 static long long stat_major_blocks_freed = 0;
234 static long long stat_major_blocks_lazy_swept = 0;
235 static long long stat_major_objects_evacuated = 0;
236
237 #if SIZEOF_VOID_P != 8
238 static long long stat_major_blocks_freed_ideal = 0;
239 static long long stat_major_blocks_freed_less_ideal = 0;
240 static long long stat_major_blocks_freed_individual = 0;
241 static long long stat_major_blocks_alloced_less_ideal = 0;
242 #endif
243
244 #ifdef SGEN_COUNT_NUMBER_OF_MAJOR_OBJECTS_MARKED
245 static long long num_major_objects_marked = 0;
246 #define INC_NUM_MAJOR_OBJECTS_MARKED()  (++num_major_objects_marked)
247 #else
248 #define INC_NUM_MAJOR_OBJECTS_MARKED()
249 #endif
250
251 static void
252 sweep_block (MSBlockInfo *block, gboolean during_major_collection);
253
254 static int
255 ms_find_block_obj_size_index (size_t size)
256 {
257         int i;
258         SGEN_ASSERT (9, size <= SGEN_MAX_SMALL_OBJ_SIZE, "size %d is bigger than max small object size %d", size, SGEN_MAX_SMALL_OBJ_SIZE);
259         for (i = 0; i < num_block_obj_sizes; ++i)
260                 if (block_obj_sizes [i] >= size)
261                         return i;
262         g_error ("no object of size %d\n", size);
263 }
264
265 #define FREE_BLOCKS_FROM(lists,p,r)     (lists [((p) ? MS_BLOCK_FLAG_PINNED : 0) | ((r) ? MS_BLOCK_FLAG_REFS : 0)])
266 #define FREE_BLOCKS(p,r)                (FREE_BLOCKS_FROM (free_block_lists, (p), (r)))
267 #ifdef SGEN_PARALLEL_MARK
268 #ifdef HAVE_KW_THREAD
269 #define FREE_BLOCKS_LOCAL(p,r)          (FREE_BLOCKS_FROM (workers_free_block_lists, (p), (r)))
270 #else
271 #define FREE_BLOCKS_LOCAL(p,r)          (FREE_BLOCKS_FROM (((MSBlockInfo***)(mono_native_tls_get_value (workers_free_block_lists_key))), (p), (r)))
272 #endif
273 #else
274 //#define FREE_BLOCKS_LOCAL(p,r)                (FREE_BLOCKS_FROM (free_block_lists, (p), (r)))
275 #endif
276
277 #define MS_BLOCK_OBJ_SIZE_INDEX(s)                              \
278         (((s)+7)>>3 < MS_NUM_FAST_BLOCK_OBJ_SIZE_INDEXES ?      \
279          fast_block_obj_size_indexes [((s)+7)>>3] :             \
280          ms_find_block_obj_size_index ((s)))
281
282 #ifdef FIXED_HEAP
283 static void*
284 major_alloc_heap (mword nursery_size, mword nursery_align, int the_nursery_bits)
285 {
286         char *nursery_start;
287         mword major_heap_size = ms_heap_num_blocks * MS_BLOCK_SIZE;
288         mword alloc_size = nursery_size + major_heap_size;
289         mword i;
290
291         g_assert (ms_heap_num_blocks > 0);
292         g_assert (nursery_size % MS_BLOCK_SIZE == 0);
293         if (nursery_align)
294                 g_assert (nursery_align % MS_BLOCK_SIZE == 0);
295
296         nursery_start = sgen_alloc_os_memory_aligned (alloc_size, nursery_align ? nursery_align : MS_BLOCK_SIZE, SGEN_ALLOC_HEAP | SGEN_ALLOC_ACTIVATE, "heap");
297         ms_heap_start = nursery_start + nursery_size;
298         ms_heap_end = ms_heap_start + major_heap_size;
299
300         block_infos = sgen_alloc_internal_dynamic (sizeof (MSBlockInfo) * ms_heap_num_blocks, INTERNAL_MEM_MS_BLOCK_INFO, TRUE);
301
302         for (i = 0; i < ms_heap_num_blocks; ++i) {
303                 block_infos [i].block = ms_heap_start + i * MS_BLOCK_SIZE;
304                 if (i < ms_heap_num_blocks - 1)
305                         block_infos [i].next_free = &block_infos [i + 1];
306                 else
307                         block_infos [i].next_free = NULL;
308                 block_infos [i].zeroed = TRUE;
309         }
310
311         empty_blocks = &block_infos [0];
312
313         return nursery_start;
314 }
315 #else
316 static void*
317 major_alloc_heap (mword nursery_size, mword nursery_align, int the_nursery_bits)
318 {
319         char *start;
320         if (nursery_align)
321                 start = sgen_alloc_os_memory_aligned (nursery_size, nursery_align, SGEN_ALLOC_HEAP | SGEN_ALLOC_ACTIVATE, "nursery");
322         else
323                 start = sgen_alloc_os_memory (nursery_size, SGEN_ALLOC_HEAP | SGEN_ALLOC_ACTIVATE, "nursery");
324
325         return start;
326 }
327 #endif
328
329 static void
330 update_heap_boundaries_for_block (MSBlockInfo *block)
331 {
332         sgen_update_heap_boundaries ((mword)block->block, (mword)block->block + MS_BLOCK_SIZE);
333 }
334
335 #ifdef FIXED_HEAP
336 static MSBlockInfo*
337 ms_get_empty_block (void)
338 {
339         MSBlockInfo *block;
340
341         g_assert (empty_blocks);
342
343         do {
344                 block = empty_blocks;
345         } while (SGEN_CAS_PTR ((gpointer*)&empty_blocks, block->next_free, block) != block);
346
347         block->used = TRUE;
348
349         if (!block->zeroed)
350                 memset (block->block, 0, MS_BLOCK_SIZE);
351
352         return block;
353 }
354
355 static void
356 ms_free_block (MSBlockInfo *block)
357 {
358         block->next_free = empty_blocks;
359         empty_blocks = block;
360         block->used = FALSE;
361         block->zeroed = FALSE;
362         sgen_memgov_release_space (MS_BLOCK_SIZE, SPACE_MAJOR);
363 }
364 #else
365 static void*
366 ms_get_empty_block (void)
367 {
368         char *p;
369         int i;
370         void *block, *empty, *next;
371
372  retry:
373         if (!empty_blocks) {
374                 /*
375                  * We try allocating MS_BLOCK_ALLOC_NUM blocks first.  If that's
376                  * unsuccessful, we halve the number of blocks and try again, until we're at
377                  * 1.  If that doesn't work, either, we assert.
378                  */
379                 int alloc_num = MS_BLOCK_ALLOC_NUM;
380                 for (;;) {
381                         p = sgen_alloc_os_memory_aligned (MS_BLOCK_SIZE * alloc_num, MS_BLOCK_SIZE, SGEN_ALLOC_HEAP | SGEN_ALLOC_ACTIVATE,
382                                         alloc_num == 1 ? "major heap section" : NULL);
383                         if (p)
384                                 break;
385                         alloc_num >>= 1;
386                 }
387
388                 for (i = 0; i < alloc_num; ++i) {
389                         block = p;
390                         /*
391                          * We do the free list update one after the
392                          * other so that other threads can use the new
393                          * blocks as quickly as possible.
394                          */
395                         do {
396                                 empty = empty_blocks;
397                                 *(void**)block = empty;
398                         } while (SGEN_CAS_PTR ((gpointer*)&empty_blocks, block, empty) != empty);
399                         p += MS_BLOCK_SIZE;
400                 }
401
402                 SGEN_ATOMIC_ADD_P (num_empty_blocks, alloc_num);
403
404                 stat_major_blocks_alloced += alloc_num;
405 #if SIZEOF_VOID_P != 8
406                 if (alloc_num != MS_BLOCK_ALLOC_NUM)
407                         stat_major_blocks_alloced_less_ideal += alloc_num;
408 #endif
409         }
410
411         do {
412                 empty = empty_blocks;
413                 if (!empty)
414                         goto retry;
415                 block = empty;
416                 next = *(void**)block;
417         } while (SGEN_CAS_PTR (&empty_blocks, next, empty) != empty);
418
419         SGEN_ATOMIC_ADD_P (num_empty_blocks, -1);
420
421         *(void**)block = NULL;
422
423         g_assert (!((mword)block & (MS_BLOCK_SIZE - 1)));
424
425         return block;
426 }
427
428 static void
429 ms_free_block (void *block)
430 {
431         void *empty;
432
433         sgen_memgov_release_space (MS_BLOCK_SIZE, SPACE_MAJOR);
434         memset (block, 0, MS_BLOCK_SIZE);
435
436         do {
437                 empty = empty_blocks;
438                 *(void**)block = empty;
439         } while (SGEN_CAS_PTR (&empty_blocks, block, empty) != empty);
440
441         SGEN_ATOMIC_ADD_P (num_empty_blocks, 1);
442 }
443 #endif
444
445 //#define MARKSWEEP_CONSISTENCY_CHECK
446
447 #ifdef MARKSWEEP_CONSISTENCY_CHECK
448 static void
449 check_block_free_list (MSBlockInfo *block, int size, gboolean pinned)
450 {
451         MSBlockInfo *b;
452
453         for (; block; block = block->next_free) {
454                 g_assert (block->obj_size == size);
455                 g_assert ((pinned && block->pinned) || (!pinned && !block->pinned));
456
457                 /* blocks in the free lists must have at least
458                    one free slot */
459                 if (block->swept)
460                         g_assert (block->free_list);
461
462 #ifdef FIXED_HEAP
463                 /* the block must not be in the empty_blocks list */
464                 for (b = empty_blocks; b; b = b->next_free)
465                         g_assert (b != block);
466 #endif
467                 /* the block must be in the all_blocks list */
468                 for (b = all_blocks; b; b = b->next) {
469                         if (b == block)
470                                 break;
471                 }
472                 g_assert (b == block);
473         }
474 }
475
476 static void
477 check_empty_blocks (void)
478 {
479 #ifndef FIXED_HEAP
480         void *p;
481         size_t i = 0;
482         for (p = empty_blocks; p; p = *(void**)p)
483                 ++i;
484         g_assert (i == num_empty_blocks);
485 #endif
486 }
487
488 static void
489 consistency_check (void)
490 {
491         MSBlockInfo *block;
492         int i;
493
494         /* check all blocks */
495         FOREACH_BLOCK (block) {
496                 int count = MS_BLOCK_FREE / block->obj_size;
497                 int num_free = 0;
498                 void **free;
499
500 #ifndef FIXED_HEAP
501                 /* check block header */
502                 g_assert (((MSBlockHeader*)block->block)->info == block);
503 #endif
504
505                 /* count number of free slots */
506                 for (i = 0; i < count; ++i) {
507                         void **obj = (void**) MS_BLOCK_OBJ (block, i);
508                         if (!MS_OBJ_ALLOCED (obj, block))
509                                 ++num_free;
510                 }
511
512                 /* check free list */
513                 for (free = block->free_list; free; free = (void**)*free) {
514                         g_assert (MS_BLOCK_FOR_OBJ (free) == block);
515                         --num_free;
516                 }
517                 g_assert (num_free == 0);
518
519                 /* check all mark words are zero */
520                 if (block->swept) {
521                         for (i = 0; i < MS_NUM_MARK_WORDS; ++i)
522                                 g_assert (block->mark_words [i] == 0);
523                 }
524         } END_FOREACH_BLOCK;
525
526         /* check free blocks */
527         for (i = 0; i < num_block_obj_sizes; ++i) {
528                 int j;
529                 for (j = 0; j < MS_BLOCK_TYPE_MAX; ++j)
530                         check_block_free_list (free_block_lists [j][i], block_obj_sizes [i], j & MS_BLOCK_FLAG_PINNED);
531         }
532
533         check_empty_blocks ();
534 }
535 #endif
536
537 static gboolean
538 ms_alloc_block (int size_index, gboolean pinned, gboolean has_references)
539 {
540         int size = block_obj_sizes [size_index];
541         int count = MS_BLOCK_FREE / size;
542         MSBlockInfo *info;
543 #ifdef SGEN_PARALLEL_MARK
544         MSBlockInfo *next;
545 #endif
546 #ifndef FIXED_HEAP
547         MSBlockHeader *header;
548 #endif
549         MSBlockInfo **free_blocks = FREE_BLOCKS (pinned, has_references);
550         char *obj_start;
551         int i;
552
553         if (!sgen_memgov_try_alloc_space (MS_BLOCK_SIZE, SPACE_MAJOR))
554                 return FALSE;
555
556 #ifdef FIXED_HEAP
557         info = ms_get_empty_block ();
558 #else
559         info = sgen_alloc_internal (INTERNAL_MEM_MS_BLOCK_INFO);
560 #endif
561
562         SGEN_ASSERT (9, count >= 2, "block with %d objects, it must hold at least 2", count);
563
564         info->obj_size = size;
565         info->obj_size_index = size_index;
566         info->pinned = pinned;
567         info->has_references = has_references;
568         info->has_pinned = pinned;
569         /*
570          * Blocks that are to-space are not evacuated from.  During an major collection
571          * blocks are allocated for two reasons: evacuating objects from the nursery and
572          * evacuating them from major blocks marked for evacuation.  In both cases we don't
573          * want further evacuation.
574          */
575         info->is_to_space = (sgen_get_current_collection_generation () == GENERATION_OLD);
576         info->swept = 1;
577 #ifndef FIXED_HEAP
578         info->block = ms_get_empty_block ();
579
580         header = (MSBlockHeader*) info->block;
581         header->info = info;
582 #endif
583 #ifdef SGEN_HAVE_CONCURRENT_MARK
584         info->cardtable_mod_union = NULL;
585 #endif
586
587         update_heap_boundaries_for_block (info);
588
589         /* build free list */
590         obj_start = info->block + MS_BLOCK_SKIP;
591         info->free_list = (void**)obj_start;
592         /* we're skipping the last one - it must be nulled */
593         for (i = 0; i < count - 1; ++i) {
594                 char *next_obj_start = obj_start + size;
595                 *(void**)obj_start = next_obj_start;
596                 obj_start = next_obj_start;
597         }
598         /* the last one */
599         *(void**)obj_start = NULL;
600
601 #ifdef SGEN_PARALLEL_MARK
602         do {
603                 next = info->next_free = free_blocks [size_index];
604         } while (SGEN_CAS_PTR ((void**)&free_blocks [size_index], info, next) != next);
605
606         do {
607                 next = info->next = all_blocks;
608         } while (SGEN_CAS_PTR ((void**)&all_blocks, info, next) != next);
609 #else
610         info->next_free = free_blocks [size_index];
611         free_blocks [size_index] = info;
612
613         info->next = all_blocks;
614         all_blocks = info;
615 #endif
616
617         ++num_major_sections;
618         return TRUE;
619 }
620
621 static gboolean
622 obj_is_from_pinned_alloc (char *ptr)
623 {
624         MSBlockInfo *block;
625
626         FOREACH_BLOCK (block) {
627                 if (ptr >= block->block && ptr <= block->block + MS_BLOCK_SIZE)
628                         return block->pinned;
629         } END_FOREACH_BLOCK;
630         return FALSE;
631 }
632
633 static void*
634 unlink_slot_from_free_list_uncontested (MSBlockInfo **free_blocks, int size_index)
635 {
636         MSBlockInfo *block;
637         void *obj;
638
639         block = free_blocks [size_index];
640         SGEN_ASSERT (9, block, "no free block to unlink from free_blocks %p size_index %d", free_blocks, size_index);
641
642         if (G_UNLIKELY (!block->swept)) {
643                 stat_major_blocks_lazy_swept ++;
644                 sweep_block (block, FALSE);
645         }
646
647         obj = block->free_list;
648         SGEN_ASSERT (9, obj, "block %p in free list had no available object to alloc from", block);
649
650         block->free_list = *(void**)obj;
651         if (!block->free_list) {
652                 free_blocks [size_index] = block->next_free;
653                 block->next_free = NULL;
654         }
655
656         return obj;
657 }
658
659 #ifdef SGEN_PARALLEL_MARK
660 static gboolean
661 try_remove_block_from_free_list (MSBlockInfo *block, MSBlockInfo **free_blocks, int size_index)
662 {
663         /*
664          * No more free slots in the block, so try to free the block.
665          * Don't try again if we don't succeed - another thread will
666          * already have done it.
667          */
668         MSBlockInfo *next_block = block->next_free;
669         if (SGEN_CAS_PTR ((void**)&free_blocks [size_index], next_block, block) == block) {
670                 /*
671                 void *old = SGEN_CAS_PTR ((void**)&block->next_free, NULL, next_block);
672                 g_assert (old == next_block);
673                 */
674                 block->next_free = NULL;
675                 return TRUE;
676         }
677         return FALSE;
678 }
679
680 static void*
681 alloc_obj_par (MonoVTable *vtable, int size, gboolean pinned, gboolean has_references)
682 {
683         int size_index = MS_BLOCK_OBJ_SIZE_INDEX (size);
684         MSBlockInfo **free_blocks_local = FREE_BLOCKS_LOCAL (pinned, has_references);
685         MSBlockInfo *block;
686         void *obj;
687
688 #ifdef SGEN_HAVE_CONCURRENT_MARK
689         if (concurrent_mark)
690                 g_assert_not_reached ();
691 #endif
692
693         SGEN_ASSERT (9, current_collection_generation == GENERATION_OLD, "old gen parallel allocator called from a %d collection", current_collection_generation);
694
695         if (free_blocks_local [size_index]) {
696         get_slot:
697                 obj = unlink_slot_from_free_list_uncontested (free_blocks_local, size_index);
698         } else {
699                 MSBlockInfo **free_blocks = FREE_BLOCKS (pinned, has_references);
700
701         get_block:
702                 block = free_blocks [size_index];
703                 if (block) {
704                         if (!try_remove_block_from_free_list (block, free_blocks, size_index))
705                                 goto get_block;
706
707                         g_assert (block->next_free == NULL);
708                         g_assert (block->free_list);
709                         block->next_free = free_blocks_local [size_index];
710                         free_blocks_local [size_index] = block;
711
712                         goto get_slot;
713                 } else {
714                         gboolean success;
715
716                         LOCK_MS_BLOCK_LIST;
717                         success = ms_alloc_block (size_index, pinned, has_references);
718                         UNLOCK_MS_BLOCK_LIST;
719
720                         if (G_UNLIKELY (!success))
721                                 return NULL;
722
723                         goto get_block;
724                 }
725         }
726
727         *(MonoVTable**)obj = vtable;
728
729         return obj;
730 }
731
732 static void*
733 major_par_alloc_object (MonoVTable *vtable, size_t size, gboolean has_references)
734 {
735         return alloc_obj_par (vtable, size, FALSE, has_references);
736 }
737 #endif
738
739 static void*
740 alloc_obj (MonoVTable *vtable, size_t size, gboolean pinned, gboolean has_references)
741 {
742         int size_index = MS_BLOCK_OBJ_SIZE_INDEX (size);
743         MSBlockInfo **free_blocks = FREE_BLOCKS (pinned, has_references);
744         void *obj;
745
746 #ifdef SGEN_PARALLEL_MARK
747         SGEN_ASSERT (9, current_collection_generation == GENERATION_OLD, "old gen parallel allocator called from a %d collection", current_collection_generation);
748
749 #endif
750
751         if (!free_blocks [size_index]) {
752                 if (G_UNLIKELY (!ms_alloc_block (size_index, pinned, has_references)))
753                         return NULL;
754         }
755
756         obj = unlink_slot_from_free_list_uncontested (free_blocks, size_index);
757
758         *(MonoVTable**)obj = vtable;
759
760         return obj;
761 }
762
763 static void*
764 major_alloc_object (MonoVTable *vtable, size_t size, gboolean has_references)
765 {
766         return alloc_obj (vtable, size, FALSE, has_references);
767 }
768
769 /*
770  * We're not freeing the block if it's empty.  We leave that work for
771  * the next major collection.
772  *
773  * This is just called from the domain clearing code, which runs in a
774  * single thread and has the GC lock, so we don't need an extra lock.
775  */
776 static void
777 free_object (char *obj, size_t size, gboolean pinned)
778 {
779         MSBlockInfo *block = MS_BLOCK_FOR_OBJ (obj);
780         int word, bit;
781
782         if (!block->swept)
783                 sweep_block (block, FALSE);
784         SGEN_ASSERT (9, (pinned && block->pinned) || (!pinned && !block->pinned), "free-object pinning mixup object %p pinned %d block %p pinned %d", obj, pinned, block, block->pinned);
785         SGEN_ASSERT (9, MS_OBJ_ALLOCED (obj, block), "object %p is already free", obj);
786         MS_CALC_MARK_BIT (word, bit, obj);
787         SGEN_ASSERT (9, !MS_MARK_BIT (block, word, bit), "object %p has mark bit set");
788         if (!block->free_list) {
789                 MSBlockInfo **free_blocks = FREE_BLOCKS (pinned, block->has_references);
790                 int size_index = MS_BLOCK_OBJ_SIZE_INDEX (size);
791                 SGEN_ASSERT (9, !block->next_free, "block %p doesn't have a free-list of object but belongs to a free-list of blocks");
792                 block->next_free = free_blocks [size_index];
793                 free_blocks [size_index] = block;
794         }
795         memset (obj, 0, size);
796         *(void**)obj = block->free_list;
797         block->free_list = (void**)obj;
798 }
799
800 static void
801 major_free_non_pinned_object (char *obj, size_t size)
802 {
803         free_object (obj, size, FALSE);
804 }
805
806 /* size is a multiple of SGEN_ALLOC_ALIGN */
807 static void*
808 major_alloc_small_pinned_obj (MonoVTable *vtable, size_t size, gboolean has_references)
809 {
810         void *res;
811
812         res = alloc_obj (vtable, size, TRUE, has_references);
813          /*If we failed to alloc memory, we better try releasing memory
814           *as pinned alloc is requested by the runtime.
815           */
816          if (!res) {
817                 sgen_perform_collection (0, GENERATION_OLD, "pinned alloc failure", TRUE);
818                 res = alloc_obj (vtable, size, TRUE, has_references);
819          }
820          return res;
821 }
822
823 static void
824 free_pinned_object (char *obj, size_t size)
825 {
826         free_object (obj, size, TRUE);
827 }
828
829 /*
830  * size is already rounded up and we hold the GC lock.
831  */
832 static void*
833 major_alloc_degraded (MonoVTable *vtable, size_t size)
834 {
835         void *obj;
836         size_t old_num_sections;
837
838         old_num_sections = num_major_sections;
839
840         obj = alloc_obj (vtable, size, FALSE, SGEN_VTABLE_HAS_REFERENCES (vtable));
841         if (G_LIKELY (obj)) {
842                 HEAVY_STAT (++stat_objects_alloced_degraded);
843                 HEAVY_STAT (stat_bytes_alloced_degraded += size);
844                 g_assert (num_major_sections >= old_num_sections);
845                 sgen_register_major_sections_alloced (num_major_sections - old_num_sections);
846         }
847         return obj;
848 }
849
850 #define MAJOR_OBJ_IS_IN_TO_SPACE(obj)   FALSE
851
852 /*
853  * obj is some object.  If it's not in the major heap (i.e. if it's in
854  * the nursery or LOS), return FALSE.  Otherwise return whether it's
855  * been marked or copied.
856  */
857 static gboolean
858 major_is_object_live (char *obj)
859 {
860         MSBlockInfo *block;
861         int word, bit;
862 #ifndef FIXED_HEAP
863         mword objsize;
864 #endif
865
866         if (sgen_ptr_in_nursery (obj))
867                 return FALSE;
868
869 #ifdef FIXED_HEAP
870         /* LOS */
871         if (!MS_PTR_IN_SMALL_MAJOR_HEAP (obj))
872                 return FALSE;
873 #else
874         objsize = SGEN_ALIGN_UP (sgen_safe_object_get_size ((MonoObject*)obj));
875
876         /* LOS */
877         if (objsize > SGEN_MAX_SMALL_OBJ_SIZE)
878                 return FALSE;
879 #endif
880
881         /* now we know it's in a major block */
882         block = MS_BLOCK_FOR_OBJ (obj);
883         SGEN_ASSERT (9, !block->pinned, "block %p is pinned, BTW why is this bad?");
884         MS_CALC_MARK_BIT (word, bit, obj);
885         return MS_MARK_BIT (block, word, bit) ? TRUE : FALSE;
886 }
887
/*
 * If PTR falls inside a major block, set *START to the beginning of the
 * object slot containing it (or NULL if no slot matches) and return
 * whether the block is non-pinned.  Returns FALSE for pointers outside
 * the major heap.
 */
static gboolean
major_ptr_is_in_non_pinned_space (char *ptr, char **start)
{
        MSBlockInfo *block;

        FOREACH_BLOCK (block) {
                if (ptr >= block->block && ptr <= block->block + MS_BLOCK_SIZE) {
                        int count = MS_BLOCK_FREE / block->obj_size;
                        int i;

                        *start = NULL;
                        /* find the slot whose [start, next-start) range contains PTR */
                        for (i = 0; i <= count; ++i) {
                                if (ptr >= MS_BLOCK_OBJ (block, i) && ptr < MS_BLOCK_OBJ (block, i + 1)) {
                                        *start = MS_BLOCK_OBJ (block, i);
                                        break;
                                }
                        }
                        return !block->pinned;
                }
        } END_FOREACH_BLOCK;
        return FALSE;
}
910
/*
 * Invoke CALLBACK on every allocated object in the major heap selected
 * by FLAGS (pinned and/or non-pinned blocks).  If ITERATE_OBJECTS_SWEEP
 * is requested and lazy sweeping is enabled, each block is swept before
 * being walked; blocks that remain unswept only report marked objects.
 */
static void
major_iterate_objects (IterateObjectsFlags flags, IterateObjectCallbackFunc callback, void *data)
{
        gboolean sweep = flags & ITERATE_OBJECTS_SWEEP;
        gboolean non_pinned = flags & ITERATE_OBJECTS_NON_PINNED;
        gboolean pinned = flags & ITERATE_OBJECTS_PINNED;
        MSBlockInfo *block;

        FOREACH_BLOCK (block) {
                int count = MS_BLOCK_FREE / block->obj_size;
                int i;

                /* skip blocks the caller didn't ask for */
                if (block->pinned && !pinned)
                        continue;
                if (!block->pinned && !non_pinned)
                        continue;
                if (sweep && lazy_sweep) {
                        sweep_block (block, FALSE);
                        SGEN_ASSERT (0, block->swept, "Block must be swept after sweeping");
                }

                for (i = 0; i < count; ++i) {
                        void **obj = (void**) MS_BLOCK_OBJ (block, i);
                        /* in an unswept block only marked slots hold live objects */
                        if (!block->swept) {
                                int word, bit;
                                MS_CALC_MARK_BIT (word, bit, obj);
                                if (!MS_MARK_BIT (block, word, bit))
                                        continue;
                        }
                        if (MS_OBJ_ALLOCED (obj, block))
                                callback ((char*)obj, block->obj_size, data);
                }
        } END_FOREACH_BLOCK;
}
945
/*
 * Return TRUE if OBJECT is exactly the start of an allocated object
 * slot in some major block.  Interior pointers and free slots yield
 * FALSE, as do pointers outside the major heap.
 */
static gboolean
major_is_valid_object (char *object)
{
        MSBlockInfo *block;

        FOREACH_BLOCK (block) {
                int idx;
                char *obj;

                /* skip blocks that don't contain OBJECT */
                if ((block->block > object) || ((block->block + MS_BLOCK_SIZE) <= object))
                        continue;

                idx = MS_BLOCK_OBJ_INDEX (object, block);
                obj = (char*)MS_BLOCK_OBJ (block, idx);
                /* an interior pointer is not a valid object reference */
                if (obj != object)
                        return FALSE;
                return MS_OBJ_ALLOCED (obj, block);
        } END_FOREACH_BLOCK;

        return FALSE;
}
967
968
/*
 * Debugging aid: if PTR falls inside a major block, log a description
 * of what it points at (object start vs. interior pointer, allocated
 * vs. dead, marked or not) and return the object's vtable when the
 * slot is allocated.  Returns NULL for dead slots and for pointers
 * outside the major heap.
 */
static MonoVTable*
major_describe_pointer (char *ptr)
{
        MSBlockInfo *block;

        FOREACH_BLOCK (block) {
                int idx;
                char *obj;
                gboolean live;
                MonoVTable *vtable;
                int w, b;
                gboolean marked;

                /* skip blocks that don't contain PTR */
                if ((block->block > ptr) || ((block->block + MS_BLOCK_SIZE) <= ptr))
                        continue;

                SGEN_LOG (0, "major-ptr (block %p sz %d pin %d ref %d)\n",
                        block->block, block->obj_size, block->pinned, block->has_references);

                idx = MS_BLOCK_OBJ_INDEX (ptr, block);
                obj = (char*)MS_BLOCK_OBJ (block, idx);
                live = MS_OBJ_ALLOCED (obj, block);
                vtable = live ? (MonoVTable*)SGEN_LOAD_VTABLE (obj) : NULL;

                MS_CALC_MARK_BIT (w, b, obj);
                marked = MS_MARK_BIT (block, w, b);

                if (obj == ptr) {
                        SGEN_LOG (0, "\t(");
                        if (live)
                                SGEN_LOG (0, "object");
                        else
                                SGEN_LOG (0, "dead-object");
                } else {
                        if (live)
                                SGEN_LOG (0, "interior-ptr offset %td", ptr - obj);
                        else
                                SGEN_LOG (0, "dead-interior-ptr offset %td", ptr - obj);
                }

                SGEN_LOG (0, " marked %d)\n", marked ? 1 : 0);

                return vtable;
        } END_FOREACH_BLOCK;

        return NULL;
}
1016
/* Mark&sweep blocks keep no scan starts, so there is nothing to check. */
static void
major_check_scan_starts (void)
{
}
1021
/*
 * Write per-size-class occupancy statistics and, for each block, the
 * runs of consecutively occupied slots, to HEAP_DUMP_FILE in the
 * heap-dump XML format.
 */
static void
major_dump_heap (FILE *heap_dump_file)
{
        MSBlockInfo *block;
        int *slots_available = alloca (sizeof (int) * num_block_obj_sizes);
        int *slots_used = alloca (sizeof (int) * num_block_obj_sizes);
        int i;

        for (i = 0; i < num_block_obj_sizes; ++i)
                slots_available [i] = slots_used [i] = 0;

        /* first pass: tally available vs. used slots per size class */
        FOREACH_BLOCK (block) {
                int index = ms_find_block_obj_size_index (block->obj_size);
                int count = MS_BLOCK_FREE / block->obj_size;

                slots_available [index] += count;
                for (i = 0; i < count; ++i) {
                        if (MS_OBJ_ALLOCED (MS_BLOCK_OBJ (block, i), block))
                                ++slots_used [index];
                }
        } END_FOREACH_BLOCK;

        fprintf (heap_dump_file, "<occupancies>\n");
        for (i = 0; i < num_block_obj_sizes; ++i) {
                fprintf (heap_dump_file, "<occupancy size=\"%d\" available=\"%d\" used=\"%d\" />\n",
                                block_obj_sizes [i], slots_available [i], slots_used [i]);
        }
        fprintf (heap_dump_file, "</occupancies>\n");

        /* second pass: dump each block's runs of occupied slots */
        FOREACH_BLOCK (block) {
                int count = MS_BLOCK_FREE / block->obj_size;
                int i;
                int start = -1;      /* first slot of the current occupied run, -1 if none open */

                fprintf (heap_dump_file, "<section type=\"%s\" size=\"%zu\">\n", "old", (size_t)MS_BLOCK_FREE);

                /* iterate one past the end so a run ending at the last slot is flushed */
                for (i = 0; i <= count; ++i) {
                        if ((i < count) && MS_OBJ_ALLOCED (MS_BLOCK_OBJ (block, i), block)) {
                                if (start < 0)
                                        start = i;
                        } else {
                                if (start >= 0) {
                                        sgen_dump_occupied (MS_BLOCK_OBJ (block, start), MS_BLOCK_OBJ (block, i), block->block);
                                        start = -1;
                                }
                        }
                }

                fprintf (heap_dump_file, "</section>\n");
        } END_FOREACH_BLOCK;
}
1073
#define LOAD_VTABLE     SGEN_LOAD_VTABLE

/*
 * Mark OBJ and enqueue it for scanning, but only if it is actually
 * allocated.  Used where the caller cannot guarantee allocation, e.g.
 * pin-queue entries resolved to slot starts.
 */
#define MS_MARK_OBJECT_AND_ENQUEUE_CHECKED(obj,block,queue) do {        \
                int __word, __bit;                                      \
                MS_CALC_MARK_BIT (__word, __bit, (obj));                \
                if (!MS_MARK_BIT ((block), __word, __bit) && MS_OBJ_ALLOCED ((obj), (block))) { \
                        MS_SET_MARK_BIT ((block), __word, __bit);       \
                        if ((block)->has_references)                    \
                                GRAY_OBJECT_ENQUEUE ((queue), (obj));   \
                        binary_protocol_mark ((obj), (gpointer)LOAD_VTABLE ((obj)), sgen_safe_object_get_size ((MonoObject*)(obj))); \
                        INC_NUM_MAJOR_OBJECTS_MARKED ();                \
                }                                                       \
        } while (0)
/*
 * Mark OBJ (asserted to be allocated) and enqueue it for scanning if
 * its block can contain references.  Serial marking only — the mark
 * bit is set non-atomically.
 */
#define MS_MARK_OBJECT_AND_ENQUEUE(obj,block,queue) do {                \
                int __word, __bit;                                      \
                MS_CALC_MARK_BIT (__word, __bit, (obj));                \
                SGEN_ASSERT (9, MS_OBJ_ALLOCED ((obj), (block)), "object %p not allocated", obj);       \
                if (!MS_MARK_BIT ((block), __word, __bit)) {            \
                        MS_SET_MARK_BIT ((block), __word, __bit);       \
                        if ((block)->has_references)                    \
                                GRAY_OBJECT_ENQUEUE ((queue), (obj));   \
                        binary_protocol_mark ((obj), (gpointer)LOAD_VTABLE ((obj)), sgen_safe_object_get_size ((MonoObject*)(obj))); \
                        INC_NUM_MAJOR_OBJECTS_MARKED ();                \
                }                                                       \
        } while (0)
/*
 * Parallel-mark variant: the mark bit is set atomically via
 * MS_PAR_SET_MARK_BIT, so exactly one racing worker observes
 * !__was_marked and enqueues the object.
 */
#define MS_PAR_MARK_OBJECT_AND_ENQUEUE(obj,block,queue) do {            \
                int __word, __bit;                                      \
                gboolean __was_marked;                                  \
                SGEN_ASSERT (9, MS_OBJ_ALLOCED ((obj), (block)), "object %p not allocated", obj);       \
                MS_CALC_MARK_BIT (__word, __bit, (obj));                \
                MS_PAR_SET_MARK_BIT (__was_marked, (block), __word, __bit); \
                if (!__was_marked) {                                    \
                        if ((block)->has_references)                    \
                                GRAY_OBJECT_ENQUEUE ((queue), (obj));   \
                        binary_protocol_mark ((obj), (gpointer)LOAD_VTABLE ((obj)), sgen_safe_object_get_size ((MonoObject*)(obj))); \
                        INC_NUM_MAJOR_OBJECTS_MARKED ();                \
                }                                                       \
        } while (0)
1112
/*
 * Pin OBJ in place: flag its block as containing pinned objects and
 * mark/enqueue the object.  Not supported during concurrent mark.
 */
static void
pin_major_object (char *obj, SgenGrayQueue *queue)
{
        MSBlockInfo *block;

#ifdef SGEN_HAVE_CONCURRENT_MARK
        if (concurrent_mark)
                g_assert_not_reached ();
#endif

        block = MS_BLOCK_FOR_OBJ (obj);
        block->has_pinned = TRUE;
        MS_MARK_OBJECT_AND_ENQUEUE (obj, block, queue);
}
1127
1128 #include "sgen-major-copy-object.h"
1129
1130 #ifdef SGEN_PARALLEL_MARK
/*
 * Parallel copy-or-mark: promote OBJ out of the nursery (or evacuate /
 * mark it in the major heap, or pin it in the LOS), updating *PTR to
 * the object's final location.  Races between workers are resolved by
 * CAS on the object's vtable word (forwarding) and on the LOS size
 * word (pinning).
 */
static void
major_copy_or_mark_object (void **ptr, void *obj, SgenGrayQueue *queue)
{
        mword objsize;
        MSBlockInfo *block;
        MonoVTable *vt;

        HEAVY_STAT (++stat_copy_object_called_major);

        SGEN_ASSERT (9, obj, "null object from pointer %p", ptr);
        SGEN_ASSERT (9, current_collection_generation == GENERATION_OLD, "old gen parallel allocator called from a %d collection", current_collection_generation);

        if (sgen_ptr_in_nursery (obj)) {
                int word, bit;
                gboolean has_references;
                void *destination;
                mword vtable_word = *(mword*)obj;
                vt = (MonoVTable*)(vtable_word & ~SGEN_VTABLE_BITS_MASK);

                /* another worker already forwarded it: just update the slot */
                if (vtable_word & SGEN_FORWARDED_BIT) {
                        *ptr = (void*)vt;
                        return;
                }

                if (vtable_word & SGEN_PINNED_BIT)
                        return;

                /* An object in the nursery To Space has already been copied and grayed. Nothing to do. */
                if (sgen_nursery_is_to_space (obj))
                        return;

                HEAVY_STAT (++stat_objects_copied_major);

        do_copy_object:
                objsize = SGEN_ALIGN_UP (sgen_par_object_get_size (vt, (MonoObject*)obj));
                has_references = SGEN_VTABLE_HAS_REFERENCES (vt);

                destination = sgen_minor_collector.par_alloc_for_promotion (vt, obj, objsize, has_references);
                if (G_UNLIKELY (!destination)) {
                        /* allocation failed: stop evacuating this size class and pin instead */
                        if (!sgen_ptr_in_nursery (obj)) {
                                int size_index;
                                block = MS_BLOCK_FOR_OBJ (obj);
                                size_index = block->obj_size_index;
                                evacuate_block_obj_sizes [size_index] = FALSE;
                        }

                        sgen_parallel_pin_or_update (ptr, obj, vt, queue);
                        sgen_set_pinned_from_failed_allocation (objsize);
                        return;
                }

                /* race to install the forwarding pointer over the vtable word */
                if (SGEN_CAS_PTR (obj, (void*)((mword)destination | SGEN_FORWARDED_BIT), vt) == vt) {
                        gboolean was_marked;

                        par_copy_object_no_checks (destination, vt, obj, objsize, has_references ? queue : NULL);
                        obj = destination;
                        *ptr = obj;

                        /*
                         * FIXME: If we make major_alloc_object() give
                         * us the block info, too, we won't have to
                         * re-fetch it here.
                         *
                         * FIXME (2): We should rework this to avoid all those nursery checks.
                         */
                        /*
                         * For the split nursery allocator the object
                         * might still be in the nursery despite
                         * having being promoted, in which case we
                         * can't mark it.
                         */
                        if (!sgen_ptr_in_nursery (obj)) {
                                block = MS_BLOCK_FOR_OBJ (obj);
                                MS_CALC_MARK_BIT (word, bit, obj);
                                SGEN_ASSERT (9, !MS_MARK_BIT (block, word, bit), "object %p already marked", obj);
                                MS_PAR_SET_MARK_BIT (was_marked, block, word, bit);
                                binary_protocol_mark (obj, vt, sgen_safe_object_get_size ((MonoObject*)obj));
                        }
                } else {
                        /*
                         * FIXME: We have allocated destination, but
                         * we cannot use it.  Give it back to the
                         * allocator.
                         */
                        *(void**)destination = NULL;

                        /* we lost the race: follow the winner's forwarding pointer */
                        vtable_word = *(mword*)obj;
                        g_assert (vtable_word & SGEN_FORWARDED_BIT);

                        obj = (void*)(vtable_word & ~SGEN_VTABLE_BITS_MASK);

                        *ptr = obj;

                        HEAVY_STAT (++stat_slots_allocated_in_vain);
                }
        } else {
#ifdef FIXED_HEAP
                if (MS_PTR_IN_SMALL_MAJOR_HEAP (obj))
#else
                mword vtable_word = *(mword*)obj;
                vt = (MonoVTable*)(vtable_word & ~SGEN_VTABLE_BITS_MASK);

                /* see comment in the non-parallel version below */
                if (vtable_word & SGEN_FORWARDED_BIT) {
                        *ptr = (void*)vt;
                        return;
                }
                objsize = SGEN_ALIGN_UP (sgen_par_object_get_size (vt, (MonoObject*)obj));

                if (objsize <= SGEN_MAX_SMALL_OBJ_SIZE)
#endif
                {
                        int size_index;

                        block = MS_BLOCK_FOR_OBJ (obj);
                        size_index = block->obj_size_index;

                        if (!block->has_pinned && evacuate_block_obj_sizes [size_index]) {
                                if (block->is_to_space)
                                        return;

#ifdef FIXED_HEAP
                                {
                                        mword vtable_word = *(mword*)obj;
                                        vt = (MonoVTable*)(vtable_word & ~SGEN_VTABLE_BITS_MASK);

                                        if (vtable_word & SGEN_FORWARDED_BIT) {
                                                *ptr = (void*)vt;
                                                return;
                                        }
                                }
#endif

                                HEAVY_STAT (++stat_major_objects_evacuated);
                                goto do_copy_object;
                        }

                        MS_PAR_MARK_OBJECT_AND_ENQUEUE (obj, block, queue);
                } else {
                        /* LOS object: pin it by atomically setting the low bit of its size word */
                        LOSObject *bigobj = sgen_los_header_for_object (obj);
                        mword size_word = bigobj->size;
#ifdef FIXED_HEAP
                        mword vtable_word = *(mword*)obj;
                        vt = (MonoVTable*)(vtable_word & ~SGEN_VTABLE_BITS_MASK);
#endif
                        if (size_word & 1)
                                return;
                        binary_protocol_pin (obj, vt, sgen_safe_object_get_size ((MonoObject*)obj));
                        if (SGEN_CAS_PTR ((void*)&bigobj->size, (void*)(size_word | 1), (void*)size_word) == (void*)size_word) {
                                if (SGEN_VTABLE_HAS_REFERENCES (vt))
                                        GRAY_PARTIAL_OBJECT_ENQUEUE (queue,obj);
                        } else {
                                g_assert (sgen_los_object_is_pinned (obj));
                        }
                }
        }
}
1288 #else
1289 #ifdef SGEN_HAVE_CONCURRENT_MARK
/*
 * Concurrent-mark variant: objects are never copied, only marked in
 * place (major heap) or pinned (LOS).  Nursery objects are skipped —
 * the concurrent collector only processes the old generation.
 */
static void
major_copy_or_mark_object_concurrent (void **ptr, void *obj, SgenGrayQueue *queue)
{
        g_assert (!SGEN_OBJECT_IS_FORWARDED (obj));

        if (!sgen_ptr_in_nursery (obj)) {
#ifdef FIXED_HEAP
                if (MS_PTR_IN_SMALL_MAJOR_HEAP (obj))
#else
                mword objsize;

                objsize = SGEN_ALIGN_UP (sgen_safe_object_get_size ((MonoObject*)obj));

                if (objsize <= SGEN_MAX_SMALL_OBJ_SIZE)
#endif
                {
                        /* small major object: mark in place */
                        MSBlockInfo *block = MS_BLOCK_FOR_OBJ (obj);
                        MS_MARK_OBJECT_AND_ENQUEUE (obj, block, queue);
                } else {
                        /* LOS object: pin and gray it if it has references */
                        if (sgen_los_object_is_pinned (obj))
                                return;

#ifdef ENABLE_DTRACE
                        if (G_UNLIKELY (MONO_GC_OBJ_PINNED_ENABLED ())) {
                                MonoVTable *vt = (MonoVTable*)SGEN_LOAD_VTABLE (obj);
                                MONO_GC_OBJ_PINNED ((mword)obj, sgen_safe_object_get_size (obj), vt->klass->name_space, vt->klass->name, GENERATION_OLD);
                        }
#endif

                        sgen_los_pin_object (obj);
                        if (SGEN_OBJECT_HAS_REFERENCES (obj))
                                GRAY_OBJECT_ENQUEUE (queue, obj);
                        INC_NUM_MAJOR_OBJECTS_MARKED ();
                }
        }
}
1326 #endif
1327
/*
 * Serial copy-or-mark: copy OBJ out of the nursery (also evacuating
 * major objects of size classes selected for evacuation), or mark it
 * in place / pin it in the LOS, updating *PTR to the object's final
 * location.
 */
static void
major_copy_or_mark_object (void **ptr, void *obj, SgenGrayQueue *queue)
{
        MSBlockInfo *block;

        HEAVY_STAT (++stat_copy_object_called_major);

        SGEN_ASSERT (9, obj, "null object from pointer %p", ptr);
        SGEN_ASSERT (9, current_collection_generation == GENERATION_OLD, "old gen parallel allocator called from a %d collection", current_collection_generation);

        if (sgen_ptr_in_nursery (obj)) {
                int word, bit;
                char *forwarded, *old_obj;

                /* already copied: just update the slot */
                if ((forwarded = SGEN_OBJECT_IS_FORWARDED (obj))) {
                        *ptr = forwarded;
                        return;
                }
                if (SGEN_OBJECT_IS_PINNED (obj))
                        return;

                /* An object in the nursery To Space has already been copied and grayed. Nothing to do. */
                if (sgen_nursery_is_to_space (obj))
                        return;

                HEAVY_STAT (++stat_objects_copied_major);

        do_copy_object:
                old_obj = obj;
                obj = copy_object_no_checks (obj, queue);
                if (G_UNLIKELY (old_obj == obj)) {
                        /*If we fail to evacuate an object we just stop doing it for a given block size as all other will surely fail too.*/
                        if (!sgen_ptr_in_nursery (obj)) {
                                int size_index;
                                block = MS_BLOCK_FOR_OBJ (obj);
                                size_index = block->obj_size_index;
                                evacuate_block_obj_sizes [size_index] = FALSE;
                                MS_MARK_OBJECT_AND_ENQUEUE (obj, block, queue);
                        }
                        return;
                }
                *ptr = obj;

                /*
                 * FIXME: See comment for copy_object_no_checks().  If
                 * we have that, we can let the allocation function
                 * give us the block info, too, and we won't have to
                 * re-fetch it.
                 *
                 * FIXME (2): We should rework this to avoid all those nursery checks.
                 */
                /*
                 * For the split nursery allocator the object might
                 * still be in the nursery despite having being
                 * promoted, in which case we can't mark it.
                 */
                if (!sgen_ptr_in_nursery (obj)) {
                        block = MS_BLOCK_FOR_OBJ (obj);
                        MS_CALC_MARK_BIT (word, bit, obj);
                        SGEN_ASSERT (9, !MS_MARK_BIT (block, word, bit), "object %p already marked", obj);
                        MS_SET_MARK_BIT (block, word, bit);
                        binary_protocol_mark (obj, (gpointer)LOAD_VTABLE (obj), sgen_safe_object_get_size ((MonoObject*)obj));
                }
        } else {
                char *forwarded;
#ifdef FIXED_HEAP
                if (MS_PTR_IN_SMALL_MAJOR_HEAP (obj))
#else
                mword objsize;

                /*
                 * If we have don't have a fixed heap we cannot know
                 * whether an object is in the LOS or in the small
                 * object major heap without checking its size.  To do
                 * that, however, we need to know that we actually
                 * have a valid object, not a forwarding pointer, so
                 * we have to do this check first.
                 */
                if ((forwarded = SGEN_OBJECT_IS_FORWARDED (obj))) {
                        *ptr = forwarded;
                        return;
                }

                objsize = SGEN_ALIGN_UP (sgen_safe_object_get_size ((MonoObject*)obj));

                if (objsize <= SGEN_MAX_SMALL_OBJ_SIZE)
#endif
                {
                        int size_index;
                        gboolean evacuate;

                        block = MS_BLOCK_FOR_OBJ (obj);
                        size_index = block->obj_size_index;
                        evacuate = evacuate_block_obj_sizes [size_index];

#ifdef FIXED_HEAP
                        /*
                         * We could also check for !block->has_pinned
                         * here, but it would only make an uncommon case
                         * faster, namely objects that are in blocks
                         * whose slot sizes are evacuated but which have
                         * pinned objects.
                         */
                        if (evacuate && (forwarded = SGEN_OBJECT_IS_FORWARDED (obj))) {
                                *ptr = forwarded;
                                return;
                        }
#endif

                        if (evacuate && !block->has_pinned) {
                                g_assert (!SGEN_OBJECT_IS_PINNED (obj));
                                if (block->is_to_space)
                                        return;
                                HEAVY_STAT (++stat_major_objects_evacuated);
                                goto do_copy_object;
                        } else {
                                MS_MARK_OBJECT_AND_ENQUEUE (obj, block, queue);
                        }
                } else {
                        /* LOS object: pin in place and gray it if it has references */
                        if (sgen_los_object_is_pinned (obj))
                                return;
                        binary_protocol_pin (obj, (gpointer)SGEN_LOAD_VTABLE (obj), sgen_safe_object_get_size ((MonoObject*)obj));

#ifdef ENABLE_DTRACE
                        if (G_UNLIKELY (MONO_GC_OBJ_PINNED_ENABLED ())) {
                                MonoVTable *vt = (MonoVTable*)SGEN_LOAD_VTABLE (obj);
                                MONO_GC_OBJ_PINNED ((mword)obj, sgen_safe_object_get_size (obj), vt->klass->name_space, vt->klass->name, GENERATION_OLD);
                        }
#endif

                        sgen_los_pin_object (obj);
                        if (SGEN_OBJECT_HAS_REFERENCES (obj))
                                GRAY_OBJECT_ENQUEUE (queue, obj);
                }
        }
}
1464 #endif
1465
/* Canonical one-slot entry point: dereference PTR and forward. */
static void
major_copy_or_mark_object_canonical (void **ptr, SgenGrayQueue *queue)
{
        major_copy_or_mark_object (ptr, *ptr, queue);
}
1471
1472 #ifdef SGEN_HAVE_CONCURRENT_MARK
/* Canonical one-slot entry point for concurrent mark: dereference PTR and forward. */
static void
major_copy_or_mark_object_concurrent_canonical (void **ptr, SgenGrayQueue *queue)
{
        major_copy_or_mark_object_concurrent (ptr, *ptr, queue);
}
1478
/*
 * Return the number of major objects marked since the last call and
 * reset the counter.  Always 0 when counting is compiled out.
 */
static long long
major_get_and_reset_num_major_objects_marked (void)
{
#ifdef SGEN_COUNT_NUMBER_OF_MAJOR_OBJECTS_MARKED
        long long num = num_major_objects_marked;
        num_major_objects_marked = 0;
        return num;
#else
        return 0;
#endif
}
1490 #endif
1491
1492 #include "sgen-major-scan-object.h"
1493
1494 #ifdef SGEN_HAVE_CONCURRENT_MARK
1495 #define SCAN_FOR_CONCURRENT_MARK
1496 #include "sgen-major-scan-object.h"
1497 #undef SCAN_FOR_CONCURRENT_MARK
1498 #endif
1499
/*
 * Mark and gray the objects referenced by BLOCK's pin queue.  Entries
 * resolving to the same slot index as the previous one are skipped via
 * last_index; the CHECKED marking macro ignores entries that resolve
 * to unallocated slots.
 */
static void
mark_pinned_objects_in_block (MSBlockInfo *block, SgenGrayQueue *queue)
{
        int i;
        int last_index = -1;

        if (!block->pin_queue_num_entries)
                return;

        block->has_pinned = TRUE;

        for (i = 0; i < block->pin_queue_num_entries; ++i) {
                int index = MS_BLOCK_OBJ_INDEX (block->pin_queue_start [i], block);
                SGEN_ASSERT (9, index >= 0 && index < MS_BLOCK_FREE / block->obj_size, "invalid object %p index %d max-index %d", block->pin_queue_start [i], index, MS_BLOCK_FREE / block->obj_size);
                if (index == last_index)
                        continue;
                MS_MARK_OBJECT_AND_ENQUEUE_CHECKED (MS_BLOCK_OBJ (block, index), block, queue);
                last_index = index;
        }
}
1520
/*
 * Sweep one block: every allocated-but-unmarked slot is reported,
 * zeroed, and threaded onto the block's free list.  OBJ_SIZE is passed
 * explicitly so callers can instantiate constant-size specializations
 * that the compiler can inline (see sweep_block).
 */
static inline void
sweep_block_for_size (MSBlockInfo *block, int count, int obj_size)
{
        int obj_index;

        for (obj_index = 0; obj_index < count; ++obj_index) {
                int word, bit;
                void *obj = MS_BLOCK_OBJ_FOR_SIZE (block, obj_index, obj_size);

                MS_CALC_MARK_BIT (word, bit, obj);
                if (MS_MARK_BIT (block, word, bit)) {
                        SGEN_ASSERT (9, MS_OBJ_ALLOCED (obj, block), "object %p not allocated", obj);
                } else {
                        /* an unmarked object */
                        if (MS_OBJ_ALLOCED (obj, block)) {
                                /*
                                 * FIXME: Merge consecutive
                                 * slots for lower reporting
                                 * overhead.  Maybe memset
                                 * will also benefit?
                                 */
                                binary_protocol_empty (obj, obj_size);
                                MONO_GC_MAJOR_SWEPT ((mword)obj, obj_size);
                                memset (obj, 0, obj_size);
                        }
                        /* thread the freed slot onto the free list */
                        *(void**)obj = block->free_list;
                        block->free_list = obj;
                }
        }
}
1551
1552 /*
1553  * sweep_block:
1554  *
1555  *   Traverse BLOCK, freeing and zeroing unused objects.
1556  */
static void
sweep_block (MSBlockInfo *block, gboolean during_major_collection)
{
        int count;
        void *reversed = NULL;

        if (!during_major_collection)
                g_assert (!sgen_concurrent_collection_in_progress ());

        /* with lazy sweeping the block may already have been swept on demand */
        if (block->swept)
                return;

        count = MS_BLOCK_FREE / block->obj_size;

        /* rebuild the free list from scratch */
        block->free_list = NULL;

        /* Use inline instances specialized to constant sizes, this allows the compiler to replace the memset calls with inline code */
        // FIXME: Add more sizes
        switch (block->obj_size) {
        case 16:
                sweep_block_for_size (block, count, 16);
                break;
        default:
                sweep_block_for_size (block, count, block->obj_size);
                break;
        }

        /* reset mark bits */
        memset (block->mark_words, 0, sizeof (mword) * MS_NUM_MARK_WORDS);

        /* Reverse free list so that it's in address order */
        reversed = NULL;
        while (block->free_list) {
                void *next = *(void**)block->free_list;
                *(void**)block->free_list = reversed;
                reversed = block->free_list;
                block->free_list = next;
        }
        block->free_list = reversed;

        block->swept = 1;
}
1599
1600 static inline int
1601 bitcount (mword d)
1602 {
1603         int count = 0;
1604
1605 #ifdef __GNUC__
1606         if (sizeof (mword) == sizeof (unsigned long))
1607                 count += __builtin_popcountl (d);
1608         else
1609                 count += __builtin_popcount (d);
1610 #else
1611         while (d) {
1612                 count ++;
1613                 d &= (d - 1);
1614         }
1615 #endif
1616         return count;
1617 }
1618
/*
 * ms_sweep:
 *
 *   Sweep the whole major heap: rebuild the free-block lists, free or
 * lazily defer sweeping of partially used blocks, return fully empty
 * blocks to the OS layer, and gather per-size occupancy statistics used
 * to decide which slot sizes get evacuated in the next major collection.
 */
static void
ms_sweep (void)
{
	int i;
	MSBlockInfo **iter;

	/* statistics for evacuation */
	int *slots_available = alloca (sizeof (int) * num_block_obj_sizes);
	int *slots_used = alloca (sizeof (int) * num_block_obj_sizes);
	int *num_blocks = alloca (sizeof (int) * num_block_obj_sizes);

#ifdef SGEN_HAVE_CONCURRENT_MARK
	mword total_evacuate_heap = 0;
	mword total_evacuate_saved = 0;
#endif

	for (i = 0; i < num_block_obj_sizes; ++i)
		slots_available [i] = slots_used [i] = num_blocks [i] = 0;

	/* clear all the free lists */
	for (i = 0; i < MS_BLOCK_TYPE_MAX; ++i) {
		MSBlockInfo **free_blocks = free_block_lists [i];
		int j;
		for (j = 0; j < num_block_obj_sizes; ++j)
			free_blocks [j] = NULL;
	}

	/* traverse all blocks, free and zero unmarked objects */
	iter = &all_blocks;
	while (*iter) {
		MSBlockInfo *block = *iter;
		int count;
		gboolean have_live = FALSE;
		gboolean has_pinned;
		gboolean have_free = FALSE;
		int obj_size_index;
		int nused = 0;

		obj_size_index = block->obj_size_index;

		/* Remember the old pinned state; the new state comes from the block's pinnedness. */
		has_pinned = block->has_pinned;
		block->has_pinned = block->pinned;

		block->is_to_space = FALSE;
		/* Mark unswept so that sweep_block () (now or lazily later) does the work. */
		block->swept = 0;

		count = MS_BLOCK_FREE / block->obj_size;

#ifdef SGEN_HAVE_CONCURRENT_MARK
		/* The mod-union card data is only valid for one concurrent cycle. */
		if (block->cardtable_mod_union) {
			sgen_free_internal_dynamic (block->cardtable_mod_union, CARDS_PER_BLOCK, INTERNAL_MEM_CARDTABLE_MOD_UNION);
			block->cardtable_mod_union = NULL;
		}
#endif

		/* Count marked objects in the block */
		for (i = 0; i < MS_NUM_MARK_WORDS; ++i) {
			nused += bitcount (block->mark_words [i]);
		}
		if (nused) {
			have_live = TRUE;
		}
		if (nused < count)
			have_free = TRUE;

		/* With lazy sweeping the block is swept on first use instead. */
		if (!lazy_sweep)
			sweep_block (block, TRUE);

		if (have_live) {
			/* Pinned blocks are excluded from the evacuation statistics. */
			if (!has_pinned) {
				++num_blocks [obj_size_index];
				slots_used [obj_size_index] += nused;
				slots_available [obj_size_index] += count;
			}

			iter = &block->next;

			/*
			 * If there are free slots in the block, add
			 * the block to the corresponding free list.
			 */
			if (have_free) {
				MSBlockInfo **free_blocks = FREE_BLOCKS (block->pinned, block->has_references);
				int index = MS_BLOCK_OBJ_SIZE_INDEX (block->obj_size);
				block->next_free = free_blocks [index];
				free_blocks [index] = block;
			}

			update_heap_boundaries_for_block (block);
		} else {
			/*
			 * Blocks without live objects are removed from the
			 * block list and freed.
			 */
			*iter = block->next;

			binary_protocol_empty (MS_BLOCK_OBJ (block, 0), (char*)MS_BLOCK_OBJ (block, count) - (char*)MS_BLOCK_OBJ (block, 0));
#ifdef FIXED_HEAP
			ms_free_block (block);
#else
			ms_free_block (block->block);

			sgen_free_internal (block, INTERNAL_MEM_MS_BLOCK_INFO);
#endif

			--num_major_sections;
		}
	}

	/*
	 * Decide, per slot size, whether to evacuate: only when there are
	 * enough blocks (>5) and occupancy is below the threshold.  NOTE: if
	 * slots_available [i] is 0 the division yields NaN/inf and the
	 * comparison is false, so such sizes are (correctly) not evacuated.
	 */
	for (i = 0; i < num_block_obj_sizes; ++i) {
		float usage = (float)slots_used [i] / (float)slots_available [i];
		if (num_blocks [i] > 5 && usage < evacuation_threshold) {
			evacuate_block_obj_sizes [i] = TRUE;
			/*
			g_print ("slot size %d - %d of %d used\n",
					block_obj_sizes [i], slots_used [i], slots_available [i]);
			*/
		} else {
			evacuate_block_obj_sizes [i] = FALSE;
		}
#ifdef SGEN_HAVE_CONCURRENT_MARK
		{
			mword total_bytes = block_obj_sizes [i] * slots_available [i];
			total_evacuate_heap += total_bytes;
			if (evacuate_block_obj_sizes [i])
				total_evacuate_saved += total_bytes - block_obj_sizes [i] * slots_used [i];
		}
#endif
	}

#ifdef SGEN_HAVE_CONCURRENT_MARK
	/* Evacuating enough of the heap makes a non-concurrent collection worthwhile. */
	want_evacuation = (float)total_evacuate_saved / (float)total_evacuate_heap > (1 - concurrent_evacuation_threshold);
#endif

	have_swept = TRUE;
}
1755
/* Major-collector vtable entry point for sweeping; delegates to ms_sweep (). */
static void
major_sweep (void)
{
	ms_sweep ();
}
1761
/* Object counters filled in by the two callbacks below; used only by the
 * debugging helper count_ref_nonref_objs (). */
static int count_pinned_ref;
static int count_pinned_nonref;
static int count_nonpinned_ref;
static int count_nonpinned_nonref;
1766
1767 static void
1768 count_nonpinned_callback (char *obj, size_t size, void *data)
1769 {
1770         MonoVTable *vtable = (MonoVTable*)LOAD_VTABLE (obj);
1771
1772         if (vtable->klass->has_references)
1773                 ++count_nonpinned_ref;
1774         else
1775                 ++count_nonpinned_nonref;
1776 }
1777
1778 static void
1779 count_pinned_callback (char *obj, size_t size, void *data)
1780 {
1781         MonoVTable *vtable = (MonoVTable*)LOAD_VTABLE (obj);
1782
1783         if (vtable->klass->has_references)
1784                 ++count_pinned_ref;
1785         else
1786                 ++count_pinned_nonref;
1787 }
1788
1789 static G_GNUC_UNUSED void
1790 count_ref_nonref_objs (void)
1791 {
1792         int total;
1793
1794         count_pinned_ref = 0;
1795         count_pinned_nonref = 0;
1796         count_nonpinned_ref = 0;
1797         count_nonpinned_nonref = 0;
1798
1799         major_iterate_objects (ITERATE_OBJECTS_SWEEP_NON_PINNED, count_nonpinned_callback, NULL);
1800         major_iterate_objects (ITERATE_OBJECTS_SWEEP_PINNED, count_pinned_callback, NULL);
1801
1802         total = count_pinned_nonref + count_nonpinned_nonref + count_pinned_ref + count_nonpinned_ref;
1803
1804         g_print ("ref: %d pinned %d non-pinned   non-ref: %d pinned %d non-pinned  --  %.1f\n",
1805                         count_pinned_ref, count_nonpinned_ref,
1806                         count_pinned_nonref, count_nonpinned_nonref,
1807                         (count_pinned_nonref + count_nonpinned_nonref) * 100.0 / total);
1808 }
1809
1810 static int
1811 ms_calculate_block_obj_sizes (double factor, int *arr)
1812 {
1813         double target_size = sizeof (MonoObject);
1814         int num_sizes = 0;
1815         int last_size = 0;
1816
1817         do {
1818                 int target_count = (int)ceil (MS_BLOCK_FREE / target_size);
1819                 int size = MIN ((MS_BLOCK_FREE / target_count) & ~(SGEN_ALLOC_ALIGN - 1), SGEN_MAX_SMALL_OBJ_SIZE);
1820
1821                 if (size != last_size) {
1822                         if (arr)
1823                                 arr [num_sizes] = size;
1824                         ++num_sizes;
1825                         last_size = size;
1826                 }
1827
1828                 target_size *= factor;
1829         } while (last_size < SGEN_MAX_SMALL_OBJ_SIZE);
1830
1831         return num_sizes;
1832 }
1833
/* only valid during minor collections */
/* Snapshot of num_major_sections taken in major_start_nursery_collection (). */
static mword old_num_major_sections;
1836
/*
 * Called when a nursery collection starts: remember the current major
 * section count so the finish hook can report how many were allocated
 * during the minor collection.
 */
static void
major_start_nursery_collection (void)
{
#ifdef MARKSWEEP_CONSISTENCY_CHECK
	consistency_check ();
#endif

	old_num_major_sections = num_major_sections;
}
1846
static void
major_finish_nursery_collection (void)
{
#ifdef MARKSWEEP_CONSISTENCY_CHECK
	consistency_check ();
#endif
	/* Report the net number of major sections allocated during this minor collection. */
	sgen_register_major_sections_alloced (num_major_sections - old_num_major_sections);
}
1855
1856 static void
1857 major_start_major_collection (void)
1858 {
1859         int i;
1860
1861         /* clear the free lists */
1862         for (i = 0; i < num_block_obj_sizes; ++i) {
1863                 if (!evacuate_block_obj_sizes [i])
1864                         continue;
1865
1866                 free_block_lists [0][i] = NULL;
1867                 free_block_lists [MS_BLOCK_FLAG_REFS][i] = NULL;
1868         }
1869
1870         // Sweep all unswept blocks
1871         if (lazy_sweep) {
1872                 MSBlockInfo **iter;
1873
1874                 MONO_GC_SWEEP_BEGIN (GENERATION_OLD, TRUE);
1875
1876                 iter = &all_blocks;
1877                 while (*iter) {
1878                         MSBlockInfo *block = *iter;
1879
1880                         sweep_block (block, TRUE);
1881
1882                         iter = &block->next;
1883                 }
1884
1885                 MONO_GC_SWEEP_END (GENERATION_OLD, TRUE);
1886         }
1887 }
1888
/* Major-collection finish hook; mark&sweep has no work to do here. */
static void
major_finish_major_collection (void)
{
}
1893
1894 #if !defined(FIXED_HEAP) && SIZEOF_VOID_P != 8
/* qsort comparator ordering block addresses ascending. */
static int
compare_pointers (const void *va, const void *vb) {
	char *first = *(char**)va;
	char *second = *(char**)vb;
	if (first == second)
		return 0;
	return first < second ? -1 : 1;
}
1904 #endif
1905
/*
 * Called once the minor collection allowance has been computed.  Releases
 * empty blocks back to the OS until only section_reserve of them remain.
 * On 32-bit we first sort the empty blocks by address and free runs of
 * contiguous blocks (to return memory in the same granularity it was
 * allocated in), falling back to freeing individual blocks.
 */
static void
major_have_computer_minor_collection_allowance (void)
{
#ifndef FIXED_HEAP
	/* Number of empty blocks we want to keep around as a reserve. */
	size_t section_reserve = sgen_get_minor_collection_allowance () / MS_BLOCK_SIZE;

	g_assert (have_swept);

#if SIZEOF_VOID_P != 8
	{
		int i, num_empty_blocks_orig, num_blocks, arr_length;
		void *block;
		void **empty_block_arr;
		void **rebuild_next;

#ifdef TARGET_WIN32
		/*
		 * sgen_free_os_memory () asserts in mono_vfree () because windows doesn't like freeing the middle of
		 * a VirtualAlloc ()-ed block.
		 */
		return;
#endif

		if (num_empty_blocks <= section_reserve)
			return;
		SGEN_ASSERT (0, num_empty_blocks > 0, "section reserve can't be negative");

		num_empty_blocks_orig = num_empty_blocks;
		empty_block_arr = (void**)sgen_alloc_internal_dynamic (sizeof (void*) * num_empty_blocks_orig,
				INTERNAL_MEM_MS_BLOCK_INFO_SORT, FALSE);
		if (!empty_block_arr)
			goto fallback;

		/* Flatten the empty-block free list into an array for sorting. */
		i = 0;
		for (block = empty_blocks; block; block = *(void**)block)
			empty_block_arr [i++] = block;
		SGEN_ASSERT (0, i == num_empty_blocks, "empty block count wrong");

		sgen_qsort (empty_block_arr, num_empty_blocks, sizeof (void*), compare_pointers);

		/*
		 * We iterate over the free blocks, trying to find MS_BLOCK_ALLOC_NUM
		 * contiguous ones.  If we do, we free them.  If that's not enough to get to
		 * section_reserve, we halve the number of contiguous blocks we're looking
		 * for and have another go, until we're done with looking for pairs of
		 * blocks, at which point we give up and go to the fallback.
		 */
		arr_length = num_empty_blocks_orig;
		num_blocks = MS_BLOCK_ALLOC_NUM;
		while (num_empty_blocks > section_reserve && num_blocks > 1) {
			/* first: array index where the current contiguous run started, -1 if none. */
			int first = -1;
			/* dest: compaction cursor -- surviving entries are shifted down to here. */
			int dest = 0;

			dest = 0;
			for (i = 0; i < arr_length; ++i) {
				int d = dest;
				void *block = empty_block_arr [i];
				SGEN_ASSERT (0, block, "we're not shifting correctly");
				/* Compact: move surviving entries left over the NULLed (freed) ones. */
				if (i != dest) {
					empty_block_arr [dest] = block;
					/*
					 * This is not strictly necessary, but we're
					 * cautious.
					 */
					empty_block_arr [i] = NULL;
				}
				++dest;

				if (first < 0) {
					first = d;
					continue;
				}

				SGEN_ASSERT (0, first >= 0 && d > first, "algorithm is wrong");

				/* Not adjacent to the previous block: start a new run here. */
				if ((char*)block != ((char*)empty_block_arr [d-1]) + MS_BLOCK_SIZE) {
					first = d;
					continue;
				}

				if (d + 1 - first == num_blocks) {
					/*
					 * We found num_blocks contiguous blocks.  Free them
					 * and null their array entries.  As an optimization
					 * we could, instead of nulling the entries, shift
					 * the following entries over to the left, while
					 * we're iterating.
					 */
					int j;
					sgen_free_os_memory (empty_block_arr [first], MS_BLOCK_SIZE * num_blocks, SGEN_ALLOC_HEAP);
					for (j = first; j <= d; ++j)
						empty_block_arr [j] = NULL;
					dest = first;
					first = -1;

					num_empty_blocks -= num_blocks;

					stat_major_blocks_freed += num_blocks;
					if (num_blocks == MS_BLOCK_ALLOC_NUM)
						stat_major_blocks_freed_ideal += num_blocks;
					else
						stat_major_blocks_freed_less_ideal += num_blocks;

				}
			}

			SGEN_ASSERT (0, dest <= i && dest <= arr_length, "array length is off");
			arr_length = dest;
			SGEN_ASSERT (0, arr_length == num_empty_blocks, "array length is off");

			/* Look for shorter contiguous runs on the next pass. */
			num_blocks >>= 1;
		}

		/* rebuild empty_blocks free list */
		rebuild_next = (void**)&empty_blocks;
		for (i = 0; i < arr_length; ++i) {
			void *block = empty_block_arr [i];
			SGEN_ASSERT (0, block, "we're missing blocks");
			*rebuild_next = block;
			rebuild_next = (void**)block;
		}
		*rebuild_next = NULL;

		/* free array */
		sgen_free_internal_dynamic (empty_block_arr, sizeof (void*) * num_empty_blocks_orig, INTERNAL_MEM_MS_BLOCK_INFO_SORT);
	}

	SGEN_ASSERT (0, num_empty_blocks >= 0, "we freed more blocks than we had in the first place?");

 fallback:
	/*
	 * This is our threshold.  If there's not more empty than used blocks, we won't
	 * release uncontiguous blocks, in fear of fragmenting the address space.
	 */
	if (num_empty_blocks <= num_major_sections)
		return;
#endif

	/* Free single blocks from the head of the list until we reach the reserve. */
	while (num_empty_blocks > section_reserve) {
		void *next = *(void**)empty_blocks;
		sgen_free_os_memory (empty_blocks, MS_BLOCK_SIZE, SGEN_ALLOC_HEAP);
		empty_blocks = next;
		/*
		 * Needs not be atomic because this is running
		 * single-threaded.
		 */
		--num_empty_blocks;

		++stat_major_blocks_freed;
#if SIZEOF_VOID_P != 8
		++stat_major_blocks_freed_individual;
#endif
	}
#endif
}
2061
/*
 * For every block, find the range of pin-queue entries that fall inside the
 * block's payload and record its start and entry count on the block.
 * NOTE(review): the queue parameter is unused here.
 */
static void
major_find_pin_queue_start_ends (SgenGrayQueue *queue)
{
	MSBlockInfo *block;

	FOREACH_BLOCK (block) {
		block->pin_queue_start = sgen_find_optimized_pin_queue_area (block->block + MS_BLOCK_SKIP, block->block + MS_BLOCK_SIZE,
				&block->pin_queue_num_entries);
	} END_FOREACH_BLOCK;
}
2072
/* Process each block's pinned objects (per the pin-queue ranges recorded on
 * the blocks), pushing work onto QUEUE. */
static void
major_pin_objects (SgenGrayQueue *queue)
{
	MSBlockInfo *block;

	FOREACH_BLOCK (block) {
		mark_pinned_objects_in_block (block, queue);
	} END_FOREACH_BLOCK;
}
2082
/* To-space initialization hook; nothing to set up for this collector. */
static void
major_init_to_space (void)
{
}
2087
/* Not implemented for the mark&sweep major collector. */
static void
major_report_pinned_memory_usage (void)
{
	g_assert_not_reached ();
}
2093
2094 static gint64
2095 major_get_used_size (void)
2096 {
2097         gint64 size = 0;
2098         MSBlockInfo *block;
2099
2100         FOREACH_BLOCK (block) {
2101                 int count = MS_BLOCK_FREE / block->obj_size;
2102                 void **iter;
2103                 size += count * block->obj_size;
2104                 for (iter = block->free_list; iter; iter = (void**)*iter)
2105                         size -= block->obj_size;
2106         } END_FOREACH_BLOCK;
2107
2108         return size;
2109 }
2110
/* Current number of major heap sections (one per MS block). */
static size_t
get_num_major_sections (void)
{
	return num_major_sections;
}
2116
/*
 * Parse one major-collector GC parameter string.  Returns TRUE if OPT was
 * recognized (and applied), FALSE otherwise.
 */
static gboolean
major_handle_gc_param (const char *opt)
{
#ifdef FIXED_HEAP
	/* major-heap-size=N: fixed heap size, rounded up to whole blocks. */
	if (g_str_has_prefix (opt, "major-heap-size=")) {
		const char *arg = strchr (opt, '=') + 1;
		size_t size;
		if (!mono_gc_parse_environment_string_extract_number (arg, &size))
			return FALSE;
		ms_heap_num_blocks = (size + MS_BLOCK_SIZE - 1) / MS_BLOCK_SIZE;
		g_assert (ms_heap_num_blocks > 0);
		return TRUE;
	} else
#endif
	if (g_str_has_prefix (opt, "evacuation-threshold=")) {
		const char *arg = strchr (opt, '=') + 1;
		int percentage = atoi (arg);
		if (percentage < 0 || percentage > 100) {
			fprintf (stderr, "evacuation-threshold must be an integer in the range 0-100.\n");
			exit (1);
		}
		/* Stored as a fraction; blocks with occupancy below it become evacuation candidates. */
		evacuation_threshold = (float)percentage / 100.0f;
		return TRUE;
	} else if (!strcmp (opt, "lazy-sweep")) {
		lazy_sweep = TRUE;
		return TRUE;
	} else if (!strcmp (opt, "no-lazy-sweep")) {
		lazy_sweep = FALSE;
		return TRUE;
	}

	return FALSE;
}
2150
/* Print the usage text for the parameters accepted by major_handle_gc_param (). */
static void
major_print_gc_param_usage (void)
{
	fprintf (stderr,
			""
#ifdef FIXED_HEAP
			"  major-heap-size=N (where N is an integer, possibly with a k, m or a g suffix)\n"
#endif
			"  evacuation-threshold=P (where P is a percentage, an integer in 0-100)\n"
			"  (no-)lazy-sweep\n"
			);
}
2163
/* Invoke CALLBACK with the address/size of every block that can contain
 * managed references (card-table block iteration). */
static void
major_iterate_live_block_ranges (sgen_cardtable_block_callback callback)
{
	MSBlockInfo *block;

	FOREACH_BLOCK (block) {
		if (block->has_references)
			callback ((mword)block->block, MS_BLOCK_SIZE);
	} END_FOREACH_BLOCK;
}
2174
2175 #ifdef HEAVY_STATISTICS
2176 extern long long marked_cards;
2177 extern long long scanned_cards;
2178 extern long long scanned_objects;
2179 extern long long remarked_cards;
2180 #endif
2181
2182 #define CARD_WORDS_PER_BLOCK (CARDS_PER_BLOCK / SIZEOF_VOID_P)
2183 /*
2184  * MS blocks are 16K aligned.
2185  * Cardtables are 4K aligned, at least.
2186  * This means that the cardtable of a given block is 32 bytes aligned.
2187  */
/*
 * Return a pointer to the first nonzero card byte in a block's card data,
 * or card_data + CARDS_PER_BLOCK when every card is clear.  Scans one mword
 * at a time (valid thanks to the 32-byte alignment noted above).
 */
static guint8*
initial_skip_card (guint8 *card_data)
{
	mword *cards = (mword*)card_data;
	mword card;
	int i;
	for (i = 0; i < CARD_WORDS_PER_BLOCK; ++i) {
		card = cards [i];
		if (card)
			break;
	}

	/* No marked card: return the end so caller loops terminate immediately. */
	if (i == CARD_WORDS_PER_BLOCK)
		return card_data + CARDS_PER_BLOCK;

#if defined(__i386__) && defined(__GNUC__)
	/* ffs gives the least-significant set bit (1-based); /8 turns the bit offset into a byte offset. */
	return card_data + i * 4 +  (__builtin_ffs (card) - 1) / 8;
#elif defined(__x86_64__) && defined(__GNUC__)
	return card_data + i * 8 +  (__builtin_ffsll (card) - 1) / 8;
#elif defined(__s390x__) && defined(__GNUC__)
	/* Big-endian: byte-swap so the bit scan maps to the correct card byte. */
	return card_data + i * 8 +  (__builtin_ffsll (GUINT64_TO_LE(card)) - 1) / 8;
#else
	/* Portable fallback: byte-wise scan starting at the first nonzero word. */
	for (i = i * SIZEOF_VOID_P; i < CARDS_PER_BLOCK; ++i) {
		if (card_data [i])
			return &card_data [i];
	}
	return card_data;
#endif
}
2217
2218
2219 static G_GNUC_UNUSED guint8*
2220 skip_card (guint8 *card_data, guint8 *card_data_end)
2221 {
2222         while (card_data < card_data_end && !*card_data)
2223                 ++card_data;
2224         return card_data;
2225 }
2226
/* Slot index of object O in block payload B with object size OS. */
#define MS_BLOCK_OBJ_INDEX_FAST(o,b,os) (((char*)(o) - ((b) + MS_BLOCK_SKIP)) / (os))
/* Address of the I-th slot of size OS in block payload B. */
#define MS_BLOCK_OBJ_FAST(b,os,i)                       ((b) + MS_BLOCK_SKIP + (os) * (i))
/* A slot is allocated iff its first word is non-NULL and points outside the
 * block: free slots hold NULL or a free-list link into the same block (see
 * sweep), while allocated objects start with a vtable pointer. */
#define MS_OBJ_ALLOCED_FAST(o,b)                (*(void**)(o) && (*(char**)(o) < (b) || *(char**)(o) >= (b) + MS_BLOCK_SIZE))
2230
/*
 * Scan the card table (or, for MOD_UNION, the per-block mod-union cards of
 * a concurrent collection) over all reference-bearing major blocks and
 * gray the objects on marked cards.  Blocks whose objects span multiple
 * cards are walked object-by-object; small-object blocks are walked
 * card-by-card.
 */
static void
major_scan_card_table (gboolean mod_union, SgenGrayQueue *queue)
{
	MSBlockInfo *block;
	ScanObjectFunc scan_func = sgen_get_current_object_ops ()->scan_object;

#ifdef SGEN_HAVE_CONCURRENT_MARK
	/* Mod-union scanning only makes sense for a concurrent mark. */
	if (!concurrent_mark)
		g_assert (!mod_union);
#else
	g_assert (!mod_union);
#endif

	FOREACH_BLOCK (block) {
		int block_obj_size;
		char *block_start;

		if (!block->has_references)
			continue;

		block_obj_size = block->obj_size;
		block_start = block->block;

		if (block_obj_size >= CARD_SIZE_IN_BYTES) {
			/* Large objects: each object covers one card or more, so iterate objects. */
			guint8 *cards;
#ifndef SGEN_HAVE_OVERLAPPING_CARDS
			guint8 cards_data [CARDS_PER_BLOCK];
#endif
			char *obj, *end, *base;

			if (mod_union) {
#ifdef SGEN_HAVE_CONCURRENT_MARK
				cards = block->cardtable_mod_union;
				/*
				 * This happens when the nursery
				 * collection that precedes finishing
				 * the concurrent collection allocates
				 * new major blocks.
				 */
				if (!cards)
					continue;
#endif
			} else {
			/*We can avoid the extra copy since the remark cardtable was cleaned before */
#ifdef SGEN_HAVE_OVERLAPPING_CARDS
				cards = sgen_card_table_get_card_scan_address ((mword)block_start);
#else
				cards = cards_data;
				if (!sgen_card_table_get_card_data (cards_data, (mword)block_start, CARDS_PER_BLOCK))
					continue;
#endif
			}

			obj = (char*)MS_BLOCK_OBJ_FAST (block_start, block_obj_size, 0);
			end = block_start + MS_BLOCK_SIZE;
			base = sgen_card_table_align_pointer (obj);

			while (obj < end) {
				size_t card_offset;

				/* Lazy sweep: make sure the block's free/alloced state is settled. */
				if (!block->swept)
					sweep_block (block, FALSE);

				if (!MS_OBJ_ALLOCED_FAST (obj, block_start))
					goto next_large;

				/* For mod-union, only scan objects marked during concurrent mark. */
				if (mod_union) {
					/* FIXME: do this more efficiently */
					int w, b;
					MS_CALC_MARK_BIT (w, b, obj);
					if (!MS_MARK_BIT (block, w, b))
						goto next_large;
				}

				card_offset = (obj - base) >> CARD_BITS;
				sgen_cardtable_scan_object (obj, block_obj_size, cards + card_offset, mod_union, queue);

			next_large:
				obj += block_obj_size;
			}
		} else {
			guint8 *card_data, *card_base;
			guint8 *card_data_end;

			/*
			 * This is safe in face of card aliasing for the following reason:
			 *
			 * Major blocks are 16k aligned, or 32 cards aligned.
			 * Cards aliasing happens in powers of two, so as long as major blocks are aligned to their
			 * sizes, they won't overflow the cardtable overlap modulus.
			 */
			if (mod_union) {
#ifdef SGEN_HAVE_CONCURRENT_MARK
				card_data = card_base = block->cardtable_mod_union;
				/*
				 * This happens when the nursery
				 * collection that precedes finishing
				 * the concurrent collection allocates
				 * new major blocks.
				 */
				if (!card_data)
					continue;
#else
				g_assert_not_reached ();
				card_data = NULL;
#endif
			} else {
				card_data = card_base = sgen_card_table_get_card_scan_address ((mword)block_start);
			}
			card_data_end = card_data + CARDS_PER_BLOCK;

			for (card_data = initial_skip_card (card_data); card_data < card_data_end; ++card_data) { //card_data = skip_card (card_data + 1, card_data_end)) {
				size_t index;
				size_t idx = card_data - card_base;
				/* Memory range covered by this card. */
				char *start = (char*)(block_start + idx * CARD_SIZE_IN_BYTES);
				char *end = start + CARD_SIZE_IN_BYTES;
				char *first_obj, *obj;

				HEAVY_STAT (++scanned_cards);

				if (!*card_data)
					continue;

				if (!block->swept)
					sweep_block (block, FALSE);

				HEAVY_STAT (++marked_cards);

				sgen_card_table_prepare_card_for_scanning (card_data);

				/* First object whose start falls at or after this card's range. */
				if (idx == 0)
					index = 0;
				else
					index = MS_BLOCK_OBJ_INDEX_FAST (start, block_start, block_obj_size);

				obj = first_obj = (char*)MS_BLOCK_OBJ_FAST (block_start, block_obj_size, index);
				while (obj < end) {
					if (!MS_OBJ_ALLOCED_FAST (obj, block_start))
						goto next_small;

					if (mod_union) {
						/* FIXME: do this more efficiently */
						int w, b;
						MS_CALC_MARK_BIT (w, b, obj);
						if (!MS_MARK_BIT (block, w, b))
							goto next_small;
					}

					HEAVY_STAT (++scanned_objects);
					scan_func (obj, queue);
				next_small:
					obj += block_obj_size;
				}
				HEAVY_STAT (if (*card_data) ++remarked_cards);
				binary_protocol_card_scan (first_obj, obj - first_obj);
			}
		}
	} END_FOREACH_BLOCK;
}
2390
2391 static void
2392 major_count_cards (long long *num_total_cards, long long *num_marked_cards)
2393 {
2394         MSBlockInfo *block;
2395         long long total_cards = 0;
2396         long long marked_cards = 0;
2397
2398         FOREACH_BLOCK (block) {
2399                 guint8 *cards = sgen_card_table_get_card_scan_address ((mword) block->block);
2400                 int i;
2401
2402                 if (!block->has_references)
2403                         continue;
2404
2405                 total_cards += CARDS_PER_BLOCK;
2406                 for (i = 0; i < CARDS_PER_BLOCK; ++i) {
2407                         if (cards [i])
2408                                 ++marked_cards;
2409                 }
2410         } END_FOREACH_BLOCK;
2411
2412         *num_total_cards = total_cards;
2413         *num_marked_cards = marked_cards;
2414 }
2415
2416 #ifdef SGEN_HAVE_CONCURRENT_MARK
2417 static void
2418 update_cardtable_mod_union (void)
2419 {
2420         MSBlockInfo *block;
2421
2422         FOREACH_BLOCK (block) {
2423                 size_t num_cards;
2424
2425                 block->cardtable_mod_union = sgen_card_table_update_mod_union (block->cardtable_mod_union,
2426                                 block->block, MS_BLOCK_SIZE, &num_cards);
2427
2428                 SGEN_ASSERT (0, num_cards == CARDS_PER_BLOCK, "Number of cards calculation is wrong");
2429         } END_FOREACH_BLOCK;
2430 }
2431
2432 static guint8*
2433 major_get_cardtable_mod_union_for_object (char *obj)
2434 {
2435         MSBlockInfo *block = MS_BLOCK_FOR_OBJ (obj);
2436         return &block->cardtable_mod_union [(obj - (char*)sgen_card_table_align_pointer (block->block)) >> CARD_BITS];
2437 }
2438 #endif
2439
2440 static void
2441 alloc_free_block_lists (MSBlockInfo ***lists)
2442 {
2443         int i;
2444         for (i = 0; i < MS_BLOCK_TYPE_MAX; ++i)
2445                 lists [i] = sgen_alloc_internal_dynamic (sizeof (MSBlockInfo*) * num_block_obj_sizes, INTERNAL_MEM_MS_TABLES, TRUE);
2446 }
2447
2448 #ifdef SGEN_PARALLEL_MARK
2449 static void*
2450 major_alloc_worker_data (void)
2451 {
2452         /* FIXME: free this when the workers come down */
2453         MSBlockInfo ***lists = malloc (sizeof (MSBlockInfo**) * MS_BLOCK_TYPE_MAX);
2454         alloc_free_block_lists (lists);
2455         return lists;
2456 }
2457
2458 static void
2459 major_init_worker_thread (void *data)
2460 {
2461         MSBlockInfo ***lists = data;
2462         int i;
2463
2464         g_assert (lists && lists != free_block_lists);
2465         for (i = 0; i < MS_BLOCK_TYPE_MAX; ++i) {
2466                 int j;
2467                 for (j = 0; j < num_block_obj_sizes; ++j)
2468                         g_assert (!lists [i][j]);
2469         }
2470
2471 #ifdef HAVE_KW_THREAD
2472         workers_free_block_lists = data;
2473 #else
2474         mono_native_tls_set_value (workers_free_block_lists_key, data);
2475 #endif
2476 }
2477
2478 static void
2479 major_reset_worker_data (void *data)
2480 {
2481         MSBlockInfo ***lists = data;
2482         int i;
2483         for (i = 0; i < MS_BLOCK_TYPE_MAX; ++i) {
2484                 int j;
2485                 for (j = 0; j < num_block_obj_sizes; ++j)
2486                         lists [i][j] = NULL;
2487         }
2488 }
2489 #endif
2490
2491 #undef pthread_create
2492
/*
 * Installed as collector->post_param_init below; copies the final value
 * of the lazy_sweep setting into the collector struct once GC parameters
 * have been processed.
 */
static void
post_param_init (SgenMajorCollector *collector)
{
	collector->sweeps_lazily = lazy_sweep;
}
2498
2499 #ifdef SGEN_HAVE_CONCURRENT_MARK
2500 static void
2501 sgen_marksweep_init_internal (SgenMajorCollector *collector, gboolean is_concurrent)
2502 #else // SGEN_HAVE_CONCURRENT_MARK
2503 #ifdef SGEN_PARALLEL_MARK
2504 #ifdef FIXED_HEAP
2505 void
2506 sgen_marksweep_fixed_par_init (SgenMajorCollector *collector)
2507 #else // FIXED_HEAP
2508 void
2509 sgen_marksweep_par_init (SgenMajorCollector *collector)
2510 #endif // FIXED_HEAP
2511 #else // SGEN_PARALLEL_MARK
2512 #ifdef FIXED_HEAP
2513 void
2514 sgen_marksweep_fixed_init (SgenMajorCollector *collector)
2515 #else // FIXED_HEAP
2516 #error unknown configuration
2517 #endif // FIXED_HEAP
2518 #endif // SGEN_PARALLEL_MARK
2519 #endif // SGEN_HAVE_CONCURRENT_MARK
2520 {
2521         int i;
2522
2523 #ifndef FIXED_HEAP
2524         sgen_register_fixed_internal_mem_type (INTERNAL_MEM_MS_BLOCK_INFO, sizeof (MSBlockInfo));
2525 #endif
2526
2527         num_block_obj_sizes = ms_calculate_block_obj_sizes (MS_BLOCK_OBJ_SIZE_FACTOR, NULL);
2528         block_obj_sizes = sgen_alloc_internal_dynamic (sizeof (int) * num_block_obj_sizes, INTERNAL_MEM_MS_TABLES, TRUE);
2529         ms_calculate_block_obj_sizes (MS_BLOCK_OBJ_SIZE_FACTOR, block_obj_sizes);
2530
2531         evacuate_block_obj_sizes = sgen_alloc_internal_dynamic (sizeof (gboolean) * num_block_obj_sizes, INTERNAL_MEM_MS_TABLES, TRUE);
2532         for (i = 0; i < num_block_obj_sizes; ++i)
2533                 evacuate_block_obj_sizes [i] = FALSE;
2534
2535         /*
2536         {
2537                 int i;
2538                 g_print ("block object sizes:\n");
2539                 for (i = 0; i < num_block_obj_sizes; ++i)
2540                         g_print ("%d\n", block_obj_sizes [i]);
2541         }
2542         */
2543
2544         alloc_free_block_lists (free_block_lists);
2545
2546         for (i = 0; i < MS_NUM_FAST_BLOCK_OBJ_SIZE_INDEXES; ++i)
2547                 fast_block_obj_size_indexes [i] = ms_find_block_obj_size_index (i * 8);
2548         for (i = 0; i < MS_NUM_FAST_BLOCK_OBJ_SIZE_INDEXES * 8; ++i)
2549                 g_assert (MS_BLOCK_OBJ_SIZE_INDEX (i) == ms_find_block_obj_size_index (i));
2550
2551 #ifdef SGEN_PARALLEL_MARK
2552         LOCK_INIT (ms_block_list_mutex);
2553 #endif
2554
2555         mono_counters_register ("# major blocks allocated", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_major_blocks_alloced);
2556         mono_counters_register ("# major blocks freed", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_major_blocks_freed);
2557         mono_counters_register ("# major blocks lazy swept", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_major_blocks_lazy_swept);
2558         mono_counters_register ("# major objects evacuated", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_major_objects_evacuated);
2559 #if SIZEOF_VOID_P != 8
2560         mono_counters_register ("# major blocks freed ideally", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_major_blocks_freed_ideal);
2561         mono_counters_register ("# major blocks freed less ideally", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_major_blocks_freed_less_ideal);
2562         mono_counters_register ("# major blocks freed individually", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_major_blocks_freed_individual);
2563         mono_counters_register ("# major blocks allocated less ideally", MONO_COUNTER_GC | MONO_COUNTER_LONG, &stat_major_blocks_alloced_less_ideal);
2564 #endif
2565
2566 #ifdef SGEN_PARALLEL_MARK
2567 #ifndef HAVE_KW_THREAD
2568         mono_native_tls_alloc (&workers_free_block_lists_key, NULL);
2569 #endif
2570 #endif
2571
2572         collector->section_size = MAJOR_SECTION_SIZE;
2573 #ifdef SGEN_PARALLEL_MARK
2574         collector->is_parallel = TRUE;
2575         collector->alloc_worker_data = major_alloc_worker_data;
2576         collector->init_worker_thread = major_init_worker_thread;
2577         collector->reset_worker_data = major_reset_worker_data;
2578 #else
2579         collector->is_parallel = FALSE;
2580 #endif
2581 #ifdef SGEN_HAVE_CONCURRENT_MARK
2582         concurrent_mark = is_concurrent;
2583         if (is_concurrent) {
2584                 collector->is_concurrent = TRUE;
2585                 collector->want_synchronous_collection = &want_evacuation;
2586                 collector->get_and_reset_num_major_objects_marked = major_get_and_reset_num_major_objects_marked;
2587         } else
2588 #endif
2589         {
2590                 collector->is_concurrent = FALSE;
2591                 collector->want_synchronous_collection = NULL;
2592         }
2593         collector->supports_cardtable = TRUE;
2594
2595         collector->have_swept = &have_swept;
2596
2597         collector->alloc_heap = major_alloc_heap;
2598         collector->is_object_live = major_is_object_live;
2599         collector->alloc_small_pinned_obj = major_alloc_small_pinned_obj;
2600         collector->alloc_degraded = major_alloc_degraded;
2601
2602         collector->alloc_object = major_alloc_object;
2603 #ifdef SGEN_PARALLEL_MARK
2604         collector->par_alloc_object = major_par_alloc_object;
2605 #endif
2606         collector->free_pinned_object = free_pinned_object;
2607         collector->iterate_objects = major_iterate_objects;
2608         collector->free_non_pinned_object = major_free_non_pinned_object;
2609         collector->find_pin_queue_start_ends = major_find_pin_queue_start_ends;
2610         collector->pin_objects = major_pin_objects;
2611         collector->pin_major_object = pin_major_object;
2612         collector->scan_card_table = major_scan_card_table;
2613         collector->iterate_live_block_ranges = (void*)(void*) major_iterate_live_block_ranges;
2614 #ifdef SGEN_HAVE_CONCURRENT_MARK
2615         if (is_concurrent) {
2616                 collector->update_cardtable_mod_union = update_cardtable_mod_union;
2617                 collector->get_cardtable_mod_union_for_object = major_get_cardtable_mod_union_for_object;
2618         }
2619 #endif
2620         collector->init_to_space = major_init_to_space;
2621         collector->sweep = major_sweep;
2622         collector->check_scan_starts = major_check_scan_starts;
2623         collector->dump_heap = major_dump_heap;
2624         collector->get_used_size = major_get_used_size;
2625         collector->start_nursery_collection = major_start_nursery_collection;
2626         collector->finish_nursery_collection = major_finish_nursery_collection;
2627         collector->start_major_collection = major_start_major_collection;
2628         collector->finish_major_collection = major_finish_major_collection;
2629         collector->have_computed_minor_collection_allowance = major_have_computer_minor_collection_allowance;
2630         collector->ptr_is_in_non_pinned_space = major_ptr_is_in_non_pinned_space;
2631         collector->obj_is_from_pinned_alloc = obj_is_from_pinned_alloc;
2632         collector->report_pinned_memory_usage = major_report_pinned_memory_usage;
2633         collector->get_num_major_sections = get_num_major_sections;
2634         collector->handle_gc_param = major_handle_gc_param;
2635         collector->print_gc_param_usage = major_print_gc_param_usage;
2636         collector->post_param_init = post_param_init;
2637         collector->is_valid_object = major_is_valid_object;
2638         collector->describe_pointer = major_describe_pointer;
2639         collector->count_cards = major_count_cards;
2640
2641         collector->major_ops.copy_or_mark_object = major_copy_or_mark_object_canonical;
2642         collector->major_ops.scan_object = major_scan_object;
2643         collector->major_ops.scan_work = major_scan_work;
2644 #ifdef SGEN_HAVE_CONCURRENT_MARK
2645         if (is_concurrent) {
2646                 collector->major_concurrent_ops.copy_or_mark_object = major_copy_or_mark_object_concurrent_canonical;
2647                 collector->major_concurrent_ops.scan_object = major_scan_object_concurrent;
2648                 collector->major_concurrent_ops.scan_vtype = major_scan_vtype_concurrent;
2649                 collector->major_concurrent_ops.scan_work = major_scan_work_concurrent;
2650         }
2651 #endif
2652
2653         /*cardtable requires major pages to be 8 cards aligned*/
2654         g_assert ((MS_BLOCK_SIZE % (8 * CARD_SIZE_IN_BYTES)) == 0);
2655 }
2656
2657 #ifdef SGEN_HAVE_CONCURRENT_MARK
/* Public entry point: initialize the non-concurrent mark&sweep collector. */
void
sgen_marksweep_init (SgenMajorCollector *collector)
{
	sgen_marksweep_init_internal (collector, FALSE);
}
2663
/* Public entry point: initialize the concurrent mark&sweep collector. */
void
sgen_marksweep_conc_init (SgenMajorCollector *collector)
{
	sgen_marksweep_init_internal (collector, TRUE);
}
2669 #endif
2670
2671 #endif