X-Git-Url: http://wien.tomnetworks.com/gitweb/?a=blobdiff_plain;f=mono%2Fsgen%2Fsgen-nursery-allocator.c;h=785f46d5645c183043c8f09d047f4f2f3e7efeca;hb=3fc2fee18d61305b6b63b61f47c059dc2d5f058a;hp=ab12803f10813400c506a49e71ed198e3f4ab0ec;hpb=5a57d12ffc89bbccfd7aaad6ba3b89547701e0a0;p=mono.git diff --git a/mono/sgen/sgen-nursery-allocator.c b/mono/sgen/sgen-nursery-allocator.c index ab12803f108..785f46d5645 100644 --- a/mono/sgen/sgen-nursery-allocator.c +++ b/mono/sgen/sgen-nursery-allocator.c @@ -7,18 +7,7 @@ * Copyright 2011 Xamarin Inc (http://www.xamarin.com) * Copyright (C) 2012 Xamarin Inc * - * This library is free software; you can redistribute it and/or - * modify it under the terms of the GNU Library General Public - * License 2.0 as published by the Free Software Foundation; - * - * This library is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Library General Public License for more details. - * - * You should have received a copy of the GNU Library General Public - * License 2.0 along with this library; if not, write to the Free - * Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. + * Licensed under the MIT license. See LICENSE file in the project root for full license information. */ /* @@ -253,7 +242,7 @@ sgen_fragment_allocator_alloc (void) frag->next = frag->next_in_order = NULL; return frag; } - frag = sgen_alloc_internal (INTERNAL_MEM_FRAGMENT); + frag = (SgenFragment *)sgen_alloc_internal (INTERNAL_MEM_FRAGMENT); frag->next = frag->next_in_order = NULL; return frag; } @@ -267,7 +256,7 @@ sgen_fragment_allocator_add (SgenFragmentAllocator *allocator, char *start, char fragment->fragment_start = start; fragment->fragment_next = start; fragment->fragment_end = end; - fragment->next_in_order = fragment->next = unmask (allocator->region_head); + fragment->next_in_order = fragment->next = (SgenFragment *)unmask (allocator->region_head); allocator->region_head = allocator->alloc_head = fragment; g_assert (fragment->fragment_end > fragment->fragment_start); @@ -304,7 +293,7 @@ try_again: printf ("retry count for fppf is %d\n", count); #endif - cur = unmask (*prev); + cur = (SgenFragment *)unmask (*prev); while (1) { if (cur == NULL) @@ -326,14 +315,14 @@ try_again: return prev; prev = &cur->next; } else { - next = unmask (next); + next = (SgenFragment *)unmask (next); if (InterlockedCompareExchangePointer ((volatile gpointer*)prev, next, cur) != cur) goto try_again; /*we must make sure that the next from cur->next happens after*/ mono_memory_write_barrier (); } - cur = unmask (next); + cur = (SgenFragment *)unmask (next); } return NULL; } @@ -375,7 +364,7 @@ par_alloc_from_fragment (SgenFragmentAllocator *allocator, SgenFragment *frag, s */ if ((sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION || sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION_DEBUG) && claim_remaining_size (frag, end)) { sgen_clear_range (end, frag->fragment_end); - HEAVY_STAT (InterlockedExchangeAdd (&stat_wasted_bytes_trailer, frag->fragment_end - end)); + HEAVY_STAT (stat_wasted_bytes_trailer += frag->fragment_end - end); #ifdef NALLOC_DEBUG add_alloc_record (end, frag->fragment_end - end, BLOCK_ZEROING); #endif @@ -448,13 +437,13 @@ sgen_fragment_allocator_par_alloc (SgenFragmentAllocator *allocator, size_t size #endif restart: - for (frag = unmask (allocator->alloc_head); unmask (frag); frag = unmask (frag->next)) { - HEAVY_STAT 
(InterlockedIncrement (&stat_alloc_iterations)); + for (frag = (SgenFragment *)unmask (allocator->alloc_head); unmask (frag); frag = (SgenFragment *)unmask (frag->next)) { + HEAVY_STAT (++stat_alloc_iterations); if (size <= (size_t)(frag->fragment_end - frag->fragment_next)) { void *p = par_alloc_from_fragment (allocator, frag, size); if (!p) { - HEAVY_STAT (InterlockedIncrement (&stat_alloc_retries)); + HEAVY_STAT (++stat_alloc_retries); goto restart; } #ifdef NALLOC_DEBUG @@ -478,9 +467,9 @@ sgen_fragment_allocator_serial_alloc (SgenFragmentAllocator *allocator, size_t s previous = &allocator->alloc_head; for (frag = *previous; frag; frag = *previous) { - char *p = serial_alloc_from_fragment (previous, frag, size); + char *p = (char *)serial_alloc_from_fragment (previous, frag, size); - HEAVY_STAT (InterlockedIncrement (&stat_alloc_iterations)); + HEAVY_STAT (++stat_alloc_iterations); if (p) { #ifdef NALLOC_DEBUG @@ -508,7 +497,7 @@ sgen_fragment_allocator_serial_range_alloc (SgenFragmentAllocator *allocator, si for (frag = *previous; frag; frag = *previous) { size_t frag_size = frag->fragment_end - frag->fragment_next; - HEAVY_STAT (InterlockedIncrement (&stat_alloc_range_iterations)); + HEAVY_STAT (++stat_alloc_range_iterations); if (desired_size <= frag_size) { void *p; @@ -558,10 +547,10 @@ restart: InterlockedIncrement (&alloc_count); #endif - for (frag = unmask (allocator->alloc_head); frag; frag = unmask (frag->next)) { + for (frag = (SgenFragment *)unmask (allocator->alloc_head); frag; frag = (SgenFragment *)unmask (frag->next)) { size_t frag_size = frag->fragment_end - frag->fragment_next; - HEAVY_STAT (InterlockedIncrement (&stat_alloc_range_iterations)); + HEAVY_STAT (++stat_alloc_range_iterations); if (desired_size <= frag_size) { void *p; @@ -569,7 +558,7 @@ restart: p = par_alloc_from_fragment (allocator, frag, desired_size); if (!p) { - HEAVY_STAT (InterlockedIncrement (&stat_alloc_range_retries)); + HEAVY_STAT (++stat_alloc_range_retries); goto restart; } #ifdef NALLOC_DEBUG @@ -601,7 +590,7 @@ restart: /*XXX restarting here is quite dubious given this is already second chance allocation. 
*/ if (!p) { - HEAVY_STAT (InterlockedIncrement (&stat_alloc_retries)); + HEAVY_STAT (++stat_alloc_retries); goto restart; } #ifdef NALLOC_DEBUG @@ -618,7 +607,7 @@ sgen_clear_allocator_fragments (SgenFragmentAllocator *allocator) { SgenFragment *frag; - for (frag = unmask (allocator->alloc_head); frag; frag = unmask (frag->next)) { + for (frag = (SgenFragment *)unmask (allocator->alloc_head); frag; frag = (SgenFragment *)unmask (frag->next)) { SGEN_LOG (4, "Clear nursery frag %p-%p", frag->fragment_next, frag->fragment_end); sgen_clear_range (frag->fragment_next, frag->fragment_end); #ifdef NALLOC_DEBUG @@ -696,7 +685,7 @@ add_nursery_frag (SgenFragmentAllocator *allocator, size_t frag_size, char* frag } else { /* Clear unused fragments, pinning depends on this */ sgen_clear_range (frag_start, frag_end); - HEAVY_STAT (InterlockedExchangeAdd (&stat_wasted_bytes_small_areas, frag_size)); + HEAVY_STAT (stat_wasted_bytes_small_areas += frag_size); } } @@ -747,13 +736,13 @@ sgen_build_nursery_fragments (GCMemSection *nursery_section, SgenGrayQueue *unpi addr0 = addr1 = sgen_nursery_end; if (pin_entry < pin_end) - addr0 = *pin_entry; + addr0 = (char *)*pin_entry; if (frags_ranges) addr1 = frags_ranges->fragment_start; if (addr0 < addr1) { if (unpin_queue) - GRAY_OBJECT_ENQUEUE (unpin_queue, addr0, sgen_obj_get_descriptor_safe (addr0)); + GRAY_OBJECT_ENQUEUE (unpin_queue, (GCObject*)addr0, sgen_obj_get_descriptor_safe ((GCObject*)addr0)); else SGEN_UNPIN_OBJECT (addr0); size = SGEN_ALIGN_UP (sgen_safe_object_get_size ((GCObject*)addr0)); @@ -802,7 +791,7 @@ sgen_build_nursery_fragments (GCMemSection *nursery_section, SgenGrayQueue *unpi if (!unmask (mutator_allocator.alloc_head)) { SGEN_LOG (1, "Nursery fully pinned"); for (pin_entry = pin_start; pin_entry < pin_end; ++pin_entry) { - void *p = *pin_entry; + GCObject *p = (GCObject *)*pin_entry; SGEN_LOG (3, "Bastard pinning obj %p (%s), size: %zd", p, sgen_client_vtable_get_name (SGEN_LOAD_VTABLE (p)), sgen_safe_object_get_size (p)); } } @@ -820,7 +809,7 @@ sgen_nursery_alloc_get_upper_alloc_bound (void) void sgen_nursery_retire_region (void *address, ptrdiff_t size) { - HEAVY_STAT (InterlockedExchangeAdd (&stat_wasted_bytes_discarded_fragments, size)); + HEAVY_STAT (stat_wasted_bytes_discarded_fragments += size); } gboolean @@ -833,7 +822,7 @@ sgen_can_alloc_size (size_t size) size = SGEN_ALIGN_UP (size); - for (frag = unmask (mutator_allocator.alloc_head); frag; frag = unmask (frag->next)) { + for (frag = (SgenFragment *)unmask (mutator_allocator.alloc_head); frag; frag = (SgenFragment *)unmask (frag->next)) { if ((size_t)(frag->fragment_end - frag->fragment_next) >= size) return TRUE; } @@ -848,7 +837,7 @@ sgen_nursery_alloc (size_t size) SGEN_LOG (4, "Searching nursery for size: %zd", size); size = SGEN_ALIGN_UP (size); - HEAVY_STAT (InterlockedIncrement (&stat_nursery_alloc_requests)); + HEAVY_STAT (++stat_nursery_alloc_requests); return sgen_fragment_allocator_par_alloc (&mutator_allocator, size); } @@ -858,7 +847,7 @@ sgen_nursery_alloc_range (size_t desired_size, size_t minimum_size, size_t *out_ { SGEN_LOG (4, "Searching for byte range desired size: %zd minimum size %zd", desired_size, minimum_size); - HEAVY_STAT (InterlockedIncrement (&stat_nursery_alloc_range_requests)); + HEAVY_STAT (++stat_nursery_alloc_range_requests); return sgen_fragment_allocator_par_range_alloc (&mutator_allocator, desired_size, minimum_size, out_alloc_size); } @@ -918,7 +907,7 @@ sgen_nursery_allocator_set_nursery_bounds (char *start, char *end) * since the 
nursery size must be a power of 2. */ sgen_space_bitmap_size = (end - start + SGEN_TO_SPACE_GRANULE_IN_BYTES * 8 - 1) / (SGEN_TO_SPACE_GRANULE_IN_BYTES * 8); - sgen_space_bitmap = g_malloc0 (sgen_space_bitmap_size); + sgen_space_bitmap = (char *)g_malloc0 (sgen_space_bitmap_size); /* Setup the single first large fragment */ sgen_minor_collector.init_nursery (&mutator_allocator, start, end);
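
Note on the final hunk above: sgen_space_bitmap holds one bit per to-space granule, so its byte size is the nursery size divided by (SGEN_TO_SPACE_GRANULE_IN_BYTES * 8), rounded up. Below is a minimal standalone sketch of that sizing calculation, not code from this diff: EXAMPLE_GRANULE_IN_BYTES and EXAMPLE_NURSERY_SIZE are assumed illustrative values rather than the actual sgen constants, and calloc stands in for g_malloc0. The explicit (char *) cast mirrors the casts added throughout the diff; C accepts the implicit conversion from void *, C++ does not.

#include <stdio.h>
#include <stdlib.h>

/* Assumed example values, not the actual sgen constants. */
#define EXAMPLE_GRANULE_IN_BYTES 512
#define EXAMPLE_NURSERY_SIZE (4 * 1024 * 1024)

int
main (void)
{
	size_t nursery_size = EXAMPLE_NURSERY_SIZE;
	/* One bit per granule: the rounding term makes this a ceiling division,
	 * so a partial trailing granule still gets a bit. */
	size_t bitmap_size = (nursery_size + EXAMPLE_GRANULE_IN_BYTES * 8 - 1) / (EXAMPLE_GRANULE_IN_BYTES * 8);

	/* Zero-initialized, like the g_malloc0 call in the hunk; the (char *) cast is
	 * the same pattern the diff adds (C does not require it, C++ does). */
	char *bitmap = (char *)calloc (1, bitmap_size);
	if (!bitmap)
		return 1;

	printf ("nursery %zu bytes -> bitmap of %zu bytes\n", nursery_size, bitmap_size);
	free (bitmap);
	return 0;
}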