/*
 * sgen-nursery-allocator.c: Nursery allocation code.
 *
 * Copyright 2009-2010 Novell, Inc.
 * Copyright 2011 Xamarin Inc (http://www.xamarin.com)
 * Copyright (C) 2012 Xamarin Inc
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License 2.0 as published by the Free Software Foundation;
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License 2.0 along with this library; if not, write to the Free
 * Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */
/*
 * The young generation is divided into fragments. This is because
 * we can hand one fragment to a thread for lock-less fast alloc and
 * because the young generation ends up fragmented anyway by pinned objects.
 * Once a collection is done, a list of fragments is created. When doing
 * thread-local alloc we use smallish nurseries so we allow new threads to
 * allocate memory from gen0 without triggering a collection. Threads that
 * are found to allocate lots of memory are given bigger fragments. This
 * should make the finalizer thread use little nursery memory after a while.
 * We should start by assigning threads very small fragments: if there are many
 * threads, the nursery would otherwise fill with reserved space the threads may
 * never use, slowing down allocation speed.
 * Thread-local allocation is done from areas of memory HotSpot calls Thread Local
 * Allocation Buffers (TLABs); the fast path is sketched below.
 */
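/*
 * A minimal sketch of the TLAB fast path, under assumed names (tlab_next
 * and tlab_real_end as thread-local cursors, slow_path_alloc standing in
 * for the fragment-refill path implemented in this file) — illustrative
 * only, not the exact managed allocator:
 *
 *	static __thread char *tlab_next, *tlab_real_end;
 *
 *	static void*
 *	tlab_alloc (size_t size)
 *	{
 *		char *p = tlab_next;
 *		if (p + size <= tlab_real_end) {
 *			tlab_next = p + size;	// no lock: the TLAB is thread-private
 *			return p;
 *		}
 *		return slow_path_alloc (size);	// refill from a nursery fragment
 *	}
 */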
#ifdef HAVE_SEMAPHORE_H
#include <semaphore.h>
#endif

#include "metadata/sgen-gc.h"
#include "metadata/metadata-internals.h"
#include "metadata/class-internals.h"
#include "metadata/gc-internal.h"
#include "metadata/object-internals.h"
#include "metadata/threads.h"
#include "metadata/sgen-cardtable.h"
#include "metadata/sgen-protocol.h"
#include "metadata/sgen-archdep.h"
#include "metadata/sgen-bridge.h"
#include "metadata/sgen-memory-governor.h"
#include "metadata/sgen-pinning.h"
#include "metadata/mono-gc.h"
#include "metadata/method-builder.h"
#include "metadata/profiler-private.h"
#include "metadata/monitor.h"
#include "metadata/threadpool-internals.h"
#include "metadata/mempool-internals.h"
#include "metadata/marshal.h"
#include "utils/mono-mmap.h"
#include "utils/mono-time.h"
#include "utils/mono-semaphore.h"
#include "utils/mono-counters.h"
#include "utils/mono-proclib.h"
#include "utils/mono-threads.h"
/* Enable this to collect nursery allocation diagnostic data */
//#define NALLOC_DEBUG 1

/* The mutator allocs from here. */
SgenFragmentAllocator mutator_allocator;

/* freelist of fragment structures */
static SgenFragment *fragment_freelist = NULL;

/* Allocator cursors */
static char *nursery_last_pinned_end = NULL;

char *sgen_nursery_start;
char *sgen_nursery_end;

/* The default nursery size: 4MB */
size_t sgen_nursery_size = (1 << 22);
int sgen_nursery_bits = 22;

char *sgen_space_bitmap;
size_t sgen_space_bitmap_size;

#ifdef HEAVY_STATISTICS

static gint32 stat_wasted_bytes_trailer = 0;
static gint32 stat_wasted_bytes_small_areas = 0;
static gint32 stat_wasted_bytes_discarded_fragments = 0;
static gint32 stat_nursery_alloc_requests = 0;
static gint32 stat_alloc_iterations = 0;
static gint32 stat_alloc_retries = 0;

static gint32 stat_nursery_alloc_range_requests = 0;
static gint32 stat_alloc_range_iterations = 0;
static gint32 stat_alloc_range_retries = 0;

#endif
/************************************ Nursery allocation debugging ************************************/

#ifdef NALLOC_DEBUG

enum {
	FIXED_ALLOC = 1,
	RANGE_ALLOC,
	PINNING,
	BLOCK_ZEROING,
	CLEAR_NURSERY_FRAGS
};

typedef struct {
	char *address;
	size_t size;
	int reason;
	int seq;
	MonoNativeThreadId tid;
} AllocRecord;

#define ALLOC_RECORD_COUNT 128000

static AllocRecord *alloc_records;
static volatile int next_record;
static volatile int alloc_count;

void dump_alloc_records (void);
void verify_alloc_records (void);
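/*
 * These two are deliberately non-static so they can be invoked by hand,
 * for example from a debugger session (an assumed workflow, not something
 * this file enforces):
 *
 *	(gdb) call dump_alloc_records ()
 *	(gdb) call verify_alloc_records ()
 */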
static const char*
get_reason_name (AllocRecord *rec)
{
	switch (rec->reason) {
	case FIXED_ALLOC: return "fixed-alloc";
	case RANGE_ALLOC: return "range-alloc";
	case PINNING: return "pinning";
	case BLOCK_ZEROING: return "block-zeroing";
	case CLEAR_NURSERY_FRAGS: return "clear-nursery-frag";
	default: return "invalid";
	}
}
static void
reset_alloc_records (void)
{
	next_record = 0;
	alloc_count = 0;
}
static void
add_alloc_record (char *addr, size_t size, int reason)
{
	int idx = InterlockedIncrement (&next_record) - 1;
	alloc_records [idx].address = addr;
	alloc_records [idx].size = size;
	alloc_records [idx].reason = reason;
	alloc_records [idx].seq = idx;
	alloc_records [idx].tid = mono_native_thread_id_get ();
}
static int
comp_alloc_record (const void *_a, const void *_b)
{
	const AllocRecord *a = _a;
	const AllocRecord *b = _b;
	if (a->address == b->address)
		return a->seq - b->seq;
	return a->address - b->address;
}

#define rec_end(REC) ((REC)->address + (REC)->size)
void
dump_alloc_records (void)
{
	int i;
	sgen_qsort (alloc_records, next_record, sizeof (AllocRecord), comp_alloc_record);

	printf ("------------------------------------DUMP RECORDS----------------------------\n");
	for (i = 0; i < next_record; ++i) {
		AllocRecord *rec = alloc_records + i;
		printf ("obj [%p, %p] size %d reason %s seq %d tid %zx\n", rec->address, rec_end (rec), (int)rec->size, get_reason_name (rec), rec->seq, (size_t)rec->tid);
	}
}
void
verify_alloc_records (void)
{
	int i;
	int total = 0;
	int holes = 0;
	int max_hole = 0;
	AllocRecord *prev = NULL;

	sgen_qsort (alloc_records, next_record, sizeof (AllocRecord), comp_alloc_record);
	printf ("------------------------------------DUMP RECORDS- %d %d---------------------------\n", next_record, alloc_count);
	for (i = 0; i < next_record; ++i) {
		AllocRecord *rec = alloc_records + i;
		int hole_size = 0;
		total += rec->size;
		if (prev) {
			if (rec_end (prev) > rec->address)
				printf ("WE GOT OVERLAPPING objects %p and %p\n", prev->address, rec->address);
			if ((rec->address - rec_end (prev)) >= 8)
				++holes;
			hole_size = rec->address - rec_end (prev);
			max_hole = MAX (max_hole, hole_size);
		}
		printf ("obj [%p, %p] size %d hole to prev %d reason %s seq %d tid %zx\n", rec->address, rec_end (rec), (int)rec->size, hole_size, get_reason_name (rec), rec->seq, (size_t)rec->tid);
		prev = rec;
	}
	printf ("SUMMARY total alloc'd %d holes %d max_hole %d\n", total, holes, max_hole);
}

#endif
/*********************************************************************************/
static inline gpointer
mask (gpointer n, uintptr_t bit)
{
	return (gpointer)(((uintptr_t)n) | bit);
}

static inline gpointer
unmask (gpointer p)
{
	return (gpointer)((uintptr_t)p & ~(uintptr_t)0x3);
}

static inline uintptr_t
get_mark (gpointer n)
{
	return (uintptr_t)n & 0x1;
}
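/*
 * Together these helpers implement the pointer tagging used by the
 * lock-free fragment list (in the style of Michael's algorithm): setting
 * the low bit of a next pointer marks the node as logically deleted.
 * An illustrative sketch of how they compose, not code the allocator runs:
 *
 *	SgenFragment *next = frag->next;
 *	if (!get_mark (next)) {
 *		// node still live: tag it for deletion with a CAS
 *		InterlockedCompareExchangePointer ((volatile gpointer*)&frag->next, mask (next, 1), next);
 *	}
 *	SgenFragment *clean = unmask (frag->next);	// strip tag bits to follow the link
 */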
/* MUST be called with the world stopped */
SgenFragment*
sgen_fragment_allocator_alloc (void)
{
	SgenFragment *frag = fragment_freelist;
	if (frag) {
		fragment_freelist = frag->next_in_order;
		frag->next = frag->next_in_order = NULL;
		return frag;
	}
	frag = sgen_alloc_internal (INTERNAL_MEM_FRAGMENT);
	frag->next = frag->next_in_order = NULL;
	return frag;
}
void
sgen_fragment_allocator_add (SgenFragmentAllocator *allocator, char *start, char *end)
{
	SgenFragment *fragment;

	fragment = sgen_fragment_allocator_alloc ();
	fragment->fragment_start = start;
	fragment->fragment_next = start;
	fragment->fragment_end = end;
	fragment->next_in_order = fragment->next = unmask (allocator->region_head);

	allocator->region_head = allocator->alloc_head = fragment;
	g_assert (fragment->fragment_end > fragment->fragment_start);
}
void
sgen_fragment_allocator_release (SgenFragmentAllocator *allocator)
{
	SgenFragment *last = allocator->region_head;
	if (!last)
		return;

	/* Find the last fragment in insert order */
	for (; last->next_in_order; last = last->next_in_order)
		;

	last->next_in_order = fragment_freelist;
	fragment_freelist = allocator->region_head;
	allocator->alloc_head = allocator->region_head = NULL;
}
static SgenFragment**
find_previous_pointer_fragment (SgenFragmentAllocator *allocator, SgenFragment *frag)
{
	SgenFragment **prev;
	SgenFragment *cur, *next;
#ifdef NALLOC_DEBUG
	int count = 0;
#endif

try_again:
	prev = &allocator->alloc_head;
#ifdef NALLOC_DEBUG
	if (count++ > 5)
		printf ("retry count for fppf is %d\n", count);
#endif

	cur = unmask (*prev);

	while (1) {
		if (cur == NULL)
			return NULL;
		next = cur->next;

		/*
		 * We need to make sure that we dereference prev below
		 * after reading cur->next above, so we need a read
		 * barrier.
		 */
		mono_memory_read_barrier ();

		if (*prev != cur)
			goto try_again;

		if (!get_mark (next)) {
			if (cur == frag)
				return prev;
			prev = &cur->next;
		} else {
			next = unmask (next);
			if (InterlockedCompareExchangePointer ((volatile gpointer*)prev, next, cur) != cur)
				goto try_again;
			/* we must make sure the unlink above happens before the next read of cur->next */
			mono_memory_write_barrier ();
		}

		cur = mono_lls_pointer_unmask (next);
	}
	return NULL;
}
static gboolean
claim_remaining_size (SgenFragment *frag, char *alloc_end)
{
	/* All space used, nothing to claim. */
	if (frag->fragment_end <= alloc_end)
		return FALSE;

	/* Try to alloc all the remaining space. */
	return InterlockedCompareExchangePointer ((volatile gpointer*)&frag->fragment_next, frag->fragment_end, alloc_end) == alloc_end;
}
static void*
par_alloc_from_fragment (SgenFragmentAllocator *allocator, SgenFragment *frag, size_t size)
{
	char *p = frag->fragment_next;
	char *end = p + size;

	if (end > frag->fragment_end)
		return NULL;

	/* p = frag->fragment_next must happen before the CAS below */
	mono_memory_barrier ();

	if (InterlockedCompareExchangePointer ((volatile gpointer*)&frag->fragment_next, end, p) != p)
		return NULL;

	if (frag->fragment_end - end < SGEN_MAX_NURSERY_WASTE) {
		SgenFragment *next, **prev_ptr;

		/*
		 * Before we clean the remaining nursery, we must claim the remaining space
		 * as it could end up being used by the range allocator, since the latter can
		 * allocate from this dying fragment: it doesn't respect SGEN_MAX_NURSERY_WASTE
		 * when doing second-chance allocation.
		 */
		if ((sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION || sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION_DEBUG) && claim_remaining_size (frag, end)) {
			sgen_clear_range (end, frag->fragment_end);
			HEAVY_STAT (InterlockedExchangeAdd (&stat_wasted_bytes_trailer, frag->fragment_end - end));
#ifdef NALLOC_DEBUG
			add_alloc_record (end, frag->fragment_end - end, BLOCK_ZEROING);
#endif
		}

		prev_ptr = find_previous_pointer_fragment (allocator, frag);

		/* Use Michael's linked-list remove. */

		/* prev_ptr will be NULL if the fragment was removed concurrently */
		while (prev_ptr) {
			next = frag->next;

			/* already deleted? */
			if (!get_mark (next)) {
				/* the frag->next read must happen before the first CAS */
				mono_memory_write_barrier ();

				/* Fail if the next node is removed concurrently and its CAS wins */
				if (InterlockedCompareExchangePointer ((volatile gpointer*)&frag->next, mask (next, 1), next) != next)
					continue;
			}

			/* The second CAS must happen after the first CAS or the frag->next read. */
			mono_memory_write_barrier ();

			/* Fail if the previous node was deleted and its CAS wins */
			if (InterlockedCompareExchangePointer ((volatile gpointer*)prev_ptr, unmask (next), frag) != frag) {
				prev_ptr = find_previous_pointer_fragment (allocator, frag);
				continue;
			}
			break;
		}
	}

	return p;
}
static void*
serial_alloc_from_fragment (SgenFragment **previous, SgenFragment *frag, size_t size)
{
	char *p = frag->fragment_next;
	char *end = p + size;

	if (end > frag->fragment_end)
		return NULL;

	frag->fragment_next = end;

	if (frag->fragment_end - end < SGEN_MAX_NURSERY_WASTE) {
		/* Unlink the now-exhausted fragment. */
		*previous = frag->next;

		/* Clear the remaining space, pinning depends on this. FIXME move this to use phony arrays */
		memset (end, 0, frag->fragment_end - end);
	}

	return p;
}
void*
sgen_fragment_allocator_par_alloc (SgenFragmentAllocator *allocator, size_t size)
{
	SgenFragment *frag;

#ifdef NALLOC_DEBUG
	InterlockedIncrement (&alloc_count);
#endif

restart:
	for (frag = unmask (allocator->alloc_head); unmask (frag); frag = unmask (frag->next)) {
		HEAVY_STAT (InterlockedIncrement (&stat_alloc_iterations));

		if (size <= (size_t)(frag->fragment_end - frag->fragment_next)) {
			void *p = par_alloc_from_fragment (allocator, frag, size);
			if (!p) {
				HEAVY_STAT (InterlockedIncrement (&stat_alloc_retries));
				goto restart;
			}
#ifdef NALLOC_DEBUG
			add_alloc_record (p, size, FIXED_ALLOC);
#endif
			return p;
		}
	}
	return NULL;
}
void*
sgen_fragment_allocator_serial_alloc (SgenFragmentAllocator *allocator, size_t size)
{
	SgenFragment *frag;
	SgenFragment **previous;
#ifdef NALLOC_DEBUG
	InterlockedIncrement (&alloc_count);
#endif

	previous = &allocator->alloc_head;

	for (frag = *previous; frag; frag = *previous) {
		char *p = serial_alloc_from_fragment (previous, frag, size);

		HEAVY_STAT (InterlockedIncrement (&stat_alloc_iterations));

		if (p) {
#ifdef NALLOC_DEBUG
			add_alloc_record (p, size, FIXED_ALLOC);
#endif
			return p;
		}
		previous = &frag->next;
	}
	return NULL;
}
void*
sgen_fragment_allocator_serial_range_alloc (SgenFragmentAllocator *allocator, size_t desired_size, size_t minimum_size, size_t *out_alloc_size)
{
	SgenFragment *frag, **previous, *min_frag = NULL, **prev_min_frag = NULL;
	size_t current_minimum = minimum_size;

#ifdef NALLOC_DEBUG
	InterlockedIncrement (&alloc_count);
#endif

	previous = &allocator->alloc_head;

	for (frag = *previous; frag; frag = *previous) {
		size_t frag_size = frag->fragment_end - frag->fragment_next;

		HEAVY_STAT (InterlockedIncrement (&stat_alloc_range_iterations));

		if (desired_size <= frag_size) {
			void *p;
			*out_alloc_size = desired_size;

			p = serial_alloc_from_fragment (previous, frag, desired_size);
#ifdef NALLOC_DEBUG
			add_alloc_record (p, desired_size, RANGE_ALLOC);
#endif
			return p;
		}
		if (current_minimum <= frag_size) {
			min_frag = frag;
			prev_min_frag = previous;
			current_minimum = frag_size;
		}
		previous = &frag->next;
	}

	if (min_frag) {
		void *p;
		size_t frag_size = min_frag->fragment_end - min_frag->fragment_next;
		*out_alloc_size = frag_size;

		p = serial_alloc_from_fragment (prev_min_frag, min_frag, frag_size);
#ifdef NALLOC_DEBUG
		add_alloc_record (p, frag_size, RANGE_ALLOC);
#endif
		return p;
	}

	return NULL;
}
void*
sgen_fragment_allocator_par_range_alloc (SgenFragmentAllocator *allocator, size_t desired_size, size_t minimum_size, size_t *out_alloc_size)
{
	SgenFragment *frag, *min_frag;
	size_t current_minimum;

restart:
	min_frag = NULL;
	current_minimum = minimum_size;

#ifdef NALLOC_DEBUG
	InterlockedIncrement (&alloc_count);
#endif

	for (frag = unmask (allocator->alloc_head); frag; frag = unmask (frag->next)) {
		size_t frag_size = frag->fragment_end - frag->fragment_next;

		HEAVY_STAT (InterlockedIncrement (&stat_alloc_range_iterations));

		if (desired_size <= frag_size) {
			void *p;
			*out_alloc_size = desired_size;

			p = par_alloc_from_fragment (allocator, frag, desired_size);
			if (!p) {
				HEAVY_STAT (InterlockedIncrement (&stat_alloc_range_retries));
				goto restart;
			}
#ifdef NALLOC_DEBUG
			add_alloc_record (p, desired_size, RANGE_ALLOC);
#endif
			return p;
		}
		if (current_minimum <= frag_size) {
			min_frag = frag;
			current_minimum = frag_size;
		}
	}

	/* The second fragment_next read should be ordered with respect to the first code block */
	mono_memory_barrier ();

	if (min_frag) {
		void *p;
		size_t frag_size = min_frag->fragment_end - min_frag->fragment_next;
		if (frag_size < minimum_size)
			goto restart;

		*out_alloc_size = frag_size;

		mono_memory_barrier ();
		p = par_alloc_from_fragment (allocator, min_frag, frag_size);

		/* XXX restarting here is quite dubious given this is already second-chance allocation. */
		if (!p) {
			HEAVY_STAT (InterlockedIncrement (&stat_alloc_retries));
			goto restart;
		}
#ifdef NALLOC_DEBUG
		add_alloc_record (p, frag_size, RANGE_ALLOC);
#endif
		return p;
	}

	return NULL;
}
void
sgen_clear_allocator_fragments (SgenFragmentAllocator *allocator)
{
	SgenFragment *frag;

	for (frag = unmask (allocator->alloc_head); frag; frag = unmask (frag->next)) {
		SGEN_LOG (4, "Clear nursery frag %p-%p", frag->fragment_next, frag->fragment_end);
		sgen_clear_range (frag->fragment_next, frag->fragment_end);
#ifdef NALLOC_DEBUG
		add_alloc_record (frag->fragment_next, frag->fragment_end - frag->fragment_next, CLEAR_NURSERY_FRAGS);
#endif
	}
}
/* Clear all remaining nursery fragments */
void
sgen_clear_nursery_fragments (void)
{
	if (sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION || sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION_DEBUG) {
		sgen_clear_allocator_fragments (&mutator_allocator);
		sgen_minor_collector.clear_fragments ();
	}
}
/*
 * Mark a given range of memory as invalid.
 *
 * This can be done either by zeroing memory or by placing
 * a phony byte[] array. This keeps the heap forward walkable.
 *
 * This function ignores calls with a zero range, even if
 * both start and end are NULL.
 */
void
sgen_clear_range (char *start, char *end)
{
	MonoArray *o;
	size_t size = end - start;

	if ((start && !end) || (start > end))
		g_error ("Invalid range [%p %p]", start, end);

	if (size < sizeof (MonoArray)) {
		memset (start, 0, size);
		return;
	}

	o = (MonoArray*)start;
	o->obj.vtable = sgen_get_array_fill_vtable ();
	/* Mark this as not a real object */
	o->obj.synchronisation = GINT_TO_POINTER (-1);
	o->max_length = (mono_array_size_t)(size - sizeof (MonoArray));
	sgen_set_nursery_scan_start (start);
	g_assert (start + sgen_safe_object_get_size ((MonoObject*)o) == end);
}
void
sgen_nursery_allocator_prepare_for_pinning (void)
{
	sgen_clear_allocator_fragments (&mutator_allocator);
	sgen_minor_collector.clear_fragments ();
}
static mword fragment_total = 0;

/*
 * We found a fragment of free memory in the nursery: memzero it and if
 * it is big enough, add it to the list of fragments that can be used for
 * allocation.
 */
static void
add_nursery_frag (SgenFragmentAllocator *allocator, size_t frag_size, char* frag_start, char* frag_end)
{
	SGEN_LOG (4, "Found empty fragment: %p-%p, size: %zd", frag_start, frag_end, frag_size);
	binary_protocol_empty (frag_start, frag_size);
	MONO_GC_NURSERY_SWEPT ((mword)frag_start, frag_end - frag_start);
	/* Not worth dealing with smaller fragments: need to tune */
	if (frag_size >= SGEN_MAX_NURSERY_WASTE) {
		/* memsetting just the first chunk start is bound to provide better cache locality */
		if (sgen_get_nursery_clear_policy () == CLEAR_AT_GC)
			memset (frag_start, 0, frag_size);
		else if (sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION_DEBUG)
			memset (frag_start, 0xff, frag_size);

#ifdef NALLOC_DEBUG
		/* XXX convert this into a flight record entry
		printf ("\tfragment [%p %p] size %zd\n", frag_start, frag_end, frag_size);
		*/
#endif
		sgen_fragment_allocator_add (allocator, frag_start, frag_end);
		fragment_total += frag_size;
	} else {
		/* Clear unused fragments, pinning depends on this */
		sgen_clear_range (frag_start, frag_end);
		HEAVY_STAT (InterlockedExchangeAdd (&stat_wasted_bytes_small_areas, frag_size));
	}
}
static void
fragment_list_reverse (SgenFragmentAllocator *allocator)
{
	SgenFragment *prev = NULL, *list = allocator->region_head;
	while (list) {
		SgenFragment *next = list->next;
		list->next = prev;
		list->next_in_order = prev;
		prev = list;
		list = next;
	}

	allocator->region_head = allocator->alloc_head = prev;
}
mword
sgen_build_nursery_fragments (GCMemSection *nursery_section, SgenGrayQueue *unpin_queue)
{
	char *frag_start, *frag_end;
	size_t frag_size;
	SgenFragment *frags_ranges;
	void **pin_start, **pin_entry, **pin_end;

#ifdef NALLOC_DEBUG
	reset_alloc_records ();
#endif
	/* The mutator fragments are done. We no longer need them. */
	sgen_fragment_allocator_release (&mutator_allocator);

	frag_start = sgen_nursery_start;
	fragment_total = 0;

	/* The current nursery might give us a fragment list to exclude [start, next[ */
	frags_ranges = sgen_minor_collector.build_fragments_get_exclude_head ();

	/* clear scan starts */
	memset (nursery_section->scan_starts, 0, nursery_section->num_scan_start * sizeof (gpointer));

	pin_start = pin_entry = sgen_pinning_get_entry (nursery_section->pin_queue_first_entry);
	pin_end = sgen_pinning_get_entry (nursery_section->pin_queue_last_entry);

	while (pin_entry < pin_end || frags_ranges) {
		char *addr0, *addr1;
		size_t size;

		addr0 = addr1 = sgen_nursery_end;
		if (pin_entry < pin_end)
			addr0 = *pin_entry;
		if (frags_ranges)
			addr1 = frags_ranges->fragment_start;

		if (addr0 < addr1) {
			if (unpin_queue)
				GRAY_OBJECT_ENQUEUE (unpin_queue, addr0, sgen_obj_get_descriptor_safe (addr0));
			else
				SGEN_UNPIN_OBJECT (addr0);
			size = SGEN_ALIGN_UP (sgen_safe_object_get_size ((MonoObject*)addr0));
			CANARIFY_SIZE (size);
			sgen_set_nursery_scan_start (addr0);
			frag_end = addr0;
			++pin_entry;
		} else {
			frag_end = addr1;
			size = frags_ranges->fragment_next - addr1;
			frags_ranges = frags_ranges->next_in_order;
		}

		frag_size = frag_end - frag_start;

		if (size == 0)
			continue;

		g_assert (frag_size >= 0);
		if (frag_size && size)
			add_nursery_frag (&mutator_allocator, frag_size, frag_start, frag_end);

		frag_size = size;
#ifdef NALLOC_DEBUG
		add_alloc_record (*pin_entry, frag_size, PINNING);
#endif
		frag_start = frag_end + frag_size;
	}

	nursery_last_pinned_end = frag_start;
	frag_end = sgen_nursery_end;
	frag_size = frag_end - frag_start;
	if (frag_size)
		add_nursery_frag (&mutator_allocator, frag_size, frag_start, frag_end);

	/* Now it's safe to release the fragments exclude list. */
	sgen_minor_collector.build_fragments_release_exclude_head ();

	/* First we reorder the fragment list to be in ascending address order. This makes H/W prefetchers happier. */
	fragment_list_reverse (&mutator_allocator);

	/* The collector might want to do something with the final nursery fragment list. */
	sgen_minor_collector.build_fragments_finish (&mutator_allocator);

	if (!unmask (mutator_allocator.alloc_head)) {
		SGEN_LOG (1, "Nursery fully pinned");
		for (pin_entry = pin_start; pin_entry < pin_end; ++pin_entry) {
			void *p = *pin_entry;
			SGEN_LOG (3, "Bastard pinning obj %p (%s), size: %zd", p, sgen_safe_name (p), sgen_safe_object_get_size (p));
		}
	}
	return fragment_total;
}
char*
sgen_nursery_alloc_get_upper_alloc_bound (void)
{
	/* FIXME: we need to calculate the collector upper bound as well, but this must be done in the previous GC. */
	return sgen_nursery_end;
}

/*** Nursery memory allocation ***/
void
sgen_nursery_retire_region (void *address, ptrdiff_t size)
{
	HEAVY_STAT (InterlockedExchangeAdd (&stat_wasted_bytes_discarded_fragments, size));
}
gboolean
sgen_can_alloc_size (size_t size)
{
	SgenFragment *frag;

	if (!SGEN_CAN_ALIGN_UP (size))
		return FALSE;

	size = SGEN_ALIGN_UP (size);

	for (frag = unmask (mutator_allocator.alloc_head); frag; frag = unmask (frag->next)) {
		if ((size_t)(frag->fragment_end - frag->fragment_next) >= size)
			return TRUE;
	}
	return FALSE;
}
void*
sgen_nursery_alloc (size_t size)
{
	SGEN_ASSERT (1, size >= sizeof (MonoObject) && size <= (SGEN_MAX_SMALL_OBJ_SIZE + CANARY_SIZE), "Invalid nursery object size");

	SGEN_LOG (4, "Searching nursery for size: %zd", size);
	size = SGEN_ALIGN_UP (size);

	HEAVY_STAT (InterlockedIncrement (&stat_nursery_alloc_requests));

	return sgen_fragment_allocator_par_alloc (&mutator_allocator, size);
}
void*
sgen_nursery_alloc_range (size_t desired_size, size_t minimum_size, size_t *out_alloc_size)
{
	SGEN_LOG (4, "Searching for byte range, desired size: %zd, minimum size: %zd", desired_size, minimum_size);

	HEAVY_STAT (InterlockedIncrement (&stat_nursery_alloc_range_requests));

	return sgen_fragment_allocator_par_range_alloc (&mutator_allocator, desired_size, minimum_size, out_alloc_size);
}

/*** Initialization ***/
#ifdef HEAVY_STATISTICS

void
sgen_nursery_allocator_init_heavy_stats (void)
{
	mono_counters_register ("bytes wasted trailer fragments", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_wasted_bytes_trailer);
	mono_counters_register ("bytes wasted small areas", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_wasted_bytes_small_areas);
	mono_counters_register ("bytes wasted discarded fragments", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_wasted_bytes_discarded_fragments);

	mono_counters_register ("# nursery alloc requests", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_nursery_alloc_requests);
	mono_counters_register ("# nursery alloc iterations", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_alloc_iterations);
	mono_counters_register ("# nursery alloc retries", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_alloc_retries);

	mono_counters_register ("# nursery alloc range requests", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_nursery_alloc_range_requests);
	mono_counters_register ("# nursery alloc range iterations", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_alloc_range_iterations);
	mono_counters_register ("# nursery alloc range retries", MONO_COUNTER_GC | MONO_COUNTER_INT, &stat_alloc_range_retries);
}

#endif
void
sgen_init_nursery_allocator (void)
{
	sgen_register_fixed_internal_mem_type (INTERNAL_MEM_FRAGMENT, sizeof (SgenFragment));
#ifdef NALLOC_DEBUG
	alloc_records = sgen_alloc_os_memory (sizeof (AllocRecord) * ALLOC_RECORD_COUNT, SGEN_ALLOC_INTERNAL | SGEN_ALLOC_ACTIVATE, "debugging memory");
#endif
}
void
sgen_nursery_alloc_prepare_for_minor (void)
{
	sgen_minor_collector.prepare_to_space (sgen_space_bitmap, sgen_space_bitmap_size);
}
void
sgen_nursery_alloc_prepare_for_major (void)
{
	sgen_minor_collector.prepare_to_space (sgen_space_bitmap, sgen_space_bitmap_size);
}
void
sgen_nursery_allocator_set_nursery_bounds (char *start, char *end)
{
	sgen_nursery_start = start;
	sgen_nursery_end = end;

	/*
	 * This will not divide evenly for tiny nurseries (<4kb), so we make sure to be on
	 * the right side of things and round up. We could just do a MAX(1,x) instead,
	 * since the nursery size must be a power of 2.
	 */
	sgen_space_bitmap_size = (end - start + SGEN_TO_SPACE_GRANULE_IN_BYTES * 8 - 1) / (SGEN_TO_SPACE_GRANULE_IN_BYTES * 8);
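	/*
	 * Worked example, assuming SGEN_TO_SPACE_GRANULE_IN_BYTES == 512 (an
	 * illustrative value, not one defined in this file): each bitmap byte
	 * covers 512 * 8 = 4096 nursery bytes, so the default 4MB nursery
	 * needs (4194304 + 4095) / 4096 = 1024 bitmap bytes, one bit per
	 * 512-byte granule.
	 */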
	sgen_space_bitmap = g_malloc0 (sgen_space_bitmap_size);

	/* Set up the single first large fragment */
	sgen_minor_collector.init_nursery (&mutator_allocator, start, end);
}