/*
 * Nursery allocation code.
 *
 * Copyright 2009-2010 Novell, Inc.
 * Copyright 2011 Xamarin Inc (http://www.xamarin.com)
 * Copyright (C) 2012 Xamarin Inc
 *
 * Licensed under the MIT license. See LICENSE file in the project root for full license information.
 */
/*
 * The young generation is divided into fragments. This is because
 * we can hand one fragment to a thread for lock-less fast alloc and
 * because the young generation ends up fragmented anyway by pinned objects.
 * Once a collection is done, a list of fragments is created. When doing
 * thread-local alloc we use smallish nurseries so we allow new threads to
 * allocate memory from gen0 without triggering a collection. Threads that
 * are found to allocate lots of memory are given bigger fragments. This
 * should make the finalizer thread use little nursery memory after a while.
 * We should start assigning threads very small fragments: if there are many
 * threads the nursery will be full of reserved space that the threads may not
 * use at all, slowing down allocation speed.
 * Thread-local allocation is done from areas of memory HotSpot calls Thread Local
 * Allocation Buffers (TLABs).
 */
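/*
 * For orientation, the TLAB fast path described above amounts to a
 * bump-pointer allocation (a minimal sketch; tlab_next/tlab_end are
 * illustrative names, the real fast path lives in the client allocator):
 *
 *	char *p = tlab_next;
 *	if (p + size <= tlab_end) {
 *		tlab_next = p + size;	// no locks, no atomics on the fast path
 *		return p;
 *	}
 *	// slow path: ask the allocator in this file for a fresh fragment
 */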
#ifdef HAVE_SEMAPHORE_H
#include <semaphore.h>
#endif
#include "mono/sgen/sgen-gc.h"
#include "mono/sgen/sgen-cardtable.h"
#include "mono/sgen/sgen-protocol.h"
#include "mono/sgen/sgen-memory-governor.h"
#include "mono/sgen/sgen-pinning.h"
#include "mono/sgen/sgen-client.h"
#include "mono/utils/mono-membar.h"
/* Enable this so nursery allocation diagnostic data is collected */
//#define NALLOC_DEBUG 1
/* The mutator allocs from here. */
static SgenFragmentAllocator mutator_allocator;
/* freelist of fragment structures */
static SgenFragment *fragment_freelist = NULL;
char *sgen_nursery_start;
char *sgen_nursery_end;
/* Good sizes are 512KB-1MB: larger ones increase memzeroing time a lot. */
size_t sgen_nursery_size;
/*
 * Maximum size that we can resize the nursery to.
 * If sgen_nursery_default_size == sgen_nursery_max_size then we are not
 * dynamically resizing the nursery.
 */
size_t sgen_nursery_max_size;
size_t sgen_nursery_min_size;
/* The number of trailing 0 bits in sgen_nursery_max_size */
int sgen_nursery_bits;
char *sgen_space_bitmap;
size_t sgen_space_bitmap_size;
#ifdef HEAVY_STATISTICS

static mword stat_wasted_bytes_trailer = 0;
static mword stat_wasted_bytes_small_areas = 0;
static mword stat_wasted_bytes_discarded_fragments = 0;
static guint64 stat_nursery_alloc_requests = 0;
static guint64 stat_alloc_iterations = 0;
static guint64 stat_alloc_retries = 0;

static guint64 stat_nursery_alloc_range_requests = 0;
static guint64 stat_alloc_range_iterations = 0;
static guint64 stat_alloc_range_retries = 0;

#endif
/*********************************** Nursery allocation debugging ***********************************/
#ifdef NALLOC_DEBUG

enum {
	FIXED_ALLOC = 1,
	RANGE_ALLOC,
	PINNING,
	BLOCK_ZEROING,
	CLEAR_NURSERY_FRAGS
};

typedef struct {
	char *address;
	size_t size;
	int reason;
	int seq;
	MonoNativeThreadId tid;
} AllocRecord;

#define ALLOC_RECORD_COUNT 128000

static AllocRecord *alloc_records;
static volatile int next_record;
static volatile int alloc_count;
void dump_alloc_records (void);
void verify_alloc_records (void);
static const char*
get_reason_name (AllocRecord *rec)
{
	switch (rec->reason) {
	case FIXED_ALLOC: return "fixed-alloc";
	case RANGE_ALLOC: return "range-alloc";
	case PINNING: return "pinning";
	case BLOCK_ZEROING: return "block-zeroing";
	case CLEAR_NURSERY_FRAGS: return "clear-nursery-frag";
	default: return "invalid";
	}
}
static void
reset_alloc_records (void)
{
	next_record = 0;
	alloc_count = 0;
}
static void
add_alloc_record (char *addr, size_t size, int reason)
{
	int idx = mono_atomic_inc_i32 (&next_record) - 1;
	alloc_records [idx].address = addr;
	alloc_records [idx].size = size;
	alloc_records [idx].reason = reason;
	alloc_records [idx].seq = idx;
	alloc_records [idx].tid = mono_native_thread_id_get ();
}
static int
comp_alloc_record (const void *_a, const void *_b)
{
	const AllocRecord *a = _a;
	const AllocRecord *b = _b;
	if (a->address == b->address)
		return a->seq - b->seq;
	return a->address - b->address;
}
#define rec_end(REC) ((REC)->address + (REC)->size)
void
dump_alloc_records (void)
{
	int i;

	sgen_qsort (alloc_records, next_record, sizeof (AllocRecord), comp_alloc_record);
	printf ("------------------------------------DUMP RECORDS----------------------------\n");
	for (i = 0; i < next_record; ++i) {
		AllocRecord *rec = alloc_records + i;
		printf ("obj [%p, %p] size %d reason %s seq %d tid %" G_GSIZE_FORMAT "x\n", rec->address, rec_end (rec), (int)rec->size, get_reason_name (rec), rec->seq, (size_t)rec->tid);
	}
}
void
verify_alloc_records (void)
{
	int i;
	int total = 0;
	int holes = 0;
	int max_hole = 0;
	AllocRecord *prev = NULL;

	sgen_qsort (alloc_records, next_record, sizeof (AllocRecord), comp_alloc_record);
	printf ("------------------------------------DUMP RECORDS- %d %d---------------------------\n", next_record, alloc_count);
	for (i = 0; i < next_record; ++i) {
		AllocRecord *rec = alloc_records + i;
		int hole_size = 0;
		total += rec->size;
		if (prev) {
			if (rec_end (prev) > rec->address)
				printf ("WE GOT OVERLAPPING objects %p and %p\n", prev->address, rec->address);
			if ((rec->address - rec_end (prev)) >= 8)
				++holes;
			hole_size = rec->address - rec_end (prev);
			max_hole = MAX (max_hole, hole_size);
		}
		printf ("obj [%p, %p] size %d hole to prev %d reason %s seq %d tid %" G_GSIZE_FORMAT "x\n", rec->address, rec_end (rec), (int)rec->size, hole_size, get_reason_name (rec), rec->seq, (size_t)rec->tid);
		prev = rec;
	}
	printf ("SUMMARY total alloc'd %d holes %d max_hole %d\n", total, holes, max_hole);
}

#endif /* NALLOC_DEBUG */
/*********************************************************************************/
static inline gpointer
mask (gpointer n, uintptr_t bit)
{
	return (gpointer)(((uintptr_t)n) | bit);
}

static inline gpointer
unmask (gpointer p)
{
	return (gpointer)((uintptr_t)p & ~(uintptr_t)0x3);
}

static inline uintptr_t
get_mark (gpointer n)
{
	return (uintptr_t)n & 0x1;
}
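/*
 * The helpers above implement pointer tagging for lock-free list
 * maintenance in the style of Michael's linked lists: the low bit of a
 * fragment's next pointer marks the node as logically deleted, so that
 * concurrent traversals can skip it before it is physically unlinked.
 * Sketch (n is a fragment, s its successor):
 *
 *	if (mono_atomic_cas_ptr ((volatile gpointer *)&n->next, mask (s, 1), s) == s)
 *		;	// n is now logically deleted; a later CAS on the
 *			// predecessor's pointer unlinks it physically
 */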
/* MUST be called with the world stopped */
static SgenFragment*
sgen_fragment_allocator_alloc (void)
{
	SgenFragment *frag = fragment_freelist;
	if (frag) {
		fragment_freelist = frag->next_in_order;
		frag->next = frag->next_in_order = NULL;
		return frag;
	}
	frag = (SgenFragment *)sgen_alloc_internal (INTERNAL_MEM_FRAGMENT);
	frag->next = frag->next_in_order = NULL;
	return frag;
}
void
sgen_fragment_allocator_add (SgenFragmentAllocator *allocator, char *start, char *end)
{
	SgenFragment *fragment;

	fragment = sgen_fragment_allocator_alloc ();
	fragment->fragment_start = start;
	fragment->fragment_next = start;
	fragment->fragment_end = end;
	fragment->next_in_order = fragment->next = (SgenFragment *)unmask (allocator->region_head);

	allocator->region_head = allocator->alloc_head = fragment;
	g_assert (fragment->fragment_end > fragment->fragment_start);
}
void
sgen_fragment_allocator_release (SgenFragmentAllocator *allocator)
{
	SgenFragment *last = allocator->region_head;
	if (!last)
		return;

	/* Find the last fragment in insertion order */
	for (; last->next_in_order; last = last->next_in_order)
		;

	last->next_in_order = fragment_freelist;
	fragment_freelist = allocator->region_head;
	allocator->alloc_head = allocator->region_head = NULL;
}
static SgenFragment**
find_previous_pointer_fragment (SgenFragmentAllocator *allocator, SgenFragment *frag)
{
	SgenFragment **prev;
	SgenFragment *cur, *next;
#ifdef NALLOC_DEBUG
	int count = 0;
#endif

try_again:
	prev = &allocator->alloc_head;
#ifdef NALLOC_DEBUG
	if (count++ > 5)
		printf ("retry count for fppf is %d\n", count);
#endif

	cur = (SgenFragment *)unmask (*prev);

	while (1) {
		if (cur == NULL)
			return NULL;
		next = cur->next;

		/*
		 * We need to make sure that we dereference prev below
		 * after reading cur->next above, so we need a read
		 * barrier.
		 */
		mono_memory_read_barrier ();

		if (*prev != cur)
			goto try_again;

		if (!get_mark (next)) {
			if (cur == frag)
				return prev;
			prev = &cur->next;
		} else {
			next = (SgenFragment *)unmask (next);
			if (mono_atomic_cas_ptr ((volatile gpointer *)prev, next, cur) != cur)
				goto try_again;
			/* we must make sure that the next from cur->next happens after */
			mono_memory_write_barrier ();
		}

		cur = (SgenFragment *)unmask (next);
	}
	return NULL;
}
static gboolean
claim_remaining_size (SgenFragment *frag, char *alloc_end)
{
	/* All space used, nothing to claim. */
	if (frag->fragment_end <= alloc_end)
		return FALSE;

	/* Try to alloc all the remaining space. */
	return mono_atomic_cas_ptr ((volatile gpointer *)&frag->fragment_next, frag->fragment_end, alloc_end) == alloc_end;
}
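/*
 * The parallel allocation below is a lock-free bump-pointer advance: read
 * fragment_next, compute the new end, and CAS fragment_next forward. In
 * outline (a sketch of the idea, not additional API):
 *
 *	p = frag->fragment_next;
 *	if (CAS (&frag->fragment_next, p + size, p) == p)
 *		return p;	// we own [p, p + size)
 *	return NULL;		// another thread advanced the pointer first
 */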
static void*
par_alloc_from_fragment (SgenFragmentAllocator *allocator, SgenFragment *frag, size_t size)
{
	char *p = frag->fragment_next;
	char *end = p + size;

	if (end > frag->fragment_end || end > (sgen_nursery_start + sgen_nursery_size))
		return NULL;

	/* p = frag->fragment_next must happen before */
	mono_memory_barrier ();

	if (mono_atomic_cas_ptr ((volatile gpointer *)&frag->fragment_next, end, p) != p)
		return NULL;

	if (frag->fragment_end - end < SGEN_MAX_NURSERY_WASTE) {
		SgenFragment *next, **prev_ptr;

		/*
		 * Before we clean the remaining nursery, we must claim the remaining space
		 * as it could end up being used by the range allocator, since it can end up
		 * allocating from this dying fragment as it doesn't respect SGEN_MAX_NURSERY_WASTE
		 * when doing second chance allocation.
		 */
		if ((sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION || sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION_DEBUG) && claim_remaining_size (frag, end)) {
			sgen_clear_range (end, frag->fragment_end);
			HEAVY_STAT (stat_wasted_bytes_trailer += frag->fragment_end - end);
#ifdef NALLOC_DEBUG
			add_alloc_record (end, frag->fragment_end - end, BLOCK_ZEROING);
#endif
		}

		prev_ptr = find_previous_pointer_fragment (allocator, frag);

		/* Use Michael's linked-list remove. */

		/* prev_ptr will be null if the fragment was removed concurrently */
		while (prev_ptr) {
			next = frag->next;

			/* already deleted? */
			if (!get_mark (next)) {
				/* frag->next read must happen before the first CAS */
				mono_memory_write_barrier ();

				/* Fail if the next node is removed concurrently and its CAS wins */
				if (mono_atomic_cas_ptr ((volatile gpointer *)&frag->next, mask (next, 1), next) != next)
					continue;
			}

			/* The second CAS must happen after the first CAS or frag->next. */
			mono_memory_write_barrier ();

			/* Fail if the previous node was deleted and its CAS wins */
			if (mono_atomic_cas_ptr ((volatile gpointer *)prev_ptr, unmask (next), frag) != frag) {
				prev_ptr = find_previous_pointer_fragment (allocator, frag);
				continue;
			}
			break;
		}
	}

	return p;
}
static void*
serial_alloc_from_fragment (SgenFragment **previous, SgenFragment *frag, size_t size)
{
	char *p = frag->fragment_next;
	char *end = p + size;

	if (end > frag->fragment_end)
		return NULL;

	frag->fragment_next = end;

	if (frag->fragment_end - end < SGEN_MAX_NURSERY_WASTE) {
		*previous = frag->next;

		/* Clear the remaining space, pinning depends on this. FIXME move this to use phony arrays */
		memset (end, 0, frag->fragment_end - end);

		*previous = frag->next;
	}

	return p;
}
void*
sgen_fragment_allocator_par_alloc (SgenFragmentAllocator *allocator, size_t size)
{
	SgenFragment *frag;

#ifdef NALLOC_DEBUG
	mono_atomic_inc_i32 (&alloc_count);
#endif

restart:
	for (frag = (SgenFragment *)unmask (allocator->alloc_head); unmask (frag); frag = (SgenFragment *)unmask (frag->next)) {
		size_t frag_size = frag->fragment_end - frag->fragment_next;

		if (frag->fragment_next >= (sgen_nursery_start + sgen_nursery_size))
			continue;

		HEAVY_STAT (++stat_alloc_iterations);

		if (size <= frag_size) {
			void *p = par_alloc_from_fragment (allocator, frag, size);
			if (!p) {
				HEAVY_STAT (++stat_alloc_retries);
				goto restart;
			}
#ifdef NALLOC_DEBUG
			add_alloc_record (p, size, FIXED_ALLOC);
#endif
			return p;
		}
	}
	return NULL;
}
void*
sgen_fragment_allocator_serial_range_alloc (SgenFragmentAllocator *allocator, size_t desired_size, size_t minimum_size, size_t *out_alloc_size)
{
	SgenFragment *frag, **previous, *min_frag = NULL, **prev_min_frag = NULL;
	size_t current_minimum = minimum_size;

#ifdef NALLOC_DEBUG
	mono_atomic_inc_i32 (&alloc_count);
#endif

	previous = &allocator->alloc_head;

	for (frag = *previous; frag; frag = *previous) {
		size_t frag_size = frag->fragment_end - frag->fragment_next;

		HEAVY_STAT (++stat_alloc_range_iterations);

		if (desired_size <= frag_size) {
			void *p;
			*out_alloc_size = desired_size;

			p = serial_alloc_from_fragment (previous, frag, desired_size);
#ifdef NALLOC_DEBUG
			add_alloc_record (p, desired_size, RANGE_ALLOC);
#endif
			return p;
		}
		if (current_minimum <= frag_size) {
			min_frag = frag;
			prev_min_frag = previous;
			current_minimum = frag_size;
		}
		previous = &frag->next;
	}

	if (min_frag) {
		void *p;
		size_t frag_size = min_frag->fragment_end - min_frag->fragment_next;
		*out_alloc_size = frag_size;

		p = serial_alloc_from_fragment (prev_min_frag, min_frag, frag_size);
#ifdef NALLOC_DEBUG
		add_alloc_record (p, frag_size, RANGE_ALLOC);
#endif
		return p;
	}

	return NULL;
}
void*
sgen_fragment_allocator_par_range_alloc (SgenFragmentAllocator *allocator, size_t desired_size, size_t minimum_size, size_t *out_alloc_size)
{
	SgenFragment *frag, *min_frag;
	size_t current_minimum;

restart:
	min_frag = NULL;
	current_minimum = minimum_size;

#ifdef NALLOC_DEBUG
	mono_atomic_inc_i32 (&alloc_count);
#endif

	for (frag = (SgenFragment *)unmask (allocator->alloc_head); frag; frag = (SgenFragment *)unmask (frag->next)) {
		size_t frag_size = frag->fragment_end - frag->fragment_next;

		if (frag->fragment_next >= (sgen_nursery_start + sgen_nursery_size))
			continue;

		HEAVY_STAT (++stat_alloc_range_iterations);

		if (desired_size <= frag_size) {
			void *p;
			*out_alloc_size = desired_size;

			p = par_alloc_from_fragment (allocator, frag, desired_size);
			if (!p) {
				HEAVY_STAT (++stat_alloc_range_retries);
				goto restart;
			}
#ifdef NALLOC_DEBUG
			add_alloc_record (p, desired_size, RANGE_ALLOC);
#endif
			return p;
		}
		if (current_minimum <= frag_size) {
			min_frag = frag;
			current_minimum = frag_size;
		}
	}

	/* The second fragment_next read should be ordered with respect to the first code block */
	mono_memory_barrier ();

	if (min_frag) {
		void *p;
		size_t frag_size = min_frag->fragment_end - min_frag->fragment_next;

		if (frag_size < minimum_size)
			goto restart;

		*out_alloc_size = frag_size;

		mono_memory_barrier ();
		p = par_alloc_from_fragment (allocator, min_frag, frag_size);

		/* XXX restarting here is quite dubious given this is already second chance allocation. */
		if (!p) {
			HEAVY_STAT (++stat_alloc_retries);
			goto restart;
		}
#ifdef NALLOC_DEBUG
		add_alloc_record (p, frag_size, RANGE_ALLOC);
#endif
		return p;
	}

	return NULL;
}
static void
sgen_clear_allocator_fragments (SgenFragmentAllocator *allocator)
{
	SgenFragment *frag;

	for (frag = (SgenFragment *)unmask (allocator->alloc_head); frag; frag = (SgenFragment *)unmask (frag->next)) {
		SGEN_LOG (4, "Clear nursery frag %p-%p", frag->fragment_next, frag->fragment_end);
		sgen_clear_range (frag->fragment_next, frag->fragment_end);
#ifdef NALLOC_DEBUG
		add_alloc_record (frag->fragment_next, frag->fragment_end - frag->fragment_next, CLEAR_NURSERY_FRAGS);
#endif
	}
}
/* Clear all remaining nursery fragments */
void
sgen_clear_nursery_fragments (void)
{
	if (sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION || sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION_DEBUG) {
		sgen_clear_allocator_fragments (&mutator_allocator);
		sgen_minor_collector.clear_fragments ();
	}
}
/*
 * Mark a given range of memory as invalid.
 *
 * This can be done either by zeroing memory or by placing
 * a phony byte[] array. This keeps the heap forward walkable.
 *
 * This function ignores calls with a zero range, even if
 * both start and end are NULL.
 */
void
sgen_clear_range (char *start, char *end)
{
	size_t size = end - start;

	if ((start && !end) || (start > end))
		g_error ("Invalid range [%p %p]", start, end);

	if (sgen_client_array_fill_range (start, size)) {
		sgen_set_nursery_scan_start (start);
		SGEN_ASSERT (0, start + sgen_safe_object_get_size ((GCObject *)start) == end, "Array fill produced wrong size");
	}
}
void
sgen_nursery_allocator_prepare_for_pinning (void)
{
	sgen_clear_allocator_fragments (&mutator_allocator);
	sgen_minor_collector.clear_fragments ();
}
static mword fragment_total = 0;

/*
 * We found a fragment of free memory in the nursery: memzero it and if
 * it is big enough, add it to the list of fragments that can be used for
 * allocation.
 */
static void
add_nursery_frag (SgenFragmentAllocator *allocator, size_t frag_size, char* frag_start, char* frag_end)
{
	SGEN_LOG (4, "Found empty fragment: %p-%p, size: %" G_GSIZE_FORMAT "d", frag_start, frag_end, frag_size);
	sgen_binary_protocol_empty (frag_start, frag_size);
	/* Not worth dealing with smaller fragments: need to tune */
	if (frag_size >= SGEN_MAX_NURSERY_WASTE) {
		/* memsetting just the first chunk start is bound to provide better cache locality */
		if (sgen_get_nursery_clear_policy () == CLEAR_AT_GC)
			memset (frag_start, 0, frag_size);
		else if (sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION_DEBUG)
			memset (frag_start, 0xff, frag_size);

#ifdef NALLOC_DEBUG
		/* XXX convert this into a flight record entry
		printf ("\tfragment [%p %p] size %" G_GSIZE_FORMAT "d\n", frag_start, frag_end, frag_size);
		*/
#endif
		sgen_fragment_allocator_add (allocator, frag_start, frag_end);
		fragment_total += frag_size;
	} else {
		/* Clear unused fragments, pinning depends on this */
		sgen_clear_range (frag_start, frag_end);
		HEAVY_STAT (stat_wasted_bytes_small_areas += frag_size);
	}
}
static void
fragment_list_reverse (SgenFragmentAllocator *allocator)
{
	SgenFragment *prev = NULL, *list = allocator->region_head;
	while (list) {
		SgenFragment *next = list->next;
		list->next = prev;
		list->next_in_order = prev;
		prev = list;
		list = next;
	}

	allocator->region_head = allocator->alloc_head = prev;
}
/*
 * We split fragments at the border of the current nursery limit. When we
 * allocate from the nursery we only consider fragments that start in the
 * current nursery section. We build fragments for the entire nursery in
 * order to facilitate scanning it for objects (adding a nursery frag also
 * marks a region in the nursery as being free).
 */
static void
add_nursery_frag_checks (SgenFragmentAllocator *allocator, char *frag_start, char *frag_end)
{
	char *nursery_limit = sgen_nursery_start + sgen_nursery_size;

	if (frag_start < nursery_limit && frag_end > nursery_limit) {
		add_nursery_frag (allocator, nursery_limit - frag_start, frag_start, nursery_limit);
		add_nursery_frag (allocator, frag_end - nursery_limit, nursery_limit, frag_end);
	} else {
		add_nursery_frag (allocator, frag_end - frag_start, frag_start, frag_end);
	}
}
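/*
 * Worked example for the split above (addresses are illustrative): if the
 * current nursery limit is at start + 4 MB but a free region spans
 * [start + 3.9 MB, start + 4.2 MB), it is recorded as two fragments,
 * [3.9 MB, 4 MB) and [4 MB, 4.2 MB), so allocation only ever sees
 * fragments that begin inside the current nursery section.
 */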
mword
sgen_build_nursery_fragments (GCMemSection *nursery_section)
{
	char *frag_start, *frag_end;
	size_t frag_size;
	SgenFragment *frags_ranges;
	void **pin_start, **pin_entry, **pin_end;

#ifdef NALLOC_DEBUG
	reset_alloc_records ();
#endif
	/* The mutator fragments are done. We no longer need them. */
	sgen_fragment_allocator_release (&mutator_allocator);

	frag_start = sgen_nursery_start;
	fragment_total = 0;

	/* The current nursery might give us a fragment list to exclude [start, next[ */
	frags_ranges = sgen_minor_collector.build_fragments_get_exclude_head ();

	/* clear scan starts */
	memset (nursery_section->scan_starts, 0, nursery_section->num_scan_start * sizeof (gpointer));

	pin_start = pin_entry = sgen_pinning_get_entry (nursery_section->pin_queue_first_entry);
	pin_end = sgen_pinning_get_entry (nursery_section->pin_queue_last_entry);

	while (pin_entry < pin_end || frags_ranges) {
		char *addr0, *addr1;
		size_t size;

		addr0 = addr1 = sgen_nursery_end;
		if (pin_entry < pin_end)
			addr0 = (char *)*pin_entry;
		if (frags_ranges)
			addr1 = frags_ranges->fragment_start;

		if (addr0 < addr1) {
			/* The next hole is a pinned object: unpin it and skip over it. */
			SGEN_UNPIN_OBJECT (addr0);
			size = SGEN_ALIGN_UP (sgen_safe_object_get_size ((GCObject *)addr0));
			CANARIFY_SIZE (size);
			sgen_set_nursery_scan_start (addr0);
			frag_end = addr0;
			++pin_entry;
		} else {
			/* The next hole is an excluded range: skip over it. */
			frag_end = addr1;
			size = frags_ranges->fragment_next - addr1;
			frags_ranges = frags_ranges->next_in_order;
		}

		frag_size = frag_end - frag_start;

		g_assert (frag_size >= 0);
		if (frag_size && size)
			add_nursery_frag_checks (&mutator_allocator, frag_start, frag_end);

		frag_size = size;
#ifdef NALLOC_DEBUG
		add_alloc_record (*pin_entry, frag_size, PINNING);
#endif
		frag_start = frag_end + frag_size;
	}

	frag_end = sgen_nursery_end;
	frag_size = frag_end - frag_start;
	if (frag_size)
		add_nursery_frag_checks (&mutator_allocator, frag_start, frag_end);

	/* Now it's safe to release the fragments exclude list. */
	sgen_minor_collector.build_fragments_release_exclude_head ();

	/* First we reorder the fragment list to be in ascending address order. This makes H/W prefetchers happier. */
	fragment_list_reverse (&mutator_allocator);

	/* The collector might want to do something with the final nursery fragment list. */
	sgen_minor_collector.build_fragments_finish (&mutator_allocator);

	if (!unmask (mutator_allocator.alloc_head)) {
		SGEN_LOG (1, "Nursery fully pinned");
		for (pin_entry = pin_start; pin_entry < pin_end; ++pin_entry) {
			GCObject *p = (GCObject *)*pin_entry;
			SGEN_LOG (3, "Bad pinning obj %p (%s), size: %ld", p, sgen_client_vtable_get_name (SGEN_LOAD_VTABLE (p)), (long)sgen_safe_object_get_size (p));
		}
	}
	return fragment_total;
}
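/*
 * After sgen_build_nursery_fragments the nursery looks like this (sketch):
 *
 *	|--frag--|pinned|-----frag-----|excluded|pinned|--frag--|
 *
 * Every hole between fragments is either a pinned object or an excluded
 * range, and every discarded sliver has been zeroed or covered by a phony
 * array, so the whole nursery remains forward walkable.
 */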
/*** Nursery memory allocation ***/
void
sgen_nursery_retire_region (void *address, ptrdiff_t size)
{
	HEAVY_STAT (stat_wasted_bytes_discarded_fragments += size);
}
gboolean
sgen_can_alloc_size (size_t size)
{
	SgenFragment *frag;

	if (!SGEN_CAN_ALIGN_UP (size))
		return FALSE;

	size = SGEN_ALIGN_UP (size);

	for (frag = (SgenFragment *)unmask (mutator_allocator.alloc_head); frag; frag = (SgenFragment *)unmask (frag->next)) {
		if ((size_t)(frag->fragment_end - frag->fragment_next) >= size)
			return TRUE;
	}
	return FALSE;
}
void*
sgen_nursery_alloc (size_t size)
{
	SGEN_ASSERT (1, size >= (SGEN_CLIENT_MINIMUM_OBJECT_SIZE + CANARY_SIZE) && size <= (SGEN_MAX_SMALL_OBJ_SIZE + CANARY_SIZE), "Invalid nursery object size");

	SGEN_LOG (4, "Searching nursery for size: %" G_GSIZE_FORMAT "d", size);
	size = SGEN_ALIGN_UP (size);

	HEAVY_STAT (++stat_nursery_alloc_requests);

	return sgen_fragment_allocator_par_alloc (&mutator_allocator, size);
}
void*
sgen_nursery_alloc_range (size_t desired_size, size_t minimum_size, size_t *out_alloc_size)
{
	SGEN_LOG (4, "Searching for byte range desired size: %" G_GSIZE_FORMAT "d minimum size %" G_GSIZE_FORMAT "d", desired_size, minimum_size);

	HEAVY_STAT (++stat_nursery_alloc_range_requests);

	return sgen_fragment_allocator_par_range_alloc (&mutator_allocator, desired_size, minimum_size, out_alloc_size);
}
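/*
 * Typical use (a sketch; sizes are illustrative): callers that refill a
 * TLAB ask for a range rather than an exact size, so a smaller leftover
 * fragment can still satisfy them:
 *
 *	size_t alloc_size;
 *	void *tlab = sgen_nursery_alloc_range (32 * 1024, 1024, &alloc_size);
 *	if (tlab)
 *		;	// [tlab, (char *)tlab + alloc_size) is private to the caller
 */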
/*** Initialization ***/
#ifdef HEAVY_STATISTICS

void
sgen_nursery_allocator_init_heavy_stats (void)
{
	mono_counters_register ("bytes wasted trailer fragments", MONO_COUNTER_GC | MONO_COUNTER_WORD | MONO_COUNTER_BYTES, &stat_wasted_bytes_trailer);
	mono_counters_register ("bytes wasted small areas", MONO_COUNTER_GC | MONO_COUNTER_WORD | MONO_COUNTER_BYTES, &stat_wasted_bytes_small_areas);
	mono_counters_register ("bytes wasted discarded fragments", MONO_COUNTER_GC | MONO_COUNTER_WORD | MONO_COUNTER_BYTES, &stat_wasted_bytes_discarded_fragments);

	mono_counters_register ("# nursery alloc requests", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_nursery_alloc_requests);
	mono_counters_register ("# nursery alloc iterations", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_alloc_iterations);
	mono_counters_register ("# nursery alloc retries", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_alloc_retries);

	mono_counters_register ("# nursery alloc range requests", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_nursery_alloc_range_requests);
	mono_counters_register ("# nursery alloc range iterations", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_alloc_range_iterations);
	mono_counters_register ("# nursery alloc range retries", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_alloc_range_retries);
}

#endif
void
sgen_init_nursery_allocator (void)
{
	sgen_register_fixed_internal_mem_type (INTERNAL_MEM_FRAGMENT, sizeof (SgenFragment));
#ifdef NALLOC_DEBUG
	alloc_records = sgen_alloc_os_memory (sizeof (AllocRecord) * ALLOC_RECORD_COUNT, SGEN_ALLOC_INTERNAL | SGEN_ALLOC_ACTIVATE, "debugging memory");
#endif
}
void
sgen_nursery_alloc_prepare_for_minor (void)
{
	sgen_minor_collector.prepare_to_space (sgen_space_bitmap, sgen_space_bitmap_size);
}

void
sgen_nursery_alloc_prepare_for_major (void)
{
	sgen_minor_collector.prepare_to_space (sgen_space_bitmap, sgen_space_bitmap_size);
}
void
sgen_nursery_allocator_set_nursery_bounds (char *start, size_t min_size, size_t max_size)
{
	sgen_nursery_start = start;
	sgen_nursery_end = start + max_size;

	sgen_nursery_size = min_size;
	sgen_nursery_min_size = min_size;
	sgen_nursery_max_size = max_size;

	sgen_nursery_bits = 0;
	while (ONE_P << (++ sgen_nursery_bits) != sgen_nursery_max_size)
		;

	/*
	 * This will not divide evenly for tiny nurseries (<4kb), so we make sure to be on
	 * the right side of things and round up. We could just do a MAX(1,x) instead,
	 * since the nursery size must be a power of 2.
	 */
	sgen_space_bitmap_size = (sgen_nursery_end - sgen_nursery_start + SGEN_TO_SPACE_GRANULE_IN_BYTES * 8 - 1) / (SGEN_TO_SPACE_GRANULE_IN_BYTES * 8);
	sgen_space_bitmap = (char *)g_malloc0 (sgen_space_bitmap_size);

	/* Setup the single first large fragment */
	sgen_minor_collector.init_nursery (&mutator_allocator, sgen_nursery_start, sgen_nursery_end);
}
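/*
 * Worked example for the bitmap sizing above (granule size is
 * illustrative): with a 4 MB nursery and 4096-byte granules, each bitmap
 * byte covers 8 granules = 32768 bytes, so sgen_space_bitmap_size =
 * (4194304 + 32767) / 32768 = 128 bytes.
 */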
void
sgen_resize_nursery (gboolean need_shrink)
{
	size_t major_size;

	if (sgen_nursery_min_size == sgen_nursery_max_size)
		return;

	major_size = sgen_major_collector.get_num_major_sections () * sgen_major_collector.section_size + sgen_los_memory_usage;
	/*
	 * We attempt to use a larger nursery size, as long as it doesn't
	 * exceed a certain percentage of the major heap.
	 *
	 * FIXME
	 * Commit memory when expanding and release it when shrinking (which
	 * would only be possible if there aren't any pinned objects in the
	 * section).
	 */
	if ((sgen_nursery_size * 2) < (major_size / SGEN_DEFAULT_ALLOWANCE_NURSERY_SIZE_RATIO) &&
			(sgen_nursery_size * 2) <= sgen_nursery_max_size && !need_shrink) {
		if ((sgen_nursery_section->end_data - sgen_nursery_section->data) == sgen_nursery_size)
			sgen_nursery_section->end_data += sgen_nursery_size;
		sgen_nursery_size *= 2;
	} else if ((sgen_nursery_size > (major_size / SGEN_DEFAULT_ALLOWANCE_NURSERY_SIZE_RATIO) || need_shrink) &&
			(sgen_nursery_size / 2) >= sgen_nursery_min_size) {
		sgen_nursery_size /= 2;
	}
}