/**
 * \file
 * Nursery allocation code.
 *
 * Copyright 2009-2010 Novell, Inc.
 *           2011 Rodrigo Kumpera
 *
 * Copyright 2011 Xamarin Inc (http://www.xamarin.com)
 * Copyright (C) 2012 Xamarin Inc
 *
 * Licensed under the MIT license. See LICENSE file in the project root for full license information.
 */

/*
 * The young generation is divided into fragments. This is because
 * we can hand one fragment to a thread for lock-less fast alloc and
 * because the young generation ends up fragmented anyway by pinned objects.
 * Once a collection is done, a list of fragments is created. When doing
 * thread-local alloc we use smallish nurseries so we allow new threads to
 * allocate memory from gen0 without triggering a collection. Threads that
 * are found to allocate lots of memory are given bigger fragments. This
 * should make the finalizer thread use little nursery memory after a while.
 * We should start assigning threads very small fragments: if there are many
 * threads the nursery will be full of reserved space that the threads may not
 * use at all, slowing down allocation speed.
 * Thread-local allocation is done from areas of memory HotSpot calls Thread Local
 * Allocation Buffers (TLABs).
 */
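/*
 * Illustrative only (not part of this file): the TLAB fast path described
 * above amounts to a pointer bump with no locking, since a TLAB is owned by
 * a single thread. Field names here are hypothetical; the real per-thread
 * state lives in the client's thread info structure.
 *
 *	char *p = tlab_next;
 *	char *new_next = p + size;	// size already aligned
 *	if (new_next <= tlab_real_end) {
 *		tlab_next = new_next;	// thread-local: no atomics needed
 *		return (GCObject *)p;
 *	}
 *	// Slow path: take a new fragment from the allocator below, or collect.
 */
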
#include "config.h"
#ifdef HAVE_SGEN_GC

#ifdef HAVE_UNISTD_H
#include <unistd.h>
#endif
#ifdef HAVE_PTHREAD_H
#include <pthread.h>
#endif
#ifdef HAVE_SEMAPHORE_H
#include <semaphore.h>
#endif
#include <stdio.h>
#include <string.h>
#include <errno.h>
#include <assert.h>
#ifdef __MACH__
#undef _XOPEN_SOURCE
#endif
#ifdef __MACH__
#define _XOPEN_SOURCE
#endif

#include "mono/sgen/sgen-gc.h"
#include "mono/sgen/sgen-cardtable.h"
#include "mono/sgen/sgen-protocol.h"
#include "mono/sgen/sgen-memory-governor.h"
#include "mono/sgen/sgen-pinning.h"
#include "mono/sgen/sgen-client.h"
#include "mono/utils/mono-membar.h"

/* Enable this to collect nursery allocation diagnostic data. */
//#define NALLOC_DEBUG 1

/* The mutator allocs from here. */
static SgenFragmentAllocator mutator_allocator;

/* freelist of fragment structures */
static SgenFragment *fragment_freelist = NULL;

char *sgen_nursery_start;
char *sgen_nursery_end;

/* Good sizes are 512KB-1MB: larger ones greatly increase memzeroing time. */
size_t sgen_nursery_size;
/*
 * Maximum size that we can resize the nursery to.
 * If sgen_nursery_default_size == sgen_nursery_max_size then we are not
 * dynamically resizing the nursery.
 */
size_t sgen_nursery_max_size;
size_t sgen_nursery_min_size;
/* The number of trailing 0 bits in sgen_nursery_max_size */
int sgen_nursery_bits;

char *sgen_space_bitmap;
size_t sgen_space_bitmap_size;

#ifdef HEAVY_STATISTICS

static mword stat_wasted_bytes_trailer = 0;
static mword stat_wasted_bytes_small_areas = 0;
static mword stat_wasted_bytes_discarded_fragments = 0;
static guint64 stat_nursery_alloc_requests = 0;
static guint64 stat_alloc_iterations = 0;
static guint64 stat_alloc_retries = 0;

static guint64 stat_nursery_alloc_range_requests = 0;
static guint64 stat_alloc_range_iterations = 0;
static guint64 stat_alloc_range_retries = 0;

#endif

/************************** Nursery allocation debugging **************************/

#ifdef NALLOC_DEBUG

enum {
	FIXED_ALLOC = 1,
	RANGE_ALLOC,
	PINNING,
	BLOCK_ZEROING,
	CLEAR_NURSERY_FRAGS
};

typedef struct {
	char *address;
	size_t size;
	int reason;
	int seq;
	MonoNativeThreadId tid;
} AllocRecord;

#define ALLOC_RECORD_COUNT 128000

static AllocRecord *alloc_records;
static volatile int next_record;
static volatile int alloc_count;

void dump_alloc_records (void);
void verify_alloc_records (void);

static const char*
get_reason_name (AllocRecord *rec)
{
	switch (rec->reason) {
	case FIXED_ALLOC: return "fixed-alloc";
	case RANGE_ALLOC: return "range-alloc";
	case PINNING: return "pinning";
	case BLOCK_ZEROING: return "block-zeroing";
	case CLEAR_NURSERY_FRAGS: return "clear-nursery-frag";
	default: return "invalid";
	}
}

static void
reset_alloc_records (void)
{
	next_record = 0;
	alloc_count = 0;
}

static void
add_alloc_record (char *addr, size_t size, int reason)
{
	int idx = mono_atomic_inc_i32 (&next_record) - 1;
	alloc_records [idx].address = addr;
	alloc_records [idx].size = size;
	alloc_records [idx].reason = reason;
	alloc_records [idx].seq = idx;
	alloc_records [idx].tid = mono_native_thread_id_get ();
}

static int
comp_alloc_record (const void *_a, const void *_b)
{
	const AllocRecord *a = _a;
	const AllocRecord *b = _b;
	/* Compare addresses without truncating the pointer difference to int. */
	if (a->address != b->address)
		return a->address < b->address ? -1 : 1;
	return a->seq - b->seq;
}

#define rec_end(REC) ((REC)->address + (REC)->size)

void
dump_alloc_records (void)
{
	int i;
	sgen_qsort (alloc_records, next_record, sizeof (AllocRecord), comp_alloc_record);

	printf ("------------------------------------DUMP RECORDS----------------------------\n");
	for (i = 0; i < next_record; ++i) {
		AllocRecord *rec = alloc_records + i;
		printf ("obj [%p, %p] size %d reason %s seq %d tid %" G_GSIZE_FORMAT "x\n", rec->address, rec_end (rec), (int)rec->size, get_reason_name (rec), rec->seq, (size_t)rec->tid);
	}
}

void
verify_alloc_records (void)
{
	int i;
	int total = 0;
	int holes = 0;
	int max_hole = 0;
	AllocRecord *prev = NULL;

	sgen_qsort (alloc_records, next_record, sizeof (AllocRecord), comp_alloc_record);
	printf ("------------------------------------VERIFY RECORDS- %d %d---------------------------\n", next_record, alloc_count);
	for (i = 0; i < next_record; ++i) {
		AllocRecord *rec = alloc_records + i;
		int hole_size = 0;
		total += rec->size;
		if (prev) {
			if (rec_end (prev) > rec->address)
				printf ("WE GOT OVERLAPPING objects %p and %p\n", prev->address, rec->address);
			if ((rec->address - rec_end (prev)) >= 8)
				++holes;
			hole_size = rec->address - rec_end (prev);
			max_hole = MAX (max_hole, hole_size);
		}
		printf ("obj [%p, %p] size %d hole to prev %d reason %s seq %d tid %" G_GSIZE_FORMAT "x\n", rec->address, rec_end (rec), (int)rec->size, hole_size, get_reason_name (rec), rec->seq, (size_t)rec->tid);
		prev = rec;
	}
	printf ("SUMMARY total alloc'd %d holes %d max_hole %d\n", total, holes, max_hole);
}

#endif

/*********************************************************************************/

static gpointer
mask (gpointer n, uintptr_t bit)
{
	return (gpointer)(((uintptr_t)n) | bit);
}

static gpointer
unmask (gpointer p)
{
	return (gpointer)((uintptr_t)p & ~(uintptr_t)0x3);
}

static uintptr_t
get_mark (gpointer n)
{
	return (uintptr_t)n & 0x1;
}
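/*
 * Example of the tagging scheme above: SgenFragment structures are at least
 * 4-byte aligned, so the two low bits of a `next' link are normally zero and
 * bit 0 can carry a logical-deletion mark (Harris/Michael-style lock-free
 * lists). With an illustrative address:
 *
 *	frag->next == (SgenFragment *)0x7f3000	live link
 *	mask (frag->next, 1) == 0x7f3001	node being removed
 *
 * unmask () strips the tag bits; get_mark () tests the deletion bit.
 */
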
/* MUST be called with the world stopped */
SgenFragment*
sgen_fragment_allocator_alloc (void)
{
	SgenFragment *frag = fragment_freelist;
	if (frag) {
		fragment_freelist = frag->next_in_order;
		frag->next = frag->next_in_order = NULL;
		return frag;
	}
	frag = (SgenFragment *)sgen_alloc_internal (INTERNAL_MEM_FRAGMENT);
	frag->next = frag->next_in_order = NULL;
	return frag;
}

void
sgen_fragment_allocator_add (SgenFragmentAllocator *allocator, char *start, char *end)
{
	SgenFragment *fragment;

	fragment = sgen_fragment_allocator_alloc ();
	fragment->fragment_start = start;
	fragment->fragment_next = start;
	fragment->fragment_end = end;
	fragment->next_in_order = fragment->next = (SgenFragment *)unmask (allocator->region_head);

	allocator->region_head = allocator->alloc_head = fragment;
	g_assert (fragment->fragment_end > fragment->fragment_start);
}

void
sgen_fragment_allocator_release (SgenFragmentAllocator *allocator)
{
	SgenFragment *last = allocator->region_head;
	if (!last)
		return;

	/* Find the last fragment in insertion order. */
	for (; last->next_in_order; last = last->next_in_order)
		;

	last->next_in_order = fragment_freelist;
	fragment_freelist = allocator->region_head;
	allocator->alloc_head = allocator->region_head = NULL;
}

static SgenFragment**
find_previous_pointer_fragment (SgenFragmentAllocator *allocator, SgenFragment *frag)
{
	SgenFragment **prev;
	SgenFragment *cur, *next;
#ifdef NALLOC_DEBUG
	int count = 0;
#endif

try_again:
	prev = &allocator->alloc_head;
#ifdef NALLOC_DEBUG
	if (count++ > 5)
		printf ("retry count for fppf is %d\n", count);
#endif

	cur = (SgenFragment *)unmask (*prev);

	while (1) {
		if (cur == NULL)
			return NULL;
		next = cur->next;

		/*
		 * We need to make sure that we dereference prev below
		 * after reading cur->next above, so we need a read
		 * barrier.
		 */
		mono_memory_read_barrier ();

		if (*prev != cur)
			goto try_again;

		if (!get_mark (next)) {
			if (cur == frag)
				return prev;
			prev = &cur->next;
		} else {
			next = (SgenFragment *)unmask (next);
			if (mono_atomic_cas_ptr ((volatile gpointer*)prev, next, cur) != cur)
				goto try_again;
			/* Make sure the unlink of cur above is visible before we keep walking. */
			mono_memory_write_barrier ();
		}
		cur = (SgenFragment *)unmask (next);
	}
	return NULL;
}

static gboolean
claim_remaining_size (SgenFragment *frag, char *alloc_end)
{
	/* All space used, nothing to claim. */
	if (frag->fragment_end <= alloc_end)
		return FALSE;

	/* Try to alloc all the remaining space. */
	return mono_atomic_cas_ptr ((volatile gpointer*)&frag->fragment_next, frag->fragment_end, alloc_end) == alloc_end;
}
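/*
 * Note on claim_remaining_size: the CAS succeeds only if fragment_next still
 * equals alloc_end, i.e. no other thread bump-allocated past our allocation
 * in the meantime. On success the whole tail [alloc_end, fragment_end) is
 * owned by the caller, which can then safely zero and retire it.
 */
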
static void*
par_alloc_from_fragment (SgenFragmentAllocator *allocator, SgenFragment *frag, size_t size)
{
	char *p = frag->fragment_next;
	char *end = p + size;

	if (end > frag->fragment_end || end > (sgen_nursery_start + sgen_nursery_size))
		return NULL;

	/* p = frag->fragment_next must happen before */
	mono_memory_barrier ();

	if (mono_atomic_cas_ptr ((volatile gpointer*)&frag->fragment_next, end, p) != p)
		return NULL;

	if (frag->fragment_end - end < SGEN_MAX_NURSERY_WASTE) {
		SgenFragment *next, **prev_ptr;

		/*
		 * Before we clean the remaining nursery, we must claim the remaining space
		 * as it could end up being used by the range allocator, since it can end up
		 * allocating from this dying fragment as it doesn't respect SGEN_MAX_NURSERY_WASTE
		 * when doing second-chance allocation.
		 */
		if ((sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION || sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION_DEBUG) && claim_remaining_size (frag, end)) {
			sgen_clear_range (end, frag->fragment_end);
			HEAVY_STAT (stat_wasted_bytes_trailer += frag->fragment_end - end);
#ifdef NALLOC_DEBUG
			add_alloc_record (end, frag->fragment_end - end, BLOCK_ZEROING);
#endif
		}

		prev_ptr = find_previous_pointer_fragment (allocator, frag);

		/* Use Michael's lock-free linked-list removal. */

		/* prev_ptr will be NULL if the fragment was removed concurrently. */
		while (prev_ptr) {
			next = frag->next;

			/* Not yet marked as deleted: try to set the deletion mark. */
			if (!get_mark (next)) {
				/* The frag->next read must happen before the first CAS. */
				mono_memory_write_barrier ();

				/* Fail if the next node was removed concurrently and its CAS won. */
				if (mono_atomic_cas_ptr ((volatile gpointer*)&frag->next, mask (next, 1), next) != next) {
					continue;
				}
			}

			/* The second CAS must happen after the first CAS or frag->next. */
			mono_memory_write_barrier ();

			/* Fail if the previous node was deleted and its CAS won. */
			if (mono_atomic_cas_ptr ((volatile gpointer*)prev_ptr, unmask (next), frag) != frag) {
				prev_ptr = find_previous_pointer_fragment (allocator, frag);
				continue;
			}
			break;
		}
	}

	return p;
}

static void*
serial_alloc_from_fragment (SgenFragment **previous, SgenFragment *frag, size_t size)
{
	char *p = frag->fragment_next;
	char *end = p + size;

	if (end > frag->fragment_end)
		return NULL;

	frag->fragment_next = end;

	if (frag->fragment_end - end < SGEN_MAX_NURSERY_WASTE) {
		*previous = frag->next;

		/* Clear the remaining space, pinning depends on this. FIXME move this to use phony arrays */
		memset (end, 0, frag->fragment_end - end);
	}

	return p;
}

void*
sgen_fragment_allocator_par_alloc (SgenFragmentAllocator *allocator, size_t size)
{
	SgenFragment *frag;

#ifdef NALLOC_DEBUG
	mono_atomic_inc_i32 (&alloc_count);
#endif

restart:
	for (frag = (SgenFragment *)unmask (allocator->alloc_head); unmask (frag); frag = (SgenFragment *)unmask (frag->next)) {
		size_t frag_size = frag->fragment_end - frag->fragment_next;

		if (frag->fragment_next >= (sgen_nursery_start + sgen_nursery_size))
			continue;

		HEAVY_STAT (++stat_alloc_iterations);

		if (size <= frag_size) {
			void *p = par_alloc_from_fragment (allocator, frag, size);
			if (!p) {
				HEAVY_STAT (++stat_alloc_retries);
				goto restart;
			}
#ifdef NALLOC_DEBUG
			add_alloc_record (p, size, FIXED_ALLOC);
#endif
			return p;
		}
	}
	return NULL;
}

void*
sgen_fragment_allocator_serial_range_alloc (SgenFragmentAllocator *allocator, size_t desired_size, size_t minimum_size, size_t *out_alloc_size)
{
	SgenFragment *frag, **previous, *min_frag = NULL, **prev_min_frag = NULL;
	size_t current_minimum = minimum_size;

#ifdef NALLOC_DEBUG
	mono_atomic_inc_i32 (&alloc_count);
#endif

	previous = &allocator->alloc_head;

	for (frag = *previous; frag; frag = *previous) {
		size_t frag_size = frag->fragment_end - frag->fragment_next;

		HEAVY_STAT (++stat_alloc_range_iterations);

		if (desired_size <= frag_size) {
			void *p;
			*out_alloc_size = desired_size;

			p = serial_alloc_from_fragment (previous, frag, desired_size);
#ifdef NALLOC_DEBUG
			add_alloc_record (p, desired_size, RANGE_ALLOC);
#endif
			return p;
		}
		if (current_minimum <= frag_size) {
			min_frag = frag;
			prev_min_frag = previous;
			current_minimum = frag_size;
		}
		previous = &frag->next;
	}

	if (min_frag) {
		void *p;
		size_t frag_size = min_frag->fragment_end - min_frag->fragment_next;
		*out_alloc_size = frag_size;

		p = serial_alloc_from_fragment (prev_min_frag, min_frag, frag_size);

#ifdef NALLOC_DEBUG
		add_alloc_record (p, frag_size, RANGE_ALLOC);
#endif
		return p;
	}

	return NULL;
}

void*
sgen_fragment_allocator_par_range_alloc (SgenFragmentAllocator *allocator, size_t desired_size, size_t minimum_size, size_t *out_alloc_size)
{
	SgenFragment *frag, *min_frag;
	size_t current_minimum;

restart:
	min_frag = NULL;
	current_minimum = minimum_size;

#ifdef NALLOC_DEBUG
	mono_atomic_inc_i32 (&alloc_count);
#endif

	for (frag = (SgenFragment *)unmask (allocator->alloc_head); frag; frag = (SgenFragment *)unmask (frag->next)) {
		size_t frag_size = frag->fragment_end - frag->fragment_next;

		if (frag->fragment_next >= (sgen_nursery_start + sgen_nursery_size))
			continue;

		HEAVY_STAT (++stat_alloc_range_iterations);

		if (desired_size <= frag_size) {
			void *p;
			*out_alloc_size = desired_size;

			p = par_alloc_from_fragment (allocator, frag, desired_size);
			if (!p) {
				HEAVY_STAT (++stat_alloc_range_retries);
				goto restart;
			}
#ifdef NALLOC_DEBUG
			add_alloc_record (p, desired_size, RANGE_ALLOC);
#endif
			return p;
		}
		if (current_minimum <= frag_size) {
			min_frag = frag;
			current_minimum = frag_size;
		}
	}

	/* The second fragment_next read should be ordered with respect to the first code block. */
	mono_memory_barrier ();

	if (min_frag) {
		void *p;
		size_t frag_size = min_frag->fragment_end - min_frag->fragment_next;

		if (frag_size < minimum_size)
			goto restart;

		*out_alloc_size = frag_size;

		mono_memory_barrier ();
		p = par_alloc_from_fragment (allocator, min_frag, frag_size);

		/* XXX restarting here is quite dubious given this is already second-chance allocation. */
		if (!p) {
			HEAVY_STAT (++stat_alloc_retries);
			goto restart;
		}
#ifdef NALLOC_DEBUG
		add_alloc_record (p, frag_size, RANGE_ALLOC);
#endif
		return p;
	}

	return NULL;
}

void
sgen_clear_allocator_fragments (SgenFragmentAllocator *allocator)
{
	SgenFragment *frag;

	for (frag = (SgenFragment *)unmask (allocator->alloc_head); frag; frag = (SgenFragment *)unmask (frag->next)) {
		SGEN_LOG (4, "Clear nursery frag %p-%p", frag->fragment_next, frag->fragment_end);
		sgen_clear_range (frag->fragment_next, frag->fragment_end);
#ifdef NALLOC_DEBUG
		add_alloc_record (frag->fragment_next, frag->fragment_end - frag->fragment_next, CLEAR_NURSERY_FRAGS);
#endif
	}
}

/* Clear all remaining nursery fragments */
void
sgen_clear_nursery_fragments (void)
{
	if (sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION || sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION_DEBUG) {
		sgen_clear_allocator_fragments (&mutator_allocator);
		sgen_minor_collector.clear_fragments ();
	}
}

/*
 * Mark a given range of memory as invalid.
 *
 * This can be done either by zeroing memory or by placing
 * a phony byte[] array. This keeps the heap forward walkable.
 *
 * This function ignores calls with a zero range, even if
 * both start and end are NULL.
 */
void
sgen_clear_range (char *start, char *end)
{
	size_t size = end - start;

	if ((start && !end) || (start > end))
		g_error ("Invalid range [%p %p]", start, end);

	if (sgen_client_array_fill_range (start, size)) {
		sgen_set_nursery_scan_start (start);
		SGEN_ASSERT (0, start + sgen_safe_object_get_size ((GCObject*)start) == end, "Array fill produced wrong size");
	}
}
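/*
 * Forward walkability, illustrated: if [start, end) is filled with a phony
 * byte[] whose length encodes the range size, a heap walker that steps by
 * object size can cross the dead range like any other object. Conceptually:
 *
 *	obj = nursery start;
 *	while (obj < nursery end)
 *		obj += SGEN_ALIGN_UP (sgen_safe_object_get_size ((GCObject*)obj));
 */
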
void
sgen_nursery_allocator_prepare_for_pinning (void)
{
	sgen_clear_allocator_fragments (&mutator_allocator);
	sgen_minor_collector.clear_fragments ();
}

static mword fragment_total = 0;
/*
 * We found a fragment of free memory in the nursery: memzero it and if
 * it is big enough, add it to the list of fragments that can be used for
 * allocation.
 */
static void
add_nursery_frag (SgenFragmentAllocator *allocator, size_t frag_size, char* frag_start, char* frag_end)
{
	SGEN_LOG (4, "Found empty fragment: %p-%p, size: %" G_GSIZE_FORMAT "d", frag_start, frag_end, frag_size);
	sgen_binary_protocol_empty (frag_start, frag_size);
	/* Not worth dealing with smaller fragments: need to tune */
	if (frag_size >= SGEN_MAX_NURSERY_WASTE) {
		/* memsetting just the first chunk start is bound to provide better cache locality */
		if (sgen_get_nursery_clear_policy () == CLEAR_AT_GC)
			memset (frag_start, 0, frag_size);
		else if (sgen_get_nursery_clear_policy () == CLEAR_AT_TLAB_CREATION_DEBUG)
			memset (frag_start, 0xff, frag_size);

#ifdef NALLOC_DEBUG
		/* XXX convert this into a flight record entry
		printf ("\tfragment [%p %p] size %" G_GSIZE_FORMAT "d\n", frag_start, frag_end, frag_size);
		*/
#endif
		sgen_fragment_allocator_add (allocator, frag_start, frag_end);
		fragment_total += frag_size;
	} else {
		/* Clear unused fragments, pinning depends on this. */
		sgen_clear_range (frag_start, frag_end);
		HEAVY_STAT (stat_wasted_bytes_small_areas += frag_size);
	}
}

static void
fragment_list_reverse (SgenFragmentAllocator *allocator)
{
	SgenFragment *prev = NULL, *list = allocator->region_head;
	while (list) {
		SgenFragment *next = list->next;
		list->next = prev;
		list->next_in_order = prev;
		prev = list;
		list = next;
	}

	allocator->region_head = allocator->alloc_head = prev;
}

/*
 * We split fragments at the border of the current nursery limit. When we
 * allocate from the nursery we only consider fragments that start in the
 * current nursery section. We build fragments for the entire nursery in
 * order to facilitate scanning it for objects (adding a nursery frag also
 * marks a region in the nursery as being free).
 */
static void
add_nursery_frag_checks (SgenFragmentAllocator *allocator, char *frag_start, char *frag_end)
{
	char *nursery_limit = sgen_nursery_start + sgen_nursery_size;

	if (frag_start < nursery_limit && frag_end > nursery_limit) {
		add_nursery_frag (allocator, nursery_limit - frag_start, frag_start, nursery_limit);
		add_nursery_frag (allocator, frag_end - nursery_limit, nursery_limit, frag_end);
	} else {
		add_nursery_frag (allocator, frag_end - frag_start, frag_start, frag_end);
	}
}
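/*
 * Worked example with hypothetical addresses: with a 4MB current nursery at
 * 0x40000000, nursery_limit is 0x40400000. A free range
 * [0x403ff000, 0x40401000) straddles the limit and is registered as two
 * fragments, [0x403ff000, 0x40400000) and [0x40400000, 0x40401000), so the
 * allocator only ever sees fragments that start below the current limit.
 */
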
mword
sgen_build_nursery_fragments (GCMemSection *nursery_section)
{
	char *frag_start, *frag_end;
	size_t frag_size;
	SgenFragment *frags_ranges;
	void **pin_start, **pin_entry, **pin_end;

#ifdef NALLOC_DEBUG
	reset_alloc_records ();
#endif
	/* The mutator fragments are done. We no longer need them. */
	sgen_fragment_allocator_release (&mutator_allocator);

	frag_start = sgen_nursery_start;
	fragment_total = 0;

	/* The current nursery might give us a fragment list to exclude [start, next) */
	frags_ranges = sgen_minor_collector.build_fragments_get_exclude_head ();

	/* clear scan starts */
	memset (nursery_section->scan_starts, 0, nursery_section->num_scan_start * sizeof (gpointer));

	pin_start = pin_entry = sgen_pinning_get_entry (nursery_section->pin_queue_first_entry);
	pin_end = sgen_pinning_get_entry (nursery_section->pin_queue_last_entry);

	while (pin_entry < pin_end || frags_ranges) {
		char *addr0, *addr1;
		size_t size;

		addr0 = addr1 = sgen_nursery_end;
		if (pin_entry < pin_end)
			addr0 = (char *)*pin_entry;
		if (frags_ranges)
			addr1 = frags_ranges->fragment_start;

		if (addr0 < addr1) {
			SGEN_UNPIN_OBJECT (addr0);
			size = SGEN_ALIGN_UP (sgen_safe_object_get_size ((GCObject*)addr0));
			CANARIFY_SIZE (size);
			sgen_set_nursery_scan_start (addr0);
			frag_end = addr0;
			++pin_entry;
		} else {
			frag_end = addr1;
			size = frags_ranges->fragment_next - addr1;
			frags_ranges = frags_ranges->next_in_order;
		}

		frag_size = frag_end - frag_start;

		if (size == 0)
			continue;

		g_assert (frag_size >= 0);
		g_assert (size > 0);
		if (frag_size && size)
			add_nursery_frag_checks (&mutator_allocator, frag_start, frag_end);

		frag_size = size;
#ifdef NALLOC_DEBUG
		add_alloc_record (*pin_entry, frag_size, PINNING);
#endif
		frag_start = frag_end + frag_size;
	}

	frag_end = sgen_nursery_end;
	frag_size = frag_end - frag_start;
	if (frag_size)
		add_nursery_frag_checks (&mutator_allocator, frag_start, frag_end);

	/* Now it's safe to release the fragments exclude list. */
	sgen_minor_collector.build_fragments_release_exclude_head ();

	/* First we reorder the fragment list to be in ascending address order. This makes H/W prefetchers happier. */
	fragment_list_reverse (&mutator_allocator);

	/* The collector might want to do something with the final nursery fragment list. */
	sgen_minor_collector.build_fragments_finish (&mutator_allocator);

	if (!unmask (mutator_allocator.alloc_head)) {
		SGEN_LOG (1, "Nursery fully pinned");
		for (pin_entry = pin_start; pin_entry < pin_end; ++pin_entry) {
			GCObject *p = (GCObject *)*pin_entry;
			SGEN_LOG (3, "Bad pinning obj %p (%s), size: %ld", p, sgen_client_vtable_get_name (SGEN_LOAD_VTABLE (p)), (long)sgen_safe_object_get_size (p));
		}
	}
	return fragment_total;
}

/*** Nursery memory allocation ***/
void
sgen_nursery_retire_region (void *address, ptrdiff_t size)
{
	HEAVY_STAT (stat_wasted_bytes_discarded_fragments += size);
}

gboolean
sgen_can_alloc_size (size_t size)
{
	SgenFragment *frag;

	if (!SGEN_CAN_ALIGN_UP (size))
		return FALSE;

	size = SGEN_ALIGN_UP (size);

	for (frag = (SgenFragment *)unmask (mutator_allocator.alloc_head); frag; frag = (SgenFragment *)unmask (frag->next)) {
		if ((size_t)(frag->fragment_end - frag->fragment_next) >= size)
			return TRUE;
	}
	return FALSE;
}

void*
sgen_nursery_alloc (size_t size)
{
	SGEN_ASSERT (1, size >= (SGEN_CLIENT_MINIMUM_OBJECT_SIZE + CANARY_SIZE) && size <= (SGEN_MAX_SMALL_OBJ_SIZE + CANARY_SIZE), "Invalid nursery object size");

	SGEN_LOG (4, "Searching nursery for size: %" G_GSIZE_FORMAT "d", size);
	size = SGEN_ALIGN_UP (size);

	HEAVY_STAT (++stat_nursery_alloc_requests);

	return sgen_fragment_allocator_par_alloc (&mutator_allocator, size);
}

void*
sgen_nursery_alloc_range (size_t desired_size, size_t minimum_size, size_t *out_alloc_size)
{
	SGEN_LOG (4, "Searching for byte range desired size: %" G_GSIZE_FORMAT "d minimum size %" G_GSIZE_FORMAT "d", desired_size, minimum_size);

	HEAVY_STAT (++stat_nursery_alloc_range_requests);

	return sgen_fragment_allocator_par_range_alloc (&mutator_allocator, desired_size, minimum_size, out_alloc_size);
}

/*** Initialization ***/

#ifdef HEAVY_STATISTICS

void
sgen_nursery_allocator_init_heavy_stats (void)
{
	mono_counters_register ("bytes wasted trailer fragments", MONO_COUNTER_GC | MONO_COUNTER_WORD | MONO_COUNTER_BYTES, &stat_wasted_bytes_trailer);
	mono_counters_register ("bytes wasted small areas", MONO_COUNTER_GC | MONO_COUNTER_WORD | MONO_COUNTER_BYTES, &stat_wasted_bytes_small_areas);
	mono_counters_register ("bytes wasted discarded fragments", MONO_COUNTER_GC | MONO_COUNTER_WORD | MONO_COUNTER_BYTES, &stat_wasted_bytes_discarded_fragments);

	mono_counters_register ("# nursery alloc requests", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_nursery_alloc_requests);
	mono_counters_register ("# nursery alloc iterations", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_alloc_iterations);
	mono_counters_register ("# nursery alloc retries", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_alloc_retries);

	mono_counters_register ("# nursery alloc range requests", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_nursery_alloc_range_requests);
	mono_counters_register ("# nursery alloc range iterations", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_alloc_range_iterations);
	mono_counters_register ("# nursery alloc range retries", MONO_COUNTER_GC | MONO_COUNTER_ULONG, &stat_alloc_range_retries);
}

#endif

void
sgen_init_nursery_allocator (void)
{
	sgen_register_fixed_internal_mem_type (INTERNAL_MEM_FRAGMENT, sizeof (SgenFragment));
#ifdef NALLOC_DEBUG
	alloc_records = (AllocRecord *)sgen_alloc_os_memory (sizeof (AllocRecord) * ALLOC_RECORD_COUNT, SGEN_ALLOC_INTERNAL | SGEN_ALLOC_ACTIVATE, "debugging memory");
#endif
}

void
sgen_nursery_alloc_prepare_for_minor (void)
{
	sgen_minor_collector.prepare_to_space (sgen_space_bitmap, sgen_space_bitmap_size);
}

void
sgen_nursery_alloc_prepare_for_major (void)
{
	sgen_minor_collector.prepare_to_space (sgen_space_bitmap, sgen_space_bitmap_size);
}

void
sgen_nursery_allocator_set_nursery_bounds (char *start, size_t min_size, size_t max_size)
{
	sgen_nursery_start = start;
	sgen_nursery_end = start + max_size;

	sgen_nursery_size = min_size;
	sgen_nursery_min_size = min_size;
	sgen_nursery_max_size = max_size;

	sgen_nursery_bits = 0;
	while (ONE_P << (++ sgen_nursery_bits) != sgen_nursery_max_size)
		;

	/*
	 * This will not divide evenly for tiny nurseries (<4kb), so we make sure to be on
	 * the right side of things and round up. We could just do a MAX(1,x) instead,
	 * since the nursery size must be a power of 2.
	 */
	sgen_space_bitmap_size = (sgen_nursery_end - sgen_nursery_start + SGEN_TO_SPACE_GRANULE_IN_BYTES * 8 - 1) / (SGEN_TO_SPACE_GRANULE_IN_BYTES * 8);
	sgen_space_bitmap = (char *)g_malloc0 (sgen_space_bitmap_size);
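
	/*
	 * Sizing example with illustrative numbers: each bitmap bit covers one
	 * to-space granule, so with a 64MB nursery reservation and, say, a
	 * 512-byte granule (the actual SGEN_TO_SPACE_GRANULE_IN_BYTES may
	 * differ), the bitmap needs 64MB / (512 * 8) = 16KB. The
	 * "+ granule * 8 - 1" term rounds the division up so that even a tiny
	 * nursery gets at least one byte of bitmap.
	 */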

	/* Set up the single first large fragment. */
	sgen_minor_collector.init_nursery (&mutator_allocator, sgen_nursery_start, sgen_nursery_end);
}

void
sgen_resize_nursery (gboolean need_shrink)
{
	size_t major_size;

	if (sgen_nursery_min_size == sgen_nursery_max_size)
		return;

	major_size = sgen_major_collector.get_num_major_sections () * sgen_major_collector.section_size + sgen_los_memory_usage;
	/*
	 * We attempt to use a larger nursery size, as long as it doesn't
	 * exceed a certain percentage of the major heap.
	 *
	 * FIXME
	 * Commit memory when expanding and release it when shrinking (which
	 * would only be possible if there aren't any pinned objects in the
	 * section).
	 */
	if ((sgen_nursery_size * 2) < (major_size / SGEN_DEFAULT_ALLOWANCE_NURSERY_SIZE_RATIO) &&
			(sgen_nursery_size * 2) <= sgen_nursery_max_size && !need_shrink) {
		if ((sgen_nursery_section->end_data - sgen_nursery_section->data) == sgen_nursery_size)
			sgen_nursery_section->end_data += sgen_nursery_size;
		sgen_nursery_size *= 2;
	} else if ((sgen_nursery_size > (major_size / SGEN_DEFAULT_ALLOWANCE_NURSERY_SIZE_RATIO) || need_shrink) &&
			(sgen_nursery_size / 2) >= sgen_nursery_min_size) {
		sgen_nursery_size /= 2;
	}
}
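/*
 * Worked example of the policy above, with illustrative numbers (the actual
 * ratio is SGEN_DEFAULT_ALLOWANCE_NURSERY_SIZE_RATIO): assume a ratio of 8,
 * a 4MB nursery and a 128MB major heap. Growing: 4MB * 2 = 8MB < 128MB / 8
 * = 16MB, so the nursery doubles to 8MB (provided 8MB <=
 * sgen_nursery_max_size). If the major heap later drops to 48MB, then
 * 8MB > 48MB / 8 = 6MB, so the nursery halves back to 4MB (provided
 * 4MB >= sgen_nursery_min_size).
 */
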
#endif