/*
 * The copy/mark and gray stack draining functions of the M&S major collector.
 *
 * Copyright (C) 2014 Xamarin Inc
 *
 * Licensed under the MIT license. See LICENSE file in the project root for full license information.
 */

/*
 * COPY_OR_MARK_FUNCTION_NAME must be defined to be the function name of the copy/mark
 * function.
 *
 * SCAN_OBJECT_FUNCTION_NAME must be defined to be the function name of the object scanning
 * function.
 *
 * DRAIN_GRAY_STACK_FUNCTION_NAME must be defined to be the function name of the gray stack
 * draining function.
 *
 * Define COPY_OR_MARK_WITH_EVACUATION to support evacuation.
 */
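/*
 * Usage sketch (illustrative only; the function names below are examples chosen for
 * this comment, not necessarily the ones the including collector uses): the includer
 * defines the function-name macros, plus any of the optional switches above, and then
 * includes this file once per instantiation it needs, e.g.
 *
 *     #define COPY_OR_MARK_FUNCTION_NAME      major_copy_or_mark_object
 *     #define SCAN_OBJECT_FUNCTION_NAME       major_scan_object
 *     #define DRAIN_GRAY_STACK_FUNCTION_NAME  drain_gray_stack
 *     #include "sgen-marksweep-drain-gray-stack.h"
 *
 * All of these macros are #undef'd at the end of this file so it can be included
 * again with a different configuration.
 */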
/* Returns whether the object is still in the nursery. */
static inline MONO_ALWAYS_INLINE gboolean
COPY_OR_MARK_FUNCTION_NAME (GCObject **ptr, GCObject *obj, SgenGrayQueue *queue)
{
    MSBlockInfo *block;

#ifdef HEAVY_STATISTICS
    ++stat_optimized_copy;
    {
        GCObject *forwarded;
        SgenDescriptor desc;
        if ((forwarded = SGEN_OBJECT_IS_FORWARDED (obj)))
            desc = sgen_obj_get_descriptor_safe (forwarded);
        else
            desc = sgen_obj_get_descriptor_safe (obj);

        sgen_descriptor_count_copied_object (desc);
    }
#endif

    SGEN_ASSERT (9, obj, "null object from pointer %p", ptr);
#if !defined(COPY_OR_MARK_CONCURRENT) && !defined(COPY_OR_MARK_CONCURRENT_WITH_EVACUATION)
    SGEN_ASSERT (9, sgen_current_collection_generation == GENERATION_OLD, "old gen parallel allocator called from a %d collection", sgen_current_collection_generation);
#endif
    if (sgen_ptr_in_nursery (obj)) {
#if !defined(COPY_OR_MARK_CONCURRENT) && !defined(COPY_OR_MARK_CONCURRENT_WITH_EVACUATION)
        int word, bit;
        gboolean first = TRUE;
        GCObject *forwarded, *old_obj;
        mword vtable_word;

        vtable_word = *(mword*)obj;

        HEAVY_STAT (++stat_optimized_copy_nursery);

#if SGEN_MAX_DEBUG_LEVEL >= 9
        if (sgen_nursery_is_to_space (obj))
            SGEN_ASSERT (9, !SGEN_VTABLE_IS_PINNED (vtable_word) && !SGEN_VTABLE_IS_FORWARDED (vtable_word), "To-space object can't be pinned or forwarded.");
#endif

        if (SGEN_VTABLE_IS_PINNED (vtable_word)) {
            SGEN_ASSERT (9, !SGEN_VTABLE_IS_FORWARDED (vtable_word), "Cannot be both pinned and forwarded.");
            HEAVY_STAT (++stat_optimized_copy_nursery_pinned);
            return TRUE;
        }

        if ((forwarded = (GCObject *)SGEN_VTABLE_IS_FORWARDED (vtable_word))) {
            HEAVY_STAT (++stat_optimized_copy_nursery_forwarded);
            SGEN_UPDATE_REFERENCE (ptr, forwarded);
            return sgen_ptr_in_nursery (forwarded);
        }

        /* An object in the nursery To Space has already been copied and grayed. Nothing to do. */
        if (sgen_nursery_is_to_space (obj))
            return TRUE;
#ifdef COPY_OR_MARK_WITH_EVACUATION
    do_copy_object:
#endif
        old_obj = obj;
#ifdef COPY_OR_MARK_PARALLEL
        obj = copy_object_no_checks_par (obj, queue);
#else
        obj = copy_object_no_checks (obj, queue);
#endif
        if (G_UNLIKELY (old_obj == obj)) {
            /*
             * If we fail to evacuate an object we just stop doing it for a
             * given block size as all others will surely fail too.
             */
            /* FIXME: test this case somehow. */
            if (!sgen_ptr_in_nursery (obj)) {
                int size_index;
                block = MS_BLOCK_FOR_OBJ (obj);
                size_index = block->obj_size_index;
                evacuate_block_obj_sizes [size_index] = FALSE;
                MS_MARK_OBJECT_AND_ENQUEUE (obj, sgen_obj_get_descriptor (obj), block, queue);
                return FALSE;
            }
            return TRUE;
        }
        HEAVY_STAT (++stat_objects_copied_major);
        SGEN_UPDATE_REFERENCE (ptr, obj);

        if (sgen_ptr_in_nursery (obj))
            return TRUE;
        /*
         * FIXME: See comment for copy_object_no_checks(). If
         * we have that, we can let the allocation function
         * give us the block info, too, and we won't have to
         * re-fetch it.
         *
         * FIXME (2): We should rework this to avoid all those nursery checks.
         */
        /*
         * For the split nursery allocator the object might
         * still be in the nursery despite having been
         * promoted, in which case we can't mark it.
         */
        block = MS_BLOCK_FOR_OBJ (obj);
        MS_CALC_MARK_BIT (word, bit, obj);
        SGEN_ASSERT (9, !MS_MARK_BIT (block, word, bit), "object %p already marked", obj);
#ifdef COPY_OR_MARK_PARALLEL
        MS_SET_MARK_BIT_PAR (block, word, bit, first);
#else
        MS_SET_MARK_BIT (block, word, bit);
#endif
        if (first)
            sgen_binary_protocol_mark (obj, (gpointer)SGEN_LOAD_VTABLE (obj), sgen_safe_object_get_size (obj));

        return FALSE;
#endif
    } else {
        mword vtable_word = *(mword*)obj;
        SgenDescriptor desc;
        int type;

        HEAVY_STAT (++stat_optimized_copy_major);

#ifdef COPY_OR_MARK_WITH_EVACUATION
        {
            GCObject *forwarded;
            if ((forwarded = (GCObject *)SGEN_VTABLE_IS_FORWARDED (vtable_word))) {
                HEAVY_STAT (++stat_optimized_copy_major_forwarded);
                SGEN_UPDATE_REFERENCE (ptr, forwarded);
                SGEN_ASSERT (9, !sgen_ptr_in_nursery (forwarded), "Cannot be forwarded to nursery.");
                return FALSE;
            }
        }
#endif

        SGEN_ASSERT (9, !SGEN_VTABLE_IS_PINNED (vtable_word), "Pinned object in non-pinned block?");

        /* We untag the vtable for concurrent M&S, in case bridge is running and it tagged it */
        desc = sgen_vtable_get_descriptor ((GCVTable)SGEN_POINTER_UNTAG_VTABLE (vtable_word));
        type = desc & DESC_TYPE_MASK;
        if (sgen_safe_object_is_small (obj, type)) {
#ifdef HEAVY_STATISTICS
            if (type <= DESC_TYPE_MAX_SMALL_OBJ)
                ++stat_optimized_copy_major_small_fast;
            else
                ++stat_optimized_copy_major_small_slow;
#endif

            block = MS_BLOCK_FOR_OBJ (obj);

#ifdef COPY_OR_MARK_CONCURRENT_WITH_EVACUATION
            if (G_UNLIKELY (major_block_is_evacuating (block))) {
                /*
                 * We don't copy within the concurrent phase. These objects will
                 * be handled below in the finishing pause, by scanning the mod-union
                 * card table.
                 */
                return FALSE;
            }
#endif

#ifdef COPY_OR_MARK_WITH_EVACUATION
            if (major_block_is_evacuating (block)) {
                HEAVY_STAT (++stat_optimized_copy_major_small_evacuate);
                goto do_copy_object;
            }
#endif

#ifdef COPY_OR_MARK_PARALLEL
            MS_MARK_OBJECT_AND_ENQUEUE_PAR (obj, desc, block, queue);
#else
            MS_MARK_OBJECT_AND_ENQUEUE (obj, desc, block, queue);
#endif
        } else {
            gboolean first = TRUE;
            HEAVY_STAT (++stat_optimized_copy_major_large);
#ifdef COPY_OR_MARK_PARALLEL
            first = sgen_los_pin_object_par (obj);
#else
            if (sgen_los_object_is_pinned (obj))
                first = FALSE;
            else
                sgen_los_pin_object (obj);
#endif
            if (first) {
                sgen_binary_protocol_pin (obj, (gpointer)SGEN_LOAD_VTABLE (obj), sgen_safe_object_get_size (obj));
                if (SGEN_OBJECT_HAS_REFERENCES (obj))
#ifdef COPY_OR_MARK_PARALLEL
                    GRAY_OBJECT_ENQUEUE_PARALLEL (queue, obj, desc);
#else
                    GRAY_OBJECT_ENQUEUE_SERIAL (queue, obj, desc);
#endif
            }
        }
    }

    return FALSE;
}

static void
SCAN_OBJECT_FUNCTION_NAME (GCObject *full_object, SgenDescriptor desc, SgenGrayQueue *queue)
{
    char *start = (char*)full_object;

#ifdef HEAVY_STATISTICS
    ++stat_optimized_major_scan;
    if (!sgen_gc_descr_has_references (desc))
        ++stat_optimized_major_scan_no_refs;
    sgen_descriptor_count_scanned_object (desc);
#endif
#ifdef SGEN_HEAVY_BINARY_PROTOCOL
    add_scanned_object (start);
#endif

    /* Now scan the object. */
#if defined(COPY_OR_MARK_CONCURRENT_WITH_EVACUATION)
#define HANDLE_PTR(ptr,obj) do { \
        GCObject *__old = *(ptr); \
        sgen_binary_protocol_scan_process_reference ((full_object), (ptr), __old); \
        if (__old && !sgen_ptr_in_nursery (__old)) { \
            if (G_UNLIKELY (full_object && !sgen_ptr_in_nursery (ptr) && \
                    sgen_safe_object_is_small (__old, sgen_obj_get_descriptor (__old) & DESC_TYPE_MASK) && \
                    major_block_is_evacuating (MS_BLOCK_FOR_OBJ (__old)))) { \
                mark_mod_union_card ((full_object), (void**)(ptr), __old); \
            } else { \
                PREFETCH_READ (__old); \
                COPY_OR_MARK_FUNCTION_NAME ((ptr), __old, queue); \
            } \
        } else { \
            if (G_UNLIKELY (full_object && sgen_ptr_in_nursery (__old) && !sgen_ptr_in_nursery ((ptr)) && !sgen_cement_is_forced (__old))) \
                mark_mod_union_card ((full_object), (void**)(ptr), __old); \
        } \
    } while (0)
#elif defined(COPY_OR_MARK_CONCURRENT)
#define HANDLE_PTR(ptr,obj) do { \
        GCObject *__old = *(ptr); \
        sgen_binary_protocol_scan_process_reference ((full_object), (ptr), __old); \
        if (__old && !sgen_ptr_in_nursery (__old)) { \
            PREFETCH_READ (__old); \
            COPY_OR_MARK_FUNCTION_NAME ((ptr), __old, queue); \
        } else { \
            if (G_UNLIKELY (full_object && sgen_ptr_in_nursery (__old) && !sgen_ptr_in_nursery ((ptr)) && !sgen_cement_is_forced (__old))) \
                mark_mod_union_card ((full_object), (void**)(ptr), __old); \
        } \
    } while (0)
#else
#define HANDLE_PTR(ptr,obj) do { \
        GCObject *__old = *(ptr); \
        sgen_binary_protocol_scan_process_reference ((full_object), (ptr), __old); \
        if (__old) { \
            gboolean __still_in_nursery = COPY_OR_MARK_FUNCTION_NAME ((ptr), __old, queue); \
            if (G_UNLIKELY (__still_in_nursery && !sgen_ptr_in_nursery ((ptr)) && !SGEN_OBJECT_IS_CEMENTED (*(ptr)))) { \
                GCObject *__copy = *(ptr); \
                sgen_add_to_global_remset ((ptr), __copy); \
            } \
        } \
    } while (0)
#endif

#define SCAN_OBJECT_PROTOCOL
#include "sgen-scan-object.h"
}
#ifdef SCAN_VTYPE_FUNCTION_NAME
static void
SCAN_VTYPE_FUNCTION_NAME (GCObject *full_object, char *start, SgenDescriptor desc, SgenGrayQueue *queue BINARY_PROTOCOL_ARG (size_t size))
{
    SGEN_OBJECT_LAYOUT_STATISTICS_DECLARE_BITMAP;

#ifdef HEAVY_STATISTICS
    /* FIXME: We're half scanning this object. How do we account for that? */
    //add_scanned_object (start);
#endif

    /* The descriptors include info about the object header as well */
    start -= SGEN_CLIENT_OBJECT_HEADER_SIZE;

    /* We use the same HANDLE_PTR from the obj scan function */
#define SCAN_OBJECT_NOVTABLE
#define SCAN_OBJECT_PROTOCOL
#include "sgen-scan-object.h"

    SGEN_OBJECT_LAYOUT_STATISTICS_COMMIT_BITMAP;
}
#endif
#ifdef SCAN_PTR_FIELD_FUNCTION_NAME
static void
SCAN_PTR_FIELD_FUNCTION_NAME (GCObject *full_object, GCObject **ptr, SgenGrayQueue *queue)
{
    /*
     * full_object is NULL if we scan unmanaged memory. This means we can't mark
     * mod unions for it, so these types of roots currently don't have support
     * for the concurrent collector (aka they need to be scanned as normal roots
     * both in the start and finishing pause).
     */
    HANDLE_PTR (ptr, NULL);
}
#endif

static gboolean
DRAIN_GRAY_STACK_FUNCTION_NAME (SgenGrayQueue *queue)
{
#if defined(COPY_OR_MARK_CONCURRENT) || defined(COPY_OR_MARK_CONCURRENT_WITH_EVACUATION) || defined(COPY_OR_MARK_PARALLEL)
    int i;
    /* Concurrent and parallel drains process a bounded batch per call so the caller can return regularly. */
    for (i = 0; i < 32; i++) {
#else
    for (;;) {
#endif
        GCObject *obj;
        SgenDescriptor desc;

        HEAVY_STAT (++stat_drain_loops);

#if defined(COPY_OR_MARK_PARALLEL)
        GRAY_OBJECT_DEQUEUE_PARALLEL (queue, &obj, &desc);
#else
        GRAY_OBJECT_DEQUEUE_SERIAL (queue, &obj, &desc);
#endif
        if (!obj)
            return TRUE;

        SCAN_OBJECT_FUNCTION_NAME (obj, desc, queue);
    }
    return FALSE;
}
#undef COPY_OR_MARK_PARALLEL
#undef COPY_OR_MARK_FUNCTION_NAME
#undef COPY_OR_MARK_WITH_EVACUATION
#undef COPY_OR_MARK_CONCURRENT
#undef COPY_OR_MARK_CONCURRENT_WITH_EVACUATION
#undef SCAN_OBJECT_FUNCTION_NAME
#undef SCAN_VTYPE_FUNCTION_NAME
#undef SCAN_PTR_FIELD_FUNCTION_NAME
#undef DRAIN_GRAY_STACK_FUNCTION_NAME