/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "gc/Marking-inl.h"

#include "mozilla/DebugOnly.h"
#include "mozilla/IntegerRange.h"
#include "mozilla/MathAlgorithms.h"
#include "mozilla/Maybe.h"
#include "mozilla/PodOperations.h"
#include "mozilla/ScopeExit.h"

#include <type_traits>

#include "gc/GCInternals.h"
#include "gc/ParallelMarking.h"
#include "gc/TraceKind.h"
#include "jit/JitCode.h"
#include "js/GCTypeMacros.h"  // JS_FOR_EACH_PUBLIC_{,TAGGED_}GC_POINTER_TYPE
#include "js/SliceBudget.h"
#include "util/Poison.h"
#include "vm/GeneratorObject.h"

#include "gc/GC-inl.h"
#include "gc/PrivateIterators-inl.h"
#include "gc/TraceMethods-inl.h"
#include "gc/WeakMap-inl.h"
#include "vm/GeckoProfiler-inl.h"

using namespace js::gc;

using JS::MapTypeToTraceKind;

using mozilla::DebugOnly;
using mozilla::IntegerRange;
using mozilla::PodCopy;
// Tracing, in this context, refers to an abstract visitation of some or all of
// the GC-controlled heap. The effect of tracing an edge of the graph depends
// on the subclass of the JSTracer on whose behalf we are tracing.
//
// The primary JSTracer is the GCMarker. The marking tracer causes the target
// of each traversed edge to be marked black and the target edge's children to
// be marked either gray (in the gc algorithm sense) or immediately black.
//
// The secondary JSTracer is the CallbackTracer. This simply invokes a callback
// on each edge in a child.
//
// The following is a rough outline of the general structure of the tracing
// internals.
//
/* clang-format off */
//
//                                                   GCMarker::
//   TraceRoot   TraceEdge   TraceRange         processMarkStackTop
//       |           |           |                   |         ^
//       +-----------+-----------+                   |         :
//                   |                               |     +-------+
//                   v                               |     | Mark  |
//           TraceEdgeInternal                       |     | Stack |
//                   |                               |     +-------+
//        +----------+--------------+                |         ^
//        v                         v                v         :
//  CallbackTracer          GCMarker::markAndTraverseEdge      :
//    callback                      |                          :
//                                  v                          :
//                           markAndTraverse                   :
//                                  |                          :
//        +-------------------------+-------------------+      :
//        v                         v                   v      :
//  markAndTraceChildren       markAndPush    eagerlyMarkChildren
//        |                         :                          :
//        v                         :..........................:
//  T::traceChildren
//        |
//        +--> (calls back into TraceEdge and friends above)
//
//   ------- Direct calls
//   ....... Data flow
//
/* clang-format on */
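// Illustrative sketch (comment only, not compiled): a minimal callback tracer
// in the spirit of the CallbackTracer described above. The exact override
// point (assumed here to be onChild with this signature) varies between
// SpiderMonkey versions, so treat the names as assumptions.
//
//   class EdgeLogger final : public JS::CallbackTracer {
//    public:
//     explicit EdgeLogger(JSRuntime* rt) : JS::CallbackTracer(rt) {}
//     void onChild(JS::GCCellPtr thing, const char* name) override {
//       // Visit the edge; a real tracer might record, mark or move it.
//       fprintf(stderr, "edge %s -> %p\n", name, thing.asCell());
//     }
//   };
//
// Passing an EdgeLogger to TraceEdge/TraceRoot visits each edge and invokes
// the callback instead of marking.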
/*** Tracing Invariants *****************************************************/
template <typename T>
static inline bool IsOwnedByOtherRuntime(JSRuntime* rt, T thing) {
  bool other = thing->runtimeFromAnyThread() != rt;
  MOZ_ASSERT_IF(other, thing->isPermanentAndMayBeShared());
  return other;
}
#ifdef DEBUG

static inline bool IsInFreeList(TenuredCell* cell) {
  Arena* arena = cell->arena();
  uintptr_t addr = reinterpret_cast<uintptr_t>(cell);
  MOZ_ASSERT(Arena::isAligned(addr, arena->getThingSize()));
  return arena->inFreeList(addr);
}
template <typename T>
void js::CheckTracedThing(JSTracer* trc, T* thing) {
  if (IsForwarded(thing)) {
    JS::TracerKind kind = trc->kind();
    MOZ_ASSERT(kind == JS::TracerKind::Tenuring ||
               kind == JS::TracerKind::MinorSweeping ||
               kind == JS::TracerKind::Moving);
    thing = Forwarded(thing);
  }

  /* This function uses data that's not available in the nursery. */
  if (IsInsideNursery(thing)) {
    return;
  }

  /*
   * Permanent shared things that are not associated with this runtime will be
   * ignored during marking.
   */
  Zone* zone = thing->zoneFromAnyThread();
  if (IsOwnedByOtherRuntime(trc->runtime(), thing)) {
    MOZ_ASSERT(!zone->wasGCStarted());
    MOZ_ASSERT(thing->isMarkedBlack());
    return;
  }

  JSRuntime* rt = trc->runtime();
  MOZ_ASSERT(zone->runtimeFromAnyThread() == rt);

  bool isGcMarkingTracer = trc->isMarkingTracer();
  bool isUnmarkGrayTracer = IsTracerKind(trc, JS::TracerKind::UnmarkGray);
  bool isClearEdgesTracer = IsTracerKind(trc, JS::TracerKind::ClearEdges);

  if (TlsContext.get()) {
    // If we're on the main thread we must have access to the runtime and zone.
    MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
    MOZ_ASSERT(CurrentThreadCanAccessZone(zone));
  } else {
    MOZ_ASSERT(isGcMarkingTracer || isUnmarkGrayTracer || isClearEdgesTracer ||
               IsTracerKind(trc, JS::TracerKind::Moving) ||
               IsTracerKind(trc, JS::TracerKind::Sweeping));
    MOZ_ASSERT_IF(!isClearEdgesTracer, CurrentThreadIsPerformingGC());
  }

  MOZ_ASSERT(thing->isAligned());
  MOZ_ASSERT(MapTypeToTraceKind<std::remove_pointer_t<T>>::kind ==
             thing->getTraceKind());

  /*
   * Check that we only mark allocated cells.
   *
   * This check is restricted to marking for two reasons: Firstly, if
   * background sweeping is running and concurrently modifying the free list
   * then it is not safe. Secondly, it was thought to be slow so this is a
   * compromise so as to not affect test times too much.
   */
  MOZ_ASSERT_IF(zone->isGCMarking(), !IsInFreeList(&thing->asTenured()));
}
template <typename T>
void js::CheckTracedThing(JSTracer* trc, const T& thing) {
  ApplyGCThingTyped(thing, [trc](auto t) { CheckTracedThing(trc, t); });
}
template <typename T>
static void CheckMarkedThing(GCMarker* gcMarker, T* thing) {
  Zone* zone = thing->zoneFromAnyThread();

  MOZ_ASSERT(zone->shouldMarkInZone(gcMarker->markColor()));

  MOZ_ASSERT_IF(gcMarker->shouldCheckCompartments(),
                zone->isCollectingFromAnyThread() || zone->isAtomsZone());

  MOZ_ASSERT_IF(gcMarker->markColor() == MarkColor::Gray,
                !zone->isGCMarkingBlackOnly() || zone->isAtomsZone());

  MOZ_ASSERT(!(zone->isGCSweeping() || zone->isGCFinished() ||
               zone->isGCCompacting()));

  // Check that we don't stray from the current compartment and zone without
  // using TraceCrossCompartmentEdge.
  Compartment* comp = thing->maybeCompartment();
  MOZ_ASSERT_IF(gcMarker->tracingCompartment && comp,
                gcMarker->tracingCompartment == comp);
  MOZ_ASSERT_IF(gcMarker->tracingZone,
                gcMarker->tracingZone == zone || zone->isAtomsZone());
}
# define IMPL_CHECK_TRACED_THING(_, type, _1, _2) \
  template void CheckTracedThing<type>(JSTracer*, type*);
JS_FOR_EACH_TRACEKIND(IMPL_CHECK_TRACED_THING);
# undef IMPL_CHECK_TRACED_THING

template void CheckTracedThing<Value>(JSTracer*, const Value&);
template void CheckTracedThing<wasm::AnyRef>(JSTracer*, const wasm::AnyRef&);

#endif  // DEBUG
static inline bool ShouldMarkCrossCompartment(GCMarker* marker, JSObject* src,
                                              Cell* dstCell) {
  MarkColor color = marker->markColor();

  if (!dstCell->isTenured()) {
#ifdef DEBUG
    // Bug 1743098: This shouldn't be possible but it does seem to happen. Log
    // some useful information in debug builds.
    if (color != MarkColor::Black) {
      fprintf(stderr,
              "ShouldMarkCrossCompartment: cross compartment edge from gray "
              "object to nursery thing\n");
      fprintf(stderr, "src: ");
      src->dump();
      fprintf(stderr, "dst: ");
      dstCell->dump();
    }
#endif
    MOZ_ASSERT(color == MarkColor::Black);
    return false;
  }

  TenuredCell& dst = dstCell->asTenured();

  JS::Zone* dstZone = dst.zone();
  if (!src->zone()->isGCMarking() && !dstZone->isGCMarking()) {
    return false;
  }

  if (color == MarkColor::Black) {
    // Check our sweep groups are correct: we should never have to
    // mark something in a zone that we have started sweeping.
    MOZ_ASSERT_IF(!dst.isMarkedBlack(), !dstZone->isGCSweeping());

    /*
     * Having black->gray edges violates our promise to the cycle collector so
     * we ensure that gray things we encounter when marking black end up
     * getting marked black.
     *
     * This can happen for two reasons:
     *
     * 1) If we're collecting a compartment and it has an edge to an
     * uncollected compartment it's possible that the source and destination
     * of the cross-compartment edge should be gray, but the source was marked
     * black by the write barrier.
     *
     * 2) If we yield during gray marking and the write barrier marks a gray
     * thing black.
     *
     * We handle the first case before returning whereas the second case
     * happens as part of normal marking.
     */
    if (dst.isMarkedGray() && !dstZone->isGCMarking()) {
      UnmarkGrayGCThingUnchecked(marker,
                                 JS::GCCellPtr(&dst, dst.getTraceKind()));
      return false;
    }

    return dstZone->isGCMarking();
  }

  // Check our sweep groups are correct as above.
  MOZ_ASSERT_IF(!dst.isMarkedAny(), !dstZone->isGCSweeping());

  if (dstZone->isGCMarkingBlackOnly()) {
    /*
     * The destination compartment is not being marked gray now, but it will
     * be later, so record the cell so it can be marked gray at the
     * appropriate time.
     */
    if (!dst.isMarkedAny()) {
      DelayCrossCompartmentGrayMarking(marker, src);
    }
    return false;
  }

  return dstZone->isGCMarkingBlackAndGray();
}
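// Illustrative summary (comment only) of the decisions above once the
// destination cell is tenured and at least one zone is marking:
//
//   marker color | dst state                  | action
//   -------------+----------------------------+------------------------------
//   black        | gray, dst zone not marking | unmark gray (fix the edge)
//   black        | otherwise                  | mark iff dst zone is marking
//   gray         | unmarked, zone black-only  | record for delayed gray mark
//   gray         | otherwise                  | mark iff zone marks black+gray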
static bool ShouldTraceCrossCompartment(JSTracer* trc, JSObject* src,
                                        Cell* dstCell) {
  if (!trc->isMarkingTracer()) {
    return true;
  }

  return ShouldMarkCrossCompartment(GCMarker::fromTracer(trc), src, dstCell);
}

static bool ShouldTraceCrossCompartment(JSTracer* trc, JSObject* src,
                                        const Value& val) {
  return val.isGCThing() &&
         ShouldTraceCrossCompartment(trc, src, val.toGCThing());
}
inline void js::gc::AssertShouldMarkInZone(GCMarker* marker, Cell* thing) {
  if (!thing->isMarkedBlack()) {
    Zone* zone = thing->zone();
    MOZ_ASSERT(zone->isAtomsZone() ||
               zone->shouldMarkInZone(marker->markColor()));
  }
}
void js::gc::AssertRootMarkingPhase(JSTracer* trc) {
  MOZ_ASSERT_IF(trc->isMarkingTracer(),
                trc->runtime()->gc.state() == State::NotActive ||
                    trc->runtime()->gc.state() == State::MarkRoots);
}

/*** Tracing Interface ******************************************************/
template <typename T>
static void TraceExternalEdgeHelper(JSTracer* trc, T* thingp,
                                    const char* name) {
  MOZ_ASSERT(InternalBarrierMethods<T>::isMarkable(*thingp));
  TraceEdgeInternal(trc, ConvertToBase(thingp), name);
}
JS_PUBLIC_API void js::UnsafeTraceManuallyBarrieredEdge(JSTracer* trc,
                                                        JSObject** thingp,
                                                        const char* name) {
  TraceEdgeInternal(trc, ConvertToBase(thingp), name);
}
template <typename T>
static void TraceRootHelper(JSTracer* trc, T* thingp, const char* name) {
  MOZ_ASSERT(thingp);
  js::TraceNullableRoot(trc, thingp, name);
}

namespace js {
class AbstractGeneratorObject;
class SavedFrame;
}  // namespace js
#define DEFINE_TRACE_EXTERNAL_EDGE_FUNCTION(type)                           \
  JS_PUBLIC_API void js::gc::TraceExternalEdge(JSTracer* trc, type* thingp, \
                                               const char* name) {          \
    TraceExternalEdgeHelper(trc, thingp, name);                             \
  }

// Define TraceExternalEdge for each public GC pointer type.
JS_FOR_EACH_PUBLIC_GC_POINTER_TYPE(DEFINE_TRACE_EXTERNAL_EDGE_FUNCTION)
JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(DEFINE_TRACE_EXTERNAL_EDGE_FUNCTION)

#undef DEFINE_TRACE_EXTERNAL_EDGE_FUNCTION
#define DEFINE_UNSAFE_TRACE_ROOT_FUNCTION(type)                 \
  JS_PUBLIC_API void JS::TraceRoot(JSTracer* trc, type* thingp, \
                                   const char* name) {          \
    TraceRootHelper(trc, thingp, name);                         \
  }

// Define TraceRoot for each public GC pointer type.
JS_FOR_EACH_PUBLIC_GC_POINTER_TYPE(DEFINE_UNSAFE_TRACE_ROOT_FUNCTION)
JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(DEFINE_UNSAFE_TRACE_ROOT_FUNCTION)

// Also, for the moment, define TraceRoot for internal GC pointer types.
DEFINE_UNSAFE_TRACE_ROOT_FUNCTION(AbstractGeneratorObject*)
DEFINE_UNSAFE_TRACE_ROOT_FUNCTION(SavedFrame*)
DEFINE_UNSAFE_TRACE_ROOT_FUNCTION(wasm::AnyRef)

#undef DEFINE_UNSAFE_TRACE_ROOT_FUNCTION
namespace js::gc {

#define INSTANTIATE_INTERNAL_TRACE_FUNCTIONS(type)                     \
  template void TraceRangeInternal<type>(JSTracer*, size_t len, type*, \
                                         const char* name);

#define INSTANTIATE_INTERNAL_TRACE_FUNCTIONS_FROM_TRACEKIND(_1, type, _2, _3) \
  INSTANTIATE_INTERNAL_TRACE_FUNCTIONS(type*)

JS_FOR_EACH_TRACEKIND(INSTANTIATE_INTERNAL_TRACE_FUNCTIONS_FROM_TRACEKIND)
JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(INSTANTIATE_INTERNAL_TRACE_FUNCTIONS)
INSTANTIATE_INTERNAL_TRACE_FUNCTIONS(TaggedProto)

#undef INSTANTIATE_INTERNAL_TRACE_FUNCTIONS_FROM_TRACEKIND
#undef INSTANTIATE_INTERNAL_TRACE_FUNCTIONS
}  // namespace js::gc
// In debug builds, makes a note of the current compartment before calling a
// trace hook or traceChildren() method on a GC thing.
class MOZ_RAII AutoSetTracingSource {
#ifndef DEBUG
 public:
  template <typename T>
  AutoSetTracingSource(JSTracer* trc, T* thing) {}
  ~AutoSetTracingSource() {}
#else
  GCMarker* marker = nullptr;

 public:
  template <typename T>
  AutoSetTracingSource(JSTracer* trc, T* thing) {
    if (trc->isMarkingTracer() && thing) {
      marker = GCMarker::fromTracer(trc);
      MOZ_ASSERT(!marker->tracingZone);
      marker->tracingZone = thing->asTenured().zone();
      MOZ_ASSERT(!marker->tracingCompartment);
      marker->tracingCompartment = thing->maybeCompartment();
    }
  }

  ~AutoSetTracingSource() {
    if (marker) {
      marker->tracingZone = nullptr;
      marker->tracingCompartment = nullptr;
    }
  }
#endif
};
// In debug builds, clear the trace hook compartment. This happens after the
// trace hook has called back into one of our trace APIs and we've checked the
// traced thing.
class MOZ_RAII AutoClearTracingSource {
#ifndef DEBUG
 public:
  explicit AutoClearTracingSource(GCMarker* marker) {}
  explicit AutoClearTracingSource(JSTracer* trc) {}
  ~AutoClearTracingSource() {}
#else
  GCMarker* marker = nullptr;
  JS::Zone* prevZone = nullptr;
  Compartment* prevCompartment = nullptr;

 public:
  explicit AutoClearTracingSource(JSTracer* trc) {
    if (trc->isMarkingTracer()) {
      marker = GCMarker::fromTracer(trc);
      prevZone = marker->tracingZone;
      marker->tracingZone = nullptr;
      prevCompartment = marker->tracingCompartment;
      marker->tracingCompartment = nullptr;
    }
  }

  ~AutoClearTracingSource() {
    if (marker) {
      marker->tracingZone = prevZone;
      marker->tracingCompartment = prevCompartment;
    }
  }
#endif
};
template <typename T>
void js::TraceManuallyBarrieredCrossCompartmentEdge(JSTracer* trc,
                                                    JSObject* src, T* dst,
                                                    const char* name) {
  // Clear expected compartment for cross-compartment edge.
  AutoClearTracingSource acts(trc);

  if (ShouldTraceCrossCompartment(trc, src, *dst)) {
    TraceEdgeInternal(trc, dst, name);
  }
}

template void js::TraceManuallyBarrieredCrossCompartmentEdge<Value>(
    JSTracer*, JSObject*, Value*, const char*);
template void js::TraceManuallyBarrieredCrossCompartmentEdge<JSObject*>(
    JSTracer*, JSObject*, JSObject**, const char*);
template void js::TraceManuallyBarrieredCrossCompartmentEdge<BaseScript*>(
    JSTracer*, JSObject*, BaseScript**, const char*);
template <typename T>
void js::TraceSameZoneCrossCompartmentEdge(JSTracer* trc,
                                           const BarrieredBase<T>* dst,
                                           const char* name) {
#ifdef DEBUG
  if (trc->isMarkingTracer()) {
    T thing = *dst->unbarrieredAddress();
    MOZ_ASSERT(thing->maybeCompartment(),
               "Use TraceEdge for GC things without a compartment");

    GCMarker* gcMarker = GCMarker::fromTracer(trc);
    MOZ_ASSERT_IF(gcMarker->tracingZone,
                  thing->zone() == gcMarker->tracingZone);
  }

  // Skip compartment checks for this edge.
  if (trc->kind() == JS::TracerKind::CompartmentCheck) {
    return;
  }
#endif

  // Clear expected compartment for cross-compartment edge.
  AutoClearTracingSource acts(trc);
  TraceEdgeInternal(trc, ConvertToBase(dst->unbarrieredAddress()), name);
}

template void js::TraceSameZoneCrossCompartmentEdge(
    JSTracer*, const BarrieredBase<Shape*>*, const char*);
template <typename T>
void js::TraceWeakMapKeyEdgeInternal(JSTracer* trc, Zone* weakMapZone,
                                     T** thingp, const char* name) {
  // We can't use ShouldTraceCrossCompartment here because that assumes the
  // source of the edge is a CCW object which could be used to delay gray
  // marking. Instead, assert that the weak map zone is in the same marking
  // state as the target thing's zone and therefore we can go ahead and mark
  // it.
#ifdef DEBUG
  auto thing = *thingp;
  if (trc->isMarkingTracer()) {
    MOZ_ASSERT(weakMapZone->isGCMarking());
    MOZ_ASSERT(weakMapZone->gcState() == thing->zone()->gcState());
  }
#endif

  // Clear expected compartment for cross-compartment edge.
  AutoClearTracingSource acts(trc);

  TraceEdgeInternal(trc, thingp, name);
}
template <typename T>
void js::TraceWeakMapKeyEdgeInternal(JSTracer* trc, Zone* weakMapZone,
                                     T* thingp, const char* name) {
  // We can't use ShouldTraceCrossCompartment here because that assumes the
  // source of the edge is a CCW object which could be used to delay gray
  // marking. Instead, assert that the weak map zone is in the same marking
  // state as the target thing's zone and therefore we can go ahead and mark
  // it.
#ifdef DEBUG
  if (trc->isMarkingTracer()) {
    MOZ_ASSERT(weakMapZone->isGCMarking());
    MOZ_ASSERT(weakMapZone->gcState() ==
               gc::ToMarkable(*thingp)->zone()->gcState());
  }
#endif

  // Clear expected compartment for cross-compartment edge.
  AutoClearTracingSource acts(trc);

  TraceEdgeInternal(trc, thingp, name);
}
template void js::TraceWeakMapKeyEdgeInternal<JSObject>(JSTracer*, Zone*,
                                                        JSObject**,
                                                        const char*);
template void js::TraceWeakMapKeyEdgeInternal<BaseScript>(JSTracer*, Zone*,
                                                          BaseScript**,
                                                          const char*);
template void js::TraceWeakMapKeyEdgeInternal<JS::Value>(JSTracer*, Zone*,
                                                         JS::Value*,
                                                         const char*);
* TraceGenericPointerRootAndType(JSTracer
* trc
, Cell
* thing
,
583 return MapGCThingTyped(thing
, kind
, [trc
, name
](auto t
) -> Cell
* {
584 TraceRoot(trc
, &t
, name
);
void js::TraceGenericPointerRoot(JSTracer* trc, Cell** thingp,
                                 const char* name) {
  MOZ_ASSERT(thingp);
  Cell* thing = *thingp;
  if (!thing) {
    return;
  }

  Cell* traced =
      TraceGenericPointerRootAndType(trc, thing, thing->getTraceKind(), name);
  if (traced != thing) {
    *thingp = traced;
  }
}
void js::TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc, Cell** thingp,
                                                  const char* name) {
  MOZ_ASSERT(thingp);
  Cell* thing = *thingp;
  if (!thing) {
    return;
  }

  auto* traced = MapGCThingTyped(thing, thing->getTraceKind(),
                                 [trc, name](auto t) -> Cell* {
                                   TraceManuallyBarrieredEdge(trc, &t, name);
                                   return t;
                                 });
  if (traced != thing) {
    *thingp = traced;
  }
}
void js::TraceGCCellPtrRoot(JSTracer* trc, JS::GCCellPtr* thingp,
                            const char* name) {
  Cell* thing = thingp->asCell();
  if (!thing) {
    return;
  }

  Cell* traced =
      TraceGenericPointerRootAndType(trc, thing, thingp->kind(), name);
  if (!traced) {
    *thingp = JS::GCCellPtr();
  } else if (traced != thingp->asCell()) {
    *thingp = JS::GCCellPtr(traced, thingp->kind());
  }
}
void js::TraceManuallyBarrieredGCCellPtr(JSTracer* trc, JS::GCCellPtr* thingp,
                                         const char* name) {
  Cell* thing = thingp->asCell();
  MOZ_ASSERT(thing);

  Cell* traced = MapGCThingTyped(thing, thing->getTraceKind(),
                                 [trc, name](auto t) -> Cell* {
                                   TraceManuallyBarrieredEdge(trc, &t, name);
                                   return t;
                                 });
  if (!traced) {
    // If we are clearing edges, also erase the type. This happens when using
    // ClearEdgesTracer.
    *thingp = JS::GCCellPtr();
  } else if (traced != thingp->asCell()) {
    *thingp = JS::GCCellPtr(traced, thingp->kind());
  }
}
template <typename T>
inline bool TraceTaggedPtrEdge(JSTracer* trc, T* thingp, const char* name) {
  // Return true by default. For some types the lambda below won't be called.
  bool ret = true;
  auto thing = MapGCThingTyped(*thingp, [&](auto thing) {
    if (!TraceEdgeInternal(trc, &thing, name)) {
      ret = false;
      return TaggedPtr<T>::empty();
    }

    return TaggedPtr<T>::wrap(thing);
  });

  // Only update *thingp if the value changed, to avoid TSan false positives
  // for template objects when using DumpHeapTracer or UbiNode tracers while
  // Ion compiling off-thread.
  if (thing.isSome() && thing.value() != *thingp) {
    *thingp = thing.value();
  }

  return ret;
}
bool js::gc::TraceEdgeInternal(JSTracer* trc, Value* thingp, const char* name) {
  return TraceTaggedPtrEdge(trc, thingp, name);
}
bool js::gc::TraceEdgeInternal(JSTracer* trc, jsid* thingp, const char* name) {
  return TraceTaggedPtrEdge(trc, thingp, name);
}
bool js::gc::TraceEdgeInternal(JSTracer* trc, TaggedProto* thingp,
                               const char* name) {
  return TraceTaggedPtrEdge(trc, thingp, name);
}
bool js::gc::TraceEdgeInternal(JSTracer* trc, wasm::AnyRef* thingp,
                               const char* name) {
  return TraceTaggedPtrEdge(trc, thingp, name);
}
template <typename T>
void js::gc::TraceRangeInternal(JSTracer* trc, size_t len, T* vec,
                                const char* name) {
  JS::AutoTracingIndex index(trc);
  for (auto i : IntegerRange(len)) {
    if (InternalBarrierMethods<T>::isMarkable(vec[i])) {
      TraceEdgeInternal(trc, &vec[i], name);
    }
    ++index;
  }
}
/*** GC Marking Interface ***************************************************/

using HasNoImplicitEdgesType = bool;

template <typename T>
struct ImplicitEdgeHolderType {
  using Type = HasNoImplicitEdgesType;
};

// For now, we only handle JSObject* and BaseScript* keys, but the linear time
// algorithm can be easily extended by adding in more types here, then making
// GCMarker::traverse<T> call markImplicitEdges.
template <>
struct ImplicitEdgeHolderType<JSObject*> {
  using Type = JSObject*;
};

template <>
struct ImplicitEdgeHolderType<BaseScript*> {
  using Type = BaseScript*;
};
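// Sketch (comment only, hypothetical type name) of the extension point
// described above: supporting a new key type NewKey* would add a
// specialization
//
//   template <>
//   struct ImplicitEdgeHolderType<NewKey*> {
//     using Type = NewKey*;
//   };
//
// and arrange for GCMarker::traverse<NewKey*> to call markImplicitEdges.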
void GCMarker::markEphemeronEdges(EphemeronEdgeVector& edges,
                                  gc::CellColor srcColor) {
  // This is called as part of GC weak marking or by barriers outside of GC.
  MOZ_ASSERT_IF(CurrentThreadIsPerformingGC(),
                state == MarkingState::WeakMarking);

  DebugOnly<size_t> initialLength = edges.length();

  for (auto& edge : edges) {
    CellColor targetColor = std::min(srcColor, edge.color);
    MOZ_ASSERT(CellColor(markColor()) >= targetColor);
    if (targetColor == markColor()) {
      ApplyGCThingTyped(edge.target, edge.target->getTraceKind(),
                        [this](auto t) {
                          markAndTraverse<MarkingOptions::MarkImplicitEdges>(t);
                        });
    }
  }

  // The above marking always goes through markAndPush, which will not cause
  // 'edges' to be appended to while iterating.
  MOZ_ASSERT(edges.length() == initialLength);

  // This is not just an optimization. When nuking a CCW, we conservatively
  // mark through the related edges and then lose the CCW->target connection
  // that induces a sweep group edge. As a result, it is possible for the
  // delegate zone to get marked later, look up an edge in this table, and
  // then try to mark something in a Zone that is no longer marking.
  if (srcColor == CellColor::Black && markColor() == MarkColor::Black) {
    edges.eraseIf([](auto& edge) { return edge.color == MarkColor::Black; });
  }
}
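// Worked example (comment only) of the color combination above: for a weakmap
// entry map[key] -> value, the target is marked with
// std::min(srcColor, edge.color). With srcColor = Gray and edge.color = Black
// the value can only be marked gray; it becomes black only once both inputs
// are black and the current mark color is black.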
// 'delegate' is no longer the delegate of 'key'.
void GCMarker::severWeakDelegate(JSObject* key, JSObject* delegate) {
  MOZ_ASSERT(CurrentThreadIsMainThread());

  JS::Zone* zone = delegate->zone();
  if (!zone->needsIncrementalBarrier()) {
    MOZ_ASSERT(
        !zone->gcEphemeronEdges(delegate).get(delegate),
        "non-collecting zone should not have populated gcEphemeronEdges");
    return;
  }

  auto* p = zone->gcEphemeronEdges(delegate).get(delegate);
  if (!p) {
    return;
  }

  // We are losing 3 edges here: key -> delegate, delegate -> key, and
  // <delegate, map> -> value. Maintain snapshot-at-beginning (hereafter,
  // S-A-B) by conservatively assuming the delegate will end up black and
  // marking through the latter 2 edges.
  //
  // Note that this does not fully give S-A-B:
  //
  // 1. If the map is gray, then the value will only be marked gray here even
  // though the map could later be discovered to be black.
  //
  // 2. If the map has not yet been marked, we won't have any entries to mark
  // here in the first place.
  //
  // 3. We're not marking the delegate, since that would cause eg nukeAllCCWs
  // to keep everything alive for another collection.
  //
  // We can't even assume that the delegate passed in here is live, because we
  // could have gotten here from nukeAllCCWs, which iterates over all CCWs
  // including dead ones.
  //
  // This is ok because S-A-B is only needed to prevent the case where an
  // unmarked object is removed from the graph and then re-inserted where it is
  // reachable only by things that have already been marked. None of the 3
  // target objects will be re-inserted anywhere as a result of this action.

  EphemeronEdgeVector& edges = p->value();
  MOZ_ASSERT(markColor() == MarkColor::Black);
  markEphemeronEdges(edges, MarkColor::Black);
}
// 'delegate' is now the delegate of 'key'. Update weakmap marking state.
void GCMarker::restoreWeakDelegate(JSObject* key, JSObject* delegate) {
  MOZ_ASSERT(CurrentThreadIsMainThread());

  if (!key->zone()->needsIncrementalBarrier()) {
    // Temporary diagnostic printouts for when this would have asserted.
    if (key->zone()->gcEphemeronEdges(key).has(key)) {
      fprintf(stderr, "key zone: %d\n", int(key->zone()->gcState()));
#ifdef DEBUG
      key->dump();
#endif
      fprintf(stderr, "delegate zone: %d\n", int(delegate->zone()->gcState()));
#ifdef DEBUG
      delegate->dump();
#endif
    }
    MOZ_ASSERT(
        !key->zone()->gcEphemeronEdges(key).has(key),
        "non-collecting zone should not have populated gcEphemeronEdges");
    return;
  }

  if (!delegate->zone()->needsIncrementalBarrier()) {
    // Normally we should not have added the key -> value edge if the delegate
    // zone is not marking (because the delegate would have been seen as black,
    // so we would mark the key immediately instead). But if there wasn't a
    // delegate (the key was nuked), then we won't have consulted it. So we
    // can't do the same assertion as above.
    //
    // Specifically, the sequence would be:
    // 1. Nuke the key.
    // 2. Start the incremental GC.
    // 3. Mark the WeakMap. Insert a key->value edge with a DeadObjectProxy key.
    // 4. Un-nuke the key with a delegate in a nonmarking Zone.
    //
    // The result is an ephemeron edge (from <map,key> to value, but stored
    // as key to value) involving a key with a delegate in a nonmarking Zone,
    // something that ordinarily would not happen.
    return;
  }

  auto* p = key->zone()->gcEphemeronEdges(key).get(key);
  if (!p) {
    return;
  }

  // Similar to severWeakDelegate above, mark through the key -> value edge.
  EphemeronEdgeVector& edges = p->value();
  MOZ_ASSERT(markColor() == MarkColor::Black);
  markEphemeronEdges(edges, MarkColor::Black);
}
template <typename T>
void GCMarker::markImplicitEdgesHelper(T markedThing) {
  if (!isWeakMarking()) {
    return;
  }

  Zone* zone = markedThing->asTenured().zone();
  MOZ_ASSERT(zone->isGCMarking());
  MOZ_ASSERT(!zone->isGCSweeping());

  auto p = zone->gcEphemeronEdges().get(markedThing);
  if (!p) {
    return;
  }
  EphemeronEdgeVector& edges = p->value();

  // markedThing might be a key in a debugger weakmap, which can end up marking
  // values that are in a different compartment.
  AutoClearTracingSource acts(tracer());

  CellColor thingColor = gc::detail::GetEffectiveColor(this, markedThing);
  markEphemeronEdges(edges, thingColor);
}

template <>
void GCMarker::markImplicitEdgesHelper(HasNoImplicitEdgesType) {}

template <typename T>
void GCMarker::markImplicitEdges(T* thing) {
  markImplicitEdgesHelper<typename ImplicitEdgeHolderType<T*>::Type>(thing);
}

template void GCMarker::markImplicitEdges(JSObject*);
template void GCMarker::markImplicitEdges(BaseScript*);
template <typename T>
static inline bool ShouldMark(GCMarker* gcmarker, T* thing) {
  // We may encounter nursery things during normal marking since we don't
  // collect the nursery at the start of every GC slice.
  if (!thing->isTenured()) {
    return false;
  }

  // Don't mark things outside a zone if we are in a per-zone GC. Don't mark
  // permanent shared things owned by other runtimes (we will never observe
  // their zone being collected).
  Zone* zone = thing->asTenured().zoneFromAnyThread();
  return zone->shouldMarkInZone(gcmarker->markColor());
}
template <uint32_t opts>
MarkingTracerT<opts>::MarkingTracerT(JSRuntime* runtime, GCMarker* marker)
    : GenericTracerImpl<MarkingTracerT<opts>>(
          runtime, JS::TracerKind::Marking,
          JS::TraceOptions(JS::WeakMapTraceAction::Expand,
                           JS::WeakEdgeTraceAction::Skip)) {
  // Marking tracers are owned by (and part of) a GCMarker.
  MOZ_ASSERT(this == marker->tracer());
  MOZ_ASSERT(getMarker() == marker);
}
template <uint32_t opts>
MOZ_ALWAYS_INLINE GCMarker* MarkingTracerT<opts>::getMarker() {
  return GCMarker::fromTracer(this);
}
template <uint32_t opts>
template <typename T>
void MarkingTracerT<opts>::onEdge(T** thingp, const char* name) {
  T* thing = *thingp;

  // Do per-type marking precondition checks.
  GCMarker* marker = getMarker();
  if (!ShouldMark(marker, thing)) {
    MOZ_ASSERT(gc::detail::GetEffectiveColor(marker, thing) ==
               js::gc::CellColor::Black);
    return;
  }

  MOZ_ASSERT(!IsOwnedByOtherRuntime(this->runtime(), thing));

#ifdef DEBUG
  CheckMarkedThing(marker, thing);
#endif

  AutoClearTracingSource acts(this);
  marker->markAndTraverse<opts>(thing);
}
#define INSTANTIATE_ONEDGE_METHOD(name, type, _1, _2)                 \
  template void MarkingTracerT<MarkingOptions::None>::onEdge<type>(   \
      type * *thingp, const char* name);                              \
  template void                                                       \
  MarkingTracerT<MarkingOptions::MarkImplicitEdges>::onEdge<type>(    \
      type * *thingp, const char* name);                              \
  template void                                                       \
  MarkingTracerT<MarkingOptions::MarkRootCompartments>::onEdge<type>( \
      type * *thingp, const char* name);
JS_FOR_EACH_TRACEKIND(INSTANTIATE_ONEDGE_METHOD)
#undef INSTANTIATE_ONEDGE_METHOD
static void TraceEdgeForBarrier(GCMarker* gcmarker, TenuredCell* thing,
                                JS::TraceKind kind) {
  // Dispatch to markAndTraverse without checking ShouldMark.
  ApplyGCThingTyped(thing, kind, [gcmarker](auto thing) {
    MOZ_ASSERT(ShouldMark(gcmarker, thing));
    CheckTracedThing(gcmarker->tracer(), thing);
    AutoClearTracingSource acts(gcmarker->tracer());
    gcmarker->markAndTraverse<NormalMarkingOptions>(thing);
  });
}
JS_PUBLIC_API void js::gc::PerformIncrementalReadBarrier(JS::GCCellPtr thing) {
  // Optimized marking for read barriers. This is called from
  // ExposeGCThingToActiveJS which has already checked the prerequisites for
  // performing a read barrier. This means we can skip a bunch of checks and
  // call into the tracer directly.

  MOZ_ASSERT(thing);
  MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());

  TenuredCell* cell = &thing.asCell()->asTenured();
  MOZ_ASSERT(!cell->isMarkedBlack());

  Zone* zone = cell->zone();
  MOZ_ASSERT(zone->needsIncrementalBarrier());

  // Skip dispatching on known tracer type.
  GCMarker* gcmarker = GCMarker::fromTracer(zone->barrierTracer());
  TraceEdgeForBarrier(gcmarker, cell, thing.kind());
}
void js::gc::PerformIncrementalReadBarrier(TenuredCell* cell) {
  // Internal version of previous function.

  MOZ_ASSERT(cell);
  MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());

  if (cell->isMarkedBlack()) {
    return;
  }

  Zone* zone = cell->zone();
  MOZ_ASSERT(zone->needsIncrementalBarrier());

  // Skip dispatching on known tracer type.
  GCMarker* gcmarker = GCMarker::fromTracer(zone->barrierTracer());
  TraceEdgeForBarrier(gcmarker, cell, cell->getTraceKind());
}
* cell
) {
1017 // The same as PerformIncrementalReadBarrier except for an extra check on the
1018 // runtime for cells in atoms zone.
1020 Zone
* zone
= cell
->zoneFromAnyThread();
1021 MOZ_ASSERT(zone
->needsIncrementalBarrier());
1024 if (cell
->isMarkedBlack()) {
1028 // Barriers can be triggered off the main thread by background finalization of
1029 // HeapPtrs to the atoms zone. We don't want to trigger the barrier in this
1031 bool checkThread
= zone
->isAtomsZone();
1032 JSRuntime
* runtime
= cell
->runtimeFromAnyThread();
1033 if (checkThread
&& !CurrentThreadCanAccessRuntime(runtime
)) {
1034 MOZ_ASSERT(CurrentThreadIsGCFinalizing());
1038 MOZ_ASSERT(CurrentThreadIsMainThread());
1039 MOZ_ASSERT(!JS::RuntimeHeapIsMajorCollecting());
1041 // Skip dispatching on known tracer type.
1042 GCMarker
* gcmarker
= GCMarker::fromTracer(zone
->barrierTracer());
1043 TraceEdgeForBarrier(gcmarker
, cell
, cell
->getTraceKind());
void js::gc::PerformIncrementalBarrierDuringFlattening(JSString* str) {
  TenuredCell* cell = &str->asTenured();

  // Skip eager marking of ropes during flattening. Their children will also be
  // barriered by the flattening process so we don't need to traverse them.
  if (str->isRope()) {
    cell->markBlack();
    return;
  }

  PerformIncrementalPreWriteBarrier(cell);
}
template <uint32_t opts, typename T>
void js::GCMarker::markAndTraverse(T* thing) {
  if (mark<opts>(thing)) {
    // We only mark permanent things during initialization.
    MOZ_ASSERT_IF(thing->isPermanentAndMayBeShared(),
                  !runtime()->permanentAtomsPopulated());

    // We don't need to pass MarkRootCompartments options on to children.
    constexpr uint32_t traverseOpts =
        opts & ~MarkingOptions::MarkRootCompartments;

    traverse<traverseOpts>(thing);

    if constexpr (bool(opts & MarkingOptions::MarkRootCompartments)) {
      // Mark the compartment as live.
      SetCompartmentHasMarkedCells(thing);
    }
  }
}
// The |traverse| method overloads select the traversal strategy for each kind.
//
// There are three possible strategies:
//
// 1. traceChildren
//
// The simplest traversal calls out to the fully generic traceChildren
// function to visit the child edges. In the absence of other traversal
// mechanisms, this function will rapidly grow the stack past its bounds and
// crash the process. Thus, this generic tracing should only be used in cases
// where subsequent tracing will not recurse.
//
// 2. scanChildren
//
// Strings, Shapes, and Scopes are extremely common, but have simple patterns
// of recursion. We traverse trees of these edges immediately, with
// aggressive, manual inlining, implemented by eagerlyTraceChildren.
//
// 3. pushThing
//
// Objects are extremely common and can contain arbitrarily nested graphs, so
// are not trivially inlined. In this case we use the mark stack to control
// recursion. JitCode shares none of these properties, but is included for
// historical reasons. JSScript normally cannot recurse, but may be used as a
// weakmap key and thereby recurse into weakmapped values.
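// In the overloads below: BaseShape, GetterSetter, JS::Symbol, JS::BigInt and
// RegExpShared use traceChildren; JSString, Shape, PropMap and js::Scope use
// scanChildren; JSObject, jit::JitCode and BaseScript use pushThing.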
template <uint32_t opts>
void GCMarker::traverse(BaseShape* thing) {
  traceChildren<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(GetterSetter* thing) {
  traceChildren<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(JS::Symbol* thing) {
  traceChildren<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(JS::BigInt* thing) {
  traceChildren<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(RegExpShared* thing) {
  traceChildren<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(JSString* thing) {
  scanChildren<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(Shape* thing) {
  scanChildren<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(PropMap* thing) {
  scanChildren<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(js::Scope* thing) {
  scanChildren<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(JSObject* thing) {
  pushThing<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(jit::JitCode* thing) {
  pushThing<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(BaseScript* thing) {
  pushThing<opts>(thing);
}
, typename T
>
1155 void js::GCMarker::traceChildren(T
* thing
) {
1156 MOZ_ASSERT(!thing
->isPermanentAndMayBeShared());
1157 MOZ_ASSERT(thing
->isMarkedAny());
1158 AutoSetTracingSource
asts(tracer(), thing
);
1159 thing
->traceChildren(tracer());
1162 template <uint32_t opts
, typename T
>
1163 void js::GCMarker::scanChildren(T
* thing
) {
1164 MOZ_ASSERT(!thing
->isPermanentAndMayBeShared());
1165 MOZ_ASSERT(thing
->isMarkedAny());
1166 eagerlyMarkChildren
<opts
>(thing
);
1169 template <uint32_t opts
, typename T
>
1170 void js::GCMarker::pushThing(T
* thing
) {
1171 MOZ_ASSERT(!thing
->isPermanentAndMayBeShared());
1172 MOZ_ASSERT(thing
->isMarkedAny());
1173 pushTaggedPtr(thing
);
template void js::GCMarker::markAndTraverse<MarkingOptions::None, JSObject>(
    JSObject* thing);
template void js::GCMarker::markAndTraverse<MarkingOptions::MarkImplicitEdges,
                                            JSObject>(JSObject* thing);
template void js::GCMarker::markAndTraverse<
    MarkingOptions::MarkRootCompartments, JSObject>(JSObject* thing);
#ifdef DEBUG
void GCMarker::setCheckAtomMarking(bool check) {
  MOZ_ASSERT(check != checkAtomMarking);
  checkAtomMarking = check;
}
#endif
template <typename S, typename T>
inline void GCMarker::checkTraversedEdge(S source, T* target) {
#ifdef DEBUG
  // Atoms and Symbols do not have or mark their internal pointers,
  // respectively.
  MOZ_ASSERT(!source->isPermanentAndMayBeShared());

  // Shared things are already black so we will not mark them.
  if (target->isPermanentAndMayBeShared()) {
    Zone* zone = target->zoneFromAnyThread();
    MOZ_ASSERT(!zone->wasGCStarted());
    MOZ_ASSERT(!zone->needsIncrementalBarrier());
    MOZ_ASSERT(target->isMarkedBlack());
    MOZ_ASSERT(!target->maybeCompartment());
    return;
  }

  Zone* sourceZone = source->zone();
  Zone* targetZone = target->zone();

  // Atoms and Symbols do not have access to a compartment pointer, or we'd
  // need to adjust the subsequent check to catch that case.
  MOZ_ASSERT_IF(targetZone->isAtomsZone(), !target->maybeCompartment());

  // The Zones must match, unless the target is an atom.
  MOZ_ASSERT(targetZone == sourceZone || targetZone->isAtomsZone());

  // If we are marking an atom, that atom must be marked in the source zone's
  // atom marking bitmap.
  if (checkAtomMarking && !sourceZone->isAtomsZone() &&
      targetZone->isAtomsZone()) {
    MOZ_ASSERT(target->runtimeFromAnyThread()->gc.atomMarking.atomIsMarked(
        sourceZone, reinterpret_cast<TenuredCell*>(target)));
  }

  // If we have access to a compartment pointer for both things, they must
  // match.
  MOZ_ASSERT_IF(source->maybeCompartment() && target->maybeCompartment(),
                source->maybeCompartment() == target->maybeCompartment());
#endif
}
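// Example (comment only) of the atom-marking invariant asserted above: when a
// cell in zone A holds an edge to an atom in the shared atoms zone, the atom
// must already be present in zone A's atom marking bitmap; traversing the
// edge here never adds it to that bitmap on A's behalf.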
template <uint32_t opts, typename S, typename T>
void js::GCMarker::markAndTraverseEdge(S source, T* target) {
  checkTraversedEdge(source, target);
  markAndTraverse<opts>(target);
}

template <uint32_t opts, typename S, typename T>
void js::GCMarker::markAndTraverseEdge(S source, const T& target) {
  ApplyGCThingTyped(target, [this, source](auto t) {
    this->markAndTraverseEdge<opts>(source, t);
  });
}
template <uint32_t opts, typename T>
bool js::GCMarker::mark(T* thing) {
  if (!thing->isTenured()) {
    return false;
  }

  AssertShouldMarkInZone(this, thing);

  MarkColor color =
      TraceKindCanBeGray<T>::value ? markColor() : MarkColor::Black;

  if constexpr (bool(opts & MarkingOptions::ParallelMarking)) {
    return thing->asTenured().markIfUnmarkedAtomic(color);
  }

  return thing->asTenured().markIfUnmarked(color);
}
1265 // Call the trace hook set on the object, if present.
1266 static inline void CallTraceHook(JSTracer
* trc
, JSObject
* obj
) {
1267 const JSClass
* clasp
= obj
->getClass();
1270 if (clasp
->hasTrace()) {
1271 AutoSetTracingSource
asts(trc
, obj
);
1272 clasp
->doTrace(trc
, obj
);
1276 static gcstats::PhaseKind
GrayMarkingPhaseForCurrentPhase(
1277 const gcstats::Statistics
& stats
) {
1278 using namespace gcstats
;
1279 switch (stats
.currentPhaseKind()) {
1280 case PhaseKind::MARK
:
1281 return PhaseKind::MARK_GRAY
;
1282 case PhaseKind::MARK_WEAK
:
1283 return PhaseKind::MARK_GRAY_WEAK
;
1285 MOZ_CRASH("Unexpected current phase");
void GCMarker::moveWork(GCMarker* dst, GCMarker* src) {
  MOZ_ASSERT(dst->stack.isEmpty());
  MOZ_ASSERT(src->canDonateWork());

  MarkStack::moveWork(dst->stack, src->stack);
}

bool GCMarker::initStack() {
  MOZ_ASSERT(!isActive());
  MOZ_ASSERT(markColor_ == gc::MarkColor::Black);
  return stack.init();
}

void GCMarker::resetStackCapacity() {
  MOZ_ASSERT(!isActive());
  MOZ_ASSERT(markColor_ == gc::MarkColor::Black);
  (void)stack.resetStackCapacity();
}

void GCMarker::freeStack() {
  MOZ_ASSERT(!isActive());
  MOZ_ASSERT(markColor_ == gc::MarkColor::Black);
  stack.clearAndFreeStack();
}
bool GCMarker::markUntilBudgetExhausted(SliceBudget& budget,
                                        ShouldReportMarkTime reportTime) {
#ifdef DEBUG
  MOZ_ASSERT(!strictCompartmentChecking);
  strictCompartmentChecking = true;
  auto acc = mozilla::MakeScopeExit([&] { strictCompartmentChecking = false; });
#endif

  if (budget.isOverBudget()) {
    return false;
  }

  if (isWeakMarking()) {
    return doMarking<MarkingOptions::MarkImplicitEdges>(budget, reportTime);
  }

  return doMarking<MarkingOptions::None>(budget, reportTime);
}
template <uint32_t opts>
bool GCMarker::doMarking(SliceBudget& budget, ShouldReportMarkTime reportTime) {
  GCRuntime& gc = runtime()->gc;

  // This method leaves the mark color as it found it.

  if (hasBlackEntries() && !markOneColor<opts, MarkColor::Black>(budget)) {
    return false;
  }

  if (hasGrayEntries()) {
    mozilla::Maybe<gcstats::AutoPhase> ap;
    if (reportTime) {
      auto& stats = runtime()->gc.stats();
      ap.emplace(stats, GrayMarkingPhaseForCurrentPhase(stats));
    }

    if (!markOneColor<opts, MarkColor::Gray>(budget)) {
      return false;
    }
  }

  // Mark children of things that caused too deep recursion during the above
  // tracing. All normal marking happens before any delayed marking.
  if (gc.hasDelayedMarking()) {
    gc.markAllDelayedChildren(reportTime);
  }

  MOZ_ASSERT(!gc.hasDelayedMarking());
  MOZ_ASSERT(isDrained());

  return true;
}
, MarkColor color
>
1368 bool GCMarker::markOneColor(SliceBudget
& budget
) {
1369 AutoSetMarkColor
setColor(*this, color
);
1371 while (processMarkStackTop
<opts
>(budget
)) {
1372 if (stack
.isEmpty()) {
bool GCMarker::markCurrentColorInParallel(SliceBudget& budget) {
  ParallelMarker::AtomicCount& waitingTaskCount =
      parallelMarker_->waitingTaskCountRef();

  while (processMarkStackTop<MarkingOptions::ParallelMarking>(budget)) {
    if (stack.isEmpty()) {
      return true;
    }

    // TODO: It might be better to only check this occasionally, possibly
    // combined with the slice budget check. Experiments with giving this its
    // own counter resulted in worse performance.
    if (waitingTaskCount && canDonateWork()) {
      parallelMarker_->donateWorkFrom(this);
    }
  }

  return false;
}
static inline void CheckForCompartmentMismatch(JSObject* obj, JSObject* obj2) {
#ifdef DEBUG
  if (MOZ_UNLIKELY(obj->compartment() != obj2->compartment())) {
    fprintf(
        stderr,
        "Compartment mismatch in pointer from %s object slot to %s object\n",
        obj->getClass()->name, obj2->getClass()->name);
    MOZ_CRASH("Compartment mismatch");
  }
#endif
}
static inline size_t NumUsedFixedSlots(NativeObject* obj) {
  return std::min(obj->numFixedSlots(), obj->slotSpan());
}

static inline size_t NumUsedDynamicSlots(NativeObject* obj) {
  size_t nfixed = obj->numFixedSlots();
  size_t nslots = obj->slotSpan();
  if (nslots < nfixed) {
    return 0;
  }

  return nslots - nfixed;
}
template <uint32_t opts>
inline bool GCMarker::processMarkStackTop(SliceBudget& budget) {
  /*
   * This function uses explicit goto and scans objects directly. This allows
   * us to eliminate tail recursion and significantly improve the marking
   * performance, see bug 641025.
   *
   * Note that the mutator can change the size and layout of objects between
   * marking slices, so we must check slots and element ranges read from the
   * stack.
   */

  MOZ_ASSERT(!stack.isEmpty());
  MOZ_ASSERT_IF(markColor() == MarkColor::Gray, !hasBlackEntries());

  JSObject* obj;             // The object being scanned.
  SlotsOrElementsKind kind;  // The kind of slot range being scanned, if any.
  HeapSlot* base;            // Slot range base pointer.
  size_t index;              // Index of the next slot to mark.
  size_t end;                // End of slot range to mark.

  if (stack.peekTag() == MarkStack::SlotsOrElementsRangeTag) {
    auto range = stack.popSlotsOrElementsRange();
    obj = range.ptr().asRangeObject();
    NativeObject* nobj = &obj->as<NativeObject>();
    kind = range.kind();
    index = range.start();

    switch (kind) {
      case SlotsOrElementsKind::FixedSlots: {
        base = nobj->fixedSlots();
        end = NumUsedFixedSlots(nobj);
        break;
      }

      case SlotsOrElementsKind::DynamicSlots: {
        base = nobj->slots_;
        end = NumUsedDynamicSlots(nobj);
        break;
      }

      case SlotsOrElementsKind::Elements: {
        base = nobj->getDenseElements();

        // Account for shifted elements.
        size_t numShifted = nobj->getElementsHeader()->numShiftedElements();
        size_t initlen = nobj->getDenseInitializedLength();
        index = std::max(index, numShifted) - numShifted;
        end = initlen;
        break;
      }

      case SlotsOrElementsKind::Unused: {
        MOZ_MAKE_COMPILER_ASSUME_IS_UNREACHABLE("Unused SlotsOrElementsKind");
      }
    }

    goto scan_value_range;
  }
  budget.step();
  if (budget.isOverBudget()) {
    return false;
  }

  {
    MarkStack::TaggedPtr ptr = stack.popPtr();
    switch (ptr.tag()) {
      case MarkStack::ObjectTag: {
        obj = ptr.as<JSObject>();
        AssertShouldMarkInZone(this, obj);
        goto scan_obj;
      }

      case MarkStack::JitCodeTag: {
        auto* code = ptr.as<jit::JitCode>();
        AutoSetTracingSource asts(tracer(), code);
        code->traceChildren(tracer());
        return true;
      }

      case MarkStack::ScriptTag: {
        auto* script = ptr.as<BaseScript>();
        if constexpr (bool(opts & MarkingOptions::MarkImplicitEdges)) {
          markImplicitEdges(script);
        }
        AutoSetTracingSource asts(tracer(), script);
        script->traceChildren(tracer());
        return true;
      }

      default:
        MOZ_CRASH("Invalid tag in mark stack");
    }
  }
scan_value_range:
  while (index < end) {
    budget.step();
    if (budget.isOverBudget()) {
      pushValueRange(obj, kind, index, end);
      return false;
    }

    const Value& v = base[index];
    index++;

    if (v.isString()) {
      markAndTraverseEdge<opts>(obj, v.toString());
    } else if (v.hasObjectPayload()) {
      JSObject* obj2 = &v.getObjectPayload();
#ifdef DEBUG
      if (!obj2) {
        fprintf(stderr,
                "processMarkStackTop found ObjectValue(nullptr) "
                "at %zu Values from end of range in object:\n",
                size_t(end - (index - 1)));
        obj->dump();
      }
#endif
      CheckForCompartmentMismatch(obj, obj2);
      if (mark<opts>(obj2)) {
        // Save the rest of this value range for later and start scanning
        // obj2's children.
        pushValueRange(obj, kind, index, end);
        obj = obj2;
        goto scan_obj;
      }
    } else if (v.isSymbol()) {
      markAndTraverseEdge<opts>(obj, v.toSymbol());
    } else if (v.isBigInt()) {
      markAndTraverseEdge<opts>(obj, v.toBigInt());
    } else if (v.isPrivateGCThing()) {
      // v.toGCCellPtr cannot be inlined, so construct one manually.
      Cell* cell = v.toGCThing();
      markAndTraverseEdge<opts>(obj, JS::GCCellPtr(cell, cell->getTraceKind()));
    }
  }

  return true;
scan_obj: {
  AssertShouldMarkInZone(this, obj);

  if constexpr (bool(opts & MarkingOptions::MarkImplicitEdges)) {
    markImplicitEdges(obj);
  }
  markAndTraverseEdge<opts>(obj, obj->shape());

  CallTraceHook(tracer(), obj);

  if (!obj->is<NativeObject>()) {
    return true;
  }

  NativeObject* nobj = &obj->as<NativeObject>();

  unsigned nslots = nobj->slotSpan();

  do {
    if (nobj->hasEmptyElements()) {
      break;
    }

    base = nobj->getDenseElements();
    kind = SlotsOrElementsKind::Elements;
    index = 0;
    end = nobj->getDenseInitializedLength();

    if (!nslots) {
      goto scan_value_range;
    }
    pushValueRange(nobj, kind, index, end);
  } while (false);

  unsigned nfixed = nobj->numFixedSlots();

  base = nobj->fixedSlots();
  kind = SlotsOrElementsKind::FixedSlots;
  index = 0;

  if (nslots > nfixed) {
    pushValueRange(nobj, kind, index, nfixed);
    kind = SlotsOrElementsKind::DynamicSlots;
    base = nobj->slots_;
    end = nslots - nfixed;
    goto scan_value_range;
  }

  MOZ_ASSERT(nslots <= nobj->numFixedSlots());
  end = nslots;
  goto scan_value_range;
}
}
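// Worked example (comment only) of the flow above: scanning a native object
// with slotSpan() == 5, numFixedSlots() == 3 and no dense elements pushes the
// fixed-slot range [0, 3) onto the mark stack for later, then jumps to
// scan_value_range to scan the two dynamic slots immediately; the saved fixed
// range is popped and scanned by a subsequent call to processMarkStackTop.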
/*** Mark Stack *************************************************************/
static_assert(sizeof(MarkStack::TaggedPtr) == sizeof(uintptr_t),
              "A TaggedPtr should be the same size as a pointer");
static_assert((sizeof(MarkStack::SlotsOrElementsRange) % sizeof(uintptr_t)) ==
                  0,
              "SlotsOrElementsRange size should be a multiple of "
              "the pointer size");

static const size_t ValueRangeWords =
    sizeof(MarkStack::SlotsOrElementsRange) / sizeof(uintptr_t);
template <typename T>
struct MapTypeToMarkStackTag {};
template <>
struct MapTypeToMarkStackTag<JSObject*> {
  static const auto value = MarkStack::ObjectTag;
};
template <>
struct MapTypeToMarkStackTag<jit::JitCode*> {
  static const auto value = MarkStack::JitCodeTag;
};
template <>
struct MapTypeToMarkStackTag<BaseScript*> {
  static const auto value = MarkStack::ScriptTag;
};
static inline bool TagIsRangeTag(MarkStack::Tag tag) {
  return tag == MarkStack::SlotsOrElementsRangeTag;
}

inline MarkStack::TaggedPtr::TaggedPtr(Tag tag, Cell* ptr)
    : bits(tag | uintptr_t(ptr)) {
  assertValid();
}
inline uintptr_t MarkStack::TaggedPtr::tagUnchecked() const {
  return bits & TagMask;
}

inline MarkStack::Tag MarkStack::TaggedPtr::tag() const {
  auto tag = Tag(bits & TagMask);
  MOZ_ASSERT(tag <= LastTag);
  return tag;
}

inline Cell* MarkStack::TaggedPtr::ptr() const {
  return reinterpret_cast<Cell*>(bits & ~TagMask);
}

inline void MarkStack::TaggedPtr::assertValid() const {
  (void)tag();
  MOZ_ASSERT(IsCellPointerValid(ptr()));
}

template <typename T>
inline T* MarkStack::TaggedPtr::as() const {
  MOZ_ASSERT(tag() == MapTypeToMarkStackTag<T*>::value);
  MOZ_ASSERT(ptr()->isTenured());
  MOZ_ASSERT(ptr()->is<T>());
  return static_cast<T*>(ptr());
}

inline JSObject* MarkStack::TaggedPtr::asRangeObject() const {
  MOZ_ASSERT(TagIsRangeTag(tag()));
  MOZ_ASSERT(ptr()->isTenured());
  return ptr()->as<JSObject>();
}

inline JSRope* MarkStack::TaggedPtr::asTempRope() const {
  MOZ_ASSERT(tag() == TempRopeTag);
  return &ptr()->as<JSString>()->asRope();
}
inline MarkStack::SlotsOrElementsRange::SlotsOrElementsRange(
    SlotsOrElementsKind kindArg, JSObject* obj, size_t startArg)
    : startAndKind_((startArg << StartShift) | size_t(kindArg)),
      ptr_(SlotsOrElementsRangeTag, obj) {
  assertValid();
  MOZ_ASSERT(kind() == kindArg);
  MOZ_ASSERT(start() == startArg);
}

inline void MarkStack::SlotsOrElementsRange::assertValid() const {
  ptr_.assertValid();
  MOZ_ASSERT(TagIsRangeTag(ptr_.tag()));
}

inline SlotsOrElementsKind MarkStack::SlotsOrElementsRange::kind() const {
  return SlotsOrElementsKind(startAndKind_ & KindMask);
}

inline size_t MarkStack::SlotsOrElementsRange::start() const {
  return startAndKind_ >> StartShift;
}

inline MarkStack::TaggedPtr MarkStack::SlotsOrElementsRange::ptr() const {
  return ptr_;
}
MarkStack::MarkStack() { MOZ_ASSERT(isEmpty()); }

MarkStack::~MarkStack() { MOZ_ASSERT(isEmpty()); }

MarkStack::MarkStack(const MarkStack& other) {
  MOZ_CRASH("Compiler requires this but doesn't call it");
}

MarkStack& MarkStack::operator=(const MarkStack& other) {
  new (this) MarkStack(other);
  return *this;
}

MarkStack::MarkStack(MarkStack&& other) noexcept
    : stack_(std::move(other.stack_.ref())),
      topIndex_(other.topIndex_.ref())
#ifdef JS_GC_ZEAL
      ,
      maxCapacity_(other.maxCapacity_)
#endif
{
  other.topIndex_ = 0;
}

MarkStack& MarkStack::operator=(MarkStack&& other) noexcept {
  new (this) MarkStack(std::move(other));
  return *this;
}
bool MarkStack::init() { return resetStackCapacity(); }

bool MarkStack::resetStackCapacity() {
  MOZ_ASSERT(isEmpty());

  size_t capacity = MARK_STACK_BASE_CAPACITY;

#ifdef JS_GC_ZEAL
  capacity = std::min(capacity, maxCapacity_.ref());
#endif

  return resize(capacity);
}
) {
1770 MOZ_ASSERT(maxCapacity
!= 0);
1771 MOZ_ASSERT(isEmpty());
1773 maxCapacity_
= maxCapacity
;
1774 if (capacity() > maxCapacity_
) {
1775 // If the realloc fails, just keep using the existing stack; it's
1776 // not ideal but better than failing.
1777 (void)resize(maxCapacity_
);
1782 MOZ_ALWAYS_INLINE
bool MarkStack::indexIsEntryBase(size_t index
) const {
1783 // The mark stack holds both TaggedPtr and SlotsOrElementsRange entries, which
1784 // are one or two words long respectively. Determine whether |index| points to
1785 // the base of an entry (i.e. the lowest word in memory).
1787 // The possible cases are that |index| points to:
1788 // 1. a single word TaggedPtr entry => true
1789 // 2. the startAndKind_ word of SlotsOrElementsRange => true
1790 // (startAndKind_ is a uintptr_t tagged with SlotsOrElementsKind)
1791 // 3. the ptr_ word of SlotsOrElementsRange (itself a TaggedPtr) => false
1793 // To check for case 3, interpret the word as a TaggedPtr: if it is tagged as
1794 // a SlotsOrElementsRange tagged pointer then we are inside such a range and
1795 // |index| does not point to the base of an entry. This requires that no
1796 // startAndKind_ word can be interpreted as such, which is arranged by making
1797 // SlotsOrElementsRangeTag zero and all SlotsOrElementsKind tags non-zero.
1799 MOZ_ASSERT(index
< position());
1800 return stack()[index
].tagUnchecked() != SlotsOrElementsRangeTag
;
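// Illustrative layout (comment only) for the cases above, with the stack
// growing upward in memory:
//
//   index N+2: TaggedPtr       <- entry base (case 1)
//   index N+1: ptr_            <- not an entry base (case 3, range tag)
//   index N  : startAndKind_   <- entry base of a two-word range (case 2)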
void MarkStack::moveWork(MarkStack& dst, MarkStack& src) {
  // Move some work from |src| to |dst|. Assumes |dst| is empty.
  //
  // When this method runs during parallel marking, we are on the thread that
  // owns |src|, and the thread that owns |dst| is blocked waiting on the
  // ParallelMarkTask::resumed condition variable.

  // Limit the size of moves to stop threads with work spending too much time
  // donating it.
  static const size_t MaxWordsToMove = 4096;

  size_t totalWords = src.position();
  size_t wordsToMove = std::min(totalWords / 2, MaxWordsToMove);

  size_t targetPos = src.position() - wordsToMove;

  // Adjust the target position in case it points to the middle of a two word
  // entry.
  if (!src.indexIsEntryBase(targetPos)) {
    targetPos--;
    wordsToMove++;
  }
  MOZ_ASSERT(src.indexIsEntryBase(targetPos));
  MOZ_ASSERT(targetPos < src.position());
  MOZ_ASSERT(targetPos > 0);
  MOZ_ASSERT(wordsToMove == src.position() - targetPos);

  if (!dst.ensureSpace(wordsToMove)) {
    return;
  }

  // TODO: This doesn't have good cache behaviour when moving work between
  // threads. It might be better if the original thread ended up with the top
  // part of the stack, in other words if this method stole from the bottom of
  // the stack rather than the top.

  mozilla::PodCopy(dst.topPtr(), src.stack().begin() + targetPos, wordsToMove);
  dst.topIndex_ += wordsToMove;
  dst.peekPtr().assertValid();

  src.topIndex_ = targetPos;
#ifdef DEBUG
  src.poisonUnused();
#endif
  src.peekPtr().assertValid();
}
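// Worked example (comment only): with src.position() == 11, wordsToMove is
// min(11 / 2, 4096) = 5, so targetPos starts at 6. If the word at index 6 is
// the ptr_ half of a SlotsOrElementsRange, targetPos drops to 5 and
// wordsToMove grows to 6 so that the two-word entry is moved as a unit.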
void MarkStack::clearAndResetCapacity() {
  // Fall back to the smaller initial capacity so we don't hold on to excess
  // memory between GCs.
  stack().clear();
  topIndex_ = 0;
  (void)resetStackCapacity();
}

void MarkStack::clearAndFreeStack() {
  // Free all stack memory so we don't hold on to excess memory between GCs.
  stack().clearAndFree();
  topIndex_ = 0;
}
inline MarkStack::TaggedPtr* MarkStack::topPtr() { return &stack()[topIndex_]; }

template <typename T>
inline bool MarkStack::push(T* ptr) {
  return push(TaggedPtr(MapTypeToMarkStackTag<T*>::value, ptr));
}

inline bool MarkStack::pushTempRope(JSRope* rope) {
  return push(TaggedPtr(TempRopeTag, rope));
}
inline bool MarkStack::push(const TaggedPtr& ptr) {
  if (!ensureSpace(1)) {
    return false;
  }

  infalliblePush(ptr);
  return true;
}

inline void MarkStack::infalliblePush(const TaggedPtr& ptr) {
  *topPtr() = ptr;
  topIndex_++;
  MOZ_ASSERT(position() <= capacity());
}
inline bool MarkStack::push(JSObject* obj, SlotsOrElementsKind kind,
                            size_t start) {
  return push(SlotsOrElementsRange(kind, obj, start));
}

inline bool MarkStack::push(const SlotsOrElementsRange& array) {
  array.assertValid();

  if (!ensureSpace(ValueRangeWords)) {
    return false;
  }

  infalliblePush(array);
  return true;
}

inline void MarkStack::infalliblePush(const SlotsOrElementsRange& array) {
  *reinterpret_cast<SlotsOrElementsRange*>(topPtr()) = array;
  topIndex_ += ValueRangeWords;
  MOZ_ASSERT(position() <= capacity());
  MOZ_ASSERT(TagIsRangeTag(peekTag()));
}
inline const MarkStack::TaggedPtr& MarkStack::peekPtr() const {
  MOZ_ASSERT(!isEmpty());
  return stack()[topIndex_ - 1];
}

inline MarkStack::Tag MarkStack::peekTag() const {
  MOZ_ASSERT(!isEmpty());
  return peekPtr().tag();
}

inline MarkStack::TaggedPtr MarkStack::popPtr() {
  MOZ_ASSERT(!isEmpty());
  MOZ_ASSERT(!TagIsRangeTag(peekTag()));
  peekPtr().assertValid();
  topIndex_--;
  return *topPtr();
}

inline MarkStack::SlotsOrElementsRange MarkStack::popSlotsOrElementsRange() {
  MOZ_ASSERT(!isEmpty());
  MOZ_ASSERT(TagIsRangeTag(peekTag()));
  MOZ_ASSERT(position() >= ValueRangeWords);

  topIndex_ -= ValueRangeWords;
  const auto& array = *reinterpret_cast<SlotsOrElementsRange*>(topPtr());
  array.assertValid();
  return array;
}
inline bool MarkStack::ensureSpace(size_t count) {
  if (MOZ_LIKELY((topIndex_ + count) <= capacity())) {
    return !js::oom::ShouldFailWithOOM();
  }

  return enlarge(count);
}
1951 MOZ_NEVER_INLINE
bool MarkStack::enlarge(size_t count
) {
1952 size_t required
= capacity() + count
;
1953 size_t newCapacity
= mozilla::RoundUpPow2(required
);
1956 newCapacity
= std::min(newCapacity
, maxCapacity_
.ref());
1957 if (newCapacity
< required
) {
1962 return resize(newCapacity
);
1965 bool MarkStack::resize(size_t newCapacity
) {
1966 MOZ_ASSERT(newCapacity
!= 0);
1967 MOZ_ASSERT(newCapacity
>= position());
1969 if (!stack().resize(newCapacity
)) {
1977 inline void MarkStack::poisonUnused() {
1978 static_assert((JS_FRESH_MARK_STACK_PATTERN
& TagMask
) > LastTag
,
1979 "The mark stack poison pattern must not look like a valid "
1982 AlwaysPoison(stack().begin() + topIndex_
, JS_FRESH_MARK_STACK_PATTERN
,
1983 stack().capacity() - topIndex_
, MemCheckKind::MakeUndefined
);
1986 size_t MarkStack::sizeOfExcludingThis(
1987 mozilla::MallocSizeOf mallocSizeOf
) const {
1988 return stack().sizeOfExcludingThis(mallocSizeOf
);
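// Example of the growth policy in enlarge(): pushing a two-word range onto a
// full 4096-word stack gives required == 4098, which RoundUpPow2 raises to
// 8192 before clamping to maxCapacity_. If the clamped capacity is still too
// small the push fails and the caller falls back to delayed marking (see
// GCMarker::delayMarkingChildrenOnOOM below).
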
/*** GCMarker ***************************************************************/

/*
 * WeakMapTraceAction::Expand: the GC is recomputing the liveness of WeakMap
 * entries by expanding each live WeakMap into its constituent key->value edges,
 * a table of which will be consulted in a later phase whenever marking a
 * key directly.
 */
GCMarker::GCMarker(JSRuntime* rt)
    : tracer_(mozilla::VariantType<MarkingTracer>(), rt, this),
      haveSwappedStacks(false),
      markColor_(MarkColor::Black),
      incrementalWeakMapMarkingEnabled(
          TuningDefaults::IncrementalWeakMapMarkingEnabled),
      checkAtomMarking(true),
      strictCompartmentChecking(false) {}

bool GCMarker::init() { return stack.init(); }

void GCMarker::start() {
  MOZ_ASSERT(state == NotActive);
  MOZ_ASSERT(stack.isEmpty());
  state = RegularMarking;
  haveAllImplicitEdges = true;
  setMarkColor(MarkColor::Black);
}

static void ClearEphemeronEdges(JSRuntime* rt) {
  AutoEnterOOMUnsafeRegion oomUnsafe;
  for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
    if (!zone->gcEphemeronEdges().clear()) {
      oomUnsafe.crash("clearing weak keys in GCMarker::stop()");
    }
    if (!zone->gcNurseryEphemeronEdges().clear()) {
      oomUnsafe.crash("clearing (nursery) weak keys in GCMarker::stop()");
    }
  }
}

void GCMarker::stop() {
  MOZ_ASSERT(isDrained());
  MOZ_ASSERT(markColor() == MarkColor::Black);
  MOZ_ASSERT(!haveSwappedStacks);

  if (state == NotActive) {
    return;
  }
  state = NotActive;

  otherStack.clearAndFreeStack();
  ClearEphemeronEdges(runtime());
  unmarkGrayStack.clearAndFree();
}

void GCMarker::reset() {
  state = NotActive;

  stack.clearAndResetCapacity();
  otherStack.clearAndFreeStack();
  ClearEphemeronEdges(runtime());
  MOZ_ASSERT(isDrained());

  setMarkColor(MarkColor::Black);
  MOZ_ASSERT(!haveSwappedStacks);

  unmarkGrayStack.clearAndFree();
}

void GCMarker::setMarkColor(gc::MarkColor newColor) {
  if (markColor_ == newColor) {
    return;
  }

  // We don't support gray marking while there is black marking work to do.
  MOZ_ASSERT(!hasBlackEntries());

  markColor_ = newColor;

  // Switch stacks. We only need to do this if there are any stack entries (as
  // empty stacks are interchangeable) or to switch back to the original stack.
  if (!isDrained() || haveSwappedStacks) {
    std::swap(stack, otherStack);
    haveSwappedStacks = !haveSwappedStacks;
  }
}
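// Example of the stack-swapping scheme: if black marking has pushed entries
// and we then call setMarkColor(MarkColor::Gray), the black entries are set
// aside in |otherStack| and gray marking proceeds on an empty |stack|.
// Swapping back restores them, so entries are always processed under the
// color they were pushed with.
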
bool GCMarker::hasEntries(MarkColor color) const {
  const MarkStack& stackForColor = color == markColor() ? stack : otherStack;
  return stackForColor.hasEntries();
}

template <typename T>
inline void GCMarker::pushTaggedPtr(T* ptr) {
  checkZone(ptr);
  if (!stack.push(ptr)) {
    delayMarkingChildrenOnOOM(ptr);
  }
}

inline void GCMarker::pushValueRange(JSObject* obj, SlotsOrElementsKind kind,
                                     size_t start, size_t end) {
  checkZone(obj);
  MOZ_ASSERT(obj->is<NativeObject>());
  MOZ_ASSERT(start <= end);

  if (start == end) {
    return;
  }

  if (MOZ_UNLIKELY(!stack.push(obj, kind, start))) {
    delayMarkingChildrenOnOOM(obj);
  }
}

void GCMarker::repush(JSObject* obj) {
  MOZ_ASSERT(obj->asTenured().isMarkedAtLeast(markColor()));
  pushTaggedPtr(obj);
}

void GCMarker::setRootMarkingMode(bool newState) {
  if (newState) {
    setMarkingStateAndTracer<RootMarkingTracer>(RegularMarking, RootMarking);
  } else {
    setMarkingStateAndTracer<MarkingTracer>(RootMarking, RegularMarking);
  }
}

void GCMarker::enterParallelMarkingMode(ParallelMarker* pm) {
  MOZ_ASSERT(pm);
  MOZ_ASSERT(!parallelMarker_);
  setMarkingStateAndTracer<ParallelMarkingTracer>(RegularMarking,
                                                  ParallelMarking);
  parallelMarker_ = pm;
}

void GCMarker::leaveParallelMarkingMode() {
  MOZ_ASSERT(parallelMarker_);
  setMarkingStateAndTracer<MarkingTracer>(ParallelMarking, RegularMarking);
  parallelMarker_ = nullptr;
}

bool GCMarker::canDonateWork() const {
  // It's not worth the overhead of donating very few entries. For some
  // (non-parallelizable) workloads this can lead to constantly interrupting
  // marking work and makes parallel marking slower than single threaded.
  constexpr size_t MinWordCount = 12;

  static_assert(MinWordCount >= ValueRangeWords,
                "We must always leave at least one stack entry.");

  return stack.position() > MinWordCount;
}

template <typename Tracer>
void GCMarker::setMarkingStateAndTracer(MarkingState prev, MarkingState next) {
  MOZ_ASSERT(state == prev);
  state = next;
  tracer_.emplace<Tracer>(runtime(), this);
}

bool GCMarker::enterWeakMarkingMode() {
  MOZ_ASSERT(tracer()->weakMapAction() == JS::WeakMapTraceAction::Expand);
  if (!haveAllImplicitEdges) {
    return false;
  }

  // During weak marking mode, we maintain a table mapping weak keys to
  // entries in known-live weakmaps. Initialize it with the keys of marked
  // weakmaps -- or more precisely, the keys of marked weakmaps that are
  // mapped to not yet live values. (Once bug 1167452 implements incremental
  // weakmap marking, this initialization step will become unnecessary, as
  // the table will already hold all such keys.)
  //
  // Set state before doing anything else, so any new key that is marked
  // during the following gcEphemeronEdges scan will itself be looked up in
  // gcEphemeronEdges and marked according to ephemeron rules.
  setMarkingStateAndTracer<WeakMarkingTracer>(RegularMarking, WeakMarking);

  return true;
}

IncrementalProgress JS::Zone::enterWeakMarkingMode(GCMarker* marker,
                                                   SliceBudget& budget) {
  MOZ_ASSERT(marker->isWeakMarking());

  if (!marker->incrementalWeakMapMarkingEnabled) {
    for (WeakMapBase* m : gcWeakMapList()) {
      (void)m->markEntries(marker);
    }
    return IncrementalProgress::Finished;
  }

  // gcEphemeronEdges contains the keys from all weakmaps marked so far, or at
  // least the keys that might still need to be marked through. Scan through
  // gcEphemeronEdges and mark all values whose keys are marked. This marking
  // may recursively mark through other weakmap entries (immediately since we
  // are now in WeakMarking mode). The end result is a consistent state where
  // all values are marked if both their map and key are marked -- though note
  // that we may later leave weak marking mode, do some more marking, and then
  // enter back in.
  if (!isGCMarking()) {
    return IncrementalProgress::Finished;
  }

  MOZ_ASSERT(gcNurseryEphemeronEdges().count() == 0);

  // An OrderedHashMap::MutableRange stays valid even when the underlying table
  // (zone->gcEphemeronEdges) is mutated, which is useful here since we may add
  // additional entries while iterating over the Range.
  EphemeronEdgeTable::MutableRange r = gcEphemeronEdges().mutableAll();
  while (!r.empty()) {
    Cell* src = r.front().key;
    CellColor srcColor = gc::detail::GetEffectiveColor(marker, src);
    auto& edges = r.front().value;
    r.popFront();  // Pop before any mutations happen.

    if (edges.length() > 0) {
      uint32_t steps = edges.length();
      marker->markEphemeronEdges(edges, srcColor);
      budget.step(steps);
      if (budget.isOverBudget()) {
        return IncrementalProgress::NotFinished;
      }
    }
  }

  return IncrementalProgress::Finished;
}
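// The ephemeron marking rule applied by markEphemeronEdges above is, in
// effect: a value's color must reach at least min(map color, key color), so a
// value becomes black only when both its map and key are black, and gray when
// either is gray. GetEffectiveColor supplies the key's color here.
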
void GCMarker::leaveWeakMarkingMode() {
  if (state == RegularMarking) {
    return;
  }

  setMarkingStateAndTracer<MarkingTracer>(WeakMarking, RegularMarking);

  // The gcEphemeronEdges table is still populated and may be used during a
  // future weak marking mode within this GC.
}

void GCMarker::abortLinearWeakMarking() {
  haveAllImplicitEdges = false;
  if (state == WeakMarking) {
    leaveWeakMarkingMode();
  }
}

MOZ_NEVER_INLINE void GCMarker::delayMarkingChildrenOnOOM(Cell* cell) {
  runtime()->gc.delayMarkingChildren(cell, markColor());
}

bool GCRuntime::hasDelayedMarking() const {
  bool result = delayedMarkingList;
  MOZ_ASSERT(result == (markLaterArenas != 0));
  return result;
}

void GCRuntime::delayMarkingChildren(Cell* cell, MarkColor color) {
  // Synchronize access to delayed marking state during parallel marking.
  LockGuard<Mutex> lock(delayedMarkingLock);

  Arena* arena = cell->asTenured().arena();
  if (!arena->onDelayedMarkingList()) {
    arena->setNextDelayedMarkingArena(delayedMarkingList);
    delayedMarkingList = arena;
    markLaterArenas++;
  }

  if (!arena->hasDelayedMarking(color)) {
    arena->setHasDelayedMarking(color, true);
    delayedMarkingWorkAdded = true;
  }
}
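// The delayed marking list is an intrusive singly-linked list threaded
// through the arenas themselves, with a per-color flag on each arena. This
// keeps the OOM path allocation-free: when the mark stack cannot grow we
// record the cell's whole arena for rescanning rather than the cell itself.
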
void GCRuntime::markDelayedChildren(Arena* arena, MarkColor color) {
  JSTracer* trc = marker().tracer();
  JS::TraceKind kind = MapAllocToTraceKind(arena->getAllocKind());
  MarkColor colorToCheck =
      TraceKindCanBeMarkedGray(kind) ? color : MarkColor::Black;

  for (ArenaCellIterUnderGC cell(arena); !cell.done(); cell.next()) {
    if (cell->isMarked(colorToCheck)) {
      ApplyGCThingTyped(cell, kind, [trc, this](auto t) {
        t->traceChildren(trc);
        marker().markImplicitEdges(t);
      });
    }
  }
}

/*
 * Process arenas from |delayedMarkingList| by marking the unmarked children of
 * marked cells of color |color|.
 *
 * This is called twice, first to mark black children and then to mark gray
 * children (see markAllDelayedChildren below).
 */
void GCRuntime::processDelayedMarkingList(MarkColor color) {
  // Marking delayed children may add more arenas to the list, including arenas
  // we are currently processing or have previously processed. Handle this by
  // clearing a flag on each arena before marking its children. This flag will
  // be set again if the arena is re-added. Iterate the list until no new arenas
  // were added.

  AutoSetMarkColor setColor(marker(), color);

  do {
    delayedMarkingWorkAdded = false;
    for (Arena* arena = delayedMarkingList; arena;
         arena = arena->getNextDelayedMarking()) {
      if (arena->hasDelayedMarking(color)) {
        arena->setHasDelayedMarking(color, false);
        markDelayedChildren(arena, color);
      }
    }
    while (marker().hasEntriesForCurrentColor()) {
      SliceBudget budget = SliceBudget::unlimited();
      MOZ_ALWAYS_TRUE(
          marker().processMarkStackTop<NormalMarkingOptions>(budget));
    }
  } while (delayedMarkingWorkAdded);

  MOZ_ASSERT(marker().isDrained());
}
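// Note the fixpoint structure above: markDelayedChildren may itself fail to
// push and re-add arenas (setting delayedMarkingWorkAdded), so the do/while
// loop repeats until a complete pass over the list adds no new work.
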
void GCRuntime::markAllDelayedChildren(ShouldReportMarkTime reportTime) {
  MOZ_ASSERT(CurrentThreadIsMainThread() || CurrentThreadIsPerformingGC());
  MOZ_ASSERT(marker().isDrained());
  MOZ_ASSERT(hasDelayedMarking());

  mozilla::Maybe<gcstats::AutoPhase> ap;
  if (reportTime) {
    ap.emplace(stats(), gcstats::PhaseKind::MARK_DELAYED);
  }

  // We have a list of arenas containing marked cells with unmarked children
  // where we ran out of stack space during marking. Both black and gray cells
  // in these arenas may have unmarked children. Mark black children first.

  const MarkColor colors[] = {MarkColor::Black, MarkColor::Gray};
  for (MarkColor color : colors) {
    processDelayedMarkingList(color);
    rebuildDelayedMarkingList();
  }

  MOZ_ASSERT(!hasDelayedMarking());
}

void GCRuntime::rebuildDelayedMarkingList() {
  // Rebuild the delayed marking list, removing arenas which do not need
  // further marking.
  Arena* listTail = nullptr;
  forEachDelayedMarkingArena([&](Arena* arena) {
    if (!arena->hasAnyDelayedMarking()) {
      arena->clearDelayedMarkingState();
      MOZ_ASSERT(markLaterArenas);
      markLaterArenas--;
      return;
    }

    appendToDelayedMarkingList(&listTail, arena);
  });
  appendToDelayedMarkingList(&listTail, nullptr);
}

void GCRuntime::resetDelayedMarking() {
  MOZ_ASSERT(CurrentThreadIsMainThread());

  forEachDelayedMarkingArena([&](Arena* arena) {
    MOZ_ASSERT(arena->onDelayedMarkingList());
    arena->clearDelayedMarkingState();
    MOZ_ASSERT(markLaterArenas);
    markLaterArenas--;
  });
  delayedMarkingList = nullptr;
  MOZ_ASSERT(!markLaterArenas);
}

inline void GCRuntime::appendToDelayedMarkingList(Arena** listTail,
                                                  Arena* arena) {
  if (*listTail) {
    (*listTail)->updateNextDelayedMarkingArena(arena);
  } else {
    delayedMarkingList = arena;
  }
  *listTail = arena;
}

template <typename F>
inline void GCRuntime::forEachDelayedMarkingArena(F&& f) {
  Arena* arena = delayedMarkingList;
  Arena* next;
  while (arena) {
    next = arena->getNextDelayedMarking();
    f(arena);
    arena = next;
  }
}

void GCMarker::checkZone(void* p) {
  MOZ_ASSERT(state != NotActive);
  DebugOnly<Cell*> cell = static_cast<Cell*>(p);
  MOZ_ASSERT_IF(cell->isTenured(),
                cell->asTenured().zone()->isCollectingFromAnyThread());
}

size_t GCMarker::sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
  return mallocSizeOf(this) + stack.sizeOfExcludingThis(mallocSizeOf) +
         otherStack.sizeOfExcludingThis(mallocSizeOf);
}

/*** IsMarked / IsAboutToBeFinalized ****************************************/

template <typename T>
static inline void CheckIsMarkedThing(T* thing) {
#define IS_SAME_TYPE_OR(name, type, _, _1) std::is_same_v<type, T> ||
  static_assert(JS_FOR_EACH_TRACEKIND(IS_SAME_TYPE_OR) false,
                "Only the base cell layout types are allowed into "
                "marking/tracing internals");
#undef IS_SAME_TYPE_OR

#ifdef DEBUG
  MOZ_ASSERT(thing);

  // Allow any thread access to uncollected things.
  Zone* zone = thing->zoneFromAnyThread();
  if (thing->isPermanentAndMayBeShared()) {
    MOZ_ASSERT(!zone->wasGCStarted());
    MOZ_ASSERT(!zone->needsIncrementalBarrier());
    MOZ_ASSERT(thing->isMarkedBlack());
    return;
  }

  // Allow the current thread access if it is sweeping or in sweep-marking, but
  // try to check the zone. Some threads have access to all zones when sweeping.
  JS::GCContext* gcx = TlsGCContext.get();
  MOZ_ASSERT(gcx->gcUse() != GCUse::Finalizing);
  if (gcx->gcUse() == GCUse::Sweeping || gcx->gcUse() == GCUse::Marking) {
    MOZ_ASSERT_IF(gcx->gcSweepZone(),
                  gcx->gcSweepZone() == zone || zone->isAtomsZone());
    return;
  }

  // Otherwise only allow access from the main thread or this zone's associated
  // thread.
  MOZ_ASSERT(CurrentThreadCanAccessRuntime(thing->runtimeFromAnyThread()) ||
             CurrentThreadCanAccessZone(thing->zoneFromAnyThread()));
#endif
}

template <typename T>
bool js::gc::IsMarkedInternal(JSRuntime* rt, T* thing) {
  // Don't depend on the mark state of other cells during finalization.
  MOZ_ASSERT(!CurrentThreadIsGCFinalizing());
  MOZ_ASSERT(rt->heapState() != JS::HeapState::MinorCollecting);

  CheckIsMarkedThing(thing);

  // This is not used during minor sweeping nor used to update moved GC things.
  MOZ_ASSERT(!IsForwarded(thing));

  // Permanent things are never marked by non-owning runtimes.
  TenuredCell* cell = &thing->asTenured();
  Zone* zone = cell->zoneFromAnyThread();

  if (IsOwnedByOtherRuntime(rt, thing)) {
    MOZ_ASSERT(!zone->wasGCStarted());
    MOZ_ASSERT(thing->isMarkedBlack());
    return true;
  }

  return !zone->isGCMarking() || TenuredThingIsMarkedAny(thing);
}

template <typename T>
bool js::gc::IsAboutToBeFinalizedInternal(T* thing) {
  // Don't depend on the mark state of other cells during finalization.
  MOZ_ASSERT(!CurrentThreadIsGCFinalizing());

  CheckIsMarkedThing(thing);

  // This is not used during minor sweeping nor used to update moved GC things.
  MOZ_ASSERT(!IsForwarded(thing));

  if (!thing->isTenured()) {
    return false;
  }

  // Permanent things are never finalized by non-owning runtimes.
  TenuredCell* cell = &thing->asTenured();
  Zone* zone = cell->zoneFromAnyThread();

  JSRuntime* rt = TlsGCContext.get()->runtimeFromAnyThread();
  if (IsOwnedByOtherRuntime(rt, thing)) {
    MOZ_ASSERT(!zone->wasGCStarted());
    MOZ_ASSERT(thing->isMarkedBlack());
    return false;
  }

  return zone->isGCSweeping() && !TenuredThingIsMarkedAny(thing);
}

template <typename T>
bool js::gc::IsAboutToBeFinalizedInternal(const T& thing) {
  bool dying = false;
  ApplyGCThingTyped(
      thing, [&dying](auto t) { dying = IsAboutToBeFinalizedInternal(t); });
  return dying;
}
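// A minimal sketch of how these predicates are typically used when sweeping a
// weak cache (the cache and entry names here are illustrative only):
//
//   for (auto e = cache.all(); !e.empty(); e.popFront()) {
//     if (IsAboutToBeFinalizedInternal(e.front().get())) {
//       e.removeFront();  // referent is dying; drop the entry
//     }
//   }
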
SweepingTracer::SweepingTracer(JSRuntime* rt)
    : GenericTracerImpl(rt, JS::TracerKind::Sweeping,
                        JS::WeakMapTraceAction::TraceKeysAndValues) {}

template <typename T>
inline void SweepingTracer::onEdge(T** thingp, const char* name) {
  T* thing = *thingp;
  CheckIsMarkedThing(thing);

  if (!thing->isTenured()) {
    return;
  }

  // Permanent things are never finalized by non-owning runtimes.
  TenuredCell* cell = &thing->asTenured();
  Zone* zone = cell->zoneFromAnyThread();

  if (IsOwnedByOtherRuntime(runtime(), thing)) {
    MOZ_ASSERT(!zone->wasGCStarted());
    MOZ_ASSERT(thing->isMarkedBlack());
    return;
  }

  // It would be nice if we could assert that the zone of the tenured cell is
  // in the Sweeping state, but that isn't always true for:
  //  - the jitcode map
  if (zone->isGCSweeping() && !cell->isMarkedAny()) {
    *thingp = nullptr;
  }
}
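// SweepingTracer implements weak edge semantics: tracing an edge with it
// never keeps the target alive; instead the edge is cleared (set to nullptr)
// when the target is unmarked in a sweeping zone. TraceWeakEdge below exposes
// this behaviour for JS::Heap<T> edges.
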
namespace js::gc {

template <typename T>
JS_PUBLIC_API bool TraceWeakEdge(JSTracer* trc, JS::Heap<T>* thingp) {
  return TraceEdgeInternal(trc, gc::ConvertToBase(thingp->unsafeGet()),
                           "JS::Heap edge");
}

template <typename T>
JS_PUBLIC_API bool EdgeNeedsSweepUnbarrieredSlow(T* thingp) {
  return IsAboutToBeFinalizedInternal(*ConvertToBase(thingp));
}

// Instantiate a copy of the Tracing templates for each public GC type.
#define INSTANTIATE_ALL_VALID_HEAP_TRACE_FUNCTIONS(type)                   \
  template JS_PUBLIC_API bool TraceWeakEdge<type>(JSTracer * trc,          \
                                                  JS::Heap<type>* thingp); \
  template JS_PUBLIC_API bool EdgeNeedsSweepUnbarrieredSlow<type>(type*);
JS_FOR_EACH_PUBLIC_GC_POINTER_TYPE(INSTANTIATE_ALL_VALID_HEAP_TRACE_FUNCTIONS)
JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(
    INSTANTIATE_ALL_VALID_HEAP_TRACE_FUNCTIONS)

#define INSTANTIATE_INTERNAL_IS_MARKED_FUNCTION(type) \
  template bool IsMarkedInternal(JSRuntime* rt, type thing);

#define INSTANTIATE_INTERNAL_IATBF_FUNCTION(type) \
  template bool IsAboutToBeFinalizedInternal(type thingp);

#define INSTANTIATE_INTERNAL_MARKING_FUNCTIONS_FROM_TRACEKIND(_1, type, _2, \
                                                              _3)           \
  INSTANTIATE_INTERNAL_IS_MARKED_FUNCTION(type*)                            \
  INSTANTIATE_INTERNAL_IATBF_FUNCTION(type*)

JS_FOR_EACH_TRACEKIND(INSTANTIATE_INTERNAL_MARKING_FUNCTIONS_FROM_TRACEKIND)

#define INSTANTIATE_IATBF_FUNCTION_FOR_TAGGED_POINTER(type) \
  INSTANTIATE_INTERNAL_IATBF_FUNCTION(const type&)

JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(
    INSTANTIATE_IATBF_FUNCTION_FOR_TAGGED_POINTER)

#undef INSTANTIATE_INTERNAL_IS_MARKED_FUNCTION
#undef INSTANTIATE_INTERNAL_IATBF_FUNCTION
#undef INSTANTIATE_INTERNAL_MARKING_FUNCTIONS_FROM_TRACEKIND
#undef INSTANTIATE_IATBF_FUNCTION_FOR_TAGGED_POINTER

}  // namespace js::gc

/*** Cycle Collector Barrier Implementation *********************************/

/*
 * The GC and CC are run independently. Consequently, the following sequence of
 * events can occur:
 * 1. GC runs and marks an object gray.
 * 2. The mutator runs (specifically, some C++ code with access to gray
 *    objects) and creates a pointer from a JS root or other black object to
 *    the gray object. If we re-ran a GC at this point, the object would now be
 *    black.
 * 3. Now we run the CC. It may think it can collect the gray object, even
 *    though it's reachable from the JS heap.
 *
 * To prevent this badness, we unmark the gray bit of an object when it is
 * accessed by callers outside XPConnect. This would cause the object to go
 * black in step 2 above. This must be done on everything reachable from the
 * object being returned. The following code takes care of the recursive
 * re-coloring.
 *
 * There is an additional complication for certain kinds of edges that are not
 * contained explicitly in the source object itself, such as from a weakmap key
 * to its value. These "implicit edges" are represented in some other container
 * object, such as the weakmap itself. In these cases, calling unmark gray on
 * an object won't find all of its children.
 *
 * Handling these implicit edges has two parts:
 * - A special pass enumerating all of the containers that know about the
 *   implicit edges to fix any black-gray edges that have been created. This
 *   is implemented in nsXPConnect::FixWeakMappingGrayBits.
 * - To prevent any incorrectly gray objects from escaping to live JS outside
 *   of the containers, we must add unmark-graying read barriers to these
 *   containers.
 */

#ifdef DEBUG
struct AssertNonGrayTracer final : public JS::CallbackTracer {
  // This is used by the UnmarkGray tracer only, and needs to report itself as
  // the non-gray tracer to not trigger assertions. Do not use it in another
  // context without making this more generic.
  explicit AssertNonGrayTracer(JSRuntime* rt)
      : JS::CallbackTracer(rt, JS::TracerKind::UnmarkGray) {}
  void onChild(JS::GCCellPtr thing, const char* name) override {
    MOZ_ASSERT(!thing.asCell()->isMarkedGray());
  }
};
#endif

class js::gc::UnmarkGrayTracer final : public JS::CallbackTracer {
 public:
  // We set weakMapAction to WeakMapTraceAction::Skip because the cycle
  // collector will fix up any color mismatches involving weakmaps when it runs.
  explicit UnmarkGrayTracer(GCMarker* marker)
      : JS::CallbackTracer(marker->runtime(), JS::TracerKind::UnmarkGray,
                           JS::WeakMapTraceAction::Skip),
        unmarkedAny(false),
        oom(false),
        marker(marker),
        stack(marker->unmarkGrayStack) {}

  void unmark(JS::GCCellPtr cell);

  // Whether we unmarked anything.
  bool unmarkedAny;

 private:
  // Whether we ran out of memory.
  bool oom;

  // Marker to use if we need to unmark in zones that are currently being
  // marked.
  GCMarker* marker;

  // Stack of cells to traverse.
  Vector<JS::GCCellPtr, 0, SystemAllocPolicy>& stack;

  void onChild(JS::GCCellPtr thing, const char* name) override;
};

void UnmarkGrayTracer::onChild(JS::GCCellPtr thing, const char* name) {
  Cell* cell = thing.asCell();

  // Cells in the nursery cannot be gray, and nor can certain kinds of tenured
  // cells. These must necessarily point only to black edges.
  if (!cell->isTenured() || !TraceKindCanBeMarkedGray(thing.kind())) {
#ifdef DEBUG
    MOZ_ASSERT(!cell->isMarkedGray());
    AssertNonGrayTracer nongray(runtime());
    JS::TraceChildren(&nongray, thing);
#endif
    return;
  }

  TenuredCell& tenured = cell->asTenured();
  Zone* zone = tenured.zone();

  // If the cell is in a zone whose mark bits are being cleared, then it will
  // end up white.
  if (zone->isGCPreparing()) {
    return;
  }

  // If the cell is in a zone that we're currently marking, then it's possible
  // that it is currently white but will end up gray. To handle this case,
  // trigger the barrier for any cells in zones that are currently being
  // marked. This will ensure they will eventually get marked black.
  if (zone->isGCMarking()) {
    if (!cell->isMarkedBlack()) {
      TraceEdgeForBarrier(marker, &tenured, thing.kind());
      unmarkedAny = true;
    }
    return;
  }

  if (!tenured.isMarkedGray()) {
    return;
  }

  // TODO: It may be a small improvement to only use the atomic version during
  // parallel marking.
  tenured.markBlackAtomic();
  unmarkedAny = true;

  if (!stack.append(thing)) {
    oom = true;
  }
}
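// onChild() above performs the color flip for a single cell; unmark() below
// drives an iterative depth-first traversal using the shared unmarkGrayStack
// rather than native recursion, so deep gray subgraphs cannot overflow the C
// stack.
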
void UnmarkGrayTracer::unmark(JS::GCCellPtr cell) {
  MOZ_ASSERT(stack.empty());

  onChild(cell, "unmarking root");

  while (!stack.empty() && !oom) {
    TraceChildren(this, stack.popCopy());
  }

  if (oom) {
    // If we run out of memory, we take a drastic measure: require that we
    // GC again before the next CC.
    stack.clear();
    runtime()->gc.setGrayBitsInvalid();
  }
}

bool js::gc::UnmarkGrayGCThingUnchecked(GCMarker* marker, JS::GCCellPtr thing) {
  MOZ_ASSERT(thing);
  MOZ_ASSERT(thing.asCell()->isMarkedGray());

  mozilla::Maybe<AutoGeckoProfilerEntry> profilingStackFrame;
  if (JSContext* cx = TlsContext.get()) {
    profilingStackFrame.emplace(cx, "UnmarkGrayGCThing",
                                JS::ProfilingCategoryPair::GCCC_UnmarkGray);
  }

  UnmarkGrayTracer unmarker(marker);
  unmarker.unmark(thing);
  return unmarker.unmarkedAny;
}

JS_PUBLIC_API bool JS::UnmarkGrayGCThingRecursively(JS::GCCellPtr thing) {
  MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
  MOZ_ASSERT(!JS::RuntimeHeapIsCycleCollecting());

  JSRuntime* rt = thing.asCell()->runtimeFromMainThread();
  if (thing.asCell()->zone()->isGCPreparing()) {
    // Mark bits are being cleared in preparation for GC.
    return false;
  }

  return UnmarkGrayGCThingUnchecked(&rt->gc.marker(), thing);
}

void js::gc::UnmarkGrayGCThingRecursively(TenuredCell* cell) {
  JS::UnmarkGrayGCThingRecursively(JS::GCCellPtr(cell, cell->getTraceKind()));
}

bool js::UnmarkGrayShapeRecursively(Shape* shape) {
  return JS::UnmarkGrayGCThingRecursively(JS::GCCellPtr(shape));
}

Cell* js::gc::UninlinedForwarded(const Cell* cell) { return Forwarded(cell); }

namespace js::debug {

MarkInfo GetMarkInfo(void* vp) {
  GCRuntime& gc = TlsGCContext.get()->runtime()->gc;
  if (gc.nursery().isInside(vp)) {
    return MarkInfo::NURSERY;
  }

  if (!gc.isPointerWithinTenuredCell(vp)) {
    return MarkInfo::UNKNOWN;
  }

  if (!IsCellPointerValid(vp)) {
    return MarkInfo::UNKNOWN;
  }

  TenuredCell* cell = reinterpret_cast<TenuredCell*>(vp);
  if (cell->isMarkedGray()) {
    return MarkInfo::GRAY;
  }
  if (cell->isMarkedBlack()) {
    return MarkInfo::BLACK;
  }
  return MarkInfo::UNMARKED;
}
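// These helpers are primarily useful for interactive debugging; for example,
// calling js::debug::GetMarkInfo on an arbitrary heap pointer from a debugger
// classifies it as NURSERY, GRAY, BLACK, UNMARKED or UNKNOWN.
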
uintptr_t* GetMarkWordAddress(Cell* cell) {
  if (!cell->isTenured()) {
    return nullptr;
  }

  MarkBitmapWord* wordp;
  uintptr_t mask;
  TenuredChunkBase* chunk = gc::detail::GetCellChunkBase(&cell->asTenured());
  chunk->markBits.getMarkWordAndMask(&cell->asTenured(), ColorBit::BlackBit,
                                     &wordp, &mask);
  return reinterpret_cast<uintptr_t*>(wordp);
}

uintptr_t GetMarkMask(Cell* cell, uint32_t colorBit) {
  MOZ_ASSERT(colorBit == 0 || colorBit == 1);

  if (!cell->isTenured()) {
    return 0;
  }

  ColorBit bit = colorBit == 0 ? ColorBit::BlackBit : ColorBit::GrayOrBlackBit;
  MarkBitmapWord* wordp;
  uintptr_t mask;
  TenuredChunkBase* chunk = gc::detail::GetCellChunkBase(&cell->asTenured());
  chunk->markBits.getMarkWordAndMask(&cell->asTenured(), bit, &wordp, &mask);
  return mask;
}

}  // namespace js::debug