1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 #include "gc/Marking-inl.h"
9 #include "mozilla/DebugOnly.h"
10 #include "mozilla/IntegerRange.h"
11 #include "mozilla/MathAlgorithms.h"
12 #include "mozilla/Maybe.h"
13 #include "mozilla/PodOperations.h"
14 #include "mozilla/ScopeExit.h"
17 #include <type_traits>
19 #include "gc/GCInternals.h"
20 #include "gc/ParallelMarking.h"
21 #include "gc/TraceKind.h"
22 #include "jit/JitCode.h"
23 #include "js/GCTypeMacros.h" // JS_FOR_EACH_PUBLIC_{,TAGGED_}GC_POINTER_TYPE
24 #include "js/SliceBudget.h"
25 #include "util/Poison.h"
26 #include "vm/GeneratorObject.h"
28 #include "gc/GC-inl.h"
29 #include "gc/PrivateIterators-inl.h"
30 #include "gc/TraceMethods-inl.h"
31 #include "gc/WeakMap-inl.h"
32 #include "vm/GeckoProfiler-inl.h"
35 using namespace js::gc
;
37 using JS::MapTypeToTraceKind
;
39 using mozilla::DebugOnly
;
40 using mozilla::IntegerRange
;
41 using mozilla::PodCopy
;
48 // Tracing, in this context, refers to an abstract visitation of some or all of
49 // the GC-controlled heap. The effect of tracing an edge of the graph depends
50 // on the subclass of the JSTracer on whose behalf we are tracing.
55 // The primary JSTracer is the GCMarker. The marking tracer causes the target
56 // of each traversed edge to be marked black and the target edge's children to
57 // be marked either gray (in the gc algorithm sense) or immediately black.
62 // The secondary JSTracer is the CallbackTracer. This simply invokes a callback
63 // on each edge in a child.
65 // The following is a rough outline of the general struture of the tracing
68 /* clang-format off */
70 // +-------------------+ ......................
73 // | TraceRoot TraceEdge TraceRange GCMarker:: | |
74 // | | | | processMarkStackTop | Mark |
75 // | +-----------------------+ | | Stack |
78 // | TraceEdgeInternal | ^
79 // | | +<-------------+ :
82 // | CallbackTracer:: markAndTraverseEdge | :
86 // | +-------------+---------------+ | :
89 // | markAndTraverse | :
94 // | +--------------------------------------+ | :
97 // | markAndTraceChildren markAndPush eagerlyMarkChildren | :
99 // | v : +-----------+ :
100 // | T::traceChildren : :
102 // +-------------+ ......................................
105 // ------- Direct calls
108 /* clang-format on */
110 /*** Tracing Invariants *****************************************************/
112 template <typename T
>
113 static inline bool IsOwnedByOtherRuntime(JSRuntime
* rt
, T thing
) {
114 bool other
= thing
->runtimeFromAnyThread() != rt
;
115 MOZ_ASSERT_IF(other
, thing
->isPermanentAndMayBeShared());
121 static inline bool IsInFreeList(TenuredCell
* cell
) {
122 Arena
* arena
= cell
->arena();
123 uintptr_t addr
= reinterpret_cast<uintptr_t>(cell
);
124 MOZ_ASSERT(Arena::isAligned(addr
, arena
->getThingSize()));
125 return arena
->inFreeList(addr
);
128 template <typename T
>
129 void js::CheckTracedThing(JSTracer
* trc
, T
* thing
) {
133 if (IsForwarded(thing
)) {
134 JS::TracerKind kind
= trc
->kind();
135 MOZ_ASSERT(kind
== JS::TracerKind::Tenuring
||
136 kind
== JS::TracerKind::MinorSweeping
||
137 kind
== JS::TracerKind::Moving
);
138 thing
= Forwarded(thing
);
141 /* This function uses data that's not available in the nursery. */
142 if (IsInsideNursery(thing
)) {
147 * Permanent shared things that are not associated with this runtime will be
148 * ignored during marking.
150 Zone
* zone
= thing
->zoneFromAnyThread();
151 if (IsOwnedByOtherRuntime(trc
->runtime(), thing
)) {
152 MOZ_ASSERT(!zone
->wasGCStarted());
153 MOZ_ASSERT(thing
->isMarkedBlack());
157 JSRuntime
* rt
= trc
->runtime();
158 MOZ_ASSERT(zone
->runtimeFromAnyThread() == rt
);
160 bool isGcMarkingTracer
= trc
->isMarkingTracer();
161 bool isUnmarkGrayTracer
= IsTracerKind(trc
, JS::TracerKind::UnmarkGray
);
162 bool isClearEdgesTracer
= IsTracerKind(trc
, JS::TracerKind::ClearEdges
);
164 if (TlsContext
.get()) {
165 // If we're on the main thread we must have access to the runtime and zone.
166 MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt
));
167 MOZ_ASSERT(CurrentThreadCanAccessZone(zone
));
169 MOZ_ASSERT(isGcMarkingTracer
|| isUnmarkGrayTracer
|| isClearEdgesTracer
||
170 IsTracerKind(trc
, JS::TracerKind::Moving
) ||
171 IsTracerKind(trc
, JS::TracerKind::Sweeping
));
172 MOZ_ASSERT_IF(!isClearEdgesTracer
, CurrentThreadIsPerformingGC());
175 MOZ_ASSERT(thing
->isAligned());
176 MOZ_ASSERT(MapTypeToTraceKind
<std::remove_pointer_t
<T
>>::kind
==
177 thing
->getTraceKind());
180 * Check that we only mark allocated cells.
182 * This check is restricted to marking for two reasons: Firstly, if background
183 * sweeping is running and concurrently modifying the free list then it is not
184 * safe. Secondly, it was thought to be slow so this is a compromise so as to
185 * not affect test times too much.
187 MOZ_ASSERT_IF(zone
->isGCMarking(), !IsInFreeList(&thing
->asTenured()));
190 template <typename T
>
191 void js::CheckTracedThing(JSTracer
* trc
, const T
& thing
) {
192 ApplyGCThingTyped(thing
, [trc
](auto t
) { CheckTracedThing(trc
, t
); });
195 template <typename T
>
196 static void CheckMarkedThing(GCMarker
* gcMarker
, T
* thing
) {
197 Zone
* zone
= thing
->zoneFromAnyThread();
199 MOZ_ASSERT(zone
->shouldMarkInZone(gcMarker
->markColor()));
201 MOZ_ASSERT_IF(gcMarker
->shouldCheckCompartments(),
202 zone
->isCollectingFromAnyThread() || zone
->isAtomsZone());
204 MOZ_ASSERT_IF(gcMarker
->markColor() == MarkColor::Gray
,
205 !zone
->isGCMarkingBlackOnly() || zone
->isAtomsZone());
207 MOZ_ASSERT(!(zone
->isGCSweeping() || zone
->isGCFinished() ||
208 zone
->isGCCompacting()));
210 // Check that we don't stray from the current compartment and zone without
211 // using TraceCrossCompartmentEdge.
212 Compartment
* comp
= thing
->maybeCompartment();
213 MOZ_ASSERT_IF(gcMarker
->tracingCompartment
&& comp
,
214 gcMarker
->tracingCompartment
== comp
);
215 MOZ_ASSERT_IF(gcMarker
->tracingZone
,
216 gcMarker
->tracingZone
== zone
|| zone
->isAtomsZone());
221 # define IMPL_CHECK_TRACED_THING(_, type, _1, _2) \
222 template void CheckTracedThing<type>(JSTracer*, type*);
223 JS_FOR_EACH_TRACEKIND(IMPL_CHECK_TRACED_THING
);
224 # undef IMPL_CHECK_TRACED_THING
226 template void CheckTracedThing
<Value
>(JSTracer
*, const Value
&);
227 template void CheckTracedThing
<wasm::AnyRef
>(JSTracer
*, const wasm::AnyRef
&);
233 static inline bool ShouldMarkCrossCompartment(GCMarker
* marker
, JSObject
* src
,
235 MarkColor color
= marker
->markColor();
237 if (!dstCell
->isTenured()) {
239 // Bug 1743098: This shouldn't be possible but it does seem to happen. Log
240 // some useful information in debug builds.
241 if (color
!= MarkColor::Black
) {
243 "ShouldMarkCrossCompartment: cross compartment edge from gray "
244 "object to nursery thing\n");
245 fprintf(stderr
, "src: ");
247 fprintf(stderr
, "dst: ");
251 MOZ_ASSERT(color
== MarkColor::Black
);
254 TenuredCell
& dst
= dstCell
->asTenured();
256 JS::Zone
* dstZone
= dst
.zone();
257 if (!src
->zone()->isGCMarking() && !dstZone
->isGCMarking()) {
261 if (color
== MarkColor::Black
) {
262 // Check our sweep groups are correct: we should never have to
263 // mark something in a zone that we have started sweeping.
264 MOZ_ASSERT_IF(!dst
.isMarkedBlack(), !dstZone
->isGCSweeping());
267 * Having black->gray edges violates our promise to the cycle collector so
268 * we ensure that gray things we encounter when marking black end up getting
271 * This can happen for two reasons:
273 * 1) If we're collecting a compartment and it has an edge to an uncollected
274 * compartment it's possible that the source and destination of the
275 * cross-compartment edge should be gray, but the source was marked black by
278 * 2) If we yield during gray marking and the write barrier marks a gray
281 * We handle the first case before returning whereas the second case happens
282 * as part of normal marking.
284 if (dst
.isMarkedGray() && !dstZone
->isGCMarking()) {
285 UnmarkGrayGCThingUnchecked(marker
,
286 JS::GCCellPtr(&dst
, dst
.getTraceKind()));
290 return dstZone
->isGCMarking();
293 // Check our sweep groups are correct as above.
294 MOZ_ASSERT_IF(!dst
.isMarkedAny(), !dstZone
->isGCSweeping());
296 if (dstZone
->isGCMarkingBlackOnly()) {
298 * The destination compartment is being not being marked gray now,
299 * but it will be later, so record the cell so it can be marked gray
300 * at the appropriate time.
302 if (!dst
.isMarkedAny()) {
303 DelayCrossCompartmentGrayMarking(marker
, src
);
308 return dstZone
->isGCMarkingBlackAndGray();
311 static bool ShouldTraceCrossCompartment(JSTracer
* trc
, JSObject
* src
,
313 if (!trc
->isMarkingTracer()) {
317 return ShouldMarkCrossCompartment(GCMarker::fromTracer(trc
), src
, dstCell
);
320 static bool ShouldTraceCrossCompartment(JSTracer
* trc
, JSObject
* src
,
322 return val
.isGCThing() &&
323 ShouldTraceCrossCompartment(trc
, src
, val
.toGCThing());
328 inline void js::gc::AssertShouldMarkInZone(GCMarker
* marker
, Cell
* thing
) {
329 if (!thing
->isMarkedBlack()) {
330 Zone
* zone
= thing
->zone();
331 MOZ_ASSERT(zone
->isAtomsZone() ||
332 zone
->shouldMarkInZone(marker
->markColor()));
336 void js::gc::AssertRootMarkingPhase(JSTracer
* trc
) {
337 MOZ_ASSERT_IF(trc
->isMarkingTracer(),
338 trc
->runtime()->gc
.state() == State::NotActive
||
339 trc
->runtime()->gc
.state() == State::MarkRoots
);
344 /*** Tracing Interface ******************************************************/
346 template <typename T
>
347 static void TraceExternalEdgeHelper(JSTracer
* trc
, T
* thingp
,
349 MOZ_ASSERT(InternalBarrierMethods
<T
>::isMarkable(*thingp
));
350 TraceEdgeInternal(trc
, ConvertToBase(thingp
), name
);
353 JS_PUBLIC_API
void js::UnsafeTraceManuallyBarrieredEdge(JSTracer
* trc
,
356 TraceEdgeInternal(trc
, ConvertToBase(thingp
), name
);
359 template <typename T
>
360 static void TraceRootHelper(JSTracer
* trc
, T
* thingp
, const char* name
) {
362 js::TraceNullableRoot(trc
, thingp
, name
);
366 class AbstractGeneratorObject
;
370 #define DEFINE_TRACE_EXTERNAL_EDGE_FUNCTION(type) \
371 JS_PUBLIC_API void js::gc::TraceExternalEdge(JSTracer* trc, type* thingp, \
372 const char* name) { \
373 TraceExternalEdgeHelper(trc, thingp, name); \
376 // Define TraceExternalEdge for each public GC pointer type.
377 JS_FOR_EACH_PUBLIC_GC_POINTER_TYPE(DEFINE_TRACE_EXTERNAL_EDGE_FUNCTION
)
378 JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(DEFINE_TRACE_EXTERNAL_EDGE_FUNCTION
)
380 #undef DEFINE_TRACE_EXTERNAL_EDGE_FUNCTION
382 #define DEFINE_UNSAFE_TRACE_ROOT_FUNCTION(type) \
383 JS_PUBLIC_API void JS::TraceRoot(JSTracer* trc, type* thingp, \
384 const char* name) { \
385 TraceRootHelper(trc, thingp, name); \
388 // Define TraceRoot for each public GC pointer type.
389 JS_FOR_EACH_PUBLIC_GC_POINTER_TYPE(DEFINE_UNSAFE_TRACE_ROOT_FUNCTION
)
390 JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(DEFINE_UNSAFE_TRACE_ROOT_FUNCTION
)
392 // Also, for the moment, define TraceRoot for internal GC pointer types.
393 DEFINE_UNSAFE_TRACE_ROOT_FUNCTION(AbstractGeneratorObject
*)
394 DEFINE_UNSAFE_TRACE_ROOT_FUNCTION(SavedFrame
*)
395 DEFINE_UNSAFE_TRACE_ROOT_FUNCTION(wasm::AnyRef
)
397 #undef DEFINE_UNSAFE_TRACE_ROOT_FUNCTION
401 #define INSTANTIATE_INTERNAL_TRACE_FUNCTIONS(type) \
402 template void TraceRangeInternal<type>(JSTracer*, size_t len, type*, \
405 #define INSTANTIATE_INTERNAL_TRACE_FUNCTIONS_FROM_TRACEKIND(_1, type, _2, _3) \
406 INSTANTIATE_INTERNAL_TRACE_FUNCTIONS(type*)
408 JS_FOR_EACH_TRACEKIND(INSTANTIATE_INTERNAL_TRACE_FUNCTIONS_FROM_TRACEKIND
)
409 JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(INSTANTIATE_INTERNAL_TRACE_FUNCTIONS
)
410 INSTANTIATE_INTERNAL_TRACE_FUNCTIONS(TaggedProto
)
412 #undef INSTANTIATE_INTERNAL_TRACE_FUNCTIONS_FROM_TRACEKIND
413 #undef INSTANTIATE_INTERNAL_TRACE_FUNCTIONS
415 } // namespace js::gc
417 // In debug builds, makes a note of the current compartment before calling a
418 // trace hook or traceChildren() method on a GC thing.
419 class MOZ_RAII AutoSetTracingSource
{
422 template <typename T
>
423 AutoSetTracingSource(JSTracer
* trc
, T
* thing
) {}
424 ~AutoSetTracingSource() {}
426 GCMarker
* marker
= nullptr;
429 template <typename T
>
430 AutoSetTracingSource(JSTracer
* trc
, T
* thing
) {
431 if (trc
->isMarkingTracer() && thing
) {
432 marker
= GCMarker::fromTracer(trc
);
433 MOZ_ASSERT(!marker
->tracingZone
);
434 marker
->tracingZone
= thing
->asTenured().zone();
435 MOZ_ASSERT(!marker
->tracingCompartment
);
436 marker
->tracingCompartment
= thing
->maybeCompartment();
440 ~AutoSetTracingSource() {
442 marker
->tracingZone
= nullptr;
443 marker
->tracingCompartment
= nullptr;
449 // In debug builds, clear the trace hook compartment. This happens after the
450 // trace hook has called back into one of our trace APIs and we've checked the
452 class MOZ_RAII AutoClearTracingSource
{
455 explicit AutoClearTracingSource(GCMarker
* marker
) {}
456 explicit AutoClearTracingSource(JSTracer
* trc
) {}
457 ~AutoClearTracingSource() {}
459 GCMarker
* marker
= nullptr;
460 JS::Zone
* prevZone
= nullptr;
461 Compartment
* prevCompartment
= nullptr;
464 explicit AutoClearTracingSource(JSTracer
* trc
) {
465 if (trc
->isMarkingTracer()) {
466 marker
= GCMarker::fromTracer(trc
);
467 prevZone
= marker
->tracingZone
;
468 marker
->tracingZone
= nullptr;
469 prevCompartment
= marker
->tracingCompartment
;
470 marker
->tracingCompartment
= nullptr;
473 ~AutoClearTracingSource() {
475 marker
->tracingZone
= prevZone
;
476 marker
->tracingCompartment
= prevCompartment
;
482 template <typename T
>
483 void js::TraceManuallyBarrieredCrossCompartmentEdge(JSTracer
* trc
,
484 JSObject
* src
, T
* dst
,
486 // Clear expected compartment for cross-compartment edge.
487 AutoClearTracingSource
acts(trc
);
489 if (ShouldTraceCrossCompartment(trc
, src
, *dst
)) {
490 TraceEdgeInternal(trc
, dst
, name
);
493 template void js::TraceManuallyBarrieredCrossCompartmentEdge
<Value
>(
494 JSTracer
*, JSObject
*, Value
*, const char*);
495 template void js::TraceManuallyBarrieredCrossCompartmentEdge
<JSObject
*>(
496 JSTracer
*, JSObject
*, JSObject
**, const char*);
497 template void js::TraceManuallyBarrieredCrossCompartmentEdge
<BaseScript
*>(
498 JSTracer
*, JSObject
*, BaseScript
**, const char*);
500 template <typename T
>
501 void js::TraceSameZoneCrossCompartmentEdge(JSTracer
* trc
,
502 const BarrieredBase
<T
>* dst
,
505 if (trc
->isMarkingTracer()) {
506 T thing
= *dst
->unbarrieredAddress();
507 MOZ_ASSERT(thing
->maybeCompartment(),
508 "Use TraceEdge for GC things without a compartment");
510 GCMarker
* gcMarker
= GCMarker::fromTracer(trc
);
511 MOZ_ASSERT_IF(gcMarker
->tracingZone
,
512 thing
->zone() == gcMarker
->tracingZone
);
515 // Skip compartment checks for this edge.
516 if (trc
->kind() == JS::TracerKind::CompartmentCheck
) {
521 // Clear expected compartment for cross-compartment edge.
522 AutoClearTracingSource
acts(trc
);
523 TraceEdgeInternal(trc
, ConvertToBase(dst
->unbarrieredAddress()), name
);
525 template void js::TraceSameZoneCrossCompartmentEdge(
526 JSTracer
*, const BarrieredBase
<Shape
*>*, const char*);
528 template <typename T
>
529 void js::TraceWeakMapKeyEdgeInternal(JSTracer
* trc
, Zone
* weakMapZone
,
530 T
** thingp
, const char* name
) {
531 // We can't use ShouldTraceCrossCompartment here because that assumes the
532 // source of the edge is a CCW object which could be used to delay gray
533 // marking. Instead, assert that the weak map zone is in the same marking
534 // state as the target thing's zone and therefore we can go ahead and mark it.
536 auto thing
= *thingp
;
537 if (trc
->isMarkingTracer()) {
538 MOZ_ASSERT(weakMapZone
->isGCMarking());
539 MOZ_ASSERT(weakMapZone
->gcState() == thing
->zone()->gcState());
543 // Clear expected compartment for cross-compartment edge.
544 AutoClearTracingSource
acts(trc
);
546 TraceEdgeInternal(trc
, thingp
, name
);
549 template void js::TraceWeakMapKeyEdgeInternal
<JSObject
>(JSTracer
*, Zone
*,
552 template void js::TraceWeakMapKeyEdgeInternal
<BaseScript
>(JSTracer
*, Zone
*,
556 static Cell
* TraceGenericPointerRootAndType(JSTracer
* trc
, Cell
* thing
,
559 return MapGCThingTyped(thing
, kind
, [trc
, name
](auto t
) -> Cell
* {
560 TraceRoot(trc
, &t
, name
);
565 void js::TraceGenericPointerRoot(JSTracer
* trc
, Cell
** thingp
,
568 Cell
* thing
= *thingp
;
574 TraceGenericPointerRootAndType(trc
, thing
, thing
->getTraceKind(), name
);
575 if (traced
!= thing
) {
580 void js::TraceManuallyBarrieredGenericPointerEdge(JSTracer
* trc
, Cell
** thingp
,
583 Cell
* thing
= *thingp
;
588 auto* traced
= MapGCThingTyped(thing
, thing
->getTraceKind(),
589 [trc
, name
](auto t
) -> Cell
* {
590 TraceManuallyBarrieredEdge(trc
, &t
, name
);
593 if (traced
!= thing
) {
598 void js::TraceGCCellPtrRoot(JSTracer
* trc
, JS::GCCellPtr
* thingp
,
600 Cell
* thing
= thingp
->asCell();
606 TraceGenericPointerRootAndType(trc
, thing
, thingp
->kind(), name
);
609 *thingp
= JS::GCCellPtr();
610 } else if (traced
!= thingp
->asCell()) {
611 *thingp
= JS::GCCellPtr(traced
, thingp
->kind());
615 void js::TraceManuallyBarrieredGCCellPtr(JSTracer
* trc
, JS::GCCellPtr
* thingp
,
617 Cell
* thing
= thingp
->asCell();
622 Cell
* traced
= MapGCThingTyped(thing
, thing
->getTraceKind(),
623 [trc
, name
](auto t
) -> Cell
* {
624 TraceManuallyBarrieredEdge(trc
, &t
, name
);
629 // If we are clearing edges, also erase the type. This happens when using
631 *thingp
= JS::GCCellPtr();
632 } else if (traced
!= thingp
->asCell()) {
633 *thingp
= JS::GCCellPtr(traced
, thingp
->kind());
637 template <typename T
>
638 inline bool TraceTaggedPtrEdge(JSTracer
* trc
, T
* thingp
, const char* name
) {
639 // Return true by default. For some types the lambda below won't be called.
641 auto thing
= MapGCThingTyped(*thingp
, [&](auto thing
) {
642 if (!TraceEdgeInternal(trc
, &thing
, name
)) {
644 return TaggedPtr
<T
>::empty();
647 return TaggedPtr
<T
>::wrap(thing
);
650 // Only update *thingp if the value changed, to avoid TSan false positives for
651 // template objects when using DumpHeapTracer or UbiNode tracers while Ion
652 // compiling off-thread.
653 if (thing
.isSome() && thing
.value() != *thingp
) {
654 *thingp
= thing
.value();
660 bool js::gc::TraceEdgeInternal(JSTracer
* trc
, Value
* thingp
, const char* name
) {
661 return TraceTaggedPtrEdge(trc
, thingp
, name
);
663 bool js::gc::TraceEdgeInternal(JSTracer
* trc
, jsid
* thingp
, const char* name
) {
664 return TraceTaggedPtrEdge(trc
, thingp
, name
);
666 bool js::gc::TraceEdgeInternal(JSTracer
* trc
, TaggedProto
* thingp
,
668 return TraceTaggedPtrEdge(trc
, thingp
, name
);
670 bool js::gc::TraceEdgeInternal(JSTracer
* trc
, wasm::AnyRef
* thingp
,
672 return TraceTaggedPtrEdge(trc
, thingp
, name
);
675 template <typename T
>
676 void js::gc::TraceRangeInternal(JSTracer
* trc
, size_t len
, T
* vec
,
678 JS::AutoTracingIndex
index(trc
);
679 for (auto i
: IntegerRange(len
)) {
680 if (InternalBarrierMethods
<T
>::isMarkable(vec
[i
])) {
681 TraceEdgeInternal(trc
, &vec
[i
], name
);
687 /*** GC Marking Interface ***************************************************/
691 using HasNoImplicitEdgesType
= bool;
693 template <typename T
>
694 struct ImplicitEdgeHolderType
{
695 using Type
= HasNoImplicitEdgesType
;
698 // For now, we only handle JSObject* and BaseScript* keys, but the linear time
699 // algorithm can be easily extended by adding in more types here, then making
700 // GCMarker::traverse<T> call markImplicitEdges.
702 struct ImplicitEdgeHolderType
<JSObject
*> {
703 using Type
= JSObject
*;
707 struct ImplicitEdgeHolderType
<BaseScript
*> {
708 using Type
= BaseScript
*;
711 void GCMarker::markEphemeronEdges(EphemeronEdgeVector
& edges
,
712 gc::CellColor srcColor
) {
713 // This is called as part of GC weak marking or by barriers outside of GC.
714 MOZ_ASSERT_IF(CurrentThreadIsPerformingGC(),
715 state
== MarkingState::WeakMarking
);
717 DebugOnly
<size_t> initialLength
= edges
.length();
719 for (auto& edge
: edges
) {
720 CellColor targetColor
= std::min(srcColor
, edge
.color
);
721 MOZ_ASSERT(CellColor(markColor()) >= targetColor
);
722 if (targetColor
== markColor()) {
723 ApplyGCThingTyped(edge
.target
, edge
.target
->getTraceKind(),
725 markAndTraverse
<MarkingOptions::MarkImplicitEdges
>(t
);
730 // The above marking always goes through markAndPush, which will not cause
731 // 'edges' to be appended to while iterating.
732 MOZ_ASSERT(edges
.length() == initialLength
);
734 // This is not just an optimization. When nuking a CCW, we conservatively
735 // mark through the related edges and then lose the CCW->target connection
736 // that induces a sweep group edge. As a result, it is possible for the
737 // delegate zone to get marked later, look up an edge in this table, and
738 // then try to mark something in a Zone that is no longer marking.
739 if (srcColor
== CellColor::Black
&& markColor() == MarkColor::Black
) {
740 edges
.eraseIf([](auto& edge
) { return edge
.color
== MarkColor::Black
; });
744 // 'delegate' is no longer the delegate of 'key'.
745 void GCMarker::severWeakDelegate(JSObject
* key
, JSObject
* delegate
) {
746 MOZ_ASSERT(CurrentThreadIsMainThread());
748 JS::Zone
* zone
= delegate
->zone();
749 if (!zone
->needsIncrementalBarrier()) {
751 !zone
->gcEphemeronEdges(delegate
).get(delegate
),
752 "non-collecting zone should not have populated gcEphemeronEdges");
755 auto* p
= zone
->gcEphemeronEdges(delegate
).get(delegate
);
760 // We are losing 3 edges here: key -> delegate, delegate -> key, and
761 // <delegate, map> -> value. Maintain snapshot-at-beginning (hereafter,
762 // S-A-B) by conservatively assuming the delegate will end up black and
763 // marking through the latter 2 edges.
765 // Note that this does not fully give S-A-B:
767 // 1. If the map is gray, then the value will only be marked gray here even
768 // though the map could later be discovered to be black.
770 // 2. If the map has not yet been marked, we won't have any entries to mark
771 // here in the first place.
773 // 3. We're not marking the delegate, since that would cause eg nukeAllCCWs
774 // to keep everything alive for another collection.
776 // We can't even assume that the delegate passed in here is live, because we
777 // could have gotten here from nukeAllCCWs, which iterates over all CCWs
778 // including dead ones.
780 // This is ok because S-A-B is only needed to prevent the case where an
781 // unmarked object is removed from the graph and then re-inserted where it is
782 // reachable only by things that have already been marked. None of the 3
783 // target objects will be re-inserted anywhere as a result of this action.
785 EphemeronEdgeVector
& edges
= p
->value
;
786 MOZ_ASSERT(markColor() == MarkColor::Black
);
787 markEphemeronEdges(edges
, MarkColor::Black
);
790 // 'delegate' is now the delegate of 'key'. Update weakmap marking state.
791 void GCMarker::restoreWeakDelegate(JSObject
* key
, JSObject
* delegate
) {
792 MOZ_ASSERT(CurrentThreadIsMainThread());
794 if (!key
->zone()->needsIncrementalBarrier()) {
795 // Temporary diagnostic printouts for when this would have asserted.
796 if (key
->zone()->gcEphemeronEdges(key
).has(key
)) {
797 fprintf(stderr
, "key zone: %d\n", int(key
->zone()->gcState()));
801 fprintf(stderr
, "delegate zone: %d\n", int(delegate
->zone()->gcState()));
807 !key
->zone()->gcEphemeronEdges(key
).has(key
),
808 "non-collecting zone should not have populated gcEphemeronEdges");
811 if (!delegate
->zone()->needsIncrementalBarrier()) {
812 // Normally we should not have added the key -> value edge if the delegate
813 // zone is not marking (because the delegate would have been seen as black,
814 // so we would mark the key immediately instead). But if there wasn't a
815 // delegate (the key was nuked), then we won't have consulted it. So we
816 // can't do the same assertion as above.
818 // Specifically, the sequence would be:
820 // 2. Start the incremental GC.
821 // 3. Mark the WeakMap. Insert a key->value edge with a DeadObjectProxy key.
822 // 4. Un-nuke the key with a delegate in a nonmarking Zone.
824 // The result is an ephemeron edge (from <map,key> to value, but stored
825 // as key to value) involving a key with a delegate in a nonmarking Zone,
826 // something that ordinarily would not happen.
829 auto* p
= key
->zone()->gcEphemeronEdges(key
).get(key
);
834 // Similar to severWeakDelegate above, mark through the key -> value edge.
835 EphemeronEdgeVector
& edges
= p
->value
;
836 MOZ_ASSERT(markColor() == MarkColor::Black
);
837 markEphemeronEdges(edges
, MarkColor::Black
);
840 template <typename T
>
841 void GCMarker::markImplicitEdgesHelper(T markedThing
) {
842 if (!isWeakMarking()) {
846 Zone
* zone
= markedThing
->asTenured().zone();
847 MOZ_ASSERT(zone
->isGCMarking());
848 MOZ_ASSERT(!zone
->isGCSweeping());
850 auto p
= zone
->gcEphemeronEdges().get(markedThing
);
854 EphemeronEdgeVector
& edges
= p
->value
;
856 // markedThing might be a key in a debugger weakmap, which can end up marking
857 // values that are in a different compartment.
858 AutoClearTracingSource
acts(tracer());
860 CellColor thingColor
= gc::detail::GetEffectiveColor(this, markedThing
);
861 markEphemeronEdges(edges
, thingColor
);
865 void GCMarker::markImplicitEdgesHelper(HasNoImplicitEdgesType
) {}
867 template <typename T
>
868 void GCMarker::markImplicitEdges(T
* thing
) {
869 markImplicitEdgesHelper
<typename ImplicitEdgeHolderType
<T
*>::Type
>(thing
);
872 template void GCMarker::markImplicitEdges(JSObject
*);
873 template void GCMarker::markImplicitEdges(BaseScript
*);
877 template <typename T
>
878 static inline bool ShouldMark(GCMarker
* gcmarker
, T
* thing
) {
879 // We may encounter nursery things during normal marking since we don't
880 // collect the nursery at the start of every GC slice.
881 if (!thing
->isTenured()) {
885 // Don't mark things outside a zone if we are in a per-zone GC. Don't mark
886 // permanent shared things owned by other runtimes (we will never observe
887 // their zone being collected).
888 Zone
* zone
= thing
->asTenured().zoneFromAnyThread();
889 return zone
->shouldMarkInZone(gcmarker
->markColor());
892 template <uint32_t opts
>
893 MarkingTracerT
<opts
>::MarkingTracerT(JSRuntime
* runtime
, GCMarker
* marker
)
894 : GenericTracerImpl
<MarkingTracerT
<opts
>>(
895 runtime
, JS::TracerKind::Marking
,
896 JS::TraceOptions(JS::WeakMapTraceAction::Expand
,
897 JS::WeakEdgeTraceAction::Skip
)) {
898 // Marking tracers are owned by (and part of) a GCMarker.
899 MOZ_ASSERT(this == marker
->tracer());
900 MOZ_ASSERT(getMarker() == marker
);
903 template <uint32_t opts
>
904 MOZ_ALWAYS_INLINE GCMarker
* MarkingTracerT
<opts
>::getMarker() {
905 return GCMarker::fromTracer(this);
908 template <uint32_t opts
>
909 template <typename T
>
910 void MarkingTracerT
<opts
>::onEdge(T
** thingp
, const char* name
) {
913 // Do per-type marking precondition checks.
914 GCMarker
* marker
= getMarker();
915 if (!ShouldMark(marker
, thing
)) {
916 MOZ_ASSERT(gc::detail::GetEffectiveColor(marker
, thing
) ==
917 js::gc::CellColor::Black
);
921 MOZ_ASSERT(!IsOwnedByOtherRuntime(this->runtime(), thing
));
924 CheckMarkedThing(marker
, thing
);
927 AutoClearTracingSource
acts(this);
928 marker
->markAndTraverse
<opts
>(thing
);
931 #define INSTANTIATE_ONEDGE_METHOD(name, type, _1, _2) \
932 template void MarkingTracerT<MarkingOptions::None>::onEdge<type>( \
933 type * *thingp, const char* name); \
935 MarkingTracerT<MarkingOptions::MarkImplicitEdges>::onEdge<type>( \
936 type * *thingp, const char* name); \
938 MarkingTracerT<MarkingOptions::MarkRootCompartments>::onEdge<type>( \
939 type * *thingp, const char* name);
940 JS_FOR_EACH_TRACEKIND(INSTANTIATE_ONEDGE_METHOD
)
941 #undef INSTANTIATE_ONEDGE_METHOD
943 static void TraceEdgeForBarrier(GCMarker
* gcmarker
, TenuredCell
* thing
,
944 JS::TraceKind kind
) {
945 // Dispatch to markAndTraverse without checking ShouldMark.
946 ApplyGCThingTyped(thing
, kind
, [gcmarker
](auto thing
) {
947 MOZ_ASSERT(ShouldMark(gcmarker
, thing
));
948 CheckTracedThing(gcmarker
->tracer(), thing
);
949 AutoClearTracingSource
acts(gcmarker
->tracer());
950 gcmarker
->markAndTraverse
<NormalMarkingOptions
>(thing
);
954 JS_PUBLIC_API
void js::gc::PerformIncrementalReadBarrier(JS::GCCellPtr thing
) {
955 // Optimized marking for read barriers. This is called from
956 // ExposeGCThingToActiveJS which has already checked the prerequisites for
957 // performing a read barrier. This means we can skip a bunch of checks and
958 // call into the tracer directly.
961 MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
963 TenuredCell
* cell
= &thing
.asCell()->asTenured();
964 MOZ_ASSERT(!cell
->isMarkedBlack());
966 Zone
* zone
= cell
->zone();
967 MOZ_ASSERT(zone
->needsIncrementalBarrier());
969 // Skip dispatching on known tracer type.
970 GCMarker
* gcmarker
= GCMarker::fromTracer(zone
->barrierTracer());
971 TraceEdgeForBarrier(gcmarker
, cell
, thing
.kind());
974 void js::gc::PerformIncrementalReadBarrier(TenuredCell
* cell
) {
975 // Internal version of previous function.
978 MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
980 if (cell
->isMarkedBlack()) {
984 Zone
* zone
= cell
->zone();
985 MOZ_ASSERT(zone
->needsIncrementalBarrier());
987 // Skip dispatching on known tracer type.
988 GCMarker
* gcmarker
= GCMarker::fromTracer(zone
->barrierTracer());
989 TraceEdgeForBarrier(gcmarker
, cell
, cell
->getTraceKind());
992 void js::gc::PerformIncrementalPreWriteBarrier(TenuredCell
* cell
) {
993 // The same as PerformIncrementalReadBarrier except for an extra check on the
994 // runtime for cells in atoms zone.
996 Zone
* zone
= cell
->zoneFromAnyThread();
997 MOZ_ASSERT(zone
->needsIncrementalBarrier());
1000 if (cell
->isMarkedBlack()) {
1004 // Barriers can be triggered off the main thread by background finalization of
1005 // HeapPtrs to the atoms zone. We don't want to trigger the barrier in this
1007 bool checkThread
= zone
->isAtomsZone();
1008 JSRuntime
* runtime
= cell
->runtimeFromAnyThread();
1009 if (checkThread
&& !CurrentThreadCanAccessRuntime(runtime
)) {
1010 MOZ_ASSERT(CurrentThreadIsGCFinalizing());
1014 MOZ_ASSERT(CurrentThreadIsMainThread());
1015 MOZ_ASSERT(!JS::RuntimeHeapIsMajorCollecting());
1017 // Skip dispatching on known tracer type.
1018 GCMarker
* gcmarker
= GCMarker::fromTracer(zone
->barrierTracer());
1019 TraceEdgeForBarrier(gcmarker
, cell
, cell
->getTraceKind());
1022 void js::gc::PerformIncrementalBarrierDuringFlattening(JSString
* str
) {
1023 TenuredCell
* cell
= &str
->asTenured();
1025 // Skip eager marking of ropes during flattening. Their children will also be
1026 // barriered by flattening process so we don't need to traverse them.
1027 if (str
->isRope()) {
1032 PerformIncrementalPreWriteBarrier(cell
);
1035 template <uint32_t opts
, typename T
>
1036 void js::GCMarker::markAndTraverse(T
* thing
) {
1037 if (mark
<opts
>(thing
)) {
1038 // We only mark permanent things during initialization.
1039 MOZ_ASSERT_IF(thing
->isPermanentAndMayBeShared(),
1040 !runtime()->permanentAtomsPopulated());
1042 // We don't need to pass MarkRootCompartments options on to children.
1043 constexpr uint32_t traverseOpts
=
1044 opts
& ~MarkingOptions::MarkRootCompartments
;
1046 traverse
<traverseOpts
>(thing
);
1048 if constexpr (bool(opts
& MarkingOptions::MarkRootCompartments
)) {
1049 // Mark the compartment as live.
1050 SetCompartmentHasMarkedCells(thing
);
1055 // The |traverse| method overloads select the traversal strategy for each kind.
1057 // There are three possible strategies:
1061 // The simplest traversal calls out to the fully generic traceChildren
1062 // function to visit the child edges. In the absence of other traversal
1063 // mechanisms, this function will rapidly grow the stack past its bounds and
1064 // crash the process. Thus, this generic tracing should only be used in cases
1065 // where subsequent tracing will not recurse.
1069 // Strings, Shapes, and Scopes are extremely common, but have simple patterns
1070 // of recursion. We traverse trees of these edges immediately, with
1071 // aggressive, manual inlining, implemented by eagerlyTraceChildren.
1075 // Objects are extremely common and can contain arbitrarily nested graphs, so
1076 // are not trivially inlined. In this case we use the mark stack to control
1077 // recursion. JitCode shares none of these properties, but is included for
1078 // historical reasons. JSScript normally cannot recurse, but may be used as a
1079 // weakmap key and thereby recurse into weakmapped values.
1081 template <uint32_t opts
>
1082 void GCMarker::traverse(BaseShape
* thing
) {
1083 traceChildren
<opts
>(thing
);
1085 template <uint32_t opts
>
1086 void GCMarker::traverse(GetterSetter
* thing
) {
1087 traceChildren
<opts
>(thing
);
1089 template <uint32_t opts
>
1090 void GCMarker::traverse(JS::Symbol
* thing
) {
1091 traceChildren
<opts
>(thing
);
1093 template <uint32_t opts
>
1094 void GCMarker::traverse(JS::BigInt
* thing
) {
1095 traceChildren
<opts
>(thing
);
1097 template <uint32_t opts
>
1098 void GCMarker::traverse(RegExpShared
* thing
) {
1099 traceChildren
<opts
>(thing
);
1101 template <uint32_t opts
>
1102 void GCMarker::traverse(JSString
* thing
) {
1103 scanChildren
<opts
>(thing
);
1105 template <uint32_t opts
>
1106 void GCMarker::traverse(Shape
* thing
) {
1107 scanChildren
<opts
>(thing
);
1109 template <uint32_t opts
>
1110 void GCMarker::traverse(PropMap
* thing
) {
1111 scanChildren
<opts
>(thing
);
1113 template <uint32_t opts
>
1114 void GCMarker::traverse(js::Scope
* thing
) {
1115 scanChildren
<opts
>(thing
);
1117 template <uint32_t opts
>
1118 void GCMarker::traverse(JSObject
* thing
) {
1119 pushThing
<opts
>(thing
);
1121 template <uint32_t opts
>
1122 void GCMarker::traverse(jit::JitCode
* thing
) {
1123 pushThing
<opts
>(thing
);
1125 template <uint32_t opts
>
1126 void GCMarker::traverse(BaseScript
* thing
) {
1127 pushThing
<opts
>(thing
);
1130 template <uint32_t opts
, typename T
>
1131 void js::GCMarker::traceChildren(T
* thing
) {
1132 MOZ_ASSERT(!thing
->isPermanentAndMayBeShared());
1133 MOZ_ASSERT(thing
->isMarkedAny());
1134 AutoSetTracingSource
asts(tracer(), thing
);
1135 thing
->traceChildren(tracer());
1138 template <uint32_t opts
, typename T
>
1139 void js::GCMarker::scanChildren(T
* thing
) {
1140 MOZ_ASSERT(!thing
->isPermanentAndMayBeShared());
1141 MOZ_ASSERT(thing
->isMarkedAny());
1142 eagerlyMarkChildren
<opts
>(thing
);
1145 template <uint32_t opts
, typename T
>
1146 void js::GCMarker::pushThing(T
* thing
) {
1147 MOZ_ASSERT(!thing
->isPermanentAndMayBeShared());
1148 MOZ_ASSERT(thing
->isMarkedAny());
1149 pushTaggedPtr(thing
);
1152 template void js::GCMarker::markAndTraverse
<MarkingOptions::None
, JSObject
>(
1154 template void js::GCMarker::markAndTraverse
<MarkingOptions::MarkImplicitEdges
,
1155 JSObject
>(JSObject
* thing
);
1156 template void js::GCMarker::markAndTraverse
<
1157 MarkingOptions::MarkRootCompartments
, JSObject
>(JSObject
* thing
);
1160 void GCMarker::setCheckAtomMarking(bool check
) {
1161 MOZ_ASSERT(check
!= checkAtomMarking
);
1162 checkAtomMarking
= check
;
1166 template <typename S
, typename T
>
1167 inline void GCMarker::checkTraversedEdge(S source
, T
* target
) {
1169 // Atoms and Symbols do not have or mark their internal pointers,
1171 MOZ_ASSERT(!source
->isPermanentAndMayBeShared());
1173 // Shared things are already black so we will not mark them.
1174 if (target
->isPermanentAndMayBeShared()) {
1175 Zone
* zone
= target
->zoneFromAnyThread();
1176 MOZ_ASSERT(!zone
->wasGCStarted());
1177 MOZ_ASSERT(!zone
->needsIncrementalBarrier());
1178 MOZ_ASSERT(target
->isMarkedBlack());
1179 MOZ_ASSERT(!target
->maybeCompartment());
1183 Zone
* sourceZone
= source
->zone();
1184 Zone
* targetZone
= target
->zone();
1186 // Atoms and Symbols do not have access to a compartment pointer, or we'd need
1187 // to adjust the subsequent check to catch that case.
1188 MOZ_ASSERT_IF(targetZone
->isAtomsZone(), !target
->maybeCompartment());
1190 // The Zones must match, unless the target is an atom.
1191 MOZ_ASSERT(targetZone
== sourceZone
|| targetZone
->isAtomsZone());
1193 // If we are marking an atom, that atom must be marked in the source zone's
1195 if (checkAtomMarking
&& !sourceZone
->isAtomsZone() &&
1196 targetZone
->isAtomsZone()) {
1197 MOZ_ASSERT(target
->runtimeFromAnyThread()->gc
.atomMarking
.atomIsMarked(
1198 sourceZone
, reinterpret_cast<TenuredCell
*>(target
)));
1201 // If we have access to a compartment pointer for both things, they must
1203 MOZ_ASSERT_IF(source
->maybeCompartment() && target
->maybeCompartment(),
1204 source
->maybeCompartment() == target
->maybeCompartment());
1208 template <uint32_t opts
, typename S
, typename T
>
1209 void js::GCMarker::markAndTraverseEdge(S source
, T
* target
) {
1210 checkTraversedEdge(source
, target
);
1211 markAndTraverse
<opts
>(target
);
1214 template <uint32_t opts
, typename S
, typename T
>
1215 void js::GCMarker::markAndTraverseEdge(S source
, const T
& target
) {
1216 ApplyGCThingTyped(target
, [this, source
](auto t
) {
1217 this->markAndTraverseEdge
<opts
>(source
, t
);
1221 template <uint32_t opts
, typename T
>
1222 bool js::GCMarker::mark(T
* thing
) {
1223 if (!thing
->isTenured()) {
1227 AssertShouldMarkInZone(this, thing
);
1230 TraceKindCanBeGray
<T
>::value
? markColor() : MarkColor::Black
;
1232 if constexpr (bool(opts
& MarkingOptions::ParallelMarking
)) {
1233 return thing
->asTenured().markIfUnmarkedAtomic(color
);
1236 return thing
->asTenured().markIfUnmarked(color
);
1239 /*** Mark-stack Marking *****************************************************/
1241 // Call the trace hook set on the object, if present.
1242 static inline void CallTraceHook(JSTracer
* trc
, JSObject
* obj
) {
1243 const JSClass
* clasp
= obj
->getClass();
1246 if (clasp
->hasTrace()) {
1247 AutoSetTracingSource
asts(trc
, obj
);
1248 clasp
->doTrace(trc
, obj
);
1252 static gcstats::PhaseKind
GrayMarkingPhaseForCurrentPhase(
1253 const gcstats::Statistics
& stats
) {
1254 using namespace gcstats
;
1255 switch (stats
.currentPhaseKind()) {
1256 case PhaseKind::MARK
:
1257 return PhaseKind::MARK_GRAY
;
1258 case PhaseKind::MARK_WEAK
:
1259 return PhaseKind::MARK_GRAY_WEAK
;
1261 MOZ_CRASH("Unexpected current phase");
1265 void GCMarker::moveWork(GCMarker
* dst
, GCMarker
* src
) {
1266 MOZ_ASSERT(dst
->stack
.isEmpty());
1267 MOZ_ASSERT(src
->canDonateWork());
1269 MarkStack::moveWork(dst
->stack
, src
->stack
);
1272 bool GCMarker::initStack() {
1273 MOZ_ASSERT(!isActive());
1274 MOZ_ASSERT(markColor_
== gc::MarkColor::Black
);
1275 return stack
.init();
1278 void GCMarker::resetStackCapacity() {
1279 MOZ_ASSERT(!isActive());
1280 MOZ_ASSERT(markColor_
== gc::MarkColor::Black
);
1281 (void)stack
.resetStackCapacity();
1284 void GCMarker::freeStack() {
1285 MOZ_ASSERT(!isActive());
1286 MOZ_ASSERT(markColor_
== gc::MarkColor::Black
);
1287 stack
.clearAndFreeStack();
1290 bool GCMarker::markUntilBudgetExhausted(SliceBudget
& budget
,
1291 ShouldReportMarkTime reportTime
) {
1293 MOZ_ASSERT(!strictCompartmentChecking
);
1294 strictCompartmentChecking
= true;
1295 auto acc
= mozilla::MakeScopeExit([&] { strictCompartmentChecking
= false; });
1298 if (budget
.isOverBudget()) {
1302 if (isWeakMarking()) {
1303 return doMarking
<MarkingOptions::MarkImplicitEdges
>(budget
, reportTime
);
1306 return doMarking
<MarkingOptions::None
>(budget
, reportTime
);
1309 template <uint32_t opts
>
1310 bool GCMarker::doMarking(SliceBudget
& budget
, ShouldReportMarkTime reportTime
) {
1311 GCRuntime
& gc
= runtime()->gc
;
1313 // This method leaves the mark color as it found it.
1315 if (hasBlackEntries() && !markOneColor
<opts
, MarkColor::Black
>(budget
)) {
1319 if (hasGrayEntries()) {
1320 mozilla::Maybe
<gcstats::AutoPhase
> ap
;
1322 auto& stats
= runtime()->gc
.stats();
1323 ap
.emplace(stats
, GrayMarkingPhaseForCurrentPhase(stats
));
1326 if (!markOneColor
<opts
, MarkColor::Gray
>(budget
)) {
1331 // Mark children of things that caused too deep recursion during the above
1332 // tracing. All normal marking happens before any delayed marking.
1333 if (gc
.hasDelayedMarking()) {
1334 gc
.markAllDelayedChildren(reportTime
);
1337 MOZ_ASSERT(!gc
.hasDelayedMarking());
1338 MOZ_ASSERT(isDrained());
1343 template <uint32_t opts
, MarkColor color
>
1344 bool GCMarker::markOneColor(SliceBudget
& budget
) {
1345 AutoSetMarkColor
setColor(*this, color
);
1347 while (processMarkStackTop
<opts
>(budget
)) {
1348 if (stack
.isEmpty()) {
1356 bool GCMarker::markCurrentColorInParallel(SliceBudget
& budget
) {
1357 ParallelMarker::AtomicCount
& waitingTaskCount
=
1358 parallelMarker_
->waitingTaskCountRef();
1360 while (processMarkStackTop
<MarkingOptions::ParallelMarking
>(budget
)) {
1361 if (stack
.isEmpty()) {
1365 // TODO: It might be better to only check this occasionally, possibly
1366 // combined with the slice budget check. Experiments with giving this its
1367 // own counter resulted in worse performance.
1368 if (waitingTaskCount
&& canDonateWork()) {
1369 parallelMarker_
->donateWorkFrom(this);
1376 static inline void CheckForCompartmentMismatch(JSObject
* obj
, JSObject
* obj2
) {
1378 if (MOZ_UNLIKELY(obj
->compartment() != obj2
->compartment())) {
1381 "Compartment mismatch in pointer from %s object slot to %s object\n",
1382 obj
->getClass()->name
, obj2
->getClass()->name
);
1383 MOZ_CRASH("Compartment mismatch");
1388 static inline size_t NumUsedFixedSlots(NativeObject
* obj
) {
1389 return std::min(obj
->numFixedSlots(), obj
->slotSpan());
1392 static inline size_t NumUsedDynamicSlots(NativeObject
* obj
) {
1393 size_t nfixed
= obj
->numFixedSlots();
1394 size_t nslots
= obj
->slotSpan();
1395 if (nslots
< nfixed
) {
1399 return nslots
- nfixed
;
1402 template <uint32_t opts
>
1403 inline bool GCMarker::processMarkStackTop(SliceBudget
& budget
) {
1405 * This function uses explicit goto and scans objects directly. This allows us
1406 * to eliminate tail recursion and significantly improve the marking
1407 * performance, see bug 641025.
1409 * Note that the mutator can change the size and layout of objects between
1410 * marking slices, so we must check slots and element ranges read from the
1414 MOZ_ASSERT(!stack
.isEmpty());
1415 MOZ_ASSERT_IF(markColor() == MarkColor::Gray
, !hasBlackEntries());
1417 JSObject
* obj
; // The object being scanned.
1418 SlotsOrElementsKind kind
; // The kind of slot range being scanned, if any.
1419 HeapSlot
* base
; // Slot range base pointer.
1420 size_t index
; // Index of the next slot to mark.
1421 size_t end
; // End of slot range to mark.
1423 if (stack
.peekTag() == MarkStack::SlotsOrElementsRangeTag
) {
1424 auto range
= stack
.popSlotsOrElementsRange();
1425 obj
= range
.ptr().asRangeObject();
1426 NativeObject
* nobj
= &obj
->as
<NativeObject
>();
1427 kind
= range
.kind();
1428 index
= range
.start();
1431 case SlotsOrElementsKind::FixedSlots
: {
1432 base
= nobj
->fixedSlots();
1433 end
= NumUsedFixedSlots(nobj
);
1437 case SlotsOrElementsKind::DynamicSlots
: {
1438 base
= nobj
->slots_
;
1439 end
= NumUsedDynamicSlots(nobj
);
1443 case SlotsOrElementsKind::Elements
: {
1444 base
= nobj
->getDenseElements();
1446 // Account for shifted elements.
1447 size_t numShifted
= nobj
->getElementsHeader()->numShiftedElements();
1448 size_t initlen
= nobj
->getDenseInitializedLength();
1449 index
= std::max(index
, numShifted
) - numShifted
;
1454 case SlotsOrElementsKind::Unused
: {
1455 MOZ_MAKE_COMPILER_ASSUME_IS_UNREACHABLE("Unused SlotsOrElementsKind");
1459 goto scan_value_range
;
1463 if (budget
.isOverBudget()) {
1468 MarkStack::TaggedPtr ptr
= stack
.popPtr();
1469 switch (ptr
.tag()) {
1470 case MarkStack::ObjectTag
: {
1471 obj
= ptr
.as
<JSObject
>();
1472 AssertShouldMarkInZone(this, obj
);
1476 case MarkStack::JitCodeTag
: {
1477 auto* code
= ptr
.as
<jit::JitCode
>();
1478 AutoSetTracingSource
asts(tracer(), code
);
1479 code
->traceChildren(tracer());
1483 case MarkStack::ScriptTag
: {
1484 auto* script
= ptr
.as
<BaseScript
>();
1485 if constexpr (bool(opts
& MarkingOptions::MarkImplicitEdges
)) {
1486 markImplicitEdges(script
);
1488 AutoSetTracingSource
asts(tracer(), script
);
1489 script
->traceChildren(tracer());
1494 MOZ_CRASH("Invalid tag in mark stack");
1501 while (index
< end
) {
1503 if (budget
.isOverBudget()) {
1504 pushValueRange(obj
, kind
, index
, end
);
1508 const Value
& v
= base
[index
];
1512 markAndTraverseEdge
<opts
>(obj
, v
.toString());
1513 } else if (v
.hasObjectPayload()) {
1514 JSObject
* obj2
= &v
.getObjectPayload();
1518 "processMarkStackTop found ObjectValue(nullptr) "
1519 "at %zu Values from end of range in object:\n",
1520 size_t(end
- (index
- 1)));
1524 CheckForCompartmentMismatch(obj
, obj2
);
1525 if (mark
<opts
>(obj2
)) {
1526 // Save the rest of this value range for later and start scanning obj2's
1528 pushValueRange(obj
, kind
, index
, end
);
1532 } else if (v
.isSymbol()) {
1533 markAndTraverseEdge
<opts
>(obj
, v
.toSymbol());
1534 } else if (v
.isBigInt()) {
1535 markAndTraverseEdge
<opts
>(obj
, v
.toBigInt());
1536 } else if (v
.isPrivateGCThing()) {
1537 // v.toGCCellPtr cannot be inlined, so construct one manually.
1538 Cell
* cell
= v
.toGCThing();
1539 markAndTraverseEdge
<opts
>(obj
, JS::GCCellPtr(cell
, cell
->getTraceKind()));
1546 AssertShouldMarkInZone(this, obj
);
1548 if constexpr (bool(opts
& MarkingOptions::MarkImplicitEdges
)) {
1549 markImplicitEdges(obj
);
1551 markAndTraverseEdge
<opts
>(obj
, obj
->shape());
1553 CallTraceHook(tracer(), obj
);
1555 if (!obj
->is
<NativeObject
>()) {
1559 NativeObject
* nobj
= &obj
->as
<NativeObject
>();
1561 unsigned nslots
= nobj
->slotSpan();
1564 if (nobj
->hasEmptyElements()) {
1568 base
= nobj
->getDenseElements();
1569 kind
= SlotsOrElementsKind::Elements
;
1571 end
= nobj
->getDenseInitializedLength();
1574 goto scan_value_range
;
1576 pushValueRange(nobj
, kind
, index
, end
);
1579 unsigned nfixed
= nobj
->numFixedSlots();
1581 base
= nobj
->fixedSlots();
1582 kind
= SlotsOrElementsKind::FixedSlots
;
1585 if (nslots
> nfixed
) {
1586 pushValueRange(nobj
, kind
, index
, nfixed
);
1587 kind
= SlotsOrElementsKind::DynamicSlots
;
1588 base
= nobj
->slots_
;
1589 end
= nslots
- nfixed
;
1590 goto scan_value_range
;
1593 MOZ_ASSERT(nslots
<= nobj
->numFixedSlots());
1595 goto scan_value_range
;
1599 /*** Mark Stack *************************************************************/
1601 static_assert(sizeof(MarkStack::TaggedPtr
) == sizeof(uintptr_t),
1602 "A TaggedPtr should be the same size as a pointer");
1603 static_assert((sizeof(MarkStack::SlotsOrElementsRange
) % sizeof(uintptr_t)) ==
1605 "SlotsOrElementsRange size should be a multiple of "
1606 "the pointer size");
1608 static const size_t ValueRangeWords
=
1609 sizeof(MarkStack::SlotsOrElementsRange
) / sizeof(uintptr_t);
1611 template <typename T
>
1612 struct MapTypeToMarkStackTag
{};
1614 struct MapTypeToMarkStackTag
<JSObject
*> {
1615 static const auto value
= MarkStack::ObjectTag
;
1618 struct MapTypeToMarkStackTag
<jit::JitCode
*> {
1619 static const auto value
= MarkStack::JitCodeTag
;
1622 struct MapTypeToMarkStackTag
<BaseScript
*> {
1623 static const auto value
= MarkStack::ScriptTag
;
1627 static inline bool TagIsRangeTag(MarkStack::Tag tag
) {
1628 return tag
== MarkStack::SlotsOrElementsRangeTag
;
1632 inline MarkStack::TaggedPtr::TaggedPtr(Tag tag
, Cell
* ptr
)
1633 : bits(tag
| uintptr_t(ptr
)) {
1637 inline uintptr_t MarkStack::TaggedPtr::tagUnchecked() const {
1638 return bits
& TagMask
;
1641 inline MarkStack::Tag
MarkStack::TaggedPtr::tag() const {
1642 auto tag
= Tag(bits
& TagMask
);
1643 MOZ_ASSERT(tag
<= LastTag
);
1647 inline Cell
* MarkStack::TaggedPtr::ptr() const {
1648 return reinterpret_cast<Cell
*>(bits
& ~TagMask
);
1651 inline void MarkStack::TaggedPtr::assertValid() const {
1653 MOZ_ASSERT(IsCellPointerValid(ptr()));
1656 template <typename T
>
1657 inline T
* MarkStack::TaggedPtr::as() const {
1658 MOZ_ASSERT(tag() == MapTypeToMarkStackTag
<T
*>::value
);
1659 MOZ_ASSERT(ptr()->isTenured());
1660 MOZ_ASSERT(ptr()->is
<T
>());
1661 return static_cast<T
*>(ptr());
1664 inline JSObject
* MarkStack::TaggedPtr::asRangeObject() const {
1665 MOZ_ASSERT(TagIsRangeTag(tag()));
1666 MOZ_ASSERT(ptr()->isTenured());
1667 return ptr()->as
<JSObject
>();
1670 inline JSRope
* MarkStack::TaggedPtr::asTempRope() const {
1671 MOZ_ASSERT(tag() == TempRopeTag
);
1672 return &ptr()->as
<JSString
>()->asRope();
1675 inline MarkStack::SlotsOrElementsRange::SlotsOrElementsRange(
1676 SlotsOrElementsKind kindArg
, JSObject
* obj
, size_t startArg
)
1677 : startAndKind_((startArg
<< StartShift
) | size_t(kindArg
)),
1678 ptr_(SlotsOrElementsRangeTag
, obj
) {
1680 MOZ_ASSERT(kind() == kindArg
);
1681 MOZ_ASSERT(start() == startArg
);
1684 inline void MarkStack::SlotsOrElementsRange::assertValid() const {
1686 MOZ_ASSERT(TagIsRangeTag(ptr_
.tag()));
1689 inline SlotsOrElementsKind
MarkStack::SlotsOrElementsRange::kind() const {
1690 return SlotsOrElementsKind(startAndKind_
& KindMask
);
1693 inline size_t MarkStack::SlotsOrElementsRange::start() const {
1694 return startAndKind_
>> StartShift
;
1697 inline MarkStack::TaggedPtr
MarkStack::SlotsOrElementsRange::ptr() const {
1701 MarkStack::MarkStack() { MOZ_ASSERT(isEmpty()); }
1703 MarkStack::~MarkStack() { MOZ_ASSERT(isEmpty()); }
1705 MarkStack::MarkStack(const MarkStack
& other
) {
1706 MOZ_CRASH("Compiler requires this but doesn't call it");
1709 MarkStack
& MarkStack::operator=(const MarkStack
& other
) {
1710 new (this) MarkStack(other
);
1714 MarkStack::MarkStack(MarkStack
&& other
) noexcept
1715 : stack_(std::move(other
.stack_
.ref())),
1716 topIndex_(other
.topIndex_
.ref())
1719 maxCapacity_(other
.maxCapacity_
)
1722 other
.topIndex_
= 0;
1725 MarkStack
& MarkStack::operator=(MarkStack
&& other
) noexcept
{
1726 new (this) MarkStack(std::move(other
));
1730 bool MarkStack::init() { return resetStackCapacity(); }
1732 bool MarkStack::resetStackCapacity() {
1733 MOZ_ASSERT(isEmpty());
1735 size_t capacity
= MARK_STACK_BASE_CAPACITY
;
1738 capacity
= std::min(capacity
, maxCapacity_
.ref());
1741 return resize(capacity
);
1745 void MarkStack::setMaxCapacity(size_t maxCapacity
) {
1746 MOZ_ASSERT(maxCapacity
!= 0);
1747 MOZ_ASSERT(isEmpty());
1749 maxCapacity_
= maxCapacity
;
1750 if (capacity() > maxCapacity_
) {
1751 // If the realloc fails, just keep using the existing stack; it's
1752 // not ideal but better than failing.
1753 (void)resize(maxCapacity_
);
1758 MOZ_ALWAYS_INLINE
bool MarkStack::indexIsEntryBase(size_t index
) const {
1759 // The mark stack holds both TaggedPtr and SlotsOrElementsRange entries, which
1760 // are one or two words long respectively. Determine whether |index| points to
1761 // the base of an entry (i.e. the lowest word in memory).
1763 // The possible cases are that |index| points to:
1764 // 1. a single word TaggedPtr entry => true
1765 // 2. the startAndKind_ word of SlotsOrElementsRange => true
1766 // (startAndKind_ is a uintptr_t tagged with SlotsOrElementsKind)
1767 // 3. the ptr_ word of SlotsOrElementsRange (itself a TaggedPtr) => false
1769 // To check for case 3, interpret the word as a TaggedPtr: if it is tagged as
1770 // a SlotsOrElementsRange tagged pointer then we are inside such a range and
1771 // |index| does not point to the base of an entry. This requires that no
1772 // startAndKind_ word can be interpreted as such, which is arranged by making
1773 // SlotsOrElementsRangeTag zero and all SlotsOrElementsKind tags non-zero.
1775 MOZ_ASSERT(index
< position());
1776 return stack()[index
].tagUnchecked() != SlotsOrElementsRangeTag
;
1780 void MarkStack::moveWork(MarkStack
& dst
, MarkStack
& src
) {
1781 // Move some work from |src| to |dst|. Assumes |dst| is empty.
1783 // When this method runs during parallel marking, we are on the thread that
1784 // owns |src|, and the thread that owns |dst| is blocked waiting on the
1785 // ParallelMarkTask::resumed condition variable.
1787 // Limit the size of moves to stop threads with work spending too much time
1789 static const size_t MaxWordsToMove
= 4096;
1791 size_t totalWords
= src
.position();
1792 size_t wordsToMove
= std::min(totalWords
/ 2, MaxWordsToMove
);
1794 size_t targetPos
= src
.position() - wordsToMove
;
1796 // Adjust the target position in case it points to the middle of a two word
1798 if (!src
.indexIsEntryBase(targetPos
)) {
1802 MOZ_ASSERT(src
.indexIsEntryBase(targetPos
));
1803 MOZ_ASSERT(targetPos
< src
.position());
1804 MOZ_ASSERT(targetPos
> 0);
1805 MOZ_ASSERT(wordsToMove
== src
.position() - targetPos
);
1807 if (!dst
.ensureSpace(wordsToMove
)) {
1811 // TODO: This doesn't have good cache behaviour when moving work between
1812 // threads. It might be better if the original thread ended up with the top
1813 // part of the stack, in src words if this method stole from the bottom of
1814 // the stack rather than the top.
1816 mozilla::PodCopy(dst
.topPtr(), src
.stack().begin() + targetPos
, wordsToMove
);
1817 dst
.topIndex_
+= wordsToMove
;
1818 dst
.peekPtr().assertValid();
1820 src
.topIndex_
= targetPos
;
1824 src
.peekPtr().assertValid();
1827 void MarkStack::clearAndResetCapacity() {
1828 // Fall back to the smaller initial capacity so we don't hold on to excess
1829 // memory between GCs.
1832 (void)resetStackCapacity();
1835 void MarkStack::clearAndFreeStack() {
1836 // Free all stack memory so we don't hold on to excess memory between GCs.
1837 stack().clearAndFree();
1841 inline MarkStack::TaggedPtr
* MarkStack::topPtr() { return &stack()[topIndex_
]; }
1843 template <typename T
>
1844 inline bool MarkStack::push(T
* ptr
) {
1845 return push(TaggedPtr(MapTypeToMarkStackTag
<T
*>::value
, ptr
));
1848 inline bool MarkStack::pushTempRope(JSRope
* rope
) {
1849 return push(TaggedPtr(TempRopeTag
, rope
));
1852 inline bool MarkStack::push(const TaggedPtr
& ptr
) {
1853 if (!ensureSpace(1)) {
1857 infalliblePush(ptr
);
1861 inline void MarkStack::infalliblePush(const TaggedPtr
& ptr
) {
1864 MOZ_ASSERT(position() <= capacity());
1867 inline bool MarkStack::push(JSObject
* obj
, SlotsOrElementsKind kind
,
1869 return push(SlotsOrElementsRange(kind
, obj
, start
));
1872 inline bool MarkStack::push(const SlotsOrElementsRange
& array
) {
1873 array
.assertValid();
1875 if (!ensureSpace(ValueRangeWords
)) {
1879 infalliblePush(array
);
1883 inline void MarkStack::infalliblePush(const SlotsOrElementsRange
& array
) {
1884 *reinterpret_cast<SlotsOrElementsRange
*>(topPtr()) = array
;
1885 topIndex_
+= ValueRangeWords
;
1886 MOZ_ASSERT(position() <= capacity());
1887 MOZ_ASSERT(TagIsRangeTag(peekTag()));
1890 inline const MarkStack::TaggedPtr
& MarkStack::peekPtr() const {
1891 MOZ_ASSERT(!isEmpty());
1892 return stack()[topIndex_
- 1];
1895 inline MarkStack::Tag
MarkStack::peekTag() const {
1896 MOZ_ASSERT(!isEmpty());
1897 return peekPtr().tag();
1900 inline MarkStack::TaggedPtr
MarkStack::popPtr() {
1901 MOZ_ASSERT(!isEmpty());
1902 MOZ_ASSERT(!TagIsRangeTag(peekTag()));
1903 peekPtr().assertValid();
1908 inline MarkStack::SlotsOrElementsRange
MarkStack::popSlotsOrElementsRange() {
1909 MOZ_ASSERT(!isEmpty());
1910 MOZ_ASSERT(TagIsRangeTag(peekTag()));
1911 MOZ_ASSERT(position() >= ValueRangeWords
);
1913 topIndex_
-= ValueRangeWords
;
1914 const auto& array
= *reinterpret_cast<SlotsOrElementsRange
*>(topPtr());
1915 array
.assertValid();
1919 inline bool MarkStack::ensureSpace(size_t count
) {
1920 if (MOZ_LIKELY((topIndex_
+ count
) <= capacity())) {
1921 return !js::oom::ShouldFailWithOOM();
1924 return enlarge(count
);
1927 MOZ_NEVER_INLINE
bool MarkStack::enlarge(size_t count
) {
1928 size_t required
= capacity() + count
;
1929 size_t newCapacity
= mozilla::RoundUpPow2(required
);
1932 newCapacity
= std::min(newCapacity
, maxCapacity_
.ref());
1933 if (newCapacity
< required
) {
1938 return resize(newCapacity
);
1941 bool MarkStack::resize(size_t newCapacity
) {
1942 MOZ_ASSERT(newCapacity
!= 0);
1943 MOZ_ASSERT(newCapacity
>= position());
1945 if (!stack().resize(newCapacity
)) {
1953 inline void MarkStack::poisonUnused() {
1954 static_assert((JS_FRESH_MARK_STACK_PATTERN
& TagMask
) > LastTag
,
1955 "The mark stack poison pattern must not look like a valid "
1958 AlwaysPoison(stack().begin() + topIndex_
, JS_FRESH_MARK_STACK_PATTERN
,
1959 stack().capacity() - topIndex_
, MemCheckKind::MakeUndefined
);
size_t MarkStack::sizeOfExcludingThis(
    mozilla::MallocSizeOf mallocSizeOf) const {
  return stack().sizeOfExcludingThis(mallocSizeOf);
}

/*** GCMarker ***************************************************************/

/*
 * WeakMapTraceAction::Expand: the GC is recomputing the liveness of WeakMap
 * entries by expanding each live WeakMap into its constituent key->value
 * edges, a table of which will be consulted in a later phase whenever marking
 * a potential key.
 */
GCMarker::GCMarker(JSRuntime* rt)
    : tracer_(mozilla::VariantType<MarkingTracer>(), rt, this),
      runtime_(rt),
      haveSwappedStacks(false),
      markColor_(MarkColor::Black),
      state(NotActive),
      incrementalWeakMapMarkingEnabled(
          TuningDefaults::IncrementalWeakMapMarkingEnabled)
#ifdef DEBUG
      ,
      checkAtomMarking(true),
      strictCompartmentChecking(false)
#endif
{
}

bool GCMarker::init() { return stack.init(); }

void GCMarker::start() {
  MOZ_ASSERT(state == NotActive);
  MOZ_ASSERT(stack.isEmpty());
  state = RegularMarking;
  haveAllImplicitEdges = true;
  setMarkColor(MarkColor::Black);
}

static void ClearEphemeronEdges(JSRuntime* rt) {
  AutoEnterOOMUnsafeRegion oomUnsafe;
  for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
    if (!zone->gcEphemeronEdges().clear()) {
      oomUnsafe.crash("clearing weak keys in GCMarker::stop()");
    }
    if (!zone->gcNurseryEphemeronEdges().clear()) {
      oomUnsafe.crash("clearing (nursery) weak keys in GCMarker::stop()");
    }
  }
}

void GCMarker::stop() {
  MOZ_ASSERT(isDrained());
  MOZ_ASSERT(markColor() == MarkColor::Black);
  MOZ_ASSERT(!haveSwappedStacks);

  if (state == NotActive) {
    return;
  }
  state = NotActive;

  stack.clearAndResetCapacity();
  otherStack.clearAndFreeStack();
  ClearEphemeronEdges(runtime());
  unmarkGrayStack.clearAndFree();
}

void GCMarker::reset() {
  state = NotActive;

  stack.clearAndResetCapacity();
  otherStack.clearAndFreeStack();
  ClearEphemeronEdges(runtime());
  MOZ_ASSERT(isDrained());

  setMarkColor(MarkColor::Black);
  MOZ_ASSERT(!haveSwappedStacks);

  unmarkGrayStack.clearAndFree();
}

void GCMarker::setMarkColor(gc::MarkColor newColor) {
  if (markColor_ == newColor) {
    return;
  }

  // We don't support gray marking while there is black marking work to do.
  MOZ_ASSERT(!hasBlackEntries());

  markColor_ = newColor;

  // Switch stacks. We only need to do this if there are any stack entries (as
  // empty stacks are interchangeable) or to switch back to the original stack.
  if (!isDrained() || haveSwappedStacks) {
    std::swap(stack, otherStack);
    haveSwappedStacks = !haveSwappedStacks;
  }
}

bool GCMarker::hasEntries(MarkColor color) const {
  const MarkStack& stackForColor = color == markColor() ? stack : otherStack;
  return stackForColor.hasEntries();
}

template <typename T>
inline void GCMarker::pushTaggedPtr(T* ptr) {
  checkZone(ptr);
  if (!stack.push(ptr)) {
    delayMarkingChildrenOnOOM(ptr);
  }
}

inline void GCMarker::pushValueRange(JSObject* obj, SlotsOrElementsKind kind,
                                     size_t start, size_t end) {
  checkZone(obj);
  MOZ_ASSERT(obj->is<NativeObject>());
  MOZ_ASSERT(start <= end);

  if (start == end) {
    return;
  }

  if (MOZ_UNLIKELY(!stack.push(obj, kind, start))) {
    delayMarkingChildrenOnOOM(obj);
  }
}

void GCMarker::repush(JSObject* obj) {
  MOZ_ASSERT(obj->asTenured().isMarkedAtLeast(markColor()));
  pushTaggedPtr(obj);
}

void GCMarker::setRootMarkingMode(bool newState) {
  if (newState) {
    setMarkingStateAndTracer<RootMarkingTracer>(RegularMarking, RootMarking);
  } else {
    setMarkingStateAndTracer<MarkingTracer>(RootMarking, RegularMarking);
  }
}

void GCMarker::enterParallelMarkingMode(ParallelMarker* pm) {
  MOZ_ASSERT(pm);
  MOZ_ASSERT(!parallelMarker_);
  setMarkingStateAndTracer<ParallelMarkingTracer>(RegularMarking,
                                                  ParallelMarking);
  parallelMarker_ = pm;
}

void GCMarker::leaveParallelMarkingMode() {
  MOZ_ASSERT(parallelMarker_);
  setMarkingStateAndTracer<MarkingTracer>(ParallelMarking, RegularMarking);
  parallelMarker_ = nullptr;
}

bool GCMarker::canDonateWork() const {
  // It's not worth the overhead of donating very few entries. For some
  // (non-parallelizable) workloads this can lead to constantly interrupting
  // marking work and makes parallel marking slower than single threaded.
  constexpr size_t MinWordCount = 12;

  static_assert(MinWordCount >= ValueRangeWords,
                "We must always leave at least one stack entry.");

  return stack.position() > MinWordCount;
}

template <typename Tracer>
void GCMarker::setMarkingStateAndTracer(MarkingState prev, MarkingState next) {
  MOZ_ASSERT(state == prev);
  state = next;
  tracer_.emplace<Tracer>(runtime(), this);
}

bool GCMarker::enterWeakMarkingMode() {
  MOZ_ASSERT(tracer()->weakMapAction() == JS::WeakMapTraceAction::Expand);
  if (!haveAllImplicitEdges) {
    return false;
  }

  // During weak marking mode, we maintain a table mapping weak keys to
  // entries in known-live weakmaps. Initialize it with the keys of marked
  // weakmaps -- or more precisely, the keys of marked weakmaps that are
  // mapped to not yet live values. (Once bug 1167452 implements incremental
  // weakmap marking, this initialization step will become unnecessary, as
  // the table will already hold all such keys.)
  //
  // Set state before doing anything else, so any new key that is marked
  // during the following gcEphemeronEdges scan will itself be looked up in
  // gcEphemeronEdges and marked according to ephemeron rules.
  setMarkingStateAndTracer<WeakMarkingTracer>(RegularMarking, WeakMarking);

  return true;
}

IncrementalProgress JS::Zone::enterWeakMarkingMode(GCMarker* marker,
                                                   SliceBudget& budget) {
  MOZ_ASSERT(marker->isWeakMarking());

  if (!marker->incrementalWeakMapMarkingEnabled) {
    for (WeakMapBase* m : gcWeakMapList()) {
      if (m->mapColor()) {
        (void)m->markEntries(marker);
      }
    }
    return IncrementalProgress::Finished;
  }

  // gcEphemeronEdges contains the keys from all weakmaps marked so far, or at
  // least the keys that might still need to be marked through. Scan through
  // gcEphemeronEdges and mark all values whose keys are marked. This marking
  // may recursively mark through other weakmap entries (immediately since we
  // are now in WeakMarking mode). The end result is a consistent state where
  // all values are marked if both their map and key are marked -- though note
  // that we may later leave weak marking mode, do some more marking, and then
  // enter back in.
  if (!isGCMarking()) {
    return IncrementalProgress::Finished;
  }

  MOZ_ASSERT(gcNurseryEphemeronEdges().count() == 0);

  // An OrderedHashMap::MutableRange stays valid even when the underlying table
  // (zone->gcEphemeronEdges) is mutated, which is useful here since we may add
  // additional entries while iterating over the Range.
  EphemeronEdgeTable::MutableRange r = gcEphemeronEdges().mutableAll();
  while (!r.empty()) {
    Cell* src = r.front().key;
    CellColor srcColor = gc::detail::GetEffectiveColor(marker, src);
    auto& edges = r.front().value;
    r.popFront();  // Pop before any mutations happen.

    if (edges.length() > 0) {
      uint32_t steps = edges.length();
      marker->markEphemeronEdges(edges, srcColor);
      budget.step(steps);
      if (budget.isOverBudget()) {
        return IncrementalProgress::NotFinished;
      }
    }
  }

  return IncrementalProgress::Finished;
}

void GCMarker::leaveWeakMarkingMode() {
  if (state == RegularMarking) {
    return;
  }

  setMarkingStateAndTracer<MarkingTracer>(WeakMarking, RegularMarking);

  // The gcEphemeronEdges table is still populated and may be used during a
  // future weak marking mode within this GC.
}

void GCMarker::abortLinearWeakMarking() {
  haveAllImplicitEdges = false;
  if (state == WeakMarking) {
    leaveWeakMarkingMode();
  }
}

MOZ_NEVER_INLINE void GCMarker::delayMarkingChildrenOnOOM(Cell* cell) {
  runtime()->gc.delayMarkingChildren(cell, markColor());
}

bool GCRuntime::hasDelayedMarking() const {
  bool result = delayedMarkingList;
  MOZ_ASSERT(result == (markLaterArenas != 0));
  return result;
}

void GCRuntime::delayMarkingChildren(Cell* cell, MarkColor color) {
  // Synchronize access to delayed marking state during parallel marking.
  LockGuard<Mutex> lock(delayedMarkingLock);

  Arena* arena = cell->asTenured().arena();
  if (!arena->onDelayedMarkingList()) {
    arena->setNextDelayedMarkingArena(delayedMarkingList);
    delayedMarkingList = arena;
#ifdef DEBUG
    markLaterArenas++;
#endif
  }

  if (!arena->hasDelayedMarking(color)) {
    arena->setHasDelayedMarking(color, true);
    delayedMarkingWorkAdded = true;
  }
}

void GCRuntime::markDelayedChildren(Arena* arena, MarkColor color) {
  JSTracer* trc = marker().tracer();
  JS::TraceKind kind = MapAllocToTraceKind(arena->getAllocKind());
  MarkColor colorToCheck =
      TraceKindCanBeMarkedGray(kind) ? color : MarkColor::Black;

  for (ArenaCellIterUnderGC cell(arena); !cell.done(); cell.next()) {
    if (cell->isMarked(colorToCheck)) {
      ApplyGCThingTyped(cell, kind, [trc, this](auto t) {
        t->traceChildren(trc);
        marker().markImplicitEdges(t);
      });
    }
  }
}

/*
 * Process arenas from |delayedMarkingList| by marking the unmarked children of
 * marked cells of color |color|.
 *
 * This is called twice, first to mark black children and then to mark gray
 * children.
 */
void GCRuntime::processDelayedMarkingList(MarkColor color) {
  // Marking delayed children may add more arenas to the list, including arenas
  // we are currently processing or have previously processed. Handle this by
  // clearing a flag on each arena before marking its children. This flag will
  // be set again if the arena is re-added. Iterate the list until no new
  // arenas are added.

  AutoSetMarkColor setColor(marker(), color);

  do {
    delayedMarkingWorkAdded = false;
    for (Arena* arena = delayedMarkingList; arena;
         arena = arena->getNextDelayedMarking()) {
      if (arena->hasDelayedMarking(color)) {
        arena->setHasDelayedMarking(color, false);
        markDelayedChildren(arena, color);
      }
    }
    while (marker().hasEntriesForCurrentColor()) {
      SliceBudget budget = SliceBudget::unlimited();
      MOZ_ALWAYS_TRUE(
          marker().processMarkStackTop<NormalMarkingOptions>(budget));
    }
  } while (delayedMarkingWorkAdded);

  MOZ_ASSERT(marker().isDrained());
}

void GCRuntime::markAllDelayedChildren(ShouldReportMarkTime reportTime) {
  MOZ_ASSERT(CurrentThreadIsMainThread() || CurrentThreadIsPerformingGC());
  MOZ_ASSERT(marker().isDrained());
  MOZ_ASSERT(hasDelayedMarking());

  mozilla::Maybe<gcstats::AutoPhase> ap;
  if (reportTime) {
    ap.emplace(stats(), gcstats::PhaseKind::MARK_DELAYED);
  }

  // We have a list of arenas containing marked cells with unmarked children
  // where we ran out of stack space during marking. Both black and gray cells
  // in these arenas may have unmarked children. Mark black children first.

  const MarkColor colors[] = {MarkColor::Black, MarkColor::Gray};
  for (MarkColor color : colors) {
    processDelayedMarkingList(color);
    rebuildDelayedMarkingList();
  }

  MOZ_ASSERT(!hasDelayedMarking());
}

void GCRuntime::rebuildDelayedMarkingList() {
  // Rebuild the delayed marking list, removing arenas which do not need
  // further marking.

  Arena* listTail = nullptr;
  forEachDelayedMarkingArena([&](Arena* arena) {
    if (!arena->hasAnyDelayedMarking()) {
      arena->clearDelayedMarkingState();
#ifdef DEBUG
      MOZ_ASSERT(markLaterArenas);
      markLaterArenas--;
#endif
      return;
    }

    appendToDelayedMarkingList(&listTail, arena);
  });
  appendToDelayedMarkingList(&listTail, nullptr);
}

void GCRuntime::resetDelayedMarking() {
  MOZ_ASSERT(CurrentThreadIsMainThread());

  forEachDelayedMarkingArena([&](Arena* arena) {
    MOZ_ASSERT(arena->onDelayedMarkingList());
    arena->clearDelayedMarkingState();
#ifdef DEBUG
    MOZ_ASSERT(markLaterArenas);
    markLaterArenas--;
#endif
  });
  delayedMarkingList = nullptr;
  MOZ_ASSERT(!markLaterArenas);
}

inline void GCRuntime::appendToDelayedMarkingList(Arena** listTail,
                                                  Arena* arena) {
  if (*listTail) {
    (*listTail)->updateNextDelayedMarkingArena(arena);
  } else {
    delayedMarkingList = arena;
  }
  *listTail = arena;
}

template <typename F>
inline void GCRuntime::forEachDelayedMarkingArena(F&& f) {
  Arena* arena = delayedMarkingList;
  Arena* next;
  while (arena) {
    next = arena->getNextDelayedMarking();
    f(arena);
    arena = next;
  }
}

#ifdef DEBUG
void GCMarker::checkZone(void* p) {
  MOZ_ASSERT(state != NotActive);
  DebugOnly<Cell*> cell = static_cast<Cell*>(p);
  MOZ_ASSERT_IF(cell->isTenured(),
                cell->asTenured().zone()->isCollectingFromAnyThread());
}
#endif

size_t GCMarker::sizeOfIncludingThis(
    mozilla::MallocSizeOf mallocSizeOf) const {
  return mallocSizeOf(this) + stack.sizeOfExcludingThis(mallocSizeOf) +
         otherStack.sizeOfExcludingThis(mallocSizeOf);
}

/*** IsMarked / IsAboutToBeFinalized ****************************************/

template <typename T>
static inline void CheckIsMarkedThing(T* thing) {
#define IS_SAME_TYPE_OR(name, type, _, _1) std::is_same_v<type, T> ||
  static_assert(JS_FOR_EACH_TRACEKIND(IS_SAME_TYPE_OR) false,
                "Only the base cell layout types are allowed into "
                "marking/tracing internals");
#undef IS_SAME_TYPE_OR

#ifdef DEBUG
  MOZ_ASSERT(thing);

  // Allow any thread access to uncollected things.
  Zone* zone = thing->zoneFromAnyThread();
  if (thing->isPermanentAndMayBeShared()) {
    MOZ_ASSERT(!zone->wasGCStarted());
    MOZ_ASSERT(!zone->needsIncrementalBarrier());
    MOZ_ASSERT(thing->isMarkedBlack());
    return;
  }

  // Allow the current thread access if it is sweeping or in sweep-marking, but
  // try to check the zone. Some threads have access to all zones when
  // sweeping.
  JS::GCContext* gcx = TlsGCContext.get();
  MOZ_ASSERT(gcx->gcUse() != GCUse::Finalizing);
  if (gcx->gcUse() == GCUse::Sweeping || gcx->gcUse() == GCUse::Marking) {
    MOZ_ASSERT_IF(gcx->gcSweepZone(),
                  gcx->gcSweepZone() == zone || zone->isAtomsZone());
    return;
  }

  // Otherwise only allow access from the main thread or this zone's associated
  // thread.
  MOZ_ASSERT(CurrentThreadCanAccessRuntime(thing->runtimeFromAnyThread()) ||
             CurrentThreadCanAccessZone(thing->zoneFromAnyThread()));
#endif
}

template <typename T>
bool js::gc::IsMarkedInternal(JSRuntime* rt, T* thing) {
  // Don't depend on the mark state of other cells during finalization.
  MOZ_ASSERT(!CurrentThreadIsGCFinalizing());
  MOZ_ASSERT(rt->heapState() != JS::HeapState::MinorCollecting);

  CheckIsMarkedThing(thing);

  // This is not used during minor sweeping nor used to update moved GC things.
  MOZ_ASSERT(!IsForwarded(thing));

  // Permanent things are never marked by non-owning runtimes.
  TenuredCell* cell = &thing->asTenured();
  Zone* zone = cell->zoneFromAnyThread();
  if (IsOwnedByOtherRuntime(rt, thing)) {
    MOZ_ASSERT(!zone->wasGCStarted());
    MOZ_ASSERT(thing->isMarkedBlack());
    return true;
  }

  return !zone->isGCMarking() || TenuredThingIsMarkedAny(thing);
}

template <typename T>
bool js::gc::IsAboutToBeFinalizedInternal(T* thing) {
  // Don't depend on the mark state of other cells during finalization.
  MOZ_ASSERT(!CurrentThreadIsGCFinalizing());

  CheckIsMarkedThing(thing);

  // This is not used during minor sweeping nor used to update moved GC things.
  MOZ_ASSERT(!IsForwarded(thing));

  if (!thing->isTenured()) {
    return false;
  }

  // Permanent things are never finalized by non-owning runtimes.
  TenuredCell* cell = &thing->asTenured();
  Zone* zone = cell->zoneFromAnyThread();

  JSRuntime* rt = TlsGCContext.get()->runtimeFromAnyThread();
  if (IsOwnedByOtherRuntime(rt, thing)) {
    MOZ_ASSERT(!zone->wasGCStarted());
    MOZ_ASSERT(thing->isMarkedBlack());
    return false;
  }

  return zone->isGCSweeping() && !TenuredThingIsMarkedAny(thing);
}

template <typename T>
bool js::gc::IsAboutToBeFinalizedInternal(const T& thing) {
  bool dying = false;
  ApplyGCThingTyped(
      thing, [&dying](auto t) { dying = IsAboutToBeFinalizedInternal(t); });
  return dying;
}

SweepingTracer::SweepingTracer(JSRuntime* rt)
    : GenericTracerImpl(rt, JS::TracerKind::Sweeping,
                        JS::WeakMapTraceAction::TraceKeysAndValues) {}

template <typename T>
inline void SweepingTracer::onEdge(T** thingp, const char* name) {
  T* thing = *thingp;
  CheckIsMarkedThing(thing);

  if (!thing->isTenured()) {
    return;
  }

  // Permanent things are never finalized by non-owning runtimes.
  TenuredCell* cell = &thing->asTenured();
  Zone* zone = cell->zoneFromAnyThread();

  if (IsOwnedByOtherRuntime(runtime(), thing)) {
    MOZ_ASSERT(!zone->wasGCStarted());
    MOZ_ASSERT(thing->isMarkedBlack());
    return;
  }

  // It would be nice if we could assert that the zone of the tenured cell is
  // in the Sweeping state, but that isn't always true for:
  //  - atoms
  //  - the jitcode map
  //  - the mark queue
  if (zone->isGCSweeping() && !cell->isMarkedAny()) {
    *thingp = nullptr;
  }
}

template <typename T>
JS_PUBLIC_API bool TraceWeakEdge(JSTracer* trc, JS::Heap<T>* thingp) {
  return TraceEdgeInternal(trc, gc::ConvertToBase(thingp->unsafeGet()),
                           "JS::Heap edge");
}

template <typename T>
JS_PUBLIC_API bool EdgeNeedsSweepUnbarrieredSlow(T* thingp) {
  return IsAboutToBeFinalizedInternal(*ConvertToBase(thingp));
}

// Instantiate a copy of the Tracing templates for each public GC type.
#define INSTANTIATE_ALL_VALID_HEAP_TRACE_FUNCTIONS(type)                 \
  template JS_PUBLIC_API bool TraceWeakEdge<type>(JSTracer * trc,        \
                                                  JS::Heap<type>*);      \
  template JS_PUBLIC_API bool EdgeNeedsSweepUnbarrieredSlow<type>(type*);
JS_FOR_EACH_PUBLIC_GC_POINTER_TYPE(INSTANTIATE_ALL_VALID_HEAP_TRACE_FUNCTIONS)
JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(
    INSTANTIATE_ALL_VALID_HEAP_TRACE_FUNCTIONS)

#define INSTANTIATE_INTERNAL_IS_MARKED_FUNCTION(type) \
  template bool IsMarkedInternal(JSRuntime* rt, type thing);

#define INSTANTIATE_INTERNAL_IATBF_FUNCTION(type) \
  template bool IsAboutToBeFinalizedInternal(type thingp);

#define INSTANTIATE_INTERNAL_MARKING_FUNCTIONS_FROM_TRACEKIND(_1, type, _2, \
                                                              _3)           \
  INSTANTIATE_INTERNAL_IS_MARKED_FUNCTION(type*)                            \
  INSTANTIATE_INTERNAL_IATBF_FUNCTION(type*)

JS_FOR_EACH_TRACEKIND(INSTANTIATE_INTERNAL_MARKING_FUNCTIONS_FROM_TRACEKIND)

#define INSTANTIATE_IATBF_FUNCTION_FOR_TAGGED_POINTER(type) \
  INSTANTIATE_INTERNAL_IATBF_FUNCTION(const type&)

JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(
    INSTANTIATE_IATBF_FUNCTION_FOR_TAGGED_POINTER)

#undef INSTANTIATE_INTERNAL_IS_MARKED_FUNCTION
#undef INSTANTIATE_INTERNAL_IATBF_FUNCTION
#undef INSTANTIATE_INTERNAL_MARKING_FUNCTIONS_FROM_TRACEKIND
#undef INSTANTIATE_IATBF_FUNCTION_FOR_TAGGED_POINTER

}  // namespace js::gc

/*** Cycle Collector Barrier Implementation *********************************/

/*
 * The GC and CC are run independently. Consequently, the following sequence of
 * events can occur:
 * 1. GC runs and marks an object gray.
 * 2. The mutator runs (specifically, some C++ code with access to gray
 *    objects) and creates a pointer from a JS root or other black object to
 *    the gray object. If we re-ran a GC at this point, the object would now be
 *    black.
 * 3. Now we run the CC. It may think it can collect the gray object, even
 *    though it's reachable from the JS heap.
 *
 * To prevent this badness, we unmark the gray bit of an object when it is
 * accessed by callers outside XPConnect. This would cause the object to go
 * black in step 2 above. This must be done on everything reachable from the
 * object being returned. The following code takes care of the recursive
 * re-coloring.
 *
 * There is an additional complication for certain kinds of edges that are not
 * contained explicitly in the source object itself, such as from a weakmap key
 * to its value. These "implicit edges" are represented in some other
 * container object, such as the weakmap itself. In these cases, calling unmark
 * gray on an object won't find all of its children.
 *
 * Handling these implicit edges has two parts:
 * - A special pass enumerating all of the containers that know about the
 *   implicit edges to fix any black-gray edges that have been created. This
 *   is implemented in nsXPConnect::FixWeakMappingGrayBits.
 * - To prevent any incorrectly gray objects from escaping to live JS outside
 *   of the containers, we must add unmark-graying read barriers to these
 *   containers.
 */

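// Illustrative example (comment only): embedders hit this barrier via
// JS::ExposeObjectToActiveJS(obj), which (for a gray |obj|) ends up in
// JS::UnmarkGrayGCThingRecursively() below and re-colors |obj| and everything
// reachable from it black before handing it to running script.
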
#ifdef DEBUG
struct AssertNonGrayTracer final : public JS::CallbackTracer {
  // This is used by the UnmarkGray tracer only, and needs to report itself as
  // the non-gray tracer to not trigger assertions. Do not use it in another
  // context without making this more generic.
  explicit AssertNonGrayTracer(JSRuntime* rt)
      : JS::CallbackTracer(rt, JS::TracerKind::UnmarkGray) {}
  void onChild(JS::GCCellPtr thing, const char* name) override {
    MOZ_ASSERT(!thing.asCell()->isMarkedGray());
  }
};
#endif

class js::gc::UnmarkGrayTracer final : public JS::CallbackTracer {
 public:
  // We set weakMapAction to WeakMapTraceAction::Skip because the cycle
  // collector will fix up any color mismatches involving weakmaps when it
  // runs.
  explicit UnmarkGrayTracer(GCMarker* marker)
      : JS::CallbackTracer(marker->runtime(), JS::TracerKind::UnmarkGray,
                           JS::WeakMapTraceAction::Skip),
        unmarkedAny(false),
        oom(false),
        marker(marker),
        stack(marker->unmarkGrayStack) {}

  void unmark(JS::GCCellPtr cell);

  // Whether we unmarked anything.
  bool unmarkedAny;

 private:
  // Whether we ran out of memory.
  bool oom;

  // Marker to use if we need to unmark in zones that are currently being
  // marked.
  GCMarker* marker;

  // Stack of cells to traverse.
  Vector<JS::GCCellPtr, 0, SystemAllocPolicy>& stack;

  void onChild(JS::GCCellPtr thing, const char* name) override;
};

void UnmarkGrayTracer::onChild(JS::GCCellPtr thing, const char* name) {
  Cell* cell = thing.asCell();

  // Cells in the nursery cannot be gray, and nor can certain kinds of tenured
  // cells. These must necessarily point only to black edges.
  if (!cell->isTenured() || !TraceKindCanBeMarkedGray(thing.kind())) {
#ifdef DEBUG
    MOZ_ASSERT(!cell->isMarkedGray());
    AssertNonGrayTracer nongray(runtime());
    JS::TraceChildren(&nongray, thing);
#endif
    return;
  }

  TenuredCell& tenured = cell->asTenured();
  Zone* zone = tenured.zone();

  // If the cell is in a zone whose mark bits are being cleared, then it will
  // end up white.
  if (zone->isGCPreparing()) {
    return;
  }

  // If the cell is in a zone that we're currently marking, then it's possible
  // that it is currently white but will end up gray. To handle this case,
  // trigger the barrier for any cells in zones that are currently being
  // marked. This will ensure they will eventually get marked black.
  if (zone->isGCMarking()) {
    if (!cell->isMarkedBlack()) {
      TraceEdgeForBarrier(marker, &tenured, thing.kind());
      unmarkedAny = true;
    }
    return;
  }

  if (!tenured.isMarkedGray()) {
    return;
  }

  // TODO: It may be a small improvement to only use the atomic version during
  // parallel marking.
  tenured.markBlackAtomic();
  unmarkedAny = true;

  if (!stack.append(thing)) {
    oom = true;
  }
}

void UnmarkGrayTracer::unmark(JS::GCCellPtr cell) {
  MOZ_ASSERT(stack.empty());

  onChild(cell, "unmarking root");

  while (!stack.empty() && !oom) {
    TraceChildren(this, stack.popCopy());
  }

  if (oom) {
    // If we run out of memory, we take a drastic measure: require that we
    // GC again before the next CC.
    stack.clear();
    runtime()->gc.setGrayBitsInvalid();
    return;
  }
}

bool js::gc::UnmarkGrayGCThingUnchecked(GCMarker* marker,
                                        JS::GCCellPtr thing) {
  MOZ_ASSERT(thing);
  MOZ_ASSERT(thing.asCell()->isMarkedGray());

  mozilla::Maybe<AutoGeckoProfilerEntry> profilingStackFrame;
  if (JSContext* cx = TlsContext.get()) {
    profilingStackFrame.emplace(cx, "UnmarkGrayGCThing",
                                JS::ProfilingCategoryPair::GCCC_UnmarkGray);
  }

  UnmarkGrayTracer unmarker(marker);
  unmarker.unmark(thing);
  return unmarker.unmarkedAny;
}

JS_PUBLIC_API bool JS::UnmarkGrayGCThingRecursively(JS::GCCellPtr thing) {
  MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
  MOZ_ASSERT(!JS::RuntimeHeapIsCycleCollecting());

  JSRuntime* rt = thing.asCell()->runtimeFromMainThread();
  if (thing.asCell()->zone()->isGCPreparing()) {
    // Mark bits are being cleared in preparation for GC.
    return false;
  }

  return UnmarkGrayGCThingUnchecked(&rt->gc.marker(), thing);
}

void js::gc::UnmarkGrayGCThingRecursively(TenuredCell* cell) {
  JS::UnmarkGrayGCThingRecursively(JS::GCCellPtr(cell, cell->getTraceKind()));
}

bool js::UnmarkGrayShapeRecursively(Shape* shape) {
  return JS::UnmarkGrayGCThingRecursively(JS::GCCellPtr(shape));
}

#ifdef DEBUG
Cell* js::gc::UninlinedForwarded(const Cell* cell) { return Forwarded(cell); }
#endif

namespace js::debug {

MarkInfo GetMarkInfo(Cell* rawCell) {
  if (!rawCell->isTenured()) {
    return MarkInfo::NURSERY;
  }

  TenuredCell* cell = &rawCell->asTenured();
  if (cell->isMarkedGray()) {
    return MarkInfo::GRAY;
  }
  if (cell->isMarkedBlack()) {
    return MarkInfo::BLACK;
  }
  return MarkInfo::UNMARKED;
}

uintptr_t* GetMarkWordAddress(Cell* cell) {
  if (!cell->isTenured()) {
    return nullptr;
  }

  MarkBitmapWord* wordp;
  uintptr_t mask;
  TenuredChunkBase* chunk = gc::detail::GetCellChunkBase(&cell->asTenured());
  chunk->markBits.getMarkWordAndMask(&cell->asTenured(), ColorBit::BlackBit,
                                     &wordp, &mask);
  return reinterpret_cast<uintptr_t*>(wordp);
}

uintptr_t GetMarkMask(Cell* cell, uint32_t colorBit) {
  MOZ_ASSERT(colorBit == 0 || colorBit == 1);

  if (!cell->isTenured()) {
    return 0;
  }

  ColorBit bit = colorBit == 0 ? ColorBit::BlackBit : ColorBit::GrayOrBlackBit;
  MarkBitmapWord* wordp;
  uintptr_t mask;
  TenuredChunkBase* chunk = gc::detail::GetCellChunkBase(&cell->asTenured());
  chunk->markBits.getMarkWordAndMask(&cell->asTenured(), bit, &wordp, &mask);
  return mask;
}

}  // namespace js::debug