/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "gc/Marking-inl.h"

#include "mozilla/DebugOnly.h"
#include "mozilla/IntegerRange.h"
#include "mozilla/MathAlgorithms.h"
#include "mozilla/Maybe.h"
#include "mozilla/PodOperations.h"
#include "mozilla/ScopeExit.h"

#include <algorithm>
#include <type_traits>

#include "gc/GCInternals.h"
#include "gc/ParallelMarking.h"
#include "gc/TraceKind.h"
#include "jit/JitCode.h"
#include "js/GCTypeMacros.h"  // JS_FOR_EACH_PUBLIC_{,TAGGED_}GC_POINTER_TYPE
#include "js/SliceBudget.h"
#include "util/Poison.h"
#include "vm/GeneratorObject.h"

#include "gc/GC-inl.h"
#include "gc/PrivateIterators-inl.h"
#include "gc/TraceMethods-inl.h"
#include "gc/WeakMap-inl.h"
#include "vm/GeckoProfiler-inl.h"

using namespace js;
using namespace js::gc;

using JS::MapTypeToTraceKind;

using mozilla::DebugOnly;
using mozilla::IntegerRange;
using mozilla::PodCopy;

// [SMDOC] GC Tracing
//
// Tracing Overview
// ================
//
// Tracing, in this context, refers to an abstract visitation of some or all of
// the GC-controlled heap. The effect of tracing an edge of the graph depends
// on the subclass of the JSTracer on whose behalf we are tracing.
//
// Marking
// -------
//
// The primary JSTracer is the GCMarker. The marking tracer causes the target
// of each traversed edge to be marked black and the target edge's children to
// be marked either gray (in the gc algorithm sense) or immediately black.
//
// Callback
// --------
//
// The secondary JSTracer is the CallbackTracer. This simply invokes a callback
// on each edge in a child.
//
// The following is a rough outline of the general structure of the tracing
// internals.
//
/* clang-format off */
//
//  +-------------------+                             ......................
//  |                   |                             :                    :
//  |                   v                             v                +---+---+
//  |     TraceRoot  TraceEdge  TraceRange       GCMarker::            |       |
//  |         |          |          |        processMarkStackTop       | Mark  |
//  |         +----------+----------+                 |                | Stack |
//  |                    |                            |                |       |
//  |                    v                            |                +---+---+
//  |            TraceEdgeInternal                    |                    ^
//  |                    |           +<---------------+                    :
//  |                    |           |                |                    :
//  |                    v           v                |                    :
//  |      CallbackTracer::   markAndTraverseEdge     |                    :
//  |         onSomeEdge             |                |                    :
//  |             |                  |                |                    :
//  |             |                  |                |                    :
//  |             +--------+---------+                |                    :
//  |                      |                          |                    :
//  |                      v                          |                    :
//  |               markAndTraverse                   |                    :
//  |                      |                          |                    :
//  |                      |                          |                    :
//  |                   traverse                      |                    :
//  |                      |                          |                    :
//  |     +----------------+-------------------+      |                    :
//  |     |                |                   |      |                    :
//  |     v                v                   v      |                    :
//  | markAndTraceChildren markAndPush  eagerlyMarkChildren                :
//  |     |                :                   |                           :
//  |     v                :                   +-----------+               :
//  | T::traceChildren     :                                               :
//  |     |                :                                               :
//  +-----+                ......................................
//
// Legend:
//   ------- Direct calls
//   ....... Data flow
//
/* clang-format on */

/*** Tracing Invariants *****************************************************/

template <typename T>
static inline bool IsOwnedByOtherRuntime(JSRuntime* rt, T thing) {
  bool other = thing->runtimeFromAnyThread() != rt;
  MOZ_ASSERT_IF(other, thing->isPermanentAndMayBeShared());
  return other;
}

#ifdef DEBUG

static inline bool IsInFreeList(TenuredCell* cell) {
  Arena* arena = cell->arena();
  uintptr_t addr = reinterpret_cast<uintptr_t>(cell);
  MOZ_ASSERT(Arena::isAligned(addr, arena->getThingSize()));
  return arena->inFreeList(addr);
}

template <typename T>
void js::CheckTracedThing(JSTracer* trc, T* thing) {
  MOZ_ASSERT(trc);
  MOZ_ASSERT(thing);

  if (IsForwarded(thing)) {
    JS::TracerKind kind = trc->kind();
    MOZ_ASSERT(kind == JS::TracerKind::Tenuring ||
               kind == JS::TracerKind::MinorSweeping ||
               kind == JS::TracerKind::Moving);
    thing = Forwarded(thing);
  }

  /* This function uses data that's not available in the nursery. */
  if (IsInsideNursery(thing)) {
    return;
  }

  /*
   * Permanent shared things that are not associated with this runtime will be
   * ignored during marking.
   */
  Zone* zone = thing->zoneFromAnyThread();
  if (IsOwnedByOtherRuntime(trc->runtime(), thing)) {
    MOZ_ASSERT(!zone->wasGCStarted());
    MOZ_ASSERT(thing->isMarkedBlack());
    return;
  }

  JSRuntime* rt = trc->runtime();
  MOZ_ASSERT(zone->runtimeFromAnyThread() == rt);

  bool isGcMarkingTracer = trc->isMarkingTracer();
  bool isUnmarkGrayTracer = IsTracerKind(trc, JS::TracerKind::UnmarkGray);
  bool isClearEdgesTracer = IsTracerKind(trc, JS::TracerKind::ClearEdges);

  if (TlsContext.get()) {
    // If we're on the main thread we must have access to the runtime and zone.
    MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
    MOZ_ASSERT(CurrentThreadCanAccessZone(zone));
  } else {
    MOZ_ASSERT(isGcMarkingTracer || isUnmarkGrayTracer || isClearEdgesTracer ||
               IsTracerKind(trc, JS::TracerKind::Moving) ||
               IsTracerKind(trc, JS::TracerKind::Sweeping));
    MOZ_ASSERT_IF(!isClearEdgesTracer, CurrentThreadIsPerformingGC());
  }

  MOZ_ASSERT(thing->isAligned());
  MOZ_ASSERT(MapTypeToTraceKind<std::remove_pointer_t<T>>::kind ==
             thing->getTraceKind());

  /*
   * Check that we only mark allocated cells.
   *
   * This check is restricted to marking for two reasons: Firstly, if background
   * sweeping is running and concurrently modifying the free list then it is not
   * safe. Secondly, it was thought to be slow so this is a compromise so as to
   * not affect test times too much.
   */
  MOZ_ASSERT_IF(zone->isGCMarking(), !IsInFreeList(&thing->asTenured()));
}

template <typename T>
void js::CheckTracedThing(JSTracer* trc, const T& thing) {
  ApplyGCThingTyped(thing, [trc](auto t) { CheckTracedThing(trc, t); });
}

template <typename T>
static void CheckMarkedThing(GCMarker* gcMarker, T* thing) {
  Zone* zone = thing->zoneFromAnyThread();

  MOZ_ASSERT(zone->shouldMarkInZone(gcMarker->markColor()));

  MOZ_ASSERT_IF(gcMarker->shouldCheckCompartments(),
                zone->isCollectingFromAnyThread() || zone->isAtomsZone());

  MOZ_ASSERT_IF(gcMarker->markColor() == MarkColor::Gray,
                !zone->isGCMarkingBlackOnly() || zone->isAtomsZone());

  MOZ_ASSERT(!(zone->isGCSweeping() || zone->isGCFinished() ||
               zone->isGCCompacting()));

  // Check that we don't stray from the current compartment and zone without
  // using TraceCrossCompartmentEdge.
  Compartment* comp = thing->maybeCompartment();
  MOZ_ASSERT_IF(gcMarker->tracingCompartment && comp,
                gcMarker->tracingCompartment == comp);
  MOZ_ASSERT_IF(gcMarker->tracingZone,
                gcMarker->tracingZone == zone || zone->isAtomsZone());
}

namespace js {

#  define IMPL_CHECK_TRACED_THING(_, type, _1, _2) \
    template void CheckTracedThing<type>(JSTracer*, type*);
JS_FOR_EACH_TRACEKIND(IMPL_CHECK_TRACED_THING);
#  undef IMPL_CHECK_TRACED_THING

template void CheckTracedThing<Value>(JSTracer*, const Value&);

}  // namespace js

#endif

static inline bool ShouldMarkCrossCompartment(GCMarker* marker, JSObject* src,
                                              Cell* dstCell) {
  MarkColor color = marker->markColor();

  if (!dstCell->isTenured()) {
#ifdef DEBUG
    // Bug 1743098: This shouldn't be possible but it does seem to happen. Log
    // some useful information in debug builds.
    if (color != MarkColor::Black) {
      fprintf(stderr,
              "ShouldMarkCrossCompartment: cross compartment edge from gray "
              "object to nursery thing\n");
      fprintf(stderr, "src: ");
      src->dump();
      fprintf(stderr, "dst: ");
      dstCell->dump();
    }
#endif
    MOZ_ASSERT(color == MarkColor::Black);
    return false;
  }
  TenuredCell& dst = dstCell->asTenured();

  JS::Zone* dstZone = dst.zone();
  if (!src->zone()->isGCMarking() && !dstZone->isGCMarking()) {
    return false;
  }

  if (color == MarkColor::Black) {
    // Check our sweep groups are correct: we should never have to
    // mark something in a zone that we have started sweeping.
    MOZ_ASSERT_IF(!dst.isMarkedBlack(), !dstZone->isGCSweeping());

    /*
     * Having black->gray edges violates our promise to the cycle collector so
     * we ensure that gray things we encounter when marking black end up getting
     * marked black.
     *
     * This can happen for two reasons:
     *
     * 1) If we're collecting a compartment and it has an edge to an uncollected
     * compartment it's possible that the source and destination of the
     * cross-compartment edge should be gray, but the source was marked black by
     * the write barrier.
     *
     * 2) If we yield during gray marking and the write barrier marks a gray
     * thing black.
     *
     * We handle the first case before returning whereas the second case happens
     * as part of normal marking.
     */
    if (dst.isMarkedGray() && !dstZone->isGCMarking()) {
      UnmarkGrayGCThingUnchecked(marker,
                                 JS::GCCellPtr(&dst, dst.getTraceKind()));
      return false;
    }

    return dstZone->isGCMarking();
  } else {
    // Check our sweep groups are correct as above.
    MOZ_ASSERT_IF(!dst.isMarkedAny(), !dstZone->isGCSweeping());

    if (dstZone->isGCMarkingBlackOnly()) {
      /*
       * The destination compartment is not being marked gray now,
       * but it will be later, so record the cell so it can be marked gray
       * at the appropriate time.
       */
      if (!dst.isMarkedAny()) {
        DelayCrossCompartmentGrayMarking(marker, src);
      }
      return false;
    }

    return dstZone->isGCMarkingBlackAndGray();
  }
}

static bool ShouldTraceCrossCompartment(JSTracer* trc, JSObject* src,
                                        Cell* dstCell) {
  if (!trc->isMarkingTracer()) {
    return true;
  }

  return ShouldMarkCrossCompartment(GCMarker::fromTracer(trc), src, dstCell);
}

static bool ShouldTraceCrossCompartment(JSTracer* trc, JSObject* src,
                                        const Value& val) {
  return val.isGCThing() &&
         ShouldTraceCrossCompartment(trc, src, val.toGCThing());
}

#ifdef DEBUG

inline void js::gc::AssertShouldMarkInZone(GCMarker* marker, Cell* thing) {
  if (!thing->isMarkedBlack()) {
    Zone* zone = thing->zone();
    MOZ_ASSERT(zone->isAtomsZone() ||
               zone->shouldMarkInZone(marker->markColor()));
  }
}

void js::gc::AssertRootMarkingPhase(JSTracer* trc) {
  MOZ_ASSERT_IF(trc->isMarkingTracer(),
                trc->runtime()->gc.state() == State::NotActive ||
                    trc->runtime()->gc.state() == State::MarkRoots);
}

#endif  // DEBUG

/*** Tracing Interface ******************************************************/

template <typename T>
static void TraceExternalEdgeHelper(JSTracer* trc, T* thingp,
                                    const char* name) {
  MOZ_ASSERT(InternalBarrierMethods<T>::isMarkable(*thingp));
  TraceEdgeInternal(trc, ConvertToBase(thingp), name);
}

JS_PUBLIC_API void js::UnsafeTraceManuallyBarrieredEdge(JSTracer* trc,
                                                        JSObject** thingp,
                                                        const char* name) {
  TraceEdgeInternal(trc, ConvertToBase(thingp), name);
}

template <typename T>
static void TraceRootHelper(JSTracer* trc, T* thingp, const char* name) {
  MOZ_ASSERT(thingp);
  js::TraceNullableRoot(trc, thingp, name);
}

namespace js {
class AbstractGeneratorObject;
class SavedFrame;
}  // namespace js

#define DEFINE_TRACE_EXTERNAL_EDGE_FUNCTION(type)                           \
  JS_PUBLIC_API void js::gc::TraceExternalEdge(JSTracer* trc, type* thingp, \
                                               const char* name) {          \
    TraceExternalEdgeHelper(trc, thingp, name);                             \
  }

// Define TraceExternalEdge for each public GC pointer type.
JS_FOR_EACH_PUBLIC_GC_POINTER_TYPE(DEFINE_TRACE_EXTERNAL_EDGE_FUNCTION)
JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(DEFINE_TRACE_EXTERNAL_EDGE_FUNCTION)

#undef DEFINE_TRACE_EXTERNAL_EDGE_FUNCTION

#define DEFINE_UNSAFE_TRACE_ROOT_FUNCTION(type)                 \
  JS_PUBLIC_API void JS::TraceRoot(JSTracer* trc, type* thingp, \
                                   const char* name) {          \
    TraceRootHelper(trc, thingp, name);                         \
  }

// Define TraceRoot for each public GC pointer type.
JS_FOR_EACH_PUBLIC_GC_POINTER_TYPE(DEFINE_UNSAFE_TRACE_ROOT_FUNCTION)
JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(DEFINE_UNSAFE_TRACE_ROOT_FUNCTION)

// Also, for the moment, define TraceRoot for internal GC pointer types.
DEFINE_UNSAFE_TRACE_ROOT_FUNCTION(AbstractGeneratorObject*)
DEFINE_UNSAFE_TRACE_ROOT_FUNCTION(SavedFrame*)

#undef DEFINE_UNSAFE_TRACE_ROOT_FUNCTION

namespace js {
namespace gc {

#define INSTANTIATE_INTERNAL_TRACE_FUNCTIONS(type)                     \
  template void TraceRangeInternal<type>(JSTracer*, size_t len, type*, \
                                         const char*);

#define INSTANTIATE_INTERNAL_TRACE_FUNCTIONS_FROM_TRACEKIND(_1, type, _2, _3) \
  INSTANTIATE_INTERNAL_TRACE_FUNCTIONS(type*)

JS_FOR_EACH_TRACEKIND(INSTANTIATE_INTERNAL_TRACE_FUNCTIONS_FROM_TRACEKIND)
JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(INSTANTIATE_INTERNAL_TRACE_FUNCTIONS)
INSTANTIATE_INTERNAL_TRACE_FUNCTIONS(TaggedProto)

#undef INSTANTIATE_INTERNAL_TRACE_FUNCTIONS_FROM_TRACEKIND
#undef INSTANTIATE_INTERNAL_TRACE_FUNCTIONS

}  // namespace gc
}  // namespace js

// In debug builds, makes a note of the current compartment before calling a
// trace hook or traceChildren() method on a GC thing.
class MOZ_RAII AutoSetTracingSource {
#ifndef DEBUG
 public:
  template <typename T>
  AutoSetTracingSource(JSTracer* trc, T* thing) {}
  ~AutoSetTracingSource() {}
#else
  GCMarker* marker = nullptr;

 public:
  template <typename T>
  AutoSetTracingSource(JSTracer* trc, T* thing) {
    if (trc->isMarkingTracer() && thing) {
      marker = GCMarker::fromTracer(trc);
      MOZ_ASSERT(!marker->tracingZone);
      marker->tracingZone = thing->asTenured().zone();
      MOZ_ASSERT(!marker->tracingCompartment);
      marker->tracingCompartment = thing->maybeCompartment();
    }
  }

  ~AutoSetTracingSource() {
    if (marker) {
      marker->tracingZone = nullptr;
      marker->tracingCompartment = nullptr;
    }
  }
#endif
};

// In debug builds, clear the trace hook compartment. This happens after the
// trace hook has called back into one of our trace APIs and we've checked the
// traced thing.
class MOZ_RAII AutoClearTracingSource {
#ifndef DEBUG
 public:
  explicit AutoClearTracingSource(GCMarker* marker) {}
  explicit AutoClearTracingSource(JSTracer* trc) {}
  ~AutoClearTracingSource() {}
#else
  GCMarker* marker = nullptr;
  JS::Zone* prevZone = nullptr;
  Compartment* prevCompartment = nullptr;

 public:
  explicit AutoClearTracingSource(JSTracer* trc) {
    if (trc->isMarkingTracer()) {
      marker = GCMarker::fromTracer(trc);
      prevZone = marker->tracingZone;
      marker->tracingZone = nullptr;
      prevCompartment = marker->tracingCompartment;
      marker->tracingCompartment = nullptr;
    }
  }

  ~AutoClearTracingSource() {
    if (marker) {
      marker->tracingZone = prevZone;
      marker->tracingCompartment = prevCompartment;
    }
  }
#endif
};

template <typename T>
void js::TraceManuallyBarrieredCrossCompartmentEdge(JSTracer* trc,
                                                    JSObject* src, T* dst,
                                                    const char* name) {
  // Clear expected compartment for cross-compartment edge.
  AutoClearTracingSource acts(trc);

  if (ShouldTraceCrossCompartment(trc, src, *dst)) {
    TraceEdgeInternal(trc, dst, name);
  }
}
template void js::TraceManuallyBarrieredCrossCompartmentEdge<Value>(
    JSTracer*, JSObject*, Value*, const char*);
template void js::TraceManuallyBarrieredCrossCompartmentEdge<JSObject*>(
    JSTracer*, JSObject*, JSObject**, const char*);
template void js::TraceManuallyBarrieredCrossCompartmentEdge<BaseScript*>(
    JSTracer*, JSObject*, BaseScript**, const char*);

template <typename T>
void js::TraceSameZoneCrossCompartmentEdge(JSTracer* trc,
                                           const WriteBarriered<T>* dst,
                                           const char* name) {
#ifdef DEBUG
  if (trc->isMarkingTracer()) {
    MOZ_ASSERT((*dst)->maybeCompartment(),
               "Use TraceEdge for GC things without a compartment");

    GCMarker* gcMarker = GCMarker::fromTracer(trc);
    MOZ_ASSERT_IF(gcMarker->tracingZone,
                  (*dst)->zone() == gcMarker->tracingZone);
  }

  // Skip compartment checks for this edge.
  if (trc->kind() == JS::TracerKind::CompartmentCheck) {
    return;
  }
#endif

  // Clear expected compartment for cross-compartment edge.
  AutoClearTracingSource acts(trc);
  TraceEdgeInternal(trc, ConvertToBase(dst->unbarrieredAddress()), name);
}
template void js::TraceSameZoneCrossCompartmentEdge(
    JSTracer*, const WriteBarriered<Shape*>*, const char*);

template <typename T>
void js::TraceWeakMapKeyEdgeInternal(JSTracer* trc, Zone* weakMapZone,
                                     T** thingp, const char* name) {
  // We can't use ShouldTraceCrossCompartment here because that assumes the
  // source of the edge is a CCW object which could be used to delay gray
  // marking. Instead, assert that the weak map zone is in the same marking
  // state as the target thing's zone and therefore we can go ahead and mark it.
#ifdef DEBUG
  auto thing = *thingp;
  if (trc->isMarkingTracer()) {
    MOZ_ASSERT(weakMapZone->isGCMarking());
    MOZ_ASSERT(weakMapZone->gcState() == thing->zone()->gcState());
  }
#endif

  // Clear expected compartment for cross-compartment edge.
  AutoClearTracingSource acts(trc);

  TraceEdgeInternal(trc, thingp, name);
}

template void js::TraceWeakMapKeyEdgeInternal<JSObject>(JSTracer*, Zone*,
                                                        JSObject**,
                                                        const char*);
template void js::TraceWeakMapKeyEdgeInternal<BaseScript>(JSTracer*, Zone*,
                                                          BaseScript**,
                                                          const char*);

static Cell* TraceGenericPointerRootAndType(JSTracer* trc, Cell* thing,
                                            JS::TraceKind kind,
                                            const char* name) {
  return MapGCThingTyped(thing, kind, [trc, name](auto t) -> Cell* {
    TraceRoot(trc, &t, name);
    return t;
  });
}

void js::TraceGenericPointerRoot(JSTracer* trc, Cell** thingp,
                                 const char* name) {
  MOZ_ASSERT(thingp);
  Cell* thing = *thingp;
  if (!thing) {
    return;
  }

  Cell* traced =
      TraceGenericPointerRootAndType(trc, thing, thing->getTraceKind(), name);
  if (traced != thing) {
    *thingp = traced;
  }
}

void js::TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc, Cell** thingp,
                                                  const char* name) {
  MOZ_ASSERT(thingp);
  Cell* thing = *thingp;
  if (!*thingp) {
    return;
  }

  auto traced = MapGCThingTyped(thing, thing->getTraceKind(),
                                [trc, name](auto t) -> Cell* {
                                  TraceManuallyBarrieredEdge(trc, &t, name);
                                  return t;
                                });
  if (traced != thing) {
    *thingp = traced;
  }
}

void js::TraceGCCellPtrRoot(JSTracer* trc, JS::GCCellPtr* thingp,
                            const char* name) {
  Cell* thing = thingp->asCell();
  if (!thing) {
    return;
  }

  Cell* traced =
      TraceGenericPointerRootAndType(trc, thing, thingp->kind(), name);

  if (!traced) {
    *thingp = JS::GCCellPtr();
  } else if (traced != thingp->asCell()) {
    *thingp = JS::GCCellPtr(traced, thingp->kind());
  }
}

void js::TraceManuallyBarrieredGCCellPtr(JSTracer* trc, JS::GCCellPtr* thingp,
                                         const char* name) {
  Cell* thing = thingp->asCell();
  if (!thing) {
    return;
  }

  Cell* traced = MapGCThingTyped(thing, thing->getTraceKind(),
                                 [trc, name](auto t) -> Cell* {
                                   TraceManuallyBarrieredEdge(trc, &t, name);
                                   return t;
                                 });

  if (!traced) {
    // If we are clearing edges, also erase the type. This happens when using
    // ClearEdgesTracer.
    *thingp = JS::GCCellPtr();
  } else if (traced != thingp->asCell()) {
    *thingp = JS::GCCellPtr(traced, thingp->kind());
  }
}

template <typename T>
inline bool TraceTaggedPtrEdge(JSTracer* trc, T* thingp, const char* name) {
  // Return true by default. For some types the lambda below won't be called.
  bool ret = true;
  auto thing = MapGCThingTyped(*thingp, [&](auto thing) {
    if (!TraceEdgeInternal(trc, &thing, name)) {
      ret = false;
      return TaggedPtr<T>::empty();
    }

    return TaggedPtr<T>::wrap(thing);
  });

  // Only update *thingp if the value changed, to avoid TSan false positives for
  // template objects when using DumpHeapTracer or UbiNode tracers while Ion
  // compiling off-thread.
  if (thing.isSome() && thing.value() != *thingp) {
    *thingp = thing.value();
  }

  return ret;
}

bool js::gc::TraceEdgeInternal(JSTracer* trc, Value* thingp, const char* name) {
  return TraceTaggedPtrEdge(trc, thingp, name);
}
bool js::gc::TraceEdgeInternal(JSTracer* trc, jsid* thingp, const char* name) {
  return TraceTaggedPtrEdge(trc, thingp, name);
}
bool js::gc::TraceEdgeInternal(JSTracer* trc, TaggedProto* thingp,
                               const char* name) {
  return TraceTaggedPtrEdge(trc, thingp, name);
}

template <typename T>
void js::gc::TraceRangeInternal(JSTracer* trc, size_t len, T* vec,
                                const char* name) {
  JS::AutoTracingIndex index(trc);
  for (auto i : IntegerRange(len)) {
    if (InternalBarrierMethods<T>::isMarkable(vec[i])) {
      TraceEdgeInternal(trc, &vec[i], name);
    }
    ++index;
  }
}

684 namespace js {
686 using HasNoImplicitEdgesType = bool;
688 template <typename T>
689 struct ImplicitEdgeHolderType {
690 using Type = HasNoImplicitEdgesType;
693 // For now, we only handle JSObject* and BaseScript* keys, but the linear time
694 // algorithm can be easily extended by adding in more types here, then making
695 // GCMarker::traverse<T> call markImplicitEdges.
696 template <>
697 struct ImplicitEdgeHolderType<JSObject*> {
698 using Type = JSObject*;
701 template <>
702 struct ImplicitEdgeHolderType<BaseScript*> {
703 using Type = BaseScript*;
void GCMarker::markEphemeronEdges(EphemeronEdgeVector& edges,
                                  gc::CellColor srcColor) {
  // This is called as part of GC weak marking or by barriers outside of GC.
  MOZ_ASSERT_IF(CurrentThreadIsPerformingGC(),
                state == MarkingState::WeakMarking);

  DebugOnly<size_t> initialLength = edges.length();

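  // A weak map entry's value is only as live as the weaker of the map and the
  // key, so each target is marked with the minimum of the source's color and
  // the edge's color: e.g. a black source reached over a gray edge yields a
  // gray target.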
  for (auto& edge : edges) {
    CellColor targetColor = std::min(srcColor, edge.color);
    MOZ_ASSERT(CellColor(markColor()) >= targetColor);
    if (targetColor == markColor()) {
      ApplyGCThingTyped(
          edge.target, edge.target->getTraceKind(),
          [this](auto t) { markAndTraverse<NormalMarkingOptions>(t); });
    }
  }

  // The above marking always goes through markAndPush, which will not cause
  // 'edges' to be appended to while iterating.
  MOZ_ASSERT(edges.length() == initialLength);

  // This is not just an optimization. When nuking a CCW, we conservatively
  // mark through the related edges and then lose the CCW->target connection
  // that induces a sweep group edge. As a result, it is possible for the
  // delegate zone to get marked later, look up an edge in this table, and
  // then try to mark something in a Zone that is no longer marking.
  if (srcColor == CellColor::Black && markColor() == MarkColor::Black) {
    edges.eraseIf([](auto& edge) { return edge.color == MarkColor::Black; });
  }
}

// 'delegate' is no longer the delegate of 'key'.
void GCMarker::severWeakDelegate(JSObject* key, JSObject* delegate) {
  MOZ_ASSERT(CurrentThreadIsMainThread());

  JS::Zone* zone = delegate->zone();
  if (!zone->needsIncrementalBarrier()) {
    MOZ_ASSERT(
        !zone->gcEphemeronEdges(delegate).get(delegate),
        "non-collecting zone should not have populated gcEphemeronEdges");
    return;
  }
  auto* p = zone->gcEphemeronEdges(delegate).get(delegate);
  if (!p) {
    return;
  }

  // We are losing 3 edges here: key -> delegate, delegate -> key, and
  // <delegate, map> -> value. Maintain snapshot-at-beginning (hereafter,
  // S-A-B) by conservatively assuming the delegate will end up black and
  // marking through the latter 2 edges.
  //
  // Note that this does not fully give S-A-B:
  //
  // 1. If the map is gray, then the value will only be marked gray here even
  // though the map could later be discovered to be black.
  //
  // 2. If the map has not yet been marked, we won't have any entries to mark
  // here in the first place.
  //
  // 3. We're not marking the delegate, since that would cause eg nukeAllCCWs
  // to keep everything alive for another collection.
  //
  // We can't even assume that the delegate passed in here is live, because we
  // could have gotten here from nukeAllCCWs, which iterates over all CCWs
  // including dead ones.
  //
  // This is ok because S-A-B is only needed to prevent the case where an
  // unmarked object is removed from the graph and then re-inserted where it is
  // reachable only by things that have already been marked. None of the 3
  // target objects will be re-inserted anywhere as a result of this action.

  EphemeronEdgeVector& edges = p->value;
  MOZ_ASSERT(markColor() == MarkColor::Black);
  markEphemeronEdges(edges, MarkColor::Black);
}

// 'delegate' is now the delegate of 'key'. Update weakmap marking state.
void GCMarker::restoreWeakDelegate(JSObject* key, JSObject* delegate) {
  MOZ_ASSERT(CurrentThreadIsMainThread());

  if (!key->zone()->needsIncrementalBarrier()) {
    // Temporary diagnostic printouts for when this would have asserted.
    if (key->zone()->gcEphemeronEdges(key).has(key)) {
      fprintf(stderr, "key zone: %d\n", int(key->zone()->gcState()));
#ifdef DEBUG
      key->dump();
#endif
      fprintf(stderr, "delegate zone: %d\n", int(delegate->zone()->gcState()));
#ifdef DEBUG
      delegate->dump();
#endif
    }
    MOZ_ASSERT(
        !key->zone()->gcEphemeronEdges(key).has(key),
        "non-collecting zone should not have populated gcEphemeronEdges");
    return;
  }
  if (!delegate->zone()->needsIncrementalBarrier()) {
    // Normally we should not have added the key -> value edge if the delegate
    // zone is not marking (because the delegate would have been seen as black,
    // so we would mark the key immediately instead). But if there wasn't a
    // delegate (the key was nuked), then we won't have consulted it. So we
    // can't do the same assertion as above.
    //
    // Specifically, the sequence would be:
    // 1. Nuke the key.
    // 2. Start the incremental GC.
    // 3. Mark the WeakMap. Insert a key->value edge with a DeadObjectProxy key.
    // 4. Un-nuke the key with a delegate in a nonmarking Zone.
    //
    // The result is an ephemeron edge (from <map,key> to value, but stored
    // as key to value) involving a key with a delegate in a nonmarking Zone,
    // something that ordinarily would not happen.
    return;
  }
  auto* p = key->zone()->gcEphemeronEdges(key).get(key);
  if (!p) {
    return;
  }

  // Similar to severWeakDelegate above, mark through the key -> value edge.
  EphemeronEdgeVector& edges = p->value;
  MOZ_ASSERT(markColor() == MarkColor::Black);
  markEphemeronEdges(edges, MarkColor::Black);
}

template <typename T>
void GCMarker::markImplicitEdgesHelper(T markedThing) {
  if (!isWeakMarking()) {
    return;
  }

  Zone* zone = markedThing->asTenured().zone();
  MOZ_ASSERT(zone->isGCMarking());
  MOZ_ASSERT(!zone->isGCSweeping());

  auto p = zone->gcEphemeronEdges().get(markedThing);
  if (!p) {
    return;
  }
  EphemeronEdgeVector& edges = p->value;

  // markedThing might be a key in a debugger weakmap, which can end up marking
  // values that are in a different compartment.
  AutoClearTracingSource acts(tracer());

  CellColor thingColor = gc::detail::GetEffectiveColor(this, markedThing);
  markEphemeronEdges(edges, thingColor);
}

template <>
void GCMarker::markImplicitEdgesHelper(HasNoImplicitEdgesType) {}

template <typename T>
void GCMarker::markImplicitEdges(T* thing) {
  markImplicitEdgesHelper<typename ImplicitEdgeHolderType<T*>::Type>(thing);
}

template void GCMarker::markImplicitEdges(JSObject*);
template void GCMarker::markImplicitEdges(BaseScript*);

}  // namespace js

template <typename T>
static inline bool ShouldMark(GCMarker* gcmarker, T* thing) {
  // We may encounter nursery things during normal marking since we don't
  // collect the nursery at the start of every GC slice.
  if (!thing->isTenured()) {
    return false;
  }

  // Don't mark things outside a zone if we are in a per-zone GC. Don't mark
  // permanent shared things owned by other runtimes (we will never observe
  // their zone being collected).
  Zone* zone = thing->asTenured().zoneFromAnyThread();
  return zone->shouldMarkInZone(gcmarker->markColor());
}

template <uint32_t opts>
MarkingTracerT<opts>::MarkingTracerT(JSRuntime* runtime, GCMarker* marker)
    : GenericTracerImpl<MarkingTracerT<opts>>(
          runtime, JS::TracerKind::Marking,
          JS::TraceOptions(JS::WeakMapTraceAction::Expand,
                           JS::WeakEdgeTraceAction::Skip)) {
  // Marking tracers are owned by (and part of) a GCMarker.
  MOZ_ASSERT(this == marker->tracer());
  MOZ_ASSERT(getMarker() == marker);
}

template <uint32_t opts>
MOZ_ALWAYS_INLINE GCMarker* MarkingTracerT<opts>::getMarker() {
  return GCMarker::fromTracer(this);
}

template <uint32_t opts>
template <typename T>
void MarkingTracerT<opts>::onEdge(T** thingp, const char* name) {
  T* thing = *thingp;

  // Do per-type marking precondition checks.
  GCMarker* marker = getMarker();
  if (!ShouldMark(marker, thing)) {
    MOZ_ASSERT(gc::detail::GetEffectiveColor(marker, thing) ==
               js::gc::CellColor::Black);
    return;
  }

  MOZ_ASSERT(!IsOwnedByOtherRuntime(this->runtime(), thing));

#ifdef DEBUG
  CheckMarkedThing(marker, thing);
#endif

  AutoClearTracingSource acts(this);
  marker->markAndTraverse<opts>(thing);
}

#define INSTANTIATE_ONEDGE_METHOD(name, type, _1, _2)                 \
  template void MarkingTracerT<NormalMarkingOptions>::onEdge<type>(   \
      type** thingp, const char* name);                               \
  template void                                                       \
  MarkingTracerT<MarkingOptions::MarkRootCompartments>::onEdge<type>( \
      type** thingp, const char* name);
JS_FOR_EACH_TRACEKIND(INSTANTIATE_ONEDGE_METHOD)
#undef INSTANTIATE_ONEDGE_METHOD

static void TraceEdgeForBarrier(GCMarker* gcmarker, TenuredCell* thing,
                                JS::TraceKind kind) {
  // Dispatch to markAndTraverse without checking ShouldMark.
  ApplyGCThingTyped(thing, kind, [gcmarker](auto thing) {
    MOZ_ASSERT(ShouldMark(gcmarker, thing));
    CheckTracedThing(gcmarker->tracer(), thing);
    AutoClearTracingSource acts(gcmarker->tracer());
    gcmarker->markAndTraverse<NormalMarkingOptions>(thing);
  });
}

JS_PUBLIC_API void js::gc::PerformIncrementalReadBarrier(JS::GCCellPtr thing) {
  // Optimized marking for read barriers. This is called from
  // ExposeGCThingToActiveJS which has already checked the prerequisites for
  // performing a read barrier. This means we can skip a bunch of checks and
  // call into the tracer directly.

  MOZ_ASSERT(thing);
  MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());

  TenuredCell* cell = &thing.asCell()->asTenured();
  MOZ_ASSERT(!cell->isMarkedBlack());

  Zone* zone = cell->zone();
  MOZ_ASSERT(zone->needsIncrementalBarrier());

  // Skip dispatching on known tracer type.
  GCMarker* gcmarker = GCMarker::fromTracer(zone->barrierTracer());
  TraceEdgeForBarrier(gcmarker, cell, thing.kind());
}

void js::gc::PerformIncrementalReadBarrier(TenuredCell* cell) {
  // Internal version of previous function.

  MOZ_ASSERT(cell);
  MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());

  if (cell->isMarkedBlack()) {
    return;
  }

  Zone* zone = cell->zone();
  MOZ_ASSERT(zone->needsIncrementalBarrier());

  // Skip dispatching on known tracer type.
  GCMarker* gcmarker = GCMarker::fromTracer(zone->barrierTracer());
  TraceEdgeForBarrier(gcmarker, cell, cell->getTraceKind());
}

void js::gc::PerformIncrementalPreWriteBarrier(TenuredCell* cell) {
  // The same as PerformIncrementalReadBarrier except for an extra check on the
  // runtime for cells in atoms zone.

  Zone* zone = cell->zoneFromAnyThread();
  MOZ_ASSERT(zone->needsIncrementalBarrier());

  MOZ_ASSERT(cell);
  if (cell->isMarkedBlack()) {
    return;
  }

  // Barriers can be triggered off the main thread by background finalization of
  // HeapPtrs to the atoms zone. We don't want to trigger the barrier in this
  // case.
  bool checkThread = zone->isAtomsZone();
  JSRuntime* runtime = cell->runtimeFromAnyThread();
  if (checkThread && !CurrentThreadCanAccessRuntime(runtime)) {
    MOZ_ASSERT(CurrentThreadIsGCFinalizing());
    return;
  }

  MOZ_ASSERT(CurrentThreadIsMainThread());
  MOZ_ASSERT(!JS::RuntimeHeapIsMajorCollecting());

  // Skip dispatching on known tracer type.
  GCMarker* gcmarker = GCMarker::fromTracer(zone->barrierTracer());
  TraceEdgeForBarrier(gcmarker, cell, cell->getTraceKind());
}

void js::gc::PerformIncrementalBarrierDuringFlattening(JSString* str) {
  TenuredCell* cell = &str->asTenured();

  // Skip eager marking of ropes during flattening. Their children will also be
  // barriered by the flattening process so we don't need to traverse them.
  if (str->isRope()) {
    cell->markBlack();
    return;
  }

  PerformIncrementalPreWriteBarrier(cell);
}

template <uint32_t opts, typename T>
void js::GCMarker::markAndTraverse(T* thing) {
  if (mark<opts>(thing)) {
    // We only mark permanent things during initialization.
    MOZ_ASSERT_IF(thing->isPermanentAndMayBeShared(),
                  !runtime()->permanentAtomsPopulated());

    // We don't need to pass MarkRootCompartments options on to children.
    constexpr uint32_t traverseOpts =
        opts & ~MarkingOptions::MarkRootCompartments;

    traverse<traverseOpts>(thing);

    if constexpr (bool(opts & MarkingOptions::MarkRootCompartments)) {
      // Mark the compartment as live.
      SetCompartmentHasMarkedCells(thing);
    }
  }
}

// The |traverse| method overloads select the traversal strategy for each kind.
//
// There are three possible strategies:
//
// 1. traceChildren
//
//    The simplest traversal calls out to the fully generic traceChildren
//    function to visit the child edges. In the absence of other traversal
//    mechanisms, this function will rapidly grow the stack past its bounds and
//    crash the process. Thus, this generic tracing should only be used in cases
//    where subsequent tracing will not recurse.
//
// 2. scanChildren
//
//    Strings, Shapes, and Scopes are extremely common, but have simple patterns
//    of recursion. We traverse trees of these edges immediately, with
//    aggressive, manual inlining, implemented by eagerlyTraceChildren.
//
// 3. pushThing
//
//    Objects are extremely common and can contain arbitrarily nested graphs, so
//    are not trivially inlined. In this case we use the mark stack to control
//    recursion. JitCode shares none of these properties, but is included for
//    historical reasons. JSScript normally cannot recurse, but may be used as a
//    weakmap key and thereby recurse into weakmapped values.

template <uint32_t opts>
void GCMarker::traverse(BaseShape* thing) {
  traceChildren<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(GetterSetter* thing) {
  traceChildren<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(JS::Symbol* thing) {
  traceChildren<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(JS::BigInt* thing) {
  traceChildren<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(RegExpShared* thing) {
  traceChildren<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(JSString* thing) {
  scanChildren<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(Shape* thing) {
  scanChildren<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(PropMap* thing) {
  scanChildren<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(js::Scope* thing) {
  scanChildren<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(JSObject* thing) {
  pushThing<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(jit::JitCode* thing) {
  pushThing<opts>(thing);
}
template <uint32_t opts>
void GCMarker::traverse(BaseScript* thing) {
  pushThing<opts>(thing);
}

template <uint32_t opts, typename T>
void js::GCMarker::traceChildren(T* thing) {
  MOZ_ASSERT(!thing->isPermanentAndMayBeShared());
  MOZ_ASSERT(thing->isMarkedAny());
  AutoSetTracingSource asts(tracer(), thing);
  thing->traceChildren(tracer());
}

template <uint32_t opts, typename T>
void js::GCMarker::scanChildren(T* thing) {
  MOZ_ASSERT(!thing->isPermanentAndMayBeShared());
  MOZ_ASSERT(thing->isMarkedAny());
  eagerlyMarkChildren<opts>(thing);
}

template <uint32_t opts, typename T>
void js::GCMarker::pushThing(T* thing) {
  MOZ_ASSERT(!thing->isPermanentAndMayBeShared());
  MOZ_ASSERT(thing->isMarkedAny());
  pushTaggedPtr(thing);
}

template void js::GCMarker::markAndTraverse<NormalMarkingOptions, JSObject>(
    JSObject* thing);
template void js::GCMarker::markAndTraverse<
    MarkingOptions::MarkRootCompartments, JSObject>(JSObject* thing);

#ifdef DEBUG
void GCMarker::setCheckAtomMarking(bool check) {
  MOZ_ASSERT(check != checkAtomMarking);
  checkAtomMarking = check;
}
#endif

template <typename S, typename T>
inline void GCMarker::checkTraversedEdge(S source, T* target) {
#ifdef DEBUG
  // Atoms and Symbols do not have or mark their internal pointers,
  // respectively.
  MOZ_ASSERT(!source->isPermanentAndMayBeShared());

  // Shared things are already black so we will not mark them.
  if (target->isPermanentAndMayBeShared()) {
    Zone* zone = target->zoneFromAnyThread();
    MOZ_ASSERT(!zone->wasGCStarted());
    MOZ_ASSERT(!zone->needsIncrementalBarrier());
    MOZ_ASSERT(target->isMarkedBlack());
    MOZ_ASSERT(!target->maybeCompartment());
    return;
  }

  Zone* sourceZone = source->zone();
  Zone* targetZone = target->zone();

  // Atoms and Symbols do not have access to a compartment pointer, or we'd need
  // to adjust the subsequent check to catch that case.
  MOZ_ASSERT_IF(targetZone->isAtomsZone(), !target->maybeCompartment());

  // The Zones must match, unless the target is an atom.
  MOZ_ASSERT(targetZone == sourceZone || targetZone->isAtomsZone());

  // If we are marking an atom, that atom must be marked in the source zone's
  // atom bitmap.
  if (checkAtomMarking && !sourceZone->isAtomsZone() &&
      targetZone->isAtomsZone()) {
    MOZ_ASSERT(target->runtimeFromAnyThread()->gc.atomMarking.atomIsMarked(
        sourceZone, reinterpret_cast<TenuredCell*>(target)));
  }

  // If we have access to a compartment pointer for both things, they must
  // match.
  MOZ_ASSERT_IF(source->maybeCompartment() && target->maybeCompartment(),
                source->maybeCompartment() == target->maybeCompartment());
#endif
}

template <uint32_t opts, typename S, typename T>
void js::GCMarker::markAndTraverseEdge(S source, T* target) {
  checkTraversedEdge(source, target);
  markAndTraverse<opts>(target);
}

template <uint32_t opts, typename S, typename T>
void js::GCMarker::markAndTraverseEdge(S source, const T& thing) {
  ApplyGCThingTyped(thing, [this, source](auto t) {
    this->markAndTraverseEdge<opts>(source, t);
  });
}

template <uint32_t opts, typename T>
bool js::GCMarker::mark(T* thing) {
  if (!thing->isTenured()) {
    return false;
  }

  AssertShouldMarkInZone(this, thing);

  MarkColor color =
      TraceKindCanBeGray<T>::value ? markColor() : MarkColor::Black;

  if constexpr (bool(opts & MarkingOptions::ParallelMarking)) {
    return thing->asTenured().markIfUnmarkedAtomic(color);
  }

  return thing->asTenured().markIfUnmarked(color);
}

/*** Mark-stack Marking *****************************************************/

// Call the trace hook set on the object, if present.
static inline void CallTraceHook(JSTracer* trc, JSObject* obj) {
  const JSClass* clasp = obj->getClass();
  MOZ_ASSERT(clasp);

  if (clasp->hasTrace()) {
    AutoSetTracingSource asts(trc, obj);
    clasp->doTrace(trc, obj);
  }
}

static gcstats::PhaseKind GrayMarkingPhaseForCurrentPhase(
    const gcstats::Statistics& stats) {
  using namespace gcstats;
  switch (stats.currentPhaseKind()) {
    case PhaseKind::MARK:
      return PhaseKind::MARK_GRAY;
    case PhaseKind::MARK_WEAK:
      return PhaseKind::MARK_GRAY_WEAK;
    default:
      MOZ_CRASH("Unexpected current phase");
  }
}

void GCMarker::moveWork(GCMarker* dst, GCMarker* src) {
  MarkStack::moveWork(dst->stack, src->stack);
}

bool GCMarker::markUntilBudgetExhausted(SliceBudget& budget,
                                        ShouldReportMarkTime reportTime) {
#ifdef DEBUG
  MOZ_ASSERT(!strictCompartmentChecking);
  strictCompartmentChecking = true;
  auto acc = mozilla::MakeScopeExit([&] { strictCompartmentChecking = false; });
#endif

  if (budget.isOverBudget()) {
    return false;
  }

  return doMarking<NormalMarkingOptions>(budget, reportTime);
}

template <uint32_t opts>
bool GCMarker::doMarking(SliceBudget& budget, ShouldReportMarkTime reportTime) {
  GCRuntime& gc = runtime()->gc;

  // This method leaves the mark color as it found it.

  while (!isDrained()) {
    if (hasBlackEntries() && !markOneColor<opts, MarkColor::Black>(budget)) {
      return false;
    }

    if (hasGrayEntries()) {
      mozilla::Maybe<gcstats::AutoPhase> ap;
      if (reportTime) {
        auto& stats = runtime()->gc.stats();
        ap.emplace(stats, GrayMarkingPhaseForCurrentPhase(stats));
      }

      if (!markOneColor<opts, MarkColor::Gray>(budget)) {
        return false;
      }
    }

    // All normal marking happens before any delayed marking.
    MOZ_ASSERT(!hasBlackEntries() && !hasGrayEntries());
  }

  // Mark children of things that caused too deep recursion during the above
  // tracing.
  if (gc.hasDelayedMarking()) {
    gc.markAllDelayedChildren(reportTime);
  }

  MOZ_ASSERT(!gc.hasDelayedMarking());
  MOZ_ASSERT(isDrained());

  return true;
}

template <uint32_t opts, MarkColor color>
bool GCMarker::markOneColor(SliceBudget& budget) {
  AutoSetMarkColor setColor(*this, color);

  while (processMarkStackTop<opts>(budget)) {
    if (!hasEntries(color)) {
      return true;
    }
  }

  return false;
}

bool GCMarker::markCurrentColorInParallel(SliceBudget& budget) {
  if (markColor() == MarkColor::Black) {
    return markOneColorInParallel<MarkColor::Black>(budget);
  }

  return markOneColorInParallel<MarkColor::Gray>(budget);
}

template <MarkColor color>
bool GCMarker::markOneColorInParallel(SliceBudget& budget) {
  AutoSetMarkColor setColor(*this, color);

  ParallelMarker::AtomicCount& waitingTaskCount =
      parallelMarker_->waitingTaskCountRef();

  while (processMarkStackTop<MarkingOptions::ParallelMarking>(budget)) {
    if (!hasEntries(color)) {
      return true;
    }

    // TODO: It might be better to only check this occasionally, possibly
    // combined with the slice budget check. Experiments with giving this its
    // own counter resulted in worse performance.
    if (waitingTaskCount && stack.canDonateWork()) {
      parallelMarker_->donateWorkFrom(this);
    }
  }

  return false;
}

static inline void CheckForCompartmentMismatch(JSObject* obj, JSObject* obj2) {
#ifdef DEBUG
  if (MOZ_UNLIKELY(obj->compartment() != obj2->compartment())) {
    fprintf(
        stderr,
        "Compartment mismatch in pointer from %s object slot to %s object\n",
        obj->getClass()->name, obj2->getClass()->name);
    MOZ_CRASH("Compartment mismatch");
  }
#endif
}

static inline size_t NumUsedFixedSlots(NativeObject* obj) {
  return std::min(obj->numFixedSlots(), obj->slotSpan());
}

static inline size_t NumUsedDynamicSlots(NativeObject* obj) {
  size_t nfixed = obj->numFixedSlots();
  size_t nslots = obj->slotSpan();
  if (nslots < nfixed) {
    return 0;
  }

  return nslots - nfixed;
}

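// For example, an object with 4 fixed slots and slotSpan() == 6 stores slots
// 0..3 inline and slots 4..5 in the dynamic slots_ array, so the helpers above
// return 4 and 2 respectively.
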
template <uint32_t opts>
inline bool GCMarker::processMarkStackTop(SliceBudget& budget) {
  /*
   * This function uses explicit goto and scans objects directly. This allows us
   * to eliminate tail recursion and significantly improve the marking
   * performance, see bug 641025.
   *
   * Note that the mutator can change the size and layout of objects between
   * marking slices, so we must check slots and element ranges read from the
   * stack.
   */

  MOZ_ASSERT(hasEntries(markColor()));
  MOZ_ASSERT_IF(markColor() == MarkColor::Gray, !hasBlackEntries());

  JSObject* obj;             // The object being scanned.
  SlotsOrElementsKind kind;  // The kind of slot range being scanned, if any.
  HeapSlot* base;            // Slot range base pointer.
  size_t index;              // Index of the next slot to mark.
  size_t end;                // End of slot range to mark.

  if (stack.peekTag() == MarkStack::SlotsOrElementsRangeTag) {
    auto range = stack.popSlotsOrElementsRange();
    obj = range.ptr().asRangeObject();
    NativeObject* nobj = &obj->as<NativeObject>();
    kind = range.kind();
    index = range.start();

    switch (kind) {
      case SlotsOrElementsKind::FixedSlots: {
        base = nobj->fixedSlots();
        end = NumUsedFixedSlots(nobj);
        break;
      }

      case SlotsOrElementsKind::DynamicSlots: {
        base = nobj->slots_;
        end = NumUsedDynamicSlots(nobj);
        break;
      }

      case SlotsOrElementsKind::Elements: {
        base = nobj->getDenseElements();

        // Account for shifted elements.
        size_t numShifted = nobj->getElementsHeader()->numShiftedElements();
        size_t initlen = nobj->getDenseInitializedLength();
        index = std::max(index, numShifted) - numShifted;
        end = initlen;
        break;
      }

      case SlotsOrElementsKind::Unused: {
        MOZ_MAKE_COMPILER_ASSUME_IS_UNREACHABLE("Unused SlotsOrElementsKind");
      }
    }

    goto scan_value_range;
  }

  budget.step();
  if (budget.isOverBudget()) {
    return false;
  }

  {
    MarkStack::TaggedPtr ptr = stack.popPtr();
    switch (ptr.tag()) {
      case MarkStack::ObjectTag: {
        obj = ptr.as<JSObject>();
        AssertShouldMarkInZone(this, obj);
        goto scan_obj;
      }

      case MarkStack::JitCodeTag: {
        auto* code = ptr.as<jit::JitCode>();
        AutoSetTracingSource asts(tracer(), code);
        code->traceChildren(tracer());
        return true;
      }

      case MarkStack::ScriptTag: {
        auto* script = ptr.as<BaseScript>();
        if constexpr (bool(opts & MarkingOptions::MarkImplicitEdges)) {
          markImplicitEdges(script);
        }
        AutoSetTracingSource asts(tracer(), script);
        script->traceChildren(tracer());
        return true;
      }

      default:
        MOZ_CRASH("Invalid tag in mark stack");
    }
  }

  return true;

scan_value_range:
  while (index < end) {
    budget.step();
    if (budget.isOverBudget()) {
      pushValueRange(obj, kind, index, end);
      return false;
    }

    const Value& v = base[index];
    index++;

    if (v.isString()) {
      markAndTraverseEdge<opts>(obj, v.toString());
    } else if (v.hasObjectPayload()) {
      JSObject* obj2 = &v.getObjectPayload();
#ifdef DEBUG
      if (!obj2) {
        fprintf(stderr,
                "processMarkStackTop found ObjectValue(nullptr) "
                "at %zu Values from end of range in object:\n",
                size_t(end - (index - 1)));
        obj->dump();
      }
#endif
      CheckForCompartmentMismatch(obj, obj2);
      if (mark<opts>(obj2)) {
        // Save the rest of this value range for later and start scanning obj2's
        // children.
        pushValueRange(obj, kind, index, end);
        obj = obj2;
        goto scan_obj;
      }
    } else if (v.isSymbol()) {
      markAndTraverseEdge<opts>(obj, v.toSymbol());
    } else if (v.isBigInt()) {
      markAndTraverseEdge<opts>(obj, v.toBigInt());
    } else if (v.isPrivateGCThing()) {
      // v.toGCCellPtr cannot be inlined, so construct one manually.
      Cell* cell = v.toGCThing();
      markAndTraverseEdge<opts>(obj, JS::GCCellPtr(cell, cell->getTraceKind()));
    }
  }

  return true;

scan_obj : {
  AssertShouldMarkInZone(this, obj);

  if constexpr (bool(opts & MarkingOptions::MarkImplicitEdges)) {
    markImplicitEdges(obj);
  }
  markAndTraverseEdge<opts>(obj, obj->shape());

  CallTraceHook(tracer(), obj);

  if (!obj->is<NativeObject>()) {
    return true;
  }

  NativeObject* nobj = &obj->as<NativeObject>();

  unsigned nslots = nobj->slotSpan();

  do {
    if (nobj->hasEmptyElements()) {
      break;
    }

    base = nobj->getDenseElements();
    kind = SlotsOrElementsKind::Elements;
    index = 0;
    end = nobj->getDenseInitializedLength();

    if (!nslots) {
      goto scan_value_range;
    }
    pushValueRange(nobj, kind, index, end);
  } while (false);

  unsigned nfixed = nobj->numFixedSlots();

  base = nobj->fixedSlots();
  kind = SlotsOrElementsKind::FixedSlots;
  index = 0;

  if (nslots > nfixed) {
    pushValueRange(nobj, kind, index, nfixed);
    kind = SlotsOrElementsKind::DynamicSlots;
    base = nobj->slots_;
    end = nslots - nfixed;
    goto scan_value_range;
  }

  MOZ_ASSERT(nslots <= nobj->numFixedSlots());
  end = nslots;
  goto scan_value_range;
}
}

/*** Mark Stack *************************************************************/

static_assert(sizeof(MarkStack::TaggedPtr) == sizeof(uintptr_t),
              "A TaggedPtr should be the same size as a pointer");
static_assert((sizeof(MarkStack::SlotsOrElementsRange) % sizeof(uintptr_t)) ==
                  0,
              "SlotsOrElementsRange size should be a multiple of "
              "the pointer size");

static const size_t ValueRangeWords =
    sizeof(MarkStack::SlotsOrElementsRange) / sizeof(uintptr_t);

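// A SlotsOrElementsRange occupies two words (startAndKind_ and ptr_; see
// below), so ValueRangeWords is expected to be 2 on supported platforms.
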
template <typename T>
struct MapTypeToMarkStackTag {};
template <>
struct MapTypeToMarkStackTag<JSObject*> {
  static const auto value = MarkStack::ObjectTag;
};
template <>
struct MapTypeToMarkStackTag<jit::JitCode*> {
  static const auto value = MarkStack::JitCodeTag;
};
template <>
struct MapTypeToMarkStackTag<BaseScript*> {
  static const auto value = MarkStack::ScriptTag;
};

#ifdef DEBUG
static inline bool TagIsRangeTag(MarkStack::Tag tag) {
  return tag == MarkStack::SlotsOrElementsRangeTag;
}
#endif

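// Mark stack entries are pointers tagged in their low bits. Cells are at
// least word-aligned, so the low bits of a Cell* are zero and are free to
// hold the tag, roughly:
//
//   bits = tag | uintptr_t(ptr);               // pack
//   cell = (Cell*)(bits & ~TagMask);           // unpack
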
inline MarkStack::TaggedPtr::TaggedPtr(Tag tag, Cell* ptr)
    : bits(tag | uintptr_t(ptr)) {
  assertValid();
}

inline uintptr_t MarkStack::TaggedPtr::tagUnchecked() const {
  return bits & TagMask;
}

inline MarkStack::Tag MarkStack::TaggedPtr::tag() const {
  auto tag = Tag(bits & TagMask);
  MOZ_ASSERT(tag <= LastTag);
  return tag;
}

inline Cell* MarkStack::TaggedPtr::ptr() const {
  return reinterpret_cast<Cell*>(bits & ~TagMask);
}

inline void MarkStack::TaggedPtr::assertValid() const {
  (void)tag();
  MOZ_ASSERT(IsCellPointerValid(ptr()));
}

template <typename T>
inline T* MarkStack::TaggedPtr::as() const {
  MOZ_ASSERT(tag() == MapTypeToMarkStackTag<T*>::value);
  MOZ_ASSERT(ptr()->isTenured());
  MOZ_ASSERT(ptr()->is<T>());
  return static_cast<T*>(ptr());
}

inline JSObject* MarkStack::TaggedPtr::asRangeObject() const {
  MOZ_ASSERT(TagIsRangeTag(tag()));
  MOZ_ASSERT(ptr()->isTenured());
  return ptr()->as<JSObject>();
}

inline JSRope* MarkStack::TaggedPtr::asTempRope() const {
  MOZ_ASSERT(tag() == TempRopeTag);
  return &ptr()->as<JSString>()->asRope();
}

inline MarkStack::SlotsOrElementsRange::SlotsOrElementsRange(
    SlotsOrElementsKind kindArg, JSObject* obj, size_t startArg)
    : startAndKind_((startArg << StartShift) | size_t(kindArg)),
      ptr_(SlotsOrElementsRangeTag, obj) {
  assertValid();
  MOZ_ASSERT(kind() == kindArg);
  MOZ_ASSERT(start() == startArg);
}

inline void MarkStack::SlotsOrElementsRange::assertValid() const {
  ptr_.assertValid();
  MOZ_ASSERT(TagIsRangeTag(ptr_.tag()));
}

inline SlotsOrElementsKind MarkStack::SlotsOrElementsRange::kind() const {
  return SlotsOrElementsKind(startAndKind_ & KindMask);
}

inline size_t MarkStack::SlotsOrElementsRange::start() const {
  return startAndKind_ >> StartShift;
}

inline MarkStack::TaggedPtr MarkStack::SlotsOrElementsRange::ptr() const {
  return ptr_;
}

MarkStack::MarkStack() : grayPosition_(0), markColor_(MarkColor::Black) {
  MOZ_ASSERT(isEmpty());
}

MarkStack::~MarkStack() {
  MOZ_ASSERT(isEmpty());
  MOZ_ASSERT(iteratorCount_ == 0);
}

bool MarkStack::init() {
  MOZ_ASSERT(isEmpty());
  return resetStackCapacity();
}

bool MarkStack::resetStackCapacity() {
  size_t capacity = MARK_STACK_BASE_CAPACITY;

#ifdef JS_GC_ZEAL
  capacity = std::min(capacity, maxCapacity_.ref());
#endif

  return resize(capacity);
}

#ifdef JS_GC_ZEAL
void MarkStack::setMaxCapacity(size_t maxCapacity) {
  MOZ_ASSERT(maxCapacity != 0);
  MOZ_ASSERT(isEmpty());

  maxCapacity_ = maxCapacity;
  if (capacity() > maxCapacity_) {
    // If the realloc fails, just keep using the existing stack; it's
    // not ideal but better than failing.
    (void)resize(maxCapacity_);
  }
}
#endif

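// Black and gray entries share one stack, segregated by position: while the
// mark color is black, grayPosition_ records the boundary and entries at or
// above it are black work; while it is gray, grayPosition_ is SIZE_MAX and
// all current entries count as gray work (see basePositionForCurrentColor()).
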
void MarkStack::setMarkColor(gc::MarkColor newColor) {
  if (markColor_ == newColor) {
    return;
  }

  MOZ_ASSERT(!hasBlackEntries());

  markColor_ = newColor;
  if (markColor_ == MarkColor::Black) {
    grayPosition_ = position();
  } else {
    grayPosition_ = SIZE_MAX;
  }

  assertGrayPositionValid();
}

inline void MarkStack::assertGrayPositionValid() const {
  // Check grayPosition_ is consistent with the current mark color. This ensures
  // that anything pushed on to the stack will end up marked with the correct
  // color.
  MOZ_ASSERT((markColor() == MarkColor::Black) ==
             (position() >= grayPosition_));
}

bool MarkStack::hasEntries(MarkColor color) const {
  return color == MarkColor::Black ? hasBlackEntries() : hasGrayEntries();
}

bool MarkStack::canDonateWork() const {
  // It's not worth the overhead of donating very few entries. For some
  // (non-parallelizable) workloads this can lead to constantly interrupting
  // marking work and makes parallel marking slower than single threaded.
  constexpr size_t MinWordCount = 12;

  static_assert(MinWordCount >= ValueRangeWords,
                "We must always leave at least one stack entry.");

  return wordCountForCurrentColor() > MinWordCount;
}

MOZ_ALWAYS_INLINE bool MarkStack::indexIsEntryBase(size_t index) const {
  // The mark stack holds both TaggedPtr and SlotsOrElementsRange entries, which
  // are one or two words long respectively. Determine whether |index| points to
  // the base of an entry (i.e. the lowest word in memory).
  //
  // The possible cases are that |index| points to:
  //  1. a single word TaggedPtr entry => true
  //  2. the startAndKind_ word of SlotsOrElementsRange => true
  //     (startAndKind_ is a uintptr_t tagged with SlotsOrElementsKind)
  //  3. the ptr_ word of SlotsOrElementsRange (itself a TaggedPtr) => false
  //
  // To check for case 3, interpret the word as a TaggedPtr: if it is tagged as
  // a SlotsOrElementsRange tagged pointer then we are inside such a range and
  // |index| does not point to the base of an entry. This requires that no
  // startAndKind_ word can be interpreted as such, which is arranged by making
  // SlotsOrElementsRangeTag zero and all SlotsOrElementsKind tags non-zero.

  MOZ_ASSERT(index >= basePositionForCurrentColor() && index < position());
  return stack()[index].tagUnchecked() != SlotsOrElementsRangeTag;
}

1781 /* static */
1782 void MarkStack::moveWork(MarkStack& dst, MarkStack& src) {
1783 // Move some work from |src| to |dst|. Assumes |dst| is empty.
1785 // When this method runs during parallel marking, we are on the thread that
1786 // owns |src|, and the thread that owns |dst| is blocked waiting on the
1787 // ParallelMarkTask::resumed condition variable.
1789 // Limit the size of moves to stop threads with work spending too much time
1790 // donating.
1791 static const size_t MaxWordsToMove = 4096;
1793 MOZ_ASSERT(src.markColor() == dst.markColor());
1794 MOZ_ASSERT(!dst.hasEntries(dst.markColor()));
1795 MOZ_ASSERT(src.canDonateWork());
1797 size_t base = src.basePositionForCurrentColor();
1798 size_t totalWords = src.position() - base;
1799 size_t wordsToMove = std::min(totalWords / 2, MaxWordsToMove);
1801 size_t targetPos = src.position() - wordsToMove;
1802 MOZ_ASSERT(src.position() >= base);
1804 // Adjust the target position in case it points to the middle of a two word
1805 // entry.
1806 if (!src.indexIsEntryBase(targetPos)) {
1807 targetPos--;
1808 wordsToMove++;
1810 MOZ_ASSERT(src.indexIsEntryBase(targetPos));
1811 MOZ_ASSERT(targetPos < src.position());
1812 MOZ_ASSERT(targetPos > base);
1813 MOZ_ASSERT(wordsToMove == src.position() - targetPos);
1815 if (!dst.ensureSpace(wordsToMove)) {
1816 return;
1817 }
1819 // TODO: This doesn't have good cache behaviour when moving work between
1820 // threads. It might be better if the original thread ended up with the top
1821 // part of the stack, in other words if this method stole from the bottom of
1822 // the stack rather than the top.
1824 mozilla::PodCopy(dst.topPtr(), src.stack().begin() + targetPos, wordsToMove);
1825 dst.topIndex_ += wordsToMove;
1826 dst.peekPtr().assertValid();
1828 src.topIndex_ = targetPos;
1829 #ifdef DEBUG
1830 src.poisonUnused();
1831 #endif
1832 src.peekPtr().assertValid();
1833 }
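// Worked example (illustrative): if |src| holds 10 words above its base, 5
// words are donated. If |targetPos| then lands on the ptr_ word of a
// SlotsOrElementsRange, the move is widened by one word so that the whole
// two-word entry is transferred.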
1835 MOZ_ALWAYS_INLINE size_t MarkStack::basePositionForCurrentColor() const {
1836 return markColor() == MarkColor::Black ? grayPosition_ : 0;
1837 }
1839 MOZ_ALWAYS_INLINE size_t MarkStack::wordCountForCurrentColor() const {
1840 size_t base = basePositionForCurrentColor();
1841 MOZ_ASSERT(position() >= base);
1842 return position() - base;
1843 }
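// Sketch of the layout these accessors imply: while marking black with
// grayPosition_ == 2 and position() == 5, the stack is
//
//   index:   0      1      2       3       4
//          [gray] [gray] [black] [black] [black]
//
// so wordCountForCurrentColor() == 3. While marking gray, grayPosition_ is
// SIZE_MAX and the base is 0, so the count covers the whole stack.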
1845 void MarkStack::clear() {
1846 // Fall back to the smaller initial capacity so we don't hold on to excess
1847 // memory between GCs.
1848 stack().clearAndFree();
1849 topIndex_ = 0;
1850 std::ignore = resetStackCapacity();
1851 }
1853 inline MarkStack::TaggedPtr* MarkStack::topPtr() { return &stack()[topIndex_]; }
1855 template <typename T>
1856 inline bool MarkStack::push(T* ptr) {
1857 assertGrayPositionValid();
1859 return push(TaggedPtr(MapTypeToMarkStackTag<T*>::value, ptr));
1860 }
1862 inline bool MarkStack::pushTempRope(JSRope* rope) {
1863 return push(TaggedPtr(TempRopeTag, rope));
1864 }
1866 inline bool MarkStack::push(const TaggedPtr& ptr) {
1867 if (!ensureSpace(1)) {
1868 return false;
1869 }
1871 infalliblePush(ptr);
1872 return true;
1873 }
1875 inline void MarkStack::infalliblePush(const TaggedPtr& ptr) {
1876 *topPtr() = ptr;
1877 topIndex_++;
1878 MOZ_ASSERT(position() <= capacity());
1879 }
1881 inline bool MarkStack::push(JSObject* obj, SlotsOrElementsKind kind,
1882 size_t start) {
1883 return push(SlotsOrElementsRange(kind, obj, start));
1884 }
1886 inline bool MarkStack::push(const SlotsOrElementsRange& array) {
1887 array.assertValid();
1888 assertGrayPositionValid();
1890 if (!ensureSpace(ValueRangeWords)) {
1891 return false;
1892 }
1894 infalliblePush(array);
1895 return true;
1896 }
1898 inline void MarkStack::infalliblePush(const SlotsOrElementsRange& array) {
1899 *reinterpret_cast<SlotsOrElementsRange*>(topPtr()) = array;
1900 topIndex_ += ValueRangeWords;
1901 MOZ_ASSERT(position() <= capacity());
1902 MOZ_ASSERT(TagIsRangeTag(peekTag()));
1903 }
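// Note: the copy above stores startAndKind_ at the lower index with ptr_ as
// the top word, so peekTag() sees the range tag and indexIsEntryBase() sees
// startAndKind_ as the entry base.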
1905 inline const MarkStack::TaggedPtr& MarkStack::peekPtr() const {
1906 MOZ_ASSERT(hasEntries(markColor()));
1907 return stack()[topIndex_ - 1];
1908 }
1910 inline MarkStack::Tag MarkStack::peekTag() const {
1911 MOZ_ASSERT(!isEmpty());
1912 return peekPtr().tag();
1913 }
1915 inline MarkStack::TaggedPtr MarkStack::popPtr() {
1916 MOZ_ASSERT(hasEntries(markColor()));
1917 MOZ_ASSERT(!TagIsRangeTag(peekTag()));
1918 peekPtr().assertValid();
1919 topIndex_--;
1920 return *topPtr();
1921 }
1923 inline MarkStack::SlotsOrElementsRange MarkStack::popSlotsOrElementsRange() {
1924 MOZ_ASSERT(hasEntries(markColor()));
1925 MOZ_ASSERT(TagIsRangeTag(peekTag()));
1926 MOZ_ASSERT(position() >= ValueRangeWords);
1928 topIndex_ -= ValueRangeWords;
1929 const auto& array = *reinterpret_cast<SlotsOrElementsRange*>(topPtr());
1930 array.assertValid();
1931 return array;
1932 }
1934 inline bool MarkStack::ensureSpace(size_t count) {
1935 if (MOZ_LIKELY((topIndex_ + count) <= capacity())) {
1936 return !js::oom::ShouldFailWithOOM();
1937 }
1939 return enlarge(count);
1940 }
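// Note: even when capacity is already sufficient, ensureSpace() can report
// failure under simulated-OOM testing (js::oom::ShouldFailWithOOM); callers
// then fall back to delayed marking rather than crashing.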
1942 MOZ_NEVER_INLINE bool MarkStack::enlarge(size_t count) {
1943 size_t required = capacity() + count;
1944 size_t newCapacity = mozilla::RoundUpPow2(required);
1946 #ifdef JS_GC_ZEAL
1947 newCapacity = std::min(newCapacity, maxCapacity_.ref());
1948 if (newCapacity < required) {
1949 return false;
1950 }
1951 #endif
1953 return resize(newCapacity);
1954 }
1956 bool MarkStack::resize(size_t newCapacity) {
1957 MOZ_ASSERT(newCapacity != 0);
1958 MOZ_ASSERT(newCapacity >= position());
1960 if (!stack().resize(newCapacity)) {
1961 return false;
1962 }
1964 poisonUnused();
1965 return true;
1966 }
1968 inline void MarkStack::poisonUnused() {
1969 static_assert((JS_FRESH_MARK_STACK_PATTERN & TagMask) > LastTag,
1970 "The mark stack poison pattern must not look like a valid "
1971 "tagged pointer");
1973 AlwaysPoison(stack().begin() + topIndex_, JS_FRESH_MARK_STACK_PATTERN,
1974 stack().capacity() - topIndex_, MemCheckKind::MakeUndefined);
1975 }
1977 size_t MarkStack::sizeOfExcludingThis(
1978 mozilla::MallocSizeOf mallocSizeOf) const {
1979 return stack().sizeOfExcludingThis(mallocSizeOf);
1980 }
1982 /*** GCMarker ***************************************************************/
1984 /*
1985 * WeakMapTraceAction::Expand: the GC is recomputing the liveness of WeakMap
1986 * entries by expanding each live WeakMap into its constituent key->value edges,
1987 * a table of which will be consulted in a later phase whenever marking a
1988 * potential key.
1989 */
1990 GCMarker::GCMarker(JSRuntime* rt)
1991 : tracer_(mozilla::VariantType<MarkingTracer>(), rt, this),
1992 runtime_(rt),
1993 state(NotActive),
1994 incrementalWeakMapMarkingEnabled(
1995 TuningDefaults::IncrementalWeakMapMarkingEnabled)
1996 #ifdef DEBUG
1997 ,
1998 checkAtomMarking(true),
1999 strictCompartmentChecking(false)
2000 #endif
2001 {
2002 }
2004 bool GCMarker::init() { return stack.init(); }
2006 void GCMarker::start() {
2007 MOZ_ASSERT(state == NotActive);
2008 MOZ_ASSERT(stack.isEmpty());
2009 state = RegularMarking;
2010 haveAllImplicitEdges = true;
2011 setMarkColor(MarkColor::Black);
2012 }
2014 static void ClearEphemeronEdges(JSRuntime* rt) {
2015 AutoEnterOOMUnsafeRegion oomUnsafe;
2016 for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
2017 if (!zone->gcEphemeronEdges().clear()) {
2018 oomUnsafe.crash("clearing weak keys in GCMarker::stop()");
2019 }
2020 if (!zone->gcNurseryEphemeronEdges().clear()) {
2021 oomUnsafe.crash("clearing (nursery) weak keys in GCMarker::stop()");
2022 }
2023 }
2024 }
2026 void GCMarker::stop() {
2027 MOZ_ASSERT(isDrained());
2029 if (state == NotActive) {
2030 return;
2031 }
2032 state = NotActive;
2034 stack.clear();
2035 ClearEphemeronEdges(runtime());
2037 unmarkGrayStack.clearAndFree();
2038 }
2040 void GCMarker::reset() {
2041 stack.clear();
2042 ClearEphemeronEdges(runtime());
2043 MOZ_ASSERT(isDrained());
2045 setMarkColor(MarkColor::Black);
2047 unmarkGrayStack.clearAndFree();
2048 }
2050 template <typename T>
2051 inline void GCMarker::pushTaggedPtr(T* ptr) {
2052 checkZone(ptr);
2053 if (!stack.push(ptr)) {
2054 delayMarkingChildrenOnOOM(ptr);
2055 }
2056 }
2058 inline void GCMarker::pushValueRange(JSObject* obj, SlotsOrElementsKind kind,
2059 size_t start, size_t end) {
2060 checkZone(obj);
2061 MOZ_ASSERT(obj->is<NativeObject>());
2062 MOZ_ASSERT(start <= end);
2064 if (start == end) {
2065 return;
2066 }
2068 if (MOZ_UNLIKELY(!stack.push(obj, kind, start))) {
2069 delayMarkingChildrenOnOOM(obj);
2070 }
2071 }
2073 void GCMarker::repush(JSObject* obj) {
2074 MOZ_ASSERT(obj->asTenured().isMarkedAtLeast(markColor()));
2075 pushTaggedPtr(obj);
2076 }
2078 void GCMarker::setRootMarkingMode(bool newState) {
2079 if (newState) {
2080 setMarkingStateAndTracer<RootMarkingTracer>(RegularMarking, RootMarking);
2081 } else {
2082 setMarkingStateAndTracer<MarkingTracer>(RootMarking, RegularMarking);
2083 }
2084 }
2086 void GCMarker::enterParallelMarkingMode(ParallelMarker* pm) {
2087 MOZ_ASSERT(pm);
2088 MOZ_ASSERT(!parallelMarker_);
2089 setMarkingStateAndTracer<ParallelMarkingTracer>(RegularMarking,
2090 ParallelMarking);
2091 parallelMarker_ = pm;
2092 }
2094 void GCMarker::leaveParallelMarkingMode() {
2095 MOZ_ASSERT(parallelMarker_);
2096 setMarkingStateAndTracer<MarkingTracer>(ParallelMarking, RegularMarking);
2097 parallelMarker_ = nullptr;
2098 }
2100 template <typename Tracer>
2101 void GCMarker::setMarkingStateAndTracer(MarkingState prev, MarkingState next) {
2102 MOZ_ASSERT(state == prev);
2103 state = next;
2104 tracer_.emplace<Tracer>(runtime(), this);
2105 }
2107 bool GCMarker::enterWeakMarkingMode() {
2108 MOZ_ASSERT(tracer()->weakMapAction() == JS::WeakMapTraceAction::Expand);
2109 MOZ_ASSERT(state == RegularMarking);
2110 if (!haveAllImplicitEdges) {
2111 return false;
2112 }
2114 // During weak marking mode, we maintain a table mapping weak keys to
2115 // entries in known-live weakmaps. Initialize it with the keys of marked
2116 // weakmaps -- or more precisely, the keys of marked weakmaps that are
2117 // mapped to not yet live values. (Once bug 1167452 implements incremental
2118 // weakmap marking, this initialization step will become unnecessary, as
2119 // the table will already hold all such keys.)
2121 // Set state before doing anything else, so any new key that is marked
2122 // during the following gcEphemeronEdges scan will itself be looked up in
2123 // gcEphemeronEdges and marked according to ephemeron rules.
2124 state = WeakMarking;
2126 return true;
2127 }
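// Illustrative example of the effect: for a marked WeakMap with an entry
// k -> v, marking k while in weak marking mode marks v immediately via the
// gcEphemeronEdges table, instead of waiting for a later scan of the map's
// entries.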
2129 IncrementalProgress JS::Zone::enterWeakMarkingMode(GCMarker* marker,
2130 SliceBudget& budget) {
2131 MOZ_ASSERT(marker->isWeakMarking());
2133 if (!marker->incrementalWeakMapMarkingEnabled) {
2134 for (WeakMapBase* m : gcWeakMapList()) {
2135 if (m->mapColor) {
2136 (void)m->markEntries(marker);
2137 }
2138 }
2139 return IncrementalProgress::Finished;
2140 }
2142 // gcEphemeronEdges contains the keys from all weakmaps marked so far, or at
2143 // least the keys that might still need to be marked through. Scan through
2144 // gcEphemeronEdges and mark all values whose keys are marked. This marking
2145 // may recursively mark through other weakmap entries (immediately since we
2146 // are now in WeakMarking mode). The end result is a consistent state where
2147 // all values are marked if both their map and key are marked -- though note
2148 // that we may later leave weak marking mode, do some more marking, and then
2149 // enter back in.
2150 if (!isGCMarking()) {
2151 return IncrementalProgress::Finished;
2152 }
2154 MOZ_ASSERT(gcNurseryEphemeronEdges().count() == 0);
2156 // An OrderedHashMap::MutableRange stays valid even when the underlying table
2157 // (zone->gcEphemeronEdges) is mutated, which is useful here since we may add
2158 // additional entries while iterating over the Range.
2159 EphemeronEdgeTable::MutableRange r = gcEphemeronEdges().mutableAll();
2160 while (!r.empty()) {
2161 Cell* src = r.front().key;
2162 CellColor srcColor = gc::detail::GetEffectiveColor(marker, src);
2163 auto& edges = r.front().value;
2164 r.popFront(); // Pop before any mutations happen.
2166 if (edges.length() > 0) {
2167 uint32_t steps = edges.length();
2168 marker->markEphemeronEdges(edges, srcColor);
2169 budget.step(steps);
2170 if (budget.isOverBudget()) {
2171 return NotFinished;
2172 }
2173 }
2174 }
2176 return IncrementalProgress::Finished;
2177 }
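// Note: |steps| samples edges.length() before marking, presumably because
// markEphemeronEdges() can append to the edge list; the budget is charged
// only for the edges present when marking started.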
2179 void GCMarker::leaveWeakMarkingMode() {
2180 MOZ_ASSERT(state == WeakMarking || state == RegularMarking);
2181 state = RegularMarking;
2183 // The gcEphemeronEdges table is still populated and may be used during a
2184 // future weak marking mode within this GC.
2185 }
2187 void GCMarker::abortLinearWeakMarking() {
2188 haveAllImplicitEdges = false;
2189 if (state == WeakMarking) {
2190 leaveWeakMarkingMode();
2191 }
2192 }
2194 MOZ_NEVER_INLINE void GCMarker::delayMarkingChildrenOnOOM(Cell* cell) {
2195 runtime()->gc.delayMarkingChildren(cell, markColor());
2196 }
2198 bool GCRuntime::hasDelayedMarking() const {
2199 bool result = delayedMarkingList;
2200 MOZ_ASSERT(result == (markLaterArenas != 0));
2201 return result;
2202 }
2204 void GCRuntime::delayMarkingChildren(Cell* cell, MarkColor color) {
2205 // Synchronize access to delayed marking state during parallel marking.
2206 LockGuard<Mutex> lock(delayedMarkingLock);
2208 Arena* arena = cell->asTenured().arena();
2209 if (!arena->onDelayedMarkingList()) {
2210 arena->setNextDelayedMarkingArena(delayedMarkingList);
2211 delayedMarkingList = arena;
2212 #ifdef DEBUG
2213 markLaterArenas++;
2214 #endif
2215 }
2217 if (!arena->hasDelayedMarking(color)) {
2218 arena->setHasDelayedMarking(color, true);
2219 delayedMarkingWorkAdded = true;
2220 }
2221 }
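// Two pieces of state are maintained above: the arena's membership of the
// delayed marking list, and a per-color flag recording that the arena has
// cells whose children still need marking with that color. The flag is what
// processDelayedMarkingList() consumes.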
2223 void GCRuntime::markDelayedChildren(Arena* arena, MarkColor color) {
2224 JSTracer* trc = marker().tracer();
2225 JS::TraceKind kind = MapAllocToTraceKind(arena->getAllocKind());
2226 MarkColor colorToCheck =
2227 TraceKindCanBeMarkedGray(kind) ? color : MarkColor::Black;
2229 for (ArenaCellIterUnderGC cell(arena); !cell.done(); cell.next()) {
2230 if (cell->isMarked(colorToCheck)) {
2231 ApplyGCThingTyped(cell, kind, [trc, this](auto t) {
2232 t->traceChildren(trc);
2233 marker().markImplicitEdges(t);
2234 });
2235 }
2236 }
2237 }
2239 /*
2240 * Process arenas from |delayedMarkingList| by marking the unmarked children of
2241 * marked cells of color |color|.
2242 *
2243 * This is called twice, first to mark black children and then to mark gray
2244 * children, matching the order in markAllDelayedChildren.
2245 */
2246 void GCRuntime::processDelayedMarkingList(MarkColor color) {
2247 // Marking delayed children may add more arenas to the list, including arenas
2248 // we are currently processing or have previously processed. Handle this by
2249 // clearing a flag on each arena before marking its children. This flag will
2250 // be set again if the arena is re-added. Iterate the list until no new arenas
2251 // are added.
2253 AutoSetMarkColor setColor(marker(), color);
2255 do {
2256 delayedMarkingWorkAdded = false;
2257 for (Arena* arena = delayedMarkingList; arena;
2258 arena = arena->getNextDelayedMarking()) {
2259 if (arena->hasDelayedMarking(color)) {
2260 arena->setHasDelayedMarking(color, false);
2261 markDelayedChildren(arena, color);
2262 }
2263 }
2264 while (marker().hasEntries(color)) {
2265 SliceBudget budget = SliceBudget::unlimited();
2266 MOZ_ALWAYS_TRUE(
2267 marker().processMarkStackTop<NormalMarkingOptions>(budget));
2268 }
2269 } while (delayedMarkingWorkAdded);
2271 MOZ_ASSERT(marker().isDrained());
2272 }
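// Note: the mark stack is drained with SliceBudget::unlimited(), so
// processMarkStackTop() can never fail for budget reasons here (hence the
// MOZ_ALWAYS_TRUE); delayed marking is not itself incremental.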
2274 void GCRuntime::markAllDelayedChildren(ShouldReportMarkTime reportTime) {
2275 MOZ_ASSERT(CurrentThreadIsMainThread() || CurrentThreadIsPerformingGC());
2276 MOZ_ASSERT(marker().isDrained());
2277 MOZ_ASSERT(hasDelayedMarking());
2279 mozilla::Maybe<gcstats::AutoPhase> ap;
2280 if (reportTime) {
2281 ap.emplace(stats(), gcstats::PhaseKind::MARK_DELAYED);
2282 }
2284 // We have a list of arenas containing marked cells with unmarked children
2285 // where we ran out of stack space during marking. Both black and gray cells
2286 // in these arenas may have unmarked children. Mark black children first.
2288 const MarkColor colors[] = {MarkColor::Black, MarkColor::Gray};
2289 for (MarkColor color : colors) {
2290 processDelayedMarkingList(color);
2291 rebuildDelayedMarkingList();
2292 }
2294 MOZ_ASSERT(!hasDelayedMarking());
2295 }
2297 void GCRuntime::rebuildDelayedMarkingList() {
2298 // Rebuild the delayed marking list, removing arenas which do not need further
2299 // marking.
2301 Arena* listTail = nullptr;
2302 forEachDelayedMarkingArena([&](Arena* arena) {
2303 if (!arena->hasAnyDelayedMarking()) {
2304 arena->clearDelayedMarkingState();
2305 #ifdef DEBUG
2306 MOZ_ASSERT(markLaterArenas);
2307 markLaterArenas--;
2308 #endif
2309 return;
2310 }
2312 appendToDelayedMarkingList(&listTail, arena);
2313 });
2314 appendToDelayedMarkingList(&listTail, nullptr);
2315 }
2317 void GCRuntime::resetDelayedMarking() {
2318 MOZ_ASSERT(CurrentThreadIsMainThread());
2320 forEachDelayedMarkingArena([&](Arena* arena) {
2321 MOZ_ASSERT(arena->onDelayedMarkingList());
2322 arena->clearDelayedMarkingState();
2323 #ifdef DEBUG
2324 MOZ_ASSERT(markLaterArenas);
2325 markLaterArenas--;
2326 #endif
2327 });
2328 delayedMarkingList = nullptr;
2329 MOZ_ASSERT(!markLaterArenas);
2330 }
2332 inline void GCRuntime::appendToDelayedMarkingList(Arena** listTail,
2333 Arena* arena) {
2334 if (*listTail) {
2335 (*listTail)->updateNextDelayedMarkingArena(arena);
2336 } else {
2337 delayedMarkingList = arena;
2338 }
2339 *listTail = arena;
2340 }
2342 template <typename F>
2343 inline void GCRuntime::forEachDelayedMarkingArena(F&& f) {
2344 Arena* arena = delayedMarkingList;
2345 Arena* next;
2346 while (arena) {
2347 next = arena->getNextDelayedMarking();
2348 f(arena);
2349 arena = next;
2350 }
2351 }
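// Note: |next| is loaded before calling |f| because the callback may clear
// the arena's delayed marking state, including its next-arena link.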
2353 #ifdef DEBUG
2354 void GCMarker::checkZone(void* p) {
2355 MOZ_ASSERT(state != NotActive);
2356 DebugOnly<Cell*> cell = static_cast<Cell*>(p);
2357 MOZ_ASSERT_IF(cell->isTenured(),
2358 cell->asTenured().zone()->isCollectingFromAnyThread());
2359 }
2360 #endif
2362 size_t GCMarker::sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
2363 return mallocSizeOf(this) + stack.sizeOfExcludingThis(mallocSizeOf);
2364 }
2366 /*** IsMarked / IsAboutToBeFinalized ****************************************/
2368 template <typename T>
2369 static inline void CheckIsMarkedThing(T* thing) {
2370 #define IS_SAME_TYPE_OR(name, type, _, _1) std::is_same_v<type, T> ||
2371 static_assert(JS_FOR_EACH_TRACEKIND(IS_SAME_TYPE_OR) false,
2372 "Only the base cell layout types are allowed into "
2373 "marking/tracing internals");
2374 #undef IS_SAME_TYPE_OR
2376 #ifdef DEBUG
2377 MOZ_ASSERT(thing);
2379 // Allow any thread access to uncollected things.
2380 Zone* zone = thing->zoneFromAnyThread();
2381 if (thing->isPermanentAndMayBeShared()) {
2382 MOZ_ASSERT(!zone->wasGCStarted());
2383 MOZ_ASSERT(!zone->needsIncrementalBarrier());
2384 MOZ_ASSERT(thing->isMarkedBlack());
2385 return;
2386 }
2388 // Allow the current thread access if it is sweeping or in sweep-marking, but
2389 // try to check the zone. Some threads have access to all zones when sweeping.
2390 JS::GCContext* gcx = TlsGCContext.get();
2391 MOZ_ASSERT(gcx->gcUse() != GCUse::Finalizing);
2392 if (gcx->gcUse() == GCUse::Sweeping || gcx->gcUse() == GCUse::Marking) {
2393 MOZ_ASSERT_IF(gcx->gcSweepZone(),
2394 gcx->gcSweepZone() == zone || zone->isAtomsZone());
2395 return;
2396 }
2398 // Otherwise only allow access from the main thread or this zone's associated
2399 // thread.
2400 MOZ_ASSERT(CurrentThreadCanAccessRuntime(thing->runtimeFromAnyThread()) ||
2401 CurrentThreadCanAccessZone(thing->zoneFromAnyThread()));
2402 #endif
2403 }
2405 template <typename T>
2406 bool js::gc::IsMarkedInternal(JSRuntime* rt, T* thing) {
2407 // Don't depend on the mark state of other cells during finalization.
2408 MOZ_ASSERT(!CurrentThreadIsGCFinalizing());
2409 MOZ_ASSERT(rt->heapState() != JS::HeapState::MinorCollecting);
2410 MOZ_ASSERT(thing);
2411 CheckIsMarkedThing(thing);
2413 // This is not used during minor sweeping nor used to update moved GC things.
2414 MOZ_ASSERT(!IsForwarded(thing));
2416 // Permanent things are never marked by non-owning runtimes.
2417 TenuredCell* cell = &thing->asTenured();
2418 Zone* zone = cell->zoneFromAnyThread();
2419 #ifdef DEBUG
2420 if (IsOwnedByOtherRuntime(rt, thing)) {
2421 MOZ_ASSERT(!zone->wasGCStarted());
2422 MOZ_ASSERT(thing->isMarkedBlack());
2423 }
2424 #endif
2426 return !zone->isGCMarking() || TenuredThingIsMarkedAny(thing);
2427 }
2429 template <typename T>
2430 bool js::gc::IsAboutToBeFinalizedInternal(T* thing) {
2431 // Don't depend on the mark state of other cells during finalization.
2432 MOZ_ASSERT(!CurrentThreadIsGCFinalizing());
2433 MOZ_ASSERT(thing);
2434 CheckIsMarkedThing(thing);
2436 // This is not used during minor sweeping nor used to update moved GC things.
2437 MOZ_ASSERT(!IsForwarded(thing));
2439 if (!thing->isTenured()) {
2440 return false;
2441 }
2443 // Permanent things are never finalized by non-owning runtimes.
2444 TenuredCell* cell = &thing->asTenured();
2445 Zone* zone = cell->zoneFromAnyThread();
2446 #ifdef DEBUG
2447 JSRuntime* rt = TlsGCContext.get()->runtimeFromAnyThread();
2448 if (IsOwnedByOtherRuntime(rt, thing)) {
2449 MOZ_ASSERT(!zone->wasGCStarted());
2450 MOZ_ASSERT(thing->isMarkedBlack());
2451 }
2452 #endif
2454 return zone->isGCSweeping() && !TenuredThingIsMarkedAny(thing);
2455 }
2457 template <typename T>
2458 bool js::gc::IsAboutToBeFinalizedInternal(const T& thing) {
2459 bool dying = false;
2460 ApplyGCThingTyped(
2461 thing, [&dying](auto t) { dying = IsAboutToBeFinalizedInternal(t); });
2462 return dying;
2463 }
2465 SweepingTracer::SweepingTracer(JSRuntime* rt)
2466 : GenericTracerImpl(rt, JS::TracerKind::Sweeping,
2467 JS::WeakMapTraceAction::TraceKeysAndValues) {}
2469 template <typename T>
2470 inline void SweepingTracer::onEdge(T** thingp, const char* name) {
2471 T* thing = *thingp;
2472 CheckIsMarkedThing(thing);
2474 if (!thing->isTenured()) {
2475 return;
2476 }
2478 // Permanent things are never finalized by non-owning runtimes.
2479 TenuredCell* cell = &thing->asTenured();
2480 Zone* zone = cell->zoneFromAnyThread();
2481 #ifdef DEBUG
2482 if (IsOwnedByOtherRuntime(runtime(), thing)) {
2483 MOZ_ASSERT(!zone->wasGCStarted());
2484 MOZ_ASSERT(thing->isMarkedBlack());
2485 }
2486 #endif
2488 // It would be nice if we could assert that the zone of the tenured cell is in
2489 // the Sweeping state, but that isn't always true for:
2490 // - atoms
2491 // - the jitcode map
2492 // - the mark queue
2493 if (zone->isGCSweeping() && !cell->isMarkedAny()) {
2494 *thingp = nullptr;
2495 }
2496 }
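// Note: nulling out *thingp is how weak edges are swept: an edge to a cell
// that was not marked in a sweeping zone is cleared rather than traced, and
// callers such as TraceWeakEdge() then observe the dead edge.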
2498 namespace js {
2499 namespace gc {
2501 template <typename T>
2502 JS_PUBLIC_API bool TraceWeakEdge(JSTracer* trc, JS::Heap<T>* thingp) {
2503 return TraceEdgeInternal(trc, gc::ConvertToBase(thingp->unsafeGet()),
2504 "JS::Heap edge");
2507 template <typename T>
2508 JS_PUBLIC_API bool EdgeNeedsSweepUnbarrieredSlow(T* thingp) {
2509 return IsAboutToBeFinalizedInternal(*ConvertToBase(thingp));
2510 }
2512 // Instantiate a copy of the Tracing templates for each public GC type.
2513 #define INSTANTIATE_ALL_VALID_HEAP_TRACE_FUNCTIONS(type) \
2514 template JS_PUBLIC_API bool TraceWeakEdge<type>(JSTracer * trc, \
2515 JS::Heap<type>*); \
2516 template JS_PUBLIC_API bool EdgeNeedsSweepUnbarrieredSlow<type>(type*);
2517 JS_FOR_EACH_PUBLIC_GC_POINTER_TYPE(INSTANTIATE_ALL_VALID_HEAP_TRACE_FUNCTIONS)
2518 JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(
2519 INSTANTIATE_ALL_VALID_HEAP_TRACE_FUNCTIONS)
2521 #define INSTANTIATE_INTERNAL_IS_MARKED_FUNCTION(type) \
2522 template bool IsMarkedInternal(JSRuntime* rt, type thing);
2524 #define INSTANTIATE_INTERNAL_IATBF_FUNCTION(type) \
2525 template bool IsAboutToBeFinalizedInternal(type thingp);
2527 #define INSTANTIATE_INTERNAL_MARKING_FUNCTIONS_FROM_TRACEKIND(_1, type, _2, \
2528 _3) \
2529 INSTANTIATE_INTERNAL_IS_MARKED_FUNCTION(type*) \
2530 INSTANTIATE_INTERNAL_IATBF_FUNCTION(type*)
2532 JS_FOR_EACH_TRACEKIND(INSTANTIATE_INTERNAL_MARKING_FUNCTIONS_FROM_TRACEKIND)
2534 #define INSTANTIATE_IATBF_FUNCTION_FOR_TAGGED_POINTER(type) \
2535 INSTANTIATE_INTERNAL_IATBF_FUNCTION(const type&)
2537 JS_FOR_EACH_PUBLIC_TAGGED_GC_POINTER_TYPE(
2538 INSTANTIATE_IATBF_FUNCTION_FOR_TAGGED_POINTER)
2540 #undef INSTANTIATE_INTERNAL_IS_MARKED_FUNCTION
2541 #undef INSTANTIATE_INTERNAL_IATBF_FUNCTION
2542 #undef INSTANTIATE_INTERNAL_MARKING_FUNCTIONS_FROM_TRACEKIND
2543 #undef INSTANTIATE_IATBF_FUNCTION_FOR_TAGGED_POINTER
2545 } /* namespace gc */
2546 } /* namespace js */
2548 /*** Cycle Collector Barrier Implementation *********************************/
2550 /*
2551 * The GC and CC are run independently. Consequently, the following sequence of
2552 * events can occur:
2553 * 1. GC runs and marks an object gray.
2554 * 2. The mutator runs (specifically, some C++ code with access to gray
2555 * objects) and creates a pointer from a JS root or other black object to
2556 * the gray object. If we re-ran a GC at this point, the object would now be
2557 * black.
2558 * 3. Now we run the CC. It may think it can collect the gray object, even
2559 * though it's reachable from the JS heap.
2560 *
2561 * To prevent this badness, we unmark the gray bit of an object when it is
2562 * accessed by callers outside XPConnect. This would cause the object to go
2563 * black in step 2 above. This must be done on everything reachable from the
2564 * object being returned. The following code takes care of the recursive
2565 * re-coloring.
2566 *
2567 * There is an additional complication for certain kinds of edges that are not
2568 * contained explicitly in the source object itself, such as from a weakmap key
2569 * to its value. These "implicit edges" are represented in some other
2570 * container object, such as the weakmap itself. In these
2571 * cases, calling unmark gray on an object won't find all of its children.
2572 *
2573 * Handling these implicit edges has two parts:
2574 * - A special pass enumerating all of the containers that know about the
2575 * implicit edges to fix any black-gray edges that have been created. This
2576 * is implemented in nsXPConnect::FixWeakMappingGrayBits.
2577 * - To prevent any incorrectly gray objects from escaping to live JS outside
2578 * of the containers, we must add unmark-graying read barriers to these
2579 * containers.
2580 */
2582 #ifdef DEBUG
2583 struct AssertNonGrayTracer final : public JS::CallbackTracer {
2584 // This is used by the UnmarkGray tracer only, and needs to report itself as
2585 // the non-gray tracer to not trigger assertions. Do not use it in another
2586 // context without making this more generic.
2587 explicit AssertNonGrayTracer(JSRuntime* rt)
2588 : JS::CallbackTracer(rt, JS::TracerKind::UnmarkGray) {}
2589 void onChild(JS::GCCellPtr thing, const char* name) override {
2590 MOZ_ASSERT(!thing.asCell()->isMarkedGray());
2591 }
2592 };
2593 #endif
2595 class js::gc::UnmarkGrayTracer final : public JS::CallbackTracer {
2596 public:
2597 // We set weakMapAction to WeakMapTraceAction::Skip because the cycle
2598 // collector will fix up any color mismatches involving weakmaps when it runs.
2599 explicit UnmarkGrayTracer(GCMarker* marker)
2600 : JS::CallbackTracer(marker->runtime(), JS::TracerKind::UnmarkGray,
2601 JS::WeakMapTraceAction::Skip),
2602 unmarkedAny(false),
2603 oom(false),
2604 marker(marker),
2605 stack(marker->unmarkGrayStack) {}
2607 void unmark(JS::GCCellPtr cell);
2609 // Whether we unmarked anything.
2610 bool unmarkedAny;
2612 // Whether we ran out of memory.
2613 bool oom;
2615 private:
2616 // Marker to use if we need to unmark in zones that are currently being
2617 // marked.
2618 GCMarker* marker;
2620 // Stack of cells to traverse.
2621 Vector<JS::GCCellPtr, 0, SystemAllocPolicy>& stack;
2623 void onChild(JS::GCCellPtr thing, const char* name) override;
2624 };
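// Note: traversal is driven by an explicit stack of cells rather than by
// recursion, which bounds native stack usage on deep object graphs. The
// vector lives on the GCMarker (unmarkGrayStack), so its storage can be
// reused across calls.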
2626 void UnmarkGrayTracer::onChild(JS::GCCellPtr thing, const char* name) {
2627 Cell* cell = thing.asCell();
2629 // Cells in the nursery cannot be gray, nor can certain kinds of tenured
2630 // cells. Such cells must necessarily point only to black cells.
2631 if (!cell->isTenured() || !TraceKindCanBeMarkedGray(thing.kind())) {
2632 #ifdef DEBUG
2633 MOZ_ASSERT(!cell->isMarkedGray());
2634 AssertNonGrayTracer nongray(runtime());
2635 JS::TraceChildren(&nongray, thing);
2636 #endif
2637 return;
2638 }
2640 TenuredCell& tenured = cell->asTenured();
2641 Zone* zone = tenured.zone();
2643 // If the cell is in a zone whose mark bits are being cleared, then it will
2644 // end up white.
2645 if (zone->isGCPreparing()) {
2646 return;
2647 }
2649 // If the cell is in a zone that we're currently marking, then it's possible
2650 // that it is currently white but will end up gray. To handle this case,
2651 // trigger the barrier for any cells in zones that are currently being
2652 // marked. This will ensure they will eventually get marked black.
2653 if (zone->isGCMarking()) {
2654 if (!cell->isMarkedBlack()) {
2655 TraceEdgeForBarrier(marker, &tenured, thing.kind());
2656 unmarkedAny = true;
2657 }
2658 return;
2659 }
2661 if (!tenured.isMarkedGray()) {
2662 return;
2663 }
2665 // TODO: It may be a small improvement to only use the atomic version during
2666 // parallel marking.
2667 tenured.markBlackAtomic();
2668 unmarkedAny = true;
2670 if (!stack.append(thing)) {
2671 oom = true;
2672 }
2673 }
2675 void UnmarkGrayTracer::unmark(JS::GCCellPtr cell) {
2676 MOZ_ASSERT(stack.empty());
2678 onChild(cell, "unmarking root");
2680 while (!stack.empty() && !oom) {
2681 TraceChildren(this, stack.popCopy());
2682 }
2684 if (oom) {
2685 // If we run out of memory, we take a drastic measure: require that we
2686 // GC again before the next CC.
2687 stack.clear();
2688 runtime()->gc.setGrayBitsInvalid();
2689 return;
2690 }
2691 }
2693 bool js::gc::UnmarkGrayGCThingUnchecked(GCMarker* marker, JS::GCCellPtr thing) {
2694 MOZ_ASSERT(thing);
2695 MOZ_ASSERT(thing.asCell()->isMarkedGray());
2697 mozilla::Maybe<AutoGeckoProfilerEntry> profilingStackFrame;
2698 if (JSContext* cx = TlsContext.get()) {
2699 profilingStackFrame.emplace(cx, "UnmarkGrayGCThing",
2700 JS::ProfilingCategoryPair::GCCC_UnmarkGray);
2701 }
2703 UnmarkGrayTracer unmarker(marker);
2704 unmarker.unmark(thing);
2705 return unmarker.unmarkedAny;
2706 }
2708 JS_PUBLIC_API bool JS::UnmarkGrayGCThingRecursively(JS::GCCellPtr thing) {
2709 MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
2710 MOZ_ASSERT(!JS::RuntimeHeapIsCycleCollecting());
2712 JSRuntime* rt = thing.asCell()->runtimeFromMainThread();
2713 if (thing.asCell()->zone()->isGCPreparing()) {
2714 // Mark bits are being cleared in preparation for GC.
2715 return false;
2716 }
2718 return UnmarkGrayGCThingUnchecked(&rt->gc.marker(), thing);
2719 }
2721 void js::gc::UnmarkGrayGCThingRecursively(TenuredCell* cell) {
2722 JS::UnmarkGrayGCThingRecursively(JS::GCCellPtr(cell, cell->getTraceKind()));
2723 }
2725 bool js::UnmarkGrayShapeRecursively(Shape* shape) {
2726 return JS::UnmarkGrayGCThingRecursively(JS::GCCellPtr(shape));
2727 }
2729 #ifdef DEBUG
2730 Cell* js::gc::UninlinedForwarded(const Cell* cell) { return Forwarded(cell); }
2731 #endif
2733 namespace js {
2734 namespace debug {
2736 MarkInfo GetMarkInfo(Cell* rawCell) {
2737 if (!rawCell->isTenured()) {
2738 return MarkInfo::NURSERY;
2739 }
2741 TenuredCell* cell = &rawCell->asTenured();
2742 if (cell->isMarkedGray()) {
2743 return MarkInfo::GRAY;
2744 }
2745 if (cell->isMarkedBlack()) {
2746 return MarkInfo::BLACK;
2747 }
2748 return MarkInfo::UNMARKED;
2749 }
2751 uintptr_t* GetMarkWordAddress(Cell* cell) {
2752 if (!cell->isTenured()) {
2753 return nullptr;
2754 }
2756 MarkBitmapWord* wordp;
2757 uintptr_t mask;
2758 TenuredChunkBase* chunk = gc::detail::GetCellChunkBase(&cell->asTenured());
2759 chunk->markBits.getMarkWordAndMask(&cell->asTenured(), ColorBit::BlackBit,
2760 &wordp, &mask);
2761 return reinterpret_cast<uintptr_t*>(wordp);
2762 }
2764 uintptr_t GetMarkMask(Cell* cell, uint32_t colorBit) {
2765 MOZ_ASSERT(colorBit == 0 || colorBit == 1);
2767 if (!cell->isTenured()) {
2768 return 0;
2769 }
2771 ColorBit bit = colorBit == 0 ? ColorBit::BlackBit : ColorBit::GrayOrBlackBit;
2772 MarkBitmapWord* wordp;
2773 uintptr_t mask;
2774 TenuredChunkBase* chunk = gc::detail::GetCellChunkBase(&cell->asTenured());
2775 chunk->markBits.getMarkWordAndMask(&cell->asTenured(), bit, &wordp, &mask);
2776 return mask;
2777 }
2779 } // namespace debug
2780 } // namespace js