1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 #include "mozilla/Maybe.h"
8 #include "mozilla/Sprintf.h"
14 # include <valgrind/memcheck.h>
17 #include "gc/GCInternals.h"
18 #include "gc/GCLock.h"
19 #include "gc/PublicIterators.h"
20 #include "gc/WeakMap.h"
22 #include "js/friend/DumpFunctions.h" // js::DumpObject
23 #include "js/HashTable.h"
24 #include "vm/JSContext.h"
26 #include "gc/ArenaList-inl.h"
27 #include "gc/GC-inl.h"
28 #include "gc/Marking-inl.h"
29 #include "gc/PrivateIterators-inl.h"
32 using namespace js::gc
;
34 using mozilla::DebugOnly
;
39 * Write barrier verification
41 * The next few functions are for write barrier verification.
43 * The VerifyBarriers function is a shorthand. It checks if a verification phase
44 * is currently running. If not, it starts one. Otherwise, it ends the current
45 * phase and starts a new one.
47 * The user can adjust the frequency of verifications, which causes
48 * VerifyBarriers to be a no-op all but one out of N calls. However, if the
49 * |always| parameter is true, it starts a new phase no matter what.
51 * Pre-Barrier Verifier:
52 * When StartVerifyBarriers is called, a snapshot is taken of all objects in
53 * the GC heap and saved in an explicit graph data structure. Later,
54 * EndVerifyBarriers traverses the heap again. Any pointer values that were in
55 * the snapshot and are no longer found must be marked; otherwise an assertion
56 * triggers. Note that we must not GC in between starting and finishing a
72 HashMap
<Cell
*, VerifyNode
*, DefaultHasher
<Cell
*>, SystemAllocPolicy
>;
75 * The verifier data structures are simple. The entire graph is stored in a
76 * single block of memory. At the beginning is a VerifyNode for the root
77 * node. It is followed by a sequence of EdgeValues--the exact number is given
78 * in the node. After the edges come more nodes and their edges.
80 * The edgeptr and term fields are used to allocate out of the block of memory
81 * for the graph. If we run out of memory (i.e., if edgeptr goes beyond term),
82 * we just abandon the verification.
84 * The nodemap field is a hashtable that maps from the address of the GC thing
85 * to the VerifyNode that represents it.
87 class js::VerifyPreTracer final
: public JS::CallbackTracer
{
88 JS::AutoDisableGenerationalGC noggc
;
90 void onChild(JS::GCCellPtr thing
, const char* name
) override
;
93 /* The gcNumber when the verification began. */
96 /* This counts up to gcZealFrequency to decide whether to verify. */
99 /* This graph represents the initial GC "snapshot". */
106 explicit VerifyPreTracer(JSRuntime
* rt
)
107 : JS::CallbackTracer(rt
, JS::TracerKind::Callback
,
108 JS::WeakEdgeTraceAction::Skip
),
109 noggc(rt
->mainContextFromOwnThread()),
110 number(rt
->gc
.gcNumber()),
116 // We don't care about weak edges here. Since they are not marked they
117 // cannot cause the problem that the pre-write barrier protects against.
120 ~VerifyPreTracer() { js_free(root
); }
124 * This function builds up the heap snapshot by adding edges to the current
127 void VerifyPreTracer::onChild(JS::GCCellPtr thing
, const char* name
) {
128 MOZ_ASSERT(!IsInsideNursery(thing
.asCell()));
130 // Skip things in other runtimes.
131 if (thing
.asCell()->asTenured().runtimeFromAnyThread() != runtime()) {
135 edgeptr
+= sizeof(EdgeValue
);
136 if (edgeptr
>= term
) {
141 VerifyNode
* node
= curnode
;
142 uint32_t i
= node
->count
;
144 node
->edges
[i
].thing
= thing
;
145 node
->edges
[i
].label
= name
;
149 static VerifyNode
* MakeNode(VerifyPreTracer
* trc
, JS::GCCellPtr thing
) {
150 NodeMap::AddPtr p
= trc
->nodemap
.lookupForAdd(thing
.asCell());
152 VerifyNode
* node
= (VerifyNode
*)trc
->edgeptr
;
153 trc
->edgeptr
+= sizeof(VerifyNode
) - sizeof(EdgeValue
);
154 if (trc
->edgeptr
>= trc
->term
) {
155 trc
->edgeptr
= trc
->term
;
161 if (!trc
->nodemap
.add(p
, thing
.asCell(), node
)) {
162 trc
->edgeptr
= trc
->term
;
171 static VerifyNode
* NextNode(VerifyNode
* node
) {
172 if (node
->count
== 0) {
173 return (VerifyNode
*)((char*)node
+ sizeof(VerifyNode
) - sizeof(EdgeValue
));
176 return (VerifyNode
*)((char*)node
+ sizeof(VerifyNode
) +
177 sizeof(EdgeValue
) * (node
->count
- 1));
180 template <typename ZonesIterT
>
181 static void ClearMarkBits(GCRuntime
* gc
) {
182 // This does not clear the mark bits for permanent atoms, whose arenas are
183 // removed from the arena lists by GCRuntime::freezePermanentAtoms.
185 for (ZonesIterT
zone(gc
); !zone
.done(); zone
.next()) {
186 for (auto kind
: AllAllocKinds()) {
187 for (ArenaIter
arena(zone
, kind
); !arena
.done(); arena
.next()) {
194 void gc::GCRuntime::startVerifyPreBarriers() {
195 if (verifyPreData
|| isIncrementalGCInProgress()) {
199 JSContext
* cx
= rt
->mainContextFromOwnThread();
201 if (IsIncrementalGCUnsafe(rt
) != GCAbortReason::None
) {
207 VerifyPreTracer
* trc
= js_new
<VerifyPreTracer
>(rt
);
212 AutoPrepareForTracing
prep(cx
);
214 ClearMarkBits
<AllZonesIter
>(this);
216 gcstats::AutoPhase
ap(stats(), gcstats::PhaseKind::TRACE_HEAP
);
218 const size_t size
= 64 * 1024 * 1024;
219 trc
->root
= (VerifyNode
*)js_malloc(size
);
223 trc
->edgeptr
= (char*)trc
->root
;
224 trc
->term
= trc
->edgeptr
+ size
;
226 /* Create the root node. */
227 trc
->curnode
= MakeNode(trc
, JS::GCCellPtr());
229 MOZ_ASSERT(incrementalState
== State::NotActive
);
230 incrementalState
= State::MarkRoots
;
232 /* Make all the roots be edges emanating from the root node. */
233 traceRuntime(trc
, prep
);
237 if (trc
->edgeptr
== trc
->term
) {
241 /* For each edge, make a node for it if one doesn't already exist. */
242 while ((char*)node
< trc
->edgeptr
) {
243 for (uint32_t i
= 0; i
< node
->count
; i
++) {
244 EdgeValue
& e
= node
->edges
[i
];
245 VerifyNode
* child
= MakeNode(trc
, e
.thing
);
247 trc
->curnode
= child
;
248 JS::TraceChildren(trc
, e
.thing
);
250 if (trc
->edgeptr
== trc
->term
) {
255 node
= NextNode(node
);
259 incrementalState
= State::Mark
;
262 for (ZonesIter
zone(this, WithAtoms
); !zone
.done(); zone
.next()) {
263 zone
->changeGCState(Zone::NoGC
, Zone::VerifyPreBarriers
);
264 zone
->setNeedsIncrementalBarrier(true);
265 zone
->arenas
.clearFreeLists();
271 incrementalState
= State::NotActive
;
273 verifyPreData
= nullptr;
276 static bool IsMarkedOrAllocated(TenuredCell
* cell
) {
277 return cell
->isMarkedAny();
280 struct CheckEdgeTracer final
: public JS::CallbackTracer
{
282 explicit CheckEdgeTracer(JSRuntime
* rt
)
283 : JS::CallbackTracer(rt
), node(nullptr) {}
284 void onChild(JS::GCCellPtr thing
, const char* name
) override
;
287 static const uint32_t MAX_VERIFIER_EDGES
= 1000;
290 * This function is called by EndVerifyBarriers for every heap edge. If the edge
291 * already existed in the original snapshot, we "cancel it out" by overwriting
292 * it with nullptr. EndVerifyBarriers later asserts that the remaining
293 * non-nullptr edges (i.e., the ones from the original snapshot that must have
294 * been modified) must point to marked objects.
296 void CheckEdgeTracer::onChild(JS::GCCellPtr thing
, const char* name
) {
297 // Skip things in other runtimes.
298 if (thing
.asCell()->asTenured().runtimeFromAnyThread() != runtime()) {
302 /* Avoid n^2 behavior. */
303 if (node
->count
> MAX_VERIFIER_EDGES
) {
307 for (uint32_t i
= 0; i
< node
->count
; i
++) {
308 if (node
->edges
[i
].thing
== thing
) {
309 node
->edges
[i
].thing
= JS::GCCellPtr();
315 static bool IsMarkedOrAllocated(const EdgeValue
& edge
) {
316 if (!edge
.thing
|| IsMarkedOrAllocated(&edge
.thing
.asCell()->asTenured())) {
320 // Permanent atoms and well-known symbols aren't marked during graph
322 if (edge
.thing
.is
<JSString
>() &&
323 edge
.thing
.as
<JSString
>().isPermanentAtom()) {
326 if (edge
.thing
.is
<JS::Symbol
>() &&
327 edge
.thing
.as
<JS::Symbol
>().isWellKnownSymbol()) {
334 void gc::GCRuntime::endVerifyPreBarriers() {
335 VerifyPreTracer
* trc
= verifyPreData
;
341 MOZ_ASSERT(!JS::IsGenerationalGCEnabled(rt
));
343 // Now that barrier marking has finished, prepare the heap to allow this
344 // method to trace cells and discover their outgoing edges.
345 AutoPrepareForTracing
prep(rt
->mainContextFromOwnThread());
347 bool compartmentCreated
= false;
349 /* We need to disable barriers before tracing, which may invoke barriers. */
350 for (ZonesIter
zone(this, WithAtoms
); !zone
.done(); zone
.next()) {
351 if (zone
->isVerifyingPreBarriers()) {
352 zone
->changeGCState(Zone::VerifyPreBarriers
, Zone::NoGC
);
354 compartmentCreated
= true;
357 MOZ_ASSERT(!zone
->wasGCStarted());
358 MOZ_ASSERT(!zone
->needsIncrementalBarrier());
361 verifyPreData
= nullptr;
362 MOZ_ASSERT(incrementalState
== State::Mark
);
363 incrementalState
= State::NotActive
;
365 if (!compartmentCreated
&& IsIncrementalGCUnsafe(rt
) == GCAbortReason::None
) {
366 CheckEdgeTracer
cetrc(rt
);
368 /* Start after the roots. */
369 VerifyNode
* node
= NextNode(trc
->root
);
370 while ((char*)node
< trc
->edgeptr
) {
372 JS::TraceChildren(&cetrc
, node
->thing
);
374 if (node
->count
<= MAX_VERIFIER_EDGES
) {
375 for (uint32_t i
= 0; i
< node
->count
; i
++) {
376 EdgeValue
& edge
= node
->edges
[i
];
377 if (!IsMarkedOrAllocated(edge
)) {
381 "[barrier verifier] Unmarked edge: %s %p '%s' edge to %s %p",
382 JS::GCTraceKindToAscii(node
->thing
.kind()),
383 node
->thing
.asCell(), edge
.label
,
384 JS::GCTraceKindToAscii(edge
.thing
.kind()), edge
.thing
.asCell());
385 MOZ_ReportAssertionFailure(msgbuf
, __FILE__
, __LINE__
);
391 node
= NextNode(node
);
396 resetDelayedMarking();
401 /*** Barrier Verifier Scheduling ***/
403 void gc::GCRuntime::verifyPreBarriers() {
405 endVerifyPreBarriers();
407 startVerifyPreBarriers();
411 void gc::VerifyBarriers(JSRuntime
* rt
, VerifierType type
) {
412 if (type
== PreBarrierVerifier
) {
413 rt
->gc
.verifyPreBarriers();
417 void gc::GCRuntime::maybeVerifyPreBarriers(bool always
) {
418 if (!hasZealMode(ZealMode::VerifierPre
)) {
422 if (rt
->mainContextFromOwnThread()->suppressGC
) {
427 if (++verifyPreData
->count
< zealFrequency
&& !always
) {
431 endVerifyPreBarriers();
434 startVerifyPreBarriers();
437 void js::gc::MaybeVerifyBarriers(JSContext
* cx
, bool always
) {
438 GCRuntime
* gc
= &cx
->runtime()->gc
;
439 gc
->maybeVerifyPreBarriers(always
);
// Release the pre-barrier verifier's data and clear the field so a later
// verification run starts from scratch.
// NOTE(review): the embedded original line numbering jumps from 442 to 444
// here, so one line of the upstream function is missing from this extraction
// (possibly a guard) — confirm against the upstream source.
442 void js::gc::GCRuntime::finishVerifier() {
444 js_delete(verifyPreData
.ref());
// Clear the pointer after deletion so stale state cannot be reused.
445 verifyPreData
= nullptr;
// Hash policy for HashMap/HashSet instances keyed on TenuredChunk addresses.
// NOTE(review): this extraction is missing several upstream lines (the
// embedded numbering skips parts of 451-455, 459-460 and everything after
// 463), so part of the comment block and the comparison/return of match()
// are absent — verify against the upstream source.
449 struct GCChunkHasher
{
450 using Lookup
= gc::TenuredChunk
*;
// 453 * Strip zeros for better distribution after multiplying by the golden
// Chunk pointers are chunk-aligned (asserted below), so the always-zero low
// bits are shifted out before hashing.
456 static HashNumber
hash(gc::TenuredChunk
* chunk
) {
457 MOZ_ASSERT(!(uintptr_t(chunk
) & gc::ChunkMask
));
458 return HashNumber(uintptr_t(chunk
) >> gc::ChunkShift
);
// Equality check; both operands must be chunk-aligned pointers.
461 static bool match(gc::TenuredChunk
* k
, gc::TenuredChunk
* l
) {
462 MOZ_ASSERT(!(uintptr_t(k
) & gc::ChunkMask
));
463 MOZ_ASSERT(!(uintptr_t(l
) & gc::ChunkMask
));
// NOTE(review): the body of match() after these assertions is missing from
// this extraction.
468 class js::gc::MarkingValidator
{
470 explicit MarkingValidator(GCRuntime
* gc
);
471 void nonIncrementalMark(AutoGCSession
& session
);
478 using BitmapMap
= HashMap
<TenuredChunk
*, UniquePtr
<MarkBitmap
>, GCChunkHasher
,
483 js::gc::MarkingValidator::MarkingValidator(GCRuntime
* gc
)
484 : gc(gc
), initialized(false) {}
486 void js::gc::MarkingValidator::nonIncrementalMark(AutoGCSession
& session
) {
488 * Perform a non-incremental mark for all collecting zones and record
489 * the results for later comparison.
491 * Currently this does not validate gray marking.
494 JSRuntime
* runtime
= gc
->rt
;
495 GCMarker
* gcmarker
= &gc
->marker();
497 MOZ_ASSERT(!gcmarker
->isWeakMarking());
499 /* Wait for off-thread parsing which can allocate. */
500 WaitForAllHelperThreads();
502 gc
->waitBackgroundAllocEnd();
503 gc
->waitBackgroundSweepEnd();
505 /* Save existing mark bits. */
508 for (auto chunk
= gc
->allNonEmptyChunks(lock
); !chunk
.done();
510 // Bug 1842582: Allocate mark bit buffer in two stages to avoid alignment
511 // restriction which we currently can't support.
512 void* buffer
= js_malloc(sizeof(MarkBitmap
));
516 UniquePtr
<MarkBitmap
> entry(new (buffer
) MarkBitmap
);
518 MarkBitmap
* bitmap
= &chunk
->markBits
;
519 memcpy((void*)entry
->bitmap
, (void*)bitmap
->bitmap
,
520 sizeof(bitmap
->bitmap
));
522 if (!map
.putNew(chunk
, std::move(entry
))) {
529 * Temporarily clear the weakmaps' mark flags for the compartments we are
533 WeakMapColors markedWeakMaps
;
536 * For saving, smush all of the keys into one big table and split them back
537 * up into per-zone tables when restoring.
539 gc::EphemeronEdgeTable
savedEphemeronEdges(
540 SystemAllocPolicy(), runtime
->randomHashCodeScrambler());
541 if (!savedEphemeronEdges
.init()) {
545 for (GCZonesIter
zone(gc
); !zone
.done(); zone
.next()) {
546 if (!WeakMapBase::saveZoneMarkedWeakMaps(zone
, markedWeakMaps
)) {
550 AutoEnterOOMUnsafeRegion oomUnsafe
;
551 for (auto r
= zone
->gcEphemeronEdges().mutableAll(); !r
.empty();
553 MOZ_ASSERT(r
.front().key
->asTenured().zone() == zone
);
554 if (!savedEphemeronEdges
.put(r
.front().key
, std::move(r
.front().value
))) {
555 oomUnsafe
.crash("saving weak keys table for validator");
559 if (!zone
->gcEphemeronEdges().clear()) {
560 oomUnsafe
.crash("clearing weak keys table for validator");
564 /* Save and restore test mark queue state. */
566 size_t savedQueuePos
= gc
->queuePos
;
567 mozilla::Maybe
<MarkColor
> savedQueueColor
= gc
->queueMarkColor
;
571 * After this point, the function should run to completion, so we shouldn't
572 * do anything fallible.
576 /* Re-do all the marking, but non-incrementally. */
577 js::gc::State state
= gc
->incrementalState
;
578 gc
->incrementalState
= State::MarkRoots
;
581 gcstats::AutoPhase
ap(gc
->stats(), gcstats::PhaseKind::PREPARE
);
584 gcstats::AutoPhase
ap(gc
->stats(), gcstats::PhaseKind::UNMARK
);
586 for (GCZonesIter
zone(gc
); !zone
.done(); zone
.next()) {
587 WeakMapBase::unmarkZone(zone
);
590 MOZ_ASSERT(gcmarker
->isDrained());
592 ClearMarkBits
<GCZonesIter
>(gc
);
597 gcstats::AutoPhase
ap(gc
->stats(), gcstats::PhaseKind::MARK
);
599 gc
->traceRuntimeForMajorGC(gcmarker
->tracer(), session
);
601 gc
->incrementalState
= State::Mark
;
602 gc
->drainMarkStack();
605 gc
->incrementalState
= State::Sweep
;
607 gcstats::AutoPhase
ap1(gc
->stats(), gcstats::PhaseKind::SWEEP
);
608 gcstats::AutoPhase
ap2(gc
->stats(), gcstats::PhaseKind::MARK
);
610 gc
->markAllWeakReferences();
612 /* Update zone state for gray marking. */
613 for (GCZonesIter
zone(gc
); !zone
.done(); zone
.next()) {
614 zone
->changeGCState(zone
->initialMarkingState(), Zone::MarkBlackAndGray
);
617 AutoSetMarkColor
setColorGray(*gcmarker
, MarkColor::Gray
);
619 gc
->markAllGrayReferences(gcstats::PhaseKind::MARK_GRAY
);
620 gc
->markAllWeakReferences();
622 /* Restore zone state. */
623 for (GCZonesIter
zone(gc
); !zone
.done(); zone
.next()) {
624 zone
->changeGCState(Zone::MarkBlackAndGray
, zone
->initialMarkingState());
626 MOZ_ASSERT(gc
->marker().isDrained());
629 /* Take a copy of the non-incremental mark state and restore the original. */
632 for (auto chunk
= gc
->allNonEmptyChunks(lock
); !chunk
.done();
634 MarkBitmap
* bitmap
= &chunk
->markBits
;
635 auto ptr
= map
.lookup(chunk
);
636 MOZ_RELEASE_ASSERT(ptr
, "Chunk not found in map");
637 MarkBitmap
* entry
= ptr
->value().get();
638 for (size_t i
= 0; i
< MarkBitmap::WordCount
; i
++) {
639 uintptr_t v
= entry
->bitmap
[i
];
640 entry
->bitmap
[i
] = uintptr_t(bitmap
->bitmap
[i
]);
641 bitmap
->bitmap
[i
] = v
;
646 for (GCZonesIter
zone(gc
); !zone
.done(); zone
.next()) {
647 WeakMapBase::unmarkZone(zone
);
648 AutoEnterOOMUnsafeRegion oomUnsafe
;
649 if (!zone
->gcEphemeronEdges().clear()) {
650 oomUnsafe
.crash("clearing weak keys table for validator");
654 WeakMapBase::restoreMarkedWeakMaps(markedWeakMaps
);
656 for (auto r
= savedEphemeronEdges
.mutableAll(); !r
.empty(); r
.popFront()) {
657 AutoEnterOOMUnsafeRegion oomUnsafe
;
658 Zone
* zone
= r
.front().key
->asTenured().zone();
659 if (!zone
->gcEphemeronEdges().put(r
.front().key
,
660 std::move(r
.front().value
))) {
661 oomUnsafe
.crash("restoring weak keys table for validator");
666 gc
->queuePos
= savedQueuePos
;
667 gc
->queueMarkColor
= savedQueueColor
;
670 gc
->incrementalState
= state
;
673 void js::gc::MarkingValidator::validate() {
675 * Validates the incremental marking for a single compartment by comparing
676 * the mark bits to those previously recorded for a non-incremental mark.
683 MOZ_ASSERT(!gc
->marker().isWeakMarking());
685 gc
->waitBackgroundSweepEnd();
688 AutoLockGC
lock(gc
->rt
);
689 for (auto chunk
= gc
->allNonEmptyChunks(lock
); !chunk
.done(); chunk
.next()) {
690 BitmapMap::Ptr ptr
= map
.lookup(chunk
);
692 continue; /* Allocated after we did the non-incremental mark. */
695 MarkBitmap
* bitmap
= ptr
->value().get();
696 MarkBitmap
* incBitmap
= &chunk
->markBits
;
698 for (size_t i
= 0; i
< ArenasPerChunk
; i
++) {
699 if (chunk
->decommittedPages
[chunk
->pageIndex(i
)]) {
702 Arena
* arena
= &chunk
->arenas
[i
];
703 if (!arena
->allocated()) {
706 if (!arena
->zone
->isGCSweeping()) {
710 AllocKind kind
= arena
->getAllocKind();
711 uintptr_t thing
= arena
->thingsStart();
712 uintptr_t end
= arena
->thingsEnd();
713 while (thing
< end
) {
714 auto* cell
= reinterpret_cast<TenuredCell
*>(thing
);
717 * If a non-incremental GC wouldn't have collected a cell, then
718 * an incremental GC won't collect it.
720 if (bitmap
->isMarkedAny(cell
)) {
721 if (!incBitmap
->isMarkedAny(cell
)) {
724 CellColorName(TenuredCell::getColor(bitmap
, cell
));
726 "%p: cell not marked, but would be marked %s by "
727 "non-incremental marking\n",
731 fprintf(stderr
, "\n");
737 * If the cycle collector isn't allowed to collect an object
738 * after a non-incremental GC has run, then it isn't allowed to
739 * collected it after an incremental GC.
741 if (!bitmap
->isMarkedGray(cell
)) {
742 if (incBitmap
->isMarkedGray(cell
)) {
745 CellColorName(TenuredCell::getColor(bitmap
, cell
));
747 "%p: cell marked gray, but would be marked %s by "
748 "non-incremental marking\n",
752 fprintf(stderr
, "\n");
757 thing
+= Arena::thingSize(kind
);
762 MOZ_RELEASE_ASSERT(ok
, "Incremental marking verification failed");
765 void GCRuntime::computeNonIncrementalMarkingForValidation(
766 AutoGCSession
& session
) {
767 MOZ_ASSERT(!markingValidator
);
768 if (isIncremental
&& hasZealMode(ZealMode::IncrementalMarkingValidator
)) {
769 markingValidator
= js_new
<MarkingValidator
>(this);
771 if (markingValidator
) {
772 markingValidator
->nonIncrementalMark(session
);
776 void GCRuntime::validateIncrementalMarking() {
777 if (markingValidator
) {
778 markingValidator
->validate();
782 void GCRuntime::finishMarkingValidation() {
783 js_delete(markingValidator
.ref());
784 markingValidator
= nullptr;
787 #endif /* JS_GC_ZEAL */
789 #if defined(JS_GC_ZEAL) || defined(DEBUG)
791 class HeapCheckTracerBase
: public JS::CallbackTracer
{
793 explicit HeapCheckTracerBase(JSRuntime
* rt
, JS::TraceOptions options
);
794 bool traceHeap(AutoTraceSession
& session
);
795 virtual void checkCell(Cell
* cell
, const char* name
) = 0;
798 void dumpCellInfo(Cell
* cell
);
799 void dumpCellPath(const char* name
);
802 return parentIndex
== -1 ? nullptr : stack
[parentIndex
].thing
.asCell();
808 void onChild(JS::GCCellPtr thing
, const char* name
) override
;
811 WorkItem(JS::GCCellPtr thing
, const char* name
, int parentIndex
)
814 parentIndex(parentIndex
),
825 HashSet
<Cell
*, DefaultHasher
<Cell
*>, SystemAllocPolicy
> visited
;
826 Vector
<WorkItem
, 0, SystemAllocPolicy
> stack
;
830 HeapCheckTracerBase::HeapCheckTracerBase(JSRuntime
* rt
,
831 JS::TraceOptions options
)
832 : CallbackTracer(rt
, JS::TracerKind::Callback
, options
),
838 void HeapCheckTracerBase::onChild(JS::GCCellPtr thing
, const char* name
) {
839 Cell
* cell
= thing
.asCell();
840 checkCell(cell
, name
);
842 if (visited
.lookup(cell
)) {
846 if (!visited
.put(cell
)) {
851 // Don't trace into GC things owned by another runtime.
852 if (cell
->runtimeFromAnyThread() != rt
) {
856 WorkItem
item(thing
, name
, parentIndex
);
857 if (!stack
.append(item
)) {
862 bool HeapCheckTracerBase::traceHeap(AutoTraceSession
& session
) {
863 // The analysis thinks that traceRuntime might GC by calling a GC callback.
864 JS::AutoSuppressGCAnalysis nogc
;
865 if (!rt
->isBeingDestroyed()) {
866 rt
->gc
.traceRuntime(this, session
);
869 while (!stack
.empty() && !oom
) {
870 WorkItem item
= stack
.back();
871 if (item
.processed
) {
874 MOZ_ASSERT(stack
.length() <= INT_MAX
);
875 parentIndex
= int(stack
.length()) - 1;
876 stack
.back().processed
= true;
877 TraceChildren(this, item
.thing
);
884 void HeapCheckTracerBase::dumpCellInfo(Cell
* cell
) {
885 auto kind
= cell
->getTraceKind();
887 kind
== JS::TraceKind::Object
? static_cast<JSObject
*>(cell
) : nullptr;
889 fprintf(stderr
, "%s %s", CellColorName(cell
->color()),
890 GCTraceKindToAscii(kind
));
892 fprintf(stderr
, " %s", obj
->getClass()->name
);
894 fprintf(stderr
, " %p", cell
);
896 fprintf(stderr
, " (compartment %p)", obj
->compartment());
900 void HeapCheckTracerBase::dumpCellPath(const char* name
) {
901 for (int index
= parentIndex
; index
!= -1; index
= stack
[index
].parentIndex
) {
902 const WorkItem
& parent
= stack
[index
];
903 Cell
* cell
= parent
.thing
.asCell();
904 fprintf(stderr
, " from ");
906 fprintf(stderr
, " %s edge\n", name
);
909 fprintf(stderr
, " from root %s\n", name
);
912 class CheckHeapTracer final
: public HeapCheckTracerBase
{
914 enum GCType
{ Moving
, NonMoving
};
916 explicit CheckHeapTracer(JSRuntime
* rt
, GCType type
);
917 void check(AutoTraceSession
& session
);
920 void checkCell(Cell
* cell
, const char* name
) override
;
924 CheckHeapTracer::CheckHeapTracer(JSRuntime
* rt
, GCType type
)
925 : HeapCheckTracerBase(rt
, JS::WeakMapTraceAction::TraceKeysAndValues
),
928 inline static bool IsValidGCThingPointer(Cell
* cell
) {
929 return (uintptr_t(cell
) & CellAlignMask
) == 0;
932 void CheckHeapTracer::checkCell(Cell
* cell
, const char* name
) {
934 if (!IsValidGCThingPointer(cell
) ||
935 ((gcType
== GCType::Moving
) && !IsGCThingValidAfterMovingGC(cell
)) ||
936 ((gcType
== GCType::NonMoving
) && cell
->isForwarded())) {
938 fprintf(stderr
, "Bad pointer %p\n", cell
);
943 void CheckHeapTracer::check(AutoTraceSession
& session
) {
944 if (!traceHeap(session
)) {
949 fprintf(stderr
, "Heap check: %zu failure(s)\n", failures
);
951 MOZ_RELEASE_ASSERT(failures
== 0);
954 void js::gc::CheckHeapAfterGC(JSRuntime
* rt
) {
955 AutoTraceSession
session(rt
);
956 CheckHeapTracer::GCType gcType
;
958 if (rt
->gc
.nursery().isEmpty()) {
959 gcType
= CheckHeapTracer::GCType::Moving
;
961 gcType
= CheckHeapTracer::GCType::NonMoving
;
964 CheckHeapTracer
tracer(rt
, gcType
);
965 tracer
.check(session
);
968 class CheckGrayMarkingTracer final
: public HeapCheckTracerBase
{
970 explicit CheckGrayMarkingTracer(JSRuntime
* rt
);
971 bool check(AutoTraceSession
& session
);
974 void checkCell(Cell
* cell
, const char* name
) override
;
977 CheckGrayMarkingTracer::CheckGrayMarkingTracer(JSRuntime
* rt
)
978 : HeapCheckTracerBase(rt
, JS::TraceOptions(JS::WeakMapTraceAction::Skip
,
979 JS::WeakEdgeTraceAction::Skip
)) {
980 // Weak gray->black edges are allowed.
983 void CheckGrayMarkingTracer::checkCell(Cell
* cell
, const char* name
) {
984 Cell
* parent
= parentCell();
989 if (parent
->isMarkedBlack() && cell
->isMarkedGray()) {
992 fprintf(stderr
, "Found black to gray edge to ");
994 fprintf(stderr
, "\n");
998 if (parent
->is
<JSObject
>()) {
999 fprintf(stderr
, "\nSource: ");
1000 DumpObject(parent
->as
<JSObject
>(), stderr
);
1002 if (cell
->is
<JSObject
>()) {
1003 fprintf(stderr
, "\nTarget: ");
1004 DumpObject(cell
->as
<JSObject
>(), stderr
);
1010 bool CheckGrayMarkingTracer::check(AutoTraceSession
& session
) {
1011 if (!traceHeap(session
)) {
1012 return true; // Ignore failure.
1015 return failures
== 0;
1018 JS_PUBLIC_API
bool js::CheckGrayMarkingState(JSRuntime
* rt
) {
1019 MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
1020 MOZ_ASSERT(!rt
->gc
.isIncrementalGCInProgress());
1021 if (!rt
->gc
.areGrayBitsValid()) {
1025 gcstats::AutoPhase
ap(rt
->gc
.stats(), gcstats::PhaseKind::TRACE_HEAP
);
1026 AutoTraceSession
session(rt
);
1027 CheckGrayMarkingTracer
tracer(rt
);
1029 return tracer
.check(session
);
1032 static JSObject
* MaybeGetDelegate(Cell
* cell
) {
1033 if (!cell
->is
<JSObject
>()) {
1037 JSObject
* object
= cell
->as
<JSObject
>();
1038 return js::UncheckedUnwrapWithoutExpose(object
);
1041 bool js::gc::CheckWeakMapEntryMarking(const WeakMapBase
* map
, Cell
* key
,
1045 Zone
* zone
= map
->zone();
1046 MOZ_ASSERT(CurrentThreadCanAccessZone(zone
));
1047 MOZ_ASSERT(zone
->isGCMarking());
1049 JSObject
* object
= map
->memberOf
;
1050 MOZ_ASSERT_IF(object
, object
->zone() == zone
);
1052 // Debugger weak maps can have keys in different zones.
1053 Zone
* keyZone
= key
->zoneFromAnyThread();
1054 MOZ_ASSERT_IF(!map
->allowKeysInOtherZones(),
1055 keyZone
== zone
|| keyZone
->isAtomsZone());
1057 Zone
* valueZone
= value
->zoneFromAnyThread();
1058 MOZ_ASSERT(valueZone
== zone
|| valueZone
->isAtomsZone());
1060 if (object
&& object
->color() != map
->mapColor()) {
1061 fprintf(stderr
, "WeakMap object is marked differently to the map\n");
1062 fprintf(stderr
, "(map %p is %s, object %p is %s)\n", map
,
1063 CellColorName(map
->mapColor()), object
,
1064 CellColorName(object
->color()));
1068 // Values belonging to other runtimes or in uncollected zones are treated as
1070 JSRuntime
* mapRuntime
= zone
->runtimeFromAnyThread();
1071 auto effectiveColor
= [=](Cell
* cell
, Zone
* cellZone
) -> CellColor
{
1072 if (cell
->runtimeFromAnyThread() != mapRuntime
) {
1073 return CellColor::Black
;
1075 if (cellZone
->isGCMarkingOrSweeping()) {
1076 return cell
->color();
1078 return CellColor::Black
;
1081 CellColor valueColor
= effectiveColor(value
, valueZone
);
1082 CellColor keyColor
= effectiveColor(key
, keyZone
);
1084 if (valueColor
< std::min(map
->mapColor(), keyColor
)) {
1085 fprintf(stderr
, "WeakMap value is less marked than map and key\n");
1086 fprintf(stderr
, "(map %p is %s, key %p is %s, value %p is %s)\n", map
,
1087 CellColorName(map
->mapColor()), key
, CellColorName(keyColor
), value
,
1088 CellColorName(valueColor
));
1090 fprintf(stderr
, "Key:\n");
1092 if (auto* delegate
= MaybeGetDelegate(key
); delegate
) {
1093 fprintf(stderr
, "Delegate:\n");
1096 fprintf(stderr
, "Value:\n");
1103 JSObject
* delegate
= MaybeGetDelegate(key
);
1108 CellColor delegateColor
= effectiveColor(delegate
, delegate
->zone());
1109 if (keyColor
< std::min(map
->mapColor(), delegateColor
)) {
1110 fprintf(stderr
, "WeakMap key is less marked than map or delegate\n");
1111 fprintf(stderr
, "(map %p is %s, delegate %p is %s, key %p is %s)\n", map
,
1112 CellColorName(map
->mapColor()), delegate
,
1113 CellColorName(delegateColor
), key
, CellColorName(keyColor
));
1120 #endif // defined(JS_GC_ZEAL) || defined(DEBUG)
1122 // Return whether an arbitrary pointer is within a cell with the given
1123 // traceKind. Only for assertions and js::debug::* APIs.
1124 bool GCRuntime::isPointerWithinTenuredCell(void* ptr
, JS::TraceKind traceKind
) {
1125 AutoLockGC
lock(this);
1126 for (auto chunk
= allNonEmptyChunks(lock
); !chunk
.done(); chunk
.next()) {
1127 MOZ_ASSERT(!chunk
->isNurseryChunk());
1128 if (ptr
>= &chunk
->arenas
[0] && ptr
< &chunk
->arenas
[ArenasPerChunk
]) {
1129 auto* arena
= reinterpret_cast<Arena
*>(uintptr_t(ptr
) & ~ArenaMask
);
1130 if (!arena
->allocated()) {
1134 return traceKind
== JS::TraceKind::Null
||
1135 MapAllocToTraceKind(arena
->getAllocKind()) == traceKind
;