Backed out changeset 4191b252db9b (bug 1886734) for causing build bustages @netwerk...
[gecko.git] / js / src / gc / Verifier.cpp
blobbcf90775665c257addd051795bcca9127329a8a4
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 #include "mozilla/Maybe.h"
8 #include "mozilla/Sprintf.h"
10 #include <algorithm>
11 #include <utility>
13 #ifdef MOZ_VALGRIND
14 # include <valgrind/memcheck.h>
15 #endif
17 #include "gc/GCInternals.h"
18 #include "gc/GCLock.h"
19 #include "gc/PublicIterators.h"
20 #include "gc/WeakMap.h"
21 #include "gc/Zone.h"
22 #include "js/friend/DumpFunctions.h" // js::DumpObject
23 #include "js/HashTable.h"
24 #include "vm/JSContext.h"
26 #include "gc/ArenaList-inl.h"
27 #include "gc/GC-inl.h"
28 #include "gc/Marking-inl.h"
29 #include "gc/PrivateIterators-inl.h"
31 using namespace js;
32 using namespace js::gc;
34 using mozilla::DebugOnly;
36 #ifdef JS_GC_ZEAL
/*
 * Write barrier verification
 *
 * The next few functions are for write barrier verification.
 *
 * The VerifyBarriers function is a shorthand. It checks if a verification phase
 * is currently running. If not, it starts one. Otherwise, it ends the current
 * phase and starts a new one.
 *
 * The user can adjust the frequency of verifications, which causes
 * VerifyBarriers to be a no-op all but one out of N calls. However, if the
 * |always| parameter is true, it starts a new phase no matter what.
 *
 * Pre-Barrier Verifier:
 * When StartVerifyBarriers is called, a snapshot is taken of all objects in
 * the GC heap and saved in an explicit graph data structure. Later,
 * EndVerifyBarriers traverses the heap again. Any pointer values that were in
 * the snapshot and are no longer found must be marked; otherwise an assertion
 * triggers. Note that we must not GC in between starting and finishing a
 * verification phase.
 */
60 struct EdgeValue {
61 JS::GCCellPtr thing;
62 const char* label;
65 struct VerifyNode {
66 JS::GCCellPtr thing;
67 uint32_t count = 0;
68 EdgeValue edges[1];
71 using NodeMap =
72 HashMap<Cell*, VerifyNode*, DefaultHasher<Cell*>, SystemAllocPolicy>;
/*
 * The verifier data structures are simple. The entire graph is stored in a
 * single block of memory. At the beginning is a VerifyNode for the root
 * node. It is followed by a sequence of EdgeValues--the exact number is given
 * in the node. After the edges come more nodes and their edges.
 *
 * The edgeptr and term fields are used to allocate out of the block of memory
 * for the graph. If we run out of memory (i.e., if edgeptr goes beyond term),
 * we just abandon the verification.
 *
 * The nodemap field is a hashtable that maps from the address of the GC thing
 * to the VerifyNode that represents it.
 */
87 class js::VerifyPreTracer final : public JS::CallbackTracer {
88 JS::AutoDisableGenerationalGC noggc;
90 void onChild(JS::GCCellPtr thing, const char* name) override;
92 public:
93 /* The gcNumber when the verification began. */
94 uint64_t number;
96 /* This counts up to gcZealFrequency to decide whether to verify. */
97 int count;
99 /* This graph represents the initial GC "snapshot". */
100 VerifyNode* curnode;
101 VerifyNode* root;
102 char* edgeptr;
103 char* term;
104 NodeMap nodemap;
106 explicit VerifyPreTracer(JSRuntime* rt)
107 : JS::CallbackTracer(rt, JS::TracerKind::Callback,
108 JS::WeakEdgeTraceAction::Skip),
109 noggc(rt->mainContextFromOwnThread()),
110 number(rt->gc.gcNumber()),
111 count(0),
112 curnode(nullptr),
113 root(nullptr),
114 edgeptr(nullptr),
115 term(nullptr) {
116 // We don't care about weak edges here. Since they are not marked they
117 // cannot cause the problem that the pre-write barrier protects against.
120 ~VerifyPreTracer() { js_free(root); }
124 * This function builds up the heap snapshot by adding edges to the current
125 * node.
127 void VerifyPreTracer::onChild(JS::GCCellPtr thing, const char* name) {
128 MOZ_ASSERT(!IsInsideNursery(thing.asCell()));
130 // Skip things in other runtimes.
131 if (thing.asCell()->asTenured().runtimeFromAnyThread() != runtime()) {
132 return;
135 edgeptr += sizeof(EdgeValue);
136 if (edgeptr >= term) {
137 edgeptr = term;
138 return;
141 VerifyNode* node = curnode;
142 uint32_t i = node->count;
144 node->edges[i].thing = thing;
145 node->edges[i].label = name;
146 node->count++;
149 static VerifyNode* MakeNode(VerifyPreTracer* trc, JS::GCCellPtr thing) {
150 NodeMap::AddPtr p = trc->nodemap.lookupForAdd(thing.asCell());
151 if (!p) {
152 VerifyNode* node = (VerifyNode*)trc->edgeptr;
153 trc->edgeptr += sizeof(VerifyNode) - sizeof(EdgeValue);
154 if (trc->edgeptr >= trc->term) {
155 trc->edgeptr = trc->term;
156 return nullptr;
159 node->thing = thing;
160 node->count = 0;
161 if (!trc->nodemap.add(p, thing.asCell(), node)) {
162 trc->edgeptr = trc->term;
163 return nullptr;
166 return node;
168 return nullptr;
171 static VerifyNode* NextNode(VerifyNode* node) {
172 if (node->count == 0) {
173 return (VerifyNode*)((char*)node + sizeof(VerifyNode) - sizeof(EdgeValue));
176 return (VerifyNode*)((char*)node + sizeof(VerifyNode) +
177 sizeof(EdgeValue) * (node->count - 1));
180 template <typename ZonesIterT>
181 static void ClearMarkBits(GCRuntime* gc) {
182 // This does not clear the mark bits for permanent atoms, whose arenas are
183 // removed from the arena lists by GCRuntime::freezePermanentAtoms.
185 for (ZonesIterT zone(gc); !zone.done(); zone.next()) {
186 for (auto kind : AllAllocKinds()) {
187 for (ArenaIter arena(zone, kind); !arena.done(); arena.next()) {
188 arena->unmarkAll();
194 void gc::GCRuntime::startVerifyPreBarriers() {
195 if (verifyPreData || isIncrementalGCInProgress()) {
196 return;
199 JSContext* cx = rt->mainContextFromOwnThread();
201 if (IsIncrementalGCUnsafe(rt) != GCAbortReason::None) {
202 return;
205 number++;
207 VerifyPreTracer* trc = js_new<VerifyPreTracer>(rt);
208 if (!trc) {
209 return;
212 AutoPrepareForTracing prep(cx);
214 ClearMarkBits<AllZonesIter>(this);
216 gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::TRACE_HEAP);
218 const size_t size = 64 * 1024 * 1024;
219 trc->root = (VerifyNode*)js_malloc(size);
220 if (!trc->root) {
221 goto oom;
223 trc->edgeptr = (char*)trc->root;
224 trc->term = trc->edgeptr + size;
226 /* Create the root node. */
227 trc->curnode = MakeNode(trc, JS::GCCellPtr());
229 MOZ_ASSERT(incrementalState == State::NotActive);
230 incrementalState = State::MarkRoots;
232 /* Make all the roots be edges emanating from the root node. */
233 traceRuntime(trc, prep);
235 VerifyNode* node;
236 node = trc->curnode;
237 if (trc->edgeptr == trc->term) {
238 goto oom;
241 /* For each edge, make a node for it if one doesn't already exist. */
242 while ((char*)node < trc->edgeptr) {
243 for (uint32_t i = 0; i < node->count; i++) {
244 EdgeValue& e = node->edges[i];
245 VerifyNode* child = MakeNode(trc, e.thing);
246 if (child) {
247 trc->curnode = child;
248 JS::TraceChildren(trc, e.thing);
250 if (trc->edgeptr == trc->term) {
251 goto oom;
255 node = NextNode(node);
258 verifyPreData = trc;
259 incrementalState = State::Mark;
260 marker().start();
262 for (ZonesIter zone(this, WithAtoms); !zone.done(); zone.next()) {
263 zone->changeGCState(Zone::NoGC, Zone::VerifyPreBarriers);
264 zone->setNeedsIncrementalBarrier(true);
265 zone->arenas.clearFreeLists();
268 return;
270 oom:
271 incrementalState = State::NotActive;
272 js_delete(trc);
273 verifyPreData = nullptr;
276 static bool IsMarkedOrAllocated(TenuredCell* cell) {
277 return cell->isMarkedAny();
280 struct CheckEdgeTracer final : public JS::CallbackTracer {
281 VerifyNode* node;
282 explicit CheckEdgeTracer(JSRuntime* rt)
283 : JS::CallbackTracer(rt), node(nullptr) {}
284 void onChild(JS::GCCellPtr thing, const char* name) override;
287 static const uint32_t MAX_VERIFIER_EDGES = 1000;
290 * This function is called by EndVerifyBarriers for every heap edge. If the edge
291 * already existed in the original snapshot, we "cancel it out" by overwriting
292 * it with nullptr. EndVerifyBarriers later asserts that the remaining
293 * non-nullptr edges (i.e., the ones from the original snapshot that must have
294 * been modified) must point to marked objects.
296 void CheckEdgeTracer::onChild(JS::GCCellPtr thing, const char* name) {
297 // Skip things in other runtimes.
298 if (thing.asCell()->asTenured().runtimeFromAnyThread() != runtime()) {
299 return;
302 /* Avoid n^2 behavior. */
303 if (node->count > MAX_VERIFIER_EDGES) {
304 return;
307 for (uint32_t i = 0; i < node->count; i++) {
308 if (node->edges[i].thing == thing) {
309 node->edges[i].thing = JS::GCCellPtr();
310 return;
315 static bool IsMarkedOrAllocated(const EdgeValue& edge) {
316 if (!edge.thing || IsMarkedOrAllocated(&edge.thing.asCell()->asTenured())) {
317 return true;
320 // Permanent atoms and well-known symbols aren't marked during graph
321 // traversal.
322 if (edge.thing.is<JSString>() &&
323 edge.thing.as<JSString>().isPermanentAtom()) {
324 return true;
326 if (edge.thing.is<JS::Symbol>() &&
327 edge.thing.as<JS::Symbol>().isWellKnownSymbol()) {
328 return true;
331 return false;
334 void gc::GCRuntime::endVerifyPreBarriers() {
335 VerifyPreTracer* trc = verifyPreData;
337 if (!trc) {
338 return;
341 MOZ_ASSERT(!JS::IsGenerationalGCEnabled(rt));
343 // Now that barrier marking has finished, prepare the heap to allow this
344 // method to trace cells and discover their outgoing edges.
345 AutoPrepareForTracing prep(rt->mainContextFromOwnThread());
347 bool compartmentCreated = false;
349 /* We need to disable barriers before tracing, which may invoke barriers. */
350 for (ZonesIter zone(this, WithAtoms); !zone.done(); zone.next()) {
351 if (zone->isVerifyingPreBarriers()) {
352 zone->changeGCState(Zone::VerifyPreBarriers, Zone::NoGC);
353 } else {
354 compartmentCreated = true;
357 MOZ_ASSERT(!zone->wasGCStarted());
358 MOZ_ASSERT(!zone->needsIncrementalBarrier());
361 verifyPreData = nullptr;
362 MOZ_ASSERT(incrementalState == State::Mark);
363 incrementalState = State::NotActive;
365 if (!compartmentCreated && IsIncrementalGCUnsafe(rt) == GCAbortReason::None) {
366 CheckEdgeTracer cetrc(rt);
368 /* Start after the roots. */
369 VerifyNode* node = NextNode(trc->root);
370 while ((char*)node < trc->edgeptr) {
371 cetrc.node = node;
372 JS::TraceChildren(&cetrc, node->thing);
374 if (node->count <= MAX_VERIFIER_EDGES) {
375 for (uint32_t i = 0; i < node->count; i++) {
376 EdgeValue& edge = node->edges[i];
377 if (!IsMarkedOrAllocated(edge)) {
378 char msgbuf[1024];
379 SprintfLiteral(
380 msgbuf,
381 "[barrier verifier] Unmarked edge: %s %p '%s' edge to %s %p",
382 JS::GCTraceKindToAscii(node->thing.kind()),
383 node->thing.asCell(), edge.label,
384 JS::GCTraceKindToAscii(edge.thing.kind()), edge.thing.asCell());
385 MOZ_ReportAssertionFailure(msgbuf, __FILE__, __LINE__);
386 MOZ_CRASH();
391 node = NextNode(node);
395 marker().reset();
396 resetDelayedMarking();
398 js_delete(trc);
401 /*** Barrier Verifier Scheduling ***/
403 void gc::GCRuntime::verifyPreBarriers() {
404 if (verifyPreData) {
405 endVerifyPreBarriers();
406 } else {
407 startVerifyPreBarriers();
411 void gc::VerifyBarriers(JSRuntime* rt, VerifierType type) {
412 if (type == PreBarrierVerifier) {
413 rt->gc.verifyPreBarriers();
417 void gc::GCRuntime::maybeVerifyPreBarriers(bool always) {
418 if (!hasZealMode(ZealMode::VerifierPre)) {
419 return;
422 if (rt->mainContextFromOwnThread()->suppressGC) {
423 return;
426 if (verifyPreData) {
427 if (++verifyPreData->count < zealFrequency && !always) {
428 return;
431 endVerifyPreBarriers();
434 startVerifyPreBarriers();
437 void js::gc::MaybeVerifyBarriers(JSContext* cx, bool always) {
438 GCRuntime* gc = &cx->runtime()->gc;
439 gc->maybeVerifyPreBarriers(always);
442 void js::gc::GCRuntime::finishVerifier() {
443 if (verifyPreData) {
444 js_delete(verifyPreData.ref());
445 verifyPreData = nullptr;
449 struct GCChunkHasher {
450 using Lookup = gc::TenuredChunk*;
453 * Strip zeros for better distribution after multiplying by the golden
454 * ratio.
456 static HashNumber hash(gc::TenuredChunk* chunk) {
457 MOZ_ASSERT(!(uintptr_t(chunk) & gc::ChunkMask));
458 return HashNumber(uintptr_t(chunk) >> gc::ChunkShift);
461 static bool match(gc::TenuredChunk* k, gc::TenuredChunk* l) {
462 MOZ_ASSERT(!(uintptr_t(k) & gc::ChunkMask));
463 MOZ_ASSERT(!(uintptr_t(l) & gc::ChunkMask));
464 return k == l;
468 class js::gc::MarkingValidator {
469 public:
470 explicit MarkingValidator(GCRuntime* gc);
471 void nonIncrementalMark(AutoGCSession& session);
472 void validate();
474 private:
475 GCRuntime* gc;
476 bool initialized;
478 using BitmapMap = HashMap<TenuredChunk*, UniquePtr<MarkBitmap>, GCChunkHasher,
479 SystemAllocPolicy>;
480 BitmapMap map;
483 js::gc::MarkingValidator::MarkingValidator(GCRuntime* gc)
484 : gc(gc), initialized(false) {}
486 void js::gc::MarkingValidator::nonIncrementalMark(AutoGCSession& session) {
488 * Perform a non-incremental mark for all collecting zones and record
489 * the results for later comparison.
491 * Currently this does not validate gray marking.
494 JSRuntime* runtime = gc->rt;
495 GCMarker* gcmarker = &gc->marker();
497 MOZ_ASSERT(!gcmarker->isWeakMarking());
499 /* Wait for off-thread parsing which can allocate. */
500 WaitForAllHelperThreads();
502 gc->waitBackgroundAllocEnd();
503 gc->waitBackgroundSweepEnd();
505 /* Save existing mark bits. */
507 AutoLockGC lock(gc);
508 for (auto chunk = gc->allNonEmptyChunks(lock); !chunk.done();
509 chunk.next()) {
510 // Bug 1842582: Allocate mark bit buffer in two stages to avoid alignment
511 // restriction which we currently can't support.
512 void* buffer = js_malloc(sizeof(MarkBitmap));
513 if (!buffer) {
514 return;
516 UniquePtr<MarkBitmap> entry(new (buffer) MarkBitmap);
518 MarkBitmap* bitmap = &chunk->markBits;
519 memcpy((void*)entry->bitmap, (void*)bitmap->bitmap,
520 sizeof(bitmap->bitmap));
522 if (!map.putNew(chunk, std::move(entry))) {
523 return;
529 * Temporarily clear the weakmaps' mark flags for the compartments we are
530 * collecting.
533 WeakMapColors markedWeakMaps;
536 * For saving, smush all of the keys into one big table and split them back
537 * up into per-zone tables when restoring.
539 gc::EphemeronEdgeTable savedEphemeronEdges(
540 SystemAllocPolicy(), runtime->randomHashCodeScrambler());
541 if (!savedEphemeronEdges.init()) {
542 return;
545 for (GCZonesIter zone(gc); !zone.done(); zone.next()) {
546 if (!WeakMapBase::saveZoneMarkedWeakMaps(zone, markedWeakMaps)) {
547 return;
550 AutoEnterOOMUnsafeRegion oomUnsafe;
551 for (auto r = zone->gcEphemeronEdges().mutableAll(); !r.empty();
552 r.popFront()) {
553 MOZ_ASSERT(r.front().key->asTenured().zone() == zone);
554 if (!savedEphemeronEdges.put(r.front().key, std::move(r.front().value))) {
555 oomUnsafe.crash("saving weak keys table for validator");
559 if (!zone->gcEphemeronEdges().clear()) {
560 oomUnsafe.crash("clearing weak keys table for validator");
564 /* Save and restore test mark queue state. */
565 # ifdef DEBUG
566 size_t savedQueuePos = gc->queuePos;
567 mozilla::Maybe<MarkColor> savedQueueColor = gc->queueMarkColor;
568 # endif
571 * After this point, the function should run to completion, so we shouldn't
572 * do anything fallible.
574 initialized = true;
576 /* Re-do all the marking, but non-incrementally. */
577 js::gc::State state = gc->incrementalState;
578 gc->incrementalState = State::MarkRoots;
581 gcstats::AutoPhase ap(gc->stats(), gcstats::PhaseKind::PREPARE);
584 gcstats::AutoPhase ap(gc->stats(), gcstats::PhaseKind::UNMARK);
586 for (GCZonesIter zone(gc); !zone.done(); zone.next()) {
587 WeakMapBase::unmarkZone(zone);
590 MOZ_ASSERT(gcmarker->isDrained());
592 ClearMarkBits<GCZonesIter>(gc);
597 gcstats::AutoPhase ap(gc->stats(), gcstats::PhaseKind::MARK);
599 gc->traceRuntimeForMajorGC(gcmarker->tracer(), session);
601 gc->incrementalState = State::Mark;
602 gc->drainMarkStack();
605 gc->incrementalState = State::Sweep;
607 gcstats::AutoPhase ap1(gc->stats(), gcstats::PhaseKind::SWEEP);
608 gcstats::AutoPhase ap2(gc->stats(), gcstats::PhaseKind::MARK);
610 gc->markAllWeakReferences();
612 /* Update zone state for gray marking. */
613 for (GCZonesIter zone(gc); !zone.done(); zone.next()) {
614 zone->changeGCState(zone->initialMarkingState(), Zone::MarkBlackAndGray);
617 AutoSetMarkColor setColorGray(*gcmarker, MarkColor::Gray);
619 gc->markAllGrayReferences(gcstats::PhaseKind::MARK_GRAY);
620 gc->markAllWeakReferences();
622 /* Restore zone state. */
623 for (GCZonesIter zone(gc); !zone.done(); zone.next()) {
624 zone->changeGCState(Zone::MarkBlackAndGray, zone->initialMarkingState());
626 MOZ_ASSERT(gc->marker().isDrained());
629 /* Take a copy of the non-incremental mark state and restore the original. */
631 AutoLockGC lock(gc);
632 for (auto chunk = gc->allNonEmptyChunks(lock); !chunk.done();
633 chunk.next()) {
634 MarkBitmap* bitmap = &chunk->markBits;
635 auto ptr = map.lookup(chunk);
636 MOZ_RELEASE_ASSERT(ptr, "Chunk not found in map");
637 MarkBitmap* entry = ptr->value().get();
638 for (size_t i = 0; i < MarkBitmap::WordCount; i++) {
639 uintptr_t v = entry->bitmap[i];
640 entry->bitmap[i] = uintptr_t(bitmap->bitmap[i]);
641 bitmap->bitmap[i] = v;
646 for (GCZonesIter zone(gc); !zone.done(); zone.next()) {
647 WeakMapBase::unmarkZone(zone);
648 AutoEnterOOMUnsafeRegion oomUnsafe;
649 if (!zone->gcEphemeronEdges().clear()) {
650 oomUnsafe.crash("clearing weak keys table for validator");
654 WeakMapBase::restoreMarkedWeakMaps(markedWeakMaps);
656 for (auto r = savedEphemeronEdges.mutableAll(); !r.empty(); r.popFront()) {
657 AutoEnterOOMUnsafeRegion oomUnsafe;
658 Zone* zone = r.front().key->asTenured().zone();
659 if (!zone->gcEphemeronEdges().put(r.front().key,
660 std::move(r.front().value))) {
661 oomUnsafe.crash("restoring weak keys table for validator");
665 # ifdef DEBUG
666 gc->queuePos = savedQueuePos;
667 gc->queueMarkColor = savedQueueColor;
668 # endif
670 gc->incrementalState = state;
673 void js::gc::MarkingValidator::validate() {
675 * Validates the incremental marking for a single compartment by comparing
676 * the mark bits to those previously recorded for a non-incremental mark.
679 if (!initialized) {
680 return;
683 MOZ_ASSERT(!gc->marker().isWeakMarking());
685 gc->waitBackgroundSweepEnd();
687 bool ok = true;
688 AutoLockGC lock(gc->rt);
689 for (auto chunk = gc->allNonEmptyChunks(lock); !chunk.done(); chunk.next()) {
690 BitmapMap::Ptr ptr = map.lookup(chunk);
691 if (!ptr) {
692 continue; /* Allocated after we did the non-incremental mark. */
695 MarkBitmap* bitmap = ptr->value().get();
696 MarkBitmap* incBitmap = &chunk->markBits;
698 for (size_t i = 0; i < ArenasPerChunk; i++) {
699 if (chunk->decommittedPages[chunk->pageIndex(i)]) {
700 continue;
702 Arena* arena = &chunk->arenas[i];
703 if (!arena->allocated()) {
704 continue;
706 if (!arena->zone->isGCSweeping()) {
707 continue;
710 AllocKind kind = arena->getAllocKind();
711 uintptr_t thing = arena->thingsStart();
712 uintptr_t end = arena->thingsEnd();
713 while (thing < end) {
714 auto* cell = reinterpret_cast<TenuredCell*>(thing);
717 * If a non-incremental GC wouldn't have collected a cell, then
718 * an incremental GC won't collect it.
720 if (bitmap->isMarkedAny(cell)) {
721 if (!incBitmap->isMarkedAny(cell)) {
722 ok = false;
723 const char* color =
724 CellColorName(TenuredCell::getColor(bitmap, cell));
725 fprintf(stderr,
726 "%p: cell not marked, but would be marked %s by "
727 "non-incremental marking\n",
728 cell, color);
729 # ifdef DEBUG
730 cell->dump();
731 fprintf(stderr, "\n");
732 # endif
737 * If the cycle collector isn't allowed to collect an object
738 * after a non-incremental GC has run, then it isn't allowed to
739 * collected it after an incremental GC.
741 if (!bitmap->isMarkedGray(cell)) {
742 if (incBitmap->isMarkedGray(cell)) {
743 ok = false;
744 const char* color =
745 CellColorName(TenuredCell::getColor(bitmap, cell));
746 fprintf(stderr,
747 "%p: cell marked gray, but would be marked %s by "
748 "non-incremental marking\n",
749 cell, color);
750 # ifdef DEBUG
751 cell->dump();
752 fprintf(stderr, "\n");
753 # endif
757 thing += Arena::thingSize(kind);
762 MOZ_RELEASE_ASSERT(ok, "Incremental marking verification failed");
765 void GCRuntime::computeNonIncrementalMarkingForValidation(
766 AutoGCSession& session) {
767 MOZ_ASSERT(!markingValidator);
768 if (isIncremental && hasZealMode(ZealMode::IncrementalMarkingValidator)) {
769 markingValidator = js_new<MarkingValidator>(this);
771 if (markingValidator) {
772 markingValidator->nonIncrementalMark(session);
776 void GCRuntime::validateIncrementalMarking() {
777 if (markingValidator) {
778 markingValidator->validate();
782 void GCRuntime::finishMarkingValidation() {
783 js_delete(markingValidator.ref());
784 markingValidator = nullptr;
787 #endif /* JS_GC_ZEAL */
789 #if defined(JS_GC_ZEAL) || defined(DEBUG)
791 class HeapCheckTracerBase : public JS::CallbackTracer {
792 public:
793 explicit HeapCheckTracerBase(JSRuntime* rt, JS::TraceOptions options);
794 bool traceHeap(AutoTraceSession& session);
795 virtual void checkCell(Cell* cell, const char* name) = 0;
797 protected:
798 void dumpCellInfo(Cell* cell);
799 void dumpCellPath(const char* name);
801 Cell* parentCell() {
802 return parentIndex == -1 ? nullptr : stack[parentIndex].thing.asCell();
805 size_t failures;
807 private:
808 void onChild(JS::GCCellPtr thing, const char* name) override;
810 struct WorkItem {
811 WorkItem(JS::GCCellPtr thing, const char* name, int parentIndex)
812 : thing(thing),
813 name(name),
814 parentIndex(parentIndex),
815 processed(false) {}
817 JS::GCCellPtr thing;
818 const char* name;
819 int parentIndex;
820 bool processed;
823 JSRuntime* rt;
824 bool oom;
825 HashSet<Cell*, DefaultHasher<Cell*>, SystemAllocPolicy> visited;
826 Vector<WorkItem, 0, SystemAllocPolicy> stack;
827 int parentIndex;
830 HeapCheckTracerBase::HeapCheckTracerBase(JSRuntime* rt,
831 JS::TraceOptions options)
832 : CallbackTracer(rt, JS::TracerKind::Callback, options),
833 failures(0),
834 rt(rt),
835 oom(false),
836 parentIndex(-1) {}
838 void HeapCheckTracerBase::onChild(JS::GCCellPtr thing, const char* name) {
839 Cell* cell = thing.asCell();
840 checkCell(cell, name);
842 if (visited.lookup(cell)) {
843 return;
846 if (!visited.put(cell)) {
847 oom = true;
848 return;
851 // Don't trace into GC things owned by another runtime.
852 if (cell->runtimeFromAnyThread() != rt) {
853 return;
856 WorkItem item(thing, name, parentIndex);
857 if (!stack.append(item)) {
858 oom = true;
862 bool HeapCheckTracerBase::traceHeap(AutoTraceSession& session) {
863 // The analysis thinks that traceRuntime might GC by calling a GC callback.
864 JS::AutoSuppressGCAnalysis nogc;
865 if (!rt->isBeingDestroyed()) {
866 rt->gc.traceRuntime(this, session);
869 while (!stack.empty() && !oom) {
870 WorkItem item = stack.back();
871 if (item.processed) {
872 stack.popBack();
873 } else {
874 MOZ_ASSERT(stack.length() <= INT_MAX);
875 parentIndex = int(stack.length()) - 1;
876 stack.back().processed = true;
877 TraceChildren(this, item.thing);
881 return !oom;
884 void HeapCheckTracerBase::dumpCellInfo(Cell* cell) {
885 auto kind = cell->getTraceKind();
886 JSObject* obj =
887 kind == JS::TraceKind::Object ? static_cast<JSObject*>(cell) : nullptr;
889 fprintf(stderr, "%s %s", CellColorName(cell->color()),
890 GCTraceKindToAscii(kind));
891 if (obj) {
892 fprintf(stderr, " %s", obj->getClass()->name);
894 fprintf(stderr, " %p", cell);
895 if (obj) {
896 fprintf(stderr, " (compartment %p)", obj->compartment());
900 void HeapCheckTracerBase::dumpCellPath(const char* name) {
901 for (int index = parentIndex; index != -1; index = stack[index].parentIndex) {
902 const WorkItem& parent = stack[index];
903 Cell* cell = parent.thing.asCell();
904 fprintf(stderr, " from ");
905 dumpCellInfo(cell);
906 fprintf(stderr, " %s edge\n", name);
907 name = parent.name;
909 fprintf(stderr, " from root %s\n", name);
912 class CheckHeapTracer final : public HeapCheckTracerBase {
913 public:
914 enum GCType { Moving, NonMoving };
916 explicit CheckHeapTracer(JSRuntime* rt, GCType type);
917 void check(AutoTraceSession& session);
919 private:
920 void checkCell(Cell* cell, const char* name) override;
921 GCType gcType;
924 CheckHeapTracer::CheckHeapTracer(JSRuntime* rt, GCType type)
925 : HeapCheckTracerBase(rt, JS::WeakMapTraceAction::TraceKeysAndValues),
926 gcType(type) {}
928 inline static bool IsValidGCThingPointer(Cell* cell) {
929 return (uintptr_t(cell) & CellAlignMask) == 0;
932 void CheckHeapTracer::checkCell(Cell* cell, const char* name) {
933 // Moving
934 if (!IsValidGCThingPointer(cell) ||
935 ((gcType == GCType::Moving) && !IsGCThingValidAfterMovingGC(cell)) ||
936 ((gcType == GCType::NonMoving) && cell->isForwarded())) {
937 failures++;
938 fprintf(stderr, "Bad pointer %p\n", cell);
939 dumpCellPath(name);
943 void CheckHeapTracer::check(AutoTraceSession& session) {
944 if (!traceHeap(session)) {
945 return;
948 if (failures) {
949 fprintf(stderr, "Heap check: %zu failure(s)\n", failures);
951 MOZ_RELEASE_ASSERT(failures == 0);
954 void js::gc::CheckHeapAfterGC(JSRuntime* rt) {
955 AutoTraceSession session(rt);
956 CheckHeapTracer::GCType gcType;
958 if (rt->gc.nursery().isEmpty()) {
959 gcType = CheckHeapTracer::GCType::Moving;
960 } else {
961 gcType = CheckHeapTracer::GCType::NonMoving;
964 CheckHeapTracer tracer(rt, gcType);
965 tracer.check(session);
968 class CheckGrayMarkingTracer final : public HeapCheckTracerBase {
969 public:
970 explicit CheckGrayMarkingTracer(JSRuntime* rt);
971 bool check(AutoTraceSession& session);
973 private:
974 void checkCell(Cell* cell, const char* name) override;
977 CheckGrayMarkingTracer::CheckGrayMarkingTracer(JSRuntime* rt)
978 : HeapCheckTracerBase(rt, JS::TraceOptions(JS::WeakMapTraceAction::Skip,
979 JS::WeakEdgeTraceAction::Skip)) {
980 // Weak gray->black edges are allowed.
983 void CheckGrayMarkingTracer::checkCell(Cell* cell, const char* name) {
984 Cell* parent = parentCell();
985 if (!parent) {
986 return;
989 if (parent->isMarkedBlack() && cell->isMarkedGray()) {
990 failures++;
992 fprintf(stderr, "Found black to gray edge to ");
993 dumpCellInfo(cell);
994 fprintf(stderr, "\n");
995 dumpCellPath(name);
997 # ifdef DEBUG
998 if (parent->is<JSObject>()) {
999 fprintf(stderr, "\nSource: ");
1000 DumpObject(parent->as<JSObject>(), stderr);
1002 if (cell->is<JSObject>()) {
1003 fprintf(stderr, "\nTarget: ");
1004 DumpObject(cell->as<JSObject>(), stderr);
1006 # endif
1010 bool CheckGrayMarkingTracer::check(AutoTraceSession& session) {
1011 if (!traceHeap(session)) {
1012 return true; // Ignore failure.
1015 return failures == 0;
1018 JS_PUBLIC_API bool js::CheckGrayMarkingState(JSRuntime* rt) {
1019 MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
1020 MOZ_ASSERT(!rt->gc.isIncrementalGCInProgress());
1021 if (!rt->gc.areGrayBitsValid()) {
1022 return true;
1025 gcstats::AutoPhase ap(rt->gc.stats(), gcstats::PhaseKind::TRACE_HEAP);
1026 AutoTraceSession session(rt);
1027 CheckGrayMarkingTracer tracer(rt);
1029 return tracer.check(session);
1032 static JSObject* MaybeGetDelegate(Cell* cell) {
1033 if (!cell->is<JSObject>()) {
1034 return nullptr;
1037 JSObject* object = cell->as<JSObject>();
1038 return js::UncheckedUnwrapWithoutExpose(object);
1041 bool js::gc::CheckWeakMapEntryMarking(const WeakMapBase* map, Cell* key,
1042 Cell* value) {
1043 bool ok = true;
1045 Zone* zone = map->zone();
1046 MOZ_ASSERT(CurrentThreadCanAccessZone(zone));
1047 MOZ_ASSERT(zone->isGCMarking());
1049 JSObject* object = map->memberOf;
1050 MOZ_ASSERT_IF(object, object->zone() == zone);
1052 // Debugger weak maps can have keys in different zones.
1053 Zone* keyZone = key->zoneFromAnyThread();
1054 MOZ_ASSERT_IF(!map->allowKeysInOtherZones(),
1055 keyZone == zone || keyZone->isAtomsZone());
1057 Zone* valueZone = value->zoneFromAnyThread();
1058 MOZ_ASSERT(valueZone == zone || valueZone->isAtomsZone());
1060 if (object && object->color() != map->mapColor()) {
1061 fprintf(stderr, "WeakMap object is marked differently to the map\n");
1062 fprintf(stderr, "(map %p is %s, object %p is %s)\n", map,
1063 CellColorName(map->mapColor()), object,
1064 CellColorName(object->color()));
1065 ok = false;
1068 // Values belonging to other runtimes or in uncollected zones are treated as
1069 // black.
1070 JSRuntime* mapRuntime = zone->runtimeFromAnyThread();
1071 auto effectiveColor = [=](Cell* cell, Zone* cellZone) -> CellColor {
1072 if (cell->runtimeFromAnyThread() != mapRuntime) {
1073 return CellColor::Black;
1075 if (cellZone->isGCMarkingOrSweeping()) {
1076 return cell->color();
1078 return CellColor::Black;
1081 CellColor valueColor = effectiveColor(value, valueZone);
1082 CellColor keyColor = effectiveColor(key, keyZone);
1084 if (valueColor < std::min(map->mapColor(), keyColor)) {
1085 fprintf(stderr, "WeakMap value is less marked than map and key\n");
1086 fprintf(stderr, "(map %p is %s, key %p is %s, value %p is %s)\n", map,
1087 CellColorName(map->mapColor()), key, CellColorName(keyColor), value,
1088 CellColorName(valueColor));
1089 # ifdef DEBUG
1090 fprintf(stderr, "Key:\n");
1091 key->dump();
1092 if (auto* delegate = MaybeGetDelegate(key); delegate) {
1093 fprintf(stderr, "Delegate:\n");
1094 delegate->dump();
1096 fprintf(stderr, "Value:\n");
1097 value->dump();
1098 # endif
1100 ok = false;
1103 JSObject* delegate = MaybeGetDelegate(key);
1104 if (!delegate) {
1105 return ok;
1108 CellColor delegateColor = effectiveColor(delegate, delegate->zone());
1109 if (keyColor < std::min(map->mapColor(), delegateColor)) {
1110 fprintf(stderr, "WeakMap key is less marked than map or delegate\n");
1111 fprintf(stderr, "(map %p is %s, delegate %p is %s, key %p is %s)\n", map,
1112 CellColorName(map->mapColor()), delegate,
1113 CellColorName(delegateColor), key, CellColorName(keyColor));
1114 ok = false;
1117 return ok;
1120 #endif // defined(JS_GC_ZEAL) || defined(DEBUG)
1122 // Return whether an arbitrary pointer is within a cell with the given
1123 // traceKind. Only for assertions and js::debug::* APIs.
1124 bool GCRuntime::isPointerWithinTenuredCell(void* ptr, JS::TraceKind traceKind) {
1125 AutoLockGC lock(this);
1126 for (auto chunk = allNonEmptyChunks(lock); !chunk.done(); chunk.next()) {
1127 MOZ_ASSERT(!chunk->isNurseryChunk());
1128 if (ptr >= &chunk->arenas[0] && ptr < &chunk->arenas[ArenasPerChunk]) {
1129 auto* arena = reinterpret_cast<Arena*>(uintptr_t(ptr) & ~ArenaMask);
1130 if (!arena->allocated()) {
1131 return false;
1134 return traceKind == JS::TraceKind::Null ||
1135 MapAllocToTraceKind(arena->getAllocKind()) == traceKind;
1139 return false;