/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

/*
 * Implementation of compacting GC.
 */

#include "mozilla/Maybe.h"

#include "debugger/DebugAPI.h"
#include "gc/ArenaList.h"
#include "gc/GCInternals.h"
#include "gc/GCLock.h"
#include "gc/ParallelWork.h"
#include "gc/Zone.h"
#include "jit/JitCode.h"
#include "jit/JitRuntime.h"
#include "jit/JitZone.h"
#include "js/GCAPI.h"
#include "vm/HelperThreads.h"
#include "vm/Realm.h"
#include "wasm/WasmGcObject.h"

#include "gc/Heap-inl.h"
#include "gc/Marking-inl.h"
#include "gc/PrivateIterators-inl.h"
#include "gc/StableCellHasher-inl.h"
#include "gc/TraceMethods-inl.h"
#include "vm/GeckoProfiler-inl.h"

using namespace js;
using namespace js::gc;

using mozilla::Maybe;

bool GCRuntime::canRelocateZone(Zone* zone) const {
  return !zone->isAtomsZone();
}

void GCRuntime::beginCompactPhase() {
  MOZ_ASSERT(!isBackgroundSweeping());
  assertBackgroundSweepingFinished();

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT);

  MOZ_ASSERT(zonesToMaybeCompact.ref().isEmpty());
  for (GCZonesIter zone(this); !zone.done(); zone.next()) {
    if (canRelocateZone(zone)) {
      zonesToMaybeCompact.ref().append(zone);
    }
  }

  startedCompacting = true;
  zonesCompacted = 0;

#ifdef DEBUG
  AutoLockGC lock(this);
  MOZ_ASSERT(!relocatedArenasToRelease);
#endif
}

IncrementalProgress GCRuntime::compactPhase(JS::GCReason reason,
                                            SliceBudget& sliceBudget,
                                            AutoGCSession& session) {
  assertBackgroundSweepingFinished();
  MOZ_ASSERT(startedCompacting);

  AutoMajorGCProfilerEntry s(this);
  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT);

  // TODO: JSScripts can move. If the sampler interrupts the GC in the
  // middle of relocating an arena, invalid JSScript pointers may be
  // accessed. Suppress all sampling until a finer-grained solution can be
  // found. See bug 1295775.
  AutoSuppressProfilerSampling suppressSampling(rt->mainContextFromOwnThread());

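  // Relocate zones one at a time and immediately update each zone's internal
  // pointers. Pointers into relocated zones from the rest of the runtime are
  // fixed up afterwards by updateRuntimePointersToRelocatedCells.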
  ZoneList relocatedZones;
  Arena* relocatedArenas = nullptr;
  while (!zonesToMaybeCompact.ref().isEmpty()) {
    Zone* zone = zonesToMaybeCompact.ref().front();
    zonesToMaybeCompact.ref().removeFront();

    MOZ_ASSERT(nursery().isEmpty());
    zone->changeGCState(Zone::Finished, Zone::Compact);

    if (relocateArenas(zone, reason, relocatedArenas, sliceBudget)) {
      updateZonePointersToRelocatedCells(zone);
      relocatedZones.append(zone);
      zonesCompacted++;
    } else {
      zone->changeGCState(Zone::Compact, Zone::Finished);
    }

    if (sliceBudget.isOverBudget()) {
      break;
    }
  }

  if (!relocatedZones.isEmpty()) {
    updateRuntimePointersToRelocatedCells(session);

    do {
      Zone* zone = relocatedZones.front();
      relocatedZones.removeFront();
      zone->changeGCState(Zone::Compact, Zone::Finished);
    } while (!relocatedZones.isEmpty());
  }

  clearRelocatedArenas(relocatedArenas, reason);

#ifdef DEBUG
  protectOrReleaseRelocatedArenas(relocatedArenas, reason);
#else
  releaseRelocatedArenas(relocatedArenas);
#endif

  // Clear caches that can contain cell pointers.
  rt->caches().purgeForCompaction();

#ifdef DEBUG
  checkHashTablesAfterMovingGC();
#endif

  return zonesToMaybeCompact.ref().isEmpty() ? Finished : NotFinished;
}

void GCRuntime::endCompactPhase() { startedCompacting = false; }

static bool ShouldRelocateAllArenas(JS::GCReason reason) {
  return reason == JS::GCReason::DEBUG_GC;
}

/*
 * Choose which arenas to relocate all cells from. Return an arena cursor that
 * can be passed to removeRemainingArenas().
 */
Arena** ArenaList::pickArenasToRelocate(size_t& arenaTotalOut,
                                        size_t& relocTotalOut) {
  // Relocate the greatest number of arenas such that the number of used cells
  // in relocated arenas is less than or equal to the number of free cells in
  // unrelocated arenas. In other words we only relocate cells we can move
  // into existing arenas, and we choose the least full arenas to relocate.
  //
  // This is made easier by the fact that the arena list has been sorted in
  // descending order of number of used cells, so we will always relocate a
  // tail of the arena list. All we need to do is find the point at which to
  // start relocating.

  check();

  if (isCursorAtEnd()) {
    return nullptr;
  }

  Arena** arenap = cursorp_;      // Next arena to consider for relocation.
  size_t previousFreeCells = 0;   // Count of free cells before arenap.
  size_t followingUsedCells = 0;  // Count of used cells after arenap.
  size_t fullArenaCount = 0;      // Number of full arenas (not relocated).
  size_t nonFullArenaCount =
      0;  // Number of non-full arenas (considered for relocation).
  size_t arenaIndex = 0;  // Index of the next arena to consider.

  for (Arena* arena = head_; arena != *cursorp_; arena = arena->next) {
    fullArenaCount++;
  }

  for (Arena* arena = *cursorp_; arena; arena = arena->next) {
    followingUsedCells += arena->countUsedCells();
    nonFullArenaCount++;
  }

  mozilla::DebugOnly<size_t> lastFreeCells(0);
  size_t cellsPerArena = Arena::thingsPerArena((*arenap)->getAllocKind());

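  // Walk the non-full arenas from fullest to emptiest, stopping at the first
  // arena where all the used cells in the remaining (emptier) arenas would fit
  // into the free space of the arenas already passed over.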
  while (*arenap) {
    Arena* arena = *arenap;
    if (followingUsedCells <= previousFreeCells) {
      break;
    }

    size_t freeCells = arena->countFreeCells();
    size_t usedCells = cellsPerArena - freeCells;
    followingUsedCells -= usedCells;
#ifdef DEBUG
    MOZ_ASSERT(freeCells >= lastFreeCells);
    lastFreeCells = freeCells;
#endif
    previousFreeCells += freeCells;
    arenap = &arena->next;
    arenaIndex++;
  }

  size_t relocCount = nonFullArenaCount - arenaIndex;
  MOZ_ASSERT(relocCount < nonFullArenaCount);
  MOZ_ASSERT((relocCount == 0) == (!*arenap));
  arenaTotalOut += fullArenaCount + nonFullArenaCount;
  relocTotalOut += relocCount;

  return arenap;
}

#ifdef DEBUG
inline bool PtrIsInRange(const void* ptr, const void* start, size_t length) {
  return uintptr_t(ptr) - uintptr_t(start) < length;
}
#endif

static void RelocateCell(Zone* zone, TenuredCell* src, AllocKind thingKind,
                         size_t thingSize) {
  JS::AutoSuppressGCAnalysis nogc;

  // Allocate a new cell.
  MOZ_ASSERT(zone == src->zone());
  TenuredCell* dst =
      reinterpret_cast<TenuredCell*>(AllocateCellInGC(zone, thingKind));

  // Copy source cell contents to destination.
  memcpy(dst, src, thingSize);

  // Move any uid attached to the object.
  gc::TransferUniqueId(dst, src);

  if (IsObjectAllocKind(thingKind)) {
    auto* srcObj = static_cast<JSObject*>(static_cast<Cell*>(src));
    auto* dstObj = static_cast<JSObject*>(static_cast<Cell*>(dst));

    if (srcObj->is<NativeObject>()) {
      NativeObject* srcNative = &srcObj->as<NativeObject>();
      NativeObject* dstNative = &dstObj->as<NativeObject>();

      // Fixup the pointer to inline object elements if necessary.
      if (srcNative->hasFixedElements()) {
        uint32_t numShifted =
            srcNative->getElementsHeader()->numShiftedElements();
        dstNative->setFixedElements(numShifted);
      }
    } else if (srcObj->is<ProxyObject>()) {
      if (srcObj->as<ProxyObject>().usingInlineValueArray()) {
        dstObj->as<ProxyObject>().setInlineValueArray();
      }
    }

    // Call object moved hook if present.
    if (JSObjectMovedOp op = srcObj->getClass()->extObjectMovedOp()) {
      op(dstObj, srcObj);
    }

    MOZ_ASSERT_IF(
        dstObj->is<NativeObject>(),
        !PtrIsInRange(
            (const Value*)dstObj->as<NativeObject>().getDenseElements(), src,
            thingSize));
  }

  // Copy the mark bits.
  dst->copyMarkBitsFrom(src);

  // Poison the source cell contents except for the forwarding flag and
  // pointer which will be stored in the first word. We can't do this for
  // native objects with fixed elements because this would overwrite the
  // element flags and these are needed when updating COW elements referred to
  // by other objects.
#ifdef DEBUG
  JSObject* srcObj = IsObjectAllocKind(thingKind)
                         ? static_cast<JSObject*>(static_cast<Cell*>(src))
                         : nullptr;
  if (!srcObj || !srcObj->is<NativeObject>() ||
      !srcObj->as<NativeObject>().hasFixedElements()) {
    AlwaysPoison(reinterpret_cast<uint8_t*>(src) + sizeof(uintptr_t),
                 JS_MOVED_TENURED_PATTERN, thingSize - sizeof(uintptr_t),
                 MemCheckKind::MakeNoAccess);
  }
#endif

  // Mark source cell as forwarded and leave a pointer to the destination.
  RelocationOverlay::forwardCell(src, dst);
}

static void RelocateArena(Arena* arena, SliceBudget& sliceBudget) {
  MOZ_ASSERT(arena->allocated());
  MOZ_ASSERT(!arena->onDelayedMarkingList());
  MOZ_ASSERT(arena->bufferedCells()->isEmpty());

  Zone* zone = arena->zone;

  AllocKind thingKind = arena->getAllocKind();
  size_t thingSize = arena->getThingSize();

  for (ArenaCellIterUnderGC cell(arena); !cell.done(); cell.next()) {
    RelocateCell(zone, cell, thingKind, thingSize);
    sliceBudget.step();
  }

#ifdef DEBUG
  for (ArenaCellIterUnderGC cell(arena); !cell.done(); cell.next()) {
    TenuredCell* src = cell;
    MOZ_ASSERT(src->isForwarded());
    TenuredCell* dest = Forwarded(src);
    MOZ_ASSERT(src->isMarkedBlack() == dest->isMarkedBlack());
    MOZ_ASSERT(src->isMarkedGray() == dest->isMarkedGray());
  }
#endif
}

/*
 * Relocate all arenas identified by pickArenasToRelocate: for each arena,
 * relocate each cell within it, then add it to a list of relocated arenas.
 */
Arena* ArenaList::relocateArenas(Arena* toRelocate, Arena* relocated,
                                 SliceBudget& sliceBudget,
                                 gcstats::Statistics& stats) {
  check();

  while (Arena* arena = toRelocate) {
    toRelocate = arena->next;
    RelocateArena(arena, sliceBudget);
    // Prepend to list of relocated arenas
    arena->next = relocated;
    relocated = arena;
    stats.count(gcstats::COUNT_ARENA_RELOCATED);
  }

  check();

  return relocated;
}

// Skip compacting zones unless we can free a certain proportion of their GC
// heap memory.
static const double MIN_ZONE_RECLAIM_PERCENT = 2.0;
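// For example, with the threshold at 2% a zone containing 1000 arenas is only
// compacted if relocation can empty at least 20 of them (OOM-triggered GCs
// skip this check).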

static bool ShouldRelocateZone(size_t arenaCount, size_t relocCount,
                               JS::GCReason reason) {
  if (relocCount == 0) {
    return false;
  }

  if (IsOOMReason(reason)) {
    return true;
  }

  double relocFraction = double(relocCount) / double(arenaCount);
  return relocFraction * 100.0 >= MIN_ZONE_RECLAIM_PERCENT;
}

static AllocKinds CompactingAllocKinds() {
  AllocKinds result;
  for (AllocKind kind : AllAllocKinds()) {
    if (IsCompactingKind(kind)) {
      result += kind;
    }
  }
  return result;
}

bool ArenaLists::relocateArenas(Arena*& relocatedListOut, JS::GCReason reason,
                                SliceBudget& sliceBudget,
                                gcstats::Statistics& stats) {
  // This is only called from the main thread while we are doing a GC, so
  // there is no need to lock.
  MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime()));
  MOZ_ASSERT(runtime()->gc.isHeapCompacting());
  MOZ_ASSERT(!runtime()->gc.isBackgroundSweeping());

  // Relocate all compatible kinds
  AllocKinds allocKindsToRelocate = CompactingAllocKinds();

  // Clear all the free lists.
  clearFreeLists();

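  // Under zeal (DEBUG_GC) relocate every arena of every compacting kind;
  // otherwise pick the emptiest arenas of each kind and only relocate if the
  // zone would reclaim enough memory.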
  if (ShouldRelocateAllArenas(reason)) {
    zone_->prepareForCompacting();
    for (auto kind : allocKindsToRelocate) {
      ArenaList& al = arenaList(kind);
      Arena* allArenas = al.head();
      al.clear();
      relocatedListOut =
          al.relocateArenas(allArenas, relocatedListOut, sliceBudget, stats);
    }
  } else {
    size_t arenaCount = 0;
    size_t relocCount = 0;
    AllAllocKindArray<Arena**> toRelocate;

    for (auto kind : allocKindsToRelocate) {
      toRelocate[kind] =
          arenaList(kind).pickArenasToRelocate(arenaCount, relocCount);
    }

    if (!ShouldRelocateZone(arenaCount, relocCount, reason)) {
      return false;
    }

    zone_->prepareForCompacting();
    for (auto kind : allocKindsToRelocate) {
      if (toRelocate[kind]) {
        ArenaList& al = arenaList(kind);
        Arena* arenas = al.removeRemainingArenas(toRelocate[kind]);
        relocatedListOut =
            al.relocateArenas(arenas, relocatedListOut, sliceBudget, stats);
      }
    }
  }

  return true;
}

bool GCRuntime::relocateArenas(Zone* zone, JS::GCReason reason,
                               Arena*& relocatedListOut,
                               SliceBudget& sliceBudget) {
  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT_MOVE);

  MOZ_ASSERT(!zone->isPreservingCode());
  MOZ_ASSERT(canRelocateZone(zone));

  js::CancelOffThreadIonCompile(rt, JS::Zone::Compact);

  if (!zone->arenas.relocateArenas(relocatedListOut, reason, sliceBudget,
                                   stats())) {
    return false;
  }

#ifdef DEBUG
  // Check that we did as much compaction as we should have. There
  // should always be less than one arena's worth of free cells.
  for (auto kind : CompactingAllocKinds()) {
    ArenaList& al = zone->arenas.arenaList(kind);
    size_t freeCells = 0;
    for (Arena* arena = al.arenaAfterCursor(); arena; arena = arena->next) {
      freeCells += arena->countFreeCells();
    }
    MOZ_ASSERT(freeCells < Arena::thingsPerArena(kind));
  }
#endif

  return true;
}

MovingTracer::MovingTracer(JSRuntime* rt)
    : GenericTracerImpl(rt, JS::TracerKind::Moving,
                        JS::WeakMapTraceAction::TraceKeysAndValues) {}

template <typename T>
inline void MovingTracer::onEdge(T** thingp, const char* name) {
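  // Update the edge only if the target cell has actually been moved; cells
  // belonging to other runtimes are left alone.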
  T* thing = *thingp;
  if (thing->runtimeFromAnyThread() == runtime() && IsForwarded(thing)) {
    *thingp = Forwarded(thing);
  }
}

void Zone::prepareForCompacting() {
  JS::GCContext* gcx = runtimeFromMainThread()->gcContext();
  discardJitCode(gcx);
}

void GCRuntime::sweepZoneAfterCompacting(MovingTracer* trc, Zone* zone) {
  MOZ_ASSERT(zone->isGCCompacting());

  zone->traceWeakMaps(trc);
  zone->sweepObjectsWithWeakPointers(trc);

  traceWeakFinalizationObserverEdges(trc, zone);

  for (auto* cache : zone->weakCaches()) {
    cache->traceWeak(trc, nullptr);
  }

  if (jit::JitZone* jitZone = zone->jitZone()) {
    jitZone->traceWeak(trc, zone);
  }

  for (CompartmentsInZoneIter c(zone); !c.done(); c.next()) {
    c->traceWeakNativeIterators(trc);

    for (RealmsInCompartmentIter r(c); !r.done(); r.next()) {
      r->traceWeakRegExps(trc);
      r->traceWeakSavedStacks(trc);
      r->traceWeakGlobalEdge(trc);
      r->traceWeakDebugEnvironmentEdges(trc);
      r->traceWeakEdgesInJitRealm(trc);
    }
  }
}

template <typename T>
static inline void UpdateCellPointers(MovingTracer* trc, T* cell) {
  // We only update unmoved GC things or the new copy of moved GC things, never
  // the old copy. If we updated the old copy it could clear the forwarded
  // flag, which could lead to pointers to the old copy not being updated.
  MOZ_ASSERT(!cell->isForwarded());

  cell->fixupAfterMovingGC();
  cell->traceChildren(trc);
}

template <typename T>
static void UpdateArenaPointersTyped(MovingTracer* trc, Arena* arena) {
  for (ArenaCellIterUnderGC cell(arena); !cell.done(); cell.next()) {
    UpdateCellPointers(trc, cell.as<T>());
  }
}

static bool CanUpdateKindInBackground(AllocKind kind) {
  // We try to update as many GC things in parallel as we can, but there are
  // kinds for which this might not be safe:
  //  - we assume JSObjects that are foreground finalized are not safe to
  //    update in parallel
  //  - updating a SharedPropMap touches child maps in
  //    SharedPropMap::fixupAfterMovingGC
  return js::gc::IsBackgroundFinalized(kind) && !IsShapeAllocKind(kind) &&
         kind != AllocKind::BASE_SHAPE;
}

/*
 * Update the internal pointers for all cells in an arena.
 */
static void UpdateArenaPointers(MovingTracer* trc, Arena* arena) {
  AllocKind kind = arena->getAllocKind();

  MOZ_ASSERT_IF(!CanUpdateKindInBackground(kind),
                CurrentThreadCanAccessRuntime(trc->runtime()));

  switch (kind) {
#define EXPAND_CASE(allocKind, traceKind, type, sizedType, bgFinal, nursery, \
                    compact)                                                 \
  case AllocKind::allocKind:                                                 \
    UpdateArenaPointersTyped<type>(trc, arena);                              \
    return;
    FOR_EACH_ALLOCKIND(EXPAND_CASE)
#undef EXPAND_CASE

    default:
      MOZ_CRASH("Invalid alloc kind for UpdateArenaPointers");
  }
}

struct ArenaListSegment {
  Arena* begin;
  Arena* end;
};

/*
 * Update the internal pointers for all arenas in a segment of an arena list.
 *
 * Returns the number of steps to count against the slice budget.
 */
static size_t UpdateArenaListSegmentPointers(GCRuntime* gc,
                                             const ArenaListSegment& arenas) {
  MOZ_ASSERT(arenas.begin);
  MovingTracer trc(gc->rt);
  size_t count = 0;
  for (Arena* arena = arenas.begin; arena != arenas.end; arena = arena->next) {
    UpdateArenaPointers(&trc, arena);
    count++;
  }
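  // Each arena processed counts as multiple steps against the caller's slice
  // budget.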
  return count * 256;
}

class ArenasToUpdate {
  // Maximum number of arenas to update in one block.
#ifdef DEBUG
  static const unsigned MaxArenasToProcess = 16;
#else
  static const unsigned MaxArenasToProcess = 256;
#endif

 public:
  explicit ArenasToUpdate(Zone* zone);
  ArenasToUpdate(Zone* zone, const AllocKinds& kinds);

  bool done() const { return !segmentBegin; }

  ArenaListSegment get() const {
    MOZ_ASSERT(!done());
    return {segmentBegin, segmentEnd};
  }

  void next();

 private:
  Maybe<AllocKinds> kinds;            // Selects which thing kinds to update.
  Zone* zone;                         // Zone to process.
  AllocKind kind = AllocKind::FIRST;  // Current alloc kind to process.
  Arena* segmentBegin = nullptr;
  Arena* segmentEnd = nullptr;

  static AllocKind nextAllocKind(AllocKind i) {
    return AllocKind(uint8_t(i) + 1);
  }

  void settle();
  void findSegmentEnd();
};

ArenasToUpdate::ArenasToUpdate(Zone* zone) : zone(zone) { settle(); }

ArenasToUpdate::ArenasToUpdate(Zone* zone, const AllocKinds& kinds)
    : kinds(Some(kinds)), zone(zone) {
  settle();
}

void ArenasToUpdate::settle() {
  // Called when we have set |kind| to a new kind. Sets |segmentBegin| to the
  // next arena to process, or leaves it null if there are no more arenas to
  // update.

  MOZ_ASSERT(!segmentBegin);

  for (; kind < AllocKind::LIMIT; kind = nextAllocKind(kind)) {
    if (kinds && !kinds.ref().contains(kind)) {
      continue;
    }

    Arena* arena = zone->arenas.getFirstArena(kind);
    if (arena) {
      segmentBegin = arena;
      findSegmentEnd();
      break;
    }
  }
}

void ArenasToUpdate::findSegmentEnd() {
  // Take up to MaxArenasToProcess arenas from the list starting at
  // |segmentBegin| and set |segmentEnd|.
  Arena* arena = segmentBegin;
  for (size_t i = 0; arena && i < MaxArenasToProcess; i++) {
    arena = arena->next;
  }
  segmentEnd = arena;
}

void ArenasToUpdate::next() {
  MOZ_ASSERT(!done());

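  // Continue with the next segment of the current arena list, or if that list
  // is exhausted move on to the next alloc kind.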
  segmentBegin = segmentEnd;
  if (segmentBegin) {
    findSegmentEnd();
    return;
  }

  kind = nextAllocKind(kind);
  settle();
}

static AllocKinds ForegroundUpdateKinds(AllocKinds kinds) {
  AllocKinds result;
  for (AllocKind kind : kinds) {
    if (!CanUpdateKindInBackground(kind)) {
      result += kind;
    }
  }
  return result;
}

void GCRuntime::updateCellPointers(Zone* zone, AllocKinds kinds) {
  AllocKinds fgKinds = ForegroundUpdateKinds(kinds);
  AllocKinds bgKinds = kinds - fgKinds;

  ArenasToUpdate fgArenas(zone, fgKinds);
  ArenasToUpdate bgArenas(zone, bgKinds);

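  // Update background-safe kinds in parallel on helper threads while this
  // thread updates the remaining (foreground-only) kinds.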
  AutoLockHelperThreadState lock;

  AutoRunParallelWork bgTasks(this, UpdateArenaListSegmentPointers,
                              gcstats::PhaseKind::COMPACT_UPDATE_CELLS,
                              GCUse::Unspecified, bgArenas,
                              SliceBudget::unlimited(), lock);

  AutoUnlockHelperThreadState unlock(lock);

  for (; !fgArenas.done(); fgArenas.next()) {
    UpdateArenaListSegmentPointers(this, fgArenas.get());
  }
}

// After cells have been relocated any pointers to a cell's old locations must
// be updated to point to the new location. This happens by iterating through
// all cells in the heap and tracing their children (non-recursively) to update
// them.
//
// This is complicated by the fact that updating a GC thing sometimes depends
// on making use of other GC things. After a moving GC these things may not be
// in a valid state since they may contain pointers which have not been updated
// yet.
//
// The main dependencies are:
//
//   - Updating a JSObject makes use of its shape
//   - Updating a typed object makes use of its type descriptor object
//
// This means we require at least three phases for update:
//
//  1) shapes
//  2) typed object type descriptor objects
//  3) all other objects
//
// Also, there can be data races calling IsForwarded() on the new location of a
// cell whose first word is being updated in parallel on another thread. The
// easiest way to avoid this is to not store a GC pointer in the first word of
// a cell. Otherwise this can be avoided by updating different kinds of cell in
// different phases.
//
// Since we want to minimize the number of phases, arrange kinds into three
// arbitrary phases.

static constexpr AllocKinds UpdatePhaseOne{AllocKind::SCRIPT,
                                           AllocKind::BASE_SHAPE,
                                           AllocKind::SHAPE,
                                           AllocKind::STRING,
                                           AllocKind::JITCODE,
                                           AllocKind::REGEXP_SHARED,
                                           AllocKind::SCOPE,
                                           AllocKind::GETTER_SETTER,
                                           AllocKind::COMPACT_PROP_MAP,
                                           AllocKind::NORMAL_PROP_MAP,
                                           AllocKind::DICT_PROP_MAP};

// UpdatePhaseTwo is typed object descriptor objects.

static constexpr AllocKinds UpdatePhaseThree{AllocKind::FUNCTION,
                                             AllocKind::FUNCTION_EXTENDED,
                                             AllocKind::OBJECT0,
                                             AllocKind::OBJECT0_BACKGROUND,
                                             AllocKind::OBJECT2,
                                             AllocKind::OBJECT2_BACKGROUND,
                                             AllocKind::ARRAYBUFFER4,
                                             AllocKind::OBJECT4,
                                             AllocKind::OBJECT4_BACKGROUND,
                                             AllocKind::ARRAYBUFFER8,
                                             AllocKind::OBJECT8,
                                             AllocKind::OBJECT8_BACKGROUND,
                                             AllocKind::ARRAYBUFFER12,
                                             AllocKind::OBJECT12,
                                             AllocKind::OBJECT12_BACKGROUND,
                                             AllocKind::ARRAYBUFFER16,
                                             AllocKind::OBJECT16,
                                             AllocKind::OBJECT16_BACKGROUND};

void GCRuntime::updateAllCellPointers(MovingTracer* trc, Zone* zone) {
  updateCellPointers(zone, UpdatePhaseOne);

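  // Note: there is currently no UpdatePhaseTwo pass here; see the comment
  // above UpdatePhaseThree.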
  updateCellPointers(zone, UpdatePhaseThree);
}

/*
 * Update pointers to relocated cells in a single zone by doing a traversal of
 * that zone's arenas and calling per-zone sweep hooks.
 *
 * The latter is necessary to update weak references which are not marked as
 * part of the traversal.
 */
void GCRuntime::updateZonePointersToRelocatedCells(Zone* zone) {
  MOZ_ASSERT(!rt->isBeingDestroyed());
  MOZ_ASSERT(zone->isGCCompacting());

  AutoTouchingGrayThings tgt;

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT_UPDATE);
  MovingTracer trc(rt);

  zone->fixupAfterMovingGC();
  zone->fixupScriptMapsAfterMovingGC(&trc);

  // Fixup compartment global pointers as these get accessed during marking.
  for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
    comp->fixupAfterMovingGC(&trc);
  }

  zone->externalStringCache().purge();
  zone->functionToStringCache().purge();
  zone->shapeZone().purgeShapeCaches(rt->gcContext());
  rt->caches().stringToAtomCache.purge();

  // Iterate through all cells that can contain relocatable pointers to update
  // them. Since updating each cell is independent we try to parallelize this
  // as much as possible.
  updateAllCellPointers(&trc, zone);

  // Sweep everything to fix up weak pointers.
  sweepZoneAfterCompacting(&trc, zone);

  // Call callbacks to get the rest of the system to fixup other untraced
  // pointers.
  for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
    callWeakPointerCompartmentCallbacks(&trc, comp);
  }
}

/*
 * Update runtime-wide pointers to relocated cells.
 */
void GCRuntime::updateRuntimePointersToRelocatedCells(AutoGCSession& session) {
  MOZ_ASSERT(!rt->isBeingDestroyed());

  gcstats::AutoPhase ap1(stats(), gcstats::PhaseKind::COMPACT_UPDATE);
  MovingTracer trc(rt);

  Zone::fixupAllCrossCompartmentWrappersAfterMovingGC(&trc);

  rt->geckoProfiler().fixupStringsMapAfterMovingGC();

  // Mark roots to update them.

  traceRuntimeForMajorGC(&trc, session);

  {
    gcstats::AutoPhase ap2(stats(), gcstats::PhaseKind::MARK_ROOTS);
    DebugAPI::traceAllForMovingGC(&trc);
    DebugAPI::traceCrossCompartmentEdges(&trc);

    // Mark all gray roots.
    traceEmbeddingGrayRoots(&trc);
    Compartment::traceIncomingCrossCompartmentEdgesForZoneGC(
        &trc, Compartment::GrayEdges);
  }

  // Sweep everything to fix up weak pointers.
  jit::JitRuntime::TraceWeakJitcodeGlobalTable(rt, &trc);
  for (JS::detail::WeakCacheBase* cache : rt->weakCaches()) {
    cache->traceWeak(&trc, nullptr);
  }

  if (rt->hasJitRuntime() && rt->jitRuntime()->hasInterpreterEntryMap()) {
    rt->jitRuntime()->getInterpreterEntryMap()->updateScriptsAfterMovingGC();
  }

  // Type inference may put more blocks here to free.
  {
    AutoLockHelperThreadState lock;
    lifoBlocksToFree.ref().freeAll();
  }

  // Call callbacks to get the rest of the system to fixup other untraced
  // pointers.
  callWeakPointerZonesCallbacks(&trc);
}

void GCRuntime::clearRelocatedArenas(Arena* arenaList, JS::GCReason reason) {
  AutoLockGC lock(this);
  clearRelocatedArenasWithoutUnlocking(arenaList, reason, lock);
}

void GCRuntime::clearRelocatedArenasWithoutUnlocking(Arena* arenaList,
                                                     JS::GCReason reason,
                                                     const AutoLockGC& lock) {
  // Clear the relocated arenas, now containing only forwarding pointers
  while (arenaList) {
    Arena* arena = arenaList;
    arenaList = arenaList->next;

    // Clear the mark bits
    arena->unmarkAll();

    // Mark arena as empty
    arena->setAsFullyUnused();

#ifdef DEBUG
    // The cell contents have been partially marked no access in RelocateCell,
    // so we need to mark the region as undefined again so we can poison it.
    SetMemCheckKind(reinterpret_cast<void*>(arena->thingsStart()),
                    arena->getThingsSpan(), MemCheckKind::MakeUndefined);
#endif

    AlwaysPoison(reinterpret_cast<void*>(arena->thingsStart()),
                 JS_MOVED_TENURED_PATTERN, arena->getThingsSpan(),
                 MemCheckKind::MakeNoAccess);

    // Don't count emptied arenas as being freed by the current GC:
    //  - if we purposely moved everything to new arenas, as that will already
    //    have allocated a similar number of arenas. (This only happens for
    //    collections triggered by GC zeal.)
    //  - if they were allocated since the start of the GC.
    bool allArenasRelocated = ShouldRelocateAllArenas(reason);
    bool updateRetainedSize = !allArenasRelocated && !arena->isNewlyCreated();
    arena->zone->gcHeapSize.removeBytes(ArenaSize, updateRetainedSize,
                                        heapSize);

    // Release the arena but don't return it to the chunk yet.
    arena->release(lock);
  }
}

#ifdef DEBUG

// In debug mode we don't always release relocated arenas straight away.
// Sometimes we protect them instead and hold onto them until the next GC
// sweep phase to catch any pointers to them that didn't get forwarded.

static inline bool CanProtectArenas() {
  // On some systems the page size is larger than the size of an arena so we
  // can't change the mapping permissions per arena.
  return SystemPageSize() <= ArenaSize;
}

static inline bool ShouldProtectRelocatedArenas(JS::GCReason reason) {
  // For zeal mode collections we don't release the relocated arenas
  // immediately. Instead we protect them and keep them around until the next
  // collection so we can catch any stray accesses to them.
  return reason == JS::GCReason::DEBUG_GC && CanProtectArenas();
}

void GCRuntime::protectOrReleaseRelocatedArenas(Arena* arenaList,
                                                JS::GCReason reason) {
  if (ShouldProtectRelocatedArenas(reason)) {
    protectAndHoldArenas(arenaList);
    return;
  }

  releaseRelocatedArenas(arenaList);
}

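// Protect every arena in the list and link the whole list onto
// relocatedArenasToRelease so that a later collection can unprotect and
// release them.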
void GCRuntime::protectAndHoldArenas(Arena* arenaList) {
  for (Arena* arena = arenaList; arena;) {
    MOZ_ASSERT(!arena->allocated());
    Arena* next = arena->next;
    if (!next) {
      // Prepend to hold list before we protect the memory.
      AutoLockGC lock(this);
      arena->next = relocatedArenasToRelease;
      relocatedArenasToRelease = arenaList;
    }
    ProtectPages(arena, ArenaSize);
    arena = next;
  }
}

void GCRuntime::unprotectHeldRelocatedArenas(const AutoLockGC& lock) {
  for (Arena* arena = relocatedArenasToRelease; arena; arena = arena->next) {
    UnprotectPages(arena, ArenaSize);
    MOZ_ASSERT(!arena->allocated());
  }
}

void GCRuntime::releaseHeldRelocatedArenas() {
  AutoLockGC lock(this);
  unprotectHeldRelocatedArenas(lock);
  Arena* arenas = relocatedArenasToRelease;
  relocatedArenasToRelease = nullptr;
  releaseRelocatedArenasWithoutUnlocking(arenas, lock);
}

void GCRuntime::releaseHeldRelocatedArenasWithoutUnlocking(
    const AutoLockGC& lock) {
  unprotectHeldRelocatedArenas(lock);
  releaseRelocatedArenasWithoutUnlocking(relocatedArenasToRelease, lock);
  relocatedArenasToRelease = nullptr;
}

#endif

void GCRuntime::releaseRelocatedArenas(Arena* arenaList) {
  AutoLockGC lock(this);
  releaseRelocatedArenasWithoutUnlocking(arenaList, lock);
}

void GCRuntime::releaseRelocatedArenasWithoutUnlocking(Arena* arenaList,
                                                       const AutoLockGC& lock) {
  // Release relocated arenas previously cleared with clearRelocatedArenas().
  while (arenaList) {
    Arena* arena = arenaList;
    arenaList = arenaList->next;

    // We already updated the memory accounting so just call
    // Chunk::releaseArena.
    arena->chunk()->releaseArena(this, arena, lock);
  }
}