/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

/*
 * Implementation of compacting GC.
 */

#include "mozilla/Maybe.h"

#include "debugger/DebugAPI.h"
#include "gc/ArenaList.h"
#include "gc/GCInternals.h"
#include "gc/GCLock.h"
#include "gc/ParallelWork.h"
#include "gc/Zone.h"
#include "jit/JitCode.h"
#include "jit/JitRuntime.h"
#include "jit/JitZone.h"
#include "js/GCAPI.h"
#include "vm/HelperThreads.h"
#include "vm/Realm.h"
#include "wasm/WasmGcObject.h"

#include "gc/Heap-inl.h"
#include "gc/Marking-inl.h"
#include "gc/PrivateIterators-inl.h"
#include "gc/StableCellHasher-inl.h"
#include "gc/TraceMethods-inl.h"
#include "vm/GeckoProfiler-inl.h"

using namespace js;
using namespace js::gc;

using mozilla::Maybe;

bool GCRuntime::canRelocateZone(Zone* zone) const {
  return !zone->isAtomsZone();
}

void GCRuntime::beginCompactPhase() {
  MOZ_ASSERT(!isBackgroundSweeping());
  assertBackgroundSweepingFinished();

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT);

  MOZ_ASSERT(zonesToMaybeCompact.ref().isEmpty());
  for (GCZonesIter zone(this); !zone.done(); zone.next()) {
    if (canRelocateZone(zone)) {
      zonesToMaybeCompact.ref().append(zone);
    }
  }

  startedCompacting = true;
  zonesCompacted = 0;

#ifdef DEBUG
  AutoLockGC lock(this);
  MOZ_ASSERT(!relocatedArenasToRelease);
#endif
}

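// Compaction proceeds one zone at a time: each zone's arenas are relocated
// and the pointers within that zone are updated immediately, but
// runtime-wide pointers are only fixed up once, after every zone relocated
// in this slice. If the slice budget runs out, the remaining zones stay in
// zonesToMaybeCompact and are picked up by a later slice.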
IncrementalProgress GCRuntime::compactPhase(JS::GCReason reason,
                                            SliceBudget& sliceBudget,
                                            AutoGCSession& session) {
  assertBackgroundSweepingFinished();
  MOZ_ASSERT(startedCompacting);

  AutoMajorGCProfilerEntry s(this);
  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT);

  // TODO: JSScripts can move. If the sampler interrupts the GC in the
  // middle of relocating an arena, invalid JSScript pointers may be
  // accessed. Suppress all sampling until a finer-grained solution can be
  // found. See bug 1295775.
  AutoSuppressProfilerSampling suppressSampling(rt->mainContextFromOwnThread());

  ZoneList relocatedZones;
  Arena* relocatedArenas = nullptr;
  while (!zonesToMaybeCompact.ref().isEmpty()) {
    Zone* zone = zonesToMaybeCompact.ref().front();
    zonesToMaybeCompact.ref().removeFront();

    MOZ_ASSERT(nursery().isEmpty());
    zone->changeGCState(Zone::Finished, Zone::Compact);

    if (relocateArenas(zone, reason, relocatedArenas, sliceBudget)) {
      updateZonePointersToRelocatedCells(zone);
      relocatedZones.append(zone);
      zonesCompacted++;
    } else {
      zone->changeGCState(Zone::Compact, Zone::Finished);
    }

    if (sliceBudget.isOverBudget()) {
      break;
    }
  }

  if (!relocatedZones.isEmpty()) {
    updateRuntimePointersToRelocatedCells(session);

    do {
      Zone* zone = relocatedZones.front();
      relocatedZones.removeFront();
      zone->changeGCState(Zone::Compact, Zone::Finished);
    } while (!relocatedZones.isEmpty());
  }

  clearRelocatedArenas(relocatedArenas, reason);

#ifdef DEBUG
  protectOrReleaseRelocatedArenas(relocatedArenas, reason);
#else
  releaseRelocatedArenas(relocatedArenas);
#endif

  // Clear caches that can contain cell pointers.
  rt->caches().purgeForCompaction();

#ifdef DEBUG
  checkHashTablesAfterMovingGC();
#endif

  return zonesToMaybeCompact.ref().isEmpty() ? Finished : NotFinished;
}

void GCRuntime::endCompactPhase() { startedCompacting = false; }

static bool ShouldRelocateAllArenas(JS::GCReason reason) {
  return reason == JS::GCReason::DEBUG_GC;
}

/*
 * Choose which arenas to relocate all cells from. Return an arena cursor that
 * can be passed to removeRemainingArenas().
 */
Arena** ArenaList::pickArenasToRelocate(size_t& arenaTotalOut,
                                        size_t& relocTotalOut) {
  // Relocate the greatest number of arenas such that the number of used cells
  // in relocated arenas is less than or equal to the number of free cells in
  // unrelocated arenas. In other words we only relocate cells we can move
  // into existing arenas, and we choose the least full arenas to relocate.
  //
  // This is made easier by the fact that the arena list has been sorted in
  // descending order of number of used cells, so we will always relocate a
  // tail of the arena list. All we need to do is find the point at which to
  // start relocating.
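  //
  // For example, with 10 cells per arena and non-full arenas holding 7, 4
  // and 1 used cells, the loop below keeps the 7- and 4-cell arenas (9 free
  // cells between them) and picks only the 1-cell arena for relocation: its
  // single used cell fits in that free space, whereas relocating the 4-cell
  // arena as well would need 5 cells but leave only the 3 free cells of the
  // first arena available.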

  check();

  if (isCursorAtEnd()) {
    return nullptr;
  }

  Arena** arenap = cursorp_;      // Next arena to consider for relocation.
  size_t previousFreeCells = 0;   // Count of free cells before arenap.
  size_t followingUsedCells = 0;  // Count of used cells after arenap.
  size_t fullArenaCount = 0;      // Number of full arenas (not relocated).
  size_t nonFullArenaCount =
      0;  // Number of non-full arenas (considered for relocation).
  size_t arenaIndex = 0;  // Index of the next arena to consider.

  for (Arena* arena = head_; arena != *cursorp_; arena = arena->next) {
    fullArenaCount++;
  }

  for (Arena* arena = *cursorp_; arena; arena = arena->next) {
    followingUsedCells += arena->countUsedCells();
    nonFullArenaCount++;
  }

  mozilla::DebugOnly<size_t> lastFreeCells(0);
  size_t cellsPerArena = Arena::thingsPerArena((*arenap)->getAllocKind());

  while (*arenap) {
    Arena* arena = *arenap;
    if (followingUsedCells <= previousFreeCells) {
      break;
    }

    size_t freeCells = arena->countFreeCells();
    size_t usedCells = cellsPerArena - freeCells;
    followingUsedCells -= usedCells;
#ifdef DEBUG
    MOZ_ASSERT(freeCells >= lastFreeCells);
    lastFreeCells = freeCells;
#endif
    previousFreeCells += freeCells;
    arenap = &arena->next;
    arenaIndex++;
  }

  size_t relocCount = nonFullArenaCount - arenaIndex;
  MOZ_ASSERT(relocCount < nonFullArenaCount);
  MOZ_ASSERT((relocCount == 0) == (!*arenap));
  arenaTotalOut += fullArenaCount + nonFullArenaCount;
  relocTotalOut += relocCount;

  return arenap;
}

#ifdef DEBUG
inline bool PtrIsInRange(const void* ptr, const void* start, size_t length) {
  return uintptr_t(ptr) - uintptr_t(start) < length;
}
#endif

static void RelocateCell(Zone* zone, TenuredCell* src, AllocKind thingKind,
                         size_t thingSize) {
  JS::AutoSuppressGCAnalysis nogc;

  // Allocate a new cell.
  MOZ_ASSERT(zone == src->zone());
  TenuredCell* dst =
      reinterpret_cast<TenuredCell*>(AllocateCellInGC(zone, thingKind));

  // Copy source cell contents to destination.
  memcpy(dst, src, thingSize);

  // Move any uid attached to the object.
  gc::TransferUniqueId(dst, src);

  if (IsObjectAllocKind(thingKind)) {
    auto* srcObj = static_cast<JSObject*>(static_cast<Cell*>(src));
    auto* dstObj = static_cast<JSObject*>(static_cast<Cell*>(dst));

    if (srcObj->is<NativeObject>()) {
      NativeObject* srcNative = &srcObj->as<NativeObject>();
      NativeObject* dstNative = &dstObj->as<NativeObject>();

      // Fixup the pointer to inline object elements if necessary.
      if (srcNative->hasFixedElements()) {
        uint32_t numShifted =
            srcNative->getElementsHeader()->numShiftedElements();
        dstNative->setFixedElements(numShifted);
      }
    } else if (srcObj->is<ProxyObject>()) {
      if (srcObj->as<ProxyObject>().usingInlineValueArray()) {
        dstObj->as<ProxyObject>().setInlineValueArray();
      }
    }

    // Call object moved hook if present.
    if (JSObjectMovedOp op = srcObj->getClass()->extObjectMovedOp()) {
      op(dstObj, srcObj);
    }

    MOZ_ASSERT_IF(
        dstObj->is<NativeObject>(),
        !PtrIsInRange(
            (const Value*)dstObj->as<NativeObject>().getDenseElements(), src,
            thingSize));
  }

  // Copy the mark bits.
  dst->copyMarkBitsFrom(src);

  // Poison the source cell contents except for the forwarding flag and pointer
  // which will be stored in the first word. We can't do this for native
  // objects with fixed elements because this would overwrite the element flags
  // and these are needed when updating COW elements referred to by other
  // objects.
#ifdef DEBUG
  JSObject* srcObj = IsObjectAllocKind(thingKind)
                         ? static_cast<JSObject*>(static_cast<Cell*>(src))
                         : nullptr;
  if (!srcObj || !srcObj->is<NativeObject>() ||
      !srcObj->as<NativeObject>().hasFixedElements()) {
    AlwaysPoison(reinterpret_cast<uint8_t*>(src) + sizeof(uintptr_t),
                 JS_MOVED_TENURED_PATTERN, thingSize - sizeof(uintptr_t),
                 MemCheckKind::MakeNoAccess);
  }
#endif

  // Mark source cell as forwarded and leave a pointer to the destination.
  RelocationOverlay::forwardCell(src, dst);
}

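// Relocate every live cell in an arena. On return each original cell is
// marked as forwarded; the DEBUG pass below re-walks the arena to check that
// mark bits were copied to the new locations.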
static void RelocateArena(Arena* arena, SliceBudget& sliceBudget) {
  MOZ_ASSERT(arena->allocated());
  MOZ_ASSERT(!arena->onDelayedMarkingList());
  MOZ_ASSERT(arena->bufferedCells()->isEmpty());

  Zone* zone = arena->zone;

  AllocKind thingKind = arena->getAllocKind();
  size_t thingSize = arena->getThingSize();

  for (ArenaCellIterUnderGC cell(arena); !cell.done(); cell.next()) {
    RelocateCell(zone, cell, thingKind, thingSize);
    sliceBudget.step();
  }

#ifdef DEBUG
  for (ArenaCellIterUnderGC cell(arena); !cell.done(); cell.next()) {
    TenuredCell* src = cell;
    MOZ_ASSERT(src->isForwarded());
    TenuredCell* dest = Forwarded(src);
    MOZ_ASSERT(src->isMarkedBlack() == dest->isMarkedBlack());
    MOZ_ASSERT(src->isMarkedGray() == dest->isMarkedGray());
  }
#endif
}

/*
 * Relocate all arenas identified by pickArenasToRelocate: for each arena,
 * relocate each cell within it, then add it to a list of relocated arenas.
 */
Arena* ArenaList::relocateArenas(Arena* toRelocate, Arena* relocated,
                                 SliceBudget& sliceBudget,
                                 gcstats::Statistics& stats) {
  check();

  while (Arena* arena = toRelocate) {
    toRelocate = arena->next;
    RelocateArena(arena, sliceBudget);
    // Prepend to list of relocated arenas
    arena->next = relocated;
    relocated = arena;
    stats.count(gcstats::COUNT_ARENA_RELOCATED);
  }

  check();

  return relocated;
}

// Skip compacting zones unless we can free a certain proportion of their GC
// heap memory.
static const double MIN_ZONE_RECLAIM_PERCENT = 2.0;
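// For example, a zone with 1000 arenas of compacting kinds is only compacted
// if relocation can empty at least 20 of them; OOM-triggered collections skip
// this check and compact whenever anything at all can be relocated.
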
static bool ShouldRelocateZone(size_t arenaCount, size_t relocCount,
                               JS::GCReason reason) {
  if (relocCount == 0) {
    return false;
  }

  if (IsOOMReason(reason)) {
    return true;
  }

  double relocFraction = double(relocCount) / double(arenaCount);
  return relocFraction * 100.0 >= MIN_ZONE_RECLAIM_PERCENT;
}

static AllocKinds CompactingAllocKinds() {
  AllocKinds result;
  for (AllocKind kind : AllAllocKinds()) {
    if (IsCompactingKind(kind)) {
      result += kind;
    }
  }
  return result;
}

bool ArenaLists::relocateArenas(Arena*& relocatedListOut, JS::GCReason reason,
                                SliceBudget& sliceBudget,
                                gcstats::Statistics& stats) {
  // This is only called from the main thread while we are doing a GC, so
  // there is no need to lock.
  MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime()));
  MOZ_ASSERT(runtime()->gc.isHeapCompacting());
  MOZ_ASSERT(!runtime()->gc.isBackgroundSweeping());

  // Relocate all compatible kinds
  AllocKinds allocKindsToRelocate = CompactingAllocKinds();

  // Clear all the free lists.
  clearFreeLists();

  if (ShouldRelocateAllArenas(reason)) {
    zone_->prepareForCompacting();
    for (auto kind : allocKindsToRelocate) {
      ArenaList& al = arenaList(kind);
      Arena* allArenas = al.head();
      al.clear();
      relocatedListOut =
          al.relocateArenas(allArenas, relocatedListOut, sliceBudget, stats);
    }
  } else {
    size_t arenaCount = 0;
    size_t relocCount = 0;
    AllAllocKindArray<Arena**> toRelocate;

    for (auto kind : allocKindsToRelocate) {
      toRelocate[kind] =
          arenaList(kind).pickArenasToRelocate(arenaCount, relocCount);
    }

    if (!ShouldRelocateZone(arenaCount, relocCount, reason)) {
      return false;
    }

    zone_->prepareForCompacting();
    for (auto kind : allocKindsToRelocate) {
      if (toRelocate[kind]) {
        ArenaList& al = arenaList(kind);
        Arena* arenas = al.removeRemainingArenas(toRelocate[kind]);
        relocatedListOut =
            al.relocateArenas(arenas, relocatedListOut, sliceBudget, stats);
      }
    }
  }

  return true;
}

bool GCRuntime::relocateArenas(Zone* zone, JS::GCReason reason,
                               Arena*& relocatedListOut,
                               SliceBudget& sliceBudget) {
  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT_MOVE);

  MOZ_ASSERT(!zone->isPreservingCode());
  MOZ_ASSERT(canRelocateZone(zone));

  js::CancelOffThreadIonCompile(rt, JS::Zone::Compact);

  if (!zone->arenas.relocateArenas(relocatedListOut, reason, sliceBudget,
                                   stats())) {
    return false;
  }

#ifdef DEBUG
  // Check that we did as much compaction as we should have. There
  // should always be less than one arena's worth of free cells.
  for (auto kind : CompactingAllocKinds()) {
    ArenaList& al = zone->arenas.arenaList(kind);
    size_t freeCells = 0;
    for (Arena* arena = al.arenaAfterCursor(); arena; arena = arena->next) {
      freeCells += arena->countFreeCells();
    }
    MOZ_ASSERT(freeCells < Arena::thingsPerArena(kind));
  }
#endif

  return true;
}

MovingTracer::MovingTracer(JSRuntime* rt)
    : GenericTracerImpl(rt, JS::TracerKind::Moving,
                        JS::WeakMapTraceAction::TraceKeysAndValues) {}

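// Redirect an edge to the relocated copy of its target if the target has been
// forwarded. Only cells belonging to this runtime are considered; edges into
// other runtimes are left untouched.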
template <typename T>
inline void MovingTracer::onEdge(T** thingp, const char* name) {
  T* thing = *thingp;
  if (thing->runtimeFromAnyThread() == runtime() && IsForwarded(thing)) {
    *thingp = Forwarded(thing);
  }
}

void Zone::prepareForCompacting() {
  JS::GCContext* gcx = runtimeFromMainThread()->gcContext();
  discardJitCode(gcx);
}

void GCRuntime::sweepZoneAfterCompacting(MovingTracer* trc, Zone* zone) {
  MOZ_ASSERT(zone->isGCCompacting());

  zone->traceWeakMaps(trc);

  traceWeakFinalizationObserverEdges(trc, zone);

  for (auto* cache : zone->weakCaches()) {
    cache->traceWeak(trc, nullptr);
  }

  if (jit::JitZone* jitZone = zone->jitZone()) {
    jitZone->traceWeak(trc, zone);
  }

  for (CompartmentsInZoneIter c(zone); !c.done(); c.next()) {
    c->traceWeakNativeIterators(trc);

    for (RealmsInCompartmentIter r(c); !r.done(); r.next()) {
      r->traceWeakRegExps(trc);
      r->traceWeakSavedStacks(trc);
      r->traceWeakGlobalEdge(trc);
      r->traceWeakDebugEnvironmentEdges(trc);
      r->traceWeakEdgesInJitRealm(trc);
    }
  }
}

template <typename T>
static inline void UpdateCellPointers(MovingTracer* trc, T* cell) {
  // We only update unmoved GC things or the new copy of moved GC things, never
  // the old copy. If this happened it could clear the forwarded flag which
  // could lead to pointers to the old copy not being updated.
  MOZ_ASSERT(!cell->isForwarded());

  cell->fixupAfterMovingGC();
  cell->traceChildren(trc);
}

template <typename T>
static void UpdateArenaPointersTyped(MovingTracer* trc, Arena* arena) {
  for (ArenaCellIterUnderGC cell(arena); !cell.done(); cell.next()) {
    UpdateCellPointers(trc, cell.as<T>());
  }
}

static bool CanUpdateKindInBackground(AllocKind kind) {
  // We try to update as many GC things in parallel as we can, but there are
  // kinds for which this might not be safe:
  //  - we assume JSObjects that are foreground finalized are not safe to
  //    update in parallel
  //  - updating a SharedPropMap touches child maps in
  //    SharedPropMap::fixupAfterMovingGC
  return js::gc::IsBackgroundFinalized(kind) && !IsShapeAllocKind(kind) &&
         kind != AllocKind::BASE_SHAPE;
}

/*
 * Update the internal pointers for all cells in an arena.
 */
static void UpdateArenaPointers(MovingTracer* trc, Arena* arena) {
  AllocKind kind = arena->getAllocKind();

  MOZ_ASSERT_IF(!CanUpdateKindInBackground(kind),
                CurrentThreadCanAccessRuntime(trc->runtime()));

  switch (kind) {
#define EXPAND_CASE(allocKind, traceKind, type, sizedType, bgFinal, nursery, \
                    compact)                                                 \
  case AllocKind::allocKind:                                                 \
    UpdateArenaPointersTyped<type>(trc, arena);                              \
    return;
    FOR_EACH_ALLOCKIND(EXPAND_CASE)
#undef EXPAND_CASE

    default:
      MOZ_CRASH("Invalid alloc kind for UpdateArenaPointers");
  }
}

struct ArenaListSegment {
  Arena* begin;
  Arena* end;
};

/*
 * Update the internal pointers for all arenas in a segment of an arena list.
 *
 * Returns the number of steps to count against the slice budget.
 */
static size_t UpdateArenaListSegmentPointers(GCRuntime* gc,
                                             const ArenaListSegment& arenas) {
  MOZ_ASSERT(arenas.begin);
  MovingTracer trc(gc->rt);
  size_t count = 0;
  for (Arena* arena = arenas.begin; arena != arenas.end; arena = arena->next) {
    UpdateArenaPointers(&trc, arena);
    count++;
  }
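  // Each arena processed here is charged as 256 steps against the caller's
  // slice budget.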
  return count * 256;
}

class ArenasToUpdate {
  // Maximum number of arenas to update in one block.
#ifdef DEBUG
  static const unsigned MaxArenasToProcess = 16;
#else
  static const unsigned MaxArenasToProcess = 256;
#endif

 public:
  explicit ArenasToUpdate(Zone* zone);
  ArenasToUpdate(Zone* zone, const AllocKinds& kinds);

  bool done() const { return !segmentBegin; }

  ArenaListSegment get() const {
    MOZ_ASSERT(!done());
    return {segmentBegin, segmentEnd};
  }

  void next();

 private:
  Maybe<AllocKinds> kinds;            // Selects which thing kinds to update.
  Zone* zone;                         // Zone to process.
  AllocKind kind = AllocKind::FIRST;  // Current alloc kind to process.
  Arena* segmentBegin = nullptr;
  Arena* segmentEnd = nullptr;

  static AllocKind nextAllocKind(AllocKind i) {
    return AllocKind(uint8_t(i) + 1);
  }

  void settle();
  void findSegmentEnd();
};

ArenasToUpdate::ArenasToUpdate(Zone* zone) : zone(zone) { settle(); }

ArenasToUpdate::ArenasToUpdate(Zone* zone, const AllocKinds& kinds)
    : kinds(Some(kinds)), zone(zone) {
  settle();
}

void ArenasToUpdate::settle() {
  // Called when we have set |kind| to a new kind. Sets |arena| to the next
  // arena or null if there are no more arenas to update.

  MOZ_ASSERT(!segmentBegin);

  for (; kind < AllocKind::LIMIT; kind = nextAllocKind(kind)) {
    if (kinds && !kinds.ref().contains(kind)) {
      continue;
    }

    Arena* arena = zone->arenas.getFirstArena(kind);
    if (arena) {
      segmentBegin = arena;
      findSegmentEnd();
      break;
    }
  }
}

void ArenasToUpdate::findSegmentEnd() {
  // Take up to MaxArenasToProcess arenas from the list starting at
  // |segmentBegin| and set |segmentEnd|.
  Arena* arena = segmentBegin;
  for (size_t i = 0; arena && i < MaxArenasToProcess; i++) {
    arena = arena->next;
  }
  segmentEnd = arena;
}

void ArenasToUpdate::next() {
  MOZ_ASSERT(!done());

  segmentBegin = segmentEnd;
  if (segmentBegin) {
    findSegmentEnd();
    return;
  }

  kind = nextAllocKind(kind);
  settle();
}

static AllocKinds ForegroundUpdateKinds(AllocKinds kinds) {
  AllocKinds result;
  for (AllocKind kind : kinds) {
    if (!CanUpdateKindInBackground(kind)) {
      result += kind;
    }
  }
  return result;
}

void GCRuntime::updateCellPointers(Zone* zone, AllocKinds kinds) {
  AllocKinds fgKinds = ForegroundUpdateKinds(kinds);
  AllocKinds bgKinds = kinds - fgKinds;
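
  // Helper threads work through the background-safe kinds in parallel while
  // the main thread updates the remaining (foreground-only) kinds below.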
  ArenasToUpdate fgArenas(zone, fgKinds);
  ArenasToUpdate bgArenas(zone, bgKinds);

  AutoLockHelperThreadState lock;

  AutoRunParallelWork bgTasks(this, UpdateArenaListSegmentPointers,
                              gcstats::PhaseKind::COMPACT_UPDATE_CELLS,
                              GCUse::Unspecified, bgArenas,
                              SliceBudget::unlimited(), lock);

  AutoUnlockHelperThreadState unlock(lock);

  for (; !fgArenas.done(); fgArenas.next()) {
    UpdateArenaListSegmentPointers(this, fgArenas.get());
  }
}

// After cells have been relocated any pointers to a cell's old locations must
// be updated to point to the new location. This happens by iterating through
// all cells in the heap and tracing their children (non-recursively) to update
// them.
//
// This is complicated by the fact that updating a GC thing sometimes depends
// on making use of other GC things. After a moving GC these things may not be
// in a valid state since they may contain pointers which have not been updated
// yet.
//
// The main dependencies are:
//
//   - Updating a JSObject makes use of its shape
//   - Updating a typed object makes use of its type descriptor object
//
// This means we require at least three phases for update:
//
//  1) shapes
//  2) typed object type descriptor objects
//  3) all other objects
//
// Also, there can be data races calling IsForwarded() on the new location of a
// cell whose first word is being updated in parallel on another thread. The
// easiest way to avoid this is to not store a GC pointer in the first word of
// a cell. Otherwise this can be avoided by updating different kinds of cell in
// different phases.
//
// Since we want to minimize the number of phases, arrange kinds into three
// arbitrary phases.

static constexpr AllocKinds UpdatePhaseOne{AllocKind::SCRIPT,
                                           AllocKind::BASE_SHAPE,
                                           AllocKind::SHAPE,
                                           AllocKind::STRING,
                                           AllocKind::JITCODE,
                                           AllocKind::REGEXP_SHARED,
                                           AllocKind::SCOPE,
                                           AllocKind::GETTER_SETTER,
                                           AllocKind::COMPACT_PROP_MAP,
                                           AllocKind::NORMAL_PROP_MAP,
                                           AllocKind::DICT_PROP_MAP};

// UpdatePhaseTwo is typed object descriptor objects.

static constexpr AllocKinds UpdatePhaseThree{AllocKind::FUNCTION,
                                             AllocKind::FUNCTION_EXTENDED,
                                             AllocKind::OBJECT0,
                                             AllocKind::OBJECT0_BACKGROUND,
                                             AllocKind::OBJECT2,
                                             AllocKind::OBJECT2_BACKGROUND,
                                             AllocKind::ARRAYBUFFER4,
                                             AllocKind::OBJECT4,
                                             AllocKind::OBJECT4_BACKGROUND,
                                             AllocKind::ARRAYBUFFER8,
                                             AllocKind::OBJECT8,
                                             AllocKind::OBJECT8_BACKGROUND,
                                             AllocKind::ARRAYBUFFER12,
                                             AllocKind::OBJECT12,
                                             AllocKind::OBJECT12_BACKGROUND,
                                             AllocKind::ARRAYBUFFER16,
                                             AllocKind::OBJECT16,
                                             AllocKind::OBJECT16_BACKGROUND};

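// There is no UpdatePhaseTwo set of alloc kinds defined here, so only phases
// one and three are actually run below.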
void GCRuntime::updateAllCellPointers(MovingTracer* trc, Zone* zone) {
  updateCellPointers(zone, UpdatePhaseOne);

  updateCellPointers(zone, UpdatePhaseThree);
}

/*
 * Update pointers to relocated cells in a single zone by doing a traversal of
 * that zone's arenas and calling per-zone sweep hooks.
 *
 * The latter is necessary to update weak references which are not marked as
 * part of the traversal.
 */
void GCRuntime::updateZonePointersToRelocatedCells(Zone* zone) {
  MOZ_ASSERT(!rt->isBeingDestroyed());
  MOZ_ASSERT(zone->isGCCompacting());

  AutoTouchingGrayThings tgt;

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT_UPDATE);
  MovingTracer trc(rt);

  zone->fixupAfterMovingGC();
  zone->fixupScriptMapsAfterMovingGC(&trc);

  // Fixup compartment global pointers as these get accessed during marking.
  for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
    comp->fixupAfterMovingGC(&trc);
  }

  zone->externalStringCache().purge();
  zone->functionToStringCache().purge();
  zone->shapeZone().purgeShapeCaches(rt->gcContext());
  rt->caches().stringToAtomCache.purge();

  // Iterate through all cells that can contain relocatable pointers to update
  // them. Since updating each cell is independent we try to parallelize this
  // as much as possible.
  updateAllCellPointers(&trc, zone);

  // Sweep everything to fix up weak pointers.
  sweepZoneAfterCompacting(&trc, zone);

  // Call callbacks to get the rest of the system to fixup other untraced
  // pointers.
  for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
    callWeakPointerCompartmentCallbacks(&trc, comp);
  }
}

/*
 * Update runtime-wide pointers to relocated cells.
 */
void GCRuntime::updateRuntimePointersToRelocatedCells(AutoGCSession& session) {
  MOZ_ASSERT(!rt->isBeingDestroyed());

  gcstats::AutoPhase ap1(stats(), gcstats::PhaseKind::COMPACT_UPDATE);
  MovingTracer trc(rt);

  Zone::fixupAllCrossCompartmentWrappersAfterMovingGC(&trc);

  rt->geckoProfiler().fixupStringsMapAfterMovingGC();

  // Mark roots to update them.

  traceRuntimeForMajorGC(&trc, session);

  {
    gcstats::AutoPhase ap2(stats(), gcstats::PhaseKind::MARK_ROOTS);
    DebugAPI::traceAllForMovingGC(&trc);
    DebugAPI::traceCrossCompartmentEdges(&trc);

    // Mark all gray roots.
    traceEmbeddingGrayRoots(&trc);
    Compartment::traceIncomingCrossCompartmentEdgesForZoneGC(
        &trc, Compartment::GrayEdges);
  }

  // Sweep everything to fix up weak pointers.
  jit::JitRuntime::TraceWeakJitcodeGlobalTable(rt, &trc);
  for (JS::detail::WeakCacheBase* cache : rt->weakCaches()) {
    cache->traceWeak(&trc, nullptr);
  }

  if (rt->hasJitRuntime() && rt->jitRuntime()->hasInterpreterEntryMap()) {
    rt->jitRuntime()->getInterpreterEntryMap()->updateScriptsAfterMovingGC();
  }

  // Type inference may put more blocks here to free.
  {
    AutoLockHelperThreadState lock;
    lifoBlocksToFree.ref().freeAll();
  }

  // Call callbacks to get the rest of the system to fixup other untraced
  // pointers.
  callWeakPointerZonesCallbacks(&trc);
}

void GCRuntime::clearRelocatedArenas(Arena* arenaList, JS::GCReason reason) {
  AutoLockGC lock(this);
  clearRelocatedArenasWithoutUnlocking(arenaList, reason, lock);
}

void GCRuntime::clearRelocatedArenasWithoutUnlocking(Arena* arenaList,
                                                     JS::GCReason reason,
                                                     const AutoLockGC& lock) {
  // Clear the relocated arenas, now containing only forwarding pointers
  while (arenaList) {
    Arena* arena = arenaList;
    arenaList = arenaList->next;

    // Clear the mark bits
    arena->unmarkAll();

    // Mark arena as empty
    arena->setAsFullyUnused();

#ifdef DEBUG
    // The cell contents have been partially marked no access in RelocateCell,
    // so we need to mark the region as undefined again so we can poison it.
    SetMemCheckKind(reinterpret_cast<void*>(arena->thingsStart()),
                    arena->getThingsSpan(), MemCheckKind::MakeUndefined);
#endif

    AlwaysPoison(reinterpret_cast<void*>(arena->thingsStart()),
                 JS_MOVED_TENURED_PATTERN, arena->getThingsSpan(),
                 MemCheckKind::MakeNoAccess);

    // Don't count emptied arenas as being freed by the current GC:
    //  - if we purposely moved everything to new arenas, as that will already
    //    have allocated a similar number of arenas. (This only happens for
    //    collections triggered by GC zeal.)
    //  - if they were allocated since the start of the GC.
    bool allArenasRelocated = ShouldRelocateAllArenas(reason);
    bool updateRetainedSize = !allArenasRelocated && !arena->isNewlyCreated();
    arena->zone->gcHeapSize.removeBytes(ArenaSize, updateRetainedSize,
                                        heapSize);

    // Release the arena but don't return it to the chunk yet.
    arena->release(lock);
  }
}

#ifdef DEBUG

// In debug mode we don't always release relocated arenas straight away.
// Sometimes protect them instead and hold onto them until the next GC sweep
// phase to catch any pointers to them that didn't get forwarded.

static inline bool CanProtectArenas() {
  // On some systems the page size is larger than the size of an arena so we
  // can't change the mapping permissions per arena.
  return SystemPageSize() <= ArenaSize;
}

static inline bool ShouldProtectRelocatedArenas(JS::GCReason reason) {
  // For zeal mode collections we don't release the relocated arenas
  // immediately. Instead we protect them and keep them around until the next
  // collection so we can catch any stray accesses to them.
  return reason == JS::GCReason::DEBUG_GC && CanProtectArenas();
}

void GCRuntime::protectOrReleaseRelocatedArenas(Arena* arenaList,
                                                JS::GCReason reason) {
  if (ShouldProtectRelocatedArenas(reason)) {
    protectAndHoldArenas(arenaList);
    return;
  }

  releaseRelocatedArenas(arenaList);
}

void GCRuntime::protectAndHoldArenas(Arena* arenaList) {
  for (Arena* arena = arenaList; arena;) {
    MOZ_ASSERT(!arena->allocated());
    Arena* next = arena->next;
    if (!next) {
      // Prepend to hold list before we protect the memory.
      AutoLockGC lock(this);
      arena->next = relocatedArenasToRelease;
      relocatedArenasToRelease = arenaList;
    }
    ProtectPages(arena, ArenaSize);
    arena = next;
  }
}

void GCRuntime::unprotectHeldRelocatedArenas(const AutoLockGC& lock) {
  for (Arena* arena = relocatedArenasToRelease; arena; arena = arena->next) {
    UnprotectPages(arena, ArenaSize);
    MOZ_ASSERT(!arena->allocated());
  }
}

void GCRuntime::releaseHeldRelocatedArenas() {
  AutoLockGC lock(this);
  unprotectHeldRelocatedArenas(lock);
  Arena* arenas = relocatedArenasToRelease;
  relocatedArenasToRelease = nullptr;
  releaseRelocatedArenasWithoutUnlocking(arenas, lock);
}

void GCRuntime::releaseHeldRelocatedArenasWithoutUnlocking(
    const AutoLockGC& lock) {
  unprotectHeldRelocatedArenas(lock);
  releaseRelocatedArenasWithoutUnlocking(relocatedArenasToRelease, lock);
  relocatedArenasToRelease = nullptr;
}

#endif

void GCRuntime::releaseRelocatedArenas(Arena* arenaList) {
  AutoLockGC lock(this);
  releaseRelocatedArenasWithoutUnlocking(arenaList, lock);
}

void GCRuntime::releaseRelocatedArenasWithoutUnlocking(Arena* arenaList,
                                                       const AutoLockGC& lock) {
  // Release relocated arenas previously cleared with clearRelocatedArenas().
  while (arenaList) {
    Arena* arena = arenaList;
    arenaList = arenaList->next;

    // We already updated the memory accounting so just call
    // Chunk::releaseArena.
    arena->chunk()->releaseArena(this, arena, lock);
  }
}