js/src/gc/Compacting.cpp

/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

/*
 * Implementation of compacting GC.
 */

#include "mozilla/Maybe.h"

#include "debugger/DebugAPI.h"
#include "gc/ArenaList.h"
#include "gc/GCInternals.h"
#include "gc/GCLock.h"
#include "gc/ParallelWork.h"
#include "gc/Zone.h"
#include "jit/JitCode.h"
#include "jit/JitRuntime.h"
#include "jit/JitZone.h"
#include "js/GCAPI.h"
#include "vm/HelperThreads.h"
#include "vm/Realm.h"
#include "wasm/WasmGcObject.h"

#include "gc/Heap-inl.h"
#include "gc/Marking-inl.h"
#include "gc/PrivateIterators-inl.h"
#include "gc/StableCellHasher-inl.h"
#include "gc/TraceMethods-inl.h"
#include "vm/GeckoProfiler-inl.h"

using namespace js;
using namespace js::gc;

using mozilla::Maybe;
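
// Compacting skips the atoms zone; arenas in all other collected zones may be
// relocated.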
bool GCRuntime::canRelocateZone(Zone* zone) const {
  return !zone->isAtomsZone();
}

void GCRuntime::beginCompactPhase() {
  MOZ_ASSERT(!isBackgroundSweeping());
  assertBackgroundSweepingFinished();

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT);

  MOZ_ASSERT(zonesToMaybeCompact.ref().isEmpty());
  for (GCZonesIter zone(this); !zone.done(); zone.next()) {
    if (canRelocateZone(zone)) {
      zonesToMaybeCompact.ref().append(zone);
    }
  }

  startedCompacting = true;
  zonesCompacted = 0;

#ifdef DEBUG
  AutoLockGC lock(this);
  MOZ_ASSERT(!relocatedArenasToRelease);
#endif
}

IncrementalProgress GCRuntime::compactPhase(JS::GCReason reason,
                                            SliceBudget& sliceBudget,
                                            AutoGCSession& session) {
  assertBackgroundSweepingFinished();
  MOZ_ASSERT(startedCompacting);

  AutoMajorGCProfilerEntry s(this);
  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT);

  // TODO: JSScripts can move. If the sampler interrupts the GC in the
  // middle of relocating an arena, invalid JSScript pointers may be
  // accessed. Suppress all sampling until a finer-grained solution can be
  // found. See bug 1295775.
  AutoSuppressProfilerSampling suppressSampling(rt->mainContextFromOwnThread());

  ZoneList relocatedZones;
  Arena* relocatedArenas = nullptr;
  while (!zonesToMaybeCompact.ref().isEmpty()) {
    Zone* zone = zonesToMaybeCompact.ref().front();
    zonesToMaybeCompact.ref().removeFront();

    MOZ_ASSERT(nursery().isEmpty());
    zone->changeGCState(Zone::Finished, Zone::Compact);

    if (relocateArenas(zone, reason, relocatedArenas, sliceBudget)) {
      updateZonePointersToRelocatedCells(zone);
      relocatedZones.append(zone);
      zonesCompacted++;
    } else {
      zone->changeGCState(Zone::Compact, Zone::Finished);
    }

    if (sliceBudget.isOverBudget()) {
      break;
    }
  }

  if (!relocatedZones.isEmpty()) {
    updateRuntimePointersToRelocatedCells(session);

    do {
      Zone* zone = relocatedZones.front();
      relocatedZones.removeFront();
      zone->changeGCState(Zone::Compact, Zone::Finished);
    } while (!relocatedZones.isEmpty());
  }

  clearRelocatedArenas(relocatedArenas, reason);

#ifdef DEBUG
  protectOrReleaseRelocatedArenas(relocatedArenas, reason);
#else
  releaseRelocatedArenas(relocatedArenas);
#endif

  // Clear caches that can contain cell pointers.
  rt->caches().purgeForCompaction();

#ifdef DEBUG
  checkHashTablesAfterMovingGC();
#endif

  return zonesToMaybeCompact.ref().isEmpty() ? Finished : NotFinished;
}

void GCRuntime::endCompactPhase() { startedCompacting = false; }

static bool ShouldRelocateAllArenas(JS::GCReason reason) {
  return reason == JS::GCReason::DEBUG_GC;
}

/*
 * Choose which arenas to relocate all cells from. Return an arena cursor that
 * can be passed to removeRemainingArenas().
 */
Arena** ArenaList::pickArenasToRelocate(size_t& arenaTotalOut,
                                        size_t& relocTotalOut) {
  // Relocate the greatest number of arenas such that the number of used cells
  // in relocated arenas is less than or equal to the number of free cells in
  // unrelocated arenas. In other words we only relocate cells we can move
  // into existing arenas, and we choose the least full arenas to relocate.

  // This is made easier by the fact that the arena list has been sorted in
  // descending order of number of used cells, so we will always relocate a
  // tail of the arena list. All we need to do is find the point at which to
  // start relocating.

  check();

  if (isCursorAtEnd()) {
    return nullptr;
  }

  Arena** arenap = cursorp_;      // Next arena to consider for relocation.
  size_t previousFreeCells = 0;   // Count of free cells before arenap.
  size_t followingUsedCells = 0;  // Count of used cells after arenap.
  size_t fullArenaCount = 0;      // Number of full arenas (not relocated).
  size_t nonFullArenaCount =
      0;  // Number of non-full arenas (considered for relocation).
  size_t arenaIndex = 0;  // Index of the next arena to consider.

  for (Arena* arena = head_; arena != *cursorp_; arena = arena->next) {
    fullArenaCount++;
  }

  for (Arena* arena = *cursorp_; arena; arena = arena->next) {
    followingUsedCells += arena->countUsedCells();
    nonFullArenaCount++;
  }

  mozilla::DebugOnly<size_t> lastFreeCells(0);
  size_t cellsPerArena = Arena::thingsPerArena((*arenap)->getAllocKind());

  while (*arenap) {
    Arena* arena = *arenap;
    if (followingUsedCells <= previousFreeCells) {
      break;
    }

    size_t freeCells = arena->countFreeCells();
    size_t usedCells = cellsPerArena - freeCells;
    followingUsedCells -= usedCells;
#ifdef DEBUG
    MOZ_ASSERT(freeCells >= lastFreeCells);
    lastFreeCells = freeCells;
#endif
    previousFreeCells += freeCells;
    arenap = &arena->next;
    arenaIndex++;
  }

  size_t relocCount = nonFullArenaCount - arenaIndex;
  MOZ_ASSERT(relocCount < nonFullArenaCount);
  MOZ_ASSERT((relocCount == 0) == (!*arenap));
  arenaTotalOut += fullArenaCount + nonFullArenaCount;
  relocTotalOut += relocCount;

  return arenap;
}

#ifdef DEBUG
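// Uses unsigned wrap-around so that a single comparison also rejects pointers
// below |start|.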
inline bool PtrIsInRange(const void* ptr, const void* start, size_t length) {
  return uintptr_t(ptr) - uintptr_t(start) < length;
}
#endif

static void RelocateCell(Zone* zone, TenuredCell* src, AllocKind thingKind,
                         size_t thingSize) {
  JS::AutoSuppressGCAnalysis nogc;

  // Allocate a new cell.
  MOZ_ASSERT(zone == src->zone());
  TenuredCell* dst =
      reinterpret_cast<TenuredCell*>(AllocateCellInGC(zone, thingKind));

  // Copy source cell contents to destination.
  memcpy(dst, src, thingSize);

  // Move any uid attached to the object.
  gc::TransferUniqueId(dst, src);

  if (IsObjectAllocKind(thingKind)) {
    auto* srcObj = static_cast<JSObject*>(static_cast<Cell*>(src));
    auto* dstObj = static_cast<JSObject*>(static_cast<Cell*>(dst));

    if (srcObj->is<NativeObject>()) {
      NativeObject* srcNative = &srcObj->as<NativeObject>();
      NativeObject* dstNative = &dstObj->as<NativeObject>();

      // Fixup the pointer to inline object elements if necessary.
      if (srcNative->hasFixedElements()) {
        uint32_t numShifted =
            srcNative->getElementsHeader()->numShiftedElements();
        dstNative->setFixedElements(numShifted);
      }
    } else if (srcObj->is<ProxyObject>()) {
      if (srcObj->as<ProxyObject>().usingInlineValueArray()) {
        dstObj->as<ProxyObject>().setInlineValueArray();
      }
    }

    // Call object moved hook if present.
    if (JSObjectMovedOp op = srcObj->getClass()->extObjectMovedOp()) {
      op(dstObj, srcObj);
    }

    MOZ_ASSERT_IF(
        dstObj->is<NativeObject>(),
        !PtrIsInRange(
            (const Value*)dstObj->as<NativeObject>().getDenseElements(), src,
            thingSize));
  }

  // Copy the mark bits.
  dst->copyMarkBitsFrom(src);

  // Poison the source cell contents except for the forwarding flag and pointer
  // which will be stored in the first word. We can't do this for native
  // objects with fixed elements because this would overwrite the element flags
  // and these are needed when updating COW elements referred to by other
  // objects.
#ifdef DEBUG
  JSObject* srcObj = IsObjectAllocKind(thingKind)
                         ? static_cast<JSObject*>(static_cast<Cell*>(src))
                         : nullptr;
  if (!srcObj || !srcObj->is<NativeObject>() ||
      !srcObj->as<NativeObject>().hasFixedElements()) {
    AlwaysPoison(reinterpret_cast<uint8_t*>(src) + sizeof(uintptr_t),
                 JS_MOVED_TENURED_PATTERN, thingSize - sizeof(uintptr_t),
                 MemCheckKind::MakeNoAccess);
  }
#endif

  // Mark source cell as forwarded and leave a pointer to the destination.
  RelocationOverlay::forwardCell(src, dst);
}

static void RelocateArena(Arena* arena, SliceBudget& sliceBudget) {
  MOZ_ASSERT(arena->allocated());
  MOZ_ASSERT(!arena->onDelayedMarkingList());
  MOZ_ASSERT(arena->bufferedCells()->isEmpty());

  Zone* zone = arena->zone;

  AllocKind thingKind = arena->getAllocKind();
  size_t thingSize = arena->getThingSize();

  for (ArenaCellIterUnderGC cell(arena); !cell.done(); cell.next()) {
    RelocateCell(zone, cell, thingKind, thingSize);
    sliceBudget.step();
  }

#ifdef DEBUG
  for (ArenaCellIterUnderGC cell(arena); !cell.done(); cell.next()) {
    TenuredCell* src = cell;
    MOZ_ASSERT(src->isForwarded());
    TenuredCell* dest = Forwarded(src);
    MOZ_ASSERT(src->isMarkedBlack() == dest->isMarkedBlack());
    MOZ_ASSERT(src->isMarkedGray() == dest->isMarkedGray());
  }
#endif
}

/*
 * Relocate all arenas identified by pickArenasToRelocate: for each arena,
 * relocate each cell within it, then add it to a list of relocated arenas.
 */
Arena* ArenaList::relocateArenas(Arena* toRelocate, Arena* relocated,
                                 SliceBudget& sliceBudget,
                                 gcstats::Statistics& stats) {
  check();

  while (Arena* arena = toRelocate) {
    toRelocate = arena->next;
    RelocateArena(arena, sliceBudget);
    // Prepend to list of relocated arenas.
    arena->next = relocated;
    relocated = arena;
    stats.count(gcstats::COUNT_ARENA_RELOCATED);
  }

  check();

  return relocated;
}

// Skip compacting zones unless we can free a certain proportion of their GC
// heap memory.
static const double MIN_ZONE_RECLAIM_PERCENT = 2.0;
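
// Relocation is always worthwhile when the GC was triggered by low memory;
// otherwise require that at least MIN_ZONE_RECLAIM_PERCENT of the zone's
// arenas could be freed.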
static bool ShouldRelocateZone(size_t arenaCount, size_t relocCount,
                               JS::GCReason reason) {
  if (relocCount == 0) {
    return false;
  }

  if (IsOOMReason(reason)) {
    return true;
  }

  double relocFraction = double(relocCount) / double(arenaCount);
  return relocFraction * 100.0 >= MIN_ZONE_RECLAIM_PERCENT;
}
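
// Compute the set of alloc kinds whose cells can be relocated.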
static AllocKinds CompactingAllocKinds() {
  AllocKinds result;
  for (AllocKind kind : AllAllocKinds()) {
    if (IsCompactingKind(kind)) {
      result += kind;
    }
  }
  return result;
}

bool ArenaLists::relocateArenas(Arena*& relocatedListOut, JS::GCReason reason,
                                SliceBudget& sliceBudget,
                                gcstats::Statistics& stats) {
  // This is only called from the main thread while we are doing a GC, so
  // there is no need to lock.
  MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime()));
  MOZ_ASSERT(runtime()->gc.isHeapCompacting());
  MOZ_ASSERT(!runtime()->gc.isBackgroundSweeping());

  // Relocate all compatible kinds.
  AllocKinds allocKindsToRelocate = CompactingAllocKinds();

  // Clear all the free lists.
  clearFreeLists();

  if (ShouldRelocateAllArenas(reason)) {
    zone_->prepareForCompacting();
    for (auto kind : allocKindsToRelocate) {
      ArenaList& al = arenaList(kind);
      Arena* allArenas = al.head();
      al.clear();
      relocatedListOut =
          al.relocateArenas(allArenas, relocatedListOut, sliceBudget, stats);
    }
  } else {
    size_t arenaCount = 0;
    size_t relocCount = 0;
    AllAllocKindArray<Arena**> toRelocate;

    for (auto kind : allocKindsToRelocate) {
      toRelocate[kind] =
          arenaList(kind).pickArenasToRelocate(arenaCount, relocCount);
    }

    if (!ShouldRelocateZone(arenaCount, relocCount, reason)) {
      return false;
    }

    zone_->prepareForCompacting();
    for (auto kind : allocKindsToRelocate) {
      if (toRelocate[kind]) {
        ArenaList& al = arenaList(kind);
        Arena* arenas = al.removeRemainingArenas(toRelocate[kind]);
        relocatedListOut =
            al.relocateArenas(arenas, relocatedListOut, sliceBudget, stats);
      }
    }
  }

  return true;
}

bool GCRuntime::relocateArenas(Zone* zone, JS::GCReason reason,
                               Arena*& relocatedListOut,
                               SliceBudget& sliceBudget) {
  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT_MOVE);

  MOZ_ASSERT(!zone->isPreservingCode());
  MOZ_ASSERT(canRelocateZone(zone));

  js::CancelOffThreadIonCompile(rt, JS::Zone::Compact);

  if (!zone->arenas.relocateArenas(relocatedListOut, reason, sliceBudget,
                                   stats())) {
    return false;
  }

#ifdef DEBUG
  // Check that we did as much compaction as we should have. There
  // should always be less than one arena's worth of free cells.
  for (auto kind : CompactingAllocKinds()) {
    ArenaList& al = zone->arenas.arenaList(kind);
    size_t freeCells = 0;
    for (Arena* arena = al.arenaAfterCursor(); arena; arena = arena->next) {
      freeCells += arena->countFreeCells();
    }
    MOZ_ASSERT(freeCells < Arena::thingsPerArena(kind));
  }
#endif

  return true;
}

MovingTracer::MovingTracer(JSRuntime* rt)
    : GenericTracerImpl(rt, JS::TracerKind::Moving,
                        JS::WeakMapTraceAction::TraceKeysAndValues) {}
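
// Update an edge to point at the relocated copy of a cell: if the cell belongs
// to this runtime and has been forwarded, rewrite the pointer.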
template <typename T>
inline void MovingTracer::onEdge(T** thingp, const char* name) {
  T* thing = *thingp;
  if (thing->runtimeFromAnyThread() == runtime() && IsForwarded(thing)) {
    *thingp = Forwarded(thing);
  }
}

void Zone::prepareForCompacting() {
  JS::GCContext* gcx = runtimeFromMainThread()->gcContext();
  discardJitCode(gcx);
}
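
// After compacting a zone, sweep its weak pointers (weak maps, weak caches,
// JIT data, and per-compartment/realm weak edges) so that references to
// relocated cells are updated.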
void GCRuntime::sweepZoneAfterCompacting(MovingTracer* trc, Zone* zone) {
  MOZ_ASSERT(zone->isGCCompacting());

  zone->traceWeakMaps(trc);
  zone->sweepObjectsWithWeakPointers(trc);

  traceWeakFinalizationObserverEdges(trc, zone);

  for (auto* cache : zone->weakCaches()) {
    cache->traceWeak(trc, nullptr);
  }

  if (jit::JitZone* jitZone = zone->jitZone()) {
    jitZone->traceWeak(trc, zone);
  }

  for (CompartmentsInZoneIter c(zone); !c.done(); c.next()) {
    c->traceWeakNativeIterators(trc);

    for (RealmsInCompartmentIter r(c); !r.done(); r.next()) {
      r->traceWeakSavedStacks(trc);
      r->traceWeakGlobalEdge(trc);
      r->traceWeakDebugEnvironmentEdges(trc);
    }
  }
}

template <typename T>
static inline void UpdateCellPointers(MovingTracer* trc, T* cell) {
  // We only update unmoved GC things or the new copy of moved GC things, never
  // the old copy. If this happened it could clear the forwarded flag which
  // could lead to pointers to the old copy not being updated.
  MOZ_ASSERT(!cell->isForwarded());

  cell->fixupAfterMovingGC();
  cell->traceChildren(trc);
}

template <typename T>
static void UpdateArenaPointersTyped(MovingTracer* trc, Arena* arena) {
  for (ArenaCellIterUnderGC cell(arena); !cell.done(); cell.next()) {
    UpdateCellPointers(trc, cell.as<T>());
  }
}

static bool CanUpdateKindInBackground(AllocKind kind) {
  // We try to update as many GC things in parallel as we can, but there are
  // kinds for which this might not be safe:
  //  - we assume JSObjects that are foreground finalized are not safe to
  //    update in parallel
  //  - updating a SharedPropMap touches child maps in
  //    SharedPropMap::fixupAfterMovingGC
  return js::gc::IsBackgroundFinalized(kind) && !IsShapeAllocKind(kind) &&
         kind != AllocKind::BASE_SHAPE;
}

/*
 * Update the internal pointers for all cells in an arena.
 */
static void UpdateArenaPointers(MovingTracer* trc, Arena* arena) {
  AllocKind kind = arena->getAllocKind();

  MOZ_ASSERT_IF(!CanUpdateKindInBackground(kind),
                CurrentThreadCanAccessRuntime(trc->runtime()));

  switch (kind) {
#define EXPAND_CASE(allocKind, traceKind, type, sizedType, bgFinal, nursery, \
                    compact)                                                 \
  case AllocKind::allocKind:                                                 \
    UpdateArenaPointersTyped<type>(trc, arena);                              \
    return;
    FOR_EACH_ALLOCKIND(EXPAND_CASE)
#undef EXPAND_CASE

    default:
      MOZ_CRASH("Invalid alloc kind for UpdateArenaPointers");
  }
}

struct ArenaListSegment {
  Arena* begin;
  Arena* end;
};

/*
 * Update the internal pointers for all arenas in a segment of an arena list.
 *
 * Returns the number of steps to count against the slice budget.
 */
static size_t UpdateArenaListSegmentPointers(GCRuntime* gc,
                                             const ArenaListSegment& arenas) {
  MOZ_ASSERT(arenas.begin);
  MovingTracer trc(gc->rt);
  size_t count = 0;
  for (Arena* arena = arenas.begin; arena != arenas.end; arena = arena->next) {
    UpdateArenaPointers(&trc, arena);
    count++;
  }
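  // Each arena processed counts as 256 steps against the slice budget.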
  return count * 256;
}
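
// Iterates over a zone's arenas, optionally filtered by alloc kind, handing
// them out in segments of at most MaxArenasToProcess arenas so that pointer
// updates can be split across parallel tasks.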
class ArenasToUpdate {
  // Maximum number of arenas to update in one block.
#ifdef DEBUG
  static const unsigned MaxArenasToProcess = 16;
#else
  static const unsigned MaxArenasToProcess = 256;
#endif

 public:
  explicit ArenasToUpdate(Zone* zone);
  ArenasToUpdate(Zone* zone, const AllocKinds& kinds);

  bool done() const { return !segmentBegin; }

  ArenaListSegment get() const {
    MOZ_ASSERT(!done());
    return {segmentBegin, segmentEnd};
  }

  void next();

 private:
  Maybe<AllocKinds> kinds;            // Selects which thing kinds to update.
  Zone* zone;                         // Zone to process.
  AllocKind kind = AllocKind::FIRST;  // Current alloc kind to process.
  Arena* segmentBegin = nullptr;
  Arena* segmentEnd = nullptr;

  static AllocKind nextAllocKind(AllocKind i) {
    return AllocKind(uint8_t(i) + 1);
  }

  void settle();
  void findSegmentEnd();
};

ArenasToUpdate::ArenasToUpdate(Zone* zone) : zone(zone) { settle(); }

ArenasToUpdate::ArenasToUpdate(Zone* zone, const AllocKinds& kinds)
    : kinds(Some(kinds)), zone(zone) {
  settle();
}

void ArenasToUpdate::settle() {
  // Called when we have set |kind| to a new kind. Sets |arena| to the next
  // arena or null if there are no more arenas to update.

  MOZ_ASSERT(!segmentBegin);

  for (; kind < AllocKind::LIMIT; kind = nextAllocKind(kind)) {
    if (kinds && !kinds.ref().contains(kind)) {
      continue;
    }

    Arena* arena = zone->arenas.getFirstArena(kind);
    if (arena) {
      segmentBegin = arena;
      findSegmentEnd();
      break;
    }
  }
}

void ArenasToUpdate::findSegmentEnd() {
  // Take up to MaxArenasToProcess arenas from the list starting at
  // |segmentBegin| and set |segmentEnd|.
  Arena* arena = segmentBegin;
  for (size_t i = 0; arena && i < MaxArenasToProcess; i++) {
    arena = arena->next;
  }
  segmentEnd = arena;
}

void ArenasToUpdate::next() {
  MOZ_ASSERT(!done());

  segmentBegin = segmentEnd;
  if (segmentBegin) {
    findSegmentEnd();
    return;
  }

  kind = nextAllocKind(kind);
  settle();
}

static AllocKinds ForegroundUpdateKinds(AllocKinds kinds) {
  AllocKinds result;
  for (AllocKind kind : kinds) {
    if (!CanUpdateKindInBackground(kind)) {
      result += kind;
    }
  }
  return result;
}
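
// Update cell pointers for the given alloc kinds in |zone|: kinds that are
// safe to update off the main thread are handed to parallel helper tasks,
// while the remaining kinds are updated here on the main thread.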
void GCRuntime::updateCellPointers(Zone* zone, AllocKinds kinds) {
  AllocKinds fgKinds = ForegroundUpdateKinds(kinds);
  AllocKinds bgKinds = kinds - fgKinds;

  ArenasToUpdate fgArenas(zone, fgKinds);
  ArenasToUpdate bgArenas(zone, bgKinds);

  AutoLockHelperThreadState lock;

  AutoRunParallelWork bgTasks(this, UpdateArenaListSegmentPointers,
                              gcstats::PhaseKind::COMPACT_UPDATE_CELLS,
                              GCUse::Unspecified, bgArenas,
                              SliceBudget::unlimited(), lock);

  AutoUnlockHelperThreadState unlock(lock);

  for (; !fgArenas.done(); fgArenas.next()) {
    UpdateArenaListSegmentPointers(this, fgArenas.get());
  }
}

// After cells have been relocated any pointers to a cell's old locations must
// be updated to point to the new location. This happens by iterating through
// all cells in the heap and tracing their children (non-recursively) to update
// them.
//
// This is complicated by the fact that updating a GC thing sometimes depends
// on making use of other GC things. After a moving GC these things may not be
// in a valid state since they may contain pointers which have not been updated
// yet.
//
// The main dependencies are:
//
//   - Updating a JSObject makes use of its shape
//   - Updating a typed object makes use of its type descriptor object
//
// This means we require at least three phases for update:
//
//  1) shapes
//  2) typed object type descriptor objects
//  3) all other objects
//
// Also, there can be data races calling IsForwarded() on the new location of a
// cell whose first word is being updated in parallel on another thread. The
// easiest way to avoid this is to not store a GC pointer in the first word of
// a cell. Otherwise this can be avoided by updating different kinds of cell in
// different phases.
//
// Since we want to minimize the number of phases, arrange kinds into three
// arbitrary phases.

static constexpr AllocKinds UpdatePhaseOne{AllocKind::SCRIPT,
                                           AllocKind::BASE_SHAPE,
                                           AllocKind::SHAPE,
                                           AllocKind::STRING,
                                           AllocKind::JITCODE,
                                           AllocKind::REGEXP_SHARED,
                                           AllocKind::SCOPE,
                                           AllocKind::GETTER_SETTER,
                                           AllocKind::COMPACT_PROP_MAP,
                                           AllocKind::NORMAL_PROP_MAP,
                                           AllocKind::DICT_PROP_MAP};

// UpdatePhaseTwo is typed object descriptor objects.

static constexpr AllocKinds UpdatePhaseThree{AllocKind::FUNCTION,
                                             AllocKind::FUNCTION_EXTENDED,
                                             AllocKind::OBJECT0,
                                             AllocKind::OBJECT0_BACKGROUND,
                                             AllocKind::OBJECT2,
                                             AllocKind::OBJECT2_BACKGROUND,
                                             AllocKind::ARRAYBUFFER4,
                                             AllocKind::OBJECT4,
                                             AllocKind::OBJECT4_BACKGROUND,
                                             AllocKind::ARRAYBUFFER8,
                                             AllocKind::OBJECT8,
                                             AllocKind::OBJECT8_BACKGROUND,
                                             AllocKind::ARRAYBUFFER12,
                                             AllocKind::OBJECT12,
                                             AllocKind::OBJECT12_BACKGROUND,
                                             AllocKind::ARRAYBUFFER16,
                                             AllocKind::OBJECT16,
                                             AllocKind::OBJECT16_BACKGROUND};

void GCRuntime::updateAllCellPointers(MovingTracer* trc, Zone* zone) {
  updateCellPointers(zone, UpdatePhaseOne);

  updateCellPointers(zone, UpdatePhaseThree);
}

/*
 * Update pointers to relocated cells in a single zone by doing a traversal of
 * that zone's arenas and calling per-zone sweep hooks.
 *
 * The latter is necessary to update weak references which are not marked as
 * part of the traversal.
 */
void GCRuntime::updateZonePointersToRelocatedCells(Zone* zone) {
  MOZ_ASSERT(!rt->isBeingDestroyed());
  MOZ_ASSERT(zone->isGCCompacting());

  AutoTouchingGrayThings tgt;

  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT_UPDATE);
  MovingTracer trc(rt);

  zone->fixupAfterMovingGC();
  zone->fixupScriptMapsAfterMovingGC(&trc);

  // Fixup compartment global pointers as these get accessed during marking.
  for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
    comp->fixupAfterMovingGC(&trc);
  }

  zone->externalStringCache().purge();
  zone->functionToStringCache().purge();
  zone->shapeZone().purgeShapeCaches(rt->gcContext());
  rt->caches().stringToAtomCache.purge();

  // Iterate through all cells that can contain relocatable pointers to update
  // them. Since updating each cell is independent we try to parallelize this
  // as much as possible.
  updateAllCellPointers(&trc, zone);

  // Sweep everything to fix up weak pointers.
  sweepZoneAfterCompacting(&trc, zone);

  // Call callbacks to get the rest of the system to fixup other untraced
  // pointers.
  for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
    callWeakPointerCompartmentCallbacks(&trc, comp);
  }
}

/*
 * Update runtime-wide pointers to relocated cells.
 */
void GCRuntime::updateRuntimePointersToRelocatedCells(AutoGCSession& session) {
  MOZ_ASSERT(!rt->isBeingDestroyed());

  gcstats::AutoPhase ap1(stats(), gcstats::PhaseKind::COMPACT_UPDATE);
  MovingTracer trc(rt);

  Zone::fixupAllCrossCompartmentWrappersAfterMovingGC(&trc);

  rt->geckoProfiler().fixupStringsMapAfterMovingGC();

  // Mark roots to update them.

  traceRuntimeForMajorGC(&trc, session);

  {
    gcstats::AutoPhase ap2(stats(), gcstats::PhaseKind::MARK_ROOTS);
    DebugAPI::traceAllForMovingGC(&trc);
    DebugAPI::traceCrossCompartmentEdges(&trc);

    // Mark all gray roots.
    traceEmbeddingGrayRoots(&trc);
    Compartment::traceIncomingCrossCompartmentEdgesForZoneGC(
        &trc, Compartment::GrayEdges);
  }

  // Sweep everything to fix up weak pointers.
  jit::JitRuntime::TraceWeakJitcodeGlobalTable(rt, &trc);
  for (JS::detail::WeakCacheBase* cache : rt->weakCaches()) {
    cache->traceWeak(&trc, nullptr);
  }

  if (rt->hasJitRuntime() && rt->jitRuntime()->hasInterpreterEntryMap()) {
    rt->jitRuntime()->getInterpreterEntryMap()->updateScriptsAfterMovingGC();
  }

  // Type inference may put more blocks here to free.
  {
    AutoLockHelperThreadState lock;
    lifoBlocksToFree.ref().freeAll();
  }

  // Call callbacks to get the rest of the system to fixup other untraced
  // pointers.
  callWeakPointerZonesCallbacks(&trc);
}
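
// Helpers for disposing of arenas whose cells have all been relocated: the
// arenas are cleared below, then either released back to their chunks or, in
// debug builds for zeal collections, protected and held until a later GC.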
void GCRuntime::clearRelocatedArenas(Arena* arenaList, JS::GCReason reason) {
  AutoLockGC lock(this);
  clearRelocatedArenasWithoutUnlocking(arenaList, reason, lock);
}

void GCRuntime::clearRelocatedArenasWithoutUnlocking(Arena* arenaList,
                                                     JS::GCReason reason,
                                                     const AutoLockGC& lock) {
  // Clear the relocated arenas, now containing only forwarding pointers.
  while (arenaList) {
    Arena* arena = arenaList;
    arenaList = arenaList->next;

    // Clear the mark bits.
    arena->unmarkAll();

    // Mark arena as empty.
    arena->setAsFullyUnused();

#ifdef DEBUG
    // The cell contents have been partially marked no access in RelocateCell,
    // so we need to mark the region as undefined again so we can poison it.
    SetMemCheckKind(reinterpret_cast<void*>(arena->thingsStart()),
                    arena->getThingsSpan(), MemCheckKind::MakeUndefined);
#endif

    AlwaysPoison(reinterpret_cast<void*>(arena->thingsStart()),
                 JS_MOVED_TENURED_PATTERN, arena->getThingsSpan(),
                 MemCheckKind::MakeNoAccess);

    // Don't count emptied arenas as being freed by the current GC:
    //  - if we purposely moved everything to new arenas, as that will already
    //    have allocated a similar number of arenas. (This only happens for
    //    collections triggered by GC zeal.)
    //  - if they were allocated since the start of the GC.
    bool allArenasRelocated = ShouldRelocateAllArenas(reason);
    bool updateRetainedSize = !allArenasRelocated && !arena->isNewlyCreated();
    arena->zone->gcHeapSize.removeBytes(ArenaSize, updateRetainedSize,
                                        heapSize);

    // Release the arena but don't return it to the chunk yet.
    arena->release(lock);
  }
}

#ifdef DEBUG

// In debug mode we don't always release relocated arenas straight away.
// Sometimes we protect them instead and hold onto them until the next GC sweep
// phase to catch any pointers to them that didn't get forwarded.

static inline bool CanProtectArenas() {
  // On some systems the page size is larger than the size of an arena so we
  // can't change the mapping permissions per arena.
  return SystemPageSize() <= ArenaSize;
}

static inline bool ShouldProtectRelocatedArenas(JS::GCReason reason) {
  // For zeal mode collections we don't release the relocated arenas
  // immediately. Instead we protect them and keep them around until the next
  // collection so we can catch any stray accesses to them.
  return reason == JS::GCReason::DEBUG_GC && CanProtectArenas();
}

void GCRuntime::protectOrReleaseRelocatedArenas(Arena* arenaList,
                                                JS::GCReason reason) {
  if (ShouldProtectRelocatedArenas(reason)) {
    protectAndHoldArenas(arenaList);
    return;
  }

  releaseRelocatedArenas(arenaList);
}

void GCRuntime::protectAndHoldArenas(Arena* arenaList) {
  for (Arena* arena = arenaList; arena;) {
    MOZ_ASSERT(!arena->allocated());
    Arena* next = arena->next;
    if (!next) {
      // Prepend to hold list before we protect the memory.
      AutoLockGC lock(this);
      arena->next = relocatedArenasToRelease;
      relocatedArenasToRelease = arenaList;
    }
    ProtectPages(arena, ArenaSize);
    arena = next;
  }
}

void GCRuntime::unprotectHeldRelocatedArenas(const AutoLockGC& lock) {
  for (Arena* arena = relocatedArenasToRelease; arena; arena = arena->next) {
    UnprotectPages(arena, ArenaSize);
    MOZ_ASSERT(!arena->allocated());
  }
}

void GCRuntime::releaseHeldRelocatedArenas() {
  AutoLockGC lock(this);
  unprotectHeldRelocatedArenas(lock);
  Arena* arenas = relocatedArenasToRelease;
  relocatedArenasToRelease = nullptr;
  releaseRelocatedArenasWithoutUnlocking(arenas, lock);
}

void GCRuntime::releaseHeldRelocatedArenasWithoutUnlocking(
    const AutoLockGC& lock) {
  unprotectHeldRelocatedArenas(lock);
  releaseRelocatedArenasWithoutUnlocking(relocatedArenasToRelease, lock);
  relocatedArenasToRelease = nullptr;
}

#endif

void GCRuntime::releaseRelocatedArenas(Arena* arenaList) {
  AutoLockGC lock(this);
  releaseRelocatedArenasWithoutUnlocking(arenaList, lock);
}

void GCRuntime::releaseRelocatedArenasWithoutUnlocking(Arena* arenaList,
                                                       const AutoLockGC& lock) {
  // Release relocated arenas previously cleared with clearRelocatedArenas().
  while (arenaList) {
    Arena* arena = arenaList;
    arenaList = arenaList->next;

    // We already updated the memory accounting so just call
    // Chunk::releaseArena.
    arena->chunk()->releaseArena(this, arena, lock);
  }
}