Bug 1837620 - Part 5: Sweep JIT code as part of discard when possible r=sfink
[gecko.git] / js / src / gc / Zone.cpp
blobf16192cd1b47032b09566be5629e276f0cce9e45
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 #include "gc/Zone.h"
8 #include "js/shadow/Zone.h" // JS::shadow::Zone
10 #include "mozilla/Sprintf.h"
11 #include "mozilla/TimeStamp.h"
13 #include <type_traits>
15 #include "gc/FinalizationObservers.h"
16 #include "gc/GCContext.h"
17 #include "gc/PublicIterators.h"
18 #include "jit/BaselineIC.h"
19 #include "jit/BaselineJIT.h"
20 #include "jit/Invalidation.h"
21 #include "jit/JitScript.h"
22 #include "jit/JitZone.h"
23 #include "vm/Runtime.h"
24 #include "vm/Time.h"
26 #include "debugger/DebugAPI-inl.h"
27 #include "gc/GC-inl.h"
28 #include "gc/Marking-inl.h"
29 #include "gc/Nursery-inl.h"
30 #include "gc/WeakMap-inl.h"
31 #include "vm/JSScript-inl.h"
32 #include "vm/Realm-inl.h"
using namespace js;
using namespace js::gc;

// Sentinel stored in Zone::listNext_ to mark a zone that is not on any
// ZoneList. The value 1 can never be a valid Zone pointer.
Zone* const Zone::NotOnList = reinterpret_cast<Zone*>(1);
// Construct the allocator base of a zone. The JIT heap threshold starts at
// 80% of the per-process executable-code limit.
ZoneAllocator::ZoneAllocator(JSRuntime* rt, Kind kind)
    : JS::shadow::Zone(rt, rt->gc.marker().tracer(), kind),
      jitHeapThreshold(size_t(jit::MaxCodeBytesPerProcess * 0.8)) {}
43 ZoneAllocator::~ZoneAllocator() {
44 #ifdef DEBUG
45 mallocTracker.checkEmptyOnDestroy();
46 MOZ_ASSERT(gcHeapSize.bytes() == 0);
47 MOZ_ASSERT(mallocHeapSize.bytes() == 0);
48 MOZ_ASSERT(jitHeapSize.bytes() == 0);
49 #endif
52 void ZoneAllocator::fixupAfterMovingGC() {
53 #ifdef DEBUG
54 mallocTracker.fixupAfterMovingGC();
55 #endif
58 void js::ZoneAllocator::updateSchedulingStateOnGCStart() {
59 gcHeapSize.updateOnGCStart();
60 mallocHeapSize.updateOnGCStart();
61 jitHeapSize.updateOnGCStart();
62 perZoneGCTime = mozilla::TimeDuration::Zero();
65 void js::ZoneAllocator::updateGCStartThresholds(GCRuntime& gc) {
66 bool isAtomsZone = JS::Zone::from(this)->isAtomsZone();
67 gcHeapThreshold.updateStartThreshold(
68 gcHeapSize.retainedBytes(), smoothedAllocationRate.ref(),
69 smoothedCollectionRate.ref(), gc.tunables, gc.schedulingState,
70 isAtomsZone);
72 mallocHeapThreshold.updateStartThreshold(mallocHeapSize.retainedBytes(),
73 gc.tunables, gc.schedulingState);
76 void js::ZoneAllocator::setGCSliceThresholds(GCRuntime& gc,
77 bool waitingOnBGTask) {
78 gcHeapThreshold.setSliceThreshold(this, gcHeapSize, gc.tunables,
79 waitingOnBGTask);
80 mallocHeapThreshold.setSliceThreshold(this, mallocHeapSize, gc.tunables,
81 waitingOnBGTask);
82 jitHeapThreshold.setSliceThreshold(this, jitHeapSize, gc.tunables,
83 waitingOnBGTask);
86 void js::ZoneAllocator::clearGCSliceThresholds() {
87 gcHeapThreshold.clearSliceThreshold();
88 mallocHeapThreshold.clearSliceThreshold();
89 jitHeapThreshold.clearSliceThreshold();
92 bool ZoneAllocator::addSharedMemory(void* mem, size_t nbytes, MemoryUse use) {
93 // nbytes can be zero here for SharedArrayBuffers.
95 MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
97 auto ptr = sharedMemoryUseCounts.lookupForAdd(mem);
98 MOZ_ASSERT_IF(ptr, ptr->value().use == use);
100 if (!ptr && !sharedMemoryUseCounts.add(ptr, mem, gc::SharedMemoryUse(use))) {
101 return false;
104 ptr->value().count++;
106 // Allocations can grow, so add any increase over the previous size and record
107 // the new size.
108 if (nbytes > ptr->value().nbytes) {
109 mallocHeapSize.addBytes(nbytes - ptr->value().nbytes);
110 ptr->value().nbytes = nbytes;
113 maybeTriggerGCOnMalloc();
115 return true;
118 void ZoneAllocator::removeSharedMemory(void* mem, size_t nbytes,
119 MemoryUse use) {
120 // nbytes can be zero here for SharedArrayBuffers.
122 MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
123 MOZ_ASSERT(CurrentThreadIsGCFinalizing());
125 auto ptr = sharedMemoryUseCounts.lookup(mem);
127 MOZ_ASSERT(ptr);
128 MOZ_ASSERT(ptr->value().count != 0);
129 MOZ_ASSERT(ptr->value().use == use);
130 MOZ_ASSERT(ptr->value().nbytes >= nbytes);
132 ptr->value().count--;
133 if (ptr->value().count == 0) {
134 mallocHeapSize.removeBytes(ptr->value().nbytes, true);
135 sharedMemoryUseCounts.remove(ptr);
139 template <TrackingKind kind>
140 void js::TrackedAllocPolicy<kind>::decMemory(size_t nbytes) {
141 bool updateRetainedSize = false;
142 if constexpr (kind == TrackingKind::Cell) {
143 // Only subtract freed cell memory from retained size for cell associations
144 // during sweeping.
145 JS::GCContext* gcx = TlsGCContext.get();
146 updateRetainedSize = gcx->isFinalizing();
149 zone_->decNonGCMemory(this, nbytes, MemoryUse::TrackedAllocPolicy,
150 updateRetainedSize);
namespace js {
// Explicit instantiations for the two tracking kinds used by the engine.
template class TrackedAllocPolicy<TrackingKind::Zone>;
template class TrackedAllocPolicy<TrackingKind::Cell>;
}  // namespace js
158 JS::Zone::Zone(JSRuntime* rt, Kind kind)
159 : ZoneAllocator(rt, kind),
160 arenas(this),
161 data(nullptr),
162 tenuredBigInts(0),
163 markedStrings(0),
164 finalizedStrings(0),
165 suppressAllocationMetadataBuilder(false),
166 allocNurseryObjects_(true),
167 allocNurseryStrings_(true),
168 allocNurseryBigInts_(true),
169 pretenuring(this),
170 compartments_(),
171 crossZoneStringWrappers_(this),
172 gcEphemeronEdges_(SystemAllocPolicy(), rt->randomHashCodeScrambler()),
173 gcNurseryEphemeronEdges_(SystemAllocPolicy(),
174 rt->randomHashCodeScrambler()),
175 shapeZone_(this),
176 gcScheduled_(false),
177 gcScheduledSaved_(false),
178 gcPreserveCode_(false),
179 keepPropMapTables_(false),
180 wasCollected_(false),
181 listNext_(NotOnList),
182 keptObjects(this) {
183 /* Ensure that there are no vtables to mess us up here. */
184 MOZ_ASSERT(reinterpret_cast<JS::shadow::Zone*>(this) ==
185 static_cast<JS::shadow::Zone*>(this));
186 MOZ_ASSERT_IF(isAtomsZone(), rt->gc.zones().empty());
188 updateGCStartThresholds(rt->gc);
189 rt->gc.nursery().setAllocFlagsForZone(this);
192 Zone::~Zone() {
193 MOZ_ASSERT_IF(regExps_.ref(), regExps().empty());
195 DebugAPI::deleteDebugScriptMap(debugScriptMap);
196 js_delete(finalizationObservers_.ref().release());
198 MOZ_ASSERT(gcWeakMapList().isEmpty());
200 JSRuntime* rt = runtimeFromAnyThread();
201 if (this == rt->gc.systemZone) {
202 MOZ_ASSERT(isSystemZone());
203 rt->gc.systemZone = nullptr;
206 js_delete(jitZone_.ref());
209 bool Zone::init() {
210 regExps_.ref() = make_unique<RegExpZone>(this);
211 return regExps_.ref() && gcEphemeronEdges().init() &&
212 gcNurseryEphemeronEdges().init();
215 void Zone::setNeedsIncrementalBarrier(bool needs) {
216 needsIncrementalBarrier_ = needs;
219 void Zone::changeGCState(GCState prev, GCState next) {
220 MOZ_ASSERT(RuntimeHeapIsBusy());
221 MOZ_ASSERT(gcState() == prev);
223 // This can be called when barriers have been temporarily disabled by
224 // AutoDisableBarriers. In that case, don't update needsIncrementalBarrier_
225 // and barriers will be re-enabled by ~AutoDisableBarriers() if necessary.
226 bool barriersDisabled = isGCMarking() && !needsIncrementalBarrier();
228 gcState_ = next;
230 // Update the barriers state when we transition between marking and
231 // non-marking states, unless barriers have been disabled.
232 if (!barriersDisabled) {
233 needsIncrementalBarrier_ = isGCMarking();
237 template <class Pred>
238 static void EraseIf(js::gc::EphemeronEdgeVector& entries, Pred pred) {
239 auto* begin = entries.begin();
240 auto* const end = entries.end();
242 auto* newEnd = begin;
243 for (auto* p = begin; p != end; p++) {
244 if (!pred(*p)) {
245 *newEnd++ = *p;
249 size_t removed = end - newEnd;
250 entries.shrinkBy(removed);
253 static void SweepEphemeronEdgesWhileMinorSweeping(
254 js::gc::EphemeronEdgeVector& entries) {
255 EraseIf(entries, [](js::gc::EphemeronEdge& edge) -> bool {
256 return IsAboutToBeFinalizedDuringMinorSweep(&edge.target);
260 void Zone::sweepAfterMinorGC(JSTracer* trc) {
261 sweepEphemeronTablesAfterMinorGC();
262 crossZoneStringWrappers().sweepAfterMinorGC(trc);
264 for (CompartmentsInZoneIter comp(this); !comp.done(); comp.next()) {
265 comp->sweepAfterMinorGC(trc);
269 void Zone::sweepEphemeronTablesAfterMinorGC() {
270 for (auto r = gcNurseryEphemeronEdges().mutableAll(); !r.empty();
271 r.popFront()) {
272 // Sweep gcNurseryEphemeronEdges to move live (forwarded) keys to
273 // gcEphemeronEdges, scanning through all the entries for such keys to
274 // update them.
276 // Forwarded and dead keys may also appear in their delegates' entries,
277 // so sweep those too (see below.)
279 // The tricky case is when the key has a delegate that was already
280 // tenured. Then it will be in its compartment's gcEphemeronEdges, but we
281 // still need to update the key (which will be in the entries
282 // associated with it.)
283 gc::Cell* key = r.front().key;
284 MOZ_ASSERT(!key->isTenured());
285 if (!Nursery::getForwardedPointer(&key)) {
286 // Dead nursery cell => discard.
287 continue;
290 // Key been moved. The value is an array of <color,cell> pairs; update all
291 // cells in that array.
292 EphemeronEdgeVector& entries = r.front().value;
293 SweepEphemeronEdgesWhileMinorSweeping(entries);
295 // Live (moved) nursery cell. Append entries to gcEphemeronEdges.
296 EphemeronEdgeTable& tenuredEdges = gcEphemeronEdges();
297 auto* entry = tenuredEdges.get(key);
298 if (!entry) {
299 if (!tenuredEdges.put(key, gc::EphemeronEdgeVector())) {
300 AutoEnterOOMUnsafeRegion oomUnsafe;
301 oomUnsafe.crash("Failed to tenure weak keys entry");
303 entry = tenuredEdges.get(key);
306 if (!entry->value.appendAll(entries)) {
307 AutoEnterOOMUnsafeRegion oomUnsafe;
308 oomUnsafe.crash("Failed to tenure weak keys entry");
311 // If the key has a delegate, then it will map to a WeakKeyEntryVector
312 // containing the key that needs to be updated.
314 JSObject* delegate = gc::detail::GetDelegate(key->as<JSObject>());
315 if (!delegate) {
316 continue;
318 MOZ_ASSERT(delegate->isTenured());
320 // If delegate was formerly nursery-allocated, we will sweep its entries
321 // when we visit its gcNurseryEphemeronEdges (if we haven't already). Note
322 // that we don't know the nursery address of the delegate, since the
323 // location it was stored in has already been updated.
325 // Otherwise, it will be in gcEphemeronEdges and we sweep it here.
326 auto* p = delegate->zone()->gcEphemeronEdges().get(delegate);
327 if (p) {
328 SweepEphemeronEdgesWhileMinorSweeping(p->value);
332 if (!gcNurseryEphemeronEdges().clear()) {
333 AutoEnterOOMUnsafeRegion oomUnsafe;
334 oomUnsafe.crash("OOM while clearing gcNurseryEphemeronEdges.");
338 void Zone::traceWeakCCWEdges(JSTracer* trc) {
339 crossZoneStringWrappers().traceWeak(trc);
340 for (CompartmentsInZoneIter comp(this); !comp.done(); comp.next()) {
341 comp->traceCrossCompartmentObjectWrapperEdges(trc);
345 /* static */
346 void Zone::fixupAllCrossCompartmentWrappersAfterMovingGC(JSTracer* trc) {
347 MOZ_ASSERT(trc->runtime()->gc.isHeapCompacting());
349 for (ZonesIter zone(trc->runtime(), WithAtoms); !zone.done(); zone.next()) {
350 // Trace the wrapper map to update keys (wrapped values) in other
351 // compartments that may have been moved.
352 zone->crossZoneStringWrappers().traceWeak(trc);
354 for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
355 comp->fixupCrossCompartmentObjectWrappersAfterMovingGC(trc);
360 void Zone::dropStringWrappersOnGC() {
361 MOZ_ASSERT(JS::RuntimeHeapIsCollecting());
362 crossZoneStringWrappers().clear();
365 #ifdef JSGC_HASH_TABLE_CHECKS
367 void Zone::checkAllCrossCompartmentWrappersAfterMovingGC() {
368 checkStringWrappersAfterMovingGC();
369 for (CompartmentsInZoneIter comp(this); !comp.done(); comp.next()) {
370 comp->checkObjectWrappersAfterMovingGC();
374 void Zone::checkStringWrappersAfterMovingGC() {
375 for (StringWrapperMap::Enum e(crossZoneStringWrappers()); !e.empty();
376 e.popFront()) {
377 // Assert that the postbarriers have worked and that nothing is left in the
378 // wrapper map that points into the nursery, and that the hash table entries
379 // are discoverable.
380 auto key = e.front().key();
381 CheckGCThingAfterMovingGC(key.get());
383 auto ptr = crossZoneStringWrappers().lookup(key);
384 MOZ_RELEASE_ASSERT(ptr.found() && &*ptr == &e.front());
387 #endif
389 void Zone::discardJitCode(JS::GCContext* gcx, const DiscardOptions& options) {
390 if (!isPreservingCode()) {
391 forceDiscardJitCode(gcx, options);
395 void Zone::forceDiscardJitCode(JS::GCContext* gcx,
396 const DiscardOptions& options) {
397 if (!jitZone()) {
398 return;
401 if (options.discardJitScripts && options.discardBaselineCode) {
402 lastDiscardedCodeTime_ = mozilla::TimeStamp::Now();
405 if (options.discardBaselineCode || options.discardJitScripts) {
406 #ifdef DEBUG
407 // Assert no JitScripts are marked as active.
408 for (auto iter = cellIter<BaseScript>(); !iter.done(); iter.next()) {
409 BaseScript* base = iter.unbarrieredGet();
410 if (jit::JitScript* jitScript = base->maybeJitScript()) {
411 MOZ_ASSERT(!jitScript->active());
414 #endif
416 // Mark JitScripts on the stack as active.
417 jit::MarkActiveJitScripts(this);
420 // Invalidate all Ion code in this zone.
421 jit::InvalidateAll(gcx, this);
423 for (auto base = cellIterUnsafe<BaseScript>(); !base.done(); base.next()) {
424 jit::JitScript* jitScript = base->maybeJitScript();
425 if (!jitScript) {
426 continue;
429 JSScript* script = base->asJSScript();
430 jit::FinishInvalidation(gcx, script);
432 // Discard baseline script if it's not marked as active.
433 if (options.discardBaselineCode) {
434 if (jitScript->hasBaselineScript() && !jitScript->active()) {
435 jit::FinishDiscardBaselineScript(gcx, script);
439 #ifdef JS_CACHEIR_SPEW
440 maybeUpdateWarmUpCount(script);
441 #endif
443 // Warm-up counter for scripts are reset on GC. After discarding code we
444 // need to let it warm back up to get information such as which
445 // opcodes are setting array holes or accessing getter properties.
446 script->resetWarmUpCounterForGC();
448 // Try to release the script's JitScript. This should happen after
449 // releasing JIT code because we can't do this when the script still has
450 // JIT code.
451 if (options.discardJitScripts) {
452 script->maybeReleaseJitScript(gcx);
453 jitScript = script->maybeJitScript();
454 if (!jitScript) {
455 // Try to discard the ScriptCounts too.
456 if (!script->realm()->collectCoverageForDebug() &&
457 !gcx->runtime()->profilingScripts) {
458 script->destroyScriptCounts();
460 continue;
464 // If we did not release the JitScript, we need to purge optimized IC
465 // stubs because the optimizedStubSpace will be purged below.
466 if (options.discardBaselineCode) {
467 jitScript->purgeOptimizedStubs(script);
470 if (options.resetNurseryAllocSites || options.resetPretenuredAllocSites) {
471 jitScript->resetAllocSites(options.resetNurseryAllocSites,
472 options.resetPretenuredAllocSites);
475 // Reset the active flag.
476 jitScript->resetActive();
478 // Optionally trace weak edges in remaining JitScripts.
479 if (options.traceWeakJitScripts) {
480 jitScript->traceWeak(options.traceWeakJitScripts);
484 // Also clear references to jit code from RegExpShared cells at this point.
485 // This avoid holding onto ExecutablePools.
486 for (auto regExp = cellIterUnsafe<RegExpShared>(); !regExp.done();
487 regExp.next()) {
488 regExp->discardJitCode();
492 * When scripts contains pointers to nursery things, the store buffer
493 * can contain entries that point into the optimized stub space. Since
494 * this method can be called outside the context of a GC, this situation
495 * could result in us trying to mark invalid store buffer entries.
497 * Defer freeing any allocated blocks until after the next minor GC.
499 if (options.discardBaselineCode) {
500 jitZone()->optimizedStubSpace()->freeAllAfterMinorGC(this);
501 jitZone()->purgeIonCacheIRStubInfo();
504 // Generate a profile marker
505 if (gcx->runtime()->geckoProfiler().enabled()) {
506 char discardingJitScript = options.discardJitScripts ? 'Y' : 'N';
507 char discardingBaseline = options.discardBaselineCode ? 'Y' : 'N';
508 char discardingIon = 'Y';
510 char discardingRegExp = 'Y';
511 char discardingNurserySites = options.resetNurseryAllocSites ? 'Y' : 'N';
512 char discardingPretenuredSites =
513 options.resetPretenuredAllocSites ? 'Y' : 'N';
515 char buf[100];
516 SprintfLiteral(buf,
517 "JitScript:%c Baseline:%c Ion:%c "
518 "RegExp:%c NurserySites:%c PretenuredSites:%c",
519 discardingJitScript, discardingBaseline, discardingIon,
520 discardingRegExp, discardingNurserySites,
521 discardingPretenuredSites);
522 gcx->runtime()->geckoProfiler().markEvent("DiscardJit", buf);
526 void JS::Zone::resetAllocSitesAndInvalidate(bool resetNurserySites,
527 bool resetPretenuredSites) {
528 MOZ_ASSERT(resetNurserySites || resetPretenuredSites);
530 if (!jitZone()) {
531 return;
534 JSContext* cx = runtime_->mainContextFromOwnThread();
535 for (auto base = cellIterUnsafe<BaseScript>(); !base.done(); base.next()) {
536 jit::JitScript* jitScript = base->maybeJitScript();
537 if (!jitScript) {
538 continue;
541 if (!jitScript->resetAllocSites(resetNurserySites, resetPretenuredSites)) {
542 continue;
545 JSScript* script = base->asJSScript();
546 CancelOffThreadIonCompile(script);
548 if (!script->hasIonScript()) {
549 continue;
552 jit::Invalidate(cx, script,
553 /* resetUses = */ true,
554 /* cancelOffThread = */ true);
558 void JS::Zone::traceWeakJitScripts(JSTracer* trc) {
559 for (auto base = cellIter<BaseScript>(); !base.done(); base.next()) {
560 jit::JitScript* jitScript = base->maybeJitScript();
561 if (jitScript) {
562 jitScript->traceWeak(trc);
567 void JS::Zone::beforeClearDelegateInternal(JSObject* wrapper,
568 JSObject* delegate) {
569 MOZ_ASSERT(js::gc::detail::GetDelegate(wrapper) == delegate);
570 MOZ_ASSERT(needsIncrementalBarrier());
571 MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(this));
572 runtimeFromMainThread()->gc.marker().severWeakDelegate(wrapper, delegate);
575 void JS::Zone::afterAddDelegateInternal(JSObject* wrapper) {
576 MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(this));
577 JSObject* delegate = js::gc::detail::GetDelegate(wrapper);
578 if (delegate) {
579 runtimeFromMainThread()->gc.marker().restoreWeakDelegate(wrapper, delegate);
583 #ifdef JSGC_HASH_TABLE_CHECKS
584 void JS::Zone::checkUniqueIdTableAfterMovingGC() {
585 for (auto r = uniqueIds().all(); !r.empty(); r.popFront()) {
586 js::gc::CheckGCThingAfterMovingGC(r.front().key());
589 #endif
591 js::jit::JitZone* Zone::createJitZone(JSContext* cx) {
592 MOZ_ASSERT(!jitZone_);
593 MOZ_ASSERT(cx->runtime()->hasJitRuntime());
595 UniquePtr<jit::JitZone> jitZone(cx->new_<js::jit::JitZone>());
596 if (!jitZone) {
597 return nullptr;
600 jitZone_ = jitZone.release();
601 return jitZone_;
604 bool Zone::hasMarkedRealms() {
605 for (RealmsInZoneIter realm(this); !realm.done(); realm.next()) {
606 if (realm->marked()) {
607 return true;
610 return false;
613 void Zone::notifyObservingDebuggers() {
614 AutoAssertNoGC nogc;
615 MOZ_ASSERT(JS::RuntimeHeapIsCollecting(),
616 "This method should be called during GC.");
618 JSRuntime* rt = runtimeFromMainThread();
620 for (RealmsInZoneIter realms(this); !realms.done(); realms.next()) {
621 GlobalObject* global = realms->unsafeUnbarrieredMaybeGlobal();
622 if (!global) {
623 continue;
626 DebugAPI::notifyParticipatesInGC(global, rt->gc.majorGCCount());
630 bool Zone::isOnList() const { return listNext_ != NotOnList; }
632 Zone* Zone::nextZone() const {
633 MOZ_ASSERT(isOnList());
634 return listNext_;
637 void Zone::fixupAfterMovingGC() {
638 ZoneAllocator::fixupAfterMovingGC();
639 shapeZone().fixupPropMapShapeTableAfterMovingGC();
642 void Zone::purgeAtomCache() {
643 atomCache().clearAndCompact();
645 // Also purge the dtoa caches so that subsequent lookups populate atom
646 // cache too.
647 for (RealmsInZoneIter r(this); !r.done(); r.next()) {
648 r->dtoaCache.purge();
652 void Zone::addSizeOfIncludingThis(
653 mozilla::MallocSizeOf mallocSizeOf, JS::CodeSizes* code, size_t* regexpZone,
654 size_t* jitZone, size_t* baselineStubsOptimized, size_t* uniqueIdMap,
655 size_t* initialPropMapTable, size_t* shapeTables, size_t* atomsMarkBitmaps,
656 size_t* compartmentObjects, size_t* crossCompartmentWrappersTables,
657 size_t* compartmentsPrivateData, size_t* scriptCountsMapArg) {
658 *regexpZone += regExps().sizeOfIncludingThis(mallocSizeOf);
659 if (jitZone_) {
660 jitZone_->addSizeOfIncludingThis(mallocSizeOf, code, jitZone,
661 baselineStubsOptimized);
663 *uniqueIdMap += uniqueIds().shallowSizeOfExcludingThis(mallocSizeOf);
664 shapeZone().addSizeOfExcludingThis(mallocSizeOf, initialPropMapTable,
665 shapeTables);
666 *atomsMarkBitmaps += markedAtoms().sizeOfExcludingThis(mallocSizeOf);
667 *crossCompartmentWrappersTables +=
668 crossZoneStringWrappers().sizeOfExcludingThis(mallocSizeOf);
670 for (CompartmentsInZoneIter comp(this); !comp.done(); comp.next()) {
671 comp->addSizeOfIncludingThis(mallocSizeOf, compartmentObjects,
672 crossCompartmentWrappersTables,
673 compartmentsPrivateData);
676 if (scriptCountsMap) {
677 *scriptCountsMapArg +=
678 scriptCountsMap->shallowSizeOfIncludingThis(mallocSizeOf);
679 for (auto r = scriptCountsMap->all(); !r.empty(); r.popFront()) {
680 *scriptCountsMapArg +=
681 r.front().value()->sizeOfIncludingThis(mallocSizeOf);
686 void* ZoneAllocator::onOutOfMemory(js::AllocFunction allocFunc,
687 arena_id_t arena, size_t nbytes,
688 void* reallocPtr) {
689 if (!js::CurrentThreadCanAccessRuntime(runtime_)) {
690 return nullptr;
692 // The analysis sees that JSRuntime::onOutOfMemory could report an error,
693 // which with a JSErrorInterceptor could GC. But we're passing a null cx (to
694 // a default parameter) so the error will not be reported.
695 JS::AutoSuppressGCAnalysis suppress;
696 return runtimeFromMainThread()->onOutOfMemory(allocFunc, arena, nbytes,
697 reallocPtr);
700 void ZoneAllocator::reportAllocationOverflow() const {
701 js::ReportAllocationOverflow(static_cast<JSContext*>(nullptr));
704 ZoneList::ZoneList() : head(nullptr), tail(nullptr) {}
706 ZoneList::ZoneList(Zone* zone) : head(zone), tail(zone) {
707 MOZ_RELEASE_ASSERT(!zone->isOnList());
708 zone->listNext_ = nullptr;
711 ZoneList::~ZoneList() { MOZ_ASSERT(isEmpty()); }
713 void ZoneList::check() const {
714 #ifdef DEBUG
715 MOZ_ASSERT((head == nullptr) == (tail == nullptr));
716 if (!head) {
717 return;
720 Zone* zone = head;
721 for (;;) {
722 MOZ_ASSERT(zone && zone->isOnList());
723 if (zone == tail) break;
724 zone = zone->listNext_;
726 MOZ_ASSERT(!zone->listNext_);
727 #endif
730 bool ZoneList::isEmpty() const { return head == nullptr; }
732 Zone* ZoneList::front() const {
733 MOZ_ASSERT(!isEmpty());
734 MOZ_ASSERT(head->isOnList());
735 return head;
// Single-zone convenience wrappers over the list-splicing operations.
void ZoneList::prepend(Zone* zone) { prependList(ZoneList(zone)); }

void ZoneList::append(Zone* zone) { appendList(ZoneList(zone)); }
742 void ZoneList::prependList(ZoneList&& other) {
743 check();
744 other.check();
746 if (other.isEmpty()) {
747 return;
750 MOZ_ASSERT(tail != other.tail);
752 if (!isEmpty()) {
753 other.tail->listNext_ = head;
754 } else {
755 tail = other.tail;
757 head = other.head;
759 other.head = nullptr;
760 other.tail = nullptr;
763 void ZoneList::appendList(ZoneList&& other) {
764 check();
765 other.check();
767 if (other.isEmpty()) {
768 return;
771 MOZ_ASSERT(tail != other.tail);
773 if (!isEmpty()) {
774 tail->listNext_ = other.head;
775 } else {
776 head = other.head;
778 tail = other.tail;
780 other.head = nullptr;
781 other.tail = nullptr;
784 Zone* ZoneList::removeFront() {
785 MOZ_ASSERT(!isEmpty());
786 check();
788 Zone* front = head;
789 head = head->listNext_;
790 if (!head) {
791 tail = nullptr;
794 front->listNext_ = Zone::NotOnList;
796 return front;
799 void ZoneList::clear() {
800 while (!isEmpty()) {
801 removeFront();
805 JS_PUBLIC_API void JS::shadow::RegisterWeakCache(
806 JS::Zone* zone, detail::WeakCacheBase* cachep) {
807 zone->registerWeakCache(cachep);
810 void Zone::traceRootsInMajorGC(JSTracer* trc) {
811 if (trc->isMarkingTracer() && !isGCMarking()) {
812 return;
815 // Trace zone script-table roots. See comment below for justification re:
816 // calling this only during major (non-nursery) collections.
817 traceScriptTableRoots(trc);
819 if (FinalizationObservers* observers = finalizationObservers()) {
820 observers->traceRoots(trc);
824 void Zone::traceScriptTableRoots(JSTracer* trc) {
825 static_assert(std::is_convertible_v<BaseScript*, gc::TenuredCell*>,
826 "BaseScript must not be nursery-allocated for script-table "
827 "tracing to work");
829 // Performance optimization: the script-table keys are JSScripts, which
830 // cannot be in the nursery, so we can skip this tracing if we are only in a
831 // minor collection. We static-assert this fact above.
832 MOZ_ASSERT(!JS::RuntimeHeapIsMinorCollecting());
834 // N.B.: the script-table keys are weak *except* in an exceptional case: when
835 // then --dump-bytecode command line option or the PCCount JSFriend API is
836 // used, then the scripts for all counts must remain alive. We only trace
837 // when the `trc->runtime()->profilingScripts` flag is set. This flag is
838 // cleared in JSRuntime::destroyRuntime() during shutdown to ensure that
839 // scripts are collected before the runtime goes away completely.
840 if (scriptCountsMap && trc->runtime()->profilingScripts) {
841 for (ScriptCountsMap::Range r = scriptCountsMap->all(); !r.empty();
842 r.popFront()) {
843 BaseScript* script = r.front().key();
844 MOZ_ASSERT(script->hasScriptCounts());
845 TraceRoot(trc, &script, "profilingScripts");
849 // Trace the debugger's DebugScript weak map.
850 if (debugScriptMap) {
851 DebugAPI::traceDebugScriptMap(trc, debugScriptMap);
855 void Zone::fixupScriptMapsAfterMovingGC(JSTracer* trc) {
856 // Map entries are removed by BaseScript::finalize, but we need to update the
857 // script pointers here in case they are moved by the GC.
859 if (scriptCountsMap) {
860 scriptCountsMap->traceWeak(trc);
863 if (scriptLCovMap) {
864 scriptLCovMap->traceWeak(trc);
867 #ifdef MOZ_VTUNE
868 if (scriptVTuneIdMap) {
869 scriptVTuneIdMap->traceWeak(trc);
871 #endif
873 #ifdef JS_CACHEIR_SPEW
874 if (scriptFinalWarmUpCountMap) {
875 scriptFinalWarmUpCountMap->traceWeak(trc);
877 #endif
880 #ifdef JSGC_HASH_TABLE_CHECKS
881 void Zone::checkScriptMapsAfterMovingGC() {
882 if (scriptCountsMap) {
883 for (auto r = scriptCountsMap->all(); !r.empty(); r.popFront()) {
884 BaseScript* script = r.front().key();
885 MOZ_ASSERT(script->zone() == this);
886 CheckGCThingAfterMovingGC(script);
887 auto ptr = scriptCountsMap->lookup(script);
888 MOZ_RELEASE_ASSERT(ptr.found() && &*ptr == &r.front());
892 if (scriptLCovMap) {
893 for (auto r = scriptLCovMap->all(); !r.empty(); r.popFront()) {
894 BaseScript* script = r.front().key();
895 MOZ_ASSERT(script->zone() == this);
896 CheckGCThingAfterMovingGC(script);
897 auto ptr = scriptLCovMap->lookup(script);
898 MOZ_RELEASE_ASSERT(ptr.found() && &*ptr == &r.front());
902 # ifdef MOZ_VTUNE
903 if (scriptVTuneIdMap) {
904 for (auto r = scriptVTuneIdMap->all(); !r.empty(); r.popFront()) {
905 BaseScript* script = r.front().key();
906 MOZ_ASSERT(script->zone() == this);
907 CheckGCThingAfterMovingGC(script);
908 auto ptr = scriptVTuneIdMap->lookup(script);
909 MOZ_RELEASE_ASSERT(ptr.found() && &*ptr == &r.front());
912 # endif // MOZ_VTUNE
914 # ifdef JS_CACHEIR_SPEW
915 if (scriptFinalWarmUpCountMap) {
916 for (auto r = scriptFinalWarmUpCountMap->all(); !r.empty(); r.popFront()) {
917 BaseScript* script = r.front().key();
918 MOZ_ASSERT(script->zone() == this);
919 CheckGCThingAfterMovingGC(script);
920 auto ptr = scriptFinalWarmUpCountMap->lookup(script);
921 MOZ_RELEASE_ASSERT(ptr.found() && &*ptr == &r.front());
924 # endif // JS_CACHEIR_SPEW
926 #endif
928 void Zone::clearScriptCounts(Realm* realm) {
929 if (!scriptCountsMap) {
930 return;
933 // Clear all hasScriptCounts_ flags of BaseScript, in order to release all
934 // ScriptCounts entries of the given realm.
935 for (auto i = scriptCountsMap->modIter(); !i.done(); i.next()) {
936 BaseScript* script = i.get().key();
937 if (script->realm() != realm) {
938 continue;
940 // We can't destroy the ScriptCounts yet if the script has Baseline code,
941 // because Baseline code bakes in pointers to the counters. The ScriptCounts
942 // will be destroyed in Zone::discardJitCode when discarding the JitScript.
943 if (script->hasBaselineScript()) {
944 continue;
946 script->clearHasScriptCounts();
947 i.remove();
951 void Zone::clearScriptLCov(Realm* realm) {
952 if (!scriptLCovMap) {
953 return;
956 for (auto i = scriptLCovMap->modIter(); !i.done(); i.next()) {
957 BaseScript* script = i.get().key();
958 if (script->realm() == realm) {
959 i.remove();
964 void Zone::clearRootsForShutdownGC() {
965 // Finalization callbacks are not called if we're shutting down.
966 if (finalizationObservers()) {
967 finalizationObservers()->clearRecords();
970 clearKeptObjects();
973 void Zone::finishRoots() {
974 for (RealmsInZoneIter r(this); !r.done(); r.next()) {
975 r->finishRoots();
979 void Zone::traceKeptObjects(JSTracer* trc) { keptObjects.ref().trace(trc); }
981 bool Zone::keepDuringJob(HandleObject target) {
982 return keptObjects.ref().put(target);
985 void Zone::clearKeptObjects() { keptObjects.ref().clear(); }
987 bool Zone::ensureFinalizationObservers() {
988 if (finalizationObservers_.ref()) {
989 return true;
992 finalizationObservers_ = js::MakeUnique<FinalizationObservers>(this);
993 return bool(finalizationObservers_.ref());