1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
8 #include "js/shadow/Zone.h" // JS::shadow::Zone
10 #include "mozilla/Sprintf.h"
11 #include "mozilla/TimeStamp.h"
13 #include <type_traits>
15 #include "gc/FinalizationObservers.h"
16 #include "gc/GCContext.h"
17 #include "gc/PublicIterators.h"
18 #include "jit/BaselineIC.h"
19 #include "jit/BaselineJIT.h"
20 #include "jit/Invalidation.h"
21 #include "jit/JitScript.h"
22 #include "jit/JitZone.h"
23 #include "vm/Runtime.h"
26 #include "debugger/DebugAPI-inl.h"
27 #include "gc/GC-inl.h"
28 #include "gc/Marking-inl.h"
29 #include "gc/Nursery-inl.h"
30 #include "gc/StableCellHasher-inl.h"
31 #include "gc/WeakMap-inl.h"
32 #include "vm/JSScript-inl.h"
33 #include "vm/Realm-inl.h"
36 using namespace js::gc
;
38 Zone
* const Zone::NotOnList
= reinterpret_cast<Zone
*>(1);
40 ZoneAllocator::ZoneAllocator(JSRuntime
* rt
, Kind kind
)
41 : JS::shadow::Zone(rt
, rt
->gc
.marker().tracer(), kind
),
42 jitHeapThreshold(size_t(jit::MaxCodeBytesPerProcess
* 0.8)) {}
// NOTE(review): this chunk is corrupted — statements are split mid-expression
// and at least one original line (the closing brace, possibly a DEBUG guard
// around these checks) is missing. Restore from canonical sources before
// compiling. Code below left byte-identical.
// Destructor: asserts that all per-zone heap accounting has drained to zero.
44 ZoneAllocator::~ZoneAllocator() {
46 mallocTracker
.checkEmptyOnDestroy();
47 MOZ_ASSERT(gcHeapSize
.bytes() == 0);
48 MOZ_ASSERT(mallocHeapSize
.bytes() == 0);
49 MOZ_ASSERT(jitHeapSize
.bytes() == 0);
// NOTE(review): body truncated in this chunk (closing brace, and possibly a
// DEBUG guard, are missing). Re-syncs the malloc memory tracker's cell
// pointers after a moving/compacting GC.
53 void ZoneAllocator::fixupAfterMovingGC() {
55 mallocTracker
.fixupAfterMovingGC();
59 void js::ZoneAllocator::updateSchedulingStateOnGCStart() {
60 gcHeapSize
.updateOnGCStart();
61 mallocHeapSize
.updateOnGCStart();
62 jitHeapSize
.updateOnGCStart();
63 perZoneGCTime
= mozilla::TimeDuration::Zero();
66 void js::ZoneAllocator::updateGCStartThresholds(GCRuntime
& gc
) {
67 bool isAtomsZone
= JS::Zone::from(this)->isAtomsZone();
68 gcHeapThreshold
.updateStartThreshold(
69 gcHeapSize
.retainedBytes(), smoothedAllocationRate
.ref(),
70 smoothedCollectionRate
.ref(), gc
.tunables
, gc
.schedulingState
,
73 mallocHeapThreshold
.updateStartThreshold(mallocHeapSize
.retainedBytes(),
74 gc
.tunables
, gc
.schedulingState
);
77 void js::ZoneAllocator::setGCSliceThresholds(GCRuntime
& gc
,
78 bool waitingOnBGTask
) {
79 gcHeapThreshold
.setSliceThreshold(this, gcHeapSize
, gc
.tunables
,
81 mallocHeapThreshold
.setSliceThreshold(this, mallocHeapSize
, gc
.tunables
,
83 jitHeapThreshold
.setSliceThreshold(this, jitHeapSize
, gc
.tunables
,
87 void js::ZoneAllocator::clearGCSliceThresholds() {
88 gcHeapThreshold
.clearSliceThreshold();
89 mallocHeapThreshold
.clearSliceThreshold();
90 jitHeapThreshold
.clearSliceThreshold();
// NOTE(review): this chunk is corrupted — statements are split mid-expression
// and several original lines (the OOM `return false;`, the final
// `return true;` and closing braces) are missing. Restore from canonical
// sources before compiling. Code below left byte-identical.
// Records a use of shared memory (e.g. a SharedArrayBuffer) against this
// zone's malloc heap, counting multiple uses of the same buffer once.
93 bool ZoneAllocator::addSharedMemory(void* mem
, size_t nbytes
, MemoryUse use
) {
94 // nbytes can be zero here for SharedArrayBuffers.
96 MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime_
));
98 auto ptr
= sharedMemoryUseCounts
.lookupForAdd(mem
);
99 MOZ_ASSERT_IF(ptr
, ptr
->value().use
== use
);
101 if (!ptr
&& !sharedMemoryUseCounts
.add(ptr
, mem
, gc::SharedMemoryUse(use
))) {
105 ptr
->value().count
++;
107 // Allocations can grow, so add any increase over the previous size and record
109 if (nbytes
> ptr
->value().nbytes
) {
110 mallocHeapSize
.addBytes(nbytes
- ptr
->value().nbytes
);
111 ptr
->value().nbytes
= nbytes
;
114 maybeTriggerGCOnMalloc();
// NOTE(review): this chunk is corrupted — statements are split mid-expression
// and several original lines (the trailing `MemoryUse use` parameter, guards
// and closing braces) are missing. Restore from canonical sources before
// compiling. Code below left byte-identical.
// Drops one recorded use of a shared-memory buffer; when the last use goes
// away, the buffer's bytes are removed from the zone's malloc accounting.
119 void ZoneAllocator::removeSharedMemory(void* mem
, size_t nbytes
,
121 // nbytes can be zero here for SharedArrayBuffers.
123 MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime_
));
124 MOZ_ASSERT(CurrentThreadIsGCFinalizing());
126 auto ptr
= sharedMemoryUseCounts
.lookup(mem
);
129 MOZ_ASSERT(ptr
->value().count
!= 0);
130 MOZ_ASSERT(ptr
->value().use
== use
);
131 MOZ_ASSERT(ptr
->value().nbytes
>= nbytes
);
133 ptr
->value().count
--;
134 if (ptr
->value().count
== 0) {
135 mallocHeapSize
.removeBytes(ptr
->value().nbytes
, true);
136 sharedMemoryUseCounts
.remove(ptr
);
140 template <TrackingKind kind
>
141 void js::TrackedAllocPolicy
<kind
>::decMemory(size_t nbytes
) {
142 bool updateRetainedSize
= false;
143 if constexpr (kind
== TrackingKind::Cell
) {
144 // Only subtract freed cell memory from retained size for cell associations
146 JS::GCContext
* gcx
= TlsGCContext
.get();
147 updateRetainedSize
= gcx
->isFinalizing();
150 zone_
->decNonGCMemory(this, nbytes
, MemoryUse::TrackedAllocPolicy
,
155 template class TrackedAllocPolicy
<TrackingKind::Zone
>;
156 template class TrackedAllocPolicy
<TrackingKind::Cell
>;
// NOTE(review): this chunk is corrupted — the initializer list is split
// mid-expression and several original lines (additional member initializers
// and the closing brace) are missing. Restore from canonical sources before
// compiling. Code below left byte-identical.
// Zone constructor: initializes allocator state, wrapper/ephemeron tables and
// GC bookkeeping flags, then seeds the GC start thresholds and nursery
// allocation flags for this zone.
159 JS::Zone::Zone(JSRuntime
* rt
, Kind kind
)
160 : ZoneAllocator(rt
, kind
),
163 suppressAllocationMetadataBuilder(false),
164 allocNurseryObjects_(true),
165 allocNurseryStrings_(true),
166 allocNurseryBigInts_(true),
168 crossZoneStringWrappers_(this),
169 gcEphemeronEdges_(SystemAllocPolicy(), rt
->randomHashCodeScrambler()),
170 gcNurseryEphemeronEdges_(SystemAllocPolicy(),
171 rt
->randomHashCodeScrambler()),
174 gcScheduledSaved_(false),
175 gcPreserveCode_(false),
176 keepPropMapTables_(false),
177 wasCollected_(false),
178 listNext_(NotOnList
),
180 /* Ensure that there are no vtables to mess us up here. */
181 MOZ_ASSERT(reinterpret_cast<JS::shadow::Zone
*>(this) ==
182 static_cast<JS::shadow::Zone
*>(this));
183 MOZ_ASSERT_IF(isAtomsZone(), rt
->gc
.zones().empty());
185 updateGCStartThresholds(rt
->gc
);
186 rt
->gc
.nursery().setAllocFlagsForZone(this);
// NOTE(review): the definition header for this body is missing from this
// chunk — these statements appear to belong to the Zone destructor (they
// release the debug-script map, finalization observers and jitZone_, and
// detach the runtime's systemZone pointer) — confirm against upstream.
// Code below left byte-identical.
190 MOZ_ASSERT_IF(regExps_
.ref(), regExps().empty());
192 MOZ_ASSERT(numRealmsWithAllocMetadataBuilder_
== 0);
194 DebugAPI::deleteDebugScriptMap(debugScriptMap
);
195 js_delete(finalizationObservers_
.ref().release());
197 MOZ_ASSERT(gcWeakMapList().isEmpty());
198 MOZ_ASSERT(objectsWithWeakPointers
.ref().empty());
200 JSRuntime
* rt
= runtimeFromAnyThread();
201 if (this == rt
->gc
.systemZone
) {
202 MOZ_ASSERT(isSystemZone());
203 rt
->gc
.systemZone
= nullptr;
206 js_delete(jitZone_
.ref());
// NOTE(review): the definition header for this body is missing from this
// chunk — given the bool-returning init pattern this appears to be the body
// of Zone::init(), creating the RegExpZone and initializing the ephemeron
// edge tables — confirm against upstream. Code below left byte-identical.
210 regExps_
.ref() = make_unique
<RegExpZone
>(this);
211 return regExps_
.ref() && gcEphemeronEdges().init() &&
212 gcNurseryEphemeronEdges().init();
215 void Zone::setNeedsIncrementalBarrier(bool needs
) {
216 needsIncrementalBarrier_
= needs
;
// NOTE(review): this chunk is corrupted — statements are split mid-expression
// and several original lines (including the statement that actually assigns
// the new GC state, and closing braces) are missing. Restore from canonical
// sources before compiling. Code below left byte-identical.
// Transitions the zone's GC state from `prev` to `next`, keeping the
// incremental-barrier flag in sync with whether the zone is marking.
219 void Zone::changeGCState(GCState prev
, GCState next
) {
220 MOZ_ASSERT(RuntimeHeapIsBusy());
221 MOZ_ASSERT(gcState() == prev
);
223 // This can be called when barriers have been temporarily disabled by
224 // AutoDisableBarriers. In that case, don't update needsIncrementalBarrier_
225 // and barriers will be re-enabled by ~AutoDisableBarriers() if necessary.
226 bool barriersDisabled
= isGCMarking() && !needsIncrementalBarrier();
230 // Update the barriers state when we transition between marking and
231 // non-marking states, unless barriers have been disabled.
232 if (!barriersDisabled
) {
233 needsIncrementalBarrier_
= isGCMarking();
// NOTE(review): this chunk is corrupted — the body of the compaction loop
// (the lines that test `pred` and shift surviving entries down to `newEnd`)
// is missing entirely. Restore from canonical sources before compiling.
// Code below left byte-identical.
// In-place erase-remove over an EphemeronEdgeVector: keeps entries failing
// `pred`, then shrinks the vector by the number removed.
237 template <class Pred
>
238 static void EraseIf(js::gc::EphemeronEdgeVector
& entries
, Pred pred
) {
239 auto* begin
= entries
.begin();
240 auto* const end
= entries
.end();
242 auto* newEnd
= begin
;
243 for (auto* p
= begin
; p
!= end
; p
++) {
249 size_t removed
= end
- newEnd
;
250 entries
.shrinkBy(removed
);
253 static void SweepEphemeronEdgesWhileMinorSweeping(
254 js::gc::EphemeronEdgeVector
& entries
) {
255 EraseIf(entries
, [](js::gc::EphemeronEdge
& edge
) -> bool {
256 return IsAboutToBeFinalizedDuringMinorSweep(&edge
.target
);
260 void Zone::sweepAfterMinorGC(JSTracer
* trc
) {
261 sweepEphemeronTablesAfterMinorGC();
262 crossZoneStringWrappers().sweepAfterMinorGC(trc
);
264 for (CompartmentsInZoneIter
comp(this); !comp
.done(); comp
.next()) {
265 comp
->sweepAfterMinorGC(trc
);
// NOTE(review): this chunk is corrupted — statements are split mid-expression
// and several original lines (loop advance, `continue`s after the dead-key
// case, null checks on `entry`/`delegate`/`p`, closing braces) are missing.
// Restore from canonical sources before compiling. Code below left
// byte-identical.
// Moves ephemeron edges keyed on live (tenured) nursery cells from
// gcNurseryEphemeronEdges into gcEphemeronEdges, discarding edges for dead
// keys, and sweeps delegate entries along the way.
269 void Zone::sweepEphemeronTablesAfterMinorGC() {
270 for (auto r
= gcNurseryEphemeronEdges().mutableAll(); !r
.empty();
272 // Sweep gcNurseryEphemeronEdges to move live (forwarded) keys to
273 // gcEphemeronEdges, scanning through all the entries for such keys to
276 // Forwarded and dead keys may also appear in their delegates' entries,
277 // so sweep those too (see below.)
279 // The tricky case is when the key has a delegate that was already
280 // tenured. Then it will be in its compartment's gcEphemeronEdges, but we
281 // still need to update the key (which will be in the entries
282 // associated with it.)
283 gc::Cell
* key
= r
.front().key
;
284 MOZ_ASSERT(!key
->isTenured());
285 if (!Nursery::getForwardedPointer(&key
)) {
286 // Dead nursery cell => discard.
290 // Key has been moved. The value is an array of <color,cell> pairs; update
291 // all cells in that array.
292 EphemeronEdgeVector
& entries
= r
.front().value
;
293 SweepEphemeronEdgesWhileMinorSweeping(entries
);
295 // Live (moved) nursery cell. Append entries to gcEphemeronEdges.
296 EphemeronEdgeTable
& tenuredEdges
= gcEphemeronEdges();
297 AutoEnterOOMUnsafeRegion oomUnsafe
;
298 auto* entry
= tenuredEdges
.getOrAdd(key
);
300 oomUnsafe
.crash("Failed to tenure weak keys entry");
303 if (!entry
->value
.appendAll(entries
)) {
304 oomUnsafe
.crash("Failed to tenure weak keys entry");
307 // If the key has a delegate, then it will map to a WeakKeyEntryVector
308 // containing the key that needs to be updated.
310 JSObject
* delegate
= gc::detail::GetDelegate(key
->as
<JSObject
>());
314 MOZ_ASSERT(delegate
->isTenured());
316 // If delegate was formerly nursery-allocated, we will sweep its entries
317 // when we visit its gcNurseryEphemeronEdges (if we haven't already). Note
318 // that we don't know the nursery address of the delegate, since the
319 // location it was stored in has already been updated.
321 // Otherwise, it will be in gcEphemeronEdges and we sweep it here.
322 auto* p
= delegate
->zone()->gcEphemeronEdges().get(delegate
);
324 SweepEphemeronEdgesWhileMinorSweeping(p
->value
);
328 if (!gcNurseryEphemeronEdges().clear()) {
329 AutoEnterOOMUnsafeRegion oomUnsafe
;
330 oomUnsafe
.crash("OOM while clearing gcNurseryEphemeronEdges.");
334 void Zone::traceWeakCCWEdges(JSTracer
* trc
) {
335 crossZoneStringWrappers().traceWeak(trc
);
336 for (CompartmentsInZoneIter
comp(this); !comp
.done(); comp
.next()) {
337 comp
->traceCrossCompartmentObjectWrapperEdges(trc
);
342 void Zone::fixupAllCrossCompartmentWrappersAfterMovingGC(JSTracer
* trc
) {
343 MOZ_ASSERT(trc
->runtime()->gc
.isHeapCompacting());
345 for (ZonesIter
zone(trc
->runtime(), WithAtoms
); !zone
.done(); zone
.next()) {
346 // Trace the wrapper map to update keys (wrapped values) in other
347 // compartments that may have been moved.
348 zone
->crossZoneStringWrappers().traceWeak(trc
);
350 for (CompartmentsInZoneIter
comp(zone
); !comp
.done(); comp
.next()) {
351 comp
->fixupCrossCompartmentObjectWrappersAfterMovingGC(trc
);
356 void Zone::dropStringWrappersOnGC() {
357 MOZ_ASSERT(JS::RuntimeHeapIsCollecting());
358 crossZoneStringWrappers().clear();
361 #ifdef JSGC_HASH_TABLE_CHECKS
363 void Zone::checkAllCrossCompartmentWrappersAfterMovingGC() {
364 checkStringWrappersAfterMovingGC();
365 for (CompartmentsInZoneIter
comp(this); !comp
.done(); comp
.next()) {
366 comp
->checkObjectWrappersAfterMovingGC();
370 void Zone::checkStringWrappersAfterMovingGC() {
371 CheckTableAfterMovingGC(crossZoneStringWrappers(), [this](const auto& entry
) {
372 JSString
* key
= entry
.key().get();
373 CheckGCThingAfterMovingGC(key
); // Keys may be in a different zone.
374 CheckGCThingAfterMovingGC(entry
.value().unbarrieredGet(), this);
380 void Zone::discardJitCode(JS::GCContext
* gcx
, const DiscardOptions
& options
) {
381 if (!isPreservingCode()) {
382 forceDiscardJitCode(gcx
, options
);
// NOTE(review): this chunk is corrupted — statements are split mid-expression
// and many original lines are missing (guards such as a jitZone() null check,
// `#endif` for the JS_CACHEIR_SPEW region, SprintfLiteral of `buf`, loop
// advances and closing braces). Restore from canonical sources before
// compiling. Code below left byte-identical.
// Unconditionally discards JIT code in this zone: invalidates Ion code,
// discards inactive Baseline scripts, optionally releases JitScripts and
// resets allocation sites, purges IC stubs and RegExp JIT code, then emits a
// profiler marker describing what was discarded.
386 void Zone::forceDiscardJitCode(JS::GCContext
* gcx
,
387 const DiscardOptions
& options
) {
392 if (options
.discardJitScripts
) {
393 lastDiscardedCodeTime_
= mozilla::TimeStamp::Now();
396 // Copy Baseline IC stubs that are active on the stack to a new LifoAlloc.
397 // After freeing stub memory, these chunks are then transferred to the
398 // zone-wide allocator.
399 jit::ICStubSpace newStubSpace
;
402 // Assert no ICScripts are marked as active.
403 jitZone()->forEachJitScript([](jit::JitScript
* jitScript
) {
404 MOZ_ASSERT(!jitScript
->hasActiveICScript());
408 // Mark ICScripts on the stack as active and copy active Baseline stubs.
409 jit::MarkActiveICScriptsAndCopyStubs(this, newStubSpace
);
411 // Invalidate all Ion code in this zone.
412 jit::InvalidateAll(gcx
, this);
414 jitZone()->forEachJitScript
<jit::IncludeDyingScripts
>(
415 [&](jit::JitScript
* jitScript
) {
416 JSScript
* script
= jitScript
->owningScript();
417 jit::FinishInvalidation(gcx
, script
);
419 // Discard baseline script if it's not marked as active.
420 if (jitScript
->hasBaselineScript() &&
421 !jitScript
->icScript()->active()) {
422 jit::FinishDiscardBaselineScript(gcx
, script
);
425 #ifdef JS_CACHEIR_SPEW
426 maybeUpdateWarmUpCount(script
);
429 // Warm-up counter for scripts are reset on GC. After discarding code we
430 // need to let it warm back up to get information such as which
431 // opcodes are setting array holes or accessing getter properties.
432 script
->resetWarmUpCounterForGC();
434 // Try to release the script's JitScript. This should happen after
435 // releasing JIT code because we can't do this when the script still has
437 if (options
.discardJitScripts
) {
438 script
->maybeReleaseJitScript(gcx
);
439 jitScript
= script
->maybeJitScript();
441 // Try to discard the ScriptCounts too.
442 if (!script
->realm()->collectCoverageForDebug() &&
443 !gcx
->runtime()->profilingScripts
) {
444 script
->destroyScriptCounts();
446 return; // Continue script loop.
450 // If we did not release the JitScript, we need to purge IC stubs
451 // because the ICStubSpace will be purged below. Also purge all
452 // trial-inlined ICScripts that are not active on the stack.
453 jitScript
->purgeInactiveICScripts();
454 jitScript
->purgeStubs(script
, newStubSpace
);
456 if (options
.resetNurseryAllocSites
||
457 options
.resetPretenuredAllocSites
) {
458 jitScript
->resetAllocSites(options
.resetNurseryAllocSites
,
459 options
.resetPretenuredAllocSites
);
462 // Reset the active flag of each ICScript.
463 jitScript
->resetAllActiveFlags();
465 // Optionally trace weak edges in remaining JitScripts.
466 if (options
.traceWeakJitScripts
) {
467 jitScript
->traceWeak(options
.traceWeakJitScripts
);
471 // Also clear references to jit code from RegExpShared cells at this point.
472 // This avoids holding onto ExecutablePools.
473 for (auto regExp
= cellIterUnsafe
<RegExpShared
>(); !regExp
.done();
475 regExp
->discardJitCode();
479 * When scripts contain pointers to nursery things, the store buffer
480 * can contain entries that point into the optimized stub space. Since
481 * this method can be called outside the context of a GC, this situation
482 * could result in us trying to mark invalid store buffer entries.
484 * Defer freeing any allocated blocks until after the next minor GC.
486 jitZone()->stubSpace()->freeAllAfterMinorGC(this);
487 jitZone()->stubSpace()->transferFrom(newStubSpace
);
488 jitZone()->purgeIonCacheIRStubInfo();
490 // Generate a profile marker
491 if (gcx
->runtime()->geckoProfiler().enabled()) {
492 char discardingJitScript
= options
.discardJitScripts
? 'Y' : 'N';
493 char discardingBaseline
= 'Y';
494 char discardingIon
= 'Y';
496 char discardingRegExp
= 'Y';
497 char discardingNurserySites
= options
.resetNurseryAllocSites
? 'Y' : 'N';
498 char discardingPretenuredSites
=
499 options
.resetPretenuredAllocSites
? 'Y' : 'N';
503 "JitScript:%c Baseline:%c Ion:%c "
504 "RegExp:%c NurserySites:%c PretenuredSites:%c",
505 discardingJitScript
, discardingBaseline
, discardingIon
,
506 discardingRegExp
, discardingNurserySites
,
507 discardingPretenuredSites
);
508 gcx
->runtime()->geckoProfiler().markEvent("DiscardJit", buf
);
// NOTE(review): this chunk is corrupted — statements are split mid-expression
// and several original lines (an early-out when there is no jitZone, closing
// braces for the lambda and call) are missing. Restore from canonical sources
// before compiling. Code below left byte-identical.
// Resets nursery and/or pretenured allocation sites in every JitScript and
// invalidates Ion code for any script whose sites changed.
512 void JS::Zone::resetAllocSitesAndInvalidate(bool resetNurserySites
,
513 bool resetPretenuredSites
) {
514 MOZ_ASSERT(resetNurserySites
|| resetPretenuredSites
);
520 JSContext
* cx
= runtime_
->mainContextFromOwnThread();
521 jitZone()->forEachJitScript
<jit::IncludeDyingScripts
>(
522 [&](jit::JitScript
* jitScript
) {
523 if (jitScript
->resetAllocSites(resetNurserySites
,
524 resetPretenuredSites
)) {
525 JSScript
* script
= jitScript
->owningScript();
526 CancelOffThreadIonCompile(script
);
527 if (script
->hasIonScript()) {
528 jit::Invalidate(cx
, script
,
529 /* resetUses = */ true,
530 /* cancelOffThread = */ true);
// NOTE(review): this chunk is corrupted — original line 537 is missing and
// presumably guarded against a null jitZone (confirm against upstream), and
// the closing braces are gone. Code below left byte-identical.
// Traces weak edges held by every JitScript in this zone.
536 void JS::Zone::traceWeakJitScripts(JSTracer
* trc
) {
538 jitZone()->forEachJitScript(
539 [&](jit::JitScript
* jitScript
) { jitScript
->traceWeak(trc
); });
543 void JS::Zone::beforeClearDelegateInternal(JSObject
* wrapper
,
544 JSObject
* delegate
) {
545 // 'delegate' is no longer the delegate of 'wrapper'.
546 MOZ_ASSERT(js::gc::detail::GetDelegate(wrapper
) == delegate
);
547 MOZ_ASSERT(needsIncrementalBarrier());
548 MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(this));
550 // If |wrapper| might be a key in a weak map, trigger a barrier to account for
551 // the removal of the automatically added edge from delegate to wrapper.
552 if (HasUniqueId(wrapper
)) {
553 PreWriteBarrier(wrapper
);
557 #ifdef JSGC_HASH_TABLE_CHECKS
558 void JS::Zone::checkUniqueIdTableAfterMovingGC() {
559 CheckTableAfterMovingGC(uniqueIds(), [this](const auto& entry
) {
560 js::gc::CheckGCThingAfterMovingGC(entry
.key(), this);
// NOTE(review): this chunk is corrupted — statements are split mid-expression
// and several original lines (the `#endif` for the portable-baseline guard,
// the OOM null check on `jitZone`, and the final return) are missing.
// Restore from canonical sources before compiling. Code below left
// byte-identical.
// Lazily creates this zone's JitZone.
566 js::jit::JitZone
* Zone::createJitZone(JSContext
* cx
) {
567 MOZ_ASSERT(!jitZone_
);
568 #ifndef ENABLE_PORTABLE_BASELINE_INTERP
569 MOZ_ASSERT(cx
->runtime()->hasJitRuntime());
572 auto jitZone
= cx
->make_unique
<jit::JitZone
>(allocNurseryStrings());
577 jitZone_
= jitZone
.release();
581 bool Zone::hasMarkedRealms() {
582 for (RealmsInZoneIter
realm(this); !realm
.done(); realm
.next()) {
583 if (realm
->marked()) {
// NOTE(review): this chunk is corrupted — statements are split mid-expression
// and several original lines (likely a null check on `global` with `continue`,
// plus closing braces) are missing. Restore from canonical sources before
// compiling. Code below left byte-identical.
// Tells the debugger which realms in this zone participated in the current
// major GC.
590 void Zone::notifyObservingDebuggers() {
592 MOZ_ASSERT(JS::RuntimeHeapIsCollecting(),
593 "This method should be called during GC.");
595 JSRuntime
* rt
= runtimeFromMainThread();
597 for (RealmsInZoneIter
realms(this); !realms
.done(); realms
.next()) {
598 GlobalObject
* global
= realms
->unsafeUnbarrieredMaybeGlobal();
603 DebugAPI::notifyParticipatesInGC(global
, rt
->gc
.majorGCCount());
607 bool Zone::isOnList() const { return listNext_
!= NotOnList
; }
// NOTE(review): this chunk is corrupted — the return statement (presumably
// `return listNext_;`) and closing brace are missing. Restore from canonical
// sources before compiling. Code below left byte-identical.
609 Zone
* Zone::nextZone() const {
610 MOZ_ASSERT(isOnList());
614 void Zone::fixupAfterMovingGC() {
615 ZoneAllocator::fixupAfterMovingGC();
616 shapeZone().fixupPropMapShapeTableAfterMovingGC();
619 void Zone::purgeAtomCache() {
620 atomCache_
.ref().reset();
622 // Also purge the dtoa caches so that subsequent lookups populate atom
624 for (RealmsInZoneIter
r(this); !r
.done(); r
.next()) {
625 r
->dtoaCache
.purge();
// NOTE(review): this chunk is corrupted — statements are split mid-expression
// and several original lines (a guard around the jitZone_ dereference,
// trailing arguments of the shapeZone call, loop advances and closing braces)
// are missing. Restore from canonical sources before compiling. Code below
// left byte-identical.
// Memory-reporter hook: accumulates this zone's malloc-measured sizes into
// the supplied out-parameters.
629 void Zone::addSizeOfIncludingThis(
630 mozilla::MallocSizeOf mallocSizeOf
, size_t* zoneObject
, JS::CodeSizes
* code
,
631 size_t* regexpZone
, size_t* jitZone
, size_t* cacheIRStubs
,
632 size_t* uniqueIdMap
, size_t* initialPropMapTable
, size_t* shapeTables
,
633 size_t* atomsMarkBitmaps
, size_t* compartmentObjects
,
634 size_t* crossCompartmentWrappersTables
, size_t* compartmentsPrivateData
,
635 size_t* scriptCountsMapArg
) {
636 *zoneObject
+= mallocSizeOf(this);
637 *regexpZone
+= regExps().sizeOfIncludingThis(mallocSizeOf
);
639 jitZone_
->addSizeOfIncludingThis(mallocSizeOf
, code
, jitZone
, cacheIRStubs
);
641 *uniqueIdMap
+= uniqueIds().shallowSizeOfExcludingThis(mallocSizeOf
);
642 shapeZone().addSizeOfExcludingThis(mallocSizeOf
, initialPropMapTable
,
644 *atomsMarkBitmaps
+= markedAtoms().sizeOfExcludingThis(mallocSizeOf
);
645 *crossCompartmentWrappersTables
+=
646 crossZoneStringWrappers().sizeOfExcludingThis(mallocSizeOf
);
648 for (CompartmentsInZoneIter
comp(this); !comp
.done(); comp
.next()) {
649 comp
->addSizeOfIncludingThis(mallocSizeOf
, compartmentObjects
,
650 crossCompartmentWrappersTables
,
651 compartmentsPrivateData
);
654 if (scriptCountsMap
) {
655 *scriptCountsMapArg
+=
656 scriptCountsMap
->shallowSizeOfIncludingThis(mallocSizeOf
);
657 for (auto r
= scriptCountsMap
->all(); !r
.empty(); r
.popFront()) {
658 *scriptCountsMapArg
+=
659 r
.front().value()->sizeOfIncludingThis(mallocSizeOf
);
// NOTE(review): this chunk is corrupted — statements are split mid-expression
// and several original lines (the trailing `reallocPtr` parameter, the early
// `return nullptr;` for off-thread callers, the final argument of the
// forwarded call and the closing brace) are missing. Restore from canonical
// sources before compiling. Code below left byte-identical.
// Last-ditch OOM handler: forwards to the runtime's handler when called on
// the main thread.
664 void* ZoneAllocator::onOutOfMemory(js::AllocFunction allocFunc
,
665 arena_id_t arena
, size_t nbytes
,
667 if (!js::CurrentThreadCanAccessRuntime(runtime_
)) {
670 // The analysis sees that JSRuntime::onOutOfMemory could report an error,
671 // which with a JSErrorInterceptor could GC. But we're passing a null cx (to
672 // a default parameter) so the error will not be reported.
673 JS::AutoSuppressGCAnalysis suppress
;
674 return runtimeFromMainThread()->onOutOfMemory(allocFunc
, arena
, nbytes
,
678 void ZoneAllocator::reportAllocationOverflow() const {
679 js::ReportAllocationOverflow(static_cast<JSContext
*>(nullptr));
682 ZoneList::ZoneList() : head(nullptr), tail(nullptr) {}
684 ZoneList::ZoneList(Zone
* zone
) : head(zone
), tail(zone
) {
685 MOZ_RELEASE_ASSERT(!zone
->isOnList());
686 zone
->listNext_
= nullptr;
689 ZoneList::~ZoneList() { MOZ_ASSERT(isEmpty()); }
// NOTE(review): this chunk is corrupted — statements are split mid-expression
// and several original lines (a probable DEBUG guard / empty-list early-out,
// the loop header that walks from `head`, and closing braces) are missing.
// Restore from canonical sources before compiling. Code below left
// byte-identical.
// Debug invariant check: head/tail are both null or both non-null, every
// linked zone is flagged as on-list, and the tail terminates the chain.
691 void ZoneList::check() const {
693 MOZ_ASSERT((head
== nullptr) == (tail
== nullptr));
700 MOZ_ASSERT(zone
&& zone
->isOnList());
701 if (zone
== tail
) break;
702 zone
= zone
->listNext_
;
704 MOZ_ASSERT(!zone
->listNext_
);
708 bool ZoneList::isEmpty() const { return head
== nullptr; }
710 Zone
* ZoneList::front() const {
711 MOZ_ASSERT(!isEmpty());
712 MOZ_ASSERT(head
->isOnList());
716 void ZoneList::prepend(Zone
* zone
) { prependList(ZoneList(zone
)); }
718 void ZoneList::append(Zone
* zone
) { appendList(ZoneList(zone
)); }
// NOTE(review): this chunk is corrupted — statements are split mid-expression
// and several original lines (check() calls, the early `return` for an empty
// other list, the branch updating `head`/`tail` when this list is empty, and
// closing braces) are missing. Restore from canonical sources before
// compiling. Code below left byte-identical.
// Splices another list onto the front of this one, leaving `other` empty.
720 void ZoneList::prependList(ZoneList
&& other
) {
724 if (other
.isEmpty()) {
728 MOZ_ASSERT(tail
!= other
.tail
);
731 other
.tail
->listNext_
= head
;
737 other
.head
= nullptr;
738 other
.tail
= nullptr;
// NOTE(review): this chunk is corrupted — statements are split mid-expression
// and several original lines (check() calls, the early `return` for an empty
// other list, the branch updating `head`/`tail`, and closing braces) are
// missing. Restore from canonical sources before compiling. Code below left
// byte-identical.
// Splices another list onto the back of this one, leaving `other` empty.
741 void ZoneList::appendList(ZoneList
&& other
) {
745 if (other
.isEmpty()) {
749 MOZ_ASSERT(tail
!= other
.tail
);
752 tail
->listNext_
= other
.head
;
758 other
.head
= nullptr;
759 other
.tail
= nullptr;
// NOTE(review): this chunk is corrupted — statements are split mid-expression
// and several original lines (the declaration of the returned front pointer,
// the tail reset when the list becomes empty, the final `return`, and closing
// braces) are missing. Restore from canonical sources before compiling.
// Code below left byte-identical.
// Unlinks and returns the first zone, marking it as off-list via NotOnList.
762 Zone
* ZoneList::removeFront() {
763 MOZ_ASSERT(!isEmpty());
767 head
= head
->listNext_
;
772 front
->listNext_
= Zone::NotOnList
;
// NOTE(review): only the signature of this function survives in this chunk —
// the entire body (presumably removing every zone) is missing. Restore from
// canonical sources before compiling.
777 void ZoneList::clear() {
783 JS_PUBLIC_API
void JS::shadow::RegisterWeakCache(
784 JS::Zone
* zone
, detail::WeakCacheBase
* cachep
) {
785 zone
->registerWeakCache(cachep
);
788 void Zone::traceRootsInMajorGC(JSTracer
* trc
) {
789 if (trc
->isMarkingTracer() && !isGCMarking()) {
793 // Trace zone script-table roots. See comment below for justification re:
794 // calling this only during major (non-nursery) collections.
795 traceScriptTableRoots(trc
);
797 if (FinalizationObservers
* observers
= finalizationObservers()) {
798 observers
->traceRoots(trc
);
// NOTE(review): this chunk is corrupted — statements are split mid-expression
// and several original lines (the static_assert's closing string/paren, the
// range-loop advance and closing braces) are missing. Restore from canonical
// sources before compiling. Code below left byte-identical.
// Traces script-table roots that must stay alive in the exceptional
// profiling case, plus the debugger's DebugScript map.
802 void Zone::traceScriptTableRoots(JSTracer
* trc
) {
803 static_assert(std::is_convertible_v
<BaseScript
*, gc::TenuredCell
*>,
804 "BaseScript must not be nursery-allocated for script-table "
807 // Performance optimization: the script-table keys are JSScripts, which
808 // cannot be in the nursery, so we can skip this tracing if we are only in a
809 // minor collection. We static-assert this fact above.
810 MOZ_ASSERT(!JS::RuntimeHeapIsMinorCollecting());
812 // N.B.: the script-table keys are weak *except* in an exceptional case: when
813 // the --dump-bytecode command line option or the PCCount JSFriend API is
814 // used, then the scripts for all counts must remain alive. We only trace
815 // when the `trc->runtime()->profilingScripts` flag is set. This flag is
816 // cleared in JSRuntime::destroyRuntime() during shutdown to ensure that
817 // scripts are collected before the runtime goes away completely.
818 if (scriptCountsMap
&& trc
->runtime()->profilingScripts
) {
819 for (ScriptCountsMap::Range r
= scriptCountsMap
->all(); !r
.empty();
821 BaseScript
* script
= r
.front().key();
822 MOZ_ASSERT(script
->hasScriptCounts());
823 TraceRoot(trc
, &script
, "profilingScripts");
827 // Trace the debugger's DebugScript weak map.
828 if (debugScriptMap
) {
829 DebugAPI::traceDebugScriptMap(trc
, debugScriptMap
);
// NOTE(review): this chunk is corrupted — statements are split mid-expression
// and several original lines are missing, including the `if (scriptLCovMap)`
// guard that should precede the scriptLCovMap->traceWeak call, the final
// `#endif`, and closing braces. Restore from canonical sources before
// compiling. Code below left byte-identical.
// Updates script pointers in all zone script maps after a moving GC.
833 void Zone::fixupScriptMapsAfterMovingGC(JSTracer
* trc
) {
834 // Map entries are removed by BaseScript::finalize, but we need to update the
835 // script pointers here in case they are moved by the GC.
837 if (scriptCountsMap
) {
838 scriptCountsMap
->traceWeak(trc
);
842 scriptLCovMap
->traceWeak(trc
);
846 if (scriptVTuneIdMap
) {
847 scriptVTuneIdMap
->traceWeak(trc
);
851 #ifdef JS_CACHEIR_SPEW
852 if (scriptFinalWarmUpCountMap
) {
853 scriptFinalWarmUpCountMap
->traceWeak(trc
);
858 #ifdef JSGC_HASH_TABLE_CHECKS
// NOTE(review): this chunk is corrupted — statements are split mid-expression
// and several original lines (lambda closers `});`, the `if (scriptLCovMap)`
// guard, the trailing `#endif`, and closing braces) are missing. Restore from
// canonical sources before compiling. Code below left byte-identical.
// Debug-only: verify that each script-map key survived the moving GC.
859 void Zone::checkScriptMapsAfterMovingGC() {
860 // |debugScriptMap| is checked automatically because it is a WeakMap.
862 if (scriptCountsMap
) {
863 CheckTableAfterMovingGC(*scriptCountsMap
, [this](const auto& entry
) {
864 BaseScript
* script
= entry
.key();
865 CheckGCThingAfterMovingGC(script
, this);
871 CheckTableAfterMovingGC(*scriptLCovMap
, [this](const auto& entry
) {
872 BaseScript
* script
= entry
.key();
873 CheckGCThingAfterMovingGC(script
, this);
879 if (scriptVTuneIdMap
) {
880 CheckTableAfterMovingGC(*scriptVTuneIdMap
, [this](const auto& entry
) {
881 BaseScript
* script
= entry
.key();
882 CheckGCThingAfterMovingGC(script
, this);
888 # ifdef JS_CACHEIR_SPEW
889 if (scriptFinalWarmUpCountMap
) {
890 CheckTableAfterMovingGC(*scriptFinalWarmUpCountMap
,
891 [this](const auto& entry
) {
892 BaseScript
* script
= entry
.key();
893 CheckGCThingAfterMovingGC(script
, this);
897 # endif // JS_CACHEIR_SPEW
// NOTE(review): this chunk is corrupted — statements are split mid-expression
// and several original lines (`return;` after the null-map check, the
// `continue`s inside the guards, the map-entry removal, and closing braces)
// are missing. Restore from canonical sources before compiling. Code below
// left byte-identical.
// Releases ScriptCounts entries belonging to the given realm, skipping dying
// scripts and scripts whose Baseline code still bakes in counter pointers.
901 void Zone::clearScriptCounts(Realm
* realm
) {
902 if (!scriptCountsMap
) {
906 // Clear all hasScriptCounts_ flags of BaseScript, in order to release all
907 // ScriptCounts entries of the given realm.
908 for (auto i
= scriptCountsMap
->modIter(); !i
.done(); i
.next()) {
909 const HeapPtr
<BaseScript
*>& script
= i
.get().key();
910 if (IsAboutToBeFinalized(script
)) {
911 // Dead scripts may be present during incremental GC until script
912 // finalizers have been run.
916 if (script
->realm() != realm
) {
919 // We can't destroy the ScriptCounts yet if the script has Baseline code,
920 // because Baseline code bakes in pointers to the counters. The ScriptCounts
921 // will be destroyed in Zone::discardJitCode when discarding the JitScript.
922 if (script
->hasBaselineScript()) {
925 script
->clearHasScriptCounts();
// NOTE(review): this chunk is corrupted — statements are split mid-expression
// and several original lines (`return;` after the null-map check, the
// `continue` for dying scripts, the entry removal for matching realms, and
// closing braces) are missing. Restore from canonical sources before
// compiling. Code below left byte-identical.
// Drops LCov coverage entries that belong to the given realm.
930 void Zone::clearScriptLCov(Realm
* realm
) {
931 if (!scriptLCovMap
) {
935 for (auto i
= scriptLCovMap
->modIter(); !i
.done(); i
.next()) {
936 const HeapPtr
<BaseScript
*>& script
= i
.get().key();
937 if (IsAboutToBeFinalized(script
)) {
938 // Dead scripts may be present during incremental GC until script
939 // finalizers have been run.
943 if (script
->realm() == realm
) {
// NOTE(review): this chunk is corrupted — the lines following the
// clearRecords() call are missing and may have contained further statements
// (confirm against upstream), along with the closing braces. Code below left
// byte-identical.
// For a shutdown GC, drop roots that would otherwise run finalization
// callbacks.
949 void Zone::clearRootsForShutdownGC() {
950 // Finalization callbacks are not called if we're shutting down.
951 if (finalizationObservers()) {
952 finalizationObservers()->clearRecords();
// NOTE(review): this chunk is corrupted — the body of the realm loop is
// missing entirely (confirm against upstream what is finished per realm),
// along with the closing braces. Code below left byte-identical.
958 void Zone::finishRoots() {
959 for (RealmsInZoneIter
r(this); !r
.done(); r
.next()) {
964 void Zone::traceKeptObjects(JSTracer
* trc
) { keptObjects
.ref().trace(trc
); }
966 bool Zone::addToKeptObjects(HandleObject target
) {
967 return keptObjects
.ref().put(target
);
970 void Zone::clearKeptObjects() { keptObjects
.ref().clear(); }
972 bool Zone::ensureFinalizationObservers() {
973 if (finalizationObservers_
.ref()) {
977 finalizationObservers_
= js::MakeUnique
<FinalizationObservers
>(this);
978 return bool(finalizationObservers_
.ref());
981 bool Zone::registerObjectWithWeakPointers(JSObject
* obj
) {
982 MOZ_ASSERT(obj
->getClass()->hasTrace());
983 MOZ_ASSERT(!IsInsideNursery(obj
));
984 return objectsWithWeakPointers
.ref().append(obj
);