xpcom/base/CycleCollectedJSRuntime.cpp
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
2 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
3 /* This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 // We're dividing JS objects into 3 categories:
8 //
9 // 1. "real" roots, held by the JS engine itself or rooted through the root
10 // and lock JS APIs. Roots from this category are considered black in the
11 // cycle collector; any cycle they participate in is uncollectable.
13 // 2. certain roots held by C++ objects that are guaranteed to be alive.
14 // Roots from this category are considered black in the cycle collector,
15 // and any cycle they participate in is uncollectable. These roots are
16 // traced from TraceNativeBlackRoots.
18 // 3. all other roots held by C++ objects that participate in cycle collection,
19 // held by us (see TraceNativeGrayRoots). Roots from this category are
20 // considered gray in the cycle collector; whether or not they are collected
21 // depends on the objects that hold them.
23 // Note that if a root is in multiple categories, membership in category 1
24 // or 2 takes precedence, so it will be considered black.
26 // During garbage collection we switch to an additional mark color (gray) when
27 // tracing inside TraceNativeGrayRoots. This allows us to walk those roots later
28 // on and add all objects reachable only from them to the cycle collector.
30 // Phases:
32 // 1. marking of the roots in category 1 by having the JS GC do its marking
33 // 2. marking of the roots in category 2 by having the JS GC call us back
34 // (via JS_SetExtraGCRootsTracer) and running TraceNativeBlackRoots
35 // 3. marking of the roots in category 3 by
36 // TraceNativeGrayRootsInCollectingZones using an additional color (gray).
37 // 4. end of GC, GC can sweep its heap
39 // At some later point, when the cycle collector runs:
41 // 5. walk gray objects and add them to the cycle collector, cycle collect
43 // JS objects that are part of cycles the cycle collector breaks will be
44 // collected by the next JS GC.
46 // If WantAllTraces() is false, the cycle collector will not traverse roots
47 // from category 1 or any JS objects held by them. Any JS objects they hold
48 // will already be marked by the JS GC and will thus be colored black
49 // themselves. Any C++ objects they hold will have a missing (untraversed)
50 // edge from the JS object to the C++ object and so it will be marked black
51 // too. This decreases the number of objects that the cycle collector has to
52 // deal with.
53 // To improve debugging, if WantAllTraces() is true, all JS objects are
54 // traversed.
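// For illustration only (none of the following is part of this file, and the
// class and member names are made up): a typical category-3 root is a
// JS::Heap<T> member of a cycle-collected C++ object that registers itself as
// a JS holder. A minimal sketch might look like this:
//
//   class MyThing final : public nsWrapperCache {
//    public:
//     void SetCallback(JS::Handle<JSObject*> aObj) {
//       mCallback = aObj;
//       mozilla::HoldJSObjects(this);  // register |this| as a JS holder
//     }
//     ~MyThing() { mozilla::DropJSObjects(this); }
//
//    private:
//     // Traced as a gray root through the holder's cycle collection Trace()
//     // implementation (usually generated by the cycle collection macros).
//     JS::Heap<JSObject*> mCallback;
//   };
//
// TraceNativeGrayRoots() below walks such holders and reports their JS members
// to the GC and cycle collector.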
56 #include "mozilla/CycleCollectedJSRuntime.h"
58 #include <algorithm>
59 #include <utility>
61 #include "GeckoProfiler.h"
62 #include "js/Debug.h"
63 #include "js/friend/DumpFunctions.h" // js::DumpHeap
64 #include "js/GCAPI.h"
65 #include "js/HeapAPI.h"
66 #include "js/Object.h" // JS::GetClass, JS::GetCompartment, JS::GetPrivate
67 #include "js/Warnings.h" // JS::SetWarningReporter
68 #include "jsfriendapi.h"
69 #include "mozilla/ArrayUtils.h"
70 #include "mozilla/AutoRestore.h"
71 #include "mozilla/CycleCollectedJSContext.h"
72 #include "mozilla/DebuggerOnGCRunnable.h"
73 #include "mozilla/MemoryReporting.h"
74 #include "mozilla/Sprintf.h"
75 #include "mozilla/Telemetry.h"
76 #include "mozilla/TimelineConsumers.h"
77 #include "mozilla/TimelineMarker.h"
78 #include "mozilla/Unused.h"
79 #include "mozilla/dom/DOMJSClass.h"
80 #include "mozilla/dom/JSExecutionManager.h"
81 #include "mozilla/dom/ProfileTimelineMarkerBinding.h"
82 #include "mozilla/dom/Promise.h"
83 #include "mozilla/dom/PromiseBinding.h"
84 #include "mozilla/dom/PromiseDebugging.h"
85 #include "mozilla/dom/ScriptSettings.h"
86 #include "nsContentUtils.h"
87 #include "nsCycleCollectionNoteRootCallback.h"
88 #include "nsCycleCollectionParticipant.h"
89 #include "nsCycleCollector.h"
90 #include "nsDOMJSUtils.h"
91 #include "nsExceptionHandler.h"
92 #include "nsJSUtils.h"
93 #include "nsStringBuffer.h"
94 #include "nsWrapperCache.h"
96 #if defined(XP_MACOSX)
97 # include "nsMacUtilsImpl.h"
98 #endif
100 #include "nsThread.h"
101 #include "nsThreadUtils.h"
102 #include "xpcpublic.h"
104 #ifdef NIGHTLY_BUILD
105 // For performance reasons, we make the JS Dev Error Interceptor a Nightly-only
106 // feature.
107 # define MOZ_JS_DEV_ERROR_INTERCEPTOR 1
108 #endif // NIGHTLY_BUILD
110 using namespace mozilla;
111 using namespace mozilla::dom;
113 namespace mozilla {
115 struct DeferredFinalizeFunctionHolder {
116 DeferredFinalizeFunction run;
117 void* data;
120 class IncrementalFinalizeRunnable : public DiscardableRunnable {
121 typedef AutoTArray<DeferredFinalizeFunctionHolder, 16> DeferredFinalizeArray;
122 typedef CycleCollectedJSRuntime::DeferredFinalizerTable
123 DeferredFinalizerTable;
125 CycleCollectedJSRuntime* mRuntime;
126 DeferredFinalizeArray mDeferredFinalizeFunctions;
127 uint32_t mFinalizeFunctionToRun;
128 bool mReleasing;
130 static const PRTime SliceMillis = 5; /* ms */
132 public:
133 IncrementalFinalizeRunnable(CycleCollectedJSRuntime* aRt,
134 DeferredFinalizerTable& aFinalizerTable);
135 virtual ~IncrementalFinalizeRunnable();
137 void ReleaseNow(bool aLimited);
139 NS_DECL_NSIRUNNABLE
142 } // namespace mozilla
144 struct NoteWeakMapChildrenTracer : public JS::CallbackTracer {
145 NoteWeakMapChildrenTracer(JSRuntime* aRt,
146 nsCycleCollectionNoteRootCallback& aCb)
147 : JS::CallbackTracer(aRt, JS::TracerKind::Callback,
148 JS::IdTraceAction::CanSkip),
149 mCb(aCb),
150 mTracedAny(false),
151 mMap(nullptr),
152 mKey(nullptr),
153 mKeyDelegate(nullptr) {}
154 void onChild(const JS::GCCellPtr& aThing) override;
155 nsCycleCollectionNoteRootCallback& mCb;
156 bool mTracedAny;
157 JSObject* mMap;
158 JS::GCCellPtr mKey;
159 JSObject* mKeyDelegate;
162 void NoteWeakMapChildrenTracer::onChild(const JS::GCCellPtr& aThing) {
163 if (aThing.is<JSString>()) {
164 return;
167 if (!JS::GCThingIsMarkedGray(aThing) && !mCb.WantAllTraces()) {
168 return;
171 if (JS::IsCCTraceKind(aThing.kind())) {
172 mCb.NoteWeakMapping(mMap, mKey, mKeyDelegate, aThing);
173 mTracedAny = true;
174 } else {
175 JS::TraceChildren(this, aThing);
179 struct NoteWeakMapsTracer : public js::WeakMapTracer {
180 NoteWeakMapsTracer(JSRuntime* aRt, nsCycleCollectionNoteRootCallback& aCccb)
181 : js::WeakMapTracer(aRt), mCb(aCccb), mChildTracer(aRt, aCccb) {}
182 void trace(JSObject* aMap, JS::GCCellPtr aKey, JS::GCCellPtr aValue) override;
183 nsCycleCollectionNoteRootCallback& mCb;
184 NoteWeakMapChildrenTracer mChildTracer;
187 void NoteWeakMapsTracer::trace(JSObject* aMap, JS::GCCellPtr aKey,
188 JS::GCCellPtr aValue) {
189 // If nothing that could be held alive by this entry is marked gray, return.
190 if ((!aKey || !JS::GCThingIsMarkedGray(aKey)) &&
191 MOZ_LIKELY(!mCb.WantAllTraces())) {
192 if (!aValue || !JS::GCThingIsMarkedGray(aValue) || aValue.is<JSString>()) {
193 return;
197 // The cycle collector can only properly reason about weak maps if it can
198 // reason about the liveness of their keys, which in turn requires that
199 // the key can be represented in the cycle collector graph. All existing
200 // uses of weak maps use either objects or scripts as keys, which are okay.
201 MOZ_ASSERT(JS::IsCCTraceKind(aKey.kind()));
203 // As an emergency fallback for non-debug builds, if the key is not
204 // representable in the cycle collector graph, we treat it as marked. This
205 // can cause leaks, but is preferable to ignoring the binding, which could
206 // cause the cycle collector to free live objects.
207 if (!JS::IsCCTraceKind(aKey.kind())) {
208 aKey = nullptr;
211 JSObject* kdelegate = nullptr;
212 if (aKey.is<JSObject>()) {
213 kdelegate = js::UncheckedUnwrapWithoutExpose(&aKey.as<JSObject>());
216 if (JS::IsCCTraceKind(aValue.kind())) {
217 mCb.NoteWeakMapping(aMap, aKey, kdelegate, aValue);
218 } else {
219 mChildTracer.mTracedAny = false;
220 mChildTracer.mMap = aMap;
221 mChildTracer.mKey = aKey;
222 mChildTracer.mKeyDelegate = kdelegate;
224 if (!aValue.is<JSString>()) {
225 JS::TraceChildren(&mChildTracer, aValue);
228 // The delegate could hold alive the key, so report something to the CC
229 // if we haven't already.
230 if (!mChildTracer.mTracedAny && aKey && JS::GCThingIsMarkedGray(aKey) &&
231 kdelegate) {
232 mCb.NoteWeakMapping(aMap, aKey, kdelegate, nullptr);
237 // Report whether the key or value of a weak mapping entry is gray but needs
238 // to be marked black.
239 static void ShouldWeakMappingEntryBeBlack(JSObject* aMap, JS::GCCellPtr aKey,
240 JS::GCCellPtr aValue,
241 bool* aKeyShouldBeBlack,
242 bool* aValueShouldBeBlack) {
243 *aKeyShouldBeBlack = false;
244 *aValueShouldBeBlack = false;
246 // If nothing that could be held alive by this entry is marked gray, return.
247 bool keyMightNeedMarking = aKey && JS::GCThingIsMarkedGray(aKey);
248 bool valueMightNeedMarking = aValue && JS::GCThingIsMarkedGray(aValue) &&
249 aValue.kind() != JS::TraceKind::String;
250 if (!keyMightNeedMarking && !valueMightNeedMarking) {
251 return;
254 if (!JS::IsCCTraceKind(aKey.kind())) {
255 aKey = nullptr;
258 if (keyMightNeedMarking && aKey.is<JSObject>()) {
259 JSObject* kdelegate =
260 js::UncheckedUnwrapWithoutExpose(&aKey.as<JSObject>());
261 if (kdelegate && !JS::ObjectIsMarkedGray(kdelegate) &&
262 (!aMap || !JS::ObjectIsMarkedGray(aMap))) {
263 *aKeyShouldBeBlack = true;
267 if (aValue && JS::GCThingIsMarkedGray(aValue) &&
268 (!aKey || !JS::GCThingIsMarkedGray(aKey)) &&
269 (!aMap || !JS::ObjectIsMarkedGray(aMap)) &&
270 aValue.kind() != JS::TraceKind::Shape) {
271 *aValueShouldBeBlack = true;
275 struct FixWeakMappingGrayBitsTracer : public js::WeakMapTracer {
276 explicit FixWeakMappingGrayBitsTracer(JSRuntime* aRt)
277 : js::WeakMapTracer(aRt) {}
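// FixAll repeatedly re-traces every weak map until a pass makes no change:
// unmarking (blackening) one key or value recursively can turn another
// entry's map, key delegate, or value black, which in turn can make that
// entry newly eligible, so a single pass is not enough to reach a fixed point.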
279 void FixAll() {
280 do {
281 mAnyMarked = false;
282 js::TraceWeakMaps(this);
283 } while (mAnyMarked);
286 void trace(JSObject* aMap, JS::GCCellPtr aKey,
287 JS::GCCellPtr aValue) override {
288 bool keyShouldBeBlack;
289 bool valueShouldBeBlack;
290 ShouldWeakMappingEntryBeBlack(aMap, aKey, aValue, &keyShouldBeBlack,
291 &valueShouldBeBlack);
292 if (keyShouldBeBlack && JS::UnmarkGrayGCThingRecursively(aKey)) {
293 mAnyMarked = true;
296 if (valueShouldBeBlack && JS::UnmarkGrayGCThingRecursively(aValue)) {
297 mAnyMarked = true;
301 MOZ_INIT_OUTSIDE_CTOR bool mAnyMarked;
304 #ifdef DEBUG
305 // Check whether weak maps are marked correctly according to the logic above.
306 struct CheckWeakMappingGrayBitsTracer : public js::WeakMapTracer {
307 explicit CheckWeakMappingGrayBitsTracer(JSRuntime* aRt)
308 : js::WeakMapTracer(aRt), mFailed(false) {}
310 static bool Check(JSRuntime* aRt) {
311 CheckWeakMappingGrayBitsTracer tracer(aRt);
312 js::TraceWeakMaps(&tracer);
313 return !tracer.mFailed;
316 void trace(JSObject* aMap, JS::GCCellPtr aKey,
317 JS::GCCellPtr aValue) override {
318 bool keyShouldBeBlack;
319 bool valueShouldBeBlack;
320 ShouldWeakMappingEntryBeBlack(aMap, aKey, aValue, &keyShouldBeBlack,
321 &valueShouldBeBlack);
323 if (keyShouldBeBlack) {
324 fprintf(stderr, "Weak mapping key %p of map %p should be black\n",
325 aKey.asCell(), aMap);
326 mFailed = true;
329 if (valueShouldBeBlack) {
330 fprintf(stderr, "Weak mapping value %p of map %p should be black\n",
331 aValue.asCell(), aMap);
332 mFailed = true;
336 bool mFailed;
338 #endif // DEBUG
340 static void CheckParticipatesInCycleCollection(JS::GCCellPtr aThing,
341 const char* aName,
342 void* aClosure) {
343 bool* cycleCollectionEnabled = static_cast<bool*>(aClosure);
345 if (*cycleCollectionEnabled) {
346 return;
349 if (JS::IsCCTraceKind(aThing.kind()) && JS::GCThingIsMarkedGray(aThing)) {
350 *cycleCollectionEnabled = true;
354 NS_IMETHODIMP
355 JSGCThingParticipant::TraverseNative(void* aPtr,
356 nsCycleCollectionTraversalCallback& aCb) {
357 auto runtime = reinterpret_cast<CycleCollectedJSRuntime*>(
358 reinterpret_cast<char*>(this) -
359 offsetof(CycleCollectedJSRuntime, mGCThingCycleCollectorGlobal));
361 JS::GCCellPtr cellPtr(aPtr, JS::GCThingTraceKind(aPtr));
362 runtime->TraverseGCThing(CycleCollectedJSRuntime::TRAVERSE_FULL, cellPtr,
363 aCb);
364 return NS_OK;
367 // NB: This is only used to initialize the participant in
368 // CycleCollectedJSRuntime. It should never be used directly.
369 static JSGCThingParticipant sGCThingCycleCollectorGlobal;
371 NS_IMETHODIMP
372 JSZoneParticipant::TraverseNative(void* aPtr,
373 nsCycleCollectionTraversalCallback& aCb) {
374 auto runtime = reinterpret_cast<CycleCollectedJSRuntime*>(
375 reinterpret_cast<char*>(this) -
376 offsetof(CycleCollectedJSRuntime, mJSZoneCycleCollectorGlobal));
378 MOZ_ASSERT(!aCb.WantAllTraces());
379 JS::Zone* zone = static_cast<JS::Zone*>(aPtr);
381 runtime->TraverseZone(zone, aCb);
382 return NS_OK;
385 struct TraversalTracer : public JS::CallbackTracer {
386 TraversalTracer(JSRuntime* aRt, nsCycleCollectionTraversalCallback& aCb)
387 : JS::CallbackTracer(aRt, JS::TracerKind::Callback,
388 JS::TraceOptions(JS::WeakMapTraceAction::Skip,
389 JS::WeakEdgeTraceAction::Trace,
390 JS::IdTraceAction::CanSkip)),
391 mCb(aCb) {}
392 void onChild(const JS::GCCellPtr& aThing) override;
393 nsCycleCollectionTraversalCallback& mCb;
396 void TraversalTracer::onChild(const JS::GCCellPtr& aThing) {
397 // Checking strings and symbols for being gray is rather slow, and we don't
398 // need either of them for the cycle collector.
399 if (aThing.is<JSString>() || aThing.is<JS::Symbol>()) {
400 return;
403 // Don't traverse non-gray objects, unless we want all traces.
404 if (!JS::GCThingIsMarkedGray(aThing) && !mCb.WantAllTraces()) {
405 return;
409 * This function needs to be careful to avoid stack overflow. Normally, when
410 * IsCCTraceKind is true, the recursion terminates immediately as we just add
411 * |thing| to the CC graph. So overflow is only possible when there are long
412 * or cyclic chains of non-IsCCTraceKind GC things. Places where this can
413 * occur use special APIs to handle such chains iteratively.
415 if (JS::IsCCTraceKind(aThing.kind())) {
416 if (MOZ_UNLIKELY(mCb.WantDebugInfo())) {
417 char buffer[200];
418 context().getEdgeName(buffer, sizeof(buffer));
419 mCb.NoteNextEdgeName(buffer);
421 mCb.NoteJSChild(aThing);
422 } else if (aThing.is<js::Shape>()) {
423 // The maximum depth of traversal when tracing a Shape is unbounded, due to
424 // the parent pointers on the shape.
425 JS_TraceShapeCycleCollectorChildren(this, aThing);
426 } else if (aThing.is<js::ObjectGroup>()) {
427 // The maximum depth of traversal when tracing an ObjectGroup is unbounded,
428 // due to information attached to the groups which can lead other groups to
429 // be traced.
430 JS_TraceObjectGroupCycleCollectorChildren(this, aThing);
431 } else {
432 JS::TraceChildren(this, aThing);
437 * The cycle collection participant for a Zone is intended to produce the same
438 * results as if all of the gray GCthings in a zone were merged into a single
439 * node, except for self-edges. This avoids the overhead of representing all of
440 * the GCthings in the zone in the cycle collector graph, which should be much
441 * faster if many of the GCthings in the zone are gray.
443 * Zone merging should not always be used, because it is a conservative
444 * approximation of the true cycle collector graph that can incorrectly identify
445 * some garbage objects as being live. For instance, consider two cycles that
446 * pass through a zone, where one is garbage and the other is live. If we merge
447 * the entire zone, the cycle collector will think that both are alive.
449 * We don't have to worry about losing track of a garbage cycle, because any
450 * such garbage cycle incorrectly identified as live must contain at least one
451 * C++ to JS edge, and XPConnect will always add the C++ object to the CC graph.
452 * (This is in contrast to pure C++ garbage cycles, which must always be
453 * properly identified, because we clear the purple buffer during every CC,
454 * which may contain the last reference to a garbage cycle.)
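// Whether merging is actually used for a given collection is decided through
// the virtual UsefulToMergeZones() hook; the base implementation below simply
// returns false.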
457 // NB: This is only used to initialize the participant in
458 // CycleCollectedJSRuntime. It should never be used directly.
459 static const JSZoneParticipant sJSZoneCycleCollectorGlobal;
461 static void JSObjectsTenuredCb(JSContext* aContext, void* aData) {
462 static_cast<CycleCollectedJSRuntime*>(aData)->JSObjectsTenured();
465 static void MozCrashWarningReporter(JSContext*, JSErrorReport*) {
466 MOZ_CRASH("Why is someone touching JSAPI without an AutoJSAPI?");
469 JSHolderMap::Entry::Entry() : Entry(nullptr, nullptr, nullptr) {}
471 JSHolderMap::Entry::Entry(void* aHolder, nsScriptObjectTracer* aTracer,
472 JS::Zone* aZone)
473 : mHolder(aHolder),
474 mTracer(aTracer)
475 #ifdef DEBUG
477 mZone(aZone)
478 #endif
482 JSHolderMap::JSHolderMap() : mJSHolderMap(256) {}
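// JSHolderMap keeps holders in two kinds of storage: a vector for holders that
// are not tied to a single zone (mAnyZoneJSHolders, including multi-zone
// holders) and a per-zone map of vectors for single-zone holders
// (mPerZoneJSHolders). mJSHolderMap maps a holder pointer to its Entry so that
// Has/Get/GetAndRemove are O(1). Removed entries are cleared in place and only
// compacted out of their vector the next time it is iterated (see ForEach and
// RemoveEntry).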
484 template <typename F>
485 inline void JSHolderMap::ForEach(F&& f, WhichHolders aWhich) {
486 // Multi-zone JS holders must always be considered.
487 ForEach(mAnyZoneJSHolders, f, nullptr);
489 for (auto i = mPerZoneJSHolders.modIter(); !i.done(); i.next()) {
490 if (aWhich == HoldersInCollectingZones &&
491 !JS::ZoneIsCollecting(i.get().key())) {
492 continue;
495 EntryVector* holders = i.get().value().get();
496 ForEach(*holders, f, i.get().key());
497 if (holders->IsEmpty()) {
498 i.remove();
503 template <typename F>
504 inline void JSHolderMap::ForEach(EntryVector& aJSHolders, const F& f,
505 JS::Zone* aZone) {
506 for (auto iter = aJSHolders.Iter(); !iter.Done(); iter.Next()) {
507 Entry* entry = &iter.Get();
509 // If the entry has been cleared, remove it and shrink the vector.
510 if (!entry->mHolder && !RemoveEntry(aJSHolders, entry)) {
511 break; // Removed the last entry.
514 MOZ_ASSERT_IF(aZone, entry->mZone == aZone);
515 f(entry->mHolder, entry->mTracer, aZone);
519 bool JSHolderMap::RemoveEntry(EntryVector& aJSHolders, Entry* aEntry) {
520 MOZ_ASSERT(aEntry);
521 MOZ_ASSERT(!aEntry->mHolder);
523 // Remove all dead entries from the end of the vector.
524 while (!aJSHolders.GetLast().mHolder && &aJSHolders.GetLast() != aEntry) {
525 aJSHolders.PopLast();
528 // Swap the element we want to remove with the last one and update the hash
529 // table.
530 Entry* lastEntry = &aJSHolders.GetLast();
531 if (aEntry != lastEntry) {
532 MOZ_ASSERT(lastEntry->mHolder);
533 *aEntry = *lastEntry;
534 MOZ_ASSERT(mJSHolderMap.has(aEntry->mHolder));
535 MOZ_ALWAYS_TRUE(mJSHolderMap.put(aEntry->mHolder, aEntry));
538 aJSHolders.PopLast();
540 // Return whether aEntry is still in the vector.
541 return aEntry != lastEntry;
544 inline bool JSHolderMap::Has(void* aHolder) const {
545 return mJSHolderMap.has(aHolder);
548 inline nsScriptObjectTracer* JSHolderMap::Get(void* aHolder) const {
549 auto ptr = mJSHolderMap.lookup(aHolder);
550 if (!ptr) {
551 return nullptr;
554 Entry* entry = ptr->value();
555 MOZ_ASSERT(entry->mHolder == aHolder);
556 return entry->mTracer;
559 inline nsScriptObjectTracer* JSHolderMap::GetAndRemove(void* aHolder) {
560 MOZ_ASSERT(aHolder);
562 auto ptr = mJSHolderMap.lookup(aHolder);
563 if (!ptr) {
564 return nullptr;
567 Entry* entry = ptr->value();
568 MOZ_ASSERT(entry->mHolder == aHolder);
569 nsScriptObjectTracer* tracer = entry->mTracer;
571 // Clear the entry's contents. It will be removed during the next iteration.
572 *entry = Entry();
574 mJSHolderMap.remove(ptr);
576 return tracer;
579 inline void JSHolderMap::Put(void* aHolder, nsScriptObjectTracer* aTracer,
580 JS::Zone* aZone) {
581 MOZ_ASSERT(aHolder);
582 MOZ_ASSERT(aTracer);
584 // Don't associate multi-zone holders with a zone, even if one is supplied.
585 if (aTracer->IsMultiZoneJSHolder()) {
586 aZone = nullptr;
589 auto ptr = mJSHolderMap.lookupForAdd(aHolder);
590 if (ptr) {
591 Entry* entry = ptr->value();
592 #ifdef DEBUG
593 MOZ_ASSERT(entry->mHolder == aHolder);
594 MOZ_ASSERT(entry->mTracer == aTracer,
595 "Don't call HoldJSObjects in superclass ctors");
596 if (aZone) {
597 if (entry->mZone) {
598 MOZ_ASSERT(entry->mZone == aZone);
599 } else {
600 entry->mZone = aZone;
603 #endif
604 entry->mTracer = aTracer;
605 return;
608 EntryVector* vector = &mAnyZoneJSHolders;
609 if (aZone) {
610 auto ptr = mPerZoneJSHolders.lookupForAdd(aZone);
611 if (!ptr) {
612 MOZ_ALWAYS_TRUE(
613 mPerZoneJSHolders.add(ptr, aZone, MakeUnique<EntryVector>()));
615 vector = ptr->value().get();
618 vector->InfallibleAppend(Entry{aHolder, aTracer, aZone});
619 MOZ_ALWAYS_TRUE(mJSHolderMap.add(ptr, aHolder, &vector->GetLast()));
622 size_t JSHolderMap::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const {
623 size_t n = 0;
625 // We're deliberately not measuring anything hanging off the entries in
626 // mJSHolders.
627 n += mJSHolderMap.shallowSizeOfExcludingThis(aMallocSizeOf);
628 n += mAnyZoneJSHolders.SizeOfExcludingThis(aMallocSizeOf);
629 n += mPerZoneJSHolders.shallowSizeOfExcludingThis(aMallocSizeOf);
630 for (auto i = mPerZoneJSHolders.iter(); !i.done(); i.next()) {
631 n += i.get().value()->SizeOfExcludingThis(aMallocSizeOf);
634 return n;
637 CycleCollectedJSRuntime::CycleCollectedJSRuntime(JSContext* aCx)
638 : mContext(nullptr),
639 mGCThingCycleCollectorGlobal(sGCThingCycleCollectorGlobal),
640 mJSZoneCycleCollectorGlobal(sJSZoneCycleCollectorGlobal),
641 mJSRuntime(JS_GetRuntime(aCx)),
642 mHasPendingIdleGCTask(false),
643 mPrevGCSliceCallback(nullptr),
644 mPrevGCNurseryCollectionCallback(nullptr),
645 mOutOfMemoryState(OOMState::OK),
646 mLargeAllocationFailureState(OOMState::OK)
647 #ifdef DEBUG
649 mShutdownCalled(false)
650 #endif
652 MOZ_COUNT_CTOR(CycleCollectedJSRuntime);
653 MOZ_ASSERT(aCx);
654 MOZ_ASSERT(mJSRuntime);
656 #if defined(XP_MACOSX)
657 if (!XRE_IsParentProcess()) {
658 nsMacUtilsImpl::EnableTCSMIfAvailable();
660 #endif
662 if (!JS_AddExtraGCRootsTracer(aCx, TraceBlackJS, this)) {
663 MOZ_CRASH("JS_AddExtraGCRootsTracer failed");
665 JS_SetGrayGCRootsTracer(aCx, TraceGrayJS, this);
666 JS_SetGCCallback(aCx, GCCallback, this);
667 mPrevGCSliceCallback = JS::SetGCSliceCallback(aCx, GCSliceCallback);
669 if (NS_IsMainThread()) {
670 // We would like to support all threads here, but the way timeline consumers
671 // are set up currently, you can either add a marker for one specific
672 // docshell, or for every consumer globally. We would like to add a marker
673 // for every consumer observing anything on this thread, but that is not
674 // currently possible. For now, add global markers only when we are on the
675 // main thread, since the UI for this tracing data only displays data
676 // relevant to the main thread.
677 mPrevGCNurseryCollectionCallback =
678 JS::SetGCNurseryCollectionCallback(aCx, GCNurseryCollectionCallback);
681 JS_SetObjectsTenuredCallback(aCx, JSObjectsTenuredCb, this);
682 JS::SetOutOfMemoryCallback(aCx, OutOfMemoryCallback, this);
683 JS::SetWaitCallback(mJSRuntime, BeforeWaitCallback, AfterWaitCallback,
684 sizeof(dom::AutoYieldJSThreadExecution));
685 JS::SetWarningReporter(aCx, MozCrashWarningReporter);
687 js::AutoEnterOOMUnsafeRegion::setAnnotateOOMAllocationSizeCallback(
688 CrashReporter::AnnotateOOMAllocationSize);
690 static js::DOMCallbacks DOMcallbacks = {InstanceClassHasProtoAtDepth};
691 SetDOMCallbacks(aCx, &DOMcallbacks);
692 js::SetScriptEnvironmentPreparer(aCx, &mEnvironmentPreparer);
694 JS::dbg::SetDebuggerMallocSizeOf(aCx, moz_malloc_size_of);
696 #ifdef MOZ_JS_DEV_ERROR_INTERCEPTOR
697 JS_SetErrorInterceptorCallback(mJSRuntime, &mErrorInterceptor);
698 #endif // MOZ_JS_DEV_ERROR_INTERCEPTOR
700 JS_SetDestroyZoneCallback(aCx, OnZoneDestroyed);
703 #ifdef NS_BUILD_REFCNT_LOGGING
704 class JSLeakTracer : public JS::CallbackTracer {
705 public:
706 explicit JSLeakTracer(JSRuntime* aRuntime)
707 : JS::CallbackTracer(aRuntime, JS::TracerKind::Callback,
708 JS::WeakMapTraceAction::TraceKeysAndValues) {}
710 private:
711 void onChild(const JS::GCCellPtr& thing) override {
712 const char* kindName = JS::GCTraceKindToAscii(thing.kind());
713 size_t size = JS::GCTraceKindSize(thing.kind());
714 MOZ_LOG_CTOR(thing.asCell(), kindName, size);
717 #endif
719 void CycleCollectedJSRuntime::Shutdown(JSContext* cx) {
720 #ifdef MOZ_JS_DEV_ERROR_INTERCEPTOR
721 mErrorInterceptor.Shutdown(mJSRuntime);
722 #endif // MOZ_JS_DEV_ERROR_INTERCEPTOR
724 // There should not be any roots left to trace at this point. Ensure any that
725 // remain are flagged as leaks.
726 #ifdef NS_BUILD_REFCNT_LOGGING
727 JSLeakTracer tracer(Runtime());
728 TraceNativeBlackRoots(&tracer);
729 TraceNativeGrayRoots(&tracer, JSHolderMap::AllHolders);
730 #endif
732 #ifdef DEBUG
733 mShutdownCalled = true;
734 #endif
736 JS_SetDestroyZoneCallback(cx, nullptr);
739 CycleCollectedJSRuntime::~CycleCollectedJSRuntime() {
740 MOZ_COUNT_DTOR(CycleCollectedJSRuntime);
741 MOZ_ASSERT(!mDeferredFinalizerTable.Count());
742 MOZ_ASSERT(!mFinalizeRunnable);
743 MOZ_ASSERT(mShutdownCalled);
746 void CycleCollectedJSRuntime::SetContext(CycleCollectedJSContext* aContext) {
747 MOZ_ASSERT(!mContext || !aContext, "Don't replace the context!");
748 mContext = aContext;
751 size_t CycleCollectedJSRuntime::SizeOfExcludingThis(
752 MallocSizeOf aMallocSizeOf) const {
753 return mJSHolders.SizeOfExcludingThis(aMallocSizeOf);
756 void CycleCollectedJSRuntime::UnmarkSkippableJSHolders() {
757 mJSHolders.ForEach([](void* holder, nsScriptObjectTracer* tracer,
758 JS::Zone* zone) { tracer->CanSkip(holder, true); });
761 void CycleCollectedJSRuntime::DescribeGCThing(
762 bool aIsMarked, JS::GCCellPtr aThing,
763 nsCycleCollectionTraversalCallback& aCb) const {
764 if (!aCb.WantDebugInfo()) {
765 aCb.DescribeGCedNode(aIsMarked, "JS Object");
766 return;
769 char name[72];
770 uint64_t compartmentAddress = 0;
771 if (aThing.is<JSObject>()) {
772 JSObject* obj = &aThing.as<JSObject>();
773 compartmentAddress = (uint64_t)JS::GetCompartment(obj);
774 const JSClass* clasp = JS::GetClass(obj);
776 // Give the subclass a chance to do something
777 if (DescribeCustomObjects(obj, clasp, name)) {
778 // Nothing else to do!
779 } else if (js::IsFunctionObject(obj)) {
780 JSFunction* fun = JS_GetObjectFunction(obj);
781 JSString* str = JS_GetFunctionDisplayId(fun);
782 if (str) {
783 JSLinearString* linear = JS_ASSERT_STRING_IS_LINEAR(str);
784 nsAutoString chars;
785 AssignJSLinearString(chars, linear);
786 NS_ConvertUTF16toUTF8 fname(chars);
787 SprintfLiteral(name, "JS Object (Function - %s)", fname.get());
788 } else {
789 SprintfLiteral(name, "JS Object (Function)");
791 } else {
792 SprintfLiteral(name, "JS Object (%s)", clasp->name);
794 } else {
795 SprintfLiteral(name, "%s", JS::GCTraceKindToAscii(aThing.kind()));
798 // Disable printing global for objects while we figure out ObjShrink fallout.
799 aCb.DescribeGCedNode(aIsMarked, name, compartmentAddress);
802 void CycleCollectedJSRuntime::NoteGCThingJSChildren(
803 JS::GCCellPtr aThing, nsCycleCollectionTraversalCallback& aCb) const {
804 TraversalTracer trc(mJSRuntime, aCb);
805 JS::TraceChildren(&trc, aThing);
808 void CycleCollectedJSRuntime::NoteGCThingXPCOMChildren(
809 const JSClass* aClasp, JSObject* aObj,
810 nsCycleCollectionTraversalCallback& aCb) const {
811 MOZ_ASSERT(aClasp);
812 MOZ_ASSERT(aClasp == JS::GetClass(aObj));
814 JS::Rooted<JSObject*> obj(RootingCx(), aObj);
816 if (NoteCustomGCThingXPCOMChildren(aClasp, obj, aCb)) {
817 // Nothing else to do!
818 return;
821 // XXX This test seems fragile; we should probably whitelist classes that
822 // do hold a strong reference, but that might not be possible.
823 if (aClasp->flags & JSCLASS_HAS_PRIVATE &&
824 aClasp->flags & JSCLASS_PRIVATE_IS_NSISUPPORTS) {
825 NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(aCb, "JS::GetPrivate(obj)");
826 aCb.NoteXPCOMChild(static_cast<nsISupports*>(JS::GetPrivate(obj)));
827 return;
830 const DOMJSClass* domClass = GetDOMClass(aClasp);
831 if (domClass) {
832 NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(aCb, "UnwrapDOMObject(obj)");
833 // It's possible that our object is an unforgeable holder object, in
834 // which case it doesn't actually have a C++ DOM object associated with
835 // it. Use UnwrapPossiblyNotInitializedDOMObject, which produces null in
836 // that case, since NoteXPCOMChild/NoteNativeChild are null-safe.
837 if (domClass->mDOMObjectIsISupports) {
838 aCb.NoteXPCOMChild(
839 UnwrapPossiblyNotInitializedDOMObject<nsISupports>(obj));
840 } else if (domClass->mParticipant) {
841 aCb.NoteNativeChild(UnwrapPossiblyNotInitializedDOMObject<void>(obj),
842 domClass->mParticipant);
844 return;
847 if (IsRemoteObjectProxy(obj)) {
848 auto handler =
849 static_cast<const RemoteObjectProxyBase*>(js::GetProxyHandler(obj));
850 return handler->NoteChildren(obj, aCb);
853 JS::Value value = js::MaybeGetScriptPrivate(obj);
854 if (!value.isUndefined()) {
855 aCb.NoteXPCOMChild(static_cast<nsISupports*>(value.toPrivate()));
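// TraverseGCThing is used in two modes: TRAVERSE_FULL, when the GC thing is an
// individual node in the CC graph and both its JS and C++ children must be
// reported, and TRAVERSE_CPP, when an entire zone is represented by a single
// node and only the C++ children of its objects are needed (see TraverseZone
// and TraverseObjectShim below).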
859 void CycleCollectedJSRuntime::TraverseGCThing(
860 TraverseSelect aTs, JS::GCCellPtr aThing,
861 nsCycleCollectionTraversalCallback& aCb) {
862 bool isMarkedGray = JS::GCThingIsMarkedGray(aThing);
864 if (aTs == TRAVERSE_FULL) {
865 DescribeGCThing(!isMarkedGray, aThing, aCb);
868 // If this object is alive, then all of its children are alive. For JS
869 // objects, the black-gray invariant ensures the children are also marked
870 // black. For C++ objects, the ref count from this object will keep them
871 // alive. Thus we don't need to trace our children, unless we are debugging
872 // using WantAllTraces.
873 if (!isMarkedGray && !aCb.WantAllTraces()) {
874 return;
877 if (aTs == TRAVERSE_FULL) {
878 NoteGCThingJSChildren(aThing, aCb);
881 if (aThing.is<JSObject>()) {
882 JSObject* obj = &aThing.as<JSObject>();
883 NoteGCThingXPCOMChildren(JS::GetClass(obj), obj, aCb);
887 struct TraverseObjectShimClosure {
888 nsCycleCollectionTraversalCallback& cb;
889 CycleCollectedJSRuntime* self;
892 void CycleCollectedJSRuntime::TraverseZone(
893 JS::Zone* aZone, nsCycleCollectionTraversalCallback& aCb) {
895 * We treat the zone as being gray. We handle non-gray GCthings in the
896 * zone by not reporting their children to the CC. The black-gray invariant
897 * ensures that any JS children will also be non-gray, and thus don't need to
898 * be added to the graph. For C++ children, not representing the edge from the
899 * non-gray JS GCthings to the C++ object will keep the child alive.
901 * We don't allow zone merging in a WantAllTraces CC, because then these
902 * assumptions don't hold.
904 aCb.DescribeGCedNode(false, "JS Zone");
907 * Every JS child of everything in the zone is either in the zone
908 * or is a cross-compartment wrapper. In the former case, we don't need to
909 * represent these edges in the CC graph because JS objects are not ref
910 * counted. In the latter case, the JS engine keeps a map of these wrappers,
911 * which we iterate over. Edges between compartments in the same zone will add
912 * unnecessary loop edges to the graph (bug 842137).
914 TraversalTracer trc(mJSRuntime, aCb);
915 js::TraceGrayWrapperTargets(&trc, aZone);
918 * To find C++ children of things in the zone, we scan every JS Object in
919 * the zone. Only JS Objects can have C++ children.
921 TraverseObjectShimClosure closure = {aCb, this};
922 js::IterateGrayObjects(aZone, TraverseObjectShim, &closure);
925 /* static */
926 void CycleCollectedJSRuntime::TraverseObjectShim(
927 void* aData, JS::GCCellPtr aThing, const JS::AutoRequireNoGC& nogc) {
928 TraverseObjectShimClosure* closure =
929 static_cast<TraverseObjectShimClosure*>(aData);
931 MOZ_ASSERT(aThing.is<JSObject>());
932 closure->self->TraverseGCThing(CycleCollectedJSRuntime::TRAVERSE_CPP, aThing,
933 closure->cb);
936 void CycleCollectedJSRuntime::TraverseNativeRoots(
937 nsCycleCollectionNoteRootCallback& aCb) {
938 // NB: This is here just to preserve the existing XPConnect order. I doubt it
939 // would hurt to do this after the JS holders.
940 TraverseAdditionalNativeRoots(aCb);
942 mJSHolders.ForEach(
943 [&aCb](void* holder, nsScriptObjectTracer* tracer, JS::Zone* zone) {
944 bool noteRoot = false;
945 if (MOZ_UNLIKELY(aCb.WantAllTraces())) {
946 noteRoot = true;
947 } else {
948 tracer->Trace(holder,
949 TraceCallbackFunc(CheckParticipatesInCycleCollection),
950 &noteRoot);
953 if (noteRoot) {
954 aCb.NoteNativeRoot(holder, tracer);
959 /* static */
960 void CycleCollectedJSRuntime::TraceBlackJS(JSTracer* aTracer, void* aData) {
961 CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);
963 self->TraceNativeBlackRoots(aTracer);
966 /* static */
967 void CycleCollectedJSRuntime::TraceGrayJS(JSTracer* aTracer, void* aData) {
968 CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);
970 // Mark these roots as gray so the CC can walk them later.
971 self->TraceNativeGrayRoots(aTracer, JSHolderMap::HoldersInCollectingZones);
974 /* static */
975 void CycleCollectedJSRuntime::GCCallback(JSContext* aContext,
976 JSGCStatus aStatus,
977 JS::GCReason aReason, void* aData) {
978 CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);
980 MOZ_ASSERT(CycleCollectedJSContext::Get()->Context() == aContext);
981 MOZ_ASSERT(CycleCollectedJSContext::Get()->Runtime() == self);
983 self->OnGC(aContext, aStatus, aReason);
986 /* static */
987 void CycleCollectedJSRuntime::GCSliceCallback(JSContext* aContext,
988 JS::GCProgress aProgress,
989 const JS::GCDescription& aDesc) {
990 CycleCollectedJSRuntime* self = CycleCollectedJSRuntime::Get();
991 MOZ_ASSERT(CycleCollectedJSContext::Get()->Context() == aContext);
993 #ifdef MOZ_GECKO_PROFILER
994 if (profiler_thread_is_being_profiled()) {
995 if (aProgress == JS::GC_CYCLE_END) {
996 struct GCMajorMarker {
997 static constexpr mozilla::Span<const char> MarkerTypeName() {
998 return mozilla::MakeStringSpan("GCMajor");
1000 static void StreamJSONMarkerData(
1001 mozilla::baseprofiler::SpliceableJSONWriter& aWriter,
1002 const mozilla::ProfilerString8View& aTimingJSON) {
1003 if (aTimingJSON.Length() != 0) {
1004 aWriter.SplicedJSONProperty("timings", aTimingJSON);
1005 } else {
1006 aWriter.NullProperty("timings");
1009 static mozilla::MarkerSchema MarkerTypeDisplay() {
1010 using MS = mozilla::MarkerSchema;
1011 MS schema{MS::Location::markerChart, MS::Location::markerTable,
1012 MS::Location::timelineMemory};
1013 // No display instructions here; there is special handling in the
1014 // front-end.
1015 return schema;
1019 profiler_add_marker("GCMajor", baseprofiler::category::GCCC,
1020 MarkerTiming::Interval(aDesc.startTime(aContext),
1021 aDesc.endTime(aContext)),
1022 GCMajorMarker{},
1023 ProfilerString8View::WrapNullTerminatedString(
1024 aDesc.formatJSONProfiler(aContext).get()));
1025 } else if (aProgress == JS::GC_SLICE_END) {
1026 struct GCSliceMarker {
1027 static constexpr mozilla::Span<const char> MarkerTypeName() {
1028 return mozilla::MakeStringSpan("GCSlice");
1030 static void StreamJSONMarkerData(
1031 mozilla::baseprofiler::SpliceableJSONWriter& aWriter,
1032 const mozilla::ProfilerString8View& aTimingJSON) {
1033 if (aTimingJSON.Length() != 0) {
1034 aWriter.SplicedJSONProperty("timings", aTimingJSON);
1035 } else {
1036 aWriter.NullProperty("timings");
1039 static mozilla::MarkerSchema MarkerTypeDisplay() {
1040 using MS = mozilla::MarkerSchema;
1041 MS schema{MS::Location::markerChart, MS::Location::markerTable,
1042 MS::Location::timelineMemory};
1043 // No display instructions here; there is special handling in the
1044 // front-end.
1045 return schema;
1049 profiler_add_marker("GCSlice", baseprofiler::category::GCCC,
1050 MarkerTiming::Interval(aDesc.lastSliceStart(aContext),
1051 aDesc.lastSliceEnd(aContext)),
1052 GCSliceMarker{},
1053 ProfilerString8View::WrapNullTerminatedString(
1054 aDesc.sliceToJSONProfiler(aContext).get()));
1057 #endif
1059 if (aProgress == JS::GC_CYCLE_END &&
1060 JS::dbg::FireOnGarbageCollectionHookRequired(aContext)) {
1061 JS::GCReason reason = aDesc.reason_;
1062 Unused << NS_WARN_IF(
1063 NS_FAILED(DebuggerOnGCRunnable::Enqueue(aContext, aDesc)) &&
1064 reason != JS::GCReason::SHUTDOWN_CC &&
1065 reason != JS::GCReason::DESTROY_RUNTIME &&
1066 reason != JS::GCReason::XPCONNECT_SHUTDOWN);
1069 if (self->mPrevGCSliceCallback) {
1070 self->mPrevGCSliceCallback(aContext, aProgress, aDesc);
1074 class MinorGCMarker : public TimelineMarker {
1075 private:
1076 JS::GCReason mReason;
1078 public:
1079 MinorGCMarker(MarkerTracingType aTracingType, JS::GCReason aReason)
1080 : TimelineMarker("MinorGC", aTracingType, MarkerStackRequest::NO_STACK),
1081 mReason(aReason) {
1082 MOZ_ASSERT(aTracingType == MarkerTracingType::START ||
1083 aTracingType == MarkerTracingType::END);
1086 MinorGCMarker(JS::GCNurseryProgress aProgress, JS::GCReason aReason)
1087 : TimelineMarker(
1088 "MinorGC",
1089 aProgress == JS::GCNurseryProgress::GC_NURSERY_COLLECTION_START
1090 ? MarkerTracingType::START
1091 : MarkerTracingType::END,
1092 MarkerStackRequest::NO_STACK),
1093 mReason(aReason) {}
1095 virtual void AddDetails(JSContext* aCx,
1096 dom::ProfileTimelineMarker& aMarker) override {
1097 TimelineMarker::AddDetails(aCx, aMarker);
1099 if (GetTracingType() == MarkerTracingType::START) {
1100 auto reason = JS::ExplainGCReason(mReason);
1101 aMarker.mCauseName.Construct(NS_ConvertUTF8toUTF16(reason));
1105 virtual UniquePtr<AbstractTimelineMarker> Clone() override {
1106 auto clone = MakeUnique<MinorGCMarker>(GetTracingType(), mReason);
1107 clone->SetCustomTime(GetTime());
1108 return UniquePtr<AbstractTimelineMarker>(std::move(clone));
1112 /* static */
1113 void CycleCollectedJSRuntime::GCNurseryCollectionCallback(
1114 JSContext* aContext, JS::GCNurseryProgress aProgress,
1115 JS::GCReason aReason) {
1116 CycleCollectedJSRuntime* self = CycleCollectedJSRuntime::Get();
1117 MOZ_ASSERT(CycleCollectedJSContext::Get()->Context() == aContext);
1118 MOZ_ASSERT(NS_IsMainThread());
1120 RefPtr<TimelineConsumers> timelines = TimelineConsumers::Get();
1121 if (timelines && !timelines->IsEmpty()) {
1122 UniquePtr<AbstractTimelineMarker> abstractMarker(
1123 MakeUnique<MinorGCMarker>(aProgress, aReason));
1124 timelines->AddMarkerForAllObservedDocShells(abstractMarker);
1127 if (aProgress == JS::GCNurseryProgress::GC_NURSERY_COLLECTION_START) {
1128 self->mLatestNurseryCollectionStart = TimeStamp::Now();
1130 #ifdef MOZ_GECKO_PROFILER
1131 else if (aProgress == JS::GCNurseryProgress::GC_NURSERY_COLLECTION_END &&
1132 profiler_thread_is_being_profiled()) {
1133 struct GCMinorMarker {
1134 static constexpr mozilla::Span<const char> MarkerTypeName() {
1135 return mozilla::MakeStringSpan("GCMinor");
1137 static void StreamJSONMarkerData(
1138 mozilla::baseprofiler::SpliceableJSONWriter& aWriter,
1139 const mozilla::ProfilerString8View& aTimingJSON) {
1140 if (aTimingJSON.Length() != 0) {
1141 aWriter.SplicedJSONProperty("nursery", aTimingJSON);
1142 } else {
1143 aWriter.NullProperty("nursery");
1146 static mozilla::MarkerSchema MarkerTypeDisplay() {
1147 using MS = mozilla::MarkerSchema;
1148 MS schema{MS::Location::markerChart, MS::Location::markerTable,
1149 MS::Location::timelineMemory};
1150 // No display instructions here; there is special handling in the
1151 // front-end.
1152 return schema;
1156 profiler_add_marker(
1157 "GCMinor", baseprofiler::category::GCCC,
1158 MarkerTiming::IntervalUntilNowFrom(self->mLatestNurseryCollectionStart),
1159 GCMinorMarker{},
1160 ProfilerString8View::WrapNullTerminatedString(
1161 JS::MinorGcToJSON(aContext).get()));
1163 #endif
1165 if (self->mPrevGCNurseryCollectionCallback) {
1166 self->mPrevGCNurseryCollectionCallback(aContext, aProgress, aReason);
1170 /* static */
1171 void CycleCollectedJSRuntime::OutOfMemoryCallback(JSContext* aContext,
1172 void* aData) {
1173 CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);
1175 MOZ_ASSERT(CycleCollectedJSContext::Get()->Context() == aContext);
1176 MOZ_ASSERT(CycleCollectedJSContext::Get()->Runtime() == self);
1178 self->OnOutOfMemory();
1181 /* static */
1182 void* CycleCollectedJSRuntime::BeforeWaitCallback(uint8_t* aMemory) {
1183 MOZ_ASSERT(aMemory);
1185 // aMemory is stack-allocated memory to contain our RAII object. This allows
1186 // us to avoid heap allocations during this callback.
1187 return new (aMemory) dom::AutoYieldJSThreadExecution;
1190 /* static */
1191 void CycleCollectedJSRuntime::AfterWaitCallback(void* aCookie) {
1192 MOZ_ASSERT(aCookie);
1193 static_cast<dom::AutoYieldJSThreadExecution*>(aCookie)
1194 ->~AutoYieldJSThreadExecution();
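// JsGcTracer adapts the cycle collector's TraceCallbacks interface to a
// JSTracer: each overload forwards the edge to JS::TraceEdge (or to the
// wrapper cache), with the JSTracer passed through the void* closure. It is
// used to trace the JS members of registered JS holders.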
1197 struct JsGcTracer : public TraceCallbacks {
1198 virtual void Trace(JS::Heap<JS::Value>* aPtr, const char* aName,
1199 void* aClosure) const override {
1200 JS::TraceEdge(static_cast<JSTracer*>(aClosure), aPtr, aName);
1202 virtual void Trace(JS::Heap<jsid>* aPtr, const char* aName,
1203 void* aClosure) const override {
1204 JS::TraceEdge(static_cast<JSTracer*>(aClosure), aPtr, aName);
1206 virtual void Trace(JS::Heap<JSObject*>* aPtr, const char* aName,
1207 void* aClosure) const override {
1208 JS::TraceEdge(static_cast<JSTracer*>(aClosure), aPtr, aName);
1210 virtual void Trace(nsWrapperCache* aPtr, const char* aName,
1211 void* aClosure) const override {
1212 aPtr->TraceWrapper(static_cast<JSTracer*>(aClosure), aName);
1214 virtual void Trace(JS::TenuredHeap<JSObject*>* aPtr, const char* aName,
1215 void* aClosure) const override {
1216 JS::TraceEdge(static_cast<JSTracer*>(aClosure), aPtr, aName);
1218 virtual void Trace(JS::Heap<JSString*>* aPtr, const char* aName,
1219 void* aClosure) const override {
1220 JS::TraceEdge(static_cast<JSTracer*>(aClosure), aPtr, aName);
1222 virtual void Trace(JS::Heap<JSScript*>* aPtr, const char* aName,
1223 void* aClosure) const override {
1224 JS::TraceEdge(static_cast<JSTracer*>(aClosure), aPtr, aName);
1226 virtual void Trace(JS::Heap<JSFunction*>* aPtr, const char* aName,
1227 void* aClosure) const override {
1228 JS::TraceEdge(static_cast<JSTracer*>(aClosure), aPtr, aName);
1232 void mozilla::TraceScriptHolder(nsISupports* aHolder, JSTracer* aTracer) {
1233 nsXPCOMCycleCollectionParticipant* participant = nullptr;
1234 CallQueryInterface(aHolder, &participant);
1235 participant->Trace(aHolder, JsGcTracer(), aTracer);
1238 #if defined(NIGHTLY_BUILD) || defined(MOZ_DEV_EDITION) || defined(DEBUG)
1239 # define CHECK_SINGLE_ZONE_JS_HOLDERS
1240 #endif
1242 #ifdef CHECK_SINGLE_ZONE_JS_HOLDERS
1244 // A tracer that checks that a JS holder only holds JS GC things in a single
1245 // JS::Zone.
1246 struct CheckZoneTracer : public TraceCallbacks {
1247 const char* mClassName;
1248 mutable JS::Zone* mZone;
1250 explicit CheckZoneTracer(const char* aClassName, JS::Zone* aZone = nullptr)
1251 : mClassName(aClassName), mZone(aZone) {}
1253 void checkZone(JS::Zone* aZone, const char* aName) const {
1254 if (!mZone) {
1255 mZone = aZone;
1256 return;
1259 if (aZone == mZone) {
1260 return;
1263 // Most JS holders only contain pointers to GC things in a single zone. In
1264 // the future this will allow us to improve GC performance by only tracing
1265 // holders in zones that are being collected.
1267 // If you added a holder that has pointers into multiple zones, please try to
1268 // remedy this. Some options are:
1270 // - wrap all JS GC things into the same compartment
1271 // - split GC thing pointers between separate cycle collected objects
1273 // If all else fails, flag the class as containing pointers into multiple
1274 // zones by using NS_IMPL_CYCLE_COLLECTION_MULTI_ZONE_JSHOLDER_CLASS.
1275 MOZ_CRASH_UNSAFE_PRINTF(
1276 "JS holder %s contains pointers to GC things in more than one zone ("
1277 "found in %s)\n",
1278 mClassName, aName);
1281 virtual void Trace(JS::Heap<JS::Value>* aPtr, const char* aName,
1282 void* aClosure) const override {
1283 JS::Value value = aPtr->unbarrieredGet();
1284 if (value.isGCThing()) {
1285 checkZone(JS::GetGCThingZone(value.toGCCellPtr()), aName);
1288 virtual void Trace(JS::Heap<jsid>* aPtr, const char* aName,
1289 void* aClosure) const override {
1290 jsid id = aPtr->unbarrieredGet();
1291 if (id.isGCThing()) {
1292 checkZone(JS::GetTenuredGCThingZone(id.toGCCellPtr()), aName);
1295 virtual void Trace(JS::Heap<JSObject*>* aPtr, const char* aName,
1296 void* aClosure) const override {
1297 JSObject* obj = aPtr->unbarrieredGet();
1298 if (obj) {
1299 checkZone(js::GetObjectZoneFromAnyThread(obj), aName);
1302 virtual void Trace(nsWrapperCache* aPtr, const char* aName,
1303 void* aClosure) const override {
1304 JSObject* obj = aPtr->GetWrapperPreserveColor();
1305 if (obj) {
1306 checkZone(js::GetObjectZoneFromAnyThread(obj), aName);
1309 virtual void Trace(JS::TenuredHeap<JSObject*>* aPtr, const char* aName,
1310 void* aClosure) const override {
1311 JSObject* obj = aPtr->unbarrieredGetPtr();
1312 if (obj) {
1313 checkZone(js::GetObjectZoneFromAnyThread(obj), aName);
1316 virtual void Trace(JS::Heap<JSString*>* aPtr, const char* aName,
1317 void* aClosure) const override {
1318 JSString* str = aPtr->unbarrieredGet();
1319 if (str) {
1320 checkZone(JS::GetStringZone(str), aName);
1323 virtual void Trace(JS::Heap<JSScript*>* aPtr, const char* aName,
1324 void* aClosure) const override {
1325 JSScript* script = aPtr->unbarrieredGet();
1326 if (script) {
1327 checkZone(JS::GetTenuredGCThingZone(JS::GCCellPtr(script)), aName);
1330 virtual void Trace(JS::Heap<JSFunction*>* aPtr, const char* aName,
1331 void* aClosure) const override {
1332 JSFunction* fun = aPtr->unbarrieredGet();
1333 if (fun) {
1334 checkZone(js::GetObjectZoneFromAnyThread(JS_GetFunctionObject(fun)),
1335 aName);
1340 static inline void CheckHolderIsSingleZone(
1341 void* aHolder, nsCycleCollectionParticipant* aParticipant,
1342 JS::Zone* aZone) {
1343 CheckZoneTracer tracer(aParticipant->ClassName(), aZone);
1344 aParticipant->Trace(aHolder, tracer, nullptr);
1347 #endif
1349 static inline bool ShouldCheckSingleZoneHolders() {
1350 #if defined(DEBUG)
1351 return true;
1352 #elif defined(NIGHTLY_BUILD) || defined(MOZ_DEV_EDITION)
1353 // Don't check every time to avoid performance impact.
1354 return rand() % 256 == 0;
1355 #else
1356 return false;
1357 #endif
1360 void CycleCollectedJSRuntime::TraceNativeGrayRoots(
1361 JSTracer* aTracer, JSHolderMap::WhichHolders aWhich) {
1362 // NB: This is here just to preserve the existing XPConnect order. I doubt it
1363 // would hurt to do this after the JS holders.
1364 TraceAdditionalNativeGrayRoots(aTracer);
1366 bool checkSingleZoneHolders = ShouldCheckSingleZoneHolders();
1367 mJSHolders.ForEach(
1368 [aTracer, checkSingleZoneHolders](
1369 void* holder, nsScriptObjectTracer* tracer, JS::Zone* zone) {
1370 #ifdef CHECK_SINGLE_ZONE_JS_HOLDERS
1371 if (checkSingleZoneHolders && !tracer->IsMultiZoneJSHolder()) {
1372 CheckHolderIsSingleZone(holder, tracer, zone);
1374 #else
1375 Unused << checkSingleZoneHolders;
1376 #endif
1377 tracer->Trace(holder, JsGcTracer(), aTracer);
1379 aWhich);
1382 void CycleCollectedJSRuntime::AddJSHolder(void* aHolder,
1383 nsScriptObjectTracer* aTracer,
1384 JS::Zone* aZone) {
1385 mJSHolders.Put(aHolder, aTracer, aZone);
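// ClearJSHolder nulls out every JS reference in a holder (and clears any
// wrapper cache) so that a holder removed via RemoveJSHolder cannot leave
// stale, no-longer-traced pointers behind.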
1388 struct ClearJSHolder : public TraceCallbacks {
1389 virtual void Trace(JS::Heap<JS::Value>* aPtr, const char*,
1390 void*) const override {
1391 aPtr->setUndefined();
1394 virtual void Trace(JS::Heap<jsid>* aPtr, const char*, void*) const override {
1395 *aPtr = JSID_VOID;
1398 virtual void Trace(JS::Heap<JSObject*>* aPtr, const char*,
1399 void*) const override {
1400 *aPtr = nullptr;
1403 virtual void Trace(nsWrapperCache* aPtr, const char* aName,
1404 void* aClosure) const override {
1405 aPtr->ClearWrapper();
1408 virtual void Trace(JS::TenuredHeap<JSObject*>* aPtr, const char*,
1409 void*) const override {
1410 *aPtr = nullptr;
1413 virtual void Trace(JS::Heap<JSString*>* aPtr, const char*,
1414 void*) const override {
1415 *aPtr = nullptr;
1418 virtual void Trace(JS::Heap<JSScript*>* aPtr, const char*,
1419 void*) const override {
1420 *aPtr = nullptr;
1423 virtual void Trace(JS::Heap<JSFunction*>* aPtr, const char*,
1424 void*) const override {
1425 *aPtr = nullptr;
1429 void CycleCollectedJSRuntime::RemoveJSHolder(void* aHolder) {
1430 nsScriptObjectTracer* tracer = mJSHolders.GetAndRemove(aHolder);
1431 if (tracer) {
1432 // Bug 1531951: The analysis can't see through the virtual call but we know
1433 // that the ClearJSHolder tracer will never GC.
1434 JS::AutoSuppressGCAnalysis nogc;
1435 tracer->Trace(aHolder, ClearJSHolder(), nullptr);
1439 #ifdef DEBUG
1440 static void AssertNoGcThing(JS::GCCellPtr aGCThing, const char* aName,
1441 void* aClosure) {
1442 MOZ_ASSERT(!aGCThing);
1445 void CycleCollectedJSRuntime::AssertNoObjectsToTrace(void* aPossibleJSHolder) {
1446 nsScriptObjectTracer* tracer = mJSHolders.Get(aPossibleJSHolder);
1447 if (tracer) {
1448 tracer->Trace(aPossibleJSHolder, TraceCallbackFunc(AssertNoGcThing),
1449 nullptr);
1452 #endif
1454 nsCycleCollectionParticipant* CycleCollectedJSRuntime::GCThingParticipant() {
1455 return &mGCThingCycleCollectorGlobal;
1458 nsCycleCollectionParticipant* CycleCollectedJSRuntime::ZoneParticipant() {
1459 return &mJSZoneCycleCollectorGlobal;
1462 nsresult CycleCollectedJSRuntime::TraverseRoots(
1463 nsCycleCollectionNoteRootCallback& aCb) {
1464 TraverseNativeRoots(aCb);
1466 NoteWeakMapsTracer trc(mJSRuntime, aCb);
1467 js::TraceWeakMaps(&trc);
1469 return NS_OK;
1472 bool CycleCollectedJSRuntime::UsefulToMergeZones() const { return false; }
1474 void CycleCollectedJSRuntime::FixWeakMappingGrayBits() const {
1475 MOZ_ASSERT(!JS::IsIncrementalGCInProgress(mJSRuntime),
1476 "Don't call FixWeakMappingGrayBits during a GC.");
1477 FixWeakMappingGrayBitsTracer fixer(mJSRuntime);
1478 fixer.FixAll();
1481 void CycleCollectedJSRuntime::CheckGrayBits() const {
1482 MOZ_ASSERT(!JS::IsIncrementalGCInProgress(mJSRuntime),
1483 "Don't call CheckGrayBits during a GC.");
1485 #ifndef ANDROID
1486 // Bug 1346874 - The gray state check is expensive. Android tests are already
1487 // slow enough that this check can easily push them over the threshold to a
1488 // timeout.
1490 MOZ_ASSERT(js::CheckGrayMarkingState(mJSRuntime));
1491 MOZ_ASSERT(CheckWeakMappingGrayBitsTracer::Check(mJSRuntime));
1492 #endif
1495 bool CycleCollectedJSRuntime::AreGCGrayBitsValid() const {
1496 return js::AreGCGrayBitsValid(mJSRuntime);
1499 void CycleCollectedJSRuntime::GarbageCollect(JS::GCReason aReason) const {
1500 JSContext* cx = CycleCollectedJSContext::Get()->Context();
1501 JS::PrepareForFullGC(cx);
1502 JS::NonIncrementalGC(cx, GC_NORMAL, aReason);
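// Called after a nursery collection. Any wrapper recorded by
// NurseryWrapperAdded whose JS object did not survive into the tenured heap is
// finalized eagerly here, because its wrapper cache owner may still point at
// the dead object.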
1505 void CycleCollectedJSRuntime::JSObjectsTenured() {
1506 JSContext* cx = CycleCollectedJSContext::Get()->Context();
1507 for (auto iter = mNurseryObjects.Iter(); !iter.Done(); iter.Next()) {
1508 nsWrapperCache* cache = iter.Get();
1509 JSObject* wrapper = cache->GetWrapperMaybeDead();
1510 MOZ_DIAGNOSTIC_ASSERT(wrapper);
1511 if (!JS::ObjectIsTenured(wrapper)) {
1512 MOZ_ASSERT(!cache->PreservingWrapper());
1513 js::gc::FinalizeDeadNurseryObject(cx, wrapper);
1517 #ifdef DEBUG
1518 for (auto iter = mPreservedNurseryObjects.Iter(); !iter.Done(); iter.Next()) {
1519 MOZ_ASSERT(JS::ObjectIsTenured(iter.Get().get()));
1521 #endif
1523 mNurseryObjects.Clear();
1524 mPreservedNurseryObjects.Clear();
1527 void CycleCollectedJSRuntime::NurseryWrapperAdded(nsWrapperCache* aCache) {
1528 MOZ_ASSERT(aCache);
1529 MOZ_ASSERT(aCache->GetWrapperMaybeDead());
1530 MOZ_ASSERT(!JS::ObjectIsTenured(aCache->GetWrapperMaybeDead()));
1531 mNurseryObjects.InfallibleAppend(aCache);
1534 void CycleCollectedJSRuntime::NurseryWrapperPreserved(JSObject* aWrapper) {
1535 mPreservedNurseryObjects.InfallibleAppend(
1536 JS::PersistentRooted<JSObject*>(mJSRuntime, aWrapper));
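// The deferred finalizer table maps a finalize function to an opaque list of
// things waiting to be released. aAppendFunc either appends aThing to the
// existing list for aFunc or, when passed nullptr, creates a new list
// containing just aThing. The lists are drained later (possibly in slices) by
// IncrementalFinalizeRunnable.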
1539 void CycleCollectedJSRuntime::DeferredFinalize(
1540 DeferredFinalizeAppendFunction aAppendFunc, DeferredFinalizeFunction aFunc,
1541 void* aThing) {
1542 // Tell the analysis that the function pointers will not GC.
1543 JS::AutoSuppressGCAnalysis suppress;
1544 if (auto entry = mDeferredFinalizerTable.LookupForAdd(aFunc)) {
1545 aAppendFunc(entry.Data(), aThing);
1546 } else {
1547 entry.OrInsert(
1548 [aAppendFunc, aThing]() { return aAppendFunc(nullptr, aThing); });
1552 void CycleCollectedJSRuntime::DeferredFinalize(nsISupports* aSupports) {
1553 typedef DeferredFinalizerImpl<nsISupports> Impl;
1554 DeferredFinalize(Impl::AppendDeferredFinalizePointer, Impl::DeferredFinalize,
1555 aSupports);
1558 void CycleCollectedJSRuntime::DumpJSHeap(FILE* aFile) {
1559 JSContext* cx = CycleCollectedJSContext::Get()->Context();
1561 mozilla::MallocSizeOf mallocSizeOf =
1562 PR_GetEnv("MOZ_GC_LOG_SIZE") ? moz_malloc_size_of : nullptr;
1563 js::DumpHeap(cx, aFile, js::CollectNurseryBeforeDump, mallocSizeOf);
1566 IncrementalFinalizeRunnable::IncrementalFinalizeRunnable(
1567 CycleCollectedJSRuntime* aRt, DeferredFinalizerTable& aFinalizers)
1568 : DiscardableRunnable("IncrementalFinalizeRunnable"),
1569 mRuntime(aRt),
1570 mFinalizeFunctionToRun(0),
1571 mReleasing(false) {
1572 for (auto iter = aFinalizers.Iter(); !iter.Done(); iter.Next()) {
1573 DeferredFinalizeFunction& function = iter.Key();
1574 void*& data = iter.Data();
1576 DeferredFinalizeFunctionHolder* holder =
1577 mDeferredFinalizeFunctions.AppendElement();
1578 holder->run = function;
1579 holder->data = data;
1581 iter.Remove();
1583 MOZ_ASSERT(mDeferredFinalizeFunctions.Length());
1586 IncrementalFinalizeRunnable::~IncrementalFinalizeRunnable() {
1587 MOZ_ASSERT(!mDeferredFinalizeFunctions.Length());
1588 MOZ_ASSERT(!mRuntime);
1591 void IncrementalFinalizeRunnable::ReleaseNow(bool aLimited) {
1592 if (mReleasing) {
1593 NS_WARNING("Re-entering ReleaseNow");
1594 return;
1597 AUTO_PROFILER_LABEL("IncrementalFinalizeRunnable::ReleaseNow",
1598 GCCC_Finalize);
1600 mozilla::AutoRestore<bool> ar(mReleasing);
1601 mReleasing = true;
1602 MOZ_ASSERT(mDeferredFinalizeFunctions.Length() != 0,
1603 "We should have at least ReleaseSliceNow to run");
1604 MOZ_ASSERT(mFinalizeFunctionToRun < mDeferredFinalizeFunctions.Length(),
1605 "No more finalizers to run?");
1607 TimeDuration sliceTime = TimeDuration::FromMilliseconds(SliceMillis);
1608 TimeStamp started = aLimited ? TimeStamp::Now() : TimeStamp();
1609 bool timeout = false;
1610 do {
1611 const DeferredFinalizeFunctionHolder& function =
1612 mDeferredFinalizeFunctions[mFinalizeFunctionToRun];
1613 if (aLimited) {
1614 bool done = false;
1615 while (!timeout && !done) {
1617 * We don't want to read the clock too often, so we try to
1618 * release slices of 100 items.
1620 done = function.run(100, function.data);
1621 timeout = TimeStamp::Now() - started >= sliceTime;
1623 if (done) {
1624 ++mFinalizeFunctionToRun;
1626 if (timeout) {
1627 break;
1629 } else {
1630 while (!function.run(UINT32_MAX, function.data))
1631 ;
1632 ++mFinalizeFunctionToRun;
1634 } while (mFinalizeFunctionToRun < mDeferredFinalizeFunctions.Length());
1637 if (mFinalizeFunctionToRun == mDeferredFinalizeFunctions.Length()) {
1638 MOZ_ASSERT(mRuntime->mFinalizeRunnable == this);
1639 mDeferredFinalizeFunctions.Clear();
1640 CycleCollectedJSRuntime* runtime = mRuntime;
1641 mRuntime = nullptr;
1642 // NB: This may delete this!
1643 runtime->mFinalizeRunnable = nullptr;
1647 NS_IMETHODIMP
1648 IncrementalFinalizeRunnable::Run() {
1649 if (!mDeferredFinalizeFunctions.Length()) {
1650 /* These items were already processed synchronously in JSGC_END. */
1651 MOZ_ASSERT(!mRuntime);
1652 return NS_OK;
1655 MOZ_ASSERT(mRuntime->mFinalizeRunnable == this);
1656 TimeStamp start = TimeStamp::Now();
1657 ReleaseNow(true);
1659 if (mDeferredFinalizeFunctions.Length()) {
1660 nsresult rv = NS_DispatchToCurrentThread(this);
1661 if (NS_FAILED(rv)) {
1662 ReleaseNow(false);
1664 } else {
1665 MOZ_ASSERT(!mRuntime);
1668 uint32_t duration = (uint32_t)((TimeStamp::Now() - start).ToMilliseconds());
1669 Telemetry::Accumulate(Telemetry::DEFERRED_FINALIZE_ASYNC, duration);
1671 return NS_OK;
1674 void CycleCollectedJSRuntime::FinalizeDeferredThings(
1675 CycleCollectedJSContext::DeferredFinalizeType aType) {
1677 * If the previous GC created a runnable to finalize objects
1678 * incrementally, and if it hasn't finished yet, finish it now. We
1679 * don't want these to build up. We also don't want to allow any
1680 * existing incremental finalize runnables to run after a
1681 * non-incremental GC, since they are often used to detect leaks.
1683 if (mFinalizeRunnable) {
1684 mFinalizeRunnable->ReleaseNow(false);
1685 if (mFinalizeRunnable) {
1686 // If we re-entered ReleaseNow, we couldn't delete mFinalizeRunnable and
1687 // we need to just continue processing it.
1688 return;
1689 }
1690 }
1692 if (mDeferredFinalizerTable.Count() == 0) {
1693 return;
1694 }
1696 mFinalizeRunnable =
1697 new IncrementalFinalizeRunnable(this, mDeferredFinalizerTable);
1699 // Everything should be gone now.
1700 MOZ_ASSERT(mDeferredFinalizerTable.Count() == 0);
1702 if (aType == CycleCollectedJSContext::FinalizeIncrementally) {
1703 NS_DispatchToCurrentThreadQueue(do_AddRef(mFinalizeRunnable), 2500,
1704 EventQueuePriority::Idle);
1705 } else {
1706 mFinalizeRunnable->ReleaseNow(false);
1707 MOZ_ASSERT(!mFinalizeRunnable);
1708 }
1709 }
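// Illustrative sketch (not part of this file): callers normally do not touch
// mDeferredFinalizerTable directly but go through the DeferredFinalize()
// helpers declared in DeferredFinalize.h; treat the exact helper shown here
// as an assumption of the sketch rather than something this file defines.
//
//   nsISupports* supports = ...;          // an owned reference we want to
//                                         // release only after the next GC
//   mozilla::DeferredFinalize(supports);  // queued into the finalizer table
//
// FinalizeDeferredThings() above then snapshots that table into an
// IncrementalFinalizeRunnable and either drains it immediately or lets it run
// later from the idle queue in time-boxed slices.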
1711 const char* CycleCollectedJSRuntime::OOMStateToString(
1712 const OOMState aOomState) const {
1713 switch (aOomState) {
1714 case OOMState::OK:
1715 return "OK";
1716 case OOMState::Reporting:
1717 return "Reporting";
1718 case OOMState::Reported:
1719 return "Reported";
1720 case OOMState::Recovered:
1721 return "Recovered";
1722 default:
1723 MOZ_ASSERT_UNREACHABLE("OOMState holds an invalid value");
1724 return "Unknown";
1725 }
1726 }
1728 void CycleCollectedJSRuntime::AnnotateAndSetOutOfMemory(OOMState* aStatePtr,
1729 OOMState aNewState) {
1730 *aStatePtr = aNewState;
1731 CrashReporter::Annotation annotation =
1732 (aStatePtr == &mOutOfMemoryState)
1733 ? CrashReporter::Annotation::JSOutOfMemory
1734 : CrashReporter::Annotation::JSLargeAllocationFailure;
1736 CrashReporter::AnnotateCrashReport(
1737 annotation, nsDependentCString(OOMStateToString(aNewState)));
1738 }
1740 void CycleCollectedJSRuntime::OnGC(JSContext* aContext, JSGCStatus aStatus,
1741 JS::GCReason aReason) {
1742 switch (aStatus) {
1743 case JSGC_BEGIN:
1744 nsCycleCollector_prepareForGarbageCollection();
1745 PrepareWaitingZonesForGC();
1746 break;
1747 case JSGC_END: {
1748 if (mOutOfMemoryState == OOMState::Reported) {
1749 AnnotateAndSetOutOfMemory(&mOutOfMemoryState, OOMState::Recovered);
1750 }
1751 if (mLargeAllocationFailureState == OOMState::Reported) {
1752 AnnotateAndSetOutOfMemory(&mLargeAllocationFailureState,
1753 OOMState::Recovered);
1754 }
1756 // Do any deferred finalization of native objects. We will run the
1757 // finalizer later after we've returned to the event loop if any of
1758 // three conditions hold:
1759 // a) The GC is incremental. In this case, we probably care about pauses.
1760 // b) There is a pending exception. The finalizers are not set up to run
1761 // in that state.
1762 // c) The GC was triggered for internal JS engine reasons. If this is the
1763 // case, then we may be in the middle of running some code that the JIT
1764 // has assumed can't have certain kinds of side effects. Finalizers can do
1765 // all sorts of things, such as run JS, so we want to run them later.
1766 // However, if we're shutting down, we need to destroy things immediately.
1768 // Why do we ever bother finalizing things immediately if that's so
1769 // questionable? In some situations, such as while testing or in low
1770 // memory situations, we really want to free things right away.
1771 bool finalizeIncrementally = JS::WasIncrementalGC(mJSRuntime) ||
1772 JS_IsExceptionPending(aContext) ||
1773 (JS::InternalGCReason(aReason) &&
1774 aReason != JS::GCReason::DESTROY_RUNTIME);
1776 FinalizeDeferredThings(
1777 finalizeIncrementally ? CycleCollectedJSContext::FinalizeIncrementally
1778 : CycleCollectedJSContext::FinalizeNow);
1780 break;
1781 }
1782 default:
1783 MOZ_CRASH();
1784 }
1786 CustomGCCallback(aStatus);
1787 }
1789 void CycleCollectedJSRuntime::OnOutOfMemory() {
1790 AnnotateAndSetOutOfMemory(&mOutOfMemoryState, OOMState::Reporting);
1791 CustomOutOfMemoryCallback();
1792 AnnotateAndSetOutOfMemory(&mOutOfMemoryState, OOMState::Reported);
1793 }
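// Taken together, OnOutOfMemory() above and the JSGC_END handling in OnGC()
// drive a small state machine for the crash-report annotation:
//
//   OK --(OOM callback starts)--> Reporting
//      --(custom callback finished)--> Reported
//      --(next GC completes while Reported)--> Recovered
//
// AnnotateAndSetOutOfMemory() records each transition under JSOutOfMemory, or
// under JSLargeAllocationFailure for the parallel
// mLargeAllocationFailureState.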
1795 void CycleCollectedJSRuntime::SetLargeAllocationFailure(OOMState aNewState) {
1796 AnnotateAndSetOutOfMemory(&mLargeAllocationFailureState, aNewState);
1797 }
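// PrepareWaitingZonesForGC() is called from OnGC() at JSGC_BEGIN: if no zone
// has been queued in mZonesWaitingForGC it prepares a full GC, otherwise it
// prepares only the queued zones and then clears the queue.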
1799 void CycleCollectedJSRuntime::PrepareWaitingZonesForGC() {
1800 JSContext* cx = CycleCollectedJSContext::Get()->Context();
1801 if (mZonesWaitingForGC.Count() == 0) {
1802 JS::PrepareForFullGC(cx);
1803 } else {
1804 for (auto iter = mZonesWaitingForGC.Iter(); !iter.Done(); iter.Next()) {
1805 JS::PrepareZoneForGC(cx, iter.Get()->GetKey());
1806 }
1807 mZonesWaitingForGC.Clear();
1808 }
1809 }
1811 /* static */
1812 void CycleCollectedJSRuntime::OnZoneDestroyed(JSFreeOp* aFop, JS::Zone* aZone) {
1813 // Remove the zone from the set of zones waiting for GC, if present. This can
1814 // happen if a zone is added to the set during an incremental GC in which it
1815 // is later destroyed.
1816 CycleCollectedJSRuntime* runtime = Get();
1817 runtime->mZonesWaitingForGC.RemoveEntry(aZone);
1818 }
1820 void CycleCollectedJSRuntime::EnvironmentPreparer::invoke(
1821 JS::HandleObject global, js::ScriptEnvironmentPreparer::Closure& closure) {
1822 MOZ_ASSERT(JS_IsGlobalObject(global));
1823 nsIGlobalObject* nativeGlobal = xpc::NativeGlobal(global);
1825 // Not much we can do if we simply don't have a usable global here...
1826 NS_ENSURE_TRUE_VOID(nativeGlobal && nativeGlobal->HasJSGlobal());
1828 AutoEntryScript aes(nativeGlobal, "JS-engine-initiated execution");
1830 MOZ_ASSERT(!JS_IsExceptionPending(aes.cx()));
1832 DebugOnly<bool> ok = closure(aes.cx());
1834 MOZ_ASSERT_IF(ok, !JS_IsExceptionPending(aes.cx()));
1836 // The AutoEntryScript will check for JS_IsExceptionPending on the
1837 // JSContext and report it as needed as it comes off the stack.
1838 }
1840 /* static */
1841 CycleCollectedJSRuntime* CycleCollectedJSRuntime::Get() {
1842 auto context = CycleCollectedJSContext::Get();
1843 if (context) {
1844 return context->Runtime();
1845 }
1846 return nullptr;
1847 }
1849 #ifdef MOZ_JS_DEV_ERROR_INTERCEPTOR
1851 namespace js {
1852 extern void DumpValue(const JS::Value& val);
1853 }  // namespace js
1855 void CycleCollectedJSRuntime::ErrorInterceptor::Shutdown(JSRuntime* rt) {
1856 JS_SetErrorInterceptorCallback(rt, nullptr);
1857 mThrownError.reset();
1858 }
1860 /* virtual */
1861 void CycleCollectedJSRuntime::ErrorInterceptor::interceptError(
1862 JSContext* cx, JS::HandleValue exn) {
1863 if (mThrownError) {
1864 // We already have an error, we don't need anything more.
1865 return;
1866 }
1868 if (!nsContentUtils::ThreadsafeIsSystemCaller(cx)) {
1869 // We are only interested in chrome code.
1870 return;
1871 }
1873 const auto type = JS_GetErrorType(exn);
1874 if (!type) {
1875 // This is not one of the primitive error types.
1876 return;
1877 }
1879 switch (*type) {
1880 case JSExnType::JSEXN_REFERENCEERR:
1881 case JSExnType::JSEXN_SYNTAXERR:
1882 break;
1883 default:
1884 // Not one of the errors we are interested in.
1885 // Note that we are not interested in instances of `TypeError`
1886 // for the time being, as DOM (ab)uses this constructor to represent
1887 // all sorts of errors that are not even remotely related to type
1888 // errors (e.g. some network errors).
1889 // If we ever have a mechanism to differentiate between DOM-thrown
1890 // and SpiderMonkey-thrown instances of `TypeError`, we should
1891 // consider watching for `TypeError` here.
1892 return;
1893 }
1895 // Now copy the details of the exception locally.
1896 // While copying the details of an exception could be expensive, in most runs,
1897 // this will be done at most once during the execution of the process, so the
1898 // total cost should be reasonable.
1900 ErrorDetails details;
1901 details.mType = *type;
1902 // If `exn` isn't an exception object, `ExtractErrorValues` could end up
1903 // calling `toString()`, which could in turn end up throwing an error. While
1904 // this should work, we want to avoid that complex use case. Fortunately, we
1905 // have already checked above that `exn` is an exception object, so nothing
1906 // such should happen.
1907 nsContentUtils::ExtractErrorValues(cx, exn, details.mFilename, &details.mLine,
1908 &details.mColumn, details.mMessage);
1910 JS::UniqueChars buf =
1911 JS::FormatStackDump(cx, /* showArgs = */ false, /* showLocals = */ false,
1912 /* showThisProps = */ false);
1913 CopyUTF8toUTF16(mozilla::MakeStringSpan(buf.get()), details.mStack);
1915 mThrownError.emplace(std::move(details));
1916 }
1918 void CycleCollectedJSRuntime::ClearRecentDevError() {
1919 mErrorInterceptor.mThrownError.reset();
1920 }
1922 bool CycleCollectedJSRuntime::GetRecentDevError(
1923 JSContext* cx, JS::MutableHandle<JS::Value> error) {
1924 if (!mErrorInterceptor.mThrownError) {
1925 return true;
1926 }
1928 // Create a copy of the exception.
1929 JS::RootedObject obj(cx, JS_NewPlainObject(cx));
1930 if (!obj) {
1931 return false;
1932 }
1934 JS::RootedValue message(cx);
1935 JS::RootedValue filename(cx);
1936 JS::RootedValue stack(cx);
1937 if (!ToJSValue(cx, mErrorInterceptor.mThrownError->mMessage, &message) ||
1938 !ToJSValue(cx, mErrorInterceptor.mThrownError->mFilename, &filename) ||
1939 !ToJSValue(cx, mErrorInterceptor.mThrownError->mStack, &stack)) {
1940 return false;
1941 }
1943 // Build the object.
1944 const auto FLAGS = JSPROP_READONLY | JSPROP_ENUMERATE | JSPROP_PERMANENT;
1945 if (!JS_DefineProperty(cx, obj, "message", message, FLAGS) ||
1946 !JS_DefineProperty(cx, obj, "fileName", filename, FLAGS) ||
1947 !JS_DefineProperty(cx, obj, "lineNumber",
1948 mErrorInterceptor.mThrownError->mLine, FLAGS) ||
1949 !JS_DefineProperty(cx, obj, "stack", stack, FLAGS)) {
1950 return false;
1951 }
1953 // Pass the result.
1954 error.setObject(*obj);
1955 return true;
1956 }
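// For reference, the object handed back to chrome callers has this shape (all
// properties are read-only, enumerable and permanent). The concrete values
// below are invented for illustration; only the property names and flags come
// from GetRecentDevError() above.
//
//   {
//     message: "ReferenceError: foo is not defined",
//     fileName: "resource://gre/modules/Example.jsm",
//     lineNumber: 42,
//     stack: "...",
//   }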
1957 #endif // MOZ_JS_DEV_ERROR_INTERCEPTOR
1959 #undef MOZ_JS_DEV_ERROR_INTERCEPTOR