Bumping manifests a=b2g-bump
[gecko.git] / xpcom / base / CycleCollectedJSRuntime.cpp
blobd656afd60c516a566e1a206948100ac27d953867
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
2 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
3 /* This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 // We're dividing JS objects into 3 categories:
8 //
9 // 1. "real" roots, held by the JS engine itself or rooted through the root
10 // and lock JS APIs. Roots from this category are considered black in the
11 // cycle collector, any cycle they participate in is uncollectable.
13 // 2. certain roots held by C++ objects that are guaranteed to be alive.
14 // Roots from this category are considered black in the cycle collector,
15 // and any cycle they participate in is uncollectable. These roots are
16 // traced from TraceNativeBlackRoots.
18 // 3. all other roots held by C++ objects that participate in cycle
19 // collection, held by us (see TraceNativeGrayRoots). Roots from this
20 // category are considered grey in the cycle collector; whether or not
21 // they are collected depends on the objects that hold them.
23 // Note that if a root is in multiple categories, it is the fact that it is
24 // in category 1 or 2 that takes precedence, so it will be considered black.
26 // During garbage collection we switch to an additional mark color (gray)
27 // when tracing inside TraceNativeGrayRoots. This allows us to walk those
28 // roots later on and add all objects reachable only from them to the
29 // cycle collector.
31 // Phases:
33 // 1. marking of the roots in category 1 by having the JS GC do its marking
34 // 2. marking of the roots in category 2 by having the JS GC call us back
35 // (via JS_SetExtraGCRootsTracer) and running TraceNativeBlackRoots
36 // 3. marking of the roots in category 3 by TraceNativeGrayRoots using an
37 // additional color (gray).
38 // 4. end of GC, GC can sweep its heap
40 // At some later point, when the cycle collector runs:
42 // 5. walk gray objects and add them to the cycle collector, cycle collect
44 // JS objects that are part of cycles the cycle collector breaks will be
45 // collected by the next JS GC.
47 // If WantAllTraces() is false the cycle collector will not traverse roots
48 // from category 1 or any JS objects held by them. Any JS objects they hold
49 // will already be marked by the JS GC and will thus be colored black
50 // themselves. Any C++ objects they hold will have a missing (untraversed)
51 // edge from the JS object to the C++ object and so it will be marked black
52 // too. This decreases the number of objects that the cycle collector has to
53 // deal with.
54 // To improve debugging, if WantAllTraces() is true all JS objects are
55 // traversed.
57 #include "mozilla/CycleCollectedJSRuntime.h"
58 #include <algorithm>
59 #include "mozilla/ArrayUtils.h"
60 #include "mozilla/MemoryReporting.h"
61 #include "mozilla/dom/BindingUtils.h"
62 #include "mozilla/dom/DOMJSClass.h"
63 #include "mozilla/dom/ScriptSettings.h"
64 #include "jsprf.h"
65 #include "nsCycleCollectionNoteRootCallback.h"
66 #include "nsCycleCollectionParticipant.h"
67 #include "nsCycleCollector.h"
68 #include "nsDOMJSUtils.h"
69 #include "nsJSUtils.h"
71 #ifdef MOZ_CRASHREPORTER
72 #include "nsExceptionHandler.h"
73 #endif
75 #include "nsIException.h"
76 #include "nsThreadUtils.h"
77 #include "xpcpublic.h"
79 using namespace mozilla;
80 using namespace mozilla::dom;
82 namespace mozilla {
// Pairs a deferred-finalization callback with the opaque data it consumes.
84 struct DeferredFinalizeFunctionHolder
86 DeferredFinalizeFunction run;
87 void* data;
// Runnable that releases deferred-finalized objects incrementally, in
// time-limited slices, so a large backlog does not janк the main thread.
90 class IncrementalFinalizeRunnable : public nsRunnable
92 typedef nsAutoTArray<DeferredFinalizeFunctionHolder, 16> DeferredFinalizeArray;
93 typedef CycleCollectedJSRuntime::DeferredFinalizerTable DeferredFinalizerTable;
95 CycleCollectedJSRuntime* mRuntime;
// nsISupports wrappers queued for release.
96 nsTArray<nsISupports*> mSupports;
// Finalize functions harvested from the runtime's deferred-finalizer table.
97 DeferredFinalizeArray mDeferredFinalizeFunctions;
// Index of the next entry of mDeferredFinalizeFunctions to run.
98 uint32_t mFinalizeFunctionToRun;
// Budget for one incremental slice, in milliseconds.
100 static const PRTime SliceMillis = 10; /* ms */
// Moves each (function, data) pair out of the hash table into the array.
102 static PLDHashOperator
103 DeferredFinalizerEnumerator(DeferredFinalizeFunction& aFunction,
104 void*& aData,
105 void* aClosure);
107 public:
108 IncrementalFinalizeRunnable(CycleCollectedJSRuntime* aRt,
109 nsTArray<nsISupports*>& aMSupports,
110 DeferredFinalizerTable& aFinalizerTable);
111 virtual ~IncrementalFinalizeRunnable();
// NOTE(review): presumably aLimited == true restricts the work to one
// SliceMillis slice while false drains everything — confirm in the
// definition (outside this view).
113 void ReleaseNow(bool aLimited);
115 NS_DECL_NSIRUNNABLE
118 } // namespace mozilla
120 inline bool
121 AddToCCKind(JSGCTraceKind aKind)
123 return aKind == JSTRACE_OBJECT || aKind == JSTRACE_SCRIPT;
126 static void
127 TraceWeakMappingChild(JSTracer* aTrc, void** aThingp, JSGCTraceKind aKind);
// JSTracer that reports the children of a weak map value as weak-mapping
// edges (mMap, mKey, mKeyDelegate) to the cycle collector callback.
129 struct NoteWeakMapChildrenTracer : public JSTracer
131 NoteWeakMapChildrenTracer(JSRuntime* aRt,
132 nsCycleCollectionNoteRootCallback& aCb)
133 : JSTracer(aRt, TraceWeakMappingChild), mCb(aCb)
136 nsCycleCollectionNoteRootCallback& mCb;
// Set to true once at least one child has been reported for the current entry.
137 bool mTracedAny;
138 JSObject* mMap;
139 void* mKey;
140 void* mKeyDelegate;
143 static void
144 TraceWeakMappingChild(JSTracer* aTrc, void** aThingp, JSGCTraceKind aKind)
146 MOZ_ASSERT(aTrc->callback == TraceWeakMappingChild);
147 void* thing = *aThingp;
148 NoteWeakMapChildrenTracer* tracer =
149 static_cast<NoteWeakMapChildrenTracer*>(aTrc);
// Strings are irrelevant to the cycle collector graph.
151 if (aKind == JSTRACE_STRING) {
152 return;
// Non-gray things are guaranteed live; skip them unless debugging
// requested all traces.
155 if (!xpc_IsGrayGCThing(thing) && !tracer->mCb.WantAllTraces()) {
156 return;
// CC-representable kinds get a weak-mapping note; other kinds are traced
// through transitively until CC-representable children are found.
159 if (AddToCCKind(aKind)) {
160 tracer->mCb.NoteWeakMapping(tracer->mMap, tracer->mKey,
161 tracer->mKeyDelegate, thing);
162 tracer->mTracedAny = true;
163 } else {
164 JS_TraceChildren(aTrc, thing, aKind);
// Walks every weak map entry in the runtime and reports key/value
// liveness relationships to the cycle collector.
168 struct NoteWeakMapsTracer : public js::WeakMapTracer
170 NoteWeakMapsTracer(JSRuntime* aRt, js::WeakMapTraceCallback aCb,
171 nsCycleCollectionNoteRootCallback& aCccb)
172 : js::WeakMapTracer(aRt, aCb), mCb(aCccb), mChildTracer(aRt, aCccb)
175 nsCycleCollectionNoteRootCallback& mCb;
// Used to walk non-CC-representable values for their CC-representable children.
176 NoteWeakMapChildrenTracer mChildTracer;
// Per-entry callback invoked by js::TraceWeakMaps for each weak map entry.
179 static void
180 TraceWeakMapping(js::WeakMapTracer* aTrc, JSObject* aMap,
181 void* aKey, JSGCTraceKind aKeyKind,
182 void* aValue, JSGCTraceKind aValueKind)
184 MOZ_ASSERT(aTrc->callback == TraceWeakMapping);
185 NoteWeakMapsTracer* tracer = static_cast<NoteWeakMapsTracer*>(aTrc);
187 // If nothing that could be held alive by this entry is marked gray, return.
188 if ((!aKey || !xpc_IsGrayGCThing(aKey)) &&
189 MOZ_LIKELY(!tracer->mCb.WantAllTraces())) {
190 if (!aValue || !xpc_IsGrayGCThing(aValue) ||
191 aValueKind == JSTRACE_STRING) {
192 return;
196 // The cycle collector can only properly reason about weak maps if it can
197 // reason about the liveness of their keys, which in turn requires that
198 // the key can be represented in the cycle collector graph. All existing
199 // uses of weak maps use either objects or scripts as keys, which are okay.
200 MOZ_ASSERT(AddToCCKind(aKeyKind));
202 // As an emergency fallback for non-debug builds, if the key is not
203 // representable in the cycle collector graph, we treat it as marked. This
204 // can cause leaks, but is preferable to ignoring the binding, which could
205 // cause the cycle collector to free live objects.
206 if (!AddToCCKind(aKeyKind)) {
207 aKey = nullptr;
// The delegate (if any) is what actually keeps an object key alive.
210 JSObject* kdelegate = nullptr;
211 if (aKey && aKeyKind == JSTRACE_OBJECT) {
212 kdelegate = js::GetWeakmapKeyDelegate((JSObject*)aKey);
215 if (AddToCCKind(aValueKind)) {
216 tracer->mCb.NoteWeakMapping(aMap, aKey, kdelegate, aValue);
217 } else {
// Value is not CC-representable: report its CC-representable children
// instead, via the child tracer configured with this entry's map/key.
218 tracer->mChildTracer.mTracedAny = false;
219 tracer->mChildTracer.mMap = aMap;
220 tracer->mChildTracer.mKey = aKey;
221 tracer->mChildTracer.mKeyDelegate = kdelegate;
223 if (aValue && aValueKind != JSTRACE_STRING) {
224 JS_TraceChildren(&tracer->mChildTracer, aValue, aValueKind);
227 // The delegate could hold alive the key, so report something to the CC
228 // if we haven't already.
229 if (!tracer->mChildTracer.mTracedAny &&
230 aKey && xpc_IsGrayGCThing(aKey) && kdelegate) {
231 tracer->mCb.NoteWeakMapping(aMap, aKey, kdelegate, nullptr);
236 // This is based on the logic in TraceWeakMapping.
// Unmarks (un-grays) weak map keys/values whose liveness is implied by a
// black delegate/key/map, iterating to a fixed point since unmarking one
// entry can make another entry's key black.
237 struct FixWeakMappingGrayBitsTracer : public js::WeakMapTracer
239 explicit FixWeakMappingGrayBitsTracer(JSRuntime* aRt)
240 : js::WeakMapTracer(aRt, FixWeakMappingGrayBits)
// Repeatedly walk all weak maps until a pass makes no change.
244 void
245 FixAll()
247 do {
248 mAnyMarked = false;
249 js::TraceWeakMaps(this);
250 } while (mAnyMarked);
253 private:
255 static void
256 FixWeakMappingGrayBits(js::WeakMapTracer* aTrc, JSObject* aMap,
257 void* aKey, JSGCTraceKind aKeyKind,
258 void* aValue, JSGCTraceKind aValueKind)
260 FixWeakMappingGrayBitsTracer* tracer =
261 static_cast<FixWeakMappingGrayBitsTracer*>(aTrc);
263 // If nothing that could be held alive by this entry is marked gray, return.
264 bool delegateMightNeedMarking = aKey && xpc_IsGrayGCThing(aKey);
265 bool valueMightNeedMarking = aValue && xpc_IsGrayGCThing(aValue) &&
266 aValueKind != JSTRACE_STRING;
267 if (!delegateMightNeedMarking && !valueMightNeedMarking) {
268 return;
// Non-CC-representable keys are conservatively treated as marked (see
// the matching fallback in TraceWeakMapping).
271 if (!AddToCCKind(aKeyKind)) {
272 aKey = nullptr;
// A black delegate keeps the key alive: unmark the key (recursively).
275 if (delegateMightNeedMarking && aKeyKind == JSTRACE_OBJECT) {
276 JSObject* kdelegate = js::GetWeakmapKeyDelegate((JSObject*)aKey);
277 if (kdelegate && !xpc_IsGrayGCThing(kdelegate)) {
278 if (JS::UnmarkGrayGCThingRecursively(aKey, JSTRACE_OBJECT)) {
279 tracer->mAnyMarked = true;
// A black map and black (or absent) key keep the value alive: unmark it.
284 if (aValue && xpc_IsGrayGCThing(aValue) &&
285 (!aKey || !xpc_IsGrayGCThing(aKey)) &&
286 (!aMap || !xpc_IsGrayGCThing(aMap)) &&
287 aValueKind != JSTRACE_SHAPE) {
288 if (JS::UnmarkGrayGCThingRecursively(aValue, aValueKind)) {
289 tracer->mAnyMarked = true;
// True if the last TraceWeakMaps pass unmarked anything.
294 bool mAnyMarked;
// Closure threaded through mJSHolders enumeration: the callback to note
// roots on, plus a scratch flag set while probing each holder.
297 struct Closure
299 explicit Closure(nsCycleCollectionNoteRootCallback* aCb)
300 : mCycleCollectionEnabled(true), mCb(aCb)
304 bool mCycleCollectionEnabled;
305 nsCycleCollectionNoteRootCallback* mCb;
// Trace callback: flips the closure's flag to true as soon as one traced
// thing is both CC-representable and gray.
308 static void
309 CheckParticipatesInCycleCollection(void* aThing, const char* aName,
310 void* aClosure)
312 Closure* closure = static_cast<Closure*>(aClosure);
// Already found a qualifying thing for this holder; nothing more to do.
314 if (closure->mCycleCollectionEnabled) {
315 return;
318 if (AddToCCKind(js::GCThingTraceKind(aThing)) &&
319 xpc_IsGrayGCThing(aThing)) {
320 closure->mCycleCollectionEnabled = true;
// Per-holder enumerator: notes the holder as a native root if it holds
// any gray CC-representable JS thing (or unconditionally in WantAllTraces
// debug runs).
324 static PLDHashOperator
325 NoteJSHolder(void* aHolder, nsScriptObjectTracer*& aTracer, void* aArg)
327 Closure* closure = static_cast<Closure*>(aArg);
329 bool noteRoot;
330 if (MOZ_UNLIKELY(closure->mCb->WantAllTraces())) {
331 noteRoot = true;
332 } else {
// Reset the flag, then let the holder's tracer probe its JS things.
333 closure->mCycleCollectionEnabled = false;
334 aTracer->Trace(aHolder,
335 TraceCallbackFunc(CheckParticipatesInCycleCollection),
336 closure);
337 noteRoot = closure->mCycleCollectionEnabled;
340 if (noteRoot) {
341 closure->mCb->NoteNativeRoot(aHolder, aTracer);
344 return PL_DHASH_NEXT;
347 NS_IMETHODIMP
// Traverse a single GC thing. The participant is embedded in
// CycleCollectedJSRuntime, so the owning runtime is recovered with
// offsetof-based pointer arithmetic from `this`.
348 JSGCThingParticipant::Traverse(void* aPtr,
349 nsCycleCollectionTraversalCallback& aCb)
351 auto runtime = reinterpret_cast<CycleCollectedJSRuntime*>(
352 reinterpret_cast<char*>(this) - offsetof(CycleCollectedJSRuntime,
353 mGCThingCycleCollectorGlobal));
355 runtime->TraverseGCThing(CycleCollectedJSRuntime::TRAVERSE_FULL,
356 aPtr, js::GCThingTraceKind(aPtr), aCb);
357 return NS_OK;
360 // NB: This is only used to initialize the participant in
361 // CycleCollectedJSRuntime. It should never be used directly.
362 static JSGCThingParticipant sGCThingCycleCollectorGlobal;
364 NS_IMETHODIMP
// Traverse an entire JS zone as one merged CC graph node; like
// JSGCThingParticipant::Traverse, recovers the owning runtime via offsetof.
365 JSZoneParticipant::Traverse(void* aPtr, nsCycleCollectionTraversalCallback& aCb)
367 auto runtime = reinterpret_cast<CycleCollectedJSRuntime*>(
368 reinterpret_cast<char*>(this) - offsetof(CycleCollectedJSRuntime,
369 mJSZoneCycleCollectorGlobal));
// Zone merging is never used in a WantAllTraces CC (see TraverseZone).
371 MOZ_ASSERT(!aCb.WantAllTraces());
372 JS::Zone* zone = static_cast<JS::Zone*>(aPtr);
374 runtime->TraverseZone(zone, aCb);
375 return NS_OK;
378 static void
379 NoteJSChildTracerShim(JSTracer* aTrc, void** aThingp, JSGCTraceKind aTraceKind);
// JSTracer that forwards each traced child to the cycle collector
// traversal callback; weak map entries are handled separately, hence
// DoNotTraceWeakMaps.
381 struct TraversalTracer : public JSTracer
383 TraversalTracer(JSRuntime* aRt, nsCycleCollectionTraversalCallback& aCb)
384 : JSTracer(aRt, NoteJSChildTracerShim, DoNotTraceWeakMaps), mCb(aCb)
387 nsCycleCollectionTraversalCallback& mCb;
390 static void
// Report one traced JS thing to the CC traversal callback, recursing
// through kinds that are not themselves CC graph nodes.
391 NoteJSChild(JSTracer* aTrc, void* aThing, JSGCTraceKind aTraceKind)
393 TraversalTracer* tracer = static_cast<TraversalTracer*>(aTrc);
395 // Don't traverse non-gray objects, unless we want all traces.
396 if (!xpc_IsGrayGCThing(aThing) && !tracer->mCb.WantAllTraces()) {
397 return;
401 * This function needs to be careful to avoid stack overflow. Normally, when
402 * AddToCCKind is true, the recursion terminates immediately as we just add
403 * |thing| to the CC graph. So overflow is only possible when there are long
404 * chains of non-AddToCCKind GC things. Currently, this only can happen via
405 * shape parent pointers. The special JSTRACE_SHAPE case below handles
406 * parent pointers iteratively, rather than recursively, to avoid overflow.
408 if (AddToCCKind(aTraceKind)) {
// In debug mode, label the edge using whatever debug-print info the
// tracer carries (printer callback, indexed arg, or plain name).
409 if (MOZ_UNLIKELY(tracer->mCb.WantDebugInfo())) {
410 // based on DumpNotify in jsapi.cpp
411 if (tracer->debugPrinter()) {
412 char buffer[200];
413 tracer->debugPrinter()(aTrc, buffer, sizeof(buffer));
414 tracer->mCb.NoteNextEdgeName(buffer);
415 } else if (tracer->debugPrintIndex() != (size_t)-1) {
416 char buffer[200];
417 JS_snprintf(buffer, sizeof(buffer), "%s[%lu]",
418 static_cast<const char*>(tracer->debugPrintArg()),
419 tracer->debugPrintIndex());
420 tracer->mCb.NoteNextEdgeName(buffer);
421 } else {
422 tracer->mCb.NoteNextEdgeName(static_cast<const char*>(tracer->debugPrintArg()));
425 tracer->mCb.NoteJSChild(aThing);
426 } else if (aTraceKind == JSTRACE_SHAPE) {
// Iterative shape-chain walk; see the overflow comment above.
427 JS_TraceShapeCycleCollectorChildren(aTrc, aThing);
428 } else if (aTraceKind != JSTRACE_STRING) {
429 JS_TraceChildren(aTrc, aThing, aTraceKind);
433 static void
434 NoteJSChildTracerShim(JSTracer* aTrc, void** aThingp, JSGCTraceKind aTraceKind)
436 NoteJSChild(aTrc, *aThingp, aTraceKind);
439 static void
440 NoteJSChildGrayWrapperShim(void* aData, void* aThing)
442 TraversalTracer* trc = static_cast<TraversalTracer*>(aData);
443 NoteJSChild(trc, aThing, js::GCThingTraceKind(aThing));
447 * The cycle collection participant for a Zone is intended to produce the same
448 * results as if all of the gray GCthings in a zone were merged into a single node,
449 * except for self-edges. This avoids the overhead of representing all of the GCthings in
450 * the zone in the cycle collector graph, which should be much faster if many of
451 * the GCthings in the zone are gray.
453 * Zone merging should not always be used, because it is a conservative
454 * approximation of the true cycle collector graph that can incorrectly identify some
455 * garbage objects as being live. For instance, consider two cycles that pass through a
456 * zone, where one is garbage and the other is live. If we merge the entire
457 * zone, the cycle collector will think that both are alive.
459 * We don't have to worry about losing track of a garbage cycle, because any such garbage
460 * cycle incorrectly identified as live must contain at least one C++ to JS edge, and
461 * XPConnect will always add the C++ object to the CC graph. (This is in contrast to pure
462 * C++ garbage cycles, which must always be properly identified, because we clear the
463 * purple buffer during every CC, which may contain the last reference to a garbage
464 * cycle.)
467 // NB: This is only used to initialize the participant in
468 // CycleCollectedJSRuntime. It should never be used directly.
469 static const JSZoneParticipant sJSZoneCycleCollectorGlobal;
// Creates the JSRuntime and wires up all GC/OOM/context callbacks, then
// registers this object with the cycle collector. Allocation failures
// here are fatal (MOZ_CRASH): the process cannot run without a JS runtime.
471 CycleCollectedJSRuntime::CycleCollectedJSRuntime(JSRuntime* aParentRuntime,
472 uint32_t aMaxBytes,
473 uint32_t aMaxNurseryBytes)
474 : mGCThingCycleCollectorGlobal(sGCThingCycleCollectorGlobal)
475 , mJSZoneCycleCollectorGlobal(sJSZoneCycleCollectorGlobal)
476 , mJSRuntime(nullptr)
477 , mJSHolders(256)
478 , mOutOfMemoryState(OOMState::OK)
479 , mLargeAllocationFailureState(OOMState::OK)
481 mozilla::dom::InitScriptSettings();
483 mJSRuntime = JS_NewRuntime(aMaxBytes, aMaxNurseryBytes, aParentRuntime);
484 if (!mJSRuntime) {
485 MOZ_CRASH();
// Category-2 (black) roots are traced via TraceBlackJS; category-3 (gray)
// roots via TraceGrayJS (see the file-header comment).
488 if (!JS_AddExtraGCRootsTracer(mJSRuntime, TraceBlackJS, this)) {
489 MOZ_CRASH();
491 JS_SetGrayGCRootsTracer(mJSRuntime, TraceGrayJS, this);
492 JS_SetGCCallback(mJSRuntime, GCCallback, this);
493 JS::SetOutOfMemoryCallback(mJSRuntime, OutOfMemoryCallback, this);
494 JS::SetLargeAllocationFailureCallback(mJSRuntime,
495 LargeAllocationFailureCallback, this);
496 JS_SetContextCallback(mJSRuntime, ContextCallback, this);
497 JS_SetDestroyZoneCallback(mJSRuntime, XPCStringConvert::FreeZoneCache);
498 JS_SetSweepZoneCallback(mJSRuntime, XPCStringConvert::ClearZoneCache);
500 static js::DOMCallbacks DOMcallbacks = {
501 InstanceClassHasProtoAtDepth
503 SetDOMCallbacks(mJSRuntime, &DOMcallbacks);
505 nsCycleCollector_registerJSRuntime(this);
// Tears down the JS runtime; deferred finalization must already have
// completed (asserted below) before destruction.
508 CycleCollectedJSRuntime::~CycleCollectedJSRuntime()
510 MOZ_ASSERT(mJSRuntime);
511 MOZ_ASSERT(!mDeferredFinalizerTable.Count());
512 MOZ_ASSERT(!mDeferredSupports.Length());
514 // Clear mPendingException first, since it might be cycle collected.
515 mPendingException = nullptr;
517 JS_DestroyRuntime(mJSRuntime);
518 mJSRuntime = nullptr;
519 nsCycleCollector_forgetJSRuntime();
521 mozilla::dom::DestroyScriptSettings();
524 size_t
// Memory-reporter hook: currently only measures the mJSHolders table
// itself, not anything hanging off its entries.
525 CycleCollectedJSRuntime::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
527 size_t n = 0;
529 // nullptr for the second arg; we're not measuring anything hanging off the
530 // entries in mJSHolders.
531 n += mJSHolders.SizeOfExcludingThis(nullptr, aMallocSizeOf);
533 return n;
536 static PLDHashOperator
// Per-holder enumerator: asks the holder's tracer to unmark it via
// CanSkip(holder, true).
537 UnmarkJSHolder(void* aHolder, nsScriptObjectTracer*& aTracer, void* aArg)
539 aTracer->CanSkip(aHolder, true);
540 return PL_DHASH_NEXT;
543 void
// Runs UnmarkJSHolder over every registered JS holder.
544 CycleCollectedJSRuntime::UnmarkSkippableJSHolders()
546 mJSHolders.Enumerate(UnmarkJSHolder, nullptr);
549 void
// Produce a human-readable description of a GC thing for the CC graph.
// Without WantDebugInfo everything is just "JS Object"; with it, objects
// get class/function names and other kinds get their trace-kind name.
550 CycleCollectedJSRuntime::DescribeGCThing(bool aIsMarked, void* aThing,
551 JSGCTraceKind aTraceKind,
552 nsCycleCollectionTraversalCallback& aCb) const
554 if (!aCb.WantDebugInfo()) {
555 aCb.DescribeGCedNode(aIsMarked, "JS Object");
556 return;
559 char name[72];
560 uint64_t compartmentAddress = 0;
561 if (aTraceKind == JSTRACE_OBJECT) {
562 JSObject* obj = static_cast<JSObject*>(aThing);
563 compartmentAddress = (uint64_t)js::GetObjectCompartment(obj);
564 const js::Class* clasp = js::GetObjectClass(obj);
566 // Give the subclass a chance to do something
567 if (DescribeCustomObjects(obj, clasp, name)) {
568 // Nothing else to do!
569 } else if (js::IsFunctionObject(obj)) {
// Functions are labeled with their display name when available.
570 JSFunction* fun = JS_GetObjectFunction(obj);
571 JSString* str = JS_GetFunctionDisplayId(fun);
572 if (str) {
573 JSFlatString* flat = JS_ASSERT_STRING_IS_FLAT(str);
574 nsAutoString chars;
575 AssignJSFlatString(chars, flat);
576 NS_ConvertUTF16toUTF8 fname(chars);
577 JS_snprintf(name, sizeof(name),
578 "JS Object (Function - %s)", fname.get());
579 } else {
580 JS_snprintf(name, sizeof(name), "JS Object (Function)");
582 } else {
583 JS_snprintf(name, sizeof(name), "JS Object (%s)",
584 clasp->name);
586 } else {
// Non-object kinds: name the trace kind, indexed by aTraceKind. The
// static_assert keeps this table in sync with the JSTRACE_* enum.
587 static const char trace_types[][11] = {
588 "Object",
589 "String",
590 "Symbol",
591 "Script",
592 "LazyScript",
593 "IonCode",
594 "Shape",
595 "BaseShape",
596 "TypeObject",
598 static_assert(MOZ_ARRAY_LENGTH(trace_types) == JSTRACE_LAST + 1,
599 "JSTRACE_LAST enum must match trace_types count.");
600 JS_snprintf(name, sizeof(name), "JS %s", trace_types[aTraceKind]);
603 // Disable printing global for objects while we figure out ObjShrink fallout.
604 aCb.DescribeGCedNode(aIsMarked, name, compartmentAddress);
607 void
// Report all JS children of a GC thing to the traversal callback by
// running a TraversalTracer over it.
608 CycleCollectedJSRuntime::NoteGCThingJSChildren(void* aThing,
609 JSGCTraceKind aTraceKind,
610 nsCycleCollectionTraversalCallback& aCb) const
612 MOZ_ASSERT(mJSRuntime);
613 TraversalTracer trc(mJSRuntime, aCb);
614 JS_TraceChildren(&trc, aThing, aTraceKind);
617 void
// Report the C++ (XPCOM/native) children held by a JS object: first let
// the subclass handle custom classes, then check the JSCLASS private
// slot, then DOM bindings.
618 CycleCollectedJSRuntime::NoteGCThingXPCOMChildren(const js::Class* aClasp,
619 JSObject* aObj,
620 nsCycleCollectionTraversalCallback& aCb) const
622 MOZ_ASSERT(aClasp);
623 MOZ_ASSERT(aClasp == js::GetObjectClass(aObj));
625 if (NoteCustomGCThingXPCOMChildren(aClasp, aObj, aCb)) {
626 // Nothing else to do!
627 return;
629 // XXX This test does seem fragile, we should probably whitelist classes
630 // that do hold a strong reference, but that might not be possible.
631 else if (aClasp->flags & JSCLASS_HAS_PRIVATE &&
632 aClasp->flags & JSCLASS_PRIVATE_IS_NSISUPPORTS) {
633 NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(aCb, "js::GetObjectPrivate(obj)");
634 aCb.NoteXPCOMChild(static_cast<nsISupports*>(js::GetObjectPrivate(aObj)));
635 } else {
// DOM objects report their wrapped native either as nsISupports or via
// the class's own CC participant.
636 const DOMJSClass* domClass = GetDOMClass(aObj);
637 if (domClass) {
638 NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(aCb, "UnwrapDOMObject(obj)");
639 if (domClass->mDOMObjectIsISupports) {
640 aCb.NoteXPCOMChild(UnwrapDOMObject<nsISupports>(aObj));
641 } else if (domClass->mParticipant) {
642 aCb.NoteNativeChild(UnwrapDOMObject<void>(aObj),
643 domClass->mParticipant);
649 void
// Traverse one GC thing into the CC graph. TRAVERSE_FULL describes the
// node and notes both JS and C++ children; TRAVERSE_CPP (used by zone
// merging) notes only C++ children.
650 CycleCollectedJSRuntime::TraverseGCThing(TraverseSelect aTs, void* aThing,
651 JSGCTraceKind aTraceKind,
652 nsCycleCollectionTraversalCallback& aCb)
654 MOZ_ASSERT(aTraceKind == js::GCThingTraceKind(aThing));
655 bool isMarkedGray = xpc_IsGrayGCThing(aThing);
657 if (aTs == TRAVERSE_FULL) {
658 DescribeGCThing(!isMarkedGray, aThing, aTraceKind, aCb);
661 // If this object is alive, then all of its children are alive. For JS objects,
662 // the black-gray invariant ensures the children are also marked black. For C++
663 // objects, the ref count from this object will keep them alive. Thus we don't
664 // need to trace our children, unless we are debugging using WantAllTraces.
665 if (!isMarkedGray && !aCb.WantAllTraces()) {
666 return;
669 if (aTs == TRAVERSE_FULL) {
670 NoteGCThingJSChildren(aThing, aTraceKind, aCb);
// Only JS objects can hold C++ children.
673 if (aTraceKind == JSTRACE_OBJECT) {
674 JSObject* obj = static_cast<JSObject*>(aThing);
675 NoteGCThingXPCOMChildren(js::GetObjectClass(obj), obj, aCb);
// Carries the callback and runtime through js::IterateGrayObjects.
679 struct TraverseObjectShimClosure
681 nsCycleCollectionTraversalCallback& cb;
682 CycleCollectedJSRuntime* self;
685 void
// Traverse a whole zone as a single merged CC graph node (see the long
// comment above about zone merging trade-offs).
686 CycleCollectedJSRuntime::TraverseZone(JS::Zone* aZone,
687 nsCycleCollectionTraversalCallback& aCb)
690 * We treat the zone as being gray. We handle non-gray GCthings in the
691 * zone by not reporting their children to the CC. The black-gray invariant
692 * ensures that any JS children will also be non-gray, and thus don't need to be
693 * added to the graph. For C++ children, not representing the edge from the
694 * non-gray JS GCthings to the C++ object will keep the child alive.
696 * We don't allow zone merging in a WantAllTraces CC, because then these
697 * assumptions don't hold.
699 aCb.DescribeGCedNode(false, "JS Zone")
702 * Every JS child of everything in the zone is either in the zone
703 * or is a cross-compartment wrapper. In the former case, we don't need to
704 * represent these edges in the CC graph because JS objects are not ref counted.
705 * In the latter case, the JS engine keeps a map of these wrappers, which we
706 * iterate over. Edges between compartments in the same zone will add
707 * unnecessary loop edges to the graph (bug 842137).
709 TraversalTracer trc(mJSRuntime, aCb);
710 js::VisitGrayWrapperTargets(aZone, NoteJSChildGrayWrapperShim, &trc);
713 * To find C++ children of things in the zone, we scan every JS Object in
714 * the zone. Only JS Objects can have C++ children.
716 TraverseObjectShimClosure closure = { aCb, this };
717 js::IterateGrayObjects(aZone, TraverseObjectShim, &closure);
720 /* static */ void
// Callback for js::IterateGrayObjects: traverses each gray object in
// C++-children-only mode (JS edges are covered by the merged zone node).
721 CycleCollectedJSRuntime::TraverseObjectShim(void* aData, void* aThing)
723 TraverseObjectShimClosure* closure =
724 static_cast<TraverseObjectShimClosure*>(aData);
726 MOZ_ASSERT(js::GCThingTraceKind(aThing) == JSTRACE_OBJECT);
727 closure->self->TraverseGCThing(CycleCollectedJSRuntime::TRAVERSE_CPP, aThing,
728 JSTRACE_OBJECT, closure->cb);
731 void
// Note subclass-specific native roots, then every registered JS holder
// that (in a normal CC) holds something gray.
732 CycleCollectedJSRuntime::TraverseNativeRoots(nsCycleCollectionNoteRootCallback& aCb)
734 // NB: This is here just to preserve the existing XPConnect order. I doubt it
735 // would hurt to do this after the JS holders.
736 TraverseAdditionalNativeRoots(aCb);
738 Closure closure(&aCb);
739 mJSHolders.Enumerate(NoteJSHolder, &closure);
742 /* static */ void
// JS engine extra-roots tracer: forwards to the virtual black-root hook.
743 CycleCollectedJSRuntime::TraceBlackJS(JSTracer* aTracer, void* aData)
745 CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);
747 self->TraceNativeBlackRoots(aTracer);
750 /* static */ void
// JS engine gray-roots tracer: forwards to the virtual gray-root hook.
751 CycleCollectedJSRuntime::TraceGrayJS(JSTracer* aTracer, void* aData)
753 CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);
755 // Mark these roots as gray so the CC can walk them later.
756 self->TraceNativeGrayRoots(aTracer);
759 /* static */ void
// GC begin/end callback: forwards the status to OnGC.
760 CycleCollectedJSRuntime::GCCallback(JSRuntime* aRuntime,
761 JSGCStatus aStatus,
762 void* aData)
764 CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);
766 MOZ_ASSERT(aRuntime == self->Runtime());
768 self->OnGC(aStatus);
771 /* static */ void
// OOM callback: forwards to OnOutOfMemory.
772 CycleCollectedJSRuntime::OutOfMemoryCallback(JSContext* aContext,
773 void* aData)
775 CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);
777 MOZ_ASSERT(JS_GetRuntime(aContext) == self->Runtime());
779 self->OnOutOfMemory();
782 /* static */ void
// Large-allocation-failure callback: forwards to OnLargeAllocationFailure.
783 CycleCollectedJSRuntime::LargeAllocationFailureCallback(void* aData)
785 CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);
787 self->OnLargeAllocationFailure();
790 /* static */ bool
// Context create/destroy callback: forwards to the virtual hook.
791 CycleCollectedJSRuntime::ContextCallback(JSContext* aContext,
792 unsigned aOperation,
793 void* aData)
795 CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);
797 MOZ_ASSERT(JS_GetRuntime(aContext) == self->Runtime());
799 return self->CustomContextCallback(aContext, aOperation);
// TraceCallbacks implementation that forwards each heap slot to the
// corresponding JS_Call*Tracer, with the JSTracer passed via aClosure.
// Used to mark everything a JS holder keeps alive during GC.
802 struct JsGcTracer : public TraceCallbacks
804 virtual void Trace(JS::Heap<JS::Value>* aPtr, const char* aName,
805 void* aClosure) const MOZ_OVERRIDE
807 JS_CallValueTracer(static_cast<JSTracer*>(aClosure), aPtr, aName);
809 virtual void Trace(JS::Heap<jsid>* aPtr, const char* aName,
810 void* aClosure) const MOZ_OVERRIDE
812 JS_CallIdTracer(static_cast<JSTracer*>(aClosure), aPtr, aName);
814 virtual void Trace(JS::Heap<JSObject*>* aPtr, const char* aName,
815 void* aClosure) const MOZ_OVERRIDE
817 JS_CallObjectTracer(static_cast<JSTracer*>(aClosure), aPtr, aName);
819 virtual void Trace(JS::TenuredHeap<JSObject*>* aPtr, const char* aName,
820 void* aClosure) const MOZ_OVERRIDE
822 JS_CallTenuredObjectTracer(static_cast<JSTracer*>(aClosure), aPtr, aName);
824 virtual void Trace(JS::Heap<JSString*>* aPtr, const char* aName,
825 void* aClosure) const MOZ_OVERRIDE
827 JS_CallStringTracer(static_cast<JSTracer*>(aClosure), aPtr, aName);
829 virtual void Trace(JS::Heap<JSScript*>* aPtr, const char* aName,
830 void* aClosure) const MOZ_OVERRIDE
832 JS_CallScriptTracer(static_cast<JSTracer*>(aClosure), aPtr, aName);
834 virtual void Trace(JS::Heap<JSFunction*>* aPtr, const char* aName,
835 void* aClosure) const MOZ_OVERRIDE
837 JS_CallFunctionTracer(static_cast<JSTracer*>(aClosure), aPtr, aName);
841 static PLDHashOperator
// Per-holder enumerator: traces the holder's JS things with JsGcTracer,
// passing the JSTracer through as the closure.
842 TraceJSHolder(void* aHolder, nsScriptObjectTracer*& aTracer, void* aArg)
844 aTracer->Trace(aHolder, JsGcTracer(), aArg);
846 return PL_DHASH_NEXT;
849 void
850 mozilla::TraceScriptHolder(nsISupports* aHolder, JSTracer* aTracer)
852 nsXPCOMCycleCollectionParticipant* participant = nullptr;
853 CallQueryInterface(aHolder, &participant);
854 participant->Trace(aHolder, JsGcTracer(), aTracer);
857 void
// Phase-3 gray-root tracing (see the file-header comment): subclass
// extras first, then every registered JS holder.
858 CycleCollectedJSRuntime::TraceNativeGrayRoots(JSTracer* aTracer)
860 // NB: This is here just to preserve the existing XPConnect order. I doubt it
861 // would hurt to do this after the JS holders.
862 TraceAdditionalNativeGrayRoots(aTracer);
864 mJSHolders.Enumerate(TraceJSHolder, aTracer);
867 void
// Register (or re-register) a holder and the tracer that enumerates its
// JS things.
868 CycleCollectedJSRuntime::AddJSHolder(void* aHolder, nsScriptObjectTracer* aTracer)
870 mJSHolders.Put(aHolder, aTracer);
// TraceCallbacks implementation that nulls out every JS slot it visits.
// Used by RemoveJSHolder so a removed holder no longer references JS.
873 struct ClearJSHolder : TraceCallbacks
875 virtual void Trace(JS::Heap<JS::Value>* aPtr, const char*, void*) const MOZ_OVERRIDE
877 *aPtr = JSVAL_VOID;
880 virtual void Trace(JS::Heap<jsid>* aPtr, const char*, void*) const MOZ_OVERRIDE
882 *aPtr = JSID_VOID;
885 virtual void Trace(JS::Heap<JSObject*>* aPtr, const char*, void*) const MOZ_OVERRIDE
887 *aPtr = nullptr;
890 virtual void Trace(JS::TenuredHeap<JSObject*>* aPtr, const char*, void*) const MOZ_OVERRIDE
892 *aPtr = nullptr;
895 virtual void Trace(JS::Heap<JSString*>* aPtr, const char*, void*) const MOZ_OVERRIDE
897 *aPtr = nullptr;
900 virtual void Trace(JS::Heap<JSScript*>* aPtr, const char*, void*) const MOZ_OVERRIDE
902 *aPtr = nullptr;
905 virtual void Trace(JS::Heap<JSFunction*>* aPtr, const char*, void*) const MOZ_OVERRIDE
907 *aPtr = nullptr;
911 void
// Unregister a holder: clear its JS references first (via ClearJSHolder),
// then drop it from the table. No-op if it was never registered.
912 CycleCollectedJSRuntime::RemoveJSHolder(void* aHolder)
914 nsScriptObjectTracer* tracer = mJSHolders.Get(aHolder);
915 if (!tracer) {
916 return;
918 tracer->Trace(aHolder, ClearJSHolder(), nullptr);
919 mJSHolders.Remove(aHolder);
922 #ifdef DEBUG
923 bool
// Debug-only: whether aHolder is currently registered.
924 CycleCollectedJSRuntime::IsJSHolder(void* aHolder)
926 return mJSHolders.Get(aHolder, nullptr);
929 static void
// Trace callback that asserts the traced slot is already null.
930 AssertNoGcThing(void* aGCThing, const char* aName, void* aClosure)
932 MOZ_ASSERT(!aGCThing);
935 void
// Debug-only: assert that a (possibly registered) holder holds no live
// JS things, by tracing it with AssertNoGcThing.
936 CycleCollectedJSRuntime::AssertNoObjectsToTrace(void* aPossibleJSHolder)
938 nsScriptObjectTracer* tracer = mJSHolders.Get(aPossibleJSHolder);
939 if (tracer) {
940 tracer->Trace(aPossibleJSHolder, TraceCallbackFunc(AssertNoGcThing), nullptr);
943 #endif
945 already_AddRefed<nsIException>
// Returns an owning reference to the pending exception (may be null).
946 CycleCollectedJSRuntime::GetPendingException() const
948 nsCOMPtr<nsIException> out = mPendingException;
949 return out.forget();
952 void
// Replaces the pending exception (nsCOMPtr handles refcounting).
953 CycleCollectedJSRuntime::SetPendingException(nsIException* aException)
955 mPendingException = aException;
958 nsCycleCollectionParticipant*
// Participant used for traversing individual GC things.
959 CycleCollectedJSRuntime::GCThingParticipant()
961 return &mGCThingCycleCollectorGlobal;
964 nsCycleCollectionParticipant*
// Participant used for traversing merged zones.
965 CycleCollectedJSRuntime::ZoneParticipant()
967 return &mJSZoneCycleCollectorGlobal;
970 nsresult
// CC entry point: notes native roots, then walks all weak maps so the
// graph captures weak-mapping liveness relationships.
971 CycleCollectedJSRuntime::TraverseRoots(nsCycleCollectionNoteRootCallback& aCb)
973 TraverseNativeRoots(aCb);
975 NoteWeakMapsTracer trc(mJSRuntime, TraceWeakMapping, aCb);
976 js::TraceWeakMaps(&trc);
978 return NS_OK;
982 * Return true if there exists a JSContext with a default global whose current
983 * inner is gray. The intent is to look for JS Object windows. We don't merge
984 * system compartments, so we don't use them to trigger merging CCs.
986 bool
987 CycleCollectedJSRuntime::UsefulToMergeZones() const
// Only the main thread has script contexts/windows to inspect.
989 if (!NS_IsMainThread()) {
990 return false;
993 JSContext* iter = nullptr;
994 JSContext* cx;
995 JSAutoRequest ar(nsContentUtils::GetSafeJSContext());
996 while ((cx = JS_ContextIterator(mJSRuntime, &iter))) {
997 // Skip anything without an nsIScriptContext.
998 nsIScriptContext* scx = GetScriptContextFromJSContext(cx);
999 JS::RootedObject obj(cx, scx ? scx->GetWindowProxyPreserveColor() : nullptr);
1000 if (!obj) {
1001 continue;
1003 MOZ_ASSERT(js::IsOuterObject(obj));
1004 // Grab the inner from the outer.
1005 obj = JS_ObjectToInnerObject(cx, obj);
1006 MOZ_ASSERT(!js::GetObjectParent(obj));
// A gray, non-system window global means merging would pay off.
1007 if (JS::GCThingIsMarkedGray(obj) &&
1008 !js::IsSystemCompartment(js::GetObjectCompartment(obj))) {
1009 return true;
1012 return false;
1015 void
// Run the weak-map gray-bit fixup to a fixed point. Must not run during
// an (incremental) GC, as asserted.
1016 CycleCollectedJSRuntime::FixWeakMappingGrayBits() const
1018 MOZ_ASSERT(!JS::IsIncrementalGCInProgress(mJSRuntime),
1019 "Don't call FixWeakMappingGrayBits during a GC.");
1020 FixWeakMappingGrayBitsTracer fixer(mJSRuntime);
1021 fixer.FixAll();
1024 bool
// Whether the engine's gray marking state is currently trustworthy.
1025 CycleCollectedJSRuntime::AreGCGrayBitsValid() const
1027 return js::AreGCGrayBitsValid(mJSRuntime);
1030 void
// Trigger a full (all-zones) GC for the given JS::gcreason code.
1031 CycleCollectedJSRuntime::GarbageCollect(uint32_t aReason) const
1033 MOZ_ASSERT(aReason < JS::gcreason::NUM_REASONS);
1034 JS::gcreason::Reason gcreason = static_cast<JS::gcreason::Reason>(aReason);
1036 JS::PrepareForFullGC(mJSRuntime);
1037 JS::GCForReason(mJSRuntime, gcreason);
1040 void
1041 CycleCollectedJSRuntime::DeferredFinalize(DeferredFinalizeAppendFunction aAppendFunc,
1042 DeferredFinalizeFunction aFunc,
1043 void* aThing)
1045 void* thingArray = nullptr;
1046 bool hadThingArray = mDeferredFinalizerTable.Get(aFunc, &thingArray);
1048 thingArray = aAppendFunc(thingArray, aThing);
1049 if (!hadThingArray) {
1050 mDeferredFinalizerTable.Put(aFunc, thingArray);
1054 void
1055 CycleCollectedJSRuntime::DeferredFinalize(nsISupports* aSupports)
1057 mDeferredSupports.AppendElement(aSupports);
1060 void
1061 CycleCollectedJSRuntime::DumpJSHeap(FILE* aFile)
1063 js::DumpHeapComplete(Runtime(), aFile, js::CollectNurseryBeforeDump);
1067 bool
1068 ReleaseSliceNow(uint32_t aSlice, void* aData)
1070 MOZ_ASSERT(aSlice > 0, "nonsensical/useless call with slice == 0");
1071 nsTArray<nsISupports*>* items = static_cast<nsTArray<nsISupports*>*>(aData);
1073 uint32_t length = items->Length();
1074 aSlice = std::min(aSlice, length);
1075 for (uint32_t i = length; i > length - aSlice; --i) {
1076 // Remove (and NS_RELEASE) the last entry in "items":
1077 uint32_t lastItemIdx = i - 1;
1079 nsISupports* wrapper = items->ElementAt(lastItemIdx);
1080 items->RemoveElementAt(lastItemIdx);
1081 NS_IF_RELEASE(wrapper);
1084 return items->IsEmpty();
1087 /* static */ PLDHashOperator
1088 IncrementalFinalizeRunnable::DeferredFinalizerEnumerator(DeferredFinalizeFunction& aFunction,
1089 void*& aData,
1090 void* aClosure)
1092 DeferredFinalizeArray* array = static_cast<DeferredFinalizeArray*>(aClosure);
1094 DeferredFinalizeFunctionHolder* function = array->AppendElement();
1095 function->run = aFunction;
1096 function->data = aData;
1098 return PL_DHASH_REMOVE;
1101 IncrementalFinalizeRunnable::IncrementalFinalizeRunnable(CycleCollectedJSRuntime* aRt,
1102 nsTArray<nsISupports*>& aSupports,
1103 DeferredFinalizerTable& aFinalizers)
1104 : mRuntime(aRt)
1105 , mFinalizeFunctionToRun(0)
1107 this->mSupports.SwapElements(aSupports);
1108 DeferredFinalizeFunctionHolder* function =
1109 mDeferredFinalizeFunctions.AppendElement();
1110 function->run = ReleaseSliceNow;
1111 function->data = &this->mSupports;
1113 // Enumerate the hashtable into our array.
1114 aFinalizers.Enumerate(DeferredFinalizerEnumerator, &mDeferredFinalizeFunctions);
1117 IncrementalFinalizeRunnable::~IncrementalFinalizeRunnable()
1119 MOZ_ASSERT(this != mRuntime->mFinalizeRunnable);
1122 void
1123 IncrementalFinalizeRunnable::ReleaseNow(bool aLimited)
1125 //MOZ_ASSERT(NS_IsMainThread());
1126 MOZ_ASSERT(mDeferredFinalizeFunctions.Length() != 0,
1127 "We should have at least ReleaseSliceNow to run");
1128 MOZ_ASSERT(mFinalizeFunctionToRun < mDeferredFinalizeFunctions.Length(),
1129 "No more finalizers to run?");
1131 TimeDuration sliceTime = TimeDuration::FromMilliseconds(SliceMillis);
1132 TimeStamp started = TimeStamp::Now();
1133 bool timeout = false;
1134 do {
1135 const DeferredFinalizeFunctionHolder& function =
1136 mDeferredFinalizeFunctions[mFinalizeFunctionToRun];
1137 if (aLimited) {
1138 bool done = false;
1139 while (!timeout && !done) {
1141 * We don't want to read the clock too often, so we try to
1142 * release slices of 100 items.
1144 done = function.run(100, function.data);
1145 timeout = TimeStamp::Now() - started >= sliceTime;
1147 if (done) {
1148 ++mFinalizeFunctionToRun;
1150 if (timeout) {
1151 break;
1153 } else {
1154 function.run(UINT32_MAX, function.data);
1155 ++mFinalizeFunctionToRun;
1157 } while (mFinalizeFunctionToRun < mDeferredFinalizeFunctions.Length());
1159 if (mFinalizeFunctionToRun == mDeferredFinalizeFunctions.Length()) {
1160 MOZ_ASSERT(mRuntime->mFinalizeRunnable == this);
1161 mDeferredFinalizeFunctions.Clear();
1162 // NB: This may delete this!
1163 mRuntime->mFinalizeRunnable = nullptr;
1167 NS_IMETHODIMP
1168 IncrementalFinalizeRunnable::Run()
1170 if (mRuntime->mFinalizeRunnable != this) {
1171 /* These items were already processed synchronously in JSGC_END. */
1172 MOZ_ASSERT(!mSupports.Length());
1173 MOZ_ASSERT(!mDeferredFinalizeFunctions.Length());
1174 return NS_OK;
1177 ReleaseNow(true);
1179 if (mDeferredFinalizeFunctions.Length()) {
1180 nsresult rv = NS_DispatchToCurrentThread(this);
1181 if (NS_FAILED(rv)) {
1182 ReleaseNow(false);
1186 return NS_OK;
1189 void
1190 CycleCollectedJSRuntime::FinalizeDeferredThings(DeferredFinalizeType aType)
1192 MOZ_ASSERT(!mFinalizeRunnable);
1193 mFinalizeRunnable = new IncrementalFinalizeRunnable(this,
1194 mDeferredSupports,
1195 mDeferredFinalizerTable);
1197 // Everything should be gone now.
1198 MOZ_ASSERT(!mDeferredSupports.Length());
1199 MOZ_ASSERT(!mDeferredFinalizerTable.Count());
1201 if (aType == FinalizeIncrementally) {
1202 NS_DispatchToCurrentThread(mFinalizeRunnable);
1203 } else {
1204 mFinalizeRunnable->ReleaseNow(false);
1205 MOZ_ASSERT(!mFinalizeRunnable);
1209 void
1210 CycleCollectedJSRuntime::AnnotateAndSetOutOfMemory(OOMState* aStatePtr,
1211 OOMState aNewState)
1213 *aStatePtr = aNewState;
1214 #ifdef MOZ_CRASHREPORTER
1215 CrashReporter::AnnotateCrashReport(aStatePtr == &mOutOfMemoryState
1216 ? NS_LITERAL_CSTRING("JSOutOfMemory")
1217 : NS_LITERAL_CSTRING("JSLargeAllocationFailure"),
1218 aNewState == OOMState::Reporting
1219 ? NS_LITERAL_CSTRING("Reporting")
1220 : aNewState == OOMState::Reported
1221 ? NS_LITERAL_CSTRING("Reported")
1222 : NS_LITERAL_CSTRING("Recovered"));
1223 #endif
1226 void
1227 CycleCollectedJSRuntime::OnGC(JSGCStatus aStatus)
1229 switch (aStatus) {
1230 case JSGC_BEGIN:
1231 nsCycleCollector_prepareForGarbageCollection();
1232 break;
1233 case JSGC_END: {
1234 #ifdef MOZ_CRASHREPORTER
1235 if (mOutOfMemoryState == OOMState::Reported) {
1236 AnnotateAndSetOutOfMemory(&mOutOfMemoryState, OOMState::Recovered);
1238 if (mLargeAllocationFailureState == OOMState::Reported) {
1239 AnnotateAndSetOutOfMemory(&mLargeAllocationFailureState, OOMState::Recovered);
1241 #endif
1244 * If the previous GC created a runnable to finalize objects
1245 * incrementally, and if it hasn't finished yet, finish it now. We
1246 * don't want these to build up. We also don't want to allow any
1247 * existing incremental finalize runnables to run after a
1248 * non-incremental GC, since they are often used to detect leaks.
1250 if (mFinalizeRunnable) {
1251 mFinalizeRunnable->ReleaseNow(false);
1254 // Do any deferred finalization of native objects.
1255 FinalizeDeferredThings(JS::WasIncrementalGC(mJSRuntime) ? FinalizeIncrementally :
1256 FinalizeNow);
1257 break;
1259 default:
1260 MOZ_CRASH();
1263 CustomGCCallback(aStatus);
1266 void
1267 CycleCollectedJSRuntime::OnOutOfMemory()
1269 AnnotateAndSetOutOfMemory(&mOutOfMemoryState, OOMState::Reporting);
1270 CustomOutOfMemoryCallback();
1271 AnnotateAndSetOutOfMemory(&mOutOfMemoryState, OOMState::Reported);
1274 void
1275 CycleCollectedJSRuntime::OnLargeAllocationFailure()
1277 AnnotateAndSetOutOfMemory(&mLargeAllocationFailureState, OOMState::Reporting);
1278 CustomLargeAllocationFailureCallback();
1279 AnnotateAndSetOutOfMemory(&mLargeAllocationFailureState, OOMState::Reported);