Bug 964976 - Prevent crash of unsupported pixel format gralloc allocation. r=nical
[gecko.git] / xpcom / base / CycleCollectedJSRuntime.cpp
blobac917ee51d47d7d7e3887af24a1550ed39ce4e2a
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
2 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
3 /* This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 // We're dividing JS objects into 3 categories:
8 //
9 // 1. "real" roots, held by the JS engine itself or rooted through the root
10 // and lock JS APIs. Roots from this category are considered black in the
11 // cycle collector, any cycle they participate in is uncollectable.
13 // 2. certain roots held by C++ objects that are guaranteed to be alive.
14 // Roots from this category are considered black in the cycle collector,
15 // and any cycle they participate in is uncollectable. These roots are
16 // traced from TraceNativeBlackRoots.
18 // 3. all other roots held by C++ objects that participate in cycle
19 // collection, held by us (see TraceNativeGrayRoots). Roots from this
20 // category are considered grey in the cycle collector; whether or not
21 // they are collected depends on the objects that hold them.
23 // Note that if a root is in multiple categories, it is the fact that it is in
24 // category 1 or 2 that takes precedence, so it will be considered black.
26 // During garbage collection we switch to an additional mark color (gray)
27 // when tracing inside TraceNativeGrayRoots. This allows us to walk those
28 // roots later on and add all objects reachable only from them to the
29 // cycle collector.
31 // Phases:
33 // 1. marking of the roots in category 1 by having the JS GC do its marking
34 // 2. marking of the roots in category 2 by having the JS GC call us back
35 // (via JS_SetExtraGCRootsTracer) and running TraceNativeBlackRoots
36 // 3. marking of the roots in category 3 by TraceNativeGrayRoots using an
37 // additional color (gray).
38 // 4. end of GC, GC can sweep its heap
40 // At some later point, when the cycle collector runs:
42 // 5. walk gray objects and add them to the cycle collector, cycle collect
44 // JS objects that are part of cycles the cycle collector breaks will be
45 // collected by the next JS GC.
47 // If WantAllTraces() is false the cycle collector will not traverse roots
48 // from category 1 or any JS objects held by them. Any JS objects they hold
49 // will already be marked by the JS GC and will thus be colored black
50 // themselves. Any C++ objects they hold will have a missing (untraversed)
51 // edge from the JS object to the C++ object and so it will be marked black
52 // too. This decreases the number of objects that the cycle collector has to
53 // deal with.
54 // To improve debugging, if WantAllTraces() is true all JS objects are
55 // traversed.
57 #include "mozilla/CycleCollectedJSRuntime.h"
58 #include <algorithm>
59 #include "mozilla/MemoryReporting.h"
60 #include "mozilla/dom/BindingUtils.h"
61 #include "mozilla/dom/DOMJSClass.h"
62 #include "mozilla/dom/ScriptSettings.h"
63 #include "jsprf.h"
64 #include "nsCycleCollectionNoteRootCallback.h"
65 #include "nsCycleCollectionParticipant.h"
66 #include "nsCycleCollector.h"
67 #include "nsDOMJSUtils.h"
68 #include "nsIException.h"
69 #include "nsThreadUtils.h"
70 #include "xpcpublic.h"
72 using namespace mozilla;
73 using namespace mozilla::dom;
75 namespace mozilla {
77 struct DeferredFinalizeFunctionHolder
79 DeferredFinalizeFunction run;
80 void *data;
83 class IncrementalFinalizeRunnable : public nsRunnable
85 typedef nsAutoTArray<DeferredFinalizeFunctionHolder, 16> DeferredFinalizeArray;
86 typedef CycleCollectedJSRuntime::DeferredFinalizerTable DeferredFinalizerTable;
88 CycleCollectedJSRuntime* mRuntime;
89 nsTArray<nsISupports*> mSupports;
90 DeferredFinalizeArray mDeferredFinalizeFunctions;
91 uint32_t mFinalizeFunctionToRun;
93 static const PRTime SliceMillis = 10; /* ms */
95 static PLDHashOperator
96 DeferredFinalizerEnumerator(DeferredFinalizeFunction& aFunction,
97 void*& aData,
98 void* aClosure);
100 public:
101 IncrementalFinalizeRunnable(CycleCollectedJSRuntime* aRt,
102 nsTArray<nsISupports*>& mSupports,
103 DeferredFinalizerTable& aFinalizerTable);
104 virtual ~IncrementalFinalizeRunnable();
106 void ReleaseNow(bool aLimited);
108 NS_DECL_NSIRUNNABLE
111 } // namespace mozilla
113 inline bool
114 AddToCCKind(JSGCTraceKind kind)
116 return kind == JSTRACE_OBJECT || kind == JSTRACE_SCRIPT;
119 struct NoteWeakMapChildrenTracer : public JSTracer
121 NoteWeakMapChildrenTracer(nsCycleCollectionNoteRootCallback& cb)
122 : mCb(cb)
125 nsCycleCollectionNoteRootCallback& mCb;
126 bool mTracedAny;
127 JSObject* mMap;
128 void* mKey;
129 void* mKeyDelegate;
132 static void
133 TraceWeakMappingChild(JSTracer* trc, void** thingp, JSGCTraceKind kind)
135 MOZ_ASSERT(trc->callback == TraceWeakMappingChild);
136 void* thing = *thingp;
137 NoteWeakMapChildrenTracer* tracer =
138 static_cast<NoteWeakMapChildrenTracer*>(trc);
140 if (kind == JSTRACE_STRING) {
141 return;
144 if (!xpc_IsGrayGCThing(thing) && !tracer->mCb.WantAllTraces()) {
145 return;
148 if (AddToCCKind(kind)) {
149 tracer->mCb.NoteWeakMapping(tracer->mMap, tracer->mKey, tracer->mKeyDelegate, thing);
150 tracer->mTracedAny = true;
151 } else {
152 JS_TraceChildren(trc, thing, kind);
156 struct NoteWeakMapsTracer : public js::WeakMapTracer
158 NoteWeakMapsTracer(JSRuntime* rt, js::WeakMapTraceCallback cb,
159 nsCycleCollectionNoteRootCallback& cccb)
160 : js::WeakMapTracer(rt, cb), mCb(cccb), mChildTracer(cccb)
162 JS_TracerInit(&mChildTracer, rt, TraceWeakMappingChild);
164 nsCycleCollectionNoteRootCallback& mCb;
165 NoteWeakMapChildrenTracer mChildTracer;
168 static void
169 TraceWeakMapping(js::WeakMapTracer* trc, JSObject* m,
170 void* k, JSGCTraceKind kkind,
171 void* v, JSGCTraceKind vkind)
173 MOZ_ASSERT(trc->callback == TraceWeakMapping);
174 NoteWeakMapsTracer* tracer = static_cast<NoteWeakMapsTracer* >(trc);
176 // If nothing that could be held alive by this entry is marked gray, return.
177 if ((!k || !xpc_IsGrayGCThing(k)) && MOZ_LIKELY(!tracer->mCb.WantAllTraces())) {
178 if (!v || !xpc_IsGrayGCThing(v) || vkind == JSTRACE_STRING) {
179 return;
183 // The cycle collector can only properly reason about weak maps if it can
184 // reason about the liveness of their keys, which in turn requires that
185 // the key can be represented in the cycle collector graph. All existing
186 // uses of weak maps use either objects or scripts as keys, which are okay.
187 MOZ_ASSERT(AddToCCKind(kkind));
189 // As an emergency fallback for non-debug builds, if the key is not
190 // representable in the cycle collector graph, we treat it as marked. This
191 // can cause leaks, but is preferable to ignoring the binding, which could
192 // cause the cycle collector to free live objects.
193 if (!AddToCCKind(kkind)) {
194 k = nullptr;
197 JSObject* kdelegate = nullptr;
198 if (k && kkind == JSTRACE_OBJECT) {
199 kdelegate = js::GetWeakmapKeyDelegate((JSObject*)k);
202 if (AddToCCKind(vkind)) {
203 tracer->mCb.NoteWeakMapping(m, k, kdelegate, v);
204 } else {
205 tracer->mChildTracer.mTracedAny = false;
206 tracer->mChildTracer.mMap = m;
207 tracer->mChildTracer.mKey = k;
208 tracer->mChildTracer.mKeyDelegate = kdelegate;
210 if (v && vkind != JSTRACE_STRING) {
211 JS_TraceChildren(&tracer->mChildTracer, v, vkind);
214 // The delegate could hold alive the key, so report something to the CC
215 // if we haven't already.
216 if (!tracer->mChildTracer.mTracedAny && k && xpc_IsGrayGCThing(k) && kdelegate) {
217 tracer->mCb.NoteWeakMapping(m, k, kdelegate, nullptr);
222 // This is based on the logic in TraceWeakMapping.
223 struct FixWeakMappingGrayBitsTracer : public js::WeakMapTracer
225 FixWeakMappingGrayBitsTracer(JSRuntime* rt)
226 : js::WeakMapTracer(rt, FixWeakMappingGrayBits)
229 void
230 FixAll()
232 do {
233 mAnyMarked = false;
234 js::TraceWeakMaps(this);
235 } while (mAnyMarked);
238 private:
240 static void
241 FixWeakMappingGrayBits(js::WeakMapTracer* trc, JSObject* m,
242 void* k, JSGCTraceKind kkind,
243 void* v, JSGCTraceKind vkind)
245 MOZ_ASSERT(!JS::IsIncrementalGCInProgress(trc->runtime),
246 "Don't call FixWeakMappingGrayBits during a GC.");
248 FixWeakMappingGrayBitsTracer* tracer = static_cast<FixWeakMappingGrayBitsTracer*>(trc);
250 // If nothing that could be held alive by this entry is marked gray, return.
251 bool delegateMightNeedMarking = k && xpc_IsGrayGCThing(k);
252 bool valueMightNeedMarking = v && xpc_IsGrayGCThing(v) && vkind != JSTRACE_STRING;
253 if (!delegateMightNeedMarking && !valueMightNeedMarking) {
254 return;
257 if (!AddToCCKind(kkind)) {
258 k = nullptr;
261 if (delegateMightNeedMarking && kkind == JSTRACE_OBJECT) {
262 JSObject* kdelegate = js::GetWeakmapKeyDelegate((JSObject*)k);
263 if (kdelegate && !xpc_IsGrayGCThing(kdelegate)) {
264 if (JS::UnmarkGrayGCThingRecursively(k, JSTRACE_OBJECT)) {
265 tracer->mAnyMarked = true;
270 if (v && xpc_IsGrayGCThing(v) &&
271 (!k || !xpc_IsGrayGCThing(k)) &&
272 (!m || !xpc_IsGrayGCThing(m)) &&
273 vkind != JSTRACE_SHAPE) {
274 if (JS::UnmarkGrayGCThingRecursively(v, vkind)) {
275 tracer->mAnyMarked = true;
280 bool mAnyMarked;
283 struct Closure
285 bool cycleCollectionEnabled;
286 nsCycleCollectionNoteRootCallback *cb;
289 static void
290 CheckParticipatesInCycleCollection(void *aThing, const char *name, void *aClosure)
292 Closure *closure = static_cast<Closure*>(aClosure);
294 if (closure->cycleCollectionEnabled) {
295 return;
298 if (AddToCCKind(js::GCThingTraceKind(aThing)) &&
299 xpc_IsGrayGCThing(aThing))
301 closure->cycleCollectionEnabled = true;
305 static PLDHashOperator
306 NoteJSHolder(void *holder, nsScriptObjectTracer *&tracer, void *arg)
308 Closure *closure = static_cast<Closure*>(arg);
310 closure->cycleCollectionEnabled = false;
311 tracer->Trace(holder, TraceCallbackFunc(CheckParticipatesInCycleCollection), closure);
312 if (closure->cycleCollectionEnabled) {
313 closure->cb->NoteNativeRoot(holder, tracer);
316 return PL_DHASH_NEXT;
319 NS_IMETHODIMP
320 JSGCThingParticipant::Traverse(void* p, nsCycleCollectionTraversalCallback& cb)
322 CycleCollectedJSRuntime* runtime = reinterpret_cast<CycleCollectedJSRuntime*>
323 (reinterpret_cast<char*>(this) -
324 offsetof(CycleCollectedJSRuntime, mGCThingCycleCollectorGlobal));
326 runtime->TraverseGCThing(CycleCollectedJSRuntime::TRAVERSE_FULL,
327 p, js::GCThingTraceKind(p), cb);
328 return NS_OK;
331 // NB: This is only used to initialize the participant in
332 // CycleCollectedJSRuntime. It should never be used directly.
333 static JSGCThingParticipant sGCThingCycleCollectorGlobal;
335 NS_IMETHODIMP
336 JSZoneParticipant::Traverse(void* p, nsCycleCollectionTraversalCallback& cb)
338 CycleCollectedJSRuntime* runtime = reinterpret_cast<CycleCollectedJSRuntime*>
339 (reinterpret_cast<char*>(this) -
340 offsetof(CycleCollectedJSRuntime, mJSZoneCycleCollectorGlobal));
342 MOZ_ASSERT(!cb.WantAllTraces());
343 JS::Zone* zone = static_cast<JS::Zone*>(p);
345 runtime->TraverseZone(zone, cb);
346 return NS_OK;
349 struct TraversalTracer : public JSTracer
351 TraversalTracer(nsCycleCollectionTraversalCallback& aCb) : mCb(aCb)
354 nsCycleCollectionTraversalCallback& mCb;
357 static void
358 NoteJSChild(JSTracer* aTrc, void* aThing, JSGCTraceKind aTraceKind)
360 TraversalTracer* tracer = static_cast<TraversalTracer*>(aTrc);
362 // Don't traverse non-gray objects, unless we want all traces.
363 if (!xpc_IsGrayGCThing(aThing) && !tracer->mCb.WantAllTraces()) {
364 return;
368 * This function needs to be careful to avoid stack overflow. Normally, when
369 * AddToCCKind is true, the recursion terminates immediately as we just add
370 * |thing| to the CC graph. So overflow is only possible when there are long
371 * chains of non-AddToCCKind GC things. Currently, this only can happen via
372 * shape parent pointers. The special JSTRACE_SHAPE case below handles
373 * parent pointers iteratively, rather than recursively, to avoid overflow.
375 if (AddToCCKind(aTraceKind)) {
376 if (MOZ_UNLIKELY(tracer->mCb.WantDebugInfo())) {
377 // based on DumpNotify in jsapi.c
378 if (tracer->debugPrinter) {
379 char buffer[200];
380 tracer->debugPrinter(aTrc, buffer, sizeof(buffer));
381 tracer->mCb.NoteNextEdgeName(buffer);
382 } else if (tracer->debugPrintIndex != (size_t)-1) {
383 char buffer[200];
384 JS_snprintf(buffer, sizeof(buffer), "%s[%lu]",
385 static_cast<const char *>(tracer->debugPrintArg),
386 tracer->debugPrintIndex);
387 tracer->mCb.NoteNextEdgeName(buffer);
388 } else {
389 tracer->mCb.NoteNextEdgeName(static_cast<const char*>(tracer->debugPrintArg));
392 tracer->mCb.NoteJSChild(aThing);
393 } else if (aTraceKind == JSTRACE_SHAPE) {
394 JS_TraceShapeCycleCollectorChildren(aTrc, aThing);
395 } else if (aTraceKind != JSTRACE_STRING) {
396 JS_TraceChildren(aTrc, aThing, aTraceKind);
400 static void
401 NoteJSChildTracerShim(JSTracer* aTrc, void** aThingp, JSGCTraceKind aTraceKind)
403 NoteJSChild(aTrc, *aThingp, aTraceKind);
406 static void
407 NoteJSChildGrayWrapperShim(void* aData, void* aThing)
409 TraversalTracer* trc = static_cast<TraversalTracer*>(aData);
410 NoteJSChild(trc, aThing, js::GCThingTraceKind(aThing));
414 * The cycle collection participant for a Zone is intended to produce the same
415 * results as if all of the gray GCthings in a zone were merged into a single node,
416 * except for self-edges. This avoids the overhead of representing all of the GCthings in
417 * the zone in the cycle collector graph, which should be much faster if many of
418 * the GCthings in the zone are gray.
420 * Zone merging should not always be used, because it is a conservative
421 * approximation of the true cycle collector graph that can incorrectly identify some
422 * garbage objects as being live. For instance, consider two cycles that pass through a
423 * zone, where one is garbage and the other is live. If we merge the entire
424 * zone, the cycle collector will think that both are alive.
426 * We don't have to worry about losing track of a garbage cycle, because any such garbage
427 * cycle incorrectly identified as live must contain at least one C++ to JS edge, and
428 * XPConnect will always add the C++ object to the CC graph. (This is in contrast to pure
429 * C++ garbage cycles, which must always be properly identified, because we clear the
430 * purple buffer during every CC, which may contain the last reference to a garbage
431 * cycle.)
434 // NB: This is only used to initialize the participant in
435 // CycleCollectedJSRuntime. It should never be used directly.
436 static const JSZoneParticipant sJSZoneCycleCollectorGlobal;
438 CycleCollectedJSRuntime::CycleCollectedJSRuntime(uint32_t aMaxbytes,
439 JSUseHelperThreads aUseHelperThreads)
440 : mGCThingCycleCollectorGlobal(sGCThingCycleCollectorGlobal),
441 mJSZoneCycleCollectorGlobal(sJSZoneCycleCollectorGlobal),
442 mJSRuntime(nullptr),
443 mJSHolders(512)
445 mozilla::dom::InitScriptSettings();
447 mJSRuntime = JS_NewRuntime(aMaxbytes, aUseHelperThreads);
448 if (!mJSRuntime) {
449 MOZ_CRASH();
452 if (!JS_AddExtraGCRootsTracer(mJSRuntime, TraceBlackJS, this)) {
453 MOZ_CRASH();
455 JS_SetGrayGCRootsTracer(mJSRuntime, TraceGrayJS, this);
456 JS_SetGCCallback(mJSRuntime, GCCallback, this);
457 JS_SetContextCallback(mJSRuntime, ContextCallback, this);
458 JS_SetDestroyZoneCallback(mJSRuntime, XPCStringConvert::FreeZoneCache);
459 JS_SetSweepZoneCallback(mJSRuntime, XPCStringConvert::ClearZoneCache);
461 nsCycleCollector_registerJSRuntime(this);
464 CycleCollectedJSRuntime::~CycleCollectedJSRuntime()
466 MOZ_ASSERT(mJSRuntime);
467 MOZ_ASSERT(!mDeferredFinalizerTable.Count());
468 MOZ_ASSERT(!mDeferredSupports.Length());
470 // Clear mPendingException first, since it might be cycle collected.
471 mPendingException = nullptr;
473 JS_DestroyRuntime(mJSRuntime);
474 mJSRuntime = nullptr;
475 nsCycleCollector_forgetJSRuntime();
477 mozilla::dom::DestroyScriptSettings();
480 size_t
481 CycleCollectedJSRuntime::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
483 size_t n = 0;
485 // nullptr for the second arg; we're not measuring anything hanging off the
486 // entries in mJSHolders.
487 n += mJSHolders.SizeOfExcludingThis(nullptr, aMallocSizeOf);
489 return n;
492 static PLDHashOperator
493 UnmarkJSHolder(void* holder, nsScriptObjectTracer*& tracer, void* arg)
495 tracer->CanSkip(holder, true);
496 return PL_DHASH_NEXT;
499 void
500 CycleCollectedJSRuntime::UnmarkSkippableJSHolders()
502 mJSHolders.Enumerate(UnmarkJSHolder, nullptr);
505 void
506 CycleCollectedJSRuntime::DescribeGCThing(bool aIsMarked, void* aThing,
507 JSGCTraceKind aTraceKind,
508 nsCycleCollectionTraversalCallback& aCb) const
510 if (!aCb.WantDebugInfo()) {
511 aCb.DescribeGCedNode(aIsMarked, "JS Object");
512 return;
515 char name[72];
516 uint64_t compartmentAddress = 0;
517 if (aTraceKind == JSTRACE_OBJECT) {
518 JSObject* obj = static_cast<JSObject*>(aThing);
519 compartmentAddress = (uint64_t)js::GetObjectCompartment(obj);
520 const js::Class* clasp = js::GetObjectClass(obj);
522 // Give the subclass a chance to do something
523 if (DescribeCustomObjects(obj, clasp, name)) {
524 // Nothing else to do!
525 } else if (js::IsFunctionObject(obj)) {
526 JSFunction* fun = JS_GetObjectFunction(obj);
527 JSString* str = JS_GetFunctionDisplayId(fun);
528 if (str) {
529 NS_ConvertUTF16toUTF8 fname(JS_GetInternedStringChars(str));
530 JS_snprintf(name, sizeof(name),
531 "JS Object (Function - %s)", fname.get());
532 } else {
533 JS_snprintf(name, sizeof(name), "JS Object (Function)");
535 } else {
536 JS_snprintf(name, sizeof(name), "JS Object (%s)",
537 clasp->name);
539 } else {
540 static const char trace_types[][11] = {
541 "Object",
542 "String",
543 "Script",
544 "LazyScript",
545 "IonCode",
546 "Shape",
547 "BaseShape",
548 "TypeObject",
550 JS_STATIC_ASSERT(NS_ARRAY_LENGTH(trace_types) == JSTRACE_LAST + 1);
551 JS_snprintf(name, sizeof(name), "JS %s", trace_types[aTraceKind]);
554 // Disable printing global for objects while we figure out ObjShrink fallout.
555 aCb.DescribeGCedNode(aIsMarked, name, compartmentAddress);
558 void
559 CycleCollectedJSRuntime::NoteGCThingJSChildren(void* aThing,
560 JSGCTraceKind aTraceKind,
561 nsCycleCollectionTraversalCallback& aCb) const
563 MOZ_ASSERT(mJSRuntime);
564 TraversalTracer trc(aCb);
565 JS_TracerInit(&trc, mJSRuntime, NoteJSChildTracerShim);
566 trc.eagerlyTraceWeakMaps = DoNotTraceWeakMaps;
567 JS_TraceChildren(&trc, aThing, aTraceKind);
570 void
571 CycleCollectedJSRuntime::NoteGCThingXPCOMChildren(const js::Class* aClasp, JSObject* aObj,
572 nsCycleCollectionTraversalCallback& aCb) const
574 MOZ_ASSERT(aClasp);
575 MOZ_ASSERT(aClasp == js::GetObjectClass(aObj));
577 if (NoteCustomGCThingXPCOMChildren(aClasp, aObj, aCb)) {
578 // Nothing else to do!
579 return;
581 // XXX This test does seem fragile, we should probably whitelist classes
582 // that do hold a strong reference, but that might not be possible.
583 else if (aClasp->flags & JSCLASS_HAS_PRIVATE &&
584 aClasp->flags & JSCLASS_PRIVATE_IS_NSISUPPORTS) {
585 NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(aCb, "js::GetObjectPrivate(obj)");
586 aCb.NoteXPCOMChild(static_cast<nsISupports*>(js::GetObjectPrivate(aObj)));
587 } else {
588 const DOMClass* domClass = GetDOMClass(aObj);
589 if (domClass) {
590 NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(aCb, "UnwrapDOMObject(obj)");
591 if (domClass->mDOMObjectIsISupports) {
592 aCb.NoteXPCOMChild(UnwrapDOMObject<nsISupports>(aObj));
593 } else if (domClass->mParticipant) {
594 aCb.NoteNativeChild(UnwrapDOMObject<void>(aObj),
595 domClass->mParticipant);
601 void
602 CycleCollectedJSRuntime::TraverseGCThing(TraverseSelect aTs, void* aThing,
603 JSGCTraceKind aTraceKind,
604 nsCycleCollectionTraversalCallback& aCb)
606 MOZ_ASSERT(aTraceKind == js::GCThingTraceKind(aThing));
607 bool isMarkedGray = xpc_IsGrayGCThing(aThing);
609 if (aTs == TRAVERSE_FULL) {
610 DescribeGCThing(!isMarkedGray, aThing, aTraceKind, aCb);
613 // If this object is alive, then all of its children are alive. For JS objects,
614 // the black-gray invariant ensures the children are also marked black. For C++
615 // objects, the ref count from this object will keep them alive. Thus we don't
616 // need to trace our children, unless we are debugging using WantAllTraces.
617 if (!isMarkedGray && !aCb.WantAllTraces()) {
618 return;
621 if (aTs == TRAVERSE_FULL) {
622 NoteGCThingJSChildren(aThing, aTraceKind, aCb);
625 if (aTraceKind == JSTRACE_OBJECT) {
626 JSObject* obj = static_cast<JSObject*>(aThing);
627 NoteGCThingXPCOMChildren(js::GetObjectClass(obj), obj, aCb);
631 struct TraverseObjectShimClosure {
632 nsCycleCollectionTraversalCallback& cb;
633 CycleCollectedJSRuntime* self;
636 void
637 CycleCollectedJSRuntime::TraverseZone(JS::Zone* aZone,
638 nsCycleCollectionTraversalCallback& aCb)
641 * We treat the zone as being gray. We handle non-gray GCthings in the
642 * zone by not reporting their children to the CC. The black-gray invariant
643 * ensures that any JS children will also be non-gray, and thus don't need to be
644 * added to the graph. For C++ children, not representing the edge from the
645 * non-gray JS GCthings to the C++ object will keep the child alive.
647 * We don't allow zone merging in a WantAllTraces CC, because then these
648 * assumptions don't hold.
650 aCb.DescribeGCedNode(false, "JS Zone");
653 * Every JS child of everything in the zone is either in the zone
654 * or is a cross-compartment wrapper. In the former case, we don't need to
655 * represent these edges in the CC graph because JS objects are not ref counted.
656 * In the latter case, the JS engine keeps a map of these wrappers, which we
657 * iterate over. Edges between compartments in the same zone will add
658 * unnecessary loop edges to the graph (bug 842137).
660 TraversalTracer trc(aCb);
661 JS_TracerInit(&trc, mJSRuntime, NoteJSChildTracerShim);
662 trc.eagerlyTraceWeakMaps = DoNotTraceWeakMaps;
663 js::VisitGrayWrapperTargets(aZone, NoteJSChildGrayWrapperShim, &trc);
666 * To find C++ children of things in the zone, we scan every JS Object in
667 * the zone. Only JS Objects can have C++ children.
669 TraverseObjectShimClosure closure = { aCb, this };
670 js::IterateGrayObjects(aZone, TraverseObjectShim, &closure);
673 /* static */ void
674 CycleCollectedJSRuntime::TraverseObjectShim(void* aData, void* aThing)
676 TraverseObjectShimClosure* closure =
677 static_cast<TraverseObjectShimClosure*>(aData);
679 MOZ_ASSERT(js::GCThingTraceKind(aThing) == JSTRACE_OBJECT);
680 closure->self->TraverseGCThing(CycleCollectedJSRuntime::TRAVERSE_CPP, aThing,
681 JSTRACE_OBJECT, closure->cb);
684 void
685 CycleCollectedJSRuntime::TraverseNativeRoots(nsCycleCollectionNoteRootCallback& aCb)
687 // NB: This is here just to preserve the existing XPConnect order. I doubt it
688 // would hurt to do this after the JS holders.
689 TraverseAdditionalNativeRoots(aCb);
691 Closure closure = { true, &aCb };
692 mJSHolders.Enumerate(NoteJSHolder, &closure);
695 /* static */ void
696 CycleCollectedJSRuntime::TraceBlackJS(JSTracer* aTracer, void* aData)
698 CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);
700 self->TraceNativeBlackRoots(aTracer);
703 /* static */ void
704 CycleCollectedJSRuntime::TraceGrayJS(JSTracer* aTracer, void* aData)
706 CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);
708 // Mark these roots as gray so the CC can walk them later.
709 self->TraceNativeGrayRoots(aTracer);
712 /* static */ void
713 CycleCollectedJSRuntime::GCCallback(JSRuntime* aRuntime,
714 JSGCStatus aStatus,
715 void* aData)
717 CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);
719 MOZ_ASSERT(aRuntime == self->Runtime());
721 self->OnGC(aStatus);
724 /* static */ bool
725 CycleCollectedJSRuntime::ContextCallback(JSContext* aContext,
726 unsigned aOperation,
727 void* aData)
729 CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);
731 MOZ_ASSERT(JS_GetRuntime(aContext) == self->Runtime());
733 return self->CustomContextCallback(aContext, aOperation);
736 struct JsGcTracer : public TraceCallbacks
738 virtual void Trace(JS::Heap<JS::Value> *p, const char *name, void *closure) const MOZ_OVERRIDE {
739 JS_CallHeapValueTracer(static_cast<JSTracer*>(closure), p, name);
741 virtual void Trace(JS::Heap<jsid> *p, const char *name, void *closure) const MOZ_OVERRIDE {
742 JS_CallHeapIdTracer(static_cast<JSTracer*>(closure), p, name);
744 virtual void Trace(JS::Heap<JSObject *> *p, const char *name, void *closure) const MOZ_OVERRIDE {
745 JS_CallHeapObjectTracer(static_cast<JSTracer*>(closure), p, name);
747 virtual void Trace(JS::Heap<JSString *> *p, const char *name, void *closure) const MOZ_OVERRIDE {
748 JS_CallHeapStringTracer(static_cast<JSTracer*>(closure), p, name);
750 virtual void Trace(JS::Heap<JSScript *> *p, const char *name, void *closure) const MOZ_OVERRIDE {
751 JS_CallHeapScriptTracer(static_cast<JSTracer*>(closure), p, name);
753 virtual void Trace(JS::Heap<JSFunction *> *p, const char *name, void *closure) const MOZ_OVERRIDE {
754 JS_CallHeapFunctionTracer(static_cast<JSTracer*>(closure), p, name);
758 static PLDHashOperator
759 TraceJSHolder(void* aHolder, nsScriptObjectTracer*& aTracer, void* aArg)
761 aTracer->Trace(aHolder, JsGcTracer(), aArg);
763 return PL_DHASH_NEXT;
766 void
767 CycleCollectedJSRuntime::TraceNativeGrayRoots(JSTracer* aTracer)
769 // NB: This is here just to preserve the existing XPConnect order. I doubt it
770 // would hurt to do this after the JS holders.
771 TraceAdditionalNativeGrayRoots(aTracer);
773 mJSHolders.Enumerate(TraceJSHolder, aTracer);
776 void
777 CycleCollectedJSRuntime::AddJSHolder(void* aHolder, nsScriptObjectTracer* aTracer)
779 mJSHolders.Put(aHolder, aTracer);
782 struct ClearJSHolder : TraceCallbacks
784 virtual void Trace(JS::Heap<JS::Value>* aPtr, const char*, void*) const MOZ_OVERRIDE
786 *aPtr = JSVAL_VOID;
789 virtual void Trace(JS::Heap<jsid>* aPtr, const char*, void*) const MOZ_OVERRIDE
791 *aPtr = JSID_VOID;
794 virtual void Trace(JS::Heap<JSObject*>* aPtr, const char*, void*) const MOZ_OVERRIDE
796 *aPtr = nullptr;
799 virtual void Trace(JS::Heap<JSString*>* aPtr, const char*, void*) const MOZ_OVERRIDE
801 *aPtr = nullptr;
804 virtual void Trace(JS::Heap<JSScript*>* aPtr, const char*, void*) const MOZ_OVERRIDE
806 *aPtr = nullptr;
809 virtual void Trace(JS::Heap<JSFunction*>* aPtr, const char*, void*) const MOZ_OVERRIDE
811 *aPtr = nullptr;
815 void
816 CycleCollectedJSRuntime::RemoveJSHolder(void* aHolder)
818 nsScriptObjectTracer* tracer = mJSHolders.Get(aHolder);
819 if (!tracer) {
820 return;
822 tracer->Trace(aHolder, ClearJSHolder(), nullptr);
823 mJSHolders.Remove(aHolder);
826 #ifdef DEBUG
827 bool
828 CycleCollectedJSRuntime::IsJSHolder(void* aHolder)
830 return mJSHolders.Get(aHolder, nullptr);
833 static void
834 AssertNoGcThing(void* aGCThing, const char* aName, void* aClosure)
836 MOZ_ASSERT(!aGCThing);
839 void
840 CycleCollectedJSRuntime::AssertNoObjectsToTrace(void* aPossibleJSHolder)
842 nsScriptObjectTracer* tracer = mJSHolders.Get(aPossibleJSHolder);
843 if (tracer) {
844 tracer->Trace(aPossibleJSHolder, TraceCallbackFunc(AssertNoGcThing), nullptr);
847 #endif
849 already_AddRefed<nsIException>
850 CycleCollectedJSRuntime::GetPendingException() const
852 nsCOMPtr<nsIException> out = mPendingException;
853 return out.forget();
856 void
857 CycleCollectedJSRuntime::SetPendingException(nsIException* aException)
859 mPendingException = aException;
862 nsCycleCollectionParticipant*
863 CycleCollectedJSRuntime::GCThingParticipant()
865 return &mGCThingCycleCollectorGlobal;
868 nsCycleCollectionParticipant*
869 CycleCollectedJSRuntime::ZoneParticipant()
871 return &mJSZoneCycleCollectorGlobal;
874 nsresult
875 CycleCollectedJSRuntime::TraverseRoots(nsCycleCollectionNoteRootCallback &aCb)
877 static bool gcHasRun = false;
878 if (!gcHasRun) {
879 uint32_t gcNumber = JS_GetGCParameter(mJSRuntime, JSGC_NUMBER);
880 if (!gcNumber) {
881 // Cannot cycle collect if GC has not run first!
882 MOZ_CRASH();
884 gcHasRun = true;
887 TraverseNativeRoots(aCb);
889 NoteWeakMapsTracer trc(mJSRuntime, TraceWeakMapping, aCb);
890 js::TraceWeakMaps(&trc);
892 return NS_OK;
896 * Return true if there exists a JSContext with a default global whose current
897 * inner is gray. The intent is to look for JS Object windows. We don't merge
898 * system compartments, so we don't use them to trigger merging CCs.
900 bool
901 CycleCollectedJSRuntime::UsefulToMergeZones() const
903 if (!NS_IsMainThread()) {
904 return false;
907 JSContext* iter = nullptr;
908 JSContext* cx;
909 JSAutoRequest ar(nsContentUtils::GetSafeJSContext());
910 while ((cx = JS_ContextIterator(mJSRuntime, &iter))) {
911 // Skip anything without an nsIScriptContext.
912 nsIScriptContext* scx = GetScriptContextFromJSContext(cx);
913 JS::RootedObject obj(cx, scx ? scx->GetWindowProxyPreserveColor() : nullptr);
914 if (!obj) {
915 continue;
917 MOZ_ASSERT(js::IsOuterObject(obj));
918 // Grab the inner from the outer.
919 obj = JS_ObjectToInnerObject(cx, obj);
920 MOZ_ASSERT(!js::GetObjectParent(obj));
921 if (JS::GCThingIsMarkedGray(obj) &&
922 !js::IsSystemCompartment(js::GetObjectCompartment(obj))) {
923 return true;
926 return false;
929 void
930 CycleCollectedJSRuntime::FixWeakMappingGrayBits() const
932 FixWeakMappingGrayBitsTracer fixer(mJSRuntime);
933 fixer.FixAll();
936 bool
937 CycleCollectedJSRuntime::NeedCollect() const
939 return !js::AreGCGrayBitsValid(mJSRuntime);
942 void
943 CycleCollectedJSRuntime::Collect(uint32_t aReason) const
945 MOZ_ASSERT(aReason < JS::gcreason::NUM_REASONS);
946 JS::gcreason::Reason gcreason = static_cast<JS::gcreason::Reason>(aReason);
948 JS::PrepareForFullGC(mJSRuntime);
949 JS::GCForReason(mJSRuntime, gcreason);
952 void
953 CycleCollectedJSRuntime::DeferredFinalize(DeferredFinalizeAppendFunction aAppendFunc,
954 DeferredFinalizeFunction aFunc,
955 void* aThing)
957 void* thingArray = nullptr;
958 bool hadThingArray = mDeferredFinalizerTable.Get(aFunc, &thingArray);
960 thingArray = aAppendFunc(thingArray, aThing);
961 if (!hadThingArray) {
962 mDeferredFinalizerTable.Put(aFunc, thingArray);
966 void
967 CycleCollectedJSRuntime::DeferredFinalize(nsISupports* aSupports)
969 mDeferredSupports.AppendElement(aSupports);
972 void
973 CycleCollectedJSRuntime::DumpJSHeap(FILE* file)
975 js::DumpHeapComplete(Runtime(), file, js::CollectNurseryBeforeDump);
979 bool
980 ReleaseSliceNow(uint32_t aSlice, void* aData)
982 MOZ_ASSERT(aSlice > 0, "nonsensical/useless call with slice == 0");
983 nsTArray<nsISupports*>* items = static_cast<nsTArray<nsISupports*>*>(aData);
985 uint32_t length = items->Length();
986 aSlice = std::min(aSlice, length);
987 for (uint32_t i = length; i > length - aSlice; --i) {
988 // Remove (and NS_RELEASE) the last entry in "items":
989 uint32_t lastItemIdx = i - 1;
991 nsISupports* wrapper = items->ElementAt(lastItemIdx);
992 items->RemoveElementAt(lastItemIdx);
993 NS_RELEASE(wrapper);
996 return items->IsEmpty();
999 /* static */ PLDHashOperator
1000 IncrementalFinalizeRunnable::DeferredFinalizerEnumerator(DeferredFinalizeFunction& aFunction,
1001 void*& aData,
1002 void* aClosure)
1004 DeferredFinalizeArray* array = static_cast<DeferredFinalizeArray*>(aClosure);
1006 DeferredFinalizeFunctionHolder* function = array->AppendElement();
1007 function->run = aFunction;
1008 function->data = aData;
1010 return PL_DHASH_REMOVE;
1013 IncrementalFinalizeRunnable::IncrementalFinalizeRunnable(CycleCollectedJSRuntime* aRt,
1014 nsTArray<nsISupports*>& aSupports,
1015 DeferredFinalizerTable& aFinalizers)
1016 : mRuntime(aRt),
1017 mFinalizeFunctionToRun(0)
1019 this->mSupports.SwapElements(aSupports);
1020 DeferredFinalizeFunctionHolder* function = mDeferredFinalizeFunctions.AppendElement();
1021 function->run = ReleaseSliceNow;
1022 function->data = &this->mSupports;
1024 // Enumerate the hashtable into our array.
1025 aFinalizers.Enumerate(DeferredFinalizerEnumerator, &mDeferredFinalizeFunctions);
1028 IncrementalFinalizeRunnable::~IncrementalFinalizeRunnable()
1030 MOZ_ASSERT(this != mRuntime->mFinalizeRunnable);
1033 void
1034 IncrementalFinalizeRunnable::ReleaseNow(bool aLimited)
1036 //MOZ_ASSERT(NS_IsMainThread());
1037 MOZ_ASSERT(mDeferredFinalizeFunctions.Length() != 0,
1038 "We should have at least ReleaseSliceNow to run");
1039 MOZ_ASSERT(mFinalizeFunctionToRun < mDeferredFinalizeFunctions.Length(),
1040 "No more finalizers to run?");
1042 TimeDuration sliceTime = TimeDuration::FromMilliseconds(SliceMillis);
1043 TimeStamp started = TimeStamp::Now();
1044 bool timeout = false;
1045 do {
1046 const DeferredFinalizeFunctionHolder &function =
1047 mDeferredFinalizeFunctions[mFinalizeFunctionToRun];
1048 if (aLimited) {
1049 bool done = false;
1050 while (!timeout && !done) {
1052 * We don't want to read the clock too often, so we try to
1053 * release slices of 100 items.
1055 done = function.run(100, function.data);
1056 timeout = TimeStamp::Now() - started >= sliceTime;
1058 if (done) {
1059 ++mFinalizeFunctionToRun;
1061 if (timeout) {
1062 break;
1064 } else {
1065 function.run(UINT32_MAX, function.data);
1066 ++mFinalizeFunctionToRun;
1068 } while (mFinalizeFunctionToRun < mDeferredFinalizeFunctions.Length());
1070 if (mFinalizeFunctionToRun == mDeferredFinalizeFunctions.Length()) {
1071 MOZ_ASSERT(mRuntime->mFinalizeRunnable == this);
1072 mDeferredFinalizeFunctions.Clear();
1073 // NB: This may delete this!
1074 mRuntime->mFinalizeRunnable = nullptr;
1078 NS_IMETHODIMP
1079 IncrementalFinalizeRunnable::Run()
1081 if (mRuntime->mFinalizeRunnable != this) {
1082 /* These items were already processed synchronously in JSGC_END. */
1083 MOZ_ASSERT(!mSupports.Length());
1084 MOZ_ASSERT(!mDeferredFinalizeFunctions.Length());
1085 return NS_OK;
1088 ReleaseNow(true);
1090 if (mDeferredFinalizeFunctions.Length()) {
1091 nsresult rv = NS_DispatchToCurrentThread(this);
1092 if (NS_FAILED(rv)) {
1093 ReleaseNow(false);
1097 return NS_OK;
1100 void
1101 CycleCollectedJSRuntime::FinalizeDeferredThings(DeferredFinalizeType aType)
1103 MOZ_ASSERT(!mFinalizeRunnable);
1104 mFinalizeRunnable = new IncrementalFinalizeRunnable(this,
1105 mDeferredSupports,
1106 mDeferredFinalizerTable);
1108 // Everything should be gone now.
1109 MOZ_ASSERT(!mDeferredSupports.Length());
1110 MOZ_ASSERT(!mDeferredFinalizerTable.Count());
1112 if (aType == FinalizeIncrementally) {
1113 NS_DispatchToCurrentThread(mFinalizeRunnable);
1114 } else {
1115 mFinalizeRunnable->ReleaseNow(false);
1116 MOZ_ASSERT(!mFinalizeRunnable);
1120 void
1121 CycleCollectedJSRuntime::OnGC(JSGCStatus aStatus)
1123 switch (aStatus) {
1124 case JSGC_BEGIN:
1125 nsCycleCollector_prepareForGarbageCollection();
1126 break;
1127 case JSGC_END:
1130 * If the previous GC created a runnable to finalize objects
1131 * incrementally, and if it hasn't finished yet, finish it now. We
1132 * don't want these to build up. We also don't want to allow any
1133 * existing incremental finalize runnables to run after a
1134 * non-incremental GC, since they are often used to detect leaks.
1136 if (mFinalizeRunnable) {
1137 mFinalizeRunnable->ReleaseNow(false);
1140 // Do any deferred finalization of native objects.
1141 FinalizeDeferredThings(JS::WasIncrementalGC(mJSRuntime) ? FinalizeIncrementally :
1142 FinalizeNow);
1143 break;
1145 default:
1146 MOZ_CRASH();
1149 CustomGCCallback(aStatus);