// NOTE(review): the following three lines are provenance metadata left behind
// by the code-hosting viewer; they are not part of the source file itself:
//   Backed out changeset 4191b252db9b (bug 1886734) for causing build bustages @netwerk...
//   [gecko.git] / js / src / gc / SweepingAPI.h
//   blob c9948d1118b435daee5b25843f0dc2d5fc1c4eaa
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 #ifndef js_SweepingAPI_h
8 #define js_SweepingAPI_h
10 #include "mozilla/LinkedList.h"
11 #include "mozilla/Maybe.h"
13 #include "jstypes.h"
15 #include "js/GCAnnotations.h"
16 #include "js/GCHashTable.h"
17 #include "js/GCPolicyAPI.h"
18 #include "js/RootingAPI.h"
20 namespace js {
21 namespace gc {
23 JS_PUBLIC_API void LockStoreBuffer(JSRuntime* runtime);
24 JS_PUBLIC_API void UnlockStoreBuffer(JSRuntime* runtim);
26 class AutoLockStoreBuffer {
27 JSRuntime* runtime;
29 public:
30 explicit AutoLockStoreBuffer(JSRuntime* runtime) : runtime(runtime) {
31 LockStoreBuffer(runtime);
33 ~AutoLockStoreBuffer() { UnlockStoreBuffer(runtime); }
class WeakCacheBase;

// Register a weak cache with its zone or runtime so it can be found and swept
// during GC. Called from the WeakCacheBase constructors below.
JS_PUBLIC_API void RegisterWeakCache(JS::Zone* zone, WeakCacheBase* cachep);
JS_PUBLIC_API void RegisterWeakCache(JSRuntime* rt, WeakCacheBase* cachep);
// Abstract base class for weak caches. On construction an instance registers
// itself (via RegisterWeakCache) with a zone or a runtime; the LinkedList base
// links registered caches together so they can be swept during GC.
class WeakCacheBase : public mozilla::LinkedListElement<WeakCacheBase> {
  WeakCacheBase() = delete;
  explicit WeakCacheBase(const WeakCacheBase&) = delete;

 public:
  // Tells traceWeak whether it must take the store buffer lock around
  // operations that may access the store buffer.
  enum NeedsLock : bool { LockStoreBuffer = true, DontLockStoreBuffer = false };

  explicit WeakCacheBase(JS::Zone* zone) { RegisterWeakCache(zone, this); }
  explicit WeakCacheBase(JSRuntime* rt) { RegisterWeakCache(rt, this); }
  WeakCacheBase(WeakCacheBase&& other) = default;
  virtual ~WeakCacheBase() = default;

  // Trace the cache's contents weakly, dropping dead entries. Returns a step
  // count used to account for the sweeping work done (see the
  // implementations below).
  virtual size_t traceWeak(JSTracer* trc, NeedsLock needLock) = 0;

  // Sweeping will be skipped if the cache is empty already.
  virtual bool empty() = 0;

  // Enable/disable read barrier during incremental sweeping and set the tracer
  // to use.
  virtual bool setIncrementalBarrierTracer(JSTracer* trc) {
    // Derived classes do not support incremental barriers by default.
    return false;
  }
  virtual bool needsIncrementalBarrier() const {
    // Derived classes do not support incremental barriers by default.
    return false;
  }
};
69 } // namespace gc
// A WeakCache stores the given Sweepable container and links itself into a
// list of such caches that are swept during each GC. A WeakCache can be
// specific to a zone, or across a whole runtime, depending on which
// constructor is used.
template <typename T>
class WeakCache : protected gc::WeakCacheBase,
                  public js::MutableWrappedPtrOperations<T, WeakCache<T>> {
  T cache;

 public:
  using Type = T;

  template <typename... Args>
  explicit WeakCache(Zone* zone, Args&&... args)
      : WeakCacheBase(zone), cache(std::forward<Args>(args)...) {}
  template <typename... Args>
  explicit WeakCache(JSRuntime* rt, Args&&... args)
      : WeakCacheBase(rt), cache(std::forward<Args>(args)...) {}

  // Direct access to the wrapped container.
  const T& get() const { return cache; }
  T& get() { return cache; }

  size_t traceWeak(JSTracer* trc, NeedsLock needsLock) override {
    // Take the store buffer lock in case sweeping triggers any generational
    // post barriers. This is not always required and WeakCache specializations
    // may delay or skip taking the lock as appropriate.
    mozilla::Maybe<js::gc::AutoLockStoreBuffer> lock;
    if (needsLock) {
      lock.emplace(trc->runtime());
    }

    JS::GCPolicy<T>::traceWeak(trc, &cache);
    // The whole container is swept in one shot, so no incremental step count
    // is reported.
    return 0;
  }

  bool empty() override { return cache.empty(); }
} JS_HAZ_NON_GC_POINTER;
// Specialize WeakCache for GCHashMap to provide a barriered map that does not
// need to be swept immediately.
template <typename Key, typename Value, typename HashPolicy,
          typename AllocPolicy, typename MapEntryGCPolicy>
class WeakCache<
    GCHashMap<Key, Value, HashPolicy, AllocPolicy, MapEntryGCPolicy>>
    final : protected gc::WeakCacheBase {
  using Map = GCHashMap<Key, Value, HashPolicy, AllocPolicy, MapEntryGCPolicy>;
  using Self = WeakCache<Map>;

  Map map;

  // When non-null, an incremental barrier is in place: reads must check each
  // entry for liveness and drop dead ones. See setIncrementalBarrierTracer.
  JSTracer* barrierTracer = nullptr;

 public:
  template <typename... Args>
  explicit WeakCache(Zone* zone, Args&&... args)
      : WeakCacheBase(zone), map(std::forward<Args>(args)...) {}
  template <typename... Args>
  explicit WeakCache(JSRuntime* rt, Args&&... args)
      : WeakCacheBase(rt), map(std::forward<Args>(args)...) {}
  // The barrier must have been disabled before the cache dies.
  ~WeakCache() { MOZ_ASSERT(!barrierTracer); }

  bool empty() override { return map.empty(); }

  // Sweep all entries, returning the number of entries visited as a measure
  // of the work done.
  size_t traceWeak(JSTracer* trc, NeedsLock needsLock) override {
    size_t steps = map.count();

    // Create an Enum and sweep the table entries.
    mozilla::Maybe<typename Map::Enum> e;
    e.emplace(map);
    map.traceWeakEntries(trc, e.ref());

    // Potentially take a lock while the Enum's destructor is called as this can
    // rehash/resize the table and access the store buffer.
    mozilla::Maybe<js::gc::AutoLockStoreBuffer> lock;
    if (needsLock) {
      lock.emplace(trc->runtime());
    }
    e.reset();

    return steps;
  }

  bool setIncrementalBarrierTracer(JSTracer* trc) override {
    // Exactly one of the old and new tracers is null: barriers are enabled
    // and disabled, never replaced.
    MOZ_ASSERT(bool(barrierTracer) != bool(trc));
    barrierTracer = trc;
    return true;
  }

  bool needsIncrementalBarrier() const override { return barrierTracer; }

 private:
  using Entry = typename Map::Entry;

  // Return whether |prior| is dead, without updating it in the table.
  static bool entryNeedsSweep(JSTracer* barrierTracer, const Entry& prior) {
    Key key(prior.key());
    Value value(prior.value());
    bool needsSweep = !MapEntryGCPolicy::traceWeak(barrierTracer, &key, &value);
    MOZ_ASSERT_IF(!needsSweep,
                  prior.key() == key);  // We shouldn't update here.
    MOZ_ASSERT_IF(!needsSweep,
                  prior.value() == value);  // We shouldn't update here.
    return needsSweep;
  }

 public:
  using Lookup = typename Map::Lookup;
  using Ptr = typename Map::Ptr;
  using AddPtr = typename Map::AddPtr;

  // Iterator over the whole collection.
  struct Range {
    explicit Range(Self& self) : cache(self), range(self.map.all()) {
      settle();
    }
    Range() = default;

    bool empty() const { return range.empty(); }
    const Entry& front() const { return range.front(); }

    void popFront() {
      range.popFront();
      settle();
    }

   private:
    Self& cache;
    typename Map::Range range;

    // Skip over entries that an active incremental barrier has found dead.
    void settle() {
      if (JSTracer* trc = cache.barrierTracer) {
        while (!empty() && entryNeedsSweep(trc, front())) {
          popFront();
        }
      }
    }
  };

  struct Enum : public Map::Enum {
    explicit Enum(Self& cache) : Map::Enum(cache.map) {
      // This operation is not allowed while barriers are in place as we
      // may also need to enumerate the set for sweeping.
      MOZ_ASSERT(!cache.barrierTracer);
    }
  };

  Ptr lookup(const Lookup& l) const {
    Ptr ptr = map.lookup(l);
    if (barrierTracer && ptr && entryNeedsSweep(barrierTracer, *ptr)) {
      // lookup() is const, but removing an entry found dead mutates the
      // table, hence the const_cast.
      const_cast<Map&>(map).remove(ptr);
      return Ptr();
    }
    return ptr;
  }

  AddPtr lookupForAdd(const Lookup& l) {
    AddPtr ptr = map.lookupForAdd(l);
    if (barrierTracer && ptr && entryNeedsSweep(barrierTracer, *ptr)) {
      const_cast<Map&>(map).remove(ptr);
      // Removal may have changed the table; look up again for a valid AddPtr.
      return map.lookupForAdd(l);
    }
    return ptr;
  }

  Range all() const { return Range(*const_cast<Self*>(this)); }

  bool empty() const {
    // This operation is not currently allowed while barriers are in place
    // as it would require iterating the map and the caller expects a
    // constant time operation.
    MOZ_ASSERT(!barrierTracer);
    return map.empty();
  }

  uint32_t count() const {
    // This operation is not currently allowed while barriers are in place
    // as it would require iterating the set and the caller expects a
    // constant time operation.
    MOZ_ASSERT(!barrierTracer);
    return map.count();
  }

  size_t capacity() const { return map.capacity(); }

  bool has(const Lookup& l) const { return lookup(l).found(); }

  size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
    return map.sizeOfExcludingThis(mallocSizeOf);
  }
  size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
    return mallocSizeOf(this) + map.shallowSizeOfExcludingThis(mallocSizeOf);
  }

  void clear() {
    // This operation is not currently allowed while barriers are in place
    // since it doesn't make sense to clear a cache while it is being swept.
    MOZ_ASSERT(!barrierTracer);
    map.clear();
  }

  void clearAndCompact() {
    // This operation is not currently allowed while barriers are in place
    // since it doesn't make sense to clear a cache while it is being swept.
    MOZ_ASSERT(!barrierTracer);
    map.clearAndCompact();
  }

  void remove(Ptr p) {
    // This currently supports removing entries during incremental
    // sweeping. If we allow these tables to be swept incrementally this may
    // no longer be possible.
    map.remove(p);
  }

  void remove(const Lookup& l) {
    Ptr p = lookup(l);
    if (p) {
      remove(p);
    }
  }

  template <typename KeyInput, typename ValueInput>
  bool add(AddPtr& p, KeyInput&& k, ValueInput&& v) {
    return map.add(p, std::forward<KeyInput>(k), std::forward<ValueInput>(v));
  }

  template <typename KeyInput, typename ValueInput>
  bool relookupOrAdd(AddPtr& p, KeyInput&& k, ValueInput&& v) {
    return map.relookupOrAdd(p, std::forward<KeyInput>(k),
                             std::forward<ValueInput>(v));
  }

  template <typename KeyInput, typename ValueInput>
  bool put(KeyInput&& k, ValueInput&& v) {
    return map.put(std::forward<KeyInput>(k), std::forward<ValueInput>(v));
  }

  template <typename KeyInput, typename ValueInput>
  bool putNew(KeyInput&& k, ValueInput&& v) {
    return map.putNew(std::forward<KeyInput>(k), std::forward<ValueInput>(v));
  }
} JS_HAZ_NON_GC_POINTER;
// Specialize WeakCache for GCHashSet to provide a barriered set that does not
// need to be swept immediately.
template <typename T, typename HashPolicy, typename AllocPolicy>
class WeakCache<GCHashSet<T, HashPolicy, AllocPolicy>> final
    : protected gc::WeakCacheBase {
  using Set = GCHashSet<T, HashPolicy, AllocPolicy>;
  using Self = WeakCache<Set>;

  Set set;

  // When non-null, an incremental barrier is in place: reads must check each
  // entry for liveness and drop dead ones. See setIncrementalBarrierTracer.
  JSTracer* barrierTracer = nullptr;

 public:
  using Entry = typename Set::Entry;

  template <typename... Args>
  explicit WeakCache(Zone* zone, Args&&... args)
      : WeakCacheBase(zone), set(std::forward<Args>(args)...) {}
  template <typename... Args>
  explicit WeakCache(JSRuntime* rt, Args&&... args)
      : WeakCacheBase(rt), set(std::forward<Args>(args)...) {}

  // Sweep all entries, returning the number of entries visited as a measure
  // of the work done.
  size_t traceWeak(JSTracer* trc, NeedsLock needsLock) override {
    size_t steps = set.count();

    // Create an Enum and sweep the table entries. It's not necessary to take
    // the store buffer lock yet.
    mozilla::Maybe<typename Set::Enum> e;
    e.emplace(set);
    set.traceWeakEntries(trc, e.ref());

    // Destroy the Enum, potentially rehashing or resizing the table. Since this
    // can access the store buffer, we need to take a lock for this if we're
    // called off main thread.
    mozilla::Maybe<js::gc::AutoLockStoreBuffer> lock;
    if (needsLock) {
      lock.emplace(trc->runtime());
    }
    e.reset();

    return steps;
  }

  bool empty() override { return set.empty(); }

  bool setIncrementalBarrierTracer(JSTracer* trc) override {
    // Exactly one of the old and new tracers is null: barriers are enabled
    // and disabled, never replaced.
    MOZ_ASSERT(bool(barrierTracer) != bool(trc));
    barrierTracer = trc;
    return true;
  }

  bool needsIncrementalBarrier() const override { return barrierTracer; }

 private:
  // Return whether |prior| is dead, without updating it in the table.
  static bool entryNeedsSweep(JSTracer* barrierTracer, const Entry& prior) {
    Entry entry(prior);
    bool needsSweep = !JS::GCPolicy<T>::traceWeak(barrierTracer, &entry);
    MOZ_ASSERT_IF(!needsSweep, prior == entry);  // We shouldn't update here.
    return needsSweep;
  }

 public:
  using Lookup = typename Set::Lookup;
  using Ptr = typename Set::Ptr;
  using AddPtr = typename Set::AddPtr;

  // Iterator over the whole collection.
  struct Range {
    explicit Range(Self& self) : cache(self), range(self.set.all()) {
      settle();
    }
    Range() = default;

    bool empty() const { return range.empty(); }
    const Entry& front() const { return range.front(); }

    void popFront() {
      range.popFront();
      settle();
    }

   private:
    Self& cache;
    typename Set::Range range;

    // Skip over entries that an active incremental barrier has found dead.
    void settle() {
      if (JSTracer* trc = cache.barrierTracer) {
        while (!empty() && entryNeedsSweep(trc, front())) {
          popFront();
        }
      }
    }
  };

  struct Enum : public Set::Enum {
    explicit Enum(Self& cache) : Set::Enum(cache.set) {
      // This operation is not allowed while barriers are in place as we
      // may also need to enumerate the set for sweeping.
      MOZ_ASSERT(!cache.barrierTracer);
    }
  };

  Ptr lookup(const Lookup& l) const {
    Ptr ptr = set.lookup(l);
    if (barrierTracer && ptr && entryNeedsSweep(barrierTracer, *ptr)) {
      // lookup() is const, but removing an entry found dead mutates the
      // table, hence the const_cast.
      const_cast<Set&>(set).remove(ptr);
      return Ptr();
    }
    return ptr;
  }

  AddPtr lookupForAdd(const Lookup& l) {
    AddPtr ptr = set.lookupForAdd(l);
    if (barrierTracer && ptr && entryNeedsSweep(barrierTracer, *ptr)) {
      const_cast<Set&>(set).remove(ptr);
      // Removal may have changed the table; look up again for a valid AddPtr.
      return set.lookupForAdd(l);
    }
    return ptr;
  }

  Range all() const { return Range(*const_cast<Self*>(this)); }

  bool empty() const {
    // This operation is not currently allowed while barriers are in place
    // as it would require iterating the set and the caller expects a
    // constant time operation.
    MOZ_ASSERT(!barrierTracer);
    return set.empty();
  }

  uint32_t count() const {
    // This operation is not currently allowed while barriers are in place
    // as it would require iterating the set and the caller expects a
    // constant time operation.
    MOZ_ASSERT(!barrierTracer);
    return set.count();
  }

  size_t capacity() const { return set.capacity(); }

  bool has(const Lookup& l) const { return lookup(l).found(); }

  size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
    return set.shallowSizeOfExcludingThis(mallocSizeOf);
  }
  size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
    return mallocSizeOf(this) + set.shallowSizeOfExcludingThis(mallocSizeOf);
  }

  void clear() {
    // This operation is not currently allowed while barriers are in place
    // since it doesn't make sense to clear a cache while it is being swept.
    MOZ_ASSERT(!barrierTracer);
    set.clear();
  }

  void clearAndCompact() {
    // This operation is not currently allowed while barriers are in place
    // since it doesn't make sense to clear a cache while it is being swept.
    MOZ_ASSERT(!barrierTracer);
    set.clearAndCompact();
  }

  void remove(Ptr p) {
    // This currently supports removing entries during incremental
    // sweeping. If we allow these tables to be swept incrementally this may
    // no longer be possible.
    set.remove(p);
  }

  void remove(const Lookup& l) {
    Ptr p = lookup(l);
    if (p) {
      remove(p);
    }
  }

  template <typename TInput>
  void replaceKey(Ptr p, const Lookup& l, TInput&& newValue) {
    set.replaceKey(p, l, std::forward<TInput>(newValue));
  }

  template <typename TInput>
  bool add(AddPtr& p, TInput&& t) {
    return set.add(p, std::forward<TInput>(t));
  }

  template <typename TInput>
  bool relookupOrAdd(AddPtr& p, const Lookup& l, TInput&& t) {
    return set.relookupOrAdd(p, l, std::forward<TInput>(t));
  }

  template <typename TInput>
  bool put(TInput&& t) {
    return set.put(std::forward<TInput>(t));
  }

  template <typename TInput>
  bool putNew(TInput&& t) {
    return set.putNew(std::forward<TInput>(t));
  }

  template <typename TInput>
  bool putNew(const Lookup& l, TInput&& t) {
    return set.putNew(l, std::forward<TInput>(t));
  }
} JS_HAZ_NON_GC_POINTER;
519 } // namespace js
521 #endif // js_SweepingAPI_h