/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef GCHashTable_h
#define GCHashTable_h

#include "mozilla/Maybe.h"

#include "js/GCPolicyAPI.h"
#include "js/HashTable.h"
#include "js/RootingAPI.h"
#include "js/SweepingAPI.h"
#include "js/TypeDecls.h"

class JSTracer;

namespace JS {

// Define a reasonable default GC policy for GC-aware Maps.
template <typename Key, typename Value>
struct DefaultMapEntryGCPolicy {
  static bool traceWeak(JSTracer* trc, Key* key, Value* value) {
    return GCPolicy<Key>::traceWeak(trc, key) &&
           GCPolicy<Value>::traceWeak(trc, value);
  }
};

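// For tables whose values are plain data rather than GC things, entries often
// need to be dropped only when the key dies. A minimal sketch of such a policy
// (illustrative only; KeyOnlyMapEntryGCPolicy is not part of this header):
//
//   template <typename Key, typename Value>
//   struct KeyOnlyMapEntryGCPolicy {
//     static bool traceWeak(JSTracer* trc, Key* key, Value* /* value */) {
//       // Keep the entry iff the key is still alive; ignore the value.
//       return GCPolicy<Key>::traceWeak(trc, key);
//     }
//   };
//
// A policy like this is passed as the MapEntryGCPolicy template parameter of
// GCHashMap, described below.
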
// A GCHashMap is a GC-aware HashMap, meaning that it has additional trace
// methods that know how to visit all keys and values in the table. HashMaps
// that contain GC pointers will generally want to use this GCHashMap
// specialization instead of HashMap, because this conveniently supports
// tracing keys and values, and cleaning up weak entries.
//
// GCHashMap::trace applies GCPolicy<T>::trace to each entry's key and value.
// Most types of GC pointers already have appropriate specializations of
// GCPolicy, so they should just work as keys and values. Any struct type with
// a default constructor and trace function should work as well. If you need to
// define your own GCPolicy specialization, generic helpers can be found in
// js/public/TracingAPI.h.
//
// The MapEntryGCPolicy template parameter controls how the table drops entries
// when edges are weakly held. GCHashMap::traceWeak applies the
// MapEntryGCPolicy's traceWeak method to each table entry; if it returns
// false, the entry is dropped. The default MapEntryGCPolicy drops the entry if
// either the key or value is about to be finalized, according to its
// GCPolicy<T>::traceWeak method. (This default is almost always fine: it's
// hard to imagine keeping such an entry around anyway.)
//
// Note that this HashMap only knows *how* to trace, but it does not itself
// cause tracing to be invoked. For tracing, it must be used as
// Rooted<GCHashMap> or PersistentRooted<GCHashMap>, or barriered and traced
// manually.
template <typename Key, typename Value,
          typename HashPolicy = js::DefaultHasher<Key>,
          typename AllocPolicy = js::TempAllocPolicy,
          typename MapEntryGCPolicy = DefaultMapEntryGCPolicy<Key, Value>>
class GCHashMap : public js::HashMap<Key, Value, HashPolicy, AllocPolicy> {
  using Base = js::HashMap<Key, Value, HashPolicy, AllocPolicy>;

 public:
  using EntryGCPolicy = MapEntryGCPolicy;

  explicit GCHashMap(AllocPolicy a = AllocPolicy()) : Base(std::move(a)) {}
  explicit GCHashMap(size_t length) : Base(length) {}
  GCHashMap(AllocPolicy a, size_t length) : Base(std::move(a), length) {}

  void trace(JSTracer* trc) {
    for (typename Base::Enum e(*this); !e.empty(); e.popFront()) {
      GCPolicy<Value>::trace(trc, &e.front().value(), "hashmap value");
      GCPolicy<Key>::trace(trc, &e.front().mutableKey(), "hashmap key");
    }
  }

  bool traceWeak(JSTracer* trc) {
    typename Base::Enum e(*this);
    traceWeakEntries(trc, e);
    return !this->empty();
  }

  void traceWeakEntries(JSTracer* trc, typename Base::Enum& e) {
    // Iterate the caller-supplied Enum rather than creating a new one, so the
    // caller can continue the enumeration (e.g. WeakCache::traceWeak below).
    for (; !e.empty(); e.popFront()) {
      if (!MapEntryGCPolicy::traceWeak(trc, &e.front().mutableKey(),
                                       &e.front().value())) {
        e.removeFront();
      }
    }
  }

  // GCHashMap is movable
  GCHashMap(GCHashMap&& rhs) : Base(std::move(rhs)) {}
  void operator=(GCHashMap&& rhs) {
    MOZ_ASSERT(this != &rhs, "self-move assignment is prohibited");
    Base::operator=(std::move(rhs));
  }

 private:
  // GCHashMap is not copyable or assignable
  GCHashMap(const GCHashMap& hm) = delete;
  GCHashMap& operator=(const GCHashMap& hm) = delete;
} MOZ_INHERIT_TYPE_ANNOTATIONS_FROM_TEMPLATE_ARGS;

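// Example usage (an illustrative sketch only; the key type, hasher and
// allocation policy below are assumptions chosen for brevity, not a
// prescribed pattern):
//
//   using ObjectCountMap =
//       JS::GCHashMap<JS::Heap<JSObject*>, uint32_t,
//                     js::StableCellHasher<JS::Heap<JSObject*>>,
//                     js::SystemAllocPolicy>;
//
//   JS::Rooted<ObjectCountMap> counts(cx);
//   if (!counts.put(obj, 1)) {
//     return false;  // put() reports allocation failure by returning false.
//   }
//   bool seen = counts.has(obj);
//
// The Rooted wrapper exposes the map's methods through the
// WrappedPtrOperations specializations defined later in this file.
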
} // namespace JS

namespace js {

// HashMap that supports rekeying.
//
// If your keys are pointers to something like JSObject that can be tenured or
// compacted, prefer to use GCHashMap with StableCellHasher, which takes
// advantage of the Zone's stable id table to make rekeying unnecessary.
template <typename Key, typename Value,
          typename HashPolicy = DefaultHasher<Key>,
          typename AllocPolicy = TempAllocPolicy,
          typename MapEntryGCPolicy = JS::DefaultMapEntryGCPolicy<Key, Value>>
class GCRekeyableHashMap : public JS::GCHashMap<Key, Value, HashPolicy,
                                                AllocPolicy, MapEntryGCPolicy> {
  using Base =
      JS::GCHashMap<Key, Value, HashPolicy, AllocPolicy, MapEntryGCPolicy>;

 public:
  explicit GCRekeyableHashMap(AllocPolicy a = AllocPolicy())
      : Base(std::move(a)) {}
  explicit GCRekeyableHashMap(size_t length) : Base(length) {}
  GCRekeyableHashMap(AllocPolicy a, size_t length)
      : Base(std::move(a), length) {}

  bool traceWeak(JSTracer* trc) {
    for (typename Base::Enum e(*this); !e.empty(); e.popFront()) {
      Key key(e.front().key());
      if (!MapEntryGCPolicy::traceWeak(trc, &key, &e.front().value())) {
        e.removeFront();
      } else if (!HashPolicy::match(key, e.front().key())) {
        e.rekeyFront(key);
      }
    }
    return !this->empty();
  }

  // GCRekeyableHashMap is movable
  GCRekeyableHashMap(GCRekeyableHashMap&& rhs) : Base(std::move(rhs)) {}
  void operator=(GCRekeyableHashMap&& rhs) {
    MOZ_ASSERT(this != &rhs, "self-move assignment is prohibited");
    Base::operator=(std::move(rhs));
  }
} MOZ_INHERIT_TYPE_ANNOTATIONS_FROM_TEMPLATE_ARGS;

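// Illustrative sketch (assumptions: raw JSObject* keys hashed by address with
// the default pointer hasher; not a prescribed pattern):
//
//   using AddressKeyedMap =
//       js::GCRekeyableHashMap<JSObject*, uint32_t,
//                              js::DefaultHasher<JSObject*>,
//                              js::SystemAllocPolicy>;
//
// traceWeak() above copies each key, lets the entry policy update the copy
// (for example when a compacting GC moves the object), and rekeys the entry
// whenever the updated key no longer matches the stored one.
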
template <typename Wrapper, typename... Args>
class WrappedPtrOperations<JS::GCHashMap<Args...>, Wrapper> {
  using Map = JS::GCHashMap<Args...>;
  using Lookup = typename Map::Lookup;

  const Map& map() const { return static_cast<const Wrapper*>(this)->get(); }

 public:
  using AddPtr = typename Map::AddPtr;
  using Ptr = typename Map::Ptr;
  using Range = typename Map::Range;

  Ptr lookup(const Lookup& l) const { return map().lookup(l); }
  Range all() const { return map().all(); }
  bool empty() const { return map().empty(); }
  uint32_t count() const { return map().count(); }
  size_t capacity() const { return map().capacity(); }
  bool has(const Lookup& l) const { return map().lookup(l).found(); }
  size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
    return map().sizeOfExcludingThis(mallocSizeOf);
  }
  size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
    return mallocSizeOf(this) + map().sizeOfExcludingThis(mallocSizeOf);
  }
};

template <typename Wrapper, typename... Args>
class MutableWrappedPtrOperations<JS::GCHashMap<Args...>, Wrapper>
    : public WrappedPtrOperations<JS::GCHashMap<Args...>, Wrapper> {
  using Map = JS::GCHashMap<Args...>;
  using Lookup = typename Map::Lookup;

  Map& map() { return static_cast<Wrapper*>(this)->get(); }

 public:
  using AddPtr = typename Map::AddPtr;
  struct Enum : public Map::Enum {
    explicit Enum(Wrapper& o) : Map::Enum(o.map()) {}
  };
  using Ptr = typename Map::Ptr;
  using Range = typename Map::Range;

  void clear() { map().clear(); }
  void clearAndCompact() { map().clearAndCompact(); }
  void remove(Ptr p) { map().remove(p); }
  AddPtr lookupForAdd(const Lookup& l) { return map().lookupForAdd(l); }

  template <typename KeyInput, typename ValueInput>
  bool add(AddPtr& p, KeyInput&& k, ValueInput&& v) {
    return map().add(p, std::forward<KeyInput>(k), std::forward<ValueInput>(v));
  }

  template <typename KeyInput>
  bool add(AddPtr& p, KeyInput&& k) {
    return map().add(p, std::forward<KeyInput>(k), Map::Value());
  }

  template <typename KeyInput, typename ValueInput>
  bool relookupOrAdd(AddPtr& p, KeyInput&& k, ValueInput&& v) {
    return map().relookupOrAdd(p, k, std::forward<KeyInput>(k),
                               std::forward<ValueInput>(v));
  }

  template <typename KeyInput, typename ValueInput>
  bool put(KeyInput&& k, ValueInput&& v) {
    return map().put(std::forward<KeyInput>(k), std::forward<ValueInput>(v));
  }

  template <typename KeyInput, typename ValueInput>
  bool putNew(KeyInput&& k, ValueInput&& v) {
    return map().putNew(std::forward<KeyInput>(k), std::forward<ValueInput>(v));
  }
};

} // namespace js

namespace JS {

// A GCHashSet is a HashSet with an additional trace method that knows how to
// visit all set elements. HashSets that contain GC pointers that must be
// traced to be kept alive will generally want to use this GCHashSet
// specialization in lieu of HashSet.
//
// Most types of GC pointers can be traced with no extra infrastructure. For
// structs and non-gc-pointer members, ensure that there is a specialization of
// GCPolicy<T> with an appropriate trace method available to handle the custom
// type. Generic helpers can be found in js/public/TracingAPI.h.
//
// Note that although this HashSet's trace will deal correctly with moved
// elements, it does not itself know when to barrier or trace elements. To
// function properly it must either be used with Rooted or barriered and traced
// manually.
template <typename T, typename HashPolicy = js::DefaultHasher<T>,
          typename AllocPolicy = js::TempAllocPolicy>
class GCHashSet : public js::HashSet<T, HashPolicy, AllocPolicy> {
  using Base = js::HashSet<T, HashPolicy, AllocPolicy>;

 public:
  explicit GCHashSet(AllocPolicy a = AllocPolicy()) : Base(std::move(a)) {}
  explicit GCHashSet(size_t length) : Base(length) {}
  GCHashSet(AllocPolicy a, size_t length) : Base(std::move(a), length) {}

  void trace(JSTracer* trc) {
    for (typename Base::Enum e(*this); !e.empty(); e.popFront()) {
      GCPolicy<T>::trace(trc, &e.mutableFront(), "hashset element");
    }
  }

  bool traceWeak(JSTracer* trc) {
    typename Base::Enum e(*this);
    traceWeakEntries(trc, e);
    return !this->empty();
  }

  void traceWeakEntries(JSTracer* trc, typename Base::Enum& e) {
    for (; !e.empty(); e.popFront()) {
      if (!GCPolicy<T>::traceWeak(trc, &e.mutableFront())) {
        e.removeFront();
      }
    }
  }

  // GCHashSet is movable
  GCHashSet(GCHashSet&& rhs) : Base(std::move(rhs)) {}
  void operator=(GCHashSet&& rhs) {
    MOZ_ASSERT(this != &rhs, "self-move assignment is prohibited");
    Base::operator=(std::move(rhs));
  }

 private:
  // GCHashSet is not copyable or assignable
  GCHashSet(const GCHashSet& hs) = delete;
  GCHashSet& operator=(const GCHashSet& hs) = delete;
} MOZ_INHERIT_TYPE_ANNOTATIONS_FROM_TEMPLATE_ARGS;

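// Example usage (an illustrative sketch only; the element type, hasher and
// allocation policy are assumptions chosen for brevity):
//
//   using ObjectSet =
//       JS::GCHashSet<JS::Heap<JSObject*>,
//                     js::StableCellHasher<JS::Heap<JSObject*>>,
//                     js::SystemAllocPolicy>;
//
//   JS::Rooted<ObjectSet> visited(cx);
//   if (!visited.put(obj)) {
//     return false;  // OOM.
//   }
//   bool present = visited.has(obj);
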
} // namespace JS

namespace js {

template <typename Wrapper, typename... Args>
class WrappedPtrOperations<JS::GCHashSet<Args...>, Wrapper> {
  using Set = JS::GCHashSet<Args...>;

  const Set& set() const { return static_cast<const Wrapper*>(this)->get(); }

 public:
  using Lookup = typename Set::Lookup;
  using AddPtr = typename Set::AddPtr;
  using Entry = typename Set::Entry;
  using Ptr = typename Set::Ptr;
  using Range = typename Set::Range;

  Ptr lookup(const Lookup& l) const { return set().lookup(l); }
  Range all() const { return set().all(); }
  bool empty() const { return set().empty(); }
  uint32_t count() const { return set().count(); }
  size_t capacity() const { return set().capacity(); }
  bool has(const Lookup& l) const { return set().lookup(l).found(); }
  size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
    return set().sizeOfExcludingThis(mallocSizeOf);
  }
  size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
    return mallocSizeOf(this) + set().sizeOfExcludingThis(mallocSizeOf);
  }
};

template <typename Wrapper, typename... Args>
class MutableWrappedPtrOperations<JS::GCHashSet<Args...>, Wrapper>
    : public WrappedPtrOperations<JS::GCHashSet<Args...>, Wrapper> {
  using Set = JS::GCHashSet<Args...>;
  using Lookup = typename Set::Lookup;

  Set& set() { return static_cast<Wrapper*>(this)->get(); }

 public:
  using AddPtr = typename Set::AddPtr;
  using Entry = typename Set::Entry;
  struct Enum : public Set::Enum {
    explicit Enum(Wrapper& o) : Set::Enum(o.set()) {}
  };
  using Ptr = typename Set::Ptr;
  using Range = typename Set::Range;

  void clear() { set().clear(); }
  void clearAndCompact() { set().clearAndCompact(); }
  [[nodiscard]] bool reserve(uint32_t len) { return set().reserve(len); }
  void remove(Ptr p) { set().remove(p); }
  void remove(const Lookup& l) { set().remove(l); }
  AddPtr lookupForAdd(const Lookup& l) { return set().lookupForAdd(l); }

  template <typename TInput>
  void replaceKey(Ptr p, const Lookup& l, TInput&& newValue) {
    set().replaceKey(p, l, std::forward<TInput>(newValue));
  }

  template <typename TInput>
  bool add(AddPtr& p, TInput&& t) {
    return set().add(p, std::forward<TInput>(t));
  }

  template <typename TInput>
  bool relookupOrAdd(AddPtr& p, const Lookup& l, TInput&& t) {
    return set().relookupOrAdd(p, l, std::forward<TInput>(t));
  }

  template <typename TInput>
  bool put(TInput&& t) {
    return set().put(std::forward<TInput>(t));
  }

  template <typename TInput>
  bool putNew(TInput&& t) {
    return set().putNew(std::forward<TInput>(t));
  }

  template <typename TInput>
  bool putNew(const Lookup& l, TInput&& t) {
    return set().putNew(l, std::forward<TInput>(t));
  }
};

} /* namespace js */

namespace JS {

// Specialize WeakCache for GCHashMap to provide a barriered map that does not
// need to be swept immediately.
template <typename Key, typename Value, typename HashPolicy,
          typename AllocPolicy, typename MapEntryGCPolicy>
class WeakCache<
    GCHashMap<Key, Value, HashPolicy, AllocPolicy, MapEntryGCPolicy>>
    final : protected detail::WeakCacheBase {
  using Map = GCHashMap<Key, Value, HashPolicy, AllocPolicy, MapEntryGCPolicy>;
  using Self = WeakCache<Map>;

  Map map;
  JSTracer* barrierTracer = nullptr;

 public:
  template <typename... Args>
  explicit WeakCache(Zone* zone, Args&&... args)
      : WeakCacheBase(zone), map(std::forward<Args>(args)...) {}
  template <typename... Args>
  explicit WeakCache(JSRuntime* rt, Args&&... args)
      : WeakCacheBase(rt), map(std::forward<Args>(args)...) {}
  ~WeakCache() { MOZ_ASSERT(!barrierTracer); }

  bool empty() override { return map.empty(); }

  size_t traceWeak(JSTracer* trc, js::gc::StoreBuffer* sbToLock) override {
    size_t steps = map.count();

    // Create an Enum and sweep the table entries.
    mozilla::Maybe<typename Map::Enum> e;
    e.emplace(map);
    map.traceWeakEntries(trc, e.ref());

    // Potentially take a lock while the Enum's destructor is called as this
    // can rehash/resize the table and access the store buffer.
    mozilla::Maybe<js::gc::AutoLockStoreBuffer> lock;
    if (sbToLock) {
      lock.emplace(sbToLock);
    }
    e.reset();

    return steps;
  }

  bool setIncrementalBarrierTracer(JSTracer* trc) override {
    MOZ_ASSERT(bool(barrierTracer) != bool(trc));
    barrierTracer = trc;
    return true;
  }

  bool needsIncrementalBarrier() const override { return barrierTracer; }

 private:
  using Entry = typename Map::Entry;

  static bool entryNeedsSweep(JSTracer* barrierTracer, const Entry& prior) {
    Key key(prior.key());
    Value value(prior.value());
    bool needsSweep = !MapEntryGCPolicy::traceWeak(barrierTracer, &key, &value);
    MOZ_ASSERT_IF(!needsSweep,
                  prior.key() == key);  // We shouldn't update here.
    MOZ_ASSERT_IF(!needsSweep,
                  prior.value() == value);  // We shouldn't update here.
    return needsSweep;
  }

 public:
  using Lookup = typename Map::Lookup;
  using Ptr = typename Map::Ptr;
  using AddPtr = typename Map::AddPtr;

  // Iterator over the whole collection.
  struct Range {
    explicit Range(Self& self) : cache(self), range(self.map.all()) {
      settle();
    }
    Range() = default;

    bool empty() const { return range.empty(); }
    const Entry& front() const { return range.front(); }

    void popFront() {
      range.popFront();
      settle();
    }

   private:
    Self& cache;
    typename Map::Range range;

    void settle() {
      if (JSTracer* trc = cache.barrierTracer) {
        while (!empty() && entryNeedsSweep(trc, front())) {
          popFront();
        }
      }
    }
  };

  struct Enum : public Map::Enum {
    explicit Enum(Self& cache) : Map::Enum(cache.map) {
      // This operation is not allowed while barriers are in place as we
      // may also need to enumerate the set for sweeping.
      MOZ_ASSERT(!cache.barrierTracer);
    }
  };

  Ptr lookup(const Lookup& l) const {
    Ptr ptr = map.lookup(l);
    if (barrierTracer && ptr && entryNeedsSweep(barrierTracer, *ptr)) {
      const_cast<Map&>(map).remove(ptr);
      return Ptr();
    }
    return ptr;
  }

  AddPtr lookupForAdd(const Lookup& l) {
    AddPtr ptr = map.lookupForAdd(l);
    if (barrierTracer && ptr && entryNeedsSweep(barrierTracer, *ptr)) {
      const_cast<Map&>(map).remove(ptr);
      return map.lookupForAdd(l);
    }
    return ptr;
  }

  Range all() const { return Range(*const_cast<Self*>(this)); }

  bool empty() const {
    // This operation is not currently allowed while barriers are in place
    // as it would require iterating the map and the caller expects a
    // constant time operation.
    MOZ_ASSERT(!barrierTracer);
    return map.empty();
  }

  uint32_t count() const {
    // This operation is not currently allowed while barriers are in place
    // as it would require iterating the set and the caller expects a
    // constant time operation.
    MOZ_ASSERT(!barrierTracer);
    return map.count();
  }

  size_t capacity() const { return map.capacity(); }

  bool has(const Lookup& l) const { return lookup(l).found(); }

  size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
    return map.sizeOfExcludingThis(mallocSizeOf);
  }
  size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
    return mallocSizeOf(this) + map.shallowSizeOfExcludingThis(mallocSizeOf);
  }

  void clear() {
    // This operation is not currently allowed while barriers are in place
    // since it doesn't make sense to clear a cache while it is being swept.
    MOZ_ASSERT(!barrierTracer);
    map.clear();
  }

  void clearAndCompact() {
    // This operation is not currently allowed while barriers are in place
    // since it doesn't make sense to clear a cache while it is being swept.
    MOZ_ASSERT(!barrierTracer);
    map.clearAndCompact();
  }

  void remove(Ptr p) {
    // This currently supports removing entries during incremental
    // sweeping. If we allow these tables to be swept incrementally this may
    // no longer be possible.
    map.remove(p);
  }

  void remove(const Lookup& l) {
    Ptr p = lookup(l);
    if (p) {
      remove(p);
    }
  }

  template <typename KeyInput, typename ValueInput>
  bool add(AddPtr& p, KeyInput&& k, ValueInput&& v) {
    return map.add(p, std::forward<KeyInput>(k), std::forward<ValueInput>(v));
  }

  template <typename KeyInput, typename ValueInput>
  bool relookupOrAdd(AddPtr& p, KeyInput&& k, ValueInput&& v) {
    return map.relookupOrAdd(p, std::forward<KeyInput>(k),
                             std::forward<ValueInput>(v));
  }

  template <typename KeyInput, typename ValueInput>
  bool put(KeyInput&& k, ValueInput&& v) {
    return map.put(std::forward<KeyInput>(k), std::forward<ValueInput>(v));
  }

  template <typename KeyInput, typename ValueInput>
  bool putNew(KeyInput&& k, ValueInput&& v) {
    return map.putNew(std::forward<KeyInput>(k), std::forward<ValueInput>(v));
  }
} JS_HAZ_NON_GC_POINTER;

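// Example usage (an illustrative sketch only; the map type and the zone handle
// are assumptions). A WeakCache is typically a member of some zone- or
// runtime-lifetime object rather than a Rooted local:
//
//   using ObjectValueMap =
//       JS::GCHashMap<JS::Heap<JSObject*>, JS::Heap<JS::Value>,
//                     js::StableCellHasher<JS::Heap<JSObject*>>,
//                     js::SystemAllocPolicy>;
//
//   JS::WeakCache<ObjectValueMap> cache(zone);
//   if (!cache.put(obj, val)) {
//     return false;  // OOM.
//   }
//
// The GC drops dead entries through traceWeak() above, so the owner does not
// need to sweep the cache eagerly.
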
// Specialize WeakCache for GCHashSet to provide a barriered set that does not
// need to be swept immediately.
template <typename T, typename HashPolicy, typename AllocPolicy>
class WeakCache<GCHashSet<T, HashPolicy, AllocPolicy>> final
    : protected detail::WeakCacheBase {
  using Set = GCHashSet<T, HashPolicy, AllocPolicy>;
  using Self = WeakCache<Set>;

  Set set;
  JSTracer* barrierTracer = nullptr;

 public:
  using Entry = typename Set::Entry;

  template <typename... Args>
  explicit WeakCache(Zone* zone, Args&&... args)
      : WeakCacheBase(zone), set(std::forward<Args>(args)...) {}
  template <typename... Args>
  explicit WeakCache(JSRuntime* rt, Args&&... args)
      : WeakCacheBase(rt), set(std::forward<Args>(args)...) {}

  size_t traceWeak(JSTracer* trc, js::gc::StoreBuffer* sbToLock) override {
    size_t steps = set.count();

    // Create an Enum and sweep the table entries. It's not necessary to take
    // the store buffer lock yet.
    mozilla::Maybe<typename Set::Enum> e;
    e.emplace(set);
    set.traceWeakEntries(trc, e.ref());

    // Destroy the Enum, potentially rehashing or resizing the table. Since
    // this can access the store buffer, we need to take a lock for this if
    // we're called off main thread.
    mozilla::Maybe<js::gc::AutoLockStoreBuffer> lock;
    if (sbToLock) {
      lock.emplace(sbToLock);
    }
    e.reset();

    return steps;
  }

  bool empty() override { return set.empty(); }

  bool setIncrementalBarrierTracer(JSTracer* trc) override {
    MOZ_ASSERT(bool(barrierTracer) != bool(trc));
    barrierTracer = trc;
    return true;
  }

  bool needsIncrementalBarrier() const override { return barrierTracer; }

 private:
  static bool entryNeedsSweep(JSTracer* barrierTracer, const Entry& prior) {
    Entry entry(prior);
    bool needsSweep = !GCPolicy<T>::traceWeak(barrierTracer, &entry);
    MOZ_ASSERT_IF(!needsSweep, prior == entry);  // We shouldn't update here.
    return needsSweep;
  }

 public:
  using Lookup = typename Set::Lookup;
  using Ptr = typename Set::Ptr;
  using AddPtr = typename Set::AddPtr;

  // Iterator over the whole collection.
  struct Range {
    explicit Range(Self& self) : cache(self), range(self.set.all()) {
      settle();
    }
    Range() = default;

    bool empty() const { return range.empty(); }
    const Entry& front() const { return range.front(); }

    void popFront() {
      range.popFront();
      settle();
    }

   private:
    Self& cache;
    typename Set::Range range;

    void settle() {
      if (JSTracer* trc = cache.barrierTracer) {
        while (!empty() && entryNeedsSweep(trc, front())) {
          popFront();
        }
      }
    }
  };

  struct Enum : public Set::Enum {
    explicit Enum(Self& cache) : Set::Enum(cache.set) {
      // This operation is not allowed while barriers are in place as we
      // may also need to enumerate the set for sweeping.
      MOZ_ASSERT(!cache.barrierTracer);
    }
  };

  Ptr lookup(const Lookup& l) const {
    Ptr ptr = set.lookup(l);
    if (barrierTracer && ptr && entryNeedsSweep(barrierTracer, *ptr)) {
      const_cast<Set&>(set).remove(ptr);
      return Ptr();
    }
    return ptr;
  }

  AddPtr lookupForAdd(const Lookup& l) {
    AddPtr ptr = set.lookupForAdd(l);
    if (barrierTracer && ptr && entryNeedsSweep(barrierTracer, *ptr)) {
      const_cast<Set&>(set).remove(ptr);
      return set.lookupForAdd(l);
    }
    return ptr;
  }

  Range all() const { return Range(*const_cast<Self*>(this)); }

  bool empty() const {
    // This operation is not currently allowed while barriers are in place
    // as it would require iterating the set and the caller expects a
    // constant time operation.
    MOZ_ASSERT(!barrierTracer);
    return set.empty();
  }

  uint32_t count() const {
    // This operation is not currently allowed while barriers are in place
    // as it would require iterating the set and the caller expects a
    // constant time operation.
    MOZ_ASSERT(!barrierTracer);
    return set.count();
  }

  size_t capacity() const { return set.capacity(); }

  bool has(const Lookup& l) const { return lookup(l).found(); }

  size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
    return set.shallowSizeOfExcludingThis(mallocSizeOf);
  }
  size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
    return mallocSizeOf(this) + set.shallowSizeOfExcludingThis(mallocSizeOf);
  }

  void clear() {
    // This operation is not currently allowed while barriers are in place
    // since it doesn't make sense to clear a cache while it is being swept.
    MOZ_ASSERT(!barrierTracer);
    set.clear();
  }

  void clearAndCompact() {
    // This operation is not currently allowed while barriers are in place
    // since it doesn't make sense to clear a cache while it is being swept.
    MOZ_ASSERT(!barrierTracer);
    set.clearAndCompact();
  }

  void remove(Ptr p) {
    // This currently supports removing entries during incremental
    // sweeping. If we allow these tables to be swept incrementally this may
    // no longer be possible.
    set.remove(p);
  }

  void remove(const Lookup& l) {
    Ptr p = lookup(l);
    if (p) {
      remove(p);
    }
  }

  template <typename TInput>
  void replaceKey(Ptr p, const Lookup& l, TInput&& newValue) {
    set.replaceKey(p, l, std::forward<TInput>(newValue));
  }

  template <typename TInput>
  bool add(AddPtr& p, TInput&& t) {
    return set.add(p, std::forward<TInput>(t));
  }

  template <typename TInput>
  bool relookupOrAdd(AddPtr& p, const Lookup& l, TInput&& t) {
    return set.relookupOrAdd(p, l, std::forward<TInput>(t));
  }

  template <typename TInput>
  bool put(TInput&& t) {
    return set.put(std::forward<TInput>(t));
  }

  template <typename TInput>
  bool putNew(TInput&& t) {
    return set.putNew(std::forward<TInput>(t));
  }

  template <typename TInput>
  bool putNew(const Lookup& l, TInput&& t) {
    return set.putNew(l, std::forward<TInput>(t));
  }
} JS_HAZ_NON_GC_POINTER;

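// Sketch of the barrier interaction described above (illustrative only): while
// an incremental barrier tracer is installed, reads filter out entries that
// are about to be swept, so callers never observe dying elements:
//
//   JS::WeakCache<ObjectSet> cache(zone);  // ObjectSet as sketched earlier.
//   ...
//   if (auto ptr = cache.lookup(obj)) {
//     // The entry is live; lookup() removes it instead if it was dying.
//   }
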
} // namespace JS

#endif /* GCHashTable_h */