1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
10 #include "mozilla/Maybe.h"
12 #include "js/GCPolicyAPI.h"
13 #include "js/HashTable.h"
14 #include "js/RootingAPI.h"
15 #include "js/SweepingAPI.h"
16 #include "js/TypeDecls.h"
22 // Define a reasonable default GC policy for GC-aware Maps.
23 template <typename Key
, typename Value
>
24 struct DefaultMapEntryGCPolicy
{
25 static bool traceWeak(JSTracer
* trc
, Key
* key
, Value
* value
) {
26 return GCPolicy
<Key
>::traceWeak(trc
, key
) &&
27 GCPolicy
<Value
>::traceWeak(trc
, value
);
29 static bool needsSweep(JSTracer
* trc
, const Key
* key
, const Value
* value
) {
30 // This is like a const version of the |traceWeak| method. It has the sense
31 // of the return value reversed and does not mutate keys/values. Used during
32 // incremental sweeping by the WeakCache specializations for maps and sets.
33 return GCPolicy
<Key
>::needsSweep(trc
, key
) ||
34 GCPolicy
<Value
>::needsSweep(trc
, value
);
38 // A GCHashMap is a GC-aware HashMap, meaning that it has additional trace
39 // methods that know how to visit all keys and values in the table. HashMaps
40 // that contain GC pointers will generally want to use this GCHashMap
41 // specialization instead of HashMap, because this conveniently supports tracing
42 // keys and values, and cleaning up weak entries.
44 // GCHashMap::trace applies GCPolicy<T>::trace to each entry's key and value.
45 // Most types of GC pointers already have appropriate specializations of
46 // GCPolicy, so they should just work as keys and values. Any struct type with a
47 // default constructor and trace function should work as well. If you need to
48 // define your own GCPolicy specialization, generic helpers can be found in
49 // js/public/TracingAPI.h.
51 // The MapEntryGCPolicy template parameter controls how the table drops entries
52 // when edges are weakly held. GCHashMap::traceWeak applies the
53 // MapEntryGCPolicy's traceWeak method to each table entry; if it returns true,
54 // the entry is dropped. The default MapEntryGCPolicy drops the entry if either
55 // the key or value is about to be finalized, according to its
56 // GCPolicy<T>::traceWeak method. (This default is almost always fine: it's hard
57 // to imagine keeping such an entry around anyway.)
59 // Note that this HashMap only knows *how* to trace, but it does not itself
60 // cause tracing to be invoked. For tracing, it must be used as
61 // Rooted<GCHashMap> or PersistentRooted<GCHashMap>, or barriered and traced
63 template <typename Key
, typename Value
,
64 typename HashPolicy
= js::DefaultHasher
<Key
>,
65 typename AllocPolicy
= js::TempAllocPolicy
,
66 typename MapEntryGCPolicy
= DefaultMapEntryGCPolicy
<Key
, Value
>>
67 class GCHashMap
: public js::HashMap
<Key
, Value
, HashPolicy
, AllocPolicy
> {
68 using Base
= js::HashMap
<Key
, Value
, HashPolicy
, AllocPolicy
>;
71 using EntryGCPolicy
= MapEntryGCPolicy
;
73 explicit GCHashMap() : Base(AllocPolicy()) {}
74 explicit GCHashMap(AllocPolicy a
) : Base(std::move(a
)) {}
75 explicit GCHashMap(size_t length
) : Base(length
) {}
76 GCHashMap(AllocPolicy a
, size_t length
) : Base(std::move(a
), length
) {}
78 void trace(JSTracer
* trc
) {
79 for (typename
Base::Enum
e(*this); !e
.empty(); e
.popFront()) {
80 GCPolicy
<Value
>::trace(trc
, &e
.front().value(), "hashmap value");
81 GCPolicy
<Key
>::trace(trc
, &e
.front().mutableKey(), "hashmap key");
85 bool traceWeak(JSTracer
* trc
) {
86 typename
Base::Enum
e(*this);
87 traceWeakEntries(trc
, e
);
88 return !this->empty();
91 void traceWeakEntries(JSTracer
* trc
, typename
Base::Enum
& e
) {
92 for (typename
Base::Enum
e(*this); !e
.empty(); e
.popFront()) {
93 if (!MapEntryGCPolicy::traceWeak(trc
, &e
.front().mutableKey(),
94 &e
.front().value())) {
100 bool needsSweep(JSTracer
* trc
) const {
101 for (auto r
= this->all(); !r
.empty(); r
.popFront()) {
102 if (MapEntryGCPolicy::needsSweep(trc
, &r
.front().key(),
103 &r
.front().value())) {
110 // GCHashMap is movable
111 GCHashMap(GCHashMap
&& rhs
) : Base(std::move(rhs
)) {}
112 void operator=(GCHashMap
&& rhs
) {
113 MOZ_ASSERT(this != &rhs
, "self-move assignment is prohibited");
114 Base::operator=(std::move(rhs
));
118 // GCHashMap is not copyable or assignable
119 GCHashMap(const GCHashMap
& hm
) = delete;
120 GCHashMap
& operator=(const GCHashMap
& hm
) = delete;
121 } MOZ_INHERIT_TYPE_ANNOTATIONS_FROM_TEMPLATE_ARGS
;
127 // HashMap that supports rekeying.
129 // If your keys are pointers to something like JSObject that can be tenured or
130 // compacted, prefer to use GCHashMap with StableCellHasher, which takes
131 // advantage of the Zone's stable id table to make rekeying unnecessary.
132 template <typename Key
, typename Value
,
133 typename HashPolicy
= DefaultHasher
<Key
>,
134 typename AllocPolicy
= TempAllocPolicy
,
135 typename MapEntryGCPolicy
= JS::DefaultMapEntryGCPolicy
<Key
, Value
>>
136 class GCRekeyableHashMap
: public JS::GCHashMap
<Key
, Value
, HashPolicy
,
137 AllocPolicy
, MapEntryGCPolicy
> {
138 using Base
= JS::GCHashMap
<Key
, Value
, HashPolicy
, AllocPolicy
>;
141 explicit GCRekeyableHashMap(AllocPolicy a
= AllocPolicy())
142 : Base(std::move(a
)) {}
143 explicit GCRekeyableHashMap(size_t length
) : Base(length
) {}
144 GCRekeyableHashMap(AllocPolicy a
, size_t length
)
145 : Base(std::move(a
), length
) {}
147 bool traceWeak(JSTracer
* trc
) {
148 for (typename
Base::Enum
e(*this); !e
.empty(); e
.popFront()) {
149 Key
key(e
.front().key());
150 if (!MapEntryGCPolicy::traceWeak(trc
, &key
, &e
.front().value())) {
152 } else if (!HashPolicy::match(key
, e
.front().key())) {
156 return !this->empty();
159 // GCRekeyableHashMap is movable
160 GCRekeyableHashMap(GCRekeyableHashMap
&& rhs
) : Base(std::move(rhs
)) {}
161 void operator=(GCRekeyableHashMap
&& rhs
) {
162 MOZ_ASSERT(this != &rhs
, "self-move assignment is prohibited");
163 Base::operator=(std::move(rhs
));
165 } MOZ_INHERIT_TYPE_ANNOTATIONS_FROM_TEMPLATE_ARGS
;
167 template <typename Wrapper
, typename
... Args
>
168 class WrappedPtrOperations
<JS::GCHashMap
<Args
...>, Wrapper
> {
169 using Map
= JS::GCHashMap
<Args
...>;
170 using Lookup
= typename
Map::Lookup
;
172 const Map
& map() const { return static_cast<const Wrapper
*>(this)->get(); }
175 using AddPtr
= typename
Map::AddPtr
;
176 using Ptr
= typename
Map::Ptr
;
177 using Range
= typename
Map::Range
;
179 Ptr
lookup(const Lookup
& l
) const { return map().lookup(l
); }
180 Range
all() const { return map().all(); }
181 bool empty() const { return map().empty(); }
182 uint32_t count() const { return map().count(); }
183 size_t capacity() const { return map().capacity(); }
184 bool has(const Lookup
& l
) const { return map().lookup(l
).found(); }
185 size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf
) const {
186 return map().sizeOfExcludingThis(mallocSizeOf
);
188 size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf
) const {
189 return mallocSizeOf(this) + map().sizeOfExcludingThis(mallocSizeOf
);
193 template <typename Wrapper
, typename
... Args
>
194 class MutableWrappedPtrOperations
<JS::GCHashMap
<Args
...>, Wrapper
>
195 : public WrappedPtrOperations
<JS::GCHashMap
<Args
...>, Wrapper
> {
196 using Map
= JS::GCHashMap
<Args
...>;
197 using Lookup
= typename
Map::Lookup
;
199 Map
& map() { return static_cast<Wrapper
*>(this)->get(); }
202 using AddPtr
= typename
Map::AddPtr
;
203 struct Enum
: public Map::Enum
{
204 explicit Enum(Wrapper
& o
) : Map::Enum(o
.map()) {}
206 using Ptr
= typename
Map::Ptr
;
207 using Range
= typename
Map::Range
;
209 void clear() { map().clear(); }
210 void clearAndCompact() { map().clearAndCompact(); }
211 void remove(Ptr p
) { map().remove(p
); }
212 AddPtr
lookupForAdd(const Lookup
& l
) { return map().lookupForAdd(l
); }
214 template <typename KeyInput
, typename ValueInput
>
215 bool add(AddPtr
& p
, KeyInput
&& k
, ValueInput
&& v
) {
216 return map().add(p
, std::forward
<KeyInput
>(k
), std::forward
<ValueInput
>(v
));
219 template <typename KeyInput
>
220 bool add(AddPtr
& p
, KeyInput
&& k
) {
221 return map().add(p
, std::forward
<KeyInput
>(k
), Map::Value());
224 template <typename KeyInput
, typename ValueInput
>
225 bool relookupOrAdd(AddPtr
& p
, KeyInput
&& k
, ValueInput
&& v
) {
226 return map().relookupOrAdd(p
, k
, std::forward
<KeyInput
>(k
),
227 std::forward
<ValueInput
>(v
));
230 template <typename KeyInput
, typename ValueInput
>
231 bool put(KeyInput
&& k
, ValueInput
&& v
) {
232 return map().put(std::forward
<KeyInput
>(k
), std::forward
<ValueInput
>(v
));
235 template <typename KeyInput
, typename ValueInput
>
236 bool putNew(KeyInput
&& k
, ValueInput
&& v
) {
237 return map().putNew(std::forward
<KeyInput
>(k
), std::forward
<ValueInput
>(v
));
// A GCHashSet is a HashSet with an additional trace method that knows
// how to visit all set elements. HashSets that contain GC pointers that must
// be traced to be kept alive will generally want to use this GCHashSet
// specialization in lieu of HashSet.
249 // Most types of GC pointers can be traced with no extra infrastructure. For
250 // structs and non-gc-pointer members, ensure that there is a specialization of
251 // GCPolicy<T> with an appropriate trace method available to handle the custom
252 // type. Generic helpers can be found in js/public/TracingAPI.h.
254 // Note that although this HashSet's trace will deal correctly with moved
255 // elements, it does not itself know when to barrier or trace elements. To
256 // function properly it must either be used with Rooted or barriered and traced
258 template <typename T
, typename HashPolicy
= js::DefaultHasher
<T
>,
259 typename AllocPolicy
= js::TempAllocPolicy
>
260 class GCHashSet
: public js::HashSet
<T
, HashPolicy
, AllocPolicy
> {
261 using Base
= js::HashSet
<T
, HashPolicy
, AllocPolicy
>;
264 explicit GCHashSet(AllocPolicy a
= AllocPolicy()) : Base(std::move(a
)) {}
265 explicit GCHashSet(size_t length
) : Base(length
) {}
266 GCHashSet(AllocPolicy a
, size_t length
) : Base(std::move(a
), length
) {}
268 void trace(JSTracer
* trc
) {
269 for (typename
Base::Enum
e(*this); !e
.empty(); e
.popFront()) {
270 GCPolicy
<T
>::trace(trc
, &e
.mutableFront(), "hashset element");
274 bool traceWeak(JSTracer
* trc
) {
275 typename
Base::Enum
e(*this);
276 traceWeakEntries(trc
, e
);
277 return !this->empty();
280 void traceWeakEntries(JSTracer
* trc
, typename
Base::Enum
& e
) {
281 for (; !e
.empty(); e
.popFront()) {
282 if (!GCPolicy
<T
>::traceWeak(trc
, &e
.mutableFront())) {
288 bool needsSweep(JSTracer
* trc
) const {
289 for (auto r
= this->all(); !r
.empty(); r
.popFront()) {
290 if (GCPolicy
<T
>::needsSweep(trc
, &r
.front())) {
297 // GCHashSet is movable
298 GCHashSet(GCHashSet
&& rhs
) : Base(std::move(rhs
)) {}
299 void operator=(GCHashSet
&& rhs
) {
300 MOZ_ASSERT(this != &rhs
, "self-move assignment is prohibited");
301 Base::operator=(std::move(rhs
));
305 // GCHashSet is not copyable or assignable
306 GCHashSet(const GCHashSet
& hs
) = delete;
307 GCHashSet
& operator=(const GCHashSet
& hs
) = delete;
308 } MOZ_INHERIT_TYPE_ANNOTATIONS_FROM_TEMPLATE_ARGS
;
314 template <typename Wrapper
, typename
... Args
>
315 class WrappedPtrOperations
<JS::GCHashSet
<Args
...>, Wrapper
> {
316 using Set
= JS::GCHashSet
<Args
...>;
318 const Set
& set() const { return static_cast<const Wrapper
*>(this)->get(); }
321 using Lookup
= typename
Set::Lookup
;
322 using AddPtr
= typename
Set::AddPtr
;
323 using Entry
= typename
Set::Entry
;
324 using Ptr
= typename
Set::Ptr
;
325 using Range
= typename
Set::Range
;
327 Ptr
lookup(const Lookup
& l
) const { return set().lookup(l
); }
328 Range
all() const { return set().all(); }
329 bool empty() const { return set().empty(); }
330 uint32_t count() const { return set().count(); }
331 size_t capacity() const { return set().capacity(); }
332 bool has(const Lookup
& l
) const { return set().lookup(l
).found(); }
333 size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf
) const {
334 return set().sizeOfExcludingThis(mallocSizeOf
);
336 size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf
) const {
337 return mallocSizeOf(this) + set().sizeOfExcludingThis(mallocSizeOf
);
341 template <typename Wrapper
, typename
... Args
>
342 class MutableWrappedPtrOperations
<JS::GCHashSet
<Args
...>, Wrapper
>
343 : public WrappedPtrOperations
<JS::GCHashSet
<Args
...>, Wrapper
> {
344 using Set
= JS::GCHashSet
<Args
...>;
345 using Lookup
= typename
Set::Lookup
;
347 Set
& set() { return static_cast<Wrapper
*>(this)->get(); }
350 using AddPtr
= typename
Set::AddPtr
;
351 using Entry
= typename
Set::Entry
;
352 struct Enum
: public Set::Enum
{
353 explicit Enum(Wrapper
& o
) : Set::Enum(o
.set()) {}
355 using Ptr
= typename
Set::Ptr
;
356 using Range
= typename
Set::Range
;
358 void clear() { set().clear(); }
359 void clearAndCompact() { set().clearAndCompact(); }
360 [[nodiscard
]] bool reserve(uint32_t len
) { return set().reserve(len
); }
361 void remove(Ptr p
) { set().remove(p
); }
362 void remove(const Lookup
& l
) { set().remove(l
); }
363 AddPtr
lookupForAdd(const Lookup
& l
) { return set().lookupForAdd(l
); }
365 template <typename TInput
>
366 void replaceKey(Ptr p
, const Lookup
& l
, TInput
&& newValue
) {
367 set().replaceKey(p
, l
, std::forward
<TInput
>(newValue
));
370 template <typename TInput
>
371 bool add(AddPtr
& p
, TInput
&& t
) {
372 return set().add(p
, std::forward
<TInput
>(t
));
375 template <typename TInput
>
376 bool relookupOrAdd(AddPtr
& p
, const Lookup
& l
, TInput
&& t
) {
377 return set().relookupOrAdd(p
, l
, std::forward
<TInput
>(t
));
380 template <typename TInput
>
381 bool put(TInput
&& t
) {
382 return set().put(std::forward
<TInput
>(t
));
385 template <typename TInput
>
386 bool putNew(TInput
&& t
) {
387 return set().putNew(std::forward
<TInput
>(t
));
390 template <typename TInput
>
391 bool putNew(const Lookup
& l
, TInput
&& t
) {
392 return set().putNew(l
, std::forward
<TInput
>(t
));
400 // Specialize WeakCache for GCHashMap to provide a barriered map that does not
401 // need to be swept immediately.
402 template <typename Key
, typename Value
, typename HashPolicy
,
403 typename AllocPolicy
, typename MapEntryGCPolicy
>
405 GCHashMap
<Key
, Value
, HashPolicy
, AllocPolicy
, MapEntryGCPolicy
>>
406 final
: protected detail::WeakCacheBase
{
407 using Map
= GCHashMap
<Key
, Value
, HashPolicy
, AllocPolicy
, MapEntryGCPolicy
>;
408 using Self
= WeakCache
<Map
>;
411 JSTracer
* barrierTracer
= nullptr;
414 template <typename
... Args
>
415 explicit WeakCache(Zone
* zone
, Args
&&... args
)
416 : WeakCacheBase(zone
), map(std::forward
<Args
>(args
)...) {}
417 template <typename
... Args
>
418 explicit WeakCache(JSRuntime
* rt
, Args
&&... args
)
419 : WeakCacheBase(rt
), map(std::forward
<Args
>(args
)...) {}
420 ~WeakCache() { MOZ_ASSERT(!barrierTracer
); }
422 bool empty() override
{ return map
.empty(); }
424 size_t traceWeak(JSTracer
* trc
, NeedsLock needsLock
) override
{
425 size_t steps
= map
.count();
427 // Create an Enum and sweep the table entries.
428 mozilla::Maybe
<typename
Map::Enum
> e
;
430 map
.traceWeakEntries(trc
, e
.ref());
432 // Potentially take a lock while the Enum's destructor is called as this can
433 // rehash/resize the table and access the store buffer.
434 mozilla::Maybe
<js::gc::AutoLockStoreBuffer
> lock
;
436 lock
.emplace(trc
->runtime());
443 bool setIncrementalBarrierTracer(JSTracer
* trc
) override
{
444 MOZ_ASSERT(bool(barrierTracer
) != bool(trc
));
449 bool needsIncrementalBarrier() const override
{ return barrierTracer
; }
452 using Entry
= typename
Map::Entry
;
454 static bool entryNeedsSweep(JSTracer
* barrierTracer
, const Entry
& entry
) {
455 return MapEntryGCPolicy::needsSweep(barrierTracer
, &entry
.key(),
460 using Lookup
= typename
Map::Lookup
;
461 using Ptr
= typename
Map::Ptr
;
462 using AddPtr
= typename
Map::AddPtr
;
464 // Iterator over the whole collection.
466 explicit Range(Self
& self
) : cache(self
), range(self
.map
.all()) {
471 bool empty() const { return range
.empty(); }
472 const Entry
& front() const { return range
.front(); }
481 typename
Map::Range range
;
484 if (JSTracer
* trc
= cache
.barrierTracer
) {
485 while (!empty() && entryNeedsSweep(trc
, front())) {
492 struct Enum
: public Map::Enum
{
493 explicit Enum(Self
& cache
) : Map::Enum(cache
.map
) {
494 // This operation is not allowed while barriers are in place as we
495 // may also need to enumerate the set for sweeping.
496 MOZ_ASSERT(!cache
.barrierTracer
);
500 Ptr
lookup(const Lookup
& l
) const {
501 Ptr ptr
= map
.lookup(l
);
502 if (barrierTracer
&& ptr
&& entryNeedsSweep(barrierTracer
, *ptr
)) {
503 const_cast<Map
&>(map
).remove(ptr
);
509 AddPtr
lookupForAdd(const Lookup
& l
) {
510 AddPtr ptr
= map
.lookupForAdd(l
);
511 if (barrierTracer
&& ptr
&& entryNeedsSweep(barrierTracer
, *ptr
)) {
512 const_cast<Map
&>(map
).remove(ptr
);
513 return map
.lookupForAdd(l
);
518 Range
all() const { return Range(*const_cast<Self
*>(this)); }
521 // This operation is not currently allowed while barriers are in place
522 // as it would require iterating the map and the caller expects a
523 // constant time operation.
524 MOZ_ASSERT(!barrierTracer
);
528 uint32_t count() const {
529 // This operation is not currently allowed while barriers are in place
530 // as it would require iterating the set and the caller expects a
531 // constant time operation.
532 MOZ_ASSERT(!barrierTracer
);
536 size_t capacity() const { return map
.capacity(); }
538 bool has(const Lookup
& l
) const { return lookup(l
).found(); }
540 size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf
) const {
541 return map
.sizeOfExcludingThis(mallocSizeOf
);
543 size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf
) const {
544 return mallocSizeOf(this) + map
.shallowSizeOfExcludingThis(mallocSizeOf
);
548 // This operation is not currently allowed while barriers are in place
549 // since it doesn't make sense to clear a cache while it is being swept.
550 MOZ_ASSERT(!barrierTracer
);
554 void clearAndCompact() {
555 // This operation is not currently allowed while barriers are in place
556 // since it doesn't make sense to clear a cache while it is being swept.
557 MOZ_ASSERT(!barrierTracer
);
558 map
.clearAndCompact();
562 // This currently supports removing entries during incremental
563 // sweeping. If we allow these tables to be swept incrementally this may
564 // no longer be possible.
568 void remove(const Lookup
& l
) {
575 template <typename KeyInput
, typename ValueInput
>
576 bool add(AddPtr
& p
, KeyInput
&& k
, ValueInput
&& v
) {
577 return map
.add(p
, std::forward
<KeyInput
>(k
), std::forward
<ValueInput
>(v
));
580 template <typename KeyInput
, typename ValueInput
>
581 bool relookupOrAdd(AddPtr
& p
, KeyInput
&& k
, ValueInput
&& v
) {
582 return map
.relookupOrAdd(p
, std::forward
<KeyInput
>(k
),
583 std::forward
<ValueInput
>(v
));
586 template <typename KeyInput
, typename ValueInput
>
587 bool put(KeyInput
&& k
, ValueInput
&& v
) {
588 return map
.put(std::forward
<KeyInput
>(k
), std::forward
<ValueInput
>(v
));
591 template <typename KeyInput
, typename ValueInput
>
592 bool putNew(KeyInput
&& k
, ValueInput
&& v
) {
593 return map
.putNew(std::forward
<KeyInput
>(k
), std::forward
<ValueInput
>(v
));
595 } JS_HAZ_NON_GC_POINTER
;
597 // Specialize WeakCache for GCHashSet to provide a barriered set that does not
598 // need to be swept immediately.
599 template <typename T
, typename HashPolicy
, typename AllocPolicy
>
600 class WeakCache
<GCHashSet
<T
, HashPolicy
, AllocPolicy
>> final
601 : protected detail::WeakCacheBase
{
602 using Set
= GCHashSet
<T
, HashPolicy
, AllocPolicy
>;
603 using Self
= WeakCache
<Set
>;
606 JSTracer
* barrierTracer
= nullptr;
609 using Entry
= typename
Set::Entry
;
611 template <typename
... Args
>
612 explicit WeakCache(Zone
* zone
, Args
&&... args
)
613 : WeakCacheBase(zone
), set(std::forward
<Args
>(args
)...) {}
614 template <typename
... Args
>
615 explicit WeakCache(JSRuntime
* rt
, Args
&&... args
)
616 : WeakCacheBase(rt
), set(std::forward
<Args
>(args
)...) {}
618 size_t traceWeak(JSTracer
* trc
, NeedsLock needsLock
) override
{
619 size_t steps
= set
.count();
621 // Create an Enum and sweep the table entries. It's not necessary to take
622 // the store buffer lock yet.
623 mozilla::Maybe
<typename
Set::Enum
> e
;
625 set
.traceWeakEntries(trc
, e
.ref());
627 // Destroy the Enum, potentially rehashing or resizing the table. Since this
628 // can access the store buffer, we need to take a lock for this if we're
629 // called off main thread.
630 mozilla::Maybe
<js::gc::AutoLockStoreBuffer
> lock
;
632 lock
.emplace(trc
->runtime());
639 bool empty() override
{ return set
.empty(); }
641 bool setIncrementalBarrierTracer(JSTracer
* trc
) override
{
642 MOZ_ASSERT(bool(barrierTracer
) != bool(trc
));
647 bool needsIncrementalBarrier() const override
{ return barrierTracer
; }
649 // Steal the contents of this weak cache.
650 Set
stealContents() {
651 // This operation is not currently allowed while barriers are in place
652 // since it doesn't make sense to steal the contents while we are
654 MOZ_ASSERT(!barrierTracer
);
656 auto rval
= std::move(set
);
657 // Ensure set is in a specified (empty) state after the move
660 // Return set; no move to avoid invalidating NRVO.
665 static bool entryNeedsSweep(JSTracer
* barrierTracer
, const Entry
& prior
) {
667 bool needsSweep
= !GCPolicy
<T
>::traceWeak(barrierTracer
, &entry
);
668 MOZ_ASSERT_IF(!needsSweep
, prior
== entry
); // We shouldn't update here.
673 using Lookup
= typename
Set::Lookup
;
674 using Ptr
= typename
Set::Ptr
;
675 using AddPtr
= typename
Set::AddPtr
;
677 // Iterator over the whole collection.
679 explicit Range(Self
& self
) : cache(self
), range(self
.set
.all()) {
684 bool empty() const { return range
.empty(); }
685 const Entry
& front() const { return range
.front(); }
694 typename
Set::Range range
;
697 if (JSTracer
* trc
= cache
.barrierTracer
) {
698 while (!empty() && entryNeedsSweep(trc
, front())) {
705 struct Enum
: public Set::Enum
{
706 explicit Enum(Self
& cache
) : Set::Enum(cache
.set
) {
707 // This operation is not allowed while barriers are in place as we
708 // may also need to enumerate the set for sweeping.
709 MOZ_ASSERT(!cache
.barrierTracer
);
713 Ptr
lookup(const Lookup
& l
) const {
714 Ptr ptr
= set
.lookup(l
);
715 if (barrierTracer
&& ptr
&& entryNeedsSweep(barrierTracer
, *ptr
)) {
716 const_cast<Set
&>(set
).remove(ptr
);
722 AddPtr
lookupForAdd(const Lookup
& l
) {
723 AddPtr ptr
= set
.lookupForAdd(l
);
724 if (barrierTracer
&& ptr
&& entryNeedsSweep(barrierTracer
, *ptr
)) {
725 const_cast<Set
&>(set
).remove(ptr
);
726 return set
.lookupForAdd(l
);
731 Range
all() const { return Range(*const_cast<Self
*>(this)); }
734 // This operation is not currently allowed while barriers are in place
735 // as it would require iterating the set and the caller expects a
736 // constant time operation.
737 MOZ_ASSERT(!barrierTracer
);
741 uint32_t count() const {
742 // This operation is not currently allowed while barriers are in place
743 // as it would require iterating the set and the caller expects a
744 // constant time operation.
745 MOZ_ASSERT(!barrierTracer
);
749 size_t capacity() const { return set
.capacity(); }
751 bool has(const Lookup
& l
) const { return lookup(l
).found(); }
753 size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf
) const {
754 return set
.shallowSizeOfExcludingThis(mallocSizeOf
);
756 size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf
) const {
757 return mallocSizeOf(this) + set
.shallowSizeOfExcludingThis(mallocSizeOf
);
761 // This operation is not currently allowed while barriers are in place
762 // since it doesn't make sense to clear a cache while it is being swept.
763 MOZ_ASSERT(!barrierTracer
);
767 void clearAndCompact() {
768 // This operation is not currently allowed while barriers are in place
769 // since it doesn't make sense to clear a cache while it is being swept.
770 MOZ_ASSERT(!barrierTracer
);
771 set
.clearAndCompact();
775 // This currently supports removing entries during incremental
776 // sweeping. If we allow these tables to be swept incrementally this may
777 // no longer be possible.
781 void remove(const Lookup
& l
) {
788 template <typename TInput
>
789 void replaceKey(Ptr p
, const Lookup
& l
, TInput
&& newValue
) {
790 set
.replaceKey(p
, l
, std::forward
<TInput
>(newValue
));
793 template <typename TInput
>
794 bool add(AddPtr
& p
, TInput
&& t
) {
795 return set
.add(p
, std::forward
<TInput
>(t
));
798 template <typename TInput
>
799 bool relookupOrAdd(AddPtr
& p
, const Lookup
& l
, TInput
&& t
) {
800 return set
.relookupOrAdd(p
, l
, std::forward
<TInput
>(t
));
803 template <typename TInput
>
804 bool put(TInput
&& t
) {
805 return set
.put(std::forward
<TInput
>(t
));
808 template <typename TInput
>
809 bool putNew(TInput
&& t
) {
810 return set
.putNew(std::forward
<TInput
>(t
));
813 template <typename TInput
>
814 bool putNew(const Lookup
& l
, TInput
&& t
) {
815 return set
.putNew(l
, std::forward
<TInput
>(t
));
817 } JS_HAZ_NON_GC_POINTER
;
821 #endif /* GCHashTable_h */