Bug 1865597 - Add error checking when initializing parallel marking and disable on...
[gecko.git] / js / src / frontend / NameCollections.h
blob57806a69849f0e43e4f49b1f38f4f3d7f515cb68
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef frontend_NameCollections_h
#define frontend_NameCollections_h

#include "mozilla/Assertions.h"  // MOZ_ASSERT
#include "mozilla/Attributes.h"  // MOZ_IMPLICIT

#include <stddef.h>     // size_t
#include <stdint.h>     // uint32_t, uint64_t
#include <type_traits>  // std::{true_type, false_type, is_trivial_v, is_trivially_copyable_v, is_trivially_destructible_v}
#include <utility>      // std::forward

#include "ds/InlineTable.h"              // InlineMap, DefaultKeyPolicy
#include "frontend/NameAnalysisTypes.h"  // AtomVector, FunctionBoxVector
#include "frontend/ParserAtom.h"  // TaggedParserAtomIndex, TrivialTaggedParserAtomIndex
#include "frontend/TaggedParserAtomIndexHasher.h"  // TrivialTaggedParserAtomIndexHasher
#include "js/AllocPolicy.h"  // SystemAllocPolicy, ReportOutOfMemory
#include "js/Utility.h"      // js_new, js_delete
#include "js/Vector.h"       // Vector
26 namespace js {
28 namespace detail {
30 // For InlineMap<TrivialTaggedParserAtomIndex>.
31 // See DefaultKeyPolicy definition in InlineTable.h for more details.
32 template <>
33 class DefaultKeyPolicy<frontend::TrivialTaggedParserAtomIndex> {
34 public:
35 DefaultKeyPolicy() = delete;
36 DefaultKeyPolicy(const frontend::TrivialTaggedParserAtomIndex&) = delete;
38 static bool isTombstone(const frontend::TrivialTaggedParserAtomIndex& atom) {
39 return atom.isNull();
41 static void setToTombstone(frontend::TrivialTaggedParserAtomIndex& atom) {
42 atom = frontend::TrivialTaggedParserAtomIndex::null();
46 } // namespace detail
48 namespace frontend {
50 class FunctionBox;
52 // A pool of recyclable containers for use in the frontend. The Parser and
53 // BytecodeEmitter create many maps for name analysis that are short-lived
54 // (i.e., for the duration of parsing or emitting a lexical scope). Making
55 // them recyclable cuts down significantly on allocator churn.
56 template <typename RepresentativeCollection, typename ConcreteCollectionPool>
57 class CollectionPool {
58 using RecyclableCollections = Vector<void*, 32, SystemAllocPolicy>;
60 RecyclableCollections all_;
61 RecyclableCollections recyclable_;
63 static RepresentativeCollection* asRepresentative(void* p) {
64 return reinterpret_cast<RepresentativeCollection*>(p);
67 RepresentativeCollection* allocate() {
68 size_t newAllLength = all_.length() + 1;
69 if (!all_.reserve(newAllLength) || !recyclable_.reserve(newAllLength)) {
70 return nullptr;
73 RepresentativeCollection* collection = js_new<RepresentativeCollection>();
74 if (collection) {
75 all_.infallibleAppend(collection);
77 return collection;
80 public:
81 ~CollectionPool() { purgeAll(); }
83 void purgeAll() {
84 void** end = all_.end();
85 for (void** it = all_.begin(); it != end; ++it) {
86 js_delete(asRepresentative(*it));
89 all_.clearAndFree();
90 recyclable_.clearAndFree();
93 // Fallibly aquire one of the supported collection types from the pool.
94 template <typename Collection>
95 Collection* acquire(FrontendContext* fc) {
96 ConcreteCollectionPool::template assertInvariants<Collection>();
98 RepresentativeCollection* collection;
99 if (recyclable_.empty()) {
100 collection = allocate();
101 if (!collection) {
102 ReportOutOfMemory(fc);
104 } else {
105 collection = asRepresentative(recyclable_.popCopy());
106 collection->clear();
108 return reinterpret_cast<Collection*>(collection);
111 // Release a collection back to the pool.
112 template <typename Collection>
113 void release(Collection** collection) {
114 ConcreteCollectionPool::template assertInvariants<Collection>();
115 MOZ_ASSERT(*collection);
117 #ifdef DEBUG
118 bool ok = false;
119 // Make sure the collection is in |all_| but not already in |recyclable_|.
120 for (void** it = all_.begin(); it != all_.end(); ++it) {
121 if (*it == *collection) {
122 ok = true;
123 break;
126 MOZ_ASSERT(ok);
127 for (void** it = recyclable_.begin(); it != recyclable_.end(); ++it) {
128 MOZ_ASSERT(*it != *collection);
130 #endif
132 MOZ_ASSERT(recyclable_.length() < all_.length());
133 // Reserved in allocateFresh.
134 recyclable_.infallibleAppend(*collection);
135 *collection = nullptr;
139 template <typename Wrapped>
140 struct RecyclableAtomMapValueWrapper {
141 using WrappedType = Wrapped;
143 union {
144 Wrapped wrapped;
145 uint64_t dummy;
148 static void assertInvariant() {
149 static_assert(sizeof(Wrapped) <= sizeof(uint64_t),
150 "Can only recycle atom maps with values smaller than uint64");
153 RecyclableAtomMapValueWrapper() : dummy(0) { assertInvariant(); }
155 MOZ_IMPLICIT RecyclableAtomMapValueWrapper(Wrapped w) : wrapped(w) {
156 assertInvariant();
159 MOZ_IMPLICIT operator Wrapped&() { return wrapped; }
161 MOZ_IMPLICIT operator Wrapped&() const { return wrapped; }
163 Wrapped* operator->() { return &wrapped; }
165 const Wrapped* operator->() const { return &wrapped; }
168 template <typename MapValue>
169 using RecyclableNameMapBase =
170 InlineMap<TrivialTaggedParserAtomIndex,
171 RecyclableAtomMapValueWrapper<MapValue>, 24,
172 TrivialTaggedParserAtomIndexHasher, SystemAllocPolicy>;
174 // Define wrapper methods to accept TaggedParserAtomIndex.
175 template <typename MapValue>
176 class RecyclableNameMap : public RecyclableNameMapBase<MapValue> {
177 using Base = RecyclableNameMapBase<MapValue>;
179 public:
180 template <typename... Args>
181 [[nodiscard]] MOZ_ALWAYS_INLINE bool add(typename Base::AddPtr& p,
182 const TaggedParserAtomIndex& key,
183 Args&&... args) {
184 return Base::add(p, TrivialTaggedParserAtomIndex::from(key),
185 std::forward<Args>(args)...);
188 MOZ_ALWAYS_INLINE
189 typename Base::Ptr lookup(const TaggedParserAtomIndex& l) {
190 return Base::lookup(TrivialTaggedParserAtomIndex::from(l));
193 MOZ_ALWAYS_INLINE
194 typename Base::AddPtr lookupForAdd(const TaggedParserAtomIndex& l) {
195 return Base::lookupForAdd(TrivialTaggedParserAtomIndex::from(l));
199 using DeclaredNameMap = RecyclableNameMap<DeclaredNameInfo>;
200 using NameLocationMap = RecyclableNameMap<NameLocation>;
201 // Cannot use GCThingIndex here because it's not trivial type.
202 using AtomIndexMap = RecyclableNameMap<uint32_t>;
204 template <typename RepresentativeTable>
205 class InlineTablePool
206 : public CollectionPool<RepresentativeTable,
207 InlineTablePool<RepresentativeTable>> {
208 template <typename>
209 struct IsRecyclableAtomMapValueWrapper : std::false_type {};
211 template <typename T>
212 struct IsRecyclableAtomMapValueWrapper<RecyclableAtomMapValueWrapper<T>>
213 : std::true_type {};
215 public:
216 template <typename Table>
217 static void assertInvariants() {
218 static_assert(
219 Table::SizeOfInlineEntries == RepresentativeTable::SizeOfInlineEntries,
220 "Only tables with the same size for inline entries are usable in the "
221 "pool.");
223 using EntryType = typename Table::Table::Entry;
224 using KeyType = typename EntryType::KeyType;
225 using ValueType = typename EntryType::ValueType;
227 static_assert(IsRecyclableAtomMapValueWrapper<ValueType>::value,
228 "Please adjust the static assertions below if you need to "
229 "support other types than RecyclableAtomMapValueWrapper");
231 using WrappedType = typename ValueType::WrappedType;
233 // We can't directly check |std::is_trivial<EntryType>|, because neither
234 // mozilla::HashMapEntry nor IsRecyclableAtomMapValueWrapper are trivially
235 // default constructible. Instead we check that the key and the unwrapped
236 // value are trivial and additionally ensure that the entry itself is
237 // trivially copyable and destructible.
239 static_assert(std::is_trivial_v<KeyType>,
240 "Only tables with trivial keys are usable in the pool.");
241 static_assert(std::is_trivial_v<WrappedType>,
242 "Only tables with trivial values are usable in the pool.");
244 static_assert(
245 std::is_trivially_copyable_v<EntryType>,
246 "Only tables with trivially copyable entries are usable in the pool.");
247 static_assert(std::is_trivially_destructible_v<EntryType>,
248 "Only tables with trivially destructible entries are usable "
249 "in the pool.");
253 template <typename RepresentativeVector>
254 class VectorPool : public CollectionPool<RepresentativeVector,
255 VectorPool<RepresentativeVector>> {
256 public:
257 template <typename Vector>
258 static void assertInvariants() {
259 static_assert(
260 Vector::sMaxInlineStorage == RepresentativeVector::sMaxInlineStorage,
261 "Only vectors with the same size for inline entries are usable in the "
262 "pool.");
264 using ElementType = typename Vector::ElementType;
266 static_assert(std::is_trivial_v<ElementType>,
267 "Only vectors of trivial values are usable in the pool.");
268 static_assert(std::is_trivially_destructible_v<ElementType>,
269 "Only vectors of trivially destructible values are usable in "
270 "the pool.");
272 static_assert(
273 sizeof(ElementType) ==
274 sizeof(typename RepresentativeVector::ElementType),
275 "Only vectors with same-sized elements are usable in the pool.");
279 using AtomVector = Vector<TrivialTaggedParserAtomIndex, 24, SystemAllocPolicy>;
281 using FunctionBoxVector = Vector<FunctionBox*, 24, SystemAllocPolicy>;
283 class NameCollectionPool {
284 InlineTablePool<AtomIndexMap> mapPool_;
285 VectorPool<AtomVector> atomVectorPool_;
286 VectorPool<FunctionBoxVector> functionBoxVectorPool_;
287 uint32_t activeCompilations_;
289 public:
290 NameCollectionPool() : activeCompilations_(0) {}
292 bool hasActiveCompilation() const { return activeCompilations_ != 0; }
294 void addActiveCompilation() { activeCompilations_++; }
296 void removeActiveCompilation() {
297 MOZ_ASSERT(hasActiveCompilation());
298 activeCompilations_--;
301 template <typename Map>
302 Map* acquireMap(FrontendContext* fc) {
303 MOZ_ASSERT(hasActiveCompilation());
304 return mapPool_.acquire<Map>(fc);
307 template <typename Map>
308 void releaseMap(Map** map) {
309 MOZ_ASSERT(hasActiveCompilation());
310 MOZ_ASSERT(map);
311 if (*map) {
312 mapPool_.release(map);
316 template <typename Vector>
317 inline Vector* acquireVector(FrontendContext* fc);
319 template <typename Vector>
320 inline void releaseVector(Vector** vec);
322 void purge() {
323 if (!hasActiveCompilation()) {
324 mapPool_.purgeAll();
325 atomVectorPool_.purgeAll();
326 functionBoxVectorPool_.purgeAll();
331 template <>
332 inline AtomVector* NameCollectionPool::acquireVector<AtomVector>(
333 FrontendContext* fc) {
334 MOZ_ASSERT(hasActiveCompilation());
335 return atomVectorPool_.acquire<AtomVector>(fc);
338 template <>
339 inline void NameCollectionPool::releaseVector<AtomVector>(AtomVector** vec) {
340 MOZ_ASSERT(hasActiveCompilation());
341 MOZ_ASSERT(vec);
342 if (*vec) {
343 atomVectorPool_.release(vec);
347 template <>
348 inline FunctionBoxVector* NameCollectionPool::acquireVector<FunctionBoxVector>(
349 FrontendContext* fc) {
350 MOZ_ASSERT(hasActiveCompilation());
351 return functionBoxVectorPool_.acquire<FunctionBoxVector>(fc);
354 template <>
355 inline void NameCollectionPool::releaseVector<FunctionBoxVector>(
356 FunctionBoxVector** vec) {
357 MOZ_ASSERT(hasActiveCompilation());
358 MOZ_ASSERT(vec);
359 if (*vec) {
360 functionBoxVectorPool_.release(vec);
364 template <typename T, template <typename> typename Impl>
365 class PooledCollectionPtr {
366 NameCollectionPool& pool_;
367 T* collection_ = nullptr;
369 protected:
370 ~PooledCollectionPtr() { Impl<T>::releaseCollection(pool_, &collection_); }
372 T& collection() {
373 MOZ_ASSERT(collection_);
374 return *collection_;
377 const T& collection() const {
378 MOZ_ASSERT(collection_);
379 return *collection_;
382 public:
383 explicit PooledCollectionPtr(NameCollectionPool& pool) : pool_(pool) {}
385 bool acquire(FrontendContext* fc) {
386 MOZ_ASSERT(!collection_);
387 collection_ = Impl<T>::acquireCollection(fc, pool_);
388 return !!collection_;
391 explicit operator bool() const { return !!collection_; }
393 T* operator->() { return &collection(); }
395 const T* operator->() const { return &collection(); }
397 T& operator*() { return collection(); }
399 const T& operator*() const { return collection(); }
402 template <typename Map>
403 class PooledMapPtr : public PooledCollectionPtr<Map, PooledMapPtr> {
404 friend class PooledCollectionPtr<Map, PooledMapPtr>;
406 static Map* acquireCollection(FrontendContext* fc, NameCollectionPool& pool) {
407 return pool.acquireMap<Map>(fc);
410 static void releaseCollection(NameCollectionPool& pool, Map** ptr) {
411 pool.releaseMap(ptr);
414 using Base = PooledCollectionPtr<Map, PooledMapPtr>;
416 public:
417 using Base::Base;
419 ~PooledMapPtr() = default;
422 template <typename Vector>
423 class PooledVectorPtr : public PooledCollectionPtr<Vector, PooledVectorPtr> {
424 friend class PooledCollectionPtr<Vector, PooledVectorPtr>;
426 static Vector* acquireCollection(FrontendContext* fc,
427 NameCollectionPool& pool) {
428 return pool.acquireVector<Vector>(fc);
431 static void releaseCollection(NameCollectionPool& pool, Vector** ptr) {
432 pool.releaseVector(ptr);
435 using Base = PooledCollectionPtr<Vector, PooledVectorPtr>;
436 using Base::collection;
438 public:
439 using Base::Base;
441 ~PooledVectorPtr() = default;
443 typename Vector::ElementType& operator[](size_t index) {
444 return collection()[index];
447 const typename Vector::ElementType& operator[](size_t index) const {
448 return collection()[index];
452 } // namespace frontend
453 } // namespace js
455 #endif // frontend_NameCollections_h