/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

/* A type/length-parametrized vector class. */

#ifndef mozilla_Vector_h
#define mozilla_Vector_h

#include "mozilla/Alignment.h"
#include "mozilla/AllocPolicy.h"
#include "mozilla/ArrayUtils.h" // for PointerRangeSize
#include "mozilla/Assertions.h"
#include "mozilla/Attributes.h"
#include "mozilla/MathAlgorithms.h"
#include "mozilla/MemoryReporting.h"
#include "mozilla/Move.h"
#include "mozilla/ReentrancyGuard.h"
#include "mozilla/TemplateLib.h"
#include "mozilla/TypeTraits.h"

#include <new> // for placement new

/* Silence dire "bugs in previous versions of MSVC have been fixed" warnings */
#ifdef _MSC_VER
#pragma warning(push)
#pragma warning(disable:4345)
#endif

namespace mozilla {

template<typename T, size_t N, class AllocPolicy, class ThisVector>
class VectorBase;

namespace detail {

/*
 * Check that the given capacity wastes the minimal amount of space if
 * allocated on the heap. This means that aCapacity*sizeof(T) is as close to a
 * power-of-two as possible. growStorageBy() is responsible for ensuring this.
 */
template<typename T>
static bool CapacityHasExcessSpace(size_t aCapacity)
{
  size_t size = aCapacity * sizeof(T);
  return RoundUpPow2(size) - size >= sizeof(T);
}
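
/*
 * Worked example (an editorial illustration, not part of the original
 * source): with sizeof(T) == 12 and aCapacity == 8, size is 96 bytes and
 * RoundUpPow2(96) is 128, so 32 bytes would be wasted -- enough for at least
 * one more T -- and the capacity is reported as having excess space.
 */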

/*
 * This template class provides a default implementation for vector operations
 * when the element type is not known to be a POD, as judged by IsPod.
 */
template<typename T, size_t N, class AP, class ThisVector, bool IsPod>
struct VectorImpl
{
  /* Destroys constructed objects in the range [aBegin, aEnd). */
  static inline void destroy(T* aBegin, T* aEnd)
  {
    MOZ_ASSERT(aBegin <= aEnd);
    for (T* p = aBegin; p < aEnd; ++p) {
      p->~T();
    }
  }

  /* Constructs objects in the uninitialized range [aBegin, aEnd). */
  static inline void initialize(T* aBegin, T* aEnd)
  {
    MOZ_ASSERT(aBegin <= aEnd);
    for (T* p = aBegin; p < aEnd; ++p) {
      new(p) T();
    }
  }

  /*
   * Copy-constructs objects in the uninitialized range
   * [aDst, aDst+(aSrcEnd-aSrcStart)) from the range [aSrcStart, aSrcEnd).
   */
  template<typename U>
  static inline void copyConstruct(T* aDst,
                                   const U* aSrcStart, const U* aSrcEnd)
  {
    MOZ_ASSERT(aSrcStart <= aSrcEnd);
    for (const U* p = aSrcStart; p < aSrcEnd; ++p, ++aDst) {
      new(aDst) T(*p);
    }
  }

  /*
   * Move-constructs objects in the uninitialized range
   * [aDst, aDst+(aSrcEnd-aSrcStart)) from the range [aSrcStart, aSrcEnd).
   */
  template<typename U>
  static inline void moveConstruct(T* aDst, U* aSrcStart, U* aSrcEnd)
  {
    MOZ_ASSERT(aSrcStart <= aSrcEnd);
    for (U* p = aSrcStart; p < aSrcEnd; ++p, ++aDst) {
      new(aDst) T(Move(*p));
    }
  }

  /*
   * Copy-constructs objects in the uninitialized range [aDst, aDst+aN) from
   * the same object aU.
   */
  template<typename U>
  static inline void copyConstructN(T* aDst, size_t aN, const U& aU)
  {
    for (T* end = aDst + aN; aDst < end; ++aDst) {
      new(aDst) T(aU);
    }
  }

  /*
   * Grows the given buffer to have capacity aNewCap, preserving the objects
   * constructed in the range [begin, end) and updating aV. Assumes that (1)
   * aNewCap has not overflowed, and (2) multiplying aNewCap by sizeof(T) will
   * not overflow.
   */
  static inline bool
  growTo(VectorBase<T, N, AP, ThisVector>& aV, size_t aNewCap)
  {
    MOZ_ASSERT(!aV.usingInlineStorage());
    MOZ_ASSERT(!CapacityHasExcessSpace<T>(aNewCap));
    T* newbuf = aV.template pod_malloc<T>(aNewCap);
    if (!newbuf) {
      return false;
    }
    T* dst = newbuf;
    T* src = aV.beginNoCheck();
    for (; src < aV.endNoCheck(); ++dst, ++src) {
      new(dst) T(Move(*src));
    }
    VectorImpl::destroy(aV.beginNoCheck(), aV.endNoCheck());
    aV.free_(aV.mBegin);
    aV.mBegin = newbuf;
    /* aV.mLength is unchanged. */
    aV.mCapacity = aNewCap;
    return true;
  }
};

/*
 * This partial template specialization provides a default implementation for
 * vector operations when the element type is known to be a POD, as judged by
 * IsPod.
 */
template<typename T, size_t N, class AP, class ThisVector>
struct VectorImpl<T, N, AP, ThisVector, true>
{
  static inline void destroy(T*, T*) {}

  static inline void initialize(T* aBegin, T* aEnd)
  {
    /*
     * You would think that memset would be a big win (or even break even)
     * when we know T is a POD. But currently it's not. This is probably
     * because |append| tends to be given small ranges and memset requires
     * a function call that doesn't get inlined.
     *
     * memset(aBegin, 0, sizeof(T) * (aEnd - aBegin));
     */
    MOZ_ASSERT(aBegin <= aEnd);
    for (T* p = aBegin; p < aEnd; ++p) {
      new(p) T();
    }
  }

  template<typename U>
  static inline void copyConstruct(T* aDst,
                                   const U* aSrcStart, const U* aSrcEnd)
  {
    /*
     * See above memset comment. Also, notice that copyConstruct is
     * currently templated (T != U), so memcpy won't work without
     * requiring T == U.
     *
     * memcpy(aDst, aSrcStart, sizeof(T) * (aSrcEnd - aSrcStart));
     */
    MOZ_ASSERT(aSrcStart <= aSrcEnd);
    for (const U* p = aSrcStart; p < aSrcEnd; ++p, ++aDst) {
      *aDst = *p;
    }
  }

  template<typename U>
  static inline void moveConstruct(T* aDst,
                                   const U* aSrcStart, const U* aSrcEnd)
  {
    copyConstruct(aDst, aSrcStart, aSrcEnd);
  }

  static inline void copyConstructN(T* aDst, size_t aN, const T& aT)
  {
    for (T* end = aDst + aN; aDst < end; ++aDst) {
      *aDst = aT;
    }
  }

  static inline bool
  growTo(VectorBase<T, N, AP, ThisVector>& aV, size_t aNewCap)
  {
    MOZ_ASSERT(!aV.usingInlineStorage());
    MOZ_ASSERT(!CapacityHasExcessSpace<T>(aNewCap));
    T* newbuf = aV.template pod_realloc<T>(aV.mBegin, aV.mCapacity, aNewCap);
    if (!newbuf) {
      return false;
    }
    aV.mBegin = newbuf;
    /* aV.mLength is unchanged. */
    aV.mCapacity = aNewCap;
    return true;
  }
};

// A struct for TestVector.cpp to access private internal fields.
// DO NOT DEFINE IN YOUR OWN CODE.
struct VectorTesting;

} // namespace detail

/*
 * A CRTP base class for vector-like classes. Unless you really really want
 * your own vector class -- and you almost certainly don't -- you should use
 * mozilla::Vector instead!
 *
 * See mozilla::Vector for interface requirements.
 */
template<typename T, size_t N, class AllocPolicy, class ThisVector>
class VectorBase : private AllocPolicy
{
  /* utilities */

  static const bool kElemIsPod = IsPod<T>::value;
  typedef detail::VectorImpl<T, N, AllocPolicy, ThisVector, kElemIsPod> Impl;
  friend struct detail::VectorImpl<T, N, AllocPolicy, ThisVector, kElemIsPod>;

  friend struct detail::VectorTesting;

  bool growStorageBy(size_t aIncr);
  bool convertToHeapStorage(size_t aNewCap);

  /* magic constants */

  static const int kMaxInlineBytes = 1024;

  /* compute constants */

  /*
   * Consider element size to be 1 for buffer sizing if there are 0 inline
   * elements. This allows us to compile when the definition of the element
   * type is not visible here.
   *
   * Explicit specialization is only allowed at namespace scope, so in order
   * to keep everything here, we use a dummy template parameter with partial
   * specialization.
   */
  template<int M, int Dummy>
  struct ElemSize
  {
    static const size_t value = sizeof(T);
  };
  template<int Dummy>
  struct ElemSize<0, Dummy>
  {
    static const size_t value = 1;
  };

  static const size_t kInlineCapacity =
    tl::Min<N, kMaxInlineBytes / ElemSize<N, 0>::value>::value;

  /* Calculate inline buffer size; avoid 0-sized array. */
  static const size_t kInlineBytes =
    tl::Max<1, kInlineCapacity * ElemSize<N, 0>::value>::value;
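
  /*
   * Illustrative arithmetic (an editorial note, not from the original
   * source): with sizeof(T) == 64 and N == 32, kInlineCapacity is
   * min(32, 1024 / 64) == 16 and kInlineBytes is 16 * 64 == 1024 bytes. With
   * N == 0, ElemSize is 1, kInlineCapacity is 0, and kInlineBytes is clamped
   * to 1 to avoid a zero-sized array.
   */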

  /* member data */

  /*
   * Pointer to the buffer, be it inline or heap-allocated. Only [mBegin,
   * mBegin + mLength) hold valid constructed T objects. The range [mBegin +
   * mLength, mBegin + mCapacity) holds uninitialized memory. The range
   * [mBegin + mLength, mBegin + mReserved) also holds uninitialized memory
   * previously allocated by a call to reserve().
   */
  T* mBegin;

  /* Number of elements in the vector. */
  size_t mLength;

  /* Max number of elements storable in the vector without resizing. */
  size_t mCapacity;

#ifdef DEBUG
  /* Max elements of reserved or used space in this vector. */
  size_t mReserved;
#endif

  /* Memory used for inline storage. */
  AlignedStorage<kInlineBytes> mStorage;

#ifdef DEBUG
  friend class ReentrancyGuard;
  bool mEntered;
#endif

  /* private accessors */

  bool usingInlineStorage() const
  {
    return mBegin == const_cast<VectorBase*>(this)->inlineStorage();
  }

  T* inlineStorage()
  {
    return static_cast<T*>(mStorage.addr());
  }

  T* beginNoCheck() const
  {
    return mBegin;
  }

  T* endNoCheck()
  {
    return mBegin + mLength;
  }

  const T* endNoCheck() const
  {
    return mBegin + mLength;
  }

#ifdef DEBUG
  /**
   * The amount of explicitly allocated space in this vector that is immediately
   * available to be filled by appending additional elements. This value is
   * always greater than or equal to |length()| -- the vector's actual elements
   * are implicitly reserved. This value is always less than or equal to
   * |capacity()|. It may be explicitly increased using the |reserve()| method.
   */
  size_t reserved() const
  {
    MOZ_ASSERT(mLength <= mReserved);
    MOZ_ASSERT(mReserved <= mCapacity);
    return mReserved;
  }
#endif

  /* Append operations guaranteed to succeed due to pre-reserved space. */
  template<typename U> void internalAppend(U&& aU);
  template<typename U, size_t O, class BP, class UV>
  void internalAppendAll(const VectorBase<U, O, BP, UV>& aU);
  void internalAppendN(const T& aT, size_t aN);
  template<typename U> void internalAppend(const U* aBegin, size_t aLength);

public:
  static const size_t sMaxInlineStorage = N;

  typedef T ElementType;

  explicit VectorBase(AllocPolicy = AllocPolicy());
  explicit VectorBase(ThisVector&&); /* Move constructor. */
  ThisVector& operator=(ThisVector&&); /* Move assignment. */
  ~VectorBase();

  /* accessors */

  const AllocPolicy& allocPolicy() const { return *this; }

  AllocPolicy& allocPolicy() { return *this; }

  enum { InlineLength = N };

  size_t length() const { return mLength; }

  bool empty() const { return mLength == 0; }

  size_t capacity() const { return mCapacity; }

  T* begin()
  {
    MOZ_ASSERT(!mEntered);
    return mBegin;
  }

  const T* begin() const
  {
    MOZ_ASSERT(!mEntered);
    return mBegin;
  }

  T* end()
  {
    MOZ_ASSERT(!mEntered);
    return mBegin + mLength;
  }

  const T* end() const
  {
    MOZ_ASSERT(!mEntered);
    return mBegin + mLength;
  }

  T& operator[](size_t aIndex)
  {
    MOZ_ASSERT(!mEntered);
    MOZ_ASSERT(aIndex < mLength);
    return begin()[aIndex];
  }

  const T& operator[](size_t aIndex) const
  {
    MOZ_ASSERT(!mEntered);
    MOZ_ASSERT(aIndex < mLength);
    return begin()[aIndex];
  }

  T& back()
  {
    MOZ_ASSERT(!mEntered);
    MOZ_ASSERT(!empty());
    return *(end() - 1);
  }

  const T& back() const
  {
    MOZ_ASSERT(!mEntered);
    MOZ_ASSERT(!empty());
    return *(end() - 1);
  }

  class Range
  {
    friend class VectorBase;
    T* mCur;
    T* mEnd;
    Range(T* aCur, T* aEnd)
      : mCur(aCur)
      , mEnd(aEnd)
    {
      MOZ_ASSERT(aCur <= aEnd);
    }

  public:
    Range() {}
    bool empty() const { return mCur == mEnd; }
    size_t remain() const { return PointerRangeSize(mCur, mEnd); }
    T& front() const { MOZ_ASSERT(!empty()); return *mCur; }
    void popFront() { MOZ_ASSERT(!empty()); ++mCur; }
    T popCopyFront() { MOZ_ASSERT(!empty()); return *mCur++; }
  };

  Range all() { return Range(begin(), end()); }

  /* mutators */

  /**
   * Given that the vector is empty and has no inline storage, grow the
   * capacity to |aRequest| elements.
   */
  bool initCapacity(size_t aRequest);

  /**
   * If reserve(aRequest) succeeds and |aRequest >= length()|, then appending
   * |aRequest - length()| elements, in any sequence of append/appendAll calls,
   * is guaranteed to succeed.
   *
   * A request to reserve an amount less than the current length does not affect
   * reserved space.
   */
  bool reserve(size_t aRequest);
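
  /*
   * Illustrative pattern (an editorial sketch; |vec|, |elems|, and |n| are
   * hypothetical): reserve the space once, then use the infallible appends
   * declared below, which cannot fail because the space was pre-reserved.
   *
   *   if (!vec.reserve(vec.length() + n)) {
   *     return false;  // allocation failure
   *   }
   *   for (size_t i = 0; i < n; i++) {
   *     vec.infallibleAppend(elems[i]);
   *   }
   */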

  /**
   * Destroy elements in the range [end() - aIncr, end()). Does not deallocate
   * or unreserve storage for those elements.
   */
  void shrinkBy(size_t aIncr);

  /** Grow the vector by aIncr elements. */
  bool growBy(size_t aIncr);

  /** Call shrinkBy or growBy based on whether aNewLength > length(). */
  bool resize(size_t aNewLength);

  /**
   * Increase the length of the vector, but don't initialize the new elements
   * -- leave them as uninitialized memory.
   */
  bool growByUninitialized(size_t aIncr);
  void infallibleGrowByUninitialized(size_t aIncr);
  bool resizeUninitialized(size_t aNewLength);

  /** Shorthand for shrinkBy(length()). */
  void clear();

  /** Clears and releases any heap-allocated storage. */
  void clearAndFree();

  /**
   * If true, appending |aNeeded| elements won't reallocate element storage.
   * This *doesn't* mean that infallibleAppend may be used! You still must
   * reserve the extra space, even if this method indicates that appends won't
   * need to reallocate element storage.
   */
  bool canAppendWithoutRealloc(size_t aNeeded) const;

  /** Potentially fallible append operations. */

  /**
   * This can take either a T& or a T&&. Given a T&&, it moves |aU| into the
   * vector, instead of copying it. If it fails, |aU| is left unmoved. ("We are
   * not amused.")
   */
  template<typename U> bool append(U&& aU);

  template<typename U, size_t O, class BP, class UV>
  bool appendAll(const VectorBase<U, O, BP, UV>& aU);
  bool appendN(const T& aT, size_t aN);
  template<typename U> bool append(const U* aBegin, const U* aEnd);
  template<typename U> bool append(const U* aBegin, size_t aLength);

  /*
   * Guaranteed-infallible append operations for use upon vectors whose
   * memory has been pre-reserved. Don't use this if you haven't reserved the
   * memory!
   */
  template<typename U> void infallibleAppend(U&& aU)
  {
    internalAppend(Forward<U>(aU));
  }
  void infallibleAppendN(const T& aT, size_t aN)
  {
    internalAppendN(aT, aN);
  }
  template<typename U> void infallibleAppend(const U* aBegin, const U* aEnd)
  {
    internalAppend(aBegin, PointerRangeSize(aBegin, aEnd));
  }
  template<typename U> void infallibleAppend(const U* aBegin, size_t aLength)
  {
    internalAppend(aBegin, aLength);
  }

  void popBack();

  T popCopy();

  /**
   * Transfers ownership of the internal buffer used by this vector to the
   * caller. (It's the caller's responsibility to properly deallocate this
   * buffer, in accordance with this vector's AllocPolicy.) After this call,
   * the vector is empty. Since the returned buffer may need to be allocated
   * (if the elements are currently stored in-place), the call can fail,
   * returning nullptr.
   *
   * N.B. Although a T*, only the range [0, length()) is constructed.
   */
  T* extractRawBuffer();
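
  /*
   * Illustrative use (an editorial sketch for a hypothetical Vector<char> vec,
   * assuming the default MallocAllocPolicy, under which the extracted buffer
   * is released with free()):
   *
   *   size_t len = vec.length();
   *   if (char* buf = vec.extractRawBuffer()) {
   *     // vec is now empty; only buf[0..len) holds constructed elements.
   *     free(buf);
   *   }
   */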

  /**
   * Transfer ownership of an array of objects into the vector. The caller
   * must have allocated the array in accordance with this vector's
   * AllocPolicy.
   *
   * N.B. This call assumes that there are no uninitialized elements in the
   * passed array.
   */
  void replaceRawBuffer(T* aP, size_t aLength);

  /**
   * Places |aVal| at position |aP|, shifting existing elements from |aP| onward
   * one position higher. On success, |aP| should not be reused because it'll
   * be a dangling pointer if reallocation of the vector storage occurred; the
   * return value should be used instead. On failure, nullptr is returned.
   *
   * Example usage:
   *
   *   if (!(p = vec.insert(p, val))) {
   *     <handle failure>
   *   }
   *   <keep working with p>
   *
   * This is inherently a linear-time operation. Be careful!
   */
  template<typename U>
  T* insert(T* aP, U&& aVal);

  /**
   * Removes the element |aT|, which must fall in the bounds [begin, end),
   * shifting existing elements from |aT + 1| onward one position lower.
   */
  void erase(T* aT);

  /**
   * Removes the elements [|aBegin|, |aEnd|), which must fall in the bounds
   * [begin, end), shifting existing elements from |aEnd| onward to aBegin's
   * old position.
   */
  void erase(T* aBegin, T* aEnd);

  /**
   * Measure the size of the vector's heap-allocated storage.
   */
  size_t sizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const;

  /**
   * Like sizeOfExcludingThis, but also measures the size of the vector
   * object (which must be heap-allocated) itself.
   */
  size_t sizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const;

  void swap(ThisVector& aOther);

private:
  VectorBase(const VectorBase&) = delete;
  void operator=(const VectorBase&) = delete;

  /* Move-construct/assign only from our derived class, ThisVector. */
  VectorBase(VectorBase&&) = delete;
  void operator=(VectorBase&&) = delete;
};

/* This does the re-entrancy check plus several other sanity checks. */
#define MOZ_REENTRANCY_GUARD_ET_AL \
  ReentrancyGuard g(*this); \
  MOZ_ASSERT_IF(usingInlineStorage(), mCapacity == kInlineCapacity); \
  MOZ_ASSERT(reserved() <= mCapacity); \
  MOZ_ASSERT(mLength <= reserved()); \
  MOZ_ASSERT(mLength <= mCapacity)

/* Vector Implementation */

template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE
VectorBase<T, N, AP, TV>::VectorBase(AP aAP)
  : AP(aAP)
  , mLength(0)
  , mCapacity(kInlineCapacity)
#ifdef DEBUG
  , mReserved(0)
  , mEntered(false)
#endif
{
  mBegin = static_cast<T*>(mStorage.addr());
}

/* Move constructor. */
template<typename T, size_t N, class AllocPolicy, class TV>
MOZ_ALWAYS_INLINE
VectorBase<T, N, AllocPolicy, TV>::VectorBase(TV&& aRhs)
  : AllocPolicy(Move(aRhs))
#ifdef DEBUG
  , mEntered(false)
#endif
{
  mLength = aRhs.mLength;
  mCapacity = aRhs.mCapacity;
#ifdef DEBUG
  mReserved = aRhs.mReserved;
#endif

  if (aRhs.usingInlineStorage()) {
    /* We can't move the buffer over in this case, so copy elements. */
    mBegin = static_cast<T*>(mStorage.addr());
    Impl::moveConstruct(mBegin, aRhs.beginNoCheck(), aRhs.endNoCheck());
    /*
     * Leave aRhs's mLength, mBegin, mCapacity, and mReserved as they are.
     * The elements in its in-line storage still need to be destroyed.
     */
  } else {
    /*
     * Take src's buffer, and turn src into an empty vector using
     * in-line storage.
     */
    mBegin = aRhs.mBegin;
    aRhs.mBegin = static_cast<T*>(aRhs.mStorage.addr());
    aRhs.mCapacity = kInlineCapacity;
    aRhs.mLength = 0;
#ifdef DEBUG
    aRhs.mReserved = 0;
#endif
  }
}

/* Move assignment. */
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE TV&
VectorBase<T, N, AP, TV>::operator=(TV&& aRhs)
{
  MOZ_ASSERT(this != &aRhs, "self-move assignment is prohibited");
  TV* tv = static_cast<TV*>(this);
  tv->~TV();
  new(tv) TV(Move(aRhs));
  return *tv;
}

template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE
VectorBase<T, N, AP, TV>::~VectorBase()
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  Impl::destroy(beginNoCheck(), endNoCheck());
  if (!usingInlineStorage()) {
    this->free_(beginNoCheck());
  }
}

/*
 * This function will create a new heap buffer with capacity aNewCap,
 * move all elements from the inline buffer into it, and return false on OOM.
 */
template<typename T, size_t N, class AP, class TV>
inline bool
VectorBase<T, N, AP, TV>::convertToHeapStorage(size_t aNewCap)
{
  MOZ_ASSERT(usingInlineStorage());

  /* Allocate buffer. */
  MOZ_ASSERT(!detail::CapacityHasExcessSpace<T>(aNewCap));
  T* newBuf = this->template pod_malloc<T>(aNewCap);
  if (!newBuf) {
    return false;
  }

  /* Copy inline elements into heap buffer. */
  Impl::moveConstruct(newBuf, beginNoCheck(), endNoCheck());
  Impl::destroy(beginNoCheck(), endNoCheck());

  /* Switch in heap buffer. */
  mBegin = newBuf;
  /* mLength is unchanged. */
  mCapacity = aNewCap;
  return true;
}

template<typename T, size_t N, class AP, class TV>
MOZ_NEVER_INLINE bool
VectorBase<T, N, AP, TV>::growStorageBy(size_t aIncr)
{
  MOZ_ASSERT(mLength + aIncr > mCapacity);

  /*
   * When choosing a new capacity, its size should be as close to 2**N bytes
   * as possible. 2**N-sized requests are best because they are unlikely to
   * be rounded up by the allocator. Asking for a 2**N number of elements
   * isn't as good, because if sizeof(T) is not a power-of-two that would
   * result in a non-2**N request size.
   */
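
  /*
   * Illustrative arithmetic (an editorial note): with sizeof(T) == 12 and a
   * required capacity of 10 elements, newMinSize below is 120, RoundUpPow2(120)
   * returns 128, and newCap becomes 128 / 12 == 10, wasting only 8 bytes --
   * less than one element.
   */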

  size_t newCap;

  if (aIncr == 1) {
    if (usingInlineStorage()) {
      /* This case occurs in ~70--80% of the calls to this function. */
      size_t newSize =
        tl::RoundUpPow2<(kInlineCapacity + 1) * sizeof(T)>::value;
      newCap = newSize / sizeof(T);
      goto convert;
    }

    if (mLength == 0) {
      /* This case occurs in ~0--10% of the calls to this function. */
      newCap = 1;
      goto grow;
    }

    /* This case occurs in ~15--20% of the calls to this function. */

    /*
     * Will mLength * 4 * sizeof(T) overflow? This condition limits a vector
     * to 1GB of memory on a 32-bit system, which is a reasonable limit. It
     * also ensures that
     *
     *   static_cast<char*>(end()) - static_cast<char*>(begin())
     *
     * doesn't overflow ptrdiff_t (see bug 510319).
     */
    if (mLength & tl::MulOverflowMask<4 * sizeof(T)>::value) {
      this->reportAllocOverflow();
      return false;
    }

    /*
     * If we reach here, the existing capacity will have a size that is already
     * as close to 2^N as sizeof(T) will allow. Just double the capacity, and
     * then there might be space for one more element.
     */
    newCap = mLength * 2;
    if (detail::CapacityHasExcessSpace<T>(newCap)) {
      newCap += 1;
    }
  } else {
    /* This case occurs in ~2% of the calls to this function. */
    size_t newMinCap = mLength + aIncr;

    /* Did mLength + aIncr overflow? Will newCap * sizeof(T) overflow? */
    if (newMinCap < mLength ||
        newMinCap & tl::MulOverflowMask<2 * sizeof(T)>::value)
    {
      this->reportAllocOverflow();
      return false;
    }

    size_t newMinSize = newMinCap * sizeof(T);
    size_t newSize = RoundUpPow2(newMinSize);
    newCap = newSize / sizeof(T);
  }

  if (usingInlineStorage()) {
convert:
    return convertToHeapStorage(newCap);
  }

grow:
  return Impl::growTo(*this, newCap);
}

template<typename T, size_t N, class AP, class TV>
inline bool
VectorBase<T, N, AP, TV>::initCapacity(size_t aRequest)
{
  MOZ_ASSERT(empty());
  MOZ_ASSERT(usingInlineStorage());
  if (aRequest == 0) {
    return true;
  }
  T* newbuf = this->template pod_malloc<T>(aRequest);
  if (!newbuf) {
    return false;
  }
  mBegin = newbuf;
  mCapacity = aRequest;
#ifdef DEBUG
  mReserved = aRequest;
#endif
  return true;
}

template<typename T, size_t N, class AP, class TV>
inline bool
VectorBase<T, N, AP, TV>::reserve(size_t aRequest)
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  if (aRequest > mCapacity && !growStorageBy(aRequest - mLength)) {
    return false;
  }
#ifdef DEBUG
  if (aRequest > mReserved) {
    mReserved = aRequest;
  }
  MOZ_ASSERT(mLength <= mReserved);
  MOZ_ASSERT(mReserved <= mCapacity);
#endif
  return true;
}

template<typename T, size_t N, class AP, class TV>
inline void
VectorBase<T, N, AP, TV>::shrinkBy(size_t aIncr)
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  MOZ_ASSERT(aIncr <= mLength);
  Impl::destroy(endNoCheck() - aIncr, endNoCheck());
  mLength -= aIncr;
}

template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::growBy(size_t aIncr)
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  if (aIncr > mCapacity - mLength && !growStorageBy(aIncr)) {
    return false;
  }
  MOZ_ASSERT(mLength + aIncr <= mCapacity);
  T* newend = endNoCheck() + aIncr;
  Impl::initialize(endNoCheck(), newend);
  mLength += aIncr;
#ifdef DEBUG
  if (mLength > mReserved) {
    mReserved = mLength;
  }
#endif
  return true;
}

template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::growByUninitialized(size_t aIncr)
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  if (aIncr > mCapacity - mLength && !growStorageBy(aIncr)) {
    return false;
  }
  infallibleGrowByUninitialized(aIncr);
  return true;
}

template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE void
VectorBase<T, N, AP, TV>::infallibleGrowByUninitialized(size_t aIncr)
{
  MOZ_ASSERT(mLength + aIncr <= mCapacity);
  mLength += aIncr;
#ifdef DEBUG
  if (mLength > mReserved) {
    mReserved = mLength;
  }
#endif
}

template<typename T, size_t N, class AP, class TV>
inline bool
VectorBase<T, N, AP, TV>::resize(size_t aNewLength)
{
  size_t curLength = mLength;
  if (aNewLength > curLength) {
    return growBy(aNewLength - curLength);
  }
  shrinkBy(curLength - aNewLength);
  return true;
}

template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::resizeUninitialized(size_t aNewLength)
{
  size_t curLength = mLength;
  if (aNewLength > curLength) {
    return growByUninitialized(aNewLength - curLength);
  }
  shrinkBy(curLength - aNewLength);
  return true;
}

template<typename T, size_t N, class AP, class TV>
inline void
VectorBase<T, N, AP, TV>::clear()
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  Impl::destroy(beginNoCheck(), endNoCheck());
  mLength = 0;
}

template<typename T, size_t N, class AP, class TV>
inline void
VectorBase<T, N, AP, TV>::clearAndFree()
{
  clear();

  if (usingInlineStorage()) {
    return;
  }
  this->free_(beginNoCheck());
  mBegin = static_cast<T*>(mStorage.addr());
  mCapacity = kInlineCapacity;
#ifdef DEBUG
  mReserved = 0;
#endif
}

template<typename T, size_t N, class AP, class TV>
inline bool
VectorBase<T, N, AP, TV>::canAppendWithoutRealloc(size_t aNeeded) const
{
  return mLength + aNeeded <= mCapacity;
}

template<typename T, size_t N, class AP, class TV>
template<typename U, size_t O, class BP, class UV>
MOZ_ALWAYS_INLINE void
VectorBase<T, N, AP, TV>::internalAppendAll(
  const VectorBase<U, O, BP, UV>& aOther)
{
  internalAppend(aOther.begin(), aOther.length());
}

template<typename T, size_t N, class AP, class TV>
template<typename U>
MOZ_ALWAYS_INLINE void
VectorBase<T, N, AP, TV>::internalAppend(U&& aU)
{
  MOZ_ASSERT(mLength + 1 <= mReserved);
  MOZ_ASSERT(mReserved <= mCapacity);
  new(endNoCheck()) T(Forward<U>(aU));
  ++mLength;
}

template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::appendN(const T& aT, size_t aNeeded)
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  if (mLength + aNeeded > mCapacity && !growStorageBy(aNeeded)) {
    return false;
  }
#ifdef DEBUG
  if (mLength + aNeeded > mReserved) {
    mReserved = mLength + aNeeded;
  }
#endif
  internalAppendN(aT, aNeeded);
  return true;
}

template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE void
VectorBase<T, N, AP, TV>::internalAppendN(const T& aT, size_t aNeeded)
{
  MOZ_ASSERT(mLength + aNeeded <= mReserved);
  MOZ_ASSERT(mReserved <= mCapacity);
  Impl::copyConstructN(endNoCheck(), aNeeded, aT);
  mLength += aNeeded;
}

template<typename T, size_t N, class AP, class TV>
template<typename U>
inline T*
VectorBase<T, N, AP, TV>::insert(T* aP, U&& aVal)
{
  MOZ_ASSERT(begin() <= aP);
  MOZ_ASSERT(aP <= end());
  size_t pos = aP - begin();
  MOZ_ASSERT(pos <= mLength);
  size_t oldLength = mLength;
  if (pos == oldLength) {
    if (!append(Forward<U>(aVal))) {
      return nullptr;
    }
  } else {
    T oldBack = Move(back());
    if (!append(Move(oldBack))) { /* Dup the last element. */
      return nullptr;
    }
    for (size_t i = oldLength; i > pos; --i) {
      (*this)[i] = Move((*this)[i - 1]);
    }
    (*this)[pos] = Forward<U>(aVal);
  }
  return begin() + pos;
}

template<typename T, size_t N, class AP, class TV>
inline void
VectorBase<T, N, AP, TV>::erase(T* aIt)
{
  MOZ_ASSERT(begin() <= aIt);
  MOZ_ASSERT(aIt < end());
  while (aIt + 1 < end()) {
    *aIt = Move(*(aIt + 1));
    ++aIt;
  }
  popBack();
}

template<typename T, size_t N, class AP, class TV>
inline void
VectorBase<T, N, AP, TV>::erase(T* aBegin, T* aEnd)
{
  MOZ_ASSERT(begin() <= aBegin);
  MOZ_ASSERT(aBegin <= aEnd);
  MOZ_ASSERT(aEnd <= end());
  while (aEnd < end()) {
    *aBegin++ = Move(*aEnd++);
  }
  shrinkBy(aEnd - aBegin);
}

template<typename T, size_t N, class AP, class TV>
template<typename U>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::append(const U* aInsBegin, const U* aInsEnd)
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  size_t aNeeded = PointerRangeSize(aInsBegin, aInsEnd);
  if (mLength + aNeeded > mCapacity && !growStorageBy(aNeeded)) {
    return false;
  }
#ifdef DEBUG
  if (mLength + aNeeded > mReserved) {
    mReserved = mLength + aNeeded;
  }
#endif
  internalAppend(aInsBegin, aNeeded);
  return true;
}

template<typename T, size_t N, class AP, class TV>
template<typename U>
MOZ_ALWAYS_INLINE void
VectorBase<T, N, AP, TV>::internalAppend(const U* aInsBegin, size_t aInsLength)
{
  MOZ_ASSERT(mLength + aInsLength <= mReserved);
  MOZ_ASSERT(mReserved <= mCapacity);
  Impl::copyConstruct(endNoCheck(), aInsBegin, aInsBegin + aInsLength);
  mLength += aInsLength;
}

template<typename T, size_t N, class AP, class TV>
template<typename U>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::append(U&& aU)
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  if (mLength == mCapacity && !growStorageBy(1)) {
    return false;
  }
#ifdef DEBUG
  if (mLength + 1 > mReserved) {
    mReserved = mLength + 1;
  }
#endif
  internalAppend(Forward<U>(aU));
  return true;
}

template<typename T, size_t N, class AP, class TV>
template<typename U, size_t O, class BP, class UV>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::appendAll(const VectorBase<U, O, BP, UV>& aOther)
{
  return append(aOther.begin(), aOther.length());
}

template<typename T, size_t N, class AP, class TV>
template<class U>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::append(const U* aInsBegin, size_t aInsLength)
{
  return append(aInsBegin, aInsBegin + aInsLength);
}

template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE void
VectorBase<T, N, AP, TV>::popBack()
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  MOZ_ASSERT(!empty());
  --mLength;
  endNoCheck()->~T();
}

template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE T
VectorBase<T, N, AP, TV>::popCopy()
{
  T ret = back();
  popBack();
  return ret;
}

template<typename T, size_t N, class AP, class TV>
inline T*
VectorBase<T, N, AP, TV>::extractRawBuffer()
{
  T* ret;
  if (usingInlineStorage()) {
    ret = this->template pod_malloc<T>(mLength);
    if (!ret) {
      return nullptr;
    }
    Impl::copyConstruct(ret, beginNoCheck(), endNoCheck());
    Impl::destroy(beginNoCheck(), endNoCheck());
    /* mBegin, mCapacity are unchanged. */
    mLength = 0;
  } else {
    ret = mBegin;
    mBegin = static_cast<T*>(mStorage.addr());
    mLength = 0;
    mCapacity = kInlineCapacity;
#ifdef DEBUG
    mReserved = 0;
#endif
  }
  return ret;
}

template<typename T, size_t N, class AP, class TV>
inline void
VectorBase<T, N, AP, TV>::replaceRawBuffer(T* aP, size_t aLength)
{
  MOZ_REENTRANCY_GUARD_ET_AL;

  /* Destroy what we have. */
  Impl::destroy(beginNoCheck(), endNoCheck());
  if (!usingInlineStorage()) {
    this->free_(beginNoCheck());
  }

  /* Take in the new buffer. */
  if (aLength <= kInlineCapacity) {
    /*
     * We convert to inline storage if possible, even though aP might
     * otherwise be acceptable. Maybe this behaviour should be
     * specifiable with an argument to this function.
     */
    mBegin = static_cast<T*>(mStorage.addr());
    mLength = aLength;
    mCapacity = kInlineCapacity;
    Impl::moveConstruct(mBegin, aP, aP + aLength);
    Impl::destroy(aP, aP + aLength);
    this->free_(aP);
  } else {
    mBegin = aP;
    mLength = aLength;
    mCapacity = aLength;
  }
#ifdef DEBUG
  mReserved = aLength;
#endif
}

template<typename T, size_t N, class AP, class TV>
inline size_t
VectorBase<T, N, AP, TV>::sizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
{
  return usingInlineStorage() ? 0 : aMallocSizeOf(beginNoCheck());
}

template<typename T, size_t N, class AP, class TV>
inline size_t
VectorBase<T, N, AP, TV>::sizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
{
  return aMallocSizeOf(this) + sizeOfExcludingThis(aMallocSizeOf);
}

template<typename T, size_t N, class AP, class TV>
inline void
VectorBase<T, N, AP, TV>::swap(TV& aOther)
{
  static_assert(N == 0,
                "still need to implement this for N != 0");

  // This only works when inline storage is always empty.
  if (!usingInlineStorage() && aOther.usingInlineStorage()) {
    aOther.mBegin = mBegin;
    mBegin = inlineStorage();
  } else if (usingInlineStorage() && !aOther.usingInlineStorage()) {
    mBegin = aOther.mBegin;
    aOther.mBegin = aOther.inlineStorage();
  } else if (!usingInlineStorage() && !aOther.usingInlineStorage()) {
    Swap(mBegin, aOther.mBegin);
  } else {
    // This case is a no-op, since we'd set both to use their inline storage.
  }

  Swap(mLength, aOther.mLength);
  Swap(mCapacity, aOther.mCapacity);
#ifdef DEBUG
  Swap(mReserved, aOther.mReserved);
#endif
}

/*
 * STL-like container providing a short-lived, dynamic buffer. Vector calls the
 * constructors/destructors of all elements stored in its internal buffer, so
 * non-PODs may be safely used. Additionally, Vector will store the first N
 * elements in-place before resorting to dynamic allocation.
 *
 * T requirements:
 *  - default and copy constructible, assignable, destructible
 *  - operations do not throw
 * N requirements:
 *  - any value; however, N is clamped to min/max values
 * AllocPolicy:
 *  - see "Allocation policies" in AllocPolicy.h (defaults to
 *    mozilla::MallocAllocPolicy)
 *
 * Vector is not reentrant: T member functions called during Vector member
 * functions must not call back into the same object!
 */
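/*
 * Illustrative usage (an editorial sketch, not part of the original header):
 *
 *   mozilla::Vector<int, 8> v;    // the first 8 elements are stored in-place
 *   for (int i = 0; i < 100; i++) {
 *     if (!v.append(i)) {         // append is fallible; check the result
 *       return false;
 *     }
 *   }
 *   int last = v.popCopy();       // copy out and remove the last element
 *   v.clear();
 */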
template<typename T,
         size_t MinInlineCapacity = 0,
         class AllocPolicy = MallocAllocPolicy>
class Vector
  : public VectorBase<T,
                      MinInlineCapacity,
                      AllocPolicy,
                      Vector<T, MinInlineCapacity, AllocPolicy> >
{
  typedef VectorBase<T, MinInlineCapacity, AllocPolicy, Vector> Base;

public:
  explicit Vector(AllocPolicy alloc = AllocPolicy()) : Base(alloc) {}
  Vector(Vector&& vec) : Base(Move(vec)) {}
  Vector& operator=(Vector&& aOther)
  {
    return Base::operator=(Move(aOther));
  }
};

} // namespace mozilla

#ifdef _MSC_VER
#pragma warning(pop)
#endif

#endif /* mozilla_Vector_h */