/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

/* A type/length-parametrized vector class. */

#ifndef mozilla_Vector_h
#define mozilla_Vector_h

#include "mozilla/Alignment.h"
#include "mozilla/AllocPolicy.h"
#include "mozilla/ArrayUtils.h" // for PointerRangeSize
#include "mozilla/Assertions.h"
#include "mozilla/Attributes.h"
#include "mozilla/MathAlgorithms.h"
#include "mozilla/MemoryReporting.h"
#include "mozilla/Move.h"
#include "mozilla/NullPtr.h"
#include "mozilla/ReentrancyGuard.h"
#include "mozilla/TemplateLib.h"
#include "mozilla/TypeTraits.h"

#include <new> // for placement new

/* Silence dire "bugs in previous versions of MSVC have been fixed" warnings */
#ifdef _MSC_VER
#pragma warning(push)
#pragma warning(disable:4345)
#endif
namespace mozilla {

template<typename T, size_t N, class AllocPolicy, class ThisVector>
class VectorBase;

namespace detail {
/*
 * Check that the given capacity wastes the minimal amount of space if
 * allocated on the heap. This means that aCapacity*sizeof(T) is as close to a
 * power-of-two as possible. growStorageBy() is responsible for ensuring this.
 */
template<typename T>
static bool CapacityHasExcessSpace(size_t aCapacity)
{
  size_t size = aCapacity * sizeof(T);
  return RoundUpPow2(size) - size >= sizeof(T);
}
/*
 * This template class provides a default implementation for vector operations
 * when the element type is not known to be a POD, as judged by IsPod.
 */
template<typename T, size_t N, class AP, class ThisVector, bool IsPod>
struct VectorImpl
{
  /* Destroys constructed objects in the range [aBegin, aEnd). */
  static inline void destroy(T* aBegin, T* aEnd)
  {
    MOZ_ASSERT(aBegin <= aEnd);
    for (T* p = aBegin; p < aEnd; ++p) {
      p->~T();
    }
  }

  /* Constructs objects in the uninitialized range [aBegin, aEnd). */
  static inline void initialize(T* aBegin, T* aEnd)
  {
    MOZ_ASSERT(aBegin <= aEnd);
    for (T* p = aBegin; p < aEnd; ++p) {
      new(p) T();
    }
  }

  /*
   * Copy-constructs objects in the uninitialized range
   * [aDst, aDst+(aSrcEnd-aSrcStart)) from the range [aSrcStart, aSrcEnd).
   */
  template<typename U>
  static inline void copyConstruct(T* aDst,
                                   const U* aSrcStart, const U* aSrcEnd)
  {
    MOZ_ASSERT(aSrcStart <= aSrcEnd);
    for (const U* p = aSrcStart; p < aSrcEnd; ++p, ++aDst) {
      new(aDst) T(*p);
    }
  }

  /*
   * Move-constructs objects in the uninitialized range
   * [aDst, aDst+(aSrcEnd-aSrcStart)) from the range [aSrcStart, aSrcEnd).
   */
  template<typename U>
  static inline void moveConstruct(T* aDst, U* aSrcStart, U* aSrcEnd)
  {
    MOZ_ASSERT(aSrcStart <= aSrcEnd);
    for (U* p = aSrcStart; p < aSrcEnd; ++p, ++aDst) {
      new(aDst) T(Move(*p));
    }
  }

  /*
   * Copy-constructs objects in the uninitialized range [aDst, aDst+aN) from
   * the same object aU.
   */
  template<typename U>
  static inline void copyConstructN(T* aDst, size_t aN, const U& aU)
  {
    for (T* end = aDst + aN; aDst < end; ++aDst) {
      new(aDst) T(aU);
    }
  }

  /*
   * Grows the given buffer to have capacity aNewCap, preserving the objects
   * constructed in the range [begin, end) and updating aV. Assumes that (1)
   * aNewCap has not overflowed, and (2) multiplying aNewCap by sizeof(T) will
   * not overflow.
   */
  static inline bool
  growTo(VectorBase<T, N, AP, ThisVector>& aV, size_t aNewCap)
  {
    MOZ_ASSERT(!aV.usingInlineStorage());
    MOZ_ASSERT(!CapacityHasExcessSpace<T>(aNewCap));
    T* newbuf = aV.template pod_malloc<T>(aNewCap);
    if (!newbuf) {
      return false;
    }
    T* dst = newbuf;
    T* src = aV.beginNoCheck();
    for (; src < aV.endNoCheck(); ++dst, ++src) {
      new(dst) T(Move(*src));
    }
    VectorImpl::destroy(aV.beginNoCheck(), aV.endNoCheck());
    aV.free_(aV.mBegin);
    aV.mBegin = newbuf;
    /* aV.mLength is unchanged. */
    aV.mCapacity = aNewCap;
    return true;
  }
};
/*
 * This partial template specialization provides a default implementation for
 * vector operations when the element type is known to be a POD, as judged by
 * IsPod.
 */
template<typename T, size_t N, class AP, class ThisVector>
struct VectorImpl<T, N, AP, ThisVector, true>
{
  static inline void destroy(T*, T*) {}

  static inline void initialize(T* aBegin, T* aEnd)
  {
    /*
     * You would think that memset would be a big win (or even break even)
     * when we know T is a POD. But currently it's not. This is probably
     * because |append| tends to be given small ranges and memset requires
     * a function call that doesn't get inlined.
     *
     * memset(aBegin, 0, sizeof(T) * (aEnd - aBegin));
     */
    MOZ_ASSERT(aBegin <= aEnd);
    for (T* p = aBegin; p < aEnd; ++p) {
      new(p) T();
    }
  }

  template<typename U>
  static inline void copyConstruct(T* aDst,
                                   const U* aSrcStart, const U* aSrcEnd)
  {
    /*
     * See above memset comment. Also, notice that copyConstruct is
     * currently templated (T != U), so memcpy won't work without
     * requiring T == U.
     *
     * memcpy(aDst, aSrcStart, sizeof(T) * (aSrcEnd - aSrcStart));
     */
    MOZ_ASSERT(aSrcStart <= aSrcEnd);
    for (const U* p = aSrcStart; p < aSrcEnd; ++p, ++aDst) {
      *aDst = *p;
    }
  }

  template<typename U>
  static inline void moveConstruct(T* aDst,
                                   const U* aSrcStart, const U* aSrcEnd)
  {
    copyConstruct(aDst, aSrcStart, aSrcEnd);
  }

  static inline void copyConstructN(T* aDst, size_t aN, const T& aT)
  {
    for (T* end = aDst + aN; aDst < end; ++aDst) {
      *aDst = aT;
    }
  }

  static inline bool
  growTo(VectorBase<T, N, AP, ThisVector>& aV, size_t aNewCap)
  {
    MOZ_ASSERT(!aV.usingInlineStorage());
    MOZ_ASSERT(!CapacityHasExcessSpace<T>(aNewCap));
    T* newbuf = aV.template pod_realloc<T>(aV.mBegin, aV.mCapacity, aNewCap);
    if (!newbuf) {
      return false;
    }
    aV.mBegin = newbuf;
    /* aV.mLength is unchanged. */
    aV.mCapacity = aNewCap;
    return true;
  }
};

} // namespace detail
/*
 * A CRTP base class for vector-like classes. Unless you really really want
 * your own vector class -- and you almost certainly don't -- you should use
 * mozilla::Vector instead!
 *
 * See mozilla::Vector for interface requirements.
 */
template<typename T, size_t N, class AllocPolicy, class ThisVector>
class VectorBase : private AllocPolicy
{
  /* utilities */

  static const bool kElemIsPod = IsPod<T>::value;
  typedef detail::VectorImpl<T, N, AllocPolicy, ThisVector, kElemIsPod> Impl;
  friend struct detail::VectorImpl<T, N, AllocPolicy, ThisVector, kElemIsPod>;

  bool growStorageBy(size_t aIncr);
  bool convertToHeapStorage(size_t aNewCap);

  /* magic constants */

  static const int kMaxInlineBytes = 1024;

  /* compute constants */

  /*
   * Consider element size to be 1 for buffer sizing if there are 0 inline
   * elements. This allows us to compile when the definition of the element
   * type is not visible here.
   *
   * Explicit specialization is only allowed at namespace scope, so in order
   * to keep everything here, we use a dummy template parameter with partial
   * specialization.
   */
  template<int M, int Dummy>
  struct ElemSize
  {
    static const size_t value = sizeof(T);
  };
  template<int Dummy>
  struct ElemSize<0, Dummy>
  {
    static const size_t value = 1;
  };

  static const size_t kInlineCapacity =
    tl::Min<N, kMaxInlineBytes / ElemSize<N, 0>::value>::value;

  /* Calculate inline buffer size; avoid 0-sized array. */
  static const size_t kInlineBytes =
    tl::Max<1, kInlineCapacity * ElemSize<N, 0>::value>::value;

  /* member data */

  /*
   * Pointer to the buffer, be it inline or heap-allocated. Only [mBegin,
   * mBegin + mLength) hold valid constructed T objects. The range [mBegin +
   * mLength, mBegin + mCapacity) holds uninitialized memory. The range
   * [mBegin + mLength, mBegin + mReserved) also holds uninitialized memory
   * previously allocated by a call to reserve().
   */
  T* mBegin;

  /* Number of elements in the vector. */
  size_t mLength;

  /* Max number of elements storable in the vector without resizing. */
  size_t mCapacity;

#ifdef DEBUG
  /* Max elements of reserved or used space in this vector. */
  size_t mReserved;
#endif

  /* Memory used for inline storage. */
  AlignedStorage<kInlineBytes> mStorage;

#ifdef DEBUG
  friend class ReentrancyGuard;
  bool mEntered;
#endif
  /* private accessors */

  bool usingInlineStorage() const
  {
    return mBegin == const_cast<VectorBase*>(this)->inlineStorage();
  }

  T* inlineStorage()
  {
    return static_cast<T*>(mStorage.addr());
  }

  T* beginNoCheck() const
  {
    return mBegin;
  }

  T* endNoCheck()
  {
    return mBegin + mLength;
  }

  const T* endNoCheck() const
  {
    return mBegin + mLength;
  }

#ifdef DEBUG
  size_t reserved() const
  {
    MOZ_ASSERT(mReserved <= mCapacity);
    MOZ_ASSERT(mLength <= mReserved);
    return mReserved;
  }
#endif

  /* Append operations guaranteed to succeed due to pre-reserved space. */
  template<typename U> void internalAppend(U&& aU);
  template<typename U, size_t O, class BP, class UV>
  void internalAppendAll(const VectorBase<U, O, BP, UV>& aU);
  void internalAppendN(const T& aT, size_t aN);
  template<typename U> void internalAppend(const U* aBegin, size_t aLength);
public:
  static const size_t sMaxInlineStorage = N;

  typedef T ElementType;

  explicit VectorBase(AllocPolicy = AllocPolicy());
  explicit VectorBase(ThisVector&&); /* Move constructor. */
  ThisVector& operator=(ThisVector&&); /* Move assignment. */
  ~VectorBase();

  /* accessors */

  const AllocPolicy& allocPolicy() const { return *this; }

  AllocPolicy& allocPolicy() { return *this; }

  enum { InlineLength = N };

  size_t length() const { return mLength; }

  bool empty() const { return mLength == 0; }

  size_t capacity() const { return mCapacity; }

  T* begin()
  {
    MOZ_ASSERT(!mEntered);
    return mBegin;
  }

  const T* begin() const
  {
    MOZ_ASSERT(!mEntered);
    return mBegin;
  }

  T* end()
  {
    MOZ_ASSERT(!mEntered);
    return mBegin + mLength;
  }

  const T* end() const
  {
    MOZ_ASSERT(!mEntered);
    return mBegin + mLength;
  }

  T& operator[](size_t aIndex)
  {
    MOZ_ASSERT(!mEntered);
    MOZ_ASSERT(aIndex < mLength);
    return begin()[aIndex];
  }

  const T& operator[](size_t aIndex) const
  {
    MOZ_ASSERT(!mEntered);
    MOZ_ASSERT(aIndex < mLength);
    return begin()[aIndex];
  }

  T& back()
  {
    MOZ_ASSERT(!mEntered);
    MOZ_ASSERT(!empty());
    return *(end() - 1);
  }

  const T& back() const
  {
    MOZ_ASSERT(!mEntered);
    MOZ_ASSERT(!empty());
    return *(end() - 1);
  }
  class Range
  {
    friend class VectorBase;
    T* mCur;
    T* mEnd;
    Range(T* aCur, T* aEnd)
      : mCur(aCur)
      , mEnd(aEnd)
    {
      MOZ_ASSERT(aCur <= aEnd);
    }

  public:
    Range() {}
    bool empty() const { return mCur == mEnd; }
    size_t remain() const { return PointerRangeSize(mCur, mEnd); }
    T& front() const { MOZ_ASSERT(!empty()); return *mCur; }
    void popFront() { MOZ_ASSERT(!empty()); ++mCur; }
    T popCopyFront() { MOZ_ASSERT(!empty()); return *mCur++; }
  };

  Range all() { return Range(begin(), end()); }
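
  /*
   * Illustrative (non-normative) sketch of iterating with Range, not part of
   * the original header. It assumes a Vector<int> named |v| and simply sums
   * its elements; front() yields a reference and popFront() advances:
   *
   *   int sum = 0;
   *   for (Vector<int>::Range r = v.all(); !r.empty(); r.popFront()) {
   *     sum += r.front();
   *   }
   */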
  /* mutators */

  /**
   * Given that the vector is empty and has no inline storage, grow to
   * |aRequest| capacity.
   */
  bool initCapacity(size_t aRequest);

  /**
   * If reserve(length() + N) succeeds, the N next appends are guaranteed to
   * succeed.
   */
  bool reserve(size_t aRequest);

  /**
   * Destroy elements in the range [end() - aIncr, end()). Does not deallocate
   * or unreserve storage for those elements.
   */
  void shrinkBy(size_t aIncr);

  /** Grow the vector by aIncr elements. */
  bool growBy(size_t aIncr);

  /** Call shrinkBy or growBy based on whether aNewLength > length(). */
  bool resize(size_t aNewLength);

  /**
   * Increase the length of the vector, but don't initialize the new elements
   * -- leave them as uninitialized memory.
   */
  bool growByUninitialized(size_t aIncr);
  bool resizeUninitialized(size_t aNewLength);

  /** Shorthand for shrinkBy(length()). */
  void clear();

  /** Clears and releases any heap-allocated storage. */
  void clearAndFree();

  /**
   * If true, appending |aNeeded| elements won't reallocate element storage.
   * This *doesn't* mean that infallibleAppend may be used! You still must
   * reserve the extra space, even if this method indicates that appends won't
   * need to reallocate element storage.
   */
  bool canAppendWithoutRealloc(size_t aNeeded) const;

  /** Potentially fallible append operations. */

  /**
   * This can take either a T& or a T&&. Given a T&&, it moves |aU| into the
   * vector, instead of copying it. If it fails, |aU| is left unmoved. ("We are
   * not amused.")
   */
  template<typename U> bool append(U&& aU);

  template<typename U, size_t O, class BP, class UV>
  bool appendAll(const VectorBase<U, O, BP, UV>& aU);
  bool appendN(const T& aT, size_t aN);
  template<typename U> bool append(const U* aBegin, const U* aEnd);
  template<typename U> bool append(const U* aBegin, size_t aLength);

  /*
   * Guaranteed-infallible append operations for use upon vectors whose
   * memory has been pre-reserved. Don't use this if you haven't reserved the
   * memory!
   */
  template<typename U> void infallibleAppend(U&& aU)
  {
    internalAppend(Forward<U>(aU));
  }
  void infallibleAppendN(const T& aT, size_t aN)
  {
    internalAppendN(aT, aN);
  }
  template<typename U> void infallibleAppend(const U* aBegin, const U* aEnd)
  {
    internalAppend(aBegin, PointerRangeSize(aBegin, aEnd));
  }
  template<typename U> void infallibleAppend(const U* aBegin, size_t aLength)
  {
    internalAppend(aBegin, aLength);
  }
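
  /*
   * Illustrative (non-normative) sketch of the reserve/infallibleAppend
   * pattern described above, not part of the original header. It assumes a
   * Vector<int> named |v| and an element count |n| known up front:
   *
   *   if (!v.reserve(v.length() + n)) {
   *     return false;  // allocation failed; the AllocPolicy has reported it
   *   }
   *   for (size_t i = 0; i < n; i++) {
   *     v.infallibleAppend(int(i));  // cannot fail: space was reserved above
   *   }
   */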
  void popBack();

  T popCopy();

  /**
   * Transfers ownership of the internal buffer used by this vector to the
   * caller. (It's the caller's responsibility to properly deallocate this
   * buffer, in accordance with this vector's AllocPolicy.) After this call,
   * the vector is empty. Since the returned buffer may need to be allocated
   * (if the elements are currently stored in-place), the call can fail,
   * returning nullptr.
   *
   * N.B. Although a T*, only the range [0, length()) is constructed.
   */
  T* extractRawBuffer();

  /**
   * Transfer ownership of an array of objects into the vector. The caller
   * must have allocated the array in accordance with this vector's
   * AllocPolicy.
   *
   * N.B. This call assumes that there are no uninitialized elements in the
   * passed array.
   */
  void replaceRawBuffer(T* aP, size_t aLength);

  /**
   * Places |aVal| at position |aP|, shifting existing elements from |aP| onward
   * one position higher. On success, |aP| should not be reused because it'll
   * be a dangling pointer if reallocation of the vector storage occurred; the
   * return value should be used instead. On failure, nullptr is returned.
   *
   * Example usage:
   *
   *   if (!(p = vec.insert(p, val))) {
   *     <handle failure>
   *   }
   *   <keep working with p>
   *
   * This is inherently a linear-time operation. Be careful!
   */
  template<typename U>
  T* insert(T* aP, U&& aVal);

  /**
   * Removes the element |aT|, which must fall in the bounds [begin, end),
   * shifting existing elements from |aT + 1| onward one position lower.
   */
  void erase(T* aT);

  /**
   * Removes the elements [|aBegin|, |aEnd|), which must fall in the bounds
   * [begin, end), shifting existing elements from |aEnd| onward down to
   * |aBegin|'s old position.
   */
  void erase(T* aBegin, T* aEnd);

  /**
   * Measure the size of the vector's heap-allocated storage.
   */
  size_t sizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const;

  /**
   * Like sizeOfExcludingThis, but also measures the size of the vector
   * object (which must be heap-allocated) itself.
   */
  size_t sizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const;

  void swap(ThisVector& aOther);
private:
  VectorBase(const VectorBase&) MOZ_DELETE;
  void operator=(const VectorBase&) MOZ_DELETE;

  /* Move-construct/assign only from our derived class, ThisVector. */
  VectorBase(VectorBase&&) MOZ_DELETE;
  void operator=(VectorBase&&) MOZ_DELETE;
};

/* This does the re-entrancy check plus several other sanity checks. */
#define MOZ_REENTRANCY_GUARD_ET_AL \
  ReentrancyGuard g(*this); \
  MOZ_ASSERT_IF(usingInlineStorage(), mCapacity == kInlineCapacity); \
  MOZ_ASSERT(reserved() <= mCapacity); \
  MOZ_ASSERT(mLength <= reserved()); \
  MOZ_ASSERT(mLength <= mCapacity)
/* Vector Implementation */

template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE
VectorBase<T, N, AP, TV>::VectorBase(AP aAP)
  : AP(aAP)
  , mLength(0)
  , mCapacity(kInlineCapacity)
#ifdef DEBUG
  , mReserved(kInlineCapacity)
  , mEntered(false)
#endif
{
  mBegin = static_cast<T*>(mStorage.addr());
}
/* Move constructor. */
template<typename T, size_t N, class AllocPolicy, class TV>
MOZ_ALWAYS_INLINE
VectorBase<T, N, AllocPolicy, TV>::VectorBase(TV&& aRhs)
  : AllocPolicy(Move(aRhs))
#ifdef DEBUG
  , mEntered(false)
#endif
{
  mLength = aRhs.mLength;
  mCapacity = aRhs.mCapacity;
#ifdef DEBUG
  mReserved = aRhs.mReserved;
#endif

  if (aRhs.usingInlineStorage()) {
    /* We can't move the buffer over in this case, so copy elements. */
    mBegin = static_cast<T*>(mStorage.addr());
    Impl::moveConstruct(mBegin, aRhs.beginNoCheck(), aRhs.endNoCheck());
    /*
     * Leave aRhs's mLength, mBegin, mCapacity, and mReserved as they are.
     * The elements in its in-line storage still need to be destroyed.
     */
  } else {
    /*
     * Take src's buffer, and turn src into an empty vector using
     * in-line storage.
     */
    mBegin = aRhs.mBegin;
    aRhs.mBegin = static_cast<T*>(aRhs.mStorage.addr());
    aRhs.mCapacity = kInlineCapacity;
    aRhs.mLength = 0;
#ifdef DEBUG
    aRhs.mReserved = kInlineCapacity;
#endif
  }
}
/* Move assignment. */
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE TV&
VectorBase<T, N, AP, TV>::operator=(TV&& aRhs)
{
  MOZ_ASSERT(this != &aRhs, "self-move assignment is prohibited");
  TV* tv = static_cast<TV*>(this);
  tv->~TV();
  new(tv) TV(Move(aRhs));
  return *tv;
}
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE
VectorBase<T, N, AP, TV>::~VectorBase()
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  Impl::destroy(beginNoCheck(), endNoCheck());
  if (!usingInlineStorage()) {
    this->free_(beginNoCheck());
  }
}
/*
 * This function will create a new heap buffer with capacity aNewCap,
 * move all elements from the inline buffer into this new buffer,
 * and return false on OOM.
 */
template<typename T, size_t N, class AP, class TV>
inline bool
VectorBase<T, N, AP, TV>::convertToHeapStorage(size_t aNewCap)
{
  MOZ_ASSERT(usingInlineStorage());

  /* Allocate buffer. */
  MOZ_ASSERT(!detail::CapacityHasExcessSpace<T>(aNewCap));
  T* newBuf = this->template pod_malloc<T>(aNewCap);
  if (!newBuf) {
    return false;
  }

  /* Copy inline elements into heap buffer. */
  Impl::moveConstruct(newBuf, beginNoCheck(), endNoCheck());
  Impl::destroy(beginNoCheck(), endNoCheck());

  /* Switch in heap buffer. */
  mBegin = newBuf;
  /* mLength is unchanged. */
  mCapacity = aNewCap;
  return true;
}
template<typename T, size_t N, class AP, class TV>
MOZ_NEVER_INLINE bool
VectorBase<T, N, AP, TV>::growStorageBy(size_t aIncr)
{
  MOZ_ASSERT(mLength + aIncr > mCapacity);
  MOZ_ASSERT_IF(!usingInlineStorage(),
                !detail::CapacityHasExcessSpace<T>(mCapacity));

  /*
   * When choosing a new capacity, its size should be as close to 2**N bytes
   * as possible. 2**N-sized requests are best because they are unlikely to
   * be rounded up by the allocator. Asking for a 2**N number of elements
   * isn't as good, because if sizeof(T) is not a power-of-two that would
   * result in a non-2**N request size.
   */

  size_t newCap;

  if (aIncr == 1) {
    if (usingInlineStorage()) {
      /* This case occurs in ~70--80% of the calls to this function. */
      size_t newSize =
        tl::RoundUpPow2<(kInlineCapacity + 1) * sizeof(T)>::value;
      newCap = newSize / sizeof(T);
      goto convert;
    }

    if (mLength == 0) {
      /* This case occurs in ~0--10% of the calls to this function. */
      newCap = 1;
      goto grow;
    }

    /* This case occurs in ~15--20% of the calls to this function. */

    /*
     * Will mLength * 4 * sizeof(T) overflow? This condition limits a vector
     * to 1GB of memory on a 32-bit system, which is a reasonable limit. It
     * also ensures that
     *
     *   static_cast<char*>(end()) - static_cast<char*>(begin())
     *
     * doesn't overflow ptrdiff_t (see bug 510319).
     */
    if (mLength & tl::MulOverflowMask<4 * sizeof(T)>::value) {
      this->reportAllocOverflow();
      return false;
    }

    /*
     * If we reach here, the existing capacity will have a size that is already
     * as close to 2^N as sizeof(T) will allow. Just double the capacity, and
     * then there might be space for one more element.
     */
    newCap = mLength * 2;
    if (detail::CapacityHasExcessSpace<T>(newCap)) {
      newCap += 1;
    }
  } else {
    /* This case occurs in ~2% of the calls to this function. */
    size_t newMinCap = mLength + aIncr;

    /* Did mLength + aIncr overflow? Will newCap * sizeof(T) overflow? */
    if (newMinCap < mLength ||
        newMinCap & tl::MulOverflowMask<2 * sizeof(T)>::value)
    {
      this->reportAllocOverflow();
      return false;
    }

    size_t newMinSize = newMinCap * sizeof(T);
    size_t newSize = RoundUpPow2(newMinSize);
    newCap = newSize / sizeof(T);
  }

  if (usingInlineStorage()) {
convert:
    return convertToHeapStorage(newCap);
  }

grow:
  return Impl::growTo(*this, newCap);
}
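
/*
 * Illustrative numeric walk-through of the single-element growth path above
 * (not part of the original header). Suppose sizeof(T) == 12 and the vector
 * is full on the heap with mLength == mCapacity == 10 (120 bytes;
 * RoundUpPow2(120) == 128 and 128 - 120 == 8 < 12, so there is no excess
 * space). Doubling gives newCap == 20 (240 bytes), but RoundUpPow2(240) == 256
 * leaves 16 >= 12 bytes of excess, so CapacityHasExcessSpace is true and
 * newCap becomes 21 (252 bytes), leaving only 4 bytes of slop if the allocator
 * rounds the request up to 256 bytes.
 */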
template<typename T, size_t N, class AP, class TV>
inline bool
VectorBase<T, N, AP, TV>::initCapacity(size_t aRequest)
{
  MOZ_ASSERT(empty());
  MOZ_ASSERT(usingInlineStorage());
  if (aRequest == 0) {
    return true;
  }
  T* newbuf = this->template pod_malloc<T>(aRequest);
  if (!newbuf) {
    return false;
  }
  mBegin = newbuf;
  mCapacity = aRequest;
#ifdef DEBUG
  mReserved = aRequest;
#endif
  return true;
}
template<typename T, size_t N, class AP, class TV>
inline bool
VectorBase<T, N, AP, TV>::reserve(size_t aRequest)
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  if (aRequest > mCapacity && !growStorageBy(aRequest - mLength)) {
    return false;
  }
#ifdef DEBUG
  if (aRequest > mReserved) {
    mReserved = aRequest;
  }
  MOZ_ASSERT(mLength <= mReserved);
  MOZ_ASSERT(mReserved <= mCapacity);
#endif
  return true;
}
template<typename T, size_t N, class AP, class TV>
inline void
VectorBase<T, N, AP, TV>::shrinkBy(size_t aIncr)
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  MOZ_ASSERT(aIncr <= mLength);
  Impl::destroy(endNoCheck() - aIncr, endNoCheck());
  mLength -= aIncr;
}
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::growBy(size_t aIncr)
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  if (aIncr > mCapacity - mLength && !growStorageBy(aIncr)) {
    return false;
  }
  MOZ_ASSERT(mLength + aIncr <= mCapacity);
  T* newend = endNoCheck() + aIncr;
  Impl::initialize(endNoCheck(), newend);
  mLength += aIncr;
#ifdef DEBUG
  if (mLength > mReserved) {
    mReserved = mLength;
  }
#endif
  return true;
}
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::growByUninitialized(size_t aIncr)
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  if (aIncr > mCapacity - mLength && !growStorageBy(aIncr)) {
    return false;
  }
  MOZ_ASSERT(mLength + aIncr <= mCapacity);
  mLength += aIncr;
#ifdef DEBUG
  if (mLength > mReserved) {
    mReserved = mLength;
  }
#endif
  return true;
}
template<typename T, size_t N, class AP, class TV>
inline bool
VectorBase<T, N, AP, TV>::resize(size_t aNewLength)
{
  size_t curLength = mLength;
  if (aNewLength > curLength) {
    return growBy(aNewLength - curLength);
  }
  shrinkBy(curLength - aNewLength);
  return true;
}

template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::resizeUninitialized(size_t aNewLength)
{
  size_t curLength = mLength;
  if (aNewLength > curLength) {
    return growByUninitialized(aNewLength - curLength);
  }
  shrinkBy(curLength - aNewLength);
  return true;
}
template<typename T, size_t N, class AP, class TV>
inline void
VectorBase<T, N, AP, TV>::clear()
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  Impl::destroy(beginNoCheck(), endNoCheck());
  mLength = 0;
}

template<typename T, size_t N, class AP, class TV>
inline void
VectorBase<T, N, AP, TV>::clearAndFree()
{
  clear();

  if (usingInlineStorage()) {
    return;
  }
  this->free_(beginNoCheck());
  mBegin = static_cast<T*>(mStorage.addr());
  mCapacity = kInlineCapacity;
#ifdef DEBUG
  mReserved = kInlineCapacity;
#endif
}
template<typename T, size_t N, class AP, class TV>
inline bool
VectorBase<T, N, AP, TV>::canAppendWithoutRealloc(size_t aNeeded) const
{
  return mLength + aNeeded <= mCapacity;
}

template<typename T, size_t N, class AP, class TV>
template<typename U, size_t O, class BP, class UV>
MOZ_ALWAYS_INLINE void
VectorBase<T, N, AP, TV>::internalAppendAll(
  const VectorBase<U, O, BP, UV>& aOther)
{
  internalAppend(aOther.begin(), aOther.length());
}

template<typename T, size_t N, class AP, class TV>
template<typename U>
MOZ_ALWAYS_INLINE void
VectorBase<T, N, AP, TV>::internalAppend(U&& aU)
{
  MOZ_ASSERT(mLength + 1 <= mReserved);
  MOZ_ASSERT(mReserved <= mCapacity);
  new(endNoCheck()) T(Forward<U>(aU));
  ++mLength;
}
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::appendN(const T& aT, size_t aNeeded)
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  if (mLength + aNeeded > mCapacity && !growStorageBy(aNeeded)) {
    return false;
  }
#ifdef DEBUG
  if (mLength + aNeeded > mReserved) {
    mReserved = mLength + aNeeded;
  }
#endif
  internalAppendN(aT, aNeeded);
  return true;
}

template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE void
VectorBase<T, N, AP, TV>::internalAppendN(const T& aT, size_t aNeeded)
{
  MOZ_ASSERT(mLength + aNeeded <= mReserved);
  MOZ_ASSERT(mReserved <= mCapacity);
  Impl::copyConstructN(endNoCheck(), aNeeded, aT);
  mLength += aNeeded;
}
template<typename T, size_t N, class AP, class TV>
template<typename U>
inline T*
VectorBase<T, N, AP, TV>::insert(T* aP, U&& aVal)
{
  MOZ_ASSERT(begin() <= aP);
  MOZ_ASSERT(aP <= end());
  size_t pos = aP - begin();
  MOZ_ASSERT(pos <= mLength);
  size_t oldLength = mLength;
  if (pos == oldLength) {
    if (!append(Forward<U>(aVal))) {
      return nullptr;
    }
  } else {
    T oldBack = Move(back());
    if (!append(Move(oldBack))) { /* Dup the last element. */
      return nullptr;
    }
    for (size_t i = oldLength; i > pos; --i) {
      (*this)[i] = Move((*this)[i - 1]);
    }
    (*this)[pos] = Forward<U>(aVal);
  }
  return begin() + pos;
}
template<typename T, size_t N, class AP, class TV>
inline void
VectorBase<T, N, AP, TV>::erase(T* aIt)
{
  MOZ_ASSERT(begin() <= aIt);
  MOZ_ASSERT(aIt < end());
  while (aIt + 1 < end()) {
    *aIt = Move(*(aIt + 1));
    ++aIt;
  }
  popBack();
}

template<typename T, size_t N, class AP, class TV>
inline void
VectorBase<T, N, AP, TV>::erase(T* aBegin, T* aEnd)
{
  MOZ_ASSERT(begin() <= aBegin);
  MOZ_ASSERT(aBegin <= aEnd);
  MOZ_ASSERT(aEnd <= end());
  while (aEnd < end()) {
    *aBegin++ = Move(*aEnd++);
  }
  shrinkBy(aEnd - aBegin);
}
template<typename T, size_t N, class AP, class TV>
template<typename U>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::append(const U* aInsBegin, const U* aInsEnd)
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  size_t aNeeded = PointerRangeSize(aInsBegin, aInsEnd);
  if (mLength + aNeeded > mCapacity && !growStorageBy(aNeeded)) {
    return false;
  }
#ifdef DEBUG
  if (mLength + aNeeded > mReserved) {
    mReserved = mLength + aNeeded;
  }
#endif
  internalAppend(aInsBegin, aNeeded);
  return true;
}

template<typename T, size_t N, class AP, class TV>
template<typename U>
MOZ_ALWAYS_INLINE void
VectorBase<T, N, AP, TV>::internalAppend(const U* aInsBegin, size_t aInsLength)
{
  MOZ_ASSERT(mLength + aInsLength <= mReserved);
  MOZ_ASSERT(mReserved <= mCapacity);
  Impl::copyConstruct(endNoCheck(), aInsBegin, aInsBegin + aInsLength);
  mLength += aInsLength;
}
template<typename T, size_t N, class AP, class TV>
template<typename U>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::append(U&& aU)
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  if (mLength == mCapacity && !growStorageBy(1)) {
    return false;
  }
#ifdef DEBUG
  if (mLength + 1 > mReserved) {
    mReserved = mLength + 1;
  }
#endif
  internalAppend(Forward<U>(aU));
  return true;
}

template<typename T, size_t N, class AP, class TV>
template<typename U, size_t O, class BP, class UV>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::appendAll(const VectorBase<U, O, BP, UV>& aOther)
{
  return append(aOther.begin(), aOther.length());
}

template<typename T, size_t N, class AP, class TV>
template<class U>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::append(const U* aInsBegin, size_t aInsLength)
{
  return append(aInsBegin, aInsBegin + aInsLength);
}
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE void
VectorBase<T, N, AP, TV>::popBack()
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  MOZ_ASSERT(!empty());
  --mLength;
  endNoCheck()->~T();
}

template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE T
VectorBase<T, N, AP, TV>::popCopy()
{
  T ret = back();
  popBack();
  return ret;
}
template<typename T, size_t N, class AP, class TV>
inline T*
VectorBase<T, N, AP, TV>::extractRawBuffer()
{
  T* ret;
  if (usingInlineStorage()) {
    ret = this->template pod_malloc<T>(mLength);
    if (!ret) {
      return nullptr;
    }
    Impl::copyConstruct(ret, beginNoCheck(), endNoCheck());
    Impl::destroy(beginNoCheck(), endNoCheck());
    /* mBegin, mCapacity are unchanged. */
    mLength = 0;
  } else {
    ret = mBegin;
    mBegin = static_cast<T*>(mStorage.addr());
    mLength = 0;
    mCapacity = kInlineCapacity;
#ifdef DEBUG
    mReserved = kInlineCapacity;
#endif
  }
  return ret;
}
template<typename T, size_t N, class AP, class TV>
inline void
VectorBase<T, N, AP, TV>::replaceRawBuffer(T* aP, size_t aLength)
{
  MOZ_REENTRANCY_GUARD_ET_AL;

  /* Destroy what we have. */
  Impl::destroy(beginNoCheck(), endNoCheck());
  if (!usingInlineStorage()) {
    this->free_(beginNoCheck());
  }

  /* Take in the new buffer. */
  if (aLength <= kInlineCapacity) {
    /*
     * We convert to inline storage if possible, even though aP might
     * otherwise be acceptable. Maybe this behaviour should be
     * specifiable with an argument to this function.
     */
    mBegin = static_cast<T*>(mStorage.addr());
    mLength = aLength;
    mCapacity = kInlineCapacity;
    Impl::moveConstruct(mBegin, aP, aP + aLength);
    Impl::destroy(aP, aP + aLength);
    this->free_(aP);
  } else {
    mBegin = aP;
    mLength = aLength;
    mCapacity = aLength;
  }
#ifdef DEBUG
  mReserved = aLength;
#endif
}
template<typename T, size_t N, class AP, class TV>
inline size_t
VectorBase<T, N, AP, TV>::sizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
{
  return usingInlineStorage() ? 0 : aMallocSizeOf(beginNoCheck());
}

template<typename T, size_t N, class AP, class TV>
inline size_t
VectorBase<T, N, AP, TV>::sizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
{
  return aMallocSizeOf(this) + sizeOfExcludingThis(aMallocSizeOf);
}
template<typename T, size_t N, class AP, class TV>
inline void
VectorBase<T, N, AP, TV>::swap(TV& aOther)
{
  static_assert(N == 0,
                "still need to implement this for N != 0");

  // This only works when inline storage is always empty.
  if (!usingInlineStorage() && aOther.usingInlineStorage()) {
    aOther.mBegin = mBegin;
    mBegin = inlineStorage();
  } else if (usingInlineStorage() && !aOther.usingInlineStorage()) {
    mBegin = aOther.mBegin;
    aOther.mBegin = aOther.inlineStorage();
  } else if (!usingInlineStorage() && !aOther.usingInlineStorage()) {
    Swap(mBegin, aOther.mBegin);
  } else {
    // This case is a no-op, since we'd set both to use their inline storage.
  }

  Swap(mLength, aOther.mLength);
  Swap(mCapacity, aOther.mCapacity);
#ifdef DEBUG
  Swap(mReserved, aOther.mReserved);
#endif
}
/*
 * STL-like container providing a short-lived, dynamic buffer. Vector calls the
 * constructors/destructors of all elements stored in its internal buffer, so
 * non-PODs may be safely used. Additionally, Vector will store the first N
 * elements in-place before resorting to dynamic allocation.
 *
 * T requirements:
 *  - default and copy constructible, assignable, destructible
 *  - operations do not throw
 * N requirements:
 *  - any value, however, N is clamped to min/max values
 * AllocPolicy:
 *  - see "Allocation policies" in AllocPolicy.h (defaults to
 *    mozilla::MallocAllocPolicy)
 *
 * Vector is not reentrant: T member functions called during Vector member
 * functions must not call back into the same object!
 */
template<typename T,
         size_t MinInlineCapacity = 0,
         class AllocPolicy = MallocAllocPolicy>
class Vector
  : public VectorBase<T,
                      MinInlineCapacity,
                      AllocPolicy,
                      Vector<T, MinInlineCapacity, AllocPolicy> >
{
  typedef VectorBase<T, MinInlineCapacity, AllocPolicy, Vector> Base;

public:
  explicit Vector(AllocPolicy alloc = AllocPolicy()) : Base(alloc) {}
  Vector(Vector&& vec) : Base(Move(vec)) {}
  Vector& operator=(Vector&& aOther)
  {
    return Base::operator=(Move(aOther));
  }
};
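
/*
 * Illustrative (non-normative) usage sketch for mozilla::Vector, not part of
 * the original header. It assumes an element type meeting the T requirements
 * listed above; |process| is a hypothetical callback used only for the sake
 * of the example:
 *
 *   mozilla::Vector<int, 8> v;      // first 8 elements use inline storage
 *   if (!v.append(1) || !v.appendN(0, 3)) {
 *     return false;                 // every fallible append must be checked
 *   }
 *   for (size_t i = 0; i < v.length(); i++) {
 *     process(v[i]);
 *   }
 *   v.clearAndFree();               // drop elements and any heap storage
 */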
} // namespace mozilla

#ifdef _MSC_VER
#pragma warning(pop)
#endif

#endif /* mozilla_Vector_h */