/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

/* A type/length-parametrized vector class. */

#ifndef mozilla_Vector_h
#define mozilla_Vector_h

#include "mozilla/Alignment.h"
#include "mozilla/AllocPolicy.h"
#include "mozilla/ArrayUtils.h" // for PointerRangeSize
#include "mozilla/Assertions.h"
#include "mozilla/Attributes.h"
#include "mozilla/MathAlgorithms.h"
#include "mozilla/MemoryReporting.h"
#include "mozilla/Move.h"
#include "mozilla/NullPtr.h"
#include "mozilla/ReentrancyGuard.h"
#include "mozilla/TemplateLib.h"
#include "mozilla/TypeTraits.h"

#include <new> // for placement new
/* Silence dire "bugs in previous versions of MSVC have been fixed" warnings */
#ifdef _MSC_VER
#pragma warning(push)
#pragma warning(disable:4345)
#endif
namespace mozilla {

template<typename T, size_t N, class AllocPolicy, class ThisVector>
class VectorBase;

namespace detail {
/*
 * Check that the given capacity wastes the minimal amount of space if
 * allocated on the heap. This means that aCapacity*sizeof(T) is as close to a
 * power-of-two as possible. growStorageBy() is responsible for ensuring this.
 */
template<typename T>
static bool CapacityHasExcessSpace(size_t aCapacity)
{
  size_t size = aCapacity * sizeof(T);
  return RoundUpPow2(size) - size >= sizeof(T);
}
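
/*
 * Worked example (illustrative, not from the original header): assuming
 * sizeof(T) == 12, a capacity of 5 gives size == 60 and RoundUpPow2(60) == 64;
 * 64 - 60 < 12, so there is no excess space. A capacity of 4 gives size == 48
 * and RoundUpPow2(48) == 64; 64 - 48 >= 12, so the same power-of-two block
 * could hold at least one more element and the capacity is considered wasteful.
 */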
/*
 * This template class provides a default implementation for vector operations
 * when the element type is not known to be a POD, as judged by IsPod.
 */
template<typename T, size_t N, class AP, class ThisVector, bool IsPod>
struct VectorImpl
{
  /* Destroys constructed objects in the range [aBegin, aEnd). */
  static inline void destroy(T* aBegin, T* aEnd)
  {
    MOZ_ASSERT(aBegin <= aEnd);
    for (T* p = aBegin; p < aEnd; ++p) {
      p->~T();
    }
  }
  /* Constructs objects in the uninitialized range [aBegin, aEnd). */
  static inline void initialize(T* aBegin, T* aEnd)
  {
    MOZ_ASSERT(aBegin <= aEnd);
    for (T* p = aBegin; p < aEnd; ++p) {
      new(p) T();
    }
  }
  /*
   * Copy-constructs objects in the uninitialized range
   * [aDst, aDst+(aSrcEnd-aSrcStart)) from the range [aSrcStart, aSrcEnd).
   */
  template<typename U>
  static inline void copyConstruct(T* aDst,
                                   const U* aSrcStart, const U* aSrcEnd)
  {
    MOZ_ASSERT(aSrcStart <= aSrcEnd);
    for (const U* p = aSrcStart; p < aSrcEnd; ++p, ++aDst) {
      new(aDst) T(*p);
    }
  }
  /*
   * Move-constructs objects in the uninitialized range
   * [aDst, aDst+(aSrcEnd-aSrcStart)) from the range [aSrcStart, aSrcEnd).
   */
  template<typename U>
  static inline void moveConstruct(T* aDst, U* aSrcStart, U* aSrcEnd)
  {
    MOZ_ASSERT(aSrcStart <= aSrcEnd);
    for (U* p = aSrcStart; p < aSrcEnd; ++p, ++aDst) {
      new(aDst) T(Move(*p));
    }
  }
  /*
   * Copy-constructs objects in the uninitialized range [aDst, aDst+aN) from
   * the same object aU.
   */
  template<typename U>
  static inline void copyConstructN(T* aDst, size_t aN, const U& aU)
  {
    for (T* end = aDst + aN; aDst < end; ++aDst) {
      new(aDst) T(aU);
    }
  }
  /*
   * Grows the given buffer to have capacity aNewCap, preserving the objects
   * constructed in the range [begin, end) and updating aV. Assumes that (1)
   * aNewCap has not overflowed, and (2) multiplying aNewCap by sizeof(T) will
   * not overflow.
   */
  static inline bool
  growTo(VectorBase<T, N, AP, ThisVector>& aV, size_t aNewCap)
  {
    MOZ_ASSERT(!aV.usingInlineStorage());
    MOZ_ASSERT(!CapacityHasExcessSpace<T>(aNewCap));
    T* newbuf = aV.template pod_malloc<T>(aNewCap);
    if (!newbuf) {
      return false;
    }
    T* dst = newbuf;
    T* src = aV.beginNoCheck();
    for (; src < aV.endNoCheck(); ++dst, ++src) {
      new(dst) T(Move(*src));
    }
    VectorImpl::destroy(aV.beginNoCheck(), aV.endNoCheck());
    aV.free_(aV.mBegin);
    aV.mBegin = newbuf;
    /* aV.mLength is unchanged. */
    aV.mCapacity = aNewCap;
    return true;
  }
};
/*
 * This partial template specialization provides a default implementation for
 * vector operations when the element type is known to be a POD, as judged by
 * IsPod.
 */
template<typename T, size_t N, class AP, class ThisVector>
struct VectorImpl<T, N, AP, ThisVector, true>
{
  static inline void destroy(T*, T*) {}

  static inline void initialize(T* aBegin, T* aEnd)
  {
    /*
     * You would think that memset would be a big win (or even break even)
     * when we know T is a POD. But currently it's not. This is probably
     * because |append| tends to be given small ranges and memset requires
     * a function call that doesn't get inlined.
     *
     * memset(aBegin, 0, sizeof(T) * (aEnd - aBegin));
     */
    MOZ_ASSERT(aBegin <= aEnd);
    for (T* p = aBegin; p < aEnd; ++p) {
      new(p) T();
    }
  }
  template<typename U>
  static inline void copyConstruct(T* aDst,
                                   const U* aSrcStart, const U* aSrcEnd)
  {
    /*
     * See above memset comment. Also, notice that copyConstruct is
     * currently templated (T != U), so memcpy won't work without
     * requiring T == U.
     *
     * memcpy(aDst, aSrcStart, sizeof(T) * (aSrcEnd - aSrcStart));
     */
    MOZ_ASSERT(aSrcStart <= aSrcEnd);
    for (const U* p = aSrcStart; p < aSrcEnd; ++p, ++aDst) {
      *aDst = *p;
    }
  }
  template<typename U>
  static inline void moveConstruct(T* aDst,
                                   const U* aSrcStart, const U* aSrcEnd)
  {
    copyConstruct(aDst, aSrcStart, aSrcEnd);
  }
  static inline void copyConstructN(T* aDst, size_t aN, const T& aT)
  {
    for (T* end = aDst + aN; aDst < end; ++aDst) {
      *aDst = aT;
    }
  }
  static inline bool
  growTo(VectorBase<T, N, AP, ThisVector>& aV, size_t aNewCap)
  {
    MOZ_ASSERT(!aV.usingInlineStorage());
    MOZ_ASSERT(!CapacityHasExcessSpace<T>(aNewCap));
    T* newbuf = aV.template pod_realloc<T>(aV.mBegin, aV.mCapacity, aNewCap);
    if (!newbuf) {
      return false;
    }
    aV.mBegin = newbuf;
    /* aV.mLength is unchanged. */
    aV.mCapacity = aNewCap;
    return true;
  }
};

} // namespace detail
/*
 * A CRTP base class for vector-like classes. Unless you really really want
 * your own vector class -- and you almost certainly don't -- you should use
 * mozilla::Vector instead!
 *
 * See mozilla::Vector for interface requirements.
 */
template<typename T, size_t N, class AllocPolicy, class ThisVector>
class VectorBase : private AllocPolicy
{
  static const bool kElemIsPod = IsPod<T>::value;
  typedef detail::VectorImpl<T, N, AllocPolicy, ThisVector, kElemIsPod> Impl;
  friend struct detail::VectorImpl<T, N, AllocPolicy, ThisVector, kElemIsPod>;

  bool growStorageBy(size_t aIncr);
  bool convertToHeapStorage(size_t aNewCap);
  /* magic constants */

  static const int kMaxInlineBytes = 1024;
  /* compute constants */

  /*
   * Consider element size to be 1 for buffer sizing if there are 0 inline
   * elements. This allows us to compile when the definition of the element
   * type is not visible here.
   *
   * Explicit specialization is only allowed at namespace scope, so in order
   * to keep everything here, we use a dummy template parameter with partial
   * specialization.
   */
  template<int M, int Dummy>
  struct ElemSize
  {
    static const size_t value = sizeof(T);
  };
  template<int Dummy>
  struct ElemSize<0, Dummy>
  {
    static const size_t value = 1;
  };

  static const size_t kInlineCapacity =
    tl::Min<N, kMaxInlineBytes / ElemSize<N, 0>::value>::value;

  /* Calculate inline buffer size; avoid 0-sized array. */
  static const size_t kInlineBytes =
    tl::Max<1, kInlineCapacity * ElemSize<N, 0>::value>::value;
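
  /*
   * Worked example (illustrative, assuming sizeof(T) == 16 and N == 8):
   * ElemSize<8, 0>::value == 16, so kInlineCapacity == Min<8, 1024 / 16>
   * == Min<8, 64> == 8 and kInlineBytes == Max<1, 8 * 16> == 128. With a
   * very large element, say sizeof(T) == 512, kInlineCapacity would be
   * clamped to 1024 / 512 == 2 so the inline buffer never exceeds
   * kMaxInlineBytes.
   */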
  /*
   * Pointer to the buffer, be it inline or heap-allocated. Only [mBegin,
   * mBegin + mLength) hold valid constructed T objects. The range [mBegin +
   * mLength, mBegin + mCapacity) holds uninitialized memory. The range
   * [mBegin + mLength, mBegin + mReserved) also holds uninitialized memory
   * previously allocated by a call to reserve().
   */
  T* mBegin;

  /* Number of elements in the vector. */
  size_t mLength;

  /* Max number of elements storable in the vector without resizing. */
  size_t mCapacity;

  /* Max elements of reserved or used space in this vector. */
  size_t mReserved;

  /* Memory used for inline storage. */
  AlignedStorage<kInlineBytes> mStorage;

  friend class ReentrancyGuard;
  bool mEntered;
  /* private accessors */

  bool usingInlineStorage() const
  {
    return mBegin == const_cast<VectorBase*>(this)->inlineStorage();
  }

  T* inlineStorage()
  {
    return static_cast<T*>(mStorage.addr());
  }

  T* beginNoCheck() const
  {
    return mBegin;
  }

  T* endNoCheck()
  {
    return mBegin + mLength;
  }

  const T* endNoCheck() const
  {
    return mBegin + mLength;
  }

  size_t reserved() const
  {
    MOZ_ASSERT(mReserved <= mCapacity);
    MOZ_ASSERT(mLength <= mReserved);
    return mReserved;
  }
  /* Append operations guaranteed to succeed due to pre-reserved space. */
  template<typename U> void internalAppend(U&& aU);
  template<typename U, size_t O, class BP, class UV>
  void internalAppendAll(const VectorBase<U, O, BP, UV>& aU);
  void internalAppendN(const T& aT, size_t aN);
  template<typename U> void internalAppend(const U* aBegin, size_t aLength);

public:
  static const size_t sMaxInlineStorage = N;

  typedef T ElementType;
  explicit VectorBase(AllocPolicy = AllocPolicy());
  explicit VectorBase(ThisVector&&); /* Move constructor. */
  ThisVector& operator=(ThisVector&&); /* Move assignment. */
  ~VectorBase();

  const AllocPolicy& allocPolicy() const { return *this; }

  AllocPolicy& allocPolicy() { return *this; }

  enum { InlineLength = N };

  size_t length() const { return mLength; }

  bool empty() const { return mLength == 0; }

  size_t capacity() const { return mCapacity; }
  T* begin()
  {
    MOZ_ASSERT(!mEntered);
    return mBegin;
  }

  const T* begin() const
  {
    MOZ_ASSERT(!mEntered);
    return mBegin;
  }

  T* end()
  {
    MOZ_ASSERT(!mEntered);
    return mBegin + mLength;
  }

  const T* end() const
  {
    MOZ_ASSERT(!mEntered);
    return mBegin + mLength;
  }

  T& operator[](size_t aIndex)
  {
    MOZ_ASSERT(!mEntered);
    MOZ_ASSERT(aIndex < mLength);
    return begin()[aIndex];
  }

  const T& operator[](size_t aIndex) const
  {
    MOZ_ASSERT(!mEntered);
    MOZ_ASSERT(aIndex < mLength);
    return begin()[aIndex];
  }

  T& back()
  {
    MOZ_ASSERT(!mEntered);
    MOZ_ASSERT(!empty());
    return *(end() - 1);
  }

  const T& back() const
  {
    MOZ_ASSERT(!mEntered);
    MOZ_ASSERT(!empty());
    return *(end() - 1);
  }
  class Range
  {
    friend class VectorBase;

    T* mCur;
    T* mEnd;

    Range(T* aCur, T* aEnd)
      : mCur(aCur)
      , mEnd(aEnd)
    {
      MOZ_ASSERT(aCur <= aEnd);
    }

  public:
    bool empty() const { return mCur == mEnd; }
    size_t remain() const { return PointerRangeSize(mCur, mEnd); }
    T& front() const { MOZ_ASSERT(!empty()); return *mCur; }
    void popFront() { MOZ_ASSERT(!empty()); ++mCur; }
    T popCopyFront() { MOZ_ASSERT(!empty()); return *mCur++; }
  };

  Range all() { return Range(begin(), end()); }
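
  /*
   * Example iteration with all() (an illustrative sketch; |vec| is assumed
   * to be a mozilla::Vector<int> and |process| a caller-supplied function):
   *
   *   for (Vector<int>::Range r = vec.all(); !r.empty(); r.popFront()) {
   *     process(r.front());
   *   }
   */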
  /**
   * Given that the vector is empty and has no inline storage, grow to
   * |capacity|.
   */
  bool initCapacity(size_t aRequest);

  /**
   * If reserve(length() + N) succeeds, the N next appends are guaranteed to
   * succeed.
   */
  bool reserve(size_t aRequest);

  /**
   * Destroy elements in the range [end() - aIncr, end()). Does not deallocate
   * or unreserve storage for those elements.
   */
  void shrinkBy(size_t aIncr);

  /** Grow the vector by aIncr elements. */
  bool growBy(size_t aIncr);

  /** Call shrinkBy or growBy based on whether newSize > length(). */
  bool resize(size_t aNewLength);

  /**
   * Increase the length of the vector, but don't initialize the new elements
   * -- leave them as uninitialized memory.
   */
  bool growByUninitialized(size_t aIncr);
  bool resizeUninitialized(size_t aNewLength);

  /** Shorthand for shrinkBy(length()). */
  void clear();

  /** Clears and releases any heap-allocated storage. */
  void clearAndFree();

  /**
   * If true, appending |aNeeded| elements won't reallocate elements storage.
   * This *doesn't* mean that infallibleAppend may be used! You still must
   * reserve the extra space, even if this method indicates that appends won't
   * need to reallocate elements storage.
   */
  bool canAppendWithoutRealloc(size_t aNeeded) const;
  /** Potentially fallible append operations. */

  /**
   * This can take either a T& or a T&&. Given a T&&, it moves |aU| into the
   * vector, instead of copying it. If it fails, |aU| is left unmoved. ("We are
   * not amused.")
   */
  template<typename U> bool append(U&& aU);

  template<typename U, size_t O, class BP, class UV>
  bool appendAll(const VectorBase<U, O, BP, UV>& aU);
  bool appendN(const T& aT, size_t aN);
  template<typename U> bool append(const U* aBegin, const U* aEnd);
  template<typename U> bool append(const U* aBegin, size_t aLength);
  /*
   * Guaranteed-infallible append operations for use upon vectors whose
   * memory has been pre-reserved. Don't use this if you haven't reserved the
   * memory!
   */
  template<typename U> void infallibleAppend(U&& aU)
  {
    internalAppend(Forward<U>(aU));
  }
  void infallibleAppendN(const T& aT, size_t aN)
  {
    internalAppendN(aT, aN);
  }
  template<typename U> void infallibleAppend(const U* aBegin, const U* aEnd)
  {
    internalAppend(aBegin, PointerRangeSize(aBegin, aEnd));
  }
  template<typename U> void infallibleAppend(const U* aBegin, size_t aLength)
  {
    internalAppend(aBegin, aLength);
  }
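
  /*
   * Typical reserve()/infallibleAppend() pattern (an illustrative sketch;
   * |vec| is assumed to be a mozilla::Vector<int>):
   *
   *   if (!vec.reserve(vec.length() + 3)) {
   *     return false;             // allocation failed; vec is unchanged
   *   }
   *   vec.infallibleAppend(1);    // cannot fail: space was reserved above
   *   vec.infallibleAppend(2);
   *   vec.infallibleAppend(3);
   */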
  void popBack();

  T popCopy();

  /**
   * Transfers ownership of the internal buffer used by this vector to the
   * caller. (It's the caller's responsibility to properly deallocate this
   * buffer, in accordance with this vector's AllocPolicy.) After this call,
   * the vector is empty. Since the returned buffer may need to be allocated
   * (if the elements are currently stored in-place), the call can fail,
   * returning nullptr.
   *
   * N.B. Although a T*, only the range [0, length()) is constructed.
   */
  T* extractRawBuffer();

  /**
   * Transfer ownership of an array of objects into the vector. The caller
   * must have allocated the array in accordance with this vector's
   * AllocPolicy.
   *
   * N.B. This call assumes that there are no uninitialized elements in the
   * passed array.
   */
  void replaceRawBuffer(T* aP, size_t aLength);
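
  /*
   * Ownership-transfer sketch for extractRawBuffer() (illustrative; assumes
   * the default MallocAllocPolicy so free() is the matching deallocator,
   * |vec| is a mozilla::Vector<char>, and |useBuffer| is a hypothetical
   * consumer):
   *
   *   size_t len = vec.length();
   *   char* buf = vec.extractRawBuffer();  // vec is empty afterwards
   *   if (!buf) {
   *     return false;                      // can fail if storage was inline
   *   }
   *   useBuffer(buf, len);
   *   free(buf);                           // caller now owns the buffer
   */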
  /**
   * Places |aVal| at position |aP|, shifting existing elements from |aP| onward
   * one position higher. On success, |aP| should not be reused because it'll
   * be a dangling pointer if reallocation of the vector storage occurred; the
   * return value should be used instead. On failure, nullptr is returned.
   *
   * Example usage:
   *
   *   if (!(p = vec.insert(p, val))) {
   *     <handle failure>
   *   }
   *   <keep working with p>
   *
   * This is inherently a linear-time operation. Be careful!
   */
  template<typename U>
  T* insert(T* aP, U&& aVal);
  /**
   * Removes the element |aT|, which must fall in the bounds [begin, end),
   * shifting existing elements from |aT + 1| onward one position lower.
   */
  void erase(T* aT);

  /**
   * Removes the elements [|aBegin|, |aEnd|), which must fall in the bounds
   * [begin, end), shifting existing elements from |aEnd + 1| onward to aBegin's
   * old position.
   */
  void erase(T* aBegin, T* aEnd);
  /**
   * Measure the size of the vector's heap-allocated storage.
   */
  size_t sizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const;

  /**
   * Like sizeOfExcludingThis, but also measures the size of the vector
   * object (which must be heap-allocated) itself.
   */
  size_t sizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const;

  void swap(ThisVector& aOther);

private:
  VectorBase(const VectorBase&) MOZ_DELETE;
  void operator=(const VectorBase&) MOZ_DELETE;

  /* Move-construct/assign only from our derived class, ThisVector. */
  VectorBase(VectorBase&&) MOZ_DELETE;
  void operator=(VectorBase&&) MOZ_DELETE;
};
/* This does the re-entrancy check plus several other sanity checks. */
#define MOZ_REENTRANCY_GUARD_ET_AL \
  ReentrancyGuard g(*this); \
  MOZ_ASSERT_IF(usingInlineStorage(), mCapacity == kInlineCapacity); \
  MOZ_ASSERT(reserved() <= mCapacity); \
  MOZ_ASSERT(mLength <= reserved()); \
  MOZ_ASSERT(mLength <= mCapacity)
/* Vector Implementation */

template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE
VectorBase<T, N, AP, TV>::VectorBase(AP aAP)
  : AP(aAP)
  , mLength(0)
  , mCapacity(kInlineCapacity)
  , mReserved(kInlineCapacity)
  , mEntered(false)
{
  mBegin = static_cast<T*>(mStorage.addr());
}
/* Move constructor. */
template<typename T, size_t N, class AllocPolicy, class TV>
MOZ_ALWAYS_INLINE
VectorBase<T, N, AllocPolicy, TV>::VectorBase(TV&& aRhs)
  : AllocPolicy(Move(aRhs))
  , mEntered(false)
{
  mLength = aRhs.mLength;
  mCapacity = aRhs.mCapacity;
  mReserved = aRhs.mReserved;

  if (aRhs.usingInlineStorage()) {
    /* We can't move the buffer over in this case, so copy elements. */
    mBegin = static_cast<T*>(mStorage.addr());
    Impl::moveConstruct(mBegin, aRhs.beginNoCheck(), aRhs.endNoCheck());
    /*
     * Leave aRhs's mLength, mBegin, mCapacity, and mReserved as they are.
     * The elements in its in-line storage still need to be destroyed.
     */
  } else {
    /*
     * Take src's buffer, and turn src into an empty vector using
     * in-line storage.
     */
    mBegin = aRhs.mBegin;
    aRhs.mBegin = static_cast<T*>(aRhs.mStorage.addr());
    aRhs.mCapacity = kInlineCapacity;
    aRhs.mLength = 0;
    aRhs.mReserved = kInlineCapacity;
  }
}
/* Move assignment. */
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE TV&
VectorBase<T, N, AP, TV>::operator=(TV&& aRhs)
{
  MOZ_ASSERT(this != &aRhs, "self-move assignment is prohibited");
  TV* tv = static_cast<TV*>(this);
  tv->~TV();
  new(tv) TV(Move(aRhs));
  return *tv;
}
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE
VectorBase<T, N, AP, TV>::~VectorBase()
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  Impl::destroy(beginNoCheck(), endNoCheck());
  if (!usingInlineStorage()) {
    this->free_(beginNoCheck());
  }
}
/*
 * This function will create a new heap buffer with capacity aNewCap,
 * move all elements in the inline buffer to this new buffer,
 * and fail on OOM.
 */
template<typename T, size_t N, class AP, class TV>
inline bool
VectorBase<T, N, AP, TV>::convertToHeapStorage(size_t aNewCap)
{
  MOZ_ASSERT(usingInlineStorage());

  /* Allocate buffer. */
  MOZ_ASSERT(!detail::CapacityHasExcessSpace<T>(aNewCap));
  T* newBuf = this->template pod_malloc<T>(aNewCap);
  if (!newBuf) {
    return false;
  }

  /* Copy inline elements into heap buffer. */
  Impl::moveConstruct(newBuf, beginNoCheck(), endNoCheck());
  Impl::destroy(beginNoCheck(), endNoCheck());

  /* Switch in heap buffer. */
  mBegin = newBuf;
  /* mLength is unchanged. */
  mCapacity = aNewCap;
  return true;
}
template<typename T, size_t N, class AP, class TV>
MOZ_NEVER_INLINE bool
VectorBase<T, N, AP, TV>::growStorageBy(size_t aIncr)
{
  MOZ_ASSERT(mLength + aIncr > mCapacity);
  MOZ_ASSERT_IF(!usingInlineStorage(),
                !detail::CapacityHasExcessSpace<T>(mCapacity));

  /*
   * When choosing a new capacity, its size should be as close to 2**N bytes
   * as possible. 2**N-sized requests are best because they are unlikely to
   * be rounded up by the allocator. Asking for a 2**N number of elements
   * isn't as good, because if sizeof(T) is not a power-of-two that would
   * result in a non-2**N request size.
   */

  size_t newCap;

  if (aIncr == 1) {
    if (usingInlineStorage()) {
      /* This case occurs in ~70--80% of the calls to this function. */
      size_t newSize =
        tl::RoundUpPow2<(kInlineCapacity + 1) * sizeof(T)>::value;
      newCap = newSize / sizeof(T);
      goto convert;
    }

    if (mLength == 0) {
      /* This case occurs in ~0--10% of the calls to this function. */
      newCap = 1;
      goto grow;
    }

    /* This case occurs in ~15--20% of the calls to this function. */

    /*
     * Will mLength * 4 * sizeof(T) overflow? This condition limits a vector
     * to 1GB of memory on a 32-bit system, which is a reasonable limit. It
     * also ensures that
     *
     *   static_cast<char*>(end()) - static_cast<char*>(begin())
     *
     * doesn't overflow ptrdiff_t (see bug 510319).
     */
    if (mLength & tl::MulOverflowMask<4 * sizeof(T)>::value) {
      this->reportAllocOverflow();
      return false;
    }

    /*
     * If we reach here, the existing capacity will have a size that is already
     * as close to 2^N as sizeof(T) will allow. Just double the capacity, and
     * then there might be space for one more element.
     */
    newCap = mLength * 2;
    if (detail::CapacityHasExcessSpace<T>(newCap)) {
      newCap += 1;
    }
  } else {
    /* This case occurs in ~2% of the calls to this function. */
    size_t newMinCap = mLength + aIncr;

    /* Did mLength + aIncr overflow? Will newCap * sizeof(T) overflow? */
    if (newMinCap < mLength ||
        newMinCap & tl::MulOverflowMask<2 * sizeof(T)>::value)
    {
      this->reportAllocOverflow();
      return false;
    }

    /* Round up to next power of 2. */
    size_t newMinSize = newMinCap * sizeof(T);
    size_t newSize = RoundUpPow2(newMinSize);
    newCap = newSize / sizeof(T);
  }

convert:
  if (usingInlineStorage()) {
    return convertToHeapStorage(newCap);
  }

grow:
  return Impl::growTo(*this, newCap);
}
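
/*
 * Illustrative numbers for the sizing policy above (not from the original
 * header): with sizeof(T) == 4 and kInlineCapacity == 8, appending a ninth
 * element computes newSize == RoundUpPow2((8 + 1) * 4) == RoundUpPow2(36)
 * == 64, so newCap == 64 / 4 == 16 elements -- a request the underlying
 * allocator is unlikely to round up further.
 */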
template<typename T, size_t N, class AP, class TV>
inline bool
VectorBase<T, N, AP, TV>::initCapacity(size_t aRequest)
{
  MOZ_ASSERT(empty());
  MOZ_ASSERT(usingInlineStorage());
  if (aRequest == 0) {
    return true;
  }
  T* newbuf = this->template pod_malloc<T>(aRequest);
  if (!newbuf) {
    return false;
  }
  mBegin = newbuf;
  mCapacity = aRequest;
  mReserved = aRequest;
  return true;
}
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::reserve(size_t aRequest)
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  if (aRequest > mCapacity && !growStorageBy(aRequest - mLength)) {
    return false;
  }
  if (aRequest > mReserved) {
    mReserved = aRequest;
  }
  MOZ_ASSERT(mLength <= mReserved);
  MOZ_ASSERT(mReserved <= mCapacity);
  return true;
}
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE void
VectorBase<T, N, AP, TV>::shrinkBy(size_t aIncr)
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  MOZ_ASSERT(aIncr <= mLength);
  Impl::destroy(endNoCheck() - aIncr, endNoCheck());
  mLength -= aIncr;
}
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::growBy(size_t aIncr)
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  if (aIncr > mCapacity - mLength && !growStorageBy(aIncr)) {
    return false;
  }
  MOZ_ASSERT(mLength + aIncr <= mCapacity);
  T* newend = endNoCheck() + aIncr;
  Impl::initialize(endNoCheck(), newend);
  mLength += aIncr;
  if (mLength > mReserved) {
    mReserved = mLength;
  }
  return true;
}
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::growByUninitialized(size_t aIncr)
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  if (aIncr > mCapacity - mLength && !growStorageBy(aIncr)) {
    return false;
  }
  MOZ_ASSERT(mLength + aIncr <= mCapacity);
  mLength += aIncr;
  if (mLength > mReserved) {
    mReserved = mLength;
  }
  return true;
}
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::resize(size_t aNewLength)
{
  size_t curLength = mLength;
  if (aNewLength > curLength) {
    return growBy(aNewLength - curLength);
  }
  shrinkBy(curLength - aNewLength);
  return true;
}
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::resizeUninitialized(size_t aNewLength)
{
  size_t curLength = mLength;
  if (aNewLength > curLength) {
    return growByUninitialized(aNewLength - curLength);
  }
  shrinkBy(curLength - aNewLength);
  return true;
}
template<typename T, size_t N, class AP, class TV>
inline void
VectorBase<T, N, AP, TV>::clear()
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  Impl::destroy(beginNoCheck(), endNoCheck());
  mLength = 0;
}
template<typename T, size_t N, class AP, class TV>
inline void
VectorBase<T, N, AP, TV>::clearAndFree()
{
  clear();

  if (usingInlineStorage()) {
    return;
  }
  this->free_(beginNoCheck());
  mBegin = static_cast<T*>(mStorage.addr());
  mCapacity = kInlineCapacity;
  mReserved = kInlineCapacity;
}
template<typename T, size_t N, class AP, class TV>
inline bool
VectorBase<T, N, AP, TV>::canAppendWithoutRealloc(size_t aNeeded) const
{
  return mLength + aNeeded <= mCapacity;
}
template<typename T, size_t N, class AP, class TV>
template<typename U, size_t O, class BP, class UV>
MOZ_ALWAYS_INLINE void
VectorBase<T, N, AP, TV>::internalAppendAll(
  const VectorBase<U, O, BP, UV>& aOther)
{
  internalAppend(aOther.begin(), aOther.length());
}
template<typename T, size_t N, class AP, class TV>
template<typename U>
MOZ_ALWAYS_INLINE void
VectorBase<T, N, AP, TV>::internalAppend(U&& aU)
{
  MOZ_ASSERT(mLength + 1 <= mReserved);
  MOZ_ASSERT(mReserved <= mCapacity);
  new(endNoCheck()) T(Forward<U>(aU));
  ++mLength;
}
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::appendN(const T& aT, size_t aNeeded)
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  if (mLength + aNeeded > mCapacity && !growStorageBy(aNeeded)) {
    return false;
  }
  if (mLength + aNeeded > mReserved) {
    mReserved = mLength + aNeeded;
  }
  internalAppendN(aT, aNeeded);
  return true;
}
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE void
VectorBase<T, N, AP, TV>::internalAppendN(const T& aT, size_t aNeeded)
{
  MOZ_ASSERT(mLength + aNeeded <= mReserved);
  MOZ_ASSERT(mReserved <= mCapacity);
  Impl::copyConstructN(endNoCheck(), aNeeded, aT);
  mLength += aNeeded;
}
template<typename T, size_t N, class AP, class TV>
template<typename U>
inline T*
VectorBase<T, N, AP, TV>::insert(T* aP, U&& aVal)
{
  MOZ_ASSERT(begin() <= aP);
  MOZ_ASSERT(aP <= end());
  size_t pos = aP - begin();
  MOZ_ASSERT(pos <= mLength);
  size_t oldLength = mLength;
  if (pos == oldLength) {
    if (!append(Forward<U>(aVal))) {
      return nullptr;
    }
  } else {
    T oldBack = Move(back());
    if (!append(Move(oldBack))) { /* Dup the last element. */
      return nullptr;
    }
    for (size_t i = oldLength; i > pos; --i) {
      (*this)[i] = Move((*this)[i - 1]);
    }
    (*this)[pos] = Forward<U>(aVal);
  }
  return begin() + pos;
}
template<typename T, size_t N, class AP, class TV>
inline void
VectorBase<T, N, AP, TV>::erase(T* aIt)
{
  MOZ_ASSERT(begin() <= aIt);
  MOZ_ASSERT(aIt < end());
  while (aIt + 1 < end()) {
    *aIt = Move(*(aIt + 1));
    ++aIt;
  }
  popBack();
}
template<typename T, size_t N, class AP, class TV>
inline void
VectorBase<T, N, AP, TV>::erase(T* aBegin, T* aEnd)
{
  MOZ_ASSERT(begin() <= aBegin);
  MOZ_ASSERT(aBegin <= aEnd);
  MOZ_ASSERT(aEnd <= end());
  while (aEnd < end()) {
    *aBegin++ = Move(*aEnd++);
  }
  shrinkBy(aEnd - aBegin);
}
template<typename T, size_t N, class AP, class TV>
template<typename U>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::append(const U* aInsBegin, const U* aInsEnd)
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  size_t aNeeded = PointerRangeSize(aInsBegin, aInsEnd);
  if (mLength + aNeeded > mCapacity && !growStorageBy(aNeeded)) {
    return false;
  }
  if (mLength + aNeeded > mReserved) {
    mReserved = mLength + aNeeded;
  }
  internalAppend(aInsBegin, aNeeded);
  return true;
}
template<typename T, size_t N, class AP, class TV>
template<typename U>
MOZ_ALWAYS_INLINE void
VectorBase<T, N, AP, TV>::internalAppend(const U* aInsBegin, size_t aInsLength)
{
  MOZ_ASSERT(mLength + aInsLength <= mReserved);
  MOZ_ASSERT(mReserved <= mCapacity);
  Impl::copyConstruct(endNoCheck(), aInsBegin, aInsBegin + aInsLength);
  mLength += aInsLength;
}
template<typename T, size_t N, class AP, class TV>
template<typename U>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::append(U&& aU)
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  if (mLength == mCapacity && !growStorageBy(1)) {
    return false;
  }
  if (mLength + 1 > mReserved) {
    mReserved = mLength + 1;
  }
  internalAppend(Forward<U>(aU));
  return true;
}
template<typename T, size_t N, class AP, class TV>
template<typename U, size_t O, class BP, class UV>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::appendAll(const VectorBase<U, O, BP, UV>& aOther)
{
  return append(aOther.begin(), aOther.length());
}
template<typename T, size_t N, class AP, class TV>
template<typename U>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::append(const U* aInsBegin, size_t aInsLength)
{
  return append(aInsBegin, aInsBegin + aInsLength);
}
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE void
VectorBase<T, N, AP, TV>::popBack()
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  MOZ_ASSERT(!empty());
  --mLength;
  endNoCheck()->~T();
}
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE T
VectorBase<T, N, AP, TV>::popCopy()
{
  T ret = back();
  popBack();
  return ret;
}
template<typename T, size_t N, class AP, class TV>
inline T*
VectorBase<T, N, AP, TV>::extractRawBuffer()
{
  T* ret;
  if (usingInlineStorage()) {
    ret = this->template pod_malloc<T>(mLength);
    if (!ret) {
      return nullptr;
    }
    Impl::copyConstruct(ret, beginNoCheck(), endNoCheck());
    Impl::destroy(beginNoCheck(), endNoCheck());
    /* mBegin, mCapacity are unchanged. */
    mLength = 0;
  } else {
    ret = mBegin;
    mBegin = static_cast<T*>(mStorage.addr());
    mLength = 0;
    mCapacity = kInlineCapacity;
    mReserved = kInlineCapacity;
  }
  return ret;
}
template<typename T, size_t N, class AP, class TV>
inline void
VectorBase<T, N, AP, TV>::replaceRawBuffer(T* aP, size_t aLength)
{
  MOZ_REENTRANCY_GUARD_ET_AL;

  /* Destroy what we have. */
  Impl::destroy(beginNoCheck(), endNoCheck());
  if (!usingInlineStorage()) {
    this->free_(beginNoCheck());
  }

  /* Take in the new buffer. */
  if (aLength <= kInlineCapacity) {
    /*
     * We convert to inline storage if possible, even though aP might
     * otherwise be acceptable. Maybe this behaviour should be
     * specifiable with an argument to this function.
     */
    mBegin = static_cast<T*>(mStorage.addr());
    mLength = aLength;
    mCapacity = kInlineCapacity;
    Impl::moveConstruct(mBegin, aP, aP + aLength);
    Impl::destroy(aP, aP + aLength);
    this->free_(aP);
  } else {
    mBegin = aP;
    mLength = aLength;
    mCapacity = aLength;
  }
  mReserved = aLength;
}
template<typename T, size_t N, class AP, class TV>
inline size_t
VectorBase<T, N, AP, TV>::sizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
{
  return usingInlineStorage() ? 0 : aMallocSizeOf(beginNoCheck());
}
template<typename T, size_t N, class AP, class TV>
inline size_t
VectorBase<T, N, AP, TV>::sizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
{
  return aMallocSizeOf(this) + sizeOfExcludingThis(aMallocSizeOf);
}
template<typename T, size_t N, class AP, class TV>
inline void
VectorBase<T, N, AP, TV>::swap(TV& aOther)
{
  static_assert(N == 0,
                "still need to implement this for N != 0");

  // This only works when inline storage is always empty.
  if (!usingInlineStorage() && aOther.usingInlineStorage()) {
    aOther.mBegin = mBegin;
    mBegin = inlineStorage();
  } else if (usingInlineStorage() && !aOther.usingInlineStorage()) {
    mBegin = aOther.mBegin;
    aOther.mBegin = aOther.inlineStorage();
  } else if (!usingInlineStorage() && !aOther.usingInlineStorage()) {
    Swap(mBegin, aOther.mBegin);
  } else {
    // This case is a no-op, since we'd set both to use their inline storage.
  }

  Swap(mLength, aOther.mLength);
  Swap(mCapacity, aOther.mCapacity);
  Swap(mReserved, aOther.mReserved);
}
/*
 * STL-like container providing a short-lived, dynamic buffer. Vector calls the
 * constructors/destructors of all elements stored in its internal buffer, so
 * non-PODs may be safely used. Additionally, Vector will store the first N
 * elements in-place before resorting to dynamic allocation.
 *
 * T requirements:
 *  - default and copy constructible, assignable, destructible
 *  - operations do not throw
 * MinInlineCapacity requirements:
 *  - any value, however, N is clamped to min/max values
 * AllocPolicy:
 *  - see "Allocation policies" in AllocPolicy.h (defaults to
 *    mozilla::MallocAllocPolicy)
 *
 * Vector is not reentrant: T member functions called during Vector member
 * functions must not call back into the same object!
 */
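
/*
 * Example usage (an illustrative sketch, not part of the original header):
 *
 *   mozilla::Vector<int, 8> v;   // first 8 elements use inline storage
 *   if (!v.append(42)) {
 *     return false;              // append is fallible; handle OOM
 *   }
 *   for (size_t i = 0; i < v.length(); i++) {
 *     printf("%d\n", v[i]);
 *   }
 */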
template<typename T,
         size_t MinInlineCapacity = 0,
         class AllocPolicy = MallocAllocPolicy>
class Vector
  : public VectorBase<T,
                      MinInlineCapacity,
                      AllocPolicy,
                      Vector<T, MinInlineCapacity, AllocPolicy> >
{
  typedef VectorBase<T, MinInlineCapacity, AllocPolicy, Vector> Base;
public:
  explicit Vector(AllocPolicy alloc = AllocPolicy()) : Base(alloc) {}
  Vector(Vector&& vec) : Base(Move(vec)) {}
  Vector& operator=(Vector&& aOther)
  {
    return Base::operator=(Move(aOther));
  }
};

} // namespace mozilla
#ifdef _MSC_VER
#pragma warning(pop)
#endif

#endif /* mozilla_Vector_h */