1 /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sw=4 et tw=99 ft=cpp:
4 * This Source Code Form is subject to the terms of the Mozilla Public
5 * License, v. 2.0. If a copy of the MPL was not distributed with this
6 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
11 #include "mozilla/Attributes.h"
13 #include "TemplateLib.h"
16 /* Silence dire "bugs in previous versions of MSVC have been fixed" warnings */
19 #pragma warning(disable:4345)
/*
 * Forward declarations: the default allocation policy and the Vector
 * container itself (both defined later in this file / in jsalloc.h).
 */
class TempAllocPolicy;

template <class T,
          size_t MinInlineCapacity = 0,
          class AllocPolicy = TempAllocPolicy>
class Vector;
32 * This template class provides a default implementation for vector operations
33 * when the element type is not known to be a POD, as judged by IsPodType.
35 template <class T
, size_t N
, class AP
, bool IsPod
>
38 /* Destroys constructed objects in the range [begin, end). */
39 static inline void destroy(T
*begin
, T
*end
) {
40 for (T
*p
= begin
; p
!= end
; ++p
)
44 /* Constructs objects in the uninitialized range [begin, end). */
45 static inline void initialize(T
*begin
, T
*end
) {
46 for (T
*p
= begin
; p
!= end
; ++p
)
51 * Copy-constructs objects in the uninitialized range
52 * [dst, dst+(srcend-srcbeg)) from the range [srcbeg, srcend).
55 static inline void copyConstruct(T
*dst
, const U
*srcbeg
, const U
*srcend
) {
56 for (const U
*p
= srcbeg
; p
!= srcend
; ++p
, ++dst
)
61 * Move-constructs objects in the uninitialized range
62 * [dst, dst+(srcend-srcbeg)) from the range [srcbeg, srcend).
65 static inline void moveConstruct(T
*dst
, const U
*srcbeg
, const U
*srcend
) {
66 for (const U
*p
= srcbeg
; p
!= srcend
; ++p
, ++dst
)
71 * Copy-constructs objects in the uninitialized range [dst, dst+n) from the
75 static inline void copyConstructN(T
*dst
, size_t n
, const U
&u
) {
76 for (T
*end
= dst
+ n
; dst
!= end
; ++dst
)
81 * Grows the given buffer to have capacity newcap, preserving the objects
82 * constructed in the range [begin, end) and updating v. Assumes that (1)
83 * newcap has not overflowed, and (2) multiplying newcap by sizeof(T) will
86 static inline bool growTo(Vector
<T
,N
,AP
> &v
, size_t newcap
) {
87 JS_ASSERT(!v
.usingInlineStorage());
88 T
*newbuf
= reinterpret_cast<T
*>(v
.malloc_(newcap
* sizeof(T
)));
91 for (T
*dst
= newbuf
, *src
= v
.beginNoCheck(); src
!= v
.endNoCheck(); ++dst
, ++src
)
92 new(dst
) T(Move(*src
));
93 VectorImpl::destroy(v
.beginNoCheck(), v
.endNoCheck());
96 /* v.mLength is unchanged. */
103 * This partial template specialization provides a default implementation for
104 * vector operations when the element type is known to be a POD, as judged by
107 template <class T
, size_t N
, class AP
>
108 struct VectorImpl
<T
, N
, AP
, true>
110 static inline void destroy(T
*, T
*) {}
112 static inline void initialize(T
*begin
, T
*end
) {
114 * You would think that memset would be a big win (or even break even)
115 * when we know T is a POD. But currently it's not. This is probably
116 * because |append| tends to be given small ranges and memset requires
117 * a function call that doesn't get inlined.
119 * memset(begin, 0, sizeof(T) * (end-begin));
121 for (T
*p
= begin
; p
!= end
; ++p
)
126 static inline void copyConstruct(T
*dst
, const U
*srcbeg
, const U
*srcend
) {
128 * See above memset comment. Also, notice that copyConstruct is
129 * currently templated (T != U), so memcpy won't work without
132 * memcpy(dst, srcbeg, sizeof(T) * (srcend - srcbeg));
134 for (const U
*p
= srcbeg
; p
!= srcend
; ++p
, ++dst
)
139 static inline void moveConstruct(T
*dst
, const U
*srcbeg
, const U
*srcend
) {
140 copyConstruct(dst
, srcbeg
, srcend
);
143 static inline void copyConstructN(T
*dst
, size_t n
, const T
&t
) {
144 for (T
*p
= dst
, *end
= dst
+ n
; p
!= end
; ++p
)
148 static inline bool growTo(Vector
<T
,N
,AP
> &v
, size_t newcap
) {
149 JS_ASSERT(!v
.usingInlineStorage());
150 size_t bytes
= sizeof(T
) * newcap
;
151 size_t oldBytes
= sizeof(T
) * v
.mCapacity
;
152 T
*newbuf
= reinterpret_cast<T
*>(v
.realloc_(v
.mBegin
, oldBytes
, bytes
));
156 /* v.mLength is unchanged. */
157 v
.mCapacity
= newcap
;
163 * JS-friendly, STL-like container providing a short-lived, dynamic buffer.
164 * Vector calls the constructors/destructors of all elements stored in
165 * its internal buffer, so non-PODs may be safely used. Additionally,
166 * Vector will store the first N elements in-place before resorting to
167 * dynamic allocation.
170 * - default and copy constructible, assignable, destructible
171 * - operations do not throw
173 * - any value, however, N is clamped to min/max values
175 * - see "Allocation policies" in jsalloc.h (default js::TempAllocPolicy)
177 * N.B: Vector is not reentrant: T member functions called during Vector member
178 * functions must not call back into the same object.
180 template <class T
, size_t N
, class AllocPolicy
>
181 class Vector
: private AllocPolicy
183 // typedef typename tl::StaticAssert<!tl::IsPostBarrieredType<T>::result>::result _;
187 static const bool sElemIsPod
= tl::IsPodType
<T
>::result
;
188 typedef VectorImpl
<T
, N
, AllocPolicy
, sElemIsPod
> Impl
;
189 friend struct VectorImpl
<T
, N
, AllocPolicy
, sElemIsPod
>;
191 bool calculateNewCapacity(size_t curLength
, size_t lengthInc
, size_t &newCap
);
192 bool growStorageBy(size_t lengthInc
);
193 bool growHeapStorageBy(size_t lengthInc
);
194 bool convertToHeapStorage(size_t lengthInc
);
196 template <bool InitNewElems
> inline bool growByImpl(size_t inc
);
198 /* magic constants */
200 static const int sMaxInlineBytes
= 1024;
202 /* compute constants */
205 * Consider element size to be 1 for buffer sizing if there are
206 * 0 inline elements. This allows us to compile when the definition
207 * of the element type is not visible here.
209 * Explicit specialization is only allowed at namespace scope, so
210 * in order to keep everything here, we use a dummy template
211 * parameter with partial specialization.
213 template <int M
, int Dummy
>
215 static const size_t result
= sizeof(T
);
218 struct ElemSize
<0, Dummy
> {
219 static const size_t result
= 1;
222 static const size_t sInlineCapacity
=
223 tl::Min
<N
, sMaxInlineBytes
/ ElemSize
<N
, 0>::result
>::result
;
225 /* Calculate inline buffer size; avoid 0-sized array. */
226 static const size_t sInlineBytes
=
227 tl::Max
<1, sInlineCapacity
* ElemSize
<N
, 0>::result
>::result
;
232 * Pointer to the buffer, be it inline or heap-allocated. Only [mBegin,
233 * mBegin + mLength) hold valid constructed T objects. The range [mBegin +
234 * mLength, mBegin + mCapacity) holds uninitialized memory. The range
235 * [mBegin + mLength, mBegin + mReserved) also holds uninitialized memory
236 * previously allocated by a call to reserve().
239 size_t mLength
; /* Number of elements in the Vector. */
240 size_t mCapacity
; /* Max number of elements storable in the Vector without resizing. */
242 size_t mReserved
; /* Max elements of reserved or used space in this vector. */
245 mozilla::AlignedStorage
<sInlineBytes
> storage
;
248 friend class ReentrancyGuard
;
252 Vector(const Vector
&) MOZ_DELETE
;
253 Vector
&operator=(const Vector
&) MOZ_DELETE
;
255 /* private accessors */
257 bool usingInlineStorage() const {
258 return mBegin
== inlineStorage();
261 T
*inlineStorage() const {
262 return (T
*)storage
.addr();
265 T
*beginNoCheck() const {
270 return mBegin
+ mLength
;
273 const T
*endNoCheck() const {
274 return mBegin
+ mLength
;
278 size_t reserved() const {
279 JS_ASSERT(mReserved
<= mCapacity
);
280 JS_ASSERT(mLength
<= mReserved
);
285 /* Append operations guaranteed to succeed due to pre-reserved space. */
286 template <class U
> void internalAppend(U t
);
287 void internalAppendN(const T
&t
, size_t n
);
288 template <class U
> void internalAppend(const U
*begin
, size_t length
);
289 template <class U
, size_t O
, class BP
> void internalAppend(const Vector
<U
,O
,BP
> &other
);
292 static const size_t sMaxInlineStorage
= N
;
294 typedef T ElementType
;
296 Vector(AllocPolicy
= AllocPolicy());
297 Vector(MoveRef
<Vector
>); /* Move constructor. */
298 Vector
&operator=(MoveRef
<Vector
>); /* Move assignment. */
303 const AllocPolicy
&allocPolicy() const {
307 AllocPolicy
&allocPolicy() {
311 enum { InlineLength
= N
};
313 size_t length() const {
321 size_t capacity() const {
330 const T
*begin() const {
337 return mBegin
+ mLength
;
340 const T
*end() const {
342 return mBegin
+ mLength
;
345 T
&operator[](size_t i
) {
346 JS_ASSERT(!entered
&& i
< mLength
);
350 const T
&operator[](size_t i
) const {
351 JS_ASSERT(!entered
&& i
< mLength
);
356 JS_ASSERT(!entered
&& !empty());
360 const T
&back() const {
361 JS_ASSERT(!entered
&& !empty());
368 Range(T
*cur
, T
*end
) : cur(cur
), end(end
) {}
371 bool empty() const { return cur
== end
; }
372 size_t remain() const { return end
- cur
; }
373 T
&front() const { return *cur
; }
374 void popFront() { JS_ASSERT(!empty()); ++cur
; }
375 T
popCopyFront() { JS_ASSERT(!empty()); return *cur
++; }
379 return Range(begin(), end());
384 /* If reserve(length() + N) succeeds, the N next appends are guaranteed to succeed. */
385 bool reserve(size_t capacity
);
388 * Destroy elements in the range [end() - incr, end()). Does not deallocate
389 * or unreserve storage for those elements.
391 void shrinkBy(size_t incr
);
393 /* Grow the vector by incr elements. */
394 bool growBy(size_t incr
);
396 /* Call shrinkBy or growBy based on whether newSize > length(). */
397 bool resize(size_t newLength
);
399 /* Leave new elements as uninitialized memory. */
400 bool growByUninitialized(size_t incr
);
401 bool resizeUninitialized(size_t newLength
);
403 /* Shorthand for shrinkBy(length()). */
406 /* Clears and releases any heap-allocated storage. */
410 * Potentially fallible append operations.
412 * The function templates that take an unspecified type U require a
413 * const T & or a MoveRef<T>. The MoveRef<T> variants move their
414 * operands into the vector, instead of copying them. If they fail, the
415 * operand is left unmoved.
417 template <class U
> bool append(U t
);
418 bool appendN(const T
&t
, size_t n
);
419 template <class U
> bool append(const U
*begin
, const U
*end
);
420 template <class U
> bool append(const U
*begin
, size_t length
);
421 template <class U
, size_t O
, class BP
> bool append(const Vector
<U
,O
,BP
> &other
);
424 * Guaranteed-infallible append operations for use upon vectors whose
425 * memory has been pre-reserved.
427 void infallibleAppend(const T
&t
) {
430 void infallibleAppendN(const T
&t
, size_t n
) {
431 internalAppendN(t
, n
);
433 template <class U
> void infallibleAppend(const U
*begin
, const U
*end
) {
434 internalAppend(begin
, mozilla::PointerRangeSize(begin
, end
));
436 template <class U
> void infallibleAppend(const U
*begin
, size_t length
) {
437 internalAppend(begin
, length
);
439 template <class U
, size_t O
, class BP
> void infallibleAppend(const Vector
<U
,O
,BP
> &other
) {
440 internalAppend(other
);
448 * Transfers ownership of the internal buffer used by Vector to the caller.
449 * After this call, the Vector is empty. Since the returned buffer may need
450 * to be allocated (if the elements are currently stored in-place), the
451 * call can fail, returning NULL.
453 * N.B. Although a T*, only the range [0, length()) is constructed.
455 T
*extractRawBuffer();
458 * Transfer ownership of an array of objects into the Vector.
459 * N.B. This call assumes that there are no uninitialized elements in the
462 void replaceRawBuffer(T
*p
, size_t length
);
465 * Places |val| at position |p|, shifting existing elements
466 * from |p| onward one position higher.
468 bool insert(T
*p
, const T
&val
);
471 * Removes the element |t|, which must fall in the bounds [begin, end),
472 * shifting existing elements from |t + 1| onward one position lower.
477 * Measure the size of the Vector's heap-allocated storage.
479 size_t sizeOfExcludingThis(JSMallocSizeOfFun mallocSizeOf
) const;
482 * Like sizeOfExcludingThis, but also measures the size of the Vector
483 * object (which must be heap-allocated) itself.
485 size_t sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf
) const;
487 void swap(Vector
&other
);
490 /* This does the re-entrancy check plus several other sanity checks. */
491 #define REENTRANCY_GUARD_ET_AL \
492 ReentrancyGuard g(*this); \
493 JS_ASSERT_IF(usingInlineStorage(), mCapacity == sInlineCapacity); \
494 JS_ASSERT(reserved() <= mCapacity); \
495 JS_ASSERT(mLength <= reserved()); \
496 JS_ASSERT(mLength <= mCapacity)
498 /* Vector Implementation */
500 template <class T
, size_t N
, class AllocPolicy
>
502 Vector
<T
,N
,AllocPolicy
>::Vector(AllocPolicy ap
)
503 : AllocPolicy(ap
), mBegin((T
*)storage
.addr()), mLength(0),
504 mCapacity(sInlineCapacity
)
506 , mReserved(0), entered(false)
510 /* Move constructor. */
511 template <class T
, size_t N
, class AllocPolicy
>
513 Vector
<T
, N
, AllocPolicy
>::Vector(MoveRef
<Vector
> rhs
)
519 mLength
= rhs
->mLength
;
520 mCapacity
= rhs
->mCapacity
;
522 mReserved
= rhs
->mReserved
;
525 if (rhs
->usingInlineStorage()) {
526 /* We can't move the buffer over in this case, so copy elements. */
527 mBegin
= (T
*)storage
.addr();
528 Impl::moveConstruct(mBegin
, rhs
->beginNoCheck(), rhs
->endNoCheck());
530 * Leave rhs's mLength, mBegin, mCapacity, and mReserved as they are.
531 * The elements in its in-line storage still need to be destroyed.
535 * Take src's buffer, and turn src into an empty vector using
538 mBegin
= rhs
->mBegin
;
539 rhs
->mBegin
= (T
*) rhs
->storage
.addr();
540 rhs
->mCapacity
= sInlineCapacity
;
548 /* Move assignment. */
549 template <class T
, size_t N
, class AP
>
552 Vector
<T
, N
, AP
>::operator=(MoveRef
<Vector
> rhs
)
555 new(this) Vector(rhs
);
559 template <class T
, size_t N
, class AP
>
561 Vector
<T
,N
,AP
>::~Vector()
563 REENTRANCY_GUARD_ET_AL
;
564 Impl::destroy(beginNoCheck(), endNoCheck());
565 if (!usingInlineStorage())
566 this->free_(beginNoCheck());
570 * Calculate a new capacity that is at least lengthInc greater than
571 * curLength and check for overflow.
573 template <class T
, size_t N
, class AP
>
574 STATIC_POSTCONDITION(!return || newCap
>= curLength
+ lengthInc
)
576 /* gcc (ARM, x86) compiler bug workaround - See bug 694694 */
581 Vector
<T
,N
,AP
>::calculateNewCapacity(size_t curLength
, size_t lengthInc
,
584 size_t newMinCap
= curLength
+ lengthInc
;
587 * Check for overflow in the above addition, below CEILING_LOG2, and later
588 * multiplication by sizeof(T).
590 if (newMinCap
< curLength
||
591 newMinCap
& tl::MulOverflowMask
<2 * sizeof(T
)>::result
) {
592 this->reportAllocOverflow();
596 /* Round up to next power of 2. */
597 newCap
= RoundUpPow2(newMinCap
);
600 * Do not allow a buffer large enough that the expression ((char *)end() -
601 * (char *)begin()) overflows ptrdiff_t. See Bug 510319.
603 if (newCap
& tl::UnsafeRangeSizeMask
<T
>::result
) {
604 this->reportAllocOverflow();
611 * This function will grow the current heap capacity to have capacity
612 * (mLength + lengthInc) and fail on OOM or integer overflow.
614 template <class T
, size_t N
, class AP
>
615 JS_ALWAYS_INLINE
bool
616 Vector
<T
,N
,AP
>::growHeapStorageBy(size_t lengthInc
)
618 JS_ASSERT(!usingInlineStorage());
620 return calculateNewCapacity(mLength
, lengthInc
, newCap
) &&
621 Impl::growTo(*this, newCap
);
625 * This function will create a new heap buffer with capacity (mLength +
626 * lengthInc()), move all elements in the inline buffer to this new buffer,
627 * and fail on OOM or integer overflow.
629 template <class T
, size_t N
, class AP
>
631 Vector
<T
,N
,AP
>::convertToHeapStorage(size_t lengthInc
)
633 JS_ASSERT(usingInlineStorage());
635 if (!calculateNewCapacity(mLength
, lengthInc
, newCap
))
638 /* Allocate buffer. */
639 T
*newBuf
= reinterpret_cast<T
*>(this->malloc_(newCap
* sizeof(T
)));
643 /* Copy inline elements into heap buffer. */
644 Impl::moveConstruct(newBuf
, beginNoCheck(), endNoCheck());
645 Impl::destroy(beginNoCheck(), endNoCheck());
647 /* Switch in heap buffer. */
649 /* mLength is unchanged. */
654 template <class T
, size_t N
, class AP
>
656 Vector
<T
,N
,AP
>::growStorageBy(size_t incr
)
658 JS_ASSERT(mLength
+ incr
> mCapacity
);
659 return usingInlineStorage()
660 ? convertToHeapStorage(incr
)
661 : growHeapStorageBy(incr
);
664 template <class T
, size_t N
, class AP
>
666 Vector
<T
,N
,AP
>::reserve(size_t request
)
668 REENTRANCY_GUARD_ET_AL
;
669 if (request
> mCapacity
&& !growStorageBy(request
- mLength
))
673 if (request
> mReserved
)
675 JS_ASSERT(mLength
<= mReserved
);
676 JS_ASSERT(mReserved
<= mCapacity
);
681 template <class T
, size_t N
, class AP
>
683 Vector
<T
,N
,AP
>::shrinkBy(size_t incr
)
685 REENTRANCY_GUARD_ET_AL
;
686 JS_ASSERT(incr
<= mLength
);
687 Impl::destroy(endNoCheck() - incr
, endNoCheck());
691 template <class T
, size_t N
, class AP
>
692 template <bool InitNewElems
>
693 JS_ALWAYS_INLINE
bool
694 Vector
<T
,N
,AP
>::growByImpl(size_t incr
)
696 REENTRANCY_GUARD_ET_AL
;
697 if (incr
> mCapacity
- mLength
&& !growStorageBy(incr
))
700 JS_ASSERT(mLength
+ incr
<= mCapacity
);
701 T
*newend
= endNoCheck() + incr
;
703 Impl::initialize(endNoCheck(), newend
);
706 if (mLength
> mReserved
)
712 template <class T
, size_t N
, class AP
>
713 JS_ALWAYS_INLINE
bool
714 Vector
<T
,N
,AP
>::growBy(size_t incr
)
716 return growByImpl
<true>(incr
);
719 template <class T
, size_t N
, class AP
>
720 JS_ALWAYS_INLINE
bool
721 Vector
<T
,N
,AP
>::growByUninitialized(size_t incr
)
723 return growByImpl
<false>(incr
);
726 template <class T
, size_t N
, class AP
>
727 STATIC_POSTCONDITION(!return || ubound(this->begin()) >= newLength
)
729 Vector
<T
,N
,AP
>::resize(size_t newLength
)
731 size_t curLength
= mLength
;
732 if (newLength
> curLength
)
733 return growBy(newLength
- curLength
);
734 shrinkBy(curLength
- newLength
);
738 template <class T
, size_t N
, class AP
>
739 JS_ALWAYS_INLINE
bool
740 Vector
<T
,N
,AP
>::resizeUninitialized(size_t newLength
)
742 size_t curLength
= mLength
;
743 if (newLength
> curLength
)
744 return growByUninitialized(newLength
- curLength
);
745 shrinkBy(curLength
- newLength
);
749 template <class T
, size_t N
, class AP
>
751 Vector
<T
,N
,AP
>::clear()
753 REENTRANCY_GUARD_ET_AL
;
754 Impl::destroy(beginNoCheck(), endNoCheck());
758 template <class T
, size_t N
, class AP
>
760 Vector
<T
,N
,AP
>::clearAndFree()
764 if (usingInlineStorage())
767 this->free_(beginNoCheck());
768 mBegin
= (T
*)storage
.addr();
769 mCapacity
= sInlineCapacity
;
775 template <class T
, size_t N
, class AP
>
777 JS_ALWAYS_INLINE
bool
778 Vector
<T
,N
,AP
>::append(U t
)
780 REENTRANCY_GUARD_ET_AL
;
781 if (mLength
== mCapacity
&& !growStorageBy(1))
785 if (mLength
+ 1 > mReserved
)
786 mReserved
= mLength
+ 1;
792 template <class T
, size_t N
, class AP
>
794 JS_ALWAYS_INLINE
void
795 Vector
<T
,N
,AP
>::internalAppend(U t
)
797 JS_ASSERT(mLength
+ 1 <= mReserved
);
798 JS_ASSERT(mReserved
<= mCapacity
);
799 new(endNoCheck()) T(t
);
803 template <class T
, size_t N
, class AP
>
804 JS_ALWAYS_INLINE
bool
805 Vector
<T
,N
,AP
>::appendN(const T
&t
, size_t needed
)
807 REENTRANCY_GUARD_ET_AL
;
808 if (mLength
+ needed
> mCapacity
&& !growStorageBy(needed
))
812 if (mLength
+ needed
> mReserved
)
813 mReserved
= mLength
+ needed
;
815 internalAppendN(t
, needed
);
819 template <class T
, size_t N
, class AP
>
820 JS_ALWAYS_INLINE
void
821 Vector
<T
,N
,AP
>::internalAppendN(const T
&t
, size_t needed
)
823 JS_ASSERT(mLength
+ needed
<= mReserved
);
824 JS_ASSERT(mReserved
<= mCapacity
);
825 Impl::copyConstructN(endNoCheck(), needed
, t
);
829 template <class T
, size_t N
, class AP
>
831 Vector
<T
,N
,AP
>::insert(T
*p
, const T
&val
)
833 JS_ASSERT(begin() <= p
&& p
<= end());
834 size_t pos
= p
- begin();
835 JS_ASSERT(pos
<= mLength
);
836 size_t oldLength
= mLength
;
837 if (pos
== oldLength
)
841 if (!append(oldBack
)) /* Dup the last element. */
844 for (size_t i
= oldLength
; i
> pos
; --i
)
845 (*this)[i
] = (*this)[i
- 1];
850 template<typename T
, size_t N
, class AP
>
852 Vector
<T
,N
,AP
>::erase(T
*it
)
854 JS_ASSERT(begin() <= it
&& it
< end());
855 while (it
+ 1 != end()) {
862 template <class T
, size_t N
, class AP
>
864 JS_ALWAYS_INLINE
bool
865 Vector
<T
,N
,AP
>::append(const U
*insBegin
, const U
*insEnd
)
867 REENTRANCY_GUARD_ET_AL
;
868 size_t needed
= mozilla::PointerRangeSize(insBegin
, insEnd
);
869 if (mLength
+ needed
> mCapacity
&& !growStorageBy(needed
))
873 if (mLength
+ needed
> mReserved
)
874 mReserved
= mLength
+ needed
;
876 internalAppend(insBegin
, needed
);
880 template <class T
, size_t N
, class AP
>
882 JS_ALWAYS_INLINE
void
883 Vector
<T
,N
,AP
>::internalAppend(const U
*insBegin
, size_t length
)
885 JS_ASSERT(mLength
+ length
<= mReserved
);
886 JS_ASSERT(mReserved
<= mCapacity
);
887 Impl::copyConstruct(endNoCheck(), insBegin
, insBegin
+ length
);
891 template <class T
, size_t N
, class AP
>
892 template <class U
, size_t O
, class BP
>
894 Vector
<T
,N
,AP
>::append(const Vector
<U
,O
,BP
> &other
)
896 return append(other
.begin(), other
.end());
899 template <class T
, size_t N
, class AP
>
900 template <class U
, size_t O
, class BP
>
902 Vector
<T
,N
,AP
>::internalAppend(const Vector
<U
,O
,BP
> &other
)
904 internalAppend(other
.begin(), other
.length());
907 template <class T
, size_t N
, class AP
>
909 JS_ALWAYS_INLINE
bool
910 Vector
<T
,N
,AP
>::append(const U
*insBegin
, size_t length
)
912 return this->append(insBegin
, insBegin
+ length
);
915 template <class T
, size_t N
, class AP
>
916 JS_ALWAYS_INLINE
void
917 Vector
<T
,N
,AP
>::popBack()
919 REENTRANCY_GUARD_ET_AL
;
925 template <class T
, size_t N
, class AP
>
927 Vector
<T
,N
,AP
>::popCopy()
934 template <class T
, size_t N
, class AP
>
936 Vector
<T
,N
,AP
>::extractRawBuffer()
939 if (usingInlineStorage()) {
940 ret
= reinterpret_cast<T
*>(this->malloc_(mLength
* sizeof(T
)));
943 Impl::copyConstruct(ret
, beginNoCheck(), endNoCheck());
944 Impl::destroy(beginNoCheck(), endNoCheck());
945 /* mBegin, mCapacity are unchanged. */
949 mBegin
= (T
*)storage
.addr();
951 mCapacity
= sInlineCapacity
;
959 template <class T
, size_t N
, class AP
>
961 Vector
<T
,N
,AP
>::replaceRawBuffer(T
*p
, size_t length
)
963 REENTRANCY_GUARD_ET_AL
;
965 /* Destroy what we have. */
966 Impl::destroy(beginNoCheck(), endNoCheck());
967 if (!usingInlineStorage())
968 this->free_(beginNoCheck());
970 /* Take in the new buffer. */
971 if (length
<= sInlineCapacity
) {
973 * We convert to inline storage if possible, even though p might
974 * otherwise be acceptable. Maybe this behaviour should be
975 * specifiable with an argument to this function.
977 mBegin
= (T
*)storage
.addr();
979 mCapacity
= sInlineCapacity
;
980 Impl::moveConstruct(mBegin
, p
, p
+ length
);
981 Impl::destroy(p
, p
+ length
);
993 template <class T
, size_t N
, class AP
>
995 Vector
<T
,N
,AP
>::sizeOfExcludingThis(JSMallocSizeOfFun mallocSizeOf
) const
997 return usingInlineStorage() ? 0 : mallocSizeOf(beginNoCheck());
1000 template <class T
, size_t N
, class AP
>
1002 Vector
<T
,N
,AP
>::sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf
) const
1004 return mallocSizeOf(this) + sizeOfExcludingThis(mallocSizeOf
);
1007 template <class T
, size_t N
, class AP
>
1009 Vector
<T
,N
,AP
>::swap(Vector
&other
)
1011 // TODO Implement N != 0
1012 JS_STATIC_ASSERT(N
== 0);
1014 // This only works when inline storage is always empty.
1015 if (!usingInlineStorage() && other
.usingInlineStorage()) {
1016 other
.mBegin
= mBegin
;
1017 mBegin
= inlineStorage();
1018 } else if (usingInlineStorage() && !other
.usingInlineStorage()) {
1019 mBegin
= other
.mBegin
;
1020 other
.mBegin
= other
.inlineStorage();
1021 } else if (!usingInlineStorage() && !other
.usingInlineStorage()) {
1022 Swap(mBegin
, other
.mBegin
);
1024 // This case is a no-op, since we'd set both to use their inline storage.
1027 Swap(mLength
, other
.mLength
);
1028 Swap(mCapacity
, other
.mCapacity
);
1030 Swap(mReserved
, other
.mReserved
);
1034 } /* namespace js */
1037 #pragma warning(pop)
1040 #endif /* jsvector_h_ */