Add support for HHBC ops with 5 immediates
[hiphop-php.git] / hphp / runtime / base / packed-array.cpp
blobac87b0a5b3a0519f5f89d3d3535b881f2832827b
1 /*
2 +----------------------------------------------------------------------+
3 | HipHop for PHP |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
16 #include "hphp/runtime/base/packed-array.h"
18 #include <algorithm>
19 #include <cstdlib>
20 #include <cstring>
22 #include <folly/Likely.h>
24 #include "hphp/runtime/base/apc-array.h"
25 #include "hphp/runtime/base/apc-stats.h"
26 #include "hphp/runtime/base/array-init.h"
27 #include "hphp/runtime/base/array-helpers.h"
28 #include "hphp/runtime/base/tv-val.h"
29 #include "hphp/runtime/base/mixed-array.h"
30 #include "hphp/runtime/base/runtime-error.h"
31 #include "hphp/runtime/base/thread-info.h"
32 #include "hphp/runtime/base/tv-comparisons.h"
33 #include "hphp/runtime/base/tv-mutate.h"
34 #include "hphp/runtime/base/tv-refcount.h"
35 #include "hphp/runtime/base/tv-type.h"
36 #include "hphp/runtime/base/tv-variant.h"
38 #include "hphp/runtime/base/mixed-array-defs.h"
39 #include "hphp/runtime/base/array-iterator-defs.h"
40 #include "hphp/runtime/base/packed-array-defs.h"
42 namespace HPHP {
44 //////////////////////////////////////////////////////////////////////
// Backing storage for the process-wide singleton empty vec and empty varray.
// The raw bytes are reinterpreted as ArrayData and stamped by the
// initializer structs below before main() runs.
46 std::aligned_storage<sizeof(ArrayData), 16>::type s_theEmptyVecArray;
47 std::aligned_storage<sizeof(ArrayData), 16>::type s_theEmptyVArray;
// Static-initialization hook: marks s_theEmptyVecArray as an empty,
// static-refcount VecArray (size/pos = 0, size-class index 0, no DV bits).
49 struct PackedArray::VecInitializer {
50 VecInitializer() {
51 auto const ad = reinterpret_cast<ArrayData*>(&s_theEmptyVecArray);
52 ad->m_sizeAndPos = 0;
53 ad->initHeader_16(
54 HeaderKind::VecArray,
55 StaticValue,
56 packSizeIndexAndDV(0, ArrayData::kNotDVArray)
58 assertx(checkInvariants(ad));
61 PackedArray::VecInitializer PackedArray::s_vec_initializer;
// Same idea for the empty varray: HeaderKind::Packed plus the kVArray bit.
// Invariant checking is skipped when EvalHackArrDVArrs is enabled, since
// dvarray flavors are invalid in that mode (see checkInvariants).
63 struct PackedArray::VArrayInitializer {
64 VArrayInitializer() {
65 auto const ad = reinterpret_cast<ArrayData*>(&s_theEmptyVArray);
66 ad->m_sizeAndPos = 0;
67 ad->initHeader_16(
68 HeaderKind::Packed,
69 StaticValue,
70 packSizeIndexAndDV(0, ArrayData::kVArray)
72 assertx(RuntimeOption::EvalHackArrDVArrs || checkInvariants(ad));
75 PackedArray::VArrayInitializer PackedArray::s_varr_initializer;
77 //////////////////////////////////////////////////////////////////////
79 namespace {
// Allocate untracked memory for a static packed array holding `cap`
// elements: one ArrayData header followed by cap TypedValues.  Uses the
// low-memory allocator when EvalLowStaticArrays is set.
81 inline ArrayData* alloc_packed_static(size_t cap) {
82 auto size = sizeof(ArrayData) + cap * sizeof(TypedValue);
83 auto ret = RuntimeOption::EvalLowStaticArrays ? low_malloc(size)
84 : malloc(size);
85 return static_cast<ArrayData*>(ret);
// Debug-mode structural checks shared by every packed-layout kind
// (Packed, Packed+kVArray, VecArray).  Always returns true so it can be
// used inside assertx(); violations fire the inner assertions instead.
90 bool PackedArray::checkInvariants(const ArrayData* arr) {
91 assertx(arr->hasPackedLayout());
92 assertx(arr->checkCount());
93 assertx(arr->m_size <= MixedArray::MaxSize);
94 assertx(arr->m_size <= capacity(arr));
95 assertx(arr->m_pos >= 0 && arr->m_pos <= arr->m_size);
96 assertx(!arr->isPacked() || !arr->isDArray());
97 assertx(!arr->isVecArray() || arr->isNotDVArray());
98 assertx(!RuntimeOption::EvalHackArrDVArrs || arr->isNotDVArray());
99 static_assert(ArrayData::kPackedKind == 0, "");
100 // Note that m_pos < m_size is not an invariant, because an array
101 // that grows will only adjust m_size to zero on the old array.
103 // This loop is too slow for normal use, but can be enabled to debug
104 // packed arrays.
105 if (false) {
106 auto ptr = packedData(arr);
107 auto const stop = ptr + arr->m_size;
108 for (; ptr != stop; ptr++) {
109 assertx(ptr->m_type != KindOfUninit);
110 assertx(tvIsPlausible(*ptr));
111 assertx(!arr->isVecArray() || ptr->m_type != KindOfRef);
114 return true;
117 //////////////////////////////////////////////////////////////////////
// Allocate and initialize a MixedArray header sized for `neededSize`
// elements, copying size/pos from `old` and mapping varray -> darray.
// The hash table and element payload are NOT initialized; callers must
// populate them before the result is usable.
119 ALWAYS_INLINE
120 MixedArray* PackedArray::ToMixedHeader(const ArrayData* old,
121 size_t neededSize) {
122 assertx(checkInvariants(old));
// Promotion of a varray to darray is user-visible under the HackArrCompat
// notice flag, so raise the notice here (once per promotion).
124 if (UNLIKELY(RuntimeOption::EvalHackArrCompatPromoteNotices) &&
125 old->isVArray()) {
126 raise_hackarr_compat_notice("varray promoting to darray");
129 auto const oldSize = old->m_size;
130 auto const scale = MixedArray::computeScaleFromSize(neededSize);
131 auto const ad = MixedArray::reqAlloc(scale);
// m_sizeAndPos packs size in the low 32 bits and pos in the high 32.
132 ad->m_sizeAndPos = oldSize | int64_t{old->m_pos} << 32;
133 ad->initHeader_16(
134 HeaderKind::Mixed,
135 OneReference,
136 old->isVArray() ? ArrayData::kDArray : ArrayData::kNotDVArray
138 ad->m_scale_used = scale | uint64_t{oldSize} << 32; // used=oldSize
139 ad->m_nextKI = oldSize;
141 assertx(ad->m_size == oldSize);
142 assertx(ad->m_pos == old->m_pos);
143 assertx(ad->kind() == ArrayData::kMixedKind);
144 assertx(ad->isDArray() == old->isVArray());
145 assertx(ad->hasExactlyOneRef());
146 assertx(ad->m_used == oldSize);
147 assertx(ad->m_scale == scale);
148 assertx(ad->m_nextKI == oldSize);
149 // Can't checkInvariants yet, since we haven't populated the payload.
150 return ad;
154 * Converts a packed array to mixed, leaving the packed array in an
155 * empty state. You need ToMixedCopy in cases where the old array
156 * needs to remain un-modified (usually if `copy' is true).
158 * The returned array is mixed, and is guaranteed not to be isFull().
159 * (Note: only unset can call ToMixed when we aren't about to insert.)
161 MixedArray* PackedArray::ToMixed(ArrayData* old) {
// Reserve one extra slot so the result is never full (see note above).
162 auto const oldSize = old->m_size;
163 auto const ad = ToMixedHeader(old, oldSize + 1);
164 auto const mask = ad->mask();
165 auto dstData = ad->data();
166 auto const srcData = packedData(old);
// Re-key every element as int key i, inserting into the fresh hash
// table; tvCopy MOVES the value (no incref) -- `old` gives them up.
168 auto const dstHash = ad->initHash(ad->scale());
169 for (uint32_t i = 0; i < oldSize; ++i) {
170 auto h = hash_int64(i);
171 *ad->findForNewInsert(dstHash, mask, h) = i;
172 dstData->setIntKey(i, h);
173 tvCopy(srcData[i], dstData->data);
174 ++dstData;
// `old` is now an empty zombie; its elements live in `ad`.
176 old->m_sizeAndPos = 0;
178 // PHP does not have the concept of packed VS mixed, so packed to mixed
179 // promotion needs to be invisible to strong iteration in order to match
180 // PHP behavior; intentionally not doing the same in ToMixedCopy{,Reserve}
181 // because copies _are_ supposed to be visible to strong iteration
182 if (UNLIKELY(strong_iterators_exist())) move_strong_iterators(ad, old);
184 assertx(ad->checkInvariants());
185 assertx(!ad->isFull());
186 assertx(ad->hasExactlyOneRef());
187 return ad;
191 * Convert a packed array to mixed, without moving the elements out of
192 * the old packed array. This effectively performs a Copy at the same
193 * time as converting to mixed. The returned mixed array is
194 * guaranteed not to be full.
196 MixedArray* PackedArray::ToMixedCopy(const ArrayData* old) {
197 assertx(checkInvariants(old));
199 auto const oldSize = old->m_size;
200 auto const ad = ToMixedHeader(old, oldSize + 1);
201 auto const mask = ad->mask();
202 auto dstData = ad->data();
203 auto const srcData = packedData(old);
// Unlike ToMixed, tvDupWithRef COPIES each element (increfs/unboxes as
// appropriate), leaving `old` fully intact.
205 auto const dstHash = ad->initHash(ad->scale());
206 for (uint32_t i = 0; i < oldSize; ++i) {
207 auto h = hash_int64(i);
208 *ad->findForNewInsert(dstHash, mask, h) = i;
209 dstData->setIntKey(i, h);
210 tvDupWithRef(srcData[i], dstData->data, old);
211 ++dstData;
214 assertx(ad->checkInvariants());
215 assertx(!ad->isFull());
216 assertx(ad->hasExactlyOneRef());
217 return ad;
221 * Convert to mixed, reserving space for at least `neededSize' elems.
222 * The `neededSize' should include old->size(), but may be equal to
223 * it.
225 MixedArray* PackedArray::ToMixedCopyReserve(const ArrayData* old,
226 size_t neededSize) {
227 assertx(neededSize >= old->m_size);
228 auto const ad = ToMixedHeader(old, neededSize);
229 auto const oldSize = old->m_size;
230 auto const mask = ad->mask();
231 auto dstData = ad->data();
232 auto const srcData = packedData(old);
// Copy (not move) each element under its int key; `old` is untouched.
234 auto const dstHash = ad->initHash(ad->scale());
235 for (uint32_t i = 0; i < oldSize; ++i) {
236 auto h = hash_int64(i);
237 *ad->findForNewInsert(dstHash, mask, h) = i;
238 dstData->setIntKey(i, h);
239 tvDupWithRef(srcData[i], dstData->data, old);
240 ++dstData;
243 assertx(ad->checkInvariants());
244 assertx(ad->hasExactlyOneRef());
245 return ad;
// Reallocate a full packed array into the next doubled size class.
// With copy=true the input is left untouched (elements duplicated);
// with copy=false the payload is moved wholesale and `adIn` becomes a
// zombie.  Returns nullptr if the maximum size class is exceeded.
248 NEVER_INLINE
249 ArrayData* PackedArray::Grow(ArrayData* adIn, bool copy) {
250 assertx(checkInvariants(adIn));
251 assertx(adIn->m_size == capacity(adIn));
253 auto const sizeIndex = sizeClass(adIn) + kSizeClassesPerDoubling;
254 if (UNLIKELY(sizeIndex > MaxSizeIndex)) return nullptr;
255 auto ad = static_cast<ArrayData*>(tl_heap->objMallocIndex(sizeIndex));
257 if (copy) {
258 // CopyPackedHelper will copy the header and m_sizeAndPos; since we pass
259 // convertingPackedToVec = false, it can't fail. All we have to do
260 // afterwards is fix the capacity and refcount on the copy; it's easiest
261 // to do that by reinitializing the whole header.
262 auto const DEBUG_ONLY ok = CopyPackedHelper<false>(adIn, ad);
263 assertx(ok);
264 ad->initHeader_16(
265 adIn->m_kind,
266 OneReference,
267 packSizeIndexAndDV(sizeIndex, adIn->dvArray())
270 assertx(ad->m_size == adIn->m_size);
271 assertx(ad->m_pos == adIn->m_pos);
272 } else {
273 // Copy everything from `adIn' to `ad', including header and m_sizeAndPos
274 static_assert(sizeof(ArrayData) == 16 && sizeof(TypedValue) == 16, "");
// Raw 16-byte-chunk copy of header + m_size elements; refcounts are NOT
// adjusted because ownership moves rather than duplicates.
275 memcpy16_inline(ad, adIn, (adIn->m_size + 1) * sizeof(TypedValue));
276 ad->initHeader_16(
277 adIn->m_kind,
278 OneReference,
279 packSizeIndexAndDV(sizeIndex, adIn->dvArray())
282 assertx(ad->m_size == adIn->m_size);
283 assertx(ad->m_pos == adIn->m_pos);
284 adIn->m_sizeAndPos = 0; // old is a zombie now
// In the move case, live strong iterators must follow the elements.
286 if (UNLIKELY(strong_iterators_exist())) move_strong_iterators(ad, adIn);
289 assertx(ad->kind() == adIn->kind());
290 assertx(ad->dvArray() == adIn->dvArray());
291 assertx(capacity(ad) > capacity(adIn));
292 assertx(ad->hasExactlyOneRef());
293 assertx(checkInvariants(ad));
294 return ad;
// Return an array safe to insert one element into: grow if at capacity,
// copy if shared (copy=true), otherwise mutate `adIn` in place.
297 ALWAYS_INLINE
298 ArrayData* PackedArray::PrepareForInsert(ArrayData* adIn, bool copy) {
299 assertx(checkInvariants(adIn));
300 if (adIn->m_size == capacity(adIn)) return Grow(adIn, copy);
301 if (copy) return Copy(adIn);
302 return adIn;
305 //////////////////////////////////////////////////////////////////////
307 /* This helper copies everything from adIn to ad, including the header
308 * (capacity, kind, and refcount) and m_sizeAndPos. It then increfs the
309 * contents, if needed.
311 * If convertingPackedToVec is false, it will always succeed (return true).
313 * If convertingPackedToVec is true and adIn contains a Ref, then it will
314 * return false. Refcounts of the contents will be left in a consistent state.
315 * It is the callers responsibility to free ad and throw an appropriate
316 * exception in this case.
318 template<bool convertingPackedToVec>
319 ALWAYS_INLINE
320 bool PackedArray::CopyPackedHelper(const ArrayData* adIn, ArrayData* ad) {
321 // Copy everything from `adIn' to `ad', including refcount, kind and cap
322 auto const size = adIn->m_size;
323 static_assert(sizeof(ArrayData) == 16 && sizeof(TypedValue) == 16, "");
324 memcpy16_inline(ad, adIn, (size + 1) * 16);
326 // Copy counted types correctly, especially RefData.
327 for (auto elm = packedData(ad), end = elm + size; elm < end; ++elm) {
328 if (UNLIKELY(elm->m_type == KindOfRef)) {
329 assertx(!adIn->isVecArray());
330 auto ref = elm->m_data.pref;
331 // See also tvDupWithRef()
// An unreferenced ref that doesn't point back into the source array is
// flattened to its inner value instead of sharing the RefData.
332 if (!ref->isReferenced() && ref->tv()->m_data.parr != adIn) {
333 cellDup(*ref->tv(), *elm);
334 continue;
335 } else if (convertingPackedToVec) {
// Vecs cannot contain refs: roll back increfs done so far and bail.
336 for (--elm; elm >= packedData(ad); --elm) {
337 tvDecRefGen(elm);
339 return false;
342 tvIncRefGen(*elm);
344 return true;
// Allocate an exact same-size-class duplicate of `adIn` with refcount 1.
// Used for copy-on-write before mutating a shared packed array.
347 NEVER_INLINE
348 ArrayData* PackedArray::Copy(const ArrayData* adIn) {
349 assertx(checkInvariants(adIn));
351 auto ad = static_cast<ArrayData*>(tl_heap->objMallocIndex(sizeClass(adIn)));
353 // CopyPackedHelper will copy the header (including capacity and kind), and
354 // m_sizeAndPos; since we pass convertingPackedToVec = false, it can't fail.
355 // All we have to do afterwards is fix the refcount on the copy.
356 auto const DEBUG_ONLY ok = CopyPackedHelper<false>(adIn, ad);
357 assertx(ok);
358 ad->m_count = OneReference;
360 assertx(ad->kind() == adIn->kind());
361 assertx(capacity(ad) == capacity(adIn));
362 assertx(ad->m_size == adIn->m_size);
363 assertx(ad->m_pos == adIn->m_pos);
364 assertx(ad->hasExactlyOneRef());
365 assertx(checkInvariants(ad));
366 return ad;
// Duplicate `adIn` into untracked (static) memory, trimmed to the
// smallest size class that fits m_size, with a StaticValue refcount.
369 ArrayData* PackedArray::CopyStatic(const ArrayData* adIn) {
370 assertx(checkInvariants(adIn));
372 auto const sizeIndex = capacityToSizeIndex(adIn->m_size);
373 auto ad = alloc_packed_static(adIn->m_size);
374 // CopyPackedHelper will copy the header and m_sizeAndPos; since we pass
375 // convertingPackedToVec = false, it can't fail. All we have to do afterwards
376 // is fix the capacity and refcount on the copy; it's easiest to do that by
377 // reinitializing the whole header.
378 auto const DEBUG_ONLY ok = CopyPackedHelper<false>(adIn, ad);
379 assertx(ok);
380 ad->initHeader_16(
381 adIn->m_kind,
382 StaticValue,
383 packSizeIndexAndDV(sizeIndex, adIn->dvArray())
386 assertx(ad->kind() == adIn->kind());
387 assertx(ad->dvArray() == adIn->dvArray());
388 assertx(capacity(ad) >= adIn->m_size);
389 assertx(ad->m_size == adIn->m_size);
390 assertx(ad->m_pos == adIn->m_pos);
391 assertx(ad->isStatic());
392 assertx(checkInvariants(ad));
393 return ad;
// Build a static Packed-kind copy of any vector-like array `arr`
// (iterated via the generic iter_begin/iter_advance protocol, so the
// source need not itself be packed).  Preserves arr's dvarray bits.
396 ArrayData* PackedArray::ConvertStatic(const ArrayData* arr) {
397 assertx(arr->isVectorData());
398 assertx(!RuntimeOption::EvalHackArrDVArrs || arr->isNotDVArray());
399 assertx(!arr->isDArray());
401 auto const sizeIndex = capacityToSizeIndex(arr->m_size);
402 auto ad = alloc_packed_static(arr->m_size);
403 ad->initHeader_16(
404 HeaderKind::Packed,
405 StaticValue,
406 packSizeIndexAndDV(sizeIndex, arr->dvArray())
408 ad->m_sizeAndPos = arr->m_sizeAndPos;
// Copy elements in iteration order into consecutive packed slots.
410 auto data = packedData(ad);
411 auto pos_limit = arr->iter_end();
412 for (auto pos = arr->iter_begin(); pos != pos_limit;
413 pos = arr->iter_advance(pos), ++data) {
414 tvDupWithRef(arr->atPos(pos), *data, arr);
417 assertx(ad->isPacked());
418 assertx(capacity(ad) >= arr->m_size);
419 assertx(ad->dvArray() == arr->dvArray());
420 assertx(ad->m_size == arr->m_size);
421 assertx(ad->m_pos == arr->m_pos);
422 assertx(ad->isStatic());
423 assertx(checkInvariants(ad));
424 return ad;
427 /* This helper allocates an ArrayData and initializes the header (including
428 * capacity, kind, and refcount). The caller is responsible for initializing
429 * m_sizeAndPos, and initializing array entries (if any).
431 ALWAYS_INLINE
432 ArrayData* PackedArray::MakeReserveImpl(uint32_t cap,
433 HeaderKind hk,
434 ArrayData::DVArray dvarray) {
435 assertx(!RuntimeOption::EvalHackArrDVArrs ||
436 dvarray == ArrayData::kNotDVArray);
437 auto const sizeIndex = capacityToSizeIndex(cap);
438 auto ad = static_cast<ArrayData*>(tl_heap->objMallocIndex(sizeIndex));
439 ad->initHeader_16(
441 OneReference,
442 packSizeIndexAndDV(sizeIndex, dvarray)
444 assertx(ad->m_kind == hk);
445 assertx(ad->dvArray() == dvarray);
446 assertx(capacity(ad) >= cap);
447 assertx(ad->hasExactlyOneRef());
448 return ad;
// Empty packed PHP array with at least `capacity` slots reserved.
451 ArrayData* PackedArray::MakeReserve(uint32_t capacity) {
452 auto ad =
453 MakeReserveImpl(capacity, HeaderKind::Packed, ArrayData::kNotDVArray);
454 ad->m_sizeAndPos = 0;
455 assertx(ad->isPacked());
456 assertx(ad->isNotDVArray());
457 assertx(ad->m_size == 0);
458 assertx(ad->m_pos == 0);
459 assertx(checkInvariants(ad));
460 return ad;
// Empty varray (Packed kind + kVArray bit) with reserved capacity.
463 ArrayData* PackedArray::MakeReserveVArray(uint32_t capacity) {
464 auto ad = MakeReserveImpl(capacity, HeaderKind::Packed, ArrayData::kVArray);
465 ad->m_sizeAndPos = 0;
466 assertx(ad->isPacked());
467 assertx(ad->isVArray());
468 assertx(ad->m_size == 0);
469 assertx(ad->m_pos == 0);
470 assertx(checkInvariants(ad));
471 return ad;
// Empty vec (Hack array) with reserved capacity.
474 ArrayData* PackedArray::MakeReserveVec(uint32_t capacity) {
475 auto ad =
476 MakeReserveImpl(capacity, HeaderKind::VecArray, ArrayData::kNotDVArray);
477 ad->m_sizeAndPos = 0;
478 assertx(ad->isVecArray());
479 assertx(ad->m_size == 0);
480 assertx(ad->m_pos == 0);
481 assertx(checkInvariants(ad));
482 return ad;
// Shared implementation for the Make* factories below: build a packed
// array of `size` elements from `values`, taking ownership (values are
// moved, not increfed).  With reverse=true the input is written back-
// to-front (stack order); with reverse=false it is memcpy'd as-is.
485 template<bool reverse>
486 ALWAYS_INLINE
487 ArrayData* PackedArray::MakePackedImpl(uint32_t size,
488 const Cell* values,
489 HeaderKind hk,
490 ArrayData::DVArray dv) {
491 assertx(size > 0);
492 auto ad = MakeReserveImpl(size, hk, dv);
493 ad->m_sizeAndPos = size; // pos = 0
495 // Append values by moving; this function takes ownership of them.
496 if (reverse) {
497 auto elm = packedData(ad) + size - 1;
498 for (auto end = values + size; values < end; ++values, --elm) {
499 cellCopy(*values, *elm);
501 } else {
502 if (debug) {
503 for (uint32_t i = 0; i < size; ++i) {
504 assertx(cellIsPlausible(*(values + i)));
507 memcpy16_inline(packedData(ad), values, sizeof(Cell) * size);
510 assertx(ad->m_size == size);
511 assertx(ad->m_pos == 0);
512 assertx(checkInvariants(ad));
513 return ad;
// Build a packed PHP array from `size` stack-ordered cells (reversed).
516 ArrayData* PackedArray::MakePacked(uint32_t size, const Cell* values) {
517 // Values are in reverse order since they come from the stack, which
518 // grows down.
519 auto ad = MakePackedImpl<true>(size, values, HeaderKind::Packed,
520 ArrayData::kNotDVArray);
521 assertx(ad->isPacked());
522 assertx(ad->isNotDVArray());
523 return ad;
// Same, producing a varray; only valid while HackArrDVArrs is off.
526 ArrayData* PackedArray::MakeVArray(uint32_t size, const Cell* values) {
527 // Values are in reverse order since they come from the stack, which
528 // grows down.
529 assertx(!RuntimeOption::EvalHackArrDVArrs);
530 auto ad = MakePackedImpl<true>(size, values, HeaderKind::Packed,
531 ArrayData::kVArray);
532 assertx(ad->isPacked());
533 assertx(ad->isVArray());
534 return ad;
// Same, producing a vec.
537 ArrayData* PackedArray::MakeVec(uint32_t size, const Cell* values) {
538 // Values are in reverse order since they come from the stack, which
539 // grows down.
540 auto ad = MakePackedImpl<true>(size, values, HeaderKind::VecArray,
541 ArrayData::kNotDVArray);
542 assertx(ad->isVecArray());
543 return ad;
// Build a packed array from values already in natural (index) order.
546 ArrayData* PackedArray::MakePackedNatural(uint32_t size, const Cell* values) {
547 auto ad = MakePackedImpl<false>(size, values, HeaderKind::Packed,
548 ArrayData::kNotDVArray);
549 assertx(ad->isPacked());
550 assertx(ad->isNotDVArray());
551 return ad;
// Packed array of `size` slots with m_size set but elements left
// UNINITIALIZED; the caller must fill every slot before use.
554 ArrayData* PackedArray::MakeUninitialized(uint32_t size) {
555 auto ad = MakeReserveImpl(size, HeaderKind::Packed, ArrayData::kNotDVArray);
556 ad->m_sizeAndPos = size; // pos = 0
557 assertx(ad->isPacked());
558 assertx(ad->isNotDVArray());
559 assertx(ad->m_size == size);
560 assertx(ad->m_pos == 0);
561 assertx(checkInvariants(ad));
562 return ad;
// Varray flavor of the above; only valid while HackArrDVArrs is off.
565 ArrayData* PackedArray::MakeUninitializedVArray(uint32_t size) {
566 assertx(!RuntimeOption::EvalHackArrDVArrs);
567 auto ad = MakeReserveImpl(size, HeaderKind::Packed, ArrayData::kVArray);
568 ad->m_sizeAndPos = size; // pos = 0
569 assertx(ad->isPacked());
570 assertx(ad->isVArray());
571 assertx(ad->m_size == size);
572 assertx(ad->m_pos == 0);
573 assertx(checkInvariants(ad));
574 return ad;
// Vec flavor of the above.
577 ArrayData* PackedArray::MakeUninitializedVec(uint32_t size) {
578 auto ad = MakeReserveImpl(size, HeaderKind::VecArray, ArrayData::kNotDVArray);
579 ad->m_sizeAndPos = size; // pos = 0
580 assertx(ad->isVecArray());
581 assertx(ad->m_size == size);
582 assertx(ad->m_pos == 0);
583 assertx(checkInvariants(ad));
584 return ad;
// Materialize a local vec from an APC-shared vec by appending each
// element's localized value.
587 ArrayData* PackedArray::MakeVecFromAPC(const APCArray* apc) {
588 assertx(apc->isVec());
589 auto const apcSize = apc->size();
590 VecArrayInit init{apcSize};
591 for (uint32_t i = 0; i < apcSize; ++i) {
592 init.append(apc->getValue(i)->toLocal());
594 return init.create();
// Same for an APC-shared varray; invalid once HackArrDVArrs is on.
597 ArrayData* PackedArray::MakeVArrayFromAPC(const APCArray* apc) {
598 assertx(!RuntimeOption::EvalHackArrDVArrs);
599 assertx(apc->isVArray());
600 auto const apcSize = apc->size();
601 VArrayInit init{apcSize};
602 for (uint32_t i = 0; i < apcSize; ++i) {
603 init.append(apc->getValue(i)->toLocal());
605 return init.create();
// Destroy a request-local packed array whose refcount just hit zero:
// decref every element, drop any strong iterators, free the storage.
608 void PackedArray::Release(ArrayData* ad) {
609 assertx(checkInvariants(ad));
610 assertx(ad->isRefCounted());
611 assertx(ad->hasExactlyOneRef());
613 for (auto elm = packedData(ad), end = elm + ad->m_size; elm < end; ++elm) {
614 tvDecRefGen(elm);
616 if (UNLIKELY(strong_iterators_exist())) {
617 free_strong_iterators(ad);
619 tl_heap->objFreeIndex(ad, sizeClass(ad));
620 AARCH64_WALKABLE_FRAME();
// Release an uncounted (APC-backed) packed array once its uncounted
// refcount drains; frees the preceding APCTypedValue header too when
// present.
623 NEVER_INLINE
624 void PackedArray::ReleaseUncounted(ArrayData* ad) {
625 assertx(checkInvariants(ad));
626 if (!ad->uncountedDecRef()) return;
628 auto const data = packedData(ad);
629 auto const stop = data + ad->m_size;
630 for (auto ptr = data; ptr != stop; ++ptr) {
631 ReleaseUncountedTv(*ptr);
634 // We better not have strong iterators associated with uncounted arrays.
635 assertx(!has_strong_iterator(ad));
636 if (APCStats::IsCreated()) {
637 APCStats::getAPCStats().removeAPCUncountedBlock();
// The allocation may start at the APCTypedValue immediately before ad.
640 free_huge(reinterpret_cast<char*>(ad) -
641 (ad->hasApcTv() ? sizeof(APCTypedValue) : 0));
644 ////////////////////////////////////////////////////////////////////////////////
646 tv_rval PackedArray::NvGetInt(const ArrayData* ad, int64_t ki) {
647 assertx(checkInvariants(ad));
648 auto const data = packedData(ad);
649 return LIKELY(size_t(ki) < ad->m_size) ? &data[ki] : nullptr;
// Packed arrays have no string keys, so string lookup always misses.
652 tv_rval
653 PackedArray::NvGetStr(const ArrayData* ad, const StringData* /*s*/) {
654 assertx(checkInvariants(ad));
655 return nullptr;
// Vec variant of int lookup: out-of-bounds throws instead of returning
// nullptr.
658 tv_rval PackedArray::NvTryGetIntVec(const ArrayData* ad, int64_t k) {
659 assertx(checkInvariants(ad));
660 assertx(ad->isVecArray());
661 auto const data = packedData(ad);
662 if (LIKELY(size_t(k) < ad->m_size)) return &data[k];
663 throwOOBArrayKeyException(k, ad);
// Vecs reject string keys outright.
666 tv_rval PackedArray::NvTryGetStrVec(const ArrayData* ad,
667 const StringData* s) {
668 assertx(checkInvariants(ad));
669 assertx(ad->isVecArray());
670 throwInvalidArrayKeyException(s, ad);
// The key at iteration position `pos` is the position itself, as int.
673 Cell PackedArray::NvGetKey(const ArrayData* ad, ssize_t pos) {
674 assertx(checkInvariants(ad));
675 assertx(pos != ad->m_size);
676 return make_tv<KindOfInt64>(pos);
// Vsize is only called for kinds with lazy sizes; packed never is one.
679 size_t PackedArray::Vsize(const ArrayData*) {
680 // PackedArray always has a valid m_size so it's an error to get here.
681 always_assert(false);
// Direct reference to the element at iteration position `pos`.
684 tv_rval PackedArray::GetValueRef(const ArrayData* ad, ssize_t pos) {
685 assertx(checkInvariants(ad));
686 assertx(pos != ad->m_size);
687 return &packedData(ad)[pos];
690 bool PackedArray::ExistsInt(const ArrayData* ad, int64_t k) {
691 assertx(checkInvariants(ad));
692 return size_t(k) < ad->m_size;
// Packed arrays never carry string keys.
695 bool PackedArray::ExistsStr(const ArrayData* ad, const StringData* /*s*/) {
696 assertx(checkInvariants(ad));
697 return false;
700 ///////////////////////////////////////////////////////////////////////////////
702 namespace {
// Common dispatch for int-keyed mutation of a plain packed array.
// Three continuations: `found` (key in range, after COW copy),
// `append` (key == size, i.e. an implicit append), and `promoted`
// (any other key forces escalation to MixedArray).
704 template<typename FoundFn, typename AppendFn, typename PromotedFn>
705 auto MutableOpInt(ArrayData* adIn, int64_t k, bool copy,
706 FoundFn found, AppendFn append, PromotedFn promoted) {
707 assertx(PackedArray::checkInvariants(adIn));
708 assertx(adIn->isPacked());
710 if (LIKELY(size_t(k) < adIn->getSize())) {
711 auto const ad = copy ? PackedArray::Copy(adIn) : adIn;
712 return found(ad);
715 if (size_t(k) == adIn->getSize()) {
716 if (UNLIKELY(RuntimeOption::EvalHackArrCompatPromoteNotices) &&
717 adIn->isVArray()) {
718 raise_hackarr_compat_notice("Implicit append to varray");
720 return append();
723 auto const mixed = copy ? PackedArray::ToMixedCopy(adIn)
724 : PackedArray::ToMixed(adIn);
725 return promoted(mixed);
// String-keyed mutation always escalates a packed array to mixed.
728 template <typename PromotedFn>
729 auto MutableOpStr(ArrayData* adIn, StringData* /*k*/, bool copy,
730 PromotedFn promoted) {
731 assertx(PackedArray::checkInvariants(adIn));
732 assertx(adIn->isPacked());
734 auto const mixed = copy ? PackedArray::ToMixedCopy(adIn)
735 : PackedArray::ToMixed(adIn);
736 return promoted(mixed);
// Vec flavor: out-of-range int keys throw; no escalation path exists.
739 template<typename FoundFn>
740 auto MutableOpIntVec(ArrayData* adIn, int64_t k, bool copy, FoundFn found) {
741 assertx(PackedArray::checkInvariants(adIn));
742 assertx(adIn->isVecArray());
744 if (UNLIKELY(size_t(k) >= adIn->getSize())) {
745 throwOOBArrayKeyException(k, adIn);
747 auto const ad = copy ? PackedArray::Copy(adIn) : adIn;
748 return found(ad);
// Lval (element-reference) operations.  Each returns an arr_lval: the
// (possibly copied/escalated) array plus a pointer to the element slot.
753 arr_lval PackedArray::LvalInt(ArrayData* adIn, int64_t k, bool copy) {
754 return MutableOpInt(adIn, k, copy,
755 [&] (ArrayData* ad) { return arr_lval { ad, &packedData(ad)[k] }; },
756 [&] { return LvalNew(adIn, copy); },
757 // TODO(#2606310): Make use of our knowledge that the key is missing.
758 [&] (MixedArray* mixed) { return mixed->addLvalImpl<true>(k); }
// Ref-binding variant: raises the HackArrCompat notice first.
762 arr_lval PackedArray::LvalIntRef(ArrayData* adIn, int64_t k, bool copy) {
763 if (checkHACRefBind()) raiseHackArrCompatRefBind(k);
764 return LvalInt(adIn, k, copy);
766 arr_lval PackedArray::LvalIntVec(ArrayData* adIn, int64_t k, bool copy) {
768 return MutableOpIntVec(adIn, k, copy,
769 [&] (ArrayData* ad) { return arr_lval { ad, &packedData(ad)[k] }; }
// "Silent" lookup: a missing key yields a null lval, no append and no
// escalation.
773 arr_lval PackedArray::LvalSilentInt(ArrayData* adIn, int64_t k, bool copy) {
774 assertx(checkInvariants(adIn));
775 if (UNLIKELY(size_t(k) >= adIn->m_size)) return {adIn, nullptr};
776 auto const ad = copy ? Copy(adIn) : adIn;
777 return arr_lval { ad, &packedData(ad)[k] };
// String lvals always escalate to mixed (packed has no string keys).
780 arr_lval PackedArray::LvalStr(ArrayData* adIn, StringData* k, bool copy) {
781 return MutableOpStr(adIn, k, copy,
782 // TODO(#2606310): Make use of our knowledge that the key is missing.
783 [&] (MixedArray* mixed) { return mixed->addLvalImpl<true>(k); }
787 arr_lval
788 PackedArray::LvalStrRef(ArrayData* adIn, StringData* key, bool copy) {
789 if (checkHACRefBind()) raiseHackArrCompatRefBind(key);
790 return LvalStr(adIn, key, copy);
// Vec variants: string keys and refs are categorically invalid.
793 arr_lval
794 PackedArray::LvalStrVec(ArrayData* adIn, StringData* key, bool) {
795 assertx(checkInvariants(adIn));
796 assertx(adIn->isVecArray());
797 throwInvalidArrayKeyException(key, adIn);
800 arr_lval PackedArray::LvalIntRefVec(ArrayData* adIn, int64_t /*k*/, bool) {
801 assertx(checkInvariants(adIn));
802 assertx(adIn->isVecArray());
803 throwRefInvalidArrayValueException(adIn);
806 arr_lval
807 PackedArray::LvalStrRefVec(ArrayData* adIn, StringData* key, bool) {
808 assertx(checkInvariants(adIn));
809 assertx(adIn->isVecArray());
810 throwInvalidArrayKeyException(key, adIn);
// Append a fresh null element and return an lval to it.
813 arr_lval PackedArray::LvalNew(ArrayData* adIn, bool copy) {
814 assertx(checkInvariants(adIn));
815 auto const ad = PrepareForInsert(adIn, copy);
816 auto& tv = packedData(ad)[ad->m_size++];
817 tv.m_type = KindOfNull;
818 return arr_lval { ad, &tv };
821 arr_lval PackedArray::LvalNewRef(ArrayData* adIn, bool copy) {
822 if (checkHACRefBind()) raiseHackArrCompatRefNew();
823 return LvalNew(adIn, copy);
826 arr_lval PackedArray::LvalNewRefVec(ArrayData* adIn, bool) {
827 assertx(checkInvariants(adIn));
828 assertx(adIn->isVecArray());
829 throwRefInvalidArrayValueException(adIn);
// Set operations: store a value at a key, with COW (`copy`), implicit
// append when k == size, and escalation to mixed for other keys.
832 ArrayData* PackedArray::SetInt(ArrayData* adIn, int64_t k, Cell v, bool copy) {
833 return MutableOpInt(adIn, k, copy,
834 [&] (ArrayData* ad) { setElem(packedData(ad)[k], v); return ad; },
835 [&] { return Append(adIn, v, copy); },
836 [&] (MixedArray* mixed) { return mixed->addVal(k, v); }
// Vec variant: in-range keys only; setElemNoRef because vecs hold no refs.
840 ArrayData*
841 PackedArray::SetIntVec(ArrayData* adIn, int64_t k, Cell v, bool copy) {
842 return MutableOpIntVec(adIn, k, copy,
843 [&] (ArrayData* ad) { setElemNoRef(packedData(ad)[k], v); return ad; }
// String keys force escalation to mixed.
847 ArrayData* PackedArray::SetStr(ArrayData* adIn, StringData* k, Cell v,
848 bool copy) {
849 return MutableOpStr(adIn, k, copy,
850 [&] (MixedArray* mixed) { return mixed->addVal(k, v); }
854 ArrayData* PackedArray::SetStrVec(ArrayData* adIn, StringData* k, Cell, bool) {
855 assertx(checkInvariants(adIn));
856 assertx(adIn->isVecArray());
857 throwInvalidArrayKeyException(k, adIn);
// SetWithRef variants preserve ref-ness of `v`; they raise the
// HackArrCompat ref-bind notice when `v` is referenced.
860 ArrayData* PackedArray::SetWithRefInt(ArrayData* adIn, int64_t k,
861 TypedValue v, bool copy) {
862 auto const checkHackArrRef = [&] {
863 if (checkHACRefBind() && tvIsReferenced(v)) {
864 raiseHackArrCompatRefBind(k);
868 return MutableOpInt(adIn, k, copy,
869 [&] (ArrayData* ad) {
870 checkHackArrRef();
871 setElemWithRef(packedData(ad)[k], v);
872 return ad;
874 [&] { return AppendWithRef(adIn, v, copy); },
875 [&] (MixedArray* mixed) {
876 checkHackArrRef();
877 auto const lval = mixed->addLvalImpl<false>(k);
878 tvSetWithRef(v, lval);
879 return lval.arr;
// Vec variant: referenced values are invalid in vecs.
884 ArrayData* PackedArray::SetWithRefIntVec(ArrayData* adIn, int64_t k,
885 TypedValue v, bool copy) {
886 if (tvIsReferenced(v)) throwRefInvalidArrayValueException(adIn);
888 return MutableOpIntVec(adIn, k, copy,
889 [&] (ArrayData* ad) { setElemNoRef(packedData(ad)[k], v); return ad; }
893 ArrayData* PackedArray::SetWithRefStr(ArrayData* adIn, StringData* k,
894 TypedValue v, bool copy) {
895 return MutableOpStr(adIn, k, copy,
896 [&] (MixedArray* mixed) {
897 if (checkHACRefBind() && tvIsReferenced(v)) {
898 raiseHackArrCompatRefBind(k);
900 auto const lval = mixed->addLvalImpl<false>(k);
901 tvSetWithRef(v, lval);
902 return lval.arr;
907 ArrayData* PackedArray::SetWithRefStrVec(ArrayData* adIn, StringData* k,
908 TypedValue, bool) {
909 assertx(checkInvariants(adIn));
910 assertx(adIn->isVecArray());
911 throwInvalidArrayKeyException(k, adIn);
// SetRef variants bind the slot to a RefData (boxing `v` if needed).
914 ArrayData* PackedArray::SetRefInt(ArrayData* adIn, int64_t k,
915 tv_lval v, bool copy) {
916 if (checkHACRefBind()) raiseHackArrCompatRefBind(k);
918 return MutableOpInt(adIn, k, copy,
919 [&] (ArrayData* ad) {
920 tvBoxIfNeeded(v);
921 tvBind(v.tv(), packedData(ad)[k]);
922 return ad;
924 [&] { return AppendRef(adIn, v, copy); },
925 // TODO(#2606310): Make use of our knowledge that the key is missing.
926 [&] (MixedArray* mixed) { return mixed->updateRef(k, v); }
// Vecs reject refs (and string keys) in all SetRef flavors.
930 ArrayData* PackedArray::SetRefIntVec(ArrayData* adIn, int64_t,
931 tv_lval, bool) {
932 assertx(checkInvariants(adIn));
933 assertx(adIn->isVecArray());
934 throwRefInvalidArrayValueException(adIn);
937 ArrayData* PackedArray::SetRefStr(ArrayData* adIn, StringData* k,
938 tv_lval v, bool copy) {
939 if (checkHACRefBind()) raiseHackArrCompatRefBind(k);
941 return MutableOpStr(adIn, k, copy,
942 // TODO(#2606310): Make use of our knowledge that the key is missing.
943 [&] (MixedArray* mixed) { return mixed->updateRef(k, v); }
947 ArrayData* PackedArray::SetRefStrVec(ArrayData* adIn, StringData* k,
948 tv_lval, bool) {
949 assertx(checkInvariants(adIn));
950 assertx(adIn->isVecArray());
951 throwInvalidArrayKeyException(k, adIn);
954 ///////////////////////////////////////////////////////////////////////////////
956 namespace {
// After removing the last element, clamp any live strong iterators on
// `ad` back into range; if the array became empty, reset them instead.
958 void adjustMArrayIterAfterPop(ArrayData* ad) {
959 assertx(ad->hasPackedLayout());
960 auto const size = ad->getSize();
961 if (size) {
962 for_each_strong_iterator([&] (MIterTable::Ent& miEnt) {
963 if (miEnt.array != ad) return;
964 auto const iter = miEnt.iter;
965 if (iter->getResetFlag()) return;
966 if (iter->m_pos >= size) iter->m_pos = size - 1;
968 } else {
969 reset_strong_iterators(ad);
// Remove an int key from a plain packed array.  A hit escalates to
// mixed (to preserve m_nextKI semantics); a miss stays packed.
975 ArrayData* PackedArray::RemoveInt(ArrayData* adIn, int64_t k, bool copy) {
976 assertx(checkInvariants(adIn));
977 assertx(adIn->isPacked());
978 if (size_t(k) < adIn->m_size) {
979 // Escalate to mixed for correctness; unset preserves m_nextKI.
981 // TODO(#2606310): if we're removing the /last/ element, we
982 // probably could stay packed, but this needs to be verified.
983 auto const mixed = copy ? ToMixedCopy(adIn) : ToMixed(adIn);
984 auto pos = mixed->findForRemove(k, hash_int64(k), false);
985 if (validPos(pos)) mixed->erase(pos);
986 return mixed;
988 // Key doesn't exist---we're still packed.
989 return copy ? Copy(adIn) : adIn;
992 ArrayData*
993 PackedArray::RemoveIntVec(ArrayData* adIn, int64_t k, bool copy) {
994 assertx(checkInvariants(adIn));
995 assertx(adIn->isVecArray());
997 // You're only allowed to remove an element at the end of the vec (or beyond,
998 // which is a no-op).
999 if (UNLIKELY(size_t(k) >= adIn->m_size)) return adIn;
1000 if (LIKELY(size_t(k) + 1 == adIn->m_size)) {
1001 auto const ad = copy ? Copy(adIn) : adIn;
1002 auto const oldSize = ad->m_size;
// Snapshot the element, shrink first, THEN decref: the decref may
// re-enter and must not observe the removed slot as live.
1003 auto& tv = packedData(ad)[oldSize - 1];
1004 auto const oldTV = tv;
1005 ad->m_size = oldSize - 1;
1006 ad->m_pos = 0;
1007 tvDecRefGen(oldTV);
1008 return ad;
1010 throwVecUnsetException();
// String keys can never be present, so string removal is a no-op.
1013 ArrayData*
1014 PackedArray::RemoveStr(ArrayData* adIn, const StringData*, bool) {
1015 assertx(checkInvariants(adIn));
1016 return adIn;
1019 ssize_t PackedArray::IterBegin(const ArrayData* ad) {
1020 assertx(checkInvariants(ad));
1021 return 0;
1024 ssize_t PackedArray::IterLast(const ArrayData* ad) {
1025 assertx(checkInvariants(ad));
1026 return ad->m_size ? ad->m_size - 1 : 0;
1029 ssize_t PackedArray::IterEnd(const ArrayData* ad) {
1030 assertx(checkInvariants(ad));
1031 return ad->m_size;
1034 ssize_t PackedArray::IterAdvance(const ArrayData* ad, ssize_t pos) {
1035 assertx(checkInvariants(ad));
1036 if (pos < ad->m_size) {
1037 ++pos;
1039 return pos;
1042 ssize_t PackedArray::IterRewind(const ArrayData* ad, ssize_t pos) {
1043 assertx(checkInvariants(ad));
1044 if (pos > 0) {
1045 return pos - 1;
1047 return ad->m_size;
1050 bool PackedArray::AdvanceMArrayIter(ArrayData* ad, MArrayIter& fp) {
1051 assertx(checkInvariants(ad));
1052 if (fp.getResetFlag()) {
1053 fp.setResetFlag(false);
1054 fp.m_pos = 0;
1055 } else if (fp.m_pos == ad->m_size) {
1056 return false;
1057 } else {
1058 fp.m_pos = IterAdvance(ad, fp.m_pos);
1060 if (fp.m_pos == ad->m_size) {
1061 return false;
1063 // We set ad's internal cursor to point to the next element
1064 // to conform with PHP5 behavior
1065 ad->m_pos = IterAdvance(ad, fp.m_pos);
1066 return true;
1069 ArrayData* PackedArray::Append(ArrayData* adIn, Cell v, bool copy) {
1070 assertx(checkInvariants(adIn));
1071 assertx(v.m_type != KindOfUninit);
1072 auto const ad = PrepareForInsert(adIn, copy);
1073 cellDup(v, packedData(ad)[ad->m_size++]);
1074 return ad;
1077 ArrayData* PackedArray::AppendRef(ArrayData* adIn, tv_lval v, bool copy) {
1078 assertx(checkInvariants(adIn));
1079 assertx(adIn->isPacked());
1080 if (checkHACRefBind()) raiseHackArrCompatRefNew();
1081 auto const ad = PrepareForInsert(adIn, copy);
1082 auto& dst = packedData(ad)[ad->m_size++];
1083 tvBoxIfNeeded(v);
1084 dst.m_data.pref = v.val().pref;
1085 dst.m_type = KindOfRef;
1086 dst.m_data.pref->incRefCount();
1087 return ad;
1090 ArrayData* PackedArray::AppendRefVec(ArrayData* adIn, tv_lval, bool) {
1091 assertx(checkInvariants(adIn));
1092 assertx(adIn->isVecArray());
1093 throwRefInvalidArrayValueException(adIn);
1096 ArrayData*
1097 PackedArray::AppendWithRef(ArrayData* adIn, TypedValue v, bool copy) {
1098 assertx(checkInvariants(adIn));
1099 assertx(adIn->isPacked());
1101 if (checkHACRefBind() && tvIsReferenced(v)) {
1102 raiseHackArrCompatRefNew();
1105 auto const ad = PrepareForInsert(adIn, copy);
1106 auto& dst = packedData(ad)[ad->m_size++];
1107 dst.m_type = KindOfNull;
1108 tvAsVariant(&dst).setWithRef(v);
1109 return ad;
1112 ArrayData*
1113 PackedArray::AppendWithRefVec(ArrayData* adIn, TypedValue v, bool copy) {
1114 assertx(checkInvariants(adIn));
1115 assertx(adIn->isVecArray());
1116 if (tvIsReferenced(v)) throwRefInvalidArrayValueException(adIn);
1117 return Append(adIn, tvToInitCell(v), copy);
1120 ArrayData* PackedArray::PlusEq(ArrayData* adIn, const ArrayData* elems) {
1121 assertx(checkInvariants(adIn));
1122 assertx(adIn->isPacked());
1123 if (!elems->isPHPArray()) throwInvalidAdditionException(elems);
1124 auto const neededSize = adIn->size() + elems->size();
1125 auto const mixed = ToMixedCopyReserve(adIn, neededSize);
1126 try {
1127 auto const ret = MixedArray::PlusEq(mixed, elems);
1128 assertx(ret == mixed);
1129 assertx(mixed->hasExactlyOneRef());
1130 return ret;
1131 } catch (...) {
1132 MixedArray::Release(mixed);
1133 throw;
1137 ArrayData* PackedArray::PlusEqVec(ArrayData* adIn, const ArrayData* /*elems*/) {
1138 assertx(checkInvariants(adIn));
1139 assertx(adIn->isVecArray());
1140 throwInvalidAdditionException(adIn);
1143 ArrayData* PackedArray::Merge(ArrayData* adIn, const ArrayData* elems) {
1144 assertx(checkInvariants(adIn));
1145 auto const neededSize = adIn->m_size + elems->size();
1146 auto const ret = ToMixedCopyReserve(adIn, neededSize);
1147 ret->setDVArray(ArrayData::kNotDVArray);
1148 return MixedArray::ArrayMergeGeneric(ret, elems);
1151 ArrayData* PackedArray::Pop(ArrayData* adIn, Variant& value) {
1152 assertx(checkInvariants(adIn));
1154 auto const ad = adIn->cowCheck() ? Copy(adIn) : adIn;
1156 if (UNLIKELY(ad->m_size == 0)) {
1157 assertx(ad->m_pos == 0);
1158 value = uninit_null();
1159 return ad;
1162 auto const oldSize = ad->m_size;
1163 auto& tv = packedData(ad)[oldSize - 1];
1164 value = tvAsCVarRef(&tv);
1165 auto const oldTV = tv;
1166 ad->m_size = oldSize - 1;
1167 ad->m_pos = 0;
1168 if (UNLIKELY(strong_iterators_exist())) adjustMArrayIterAfterPop(ad);
1169 tvDecRefGen(oldTV);
1170 return ad;
1173 ArrayData* PackedArray::Dequeue(ArrayData* adIn, Variant& value) {
1174 assertx(checkInvariants(adIn));
1176 auto const ad = adIn->cowCheck() ? Copy(adIn) : adIn;
1177 // To conform to PHP behavior, we invalidate all strong iterators when an
1178 // element is removed from the beginning of the array.
1179 if (UNLIKELY(strong_iterators_exist())) {
1180 free_strong_iterators(ad);
1183 if (UNLIKELY(ad->m_size == 0)) {
1184 value = uninit_null();
1185 return ad;
1188 // This is O(N), but so is Dequeue on a mixed array, because it
1189 // needs to renumber keys. So it makes sense to stay packed.
1190 auto n = ad->m_size - 1;
1191 auto const data = packedData(ad);
1192 value = std::move(tvAsVariant(data)); // no incref+decref
1193 std::memmove(data, data + 1, n * sizeof *data);
1194 ad->m_size = n;
1195 ad->m_pos = 0;
1196 return ad;
1199 ArrayData* PackedArray::Prepend(ArrayData* adIn, Cell v, bool /*copy*/) {
1200 assertx(checkInvariants(adIn));
1202 auto const ad = PrepareForInsert(adIn, adIn->cowCheck());
1204 // To conform to PHP behavior, we invalidate all strong iterators when an
1205 // element is added to the beginning of the array.
1206 if (UNLIKELY(strong_iterators_exist())) {
1207 free_strong_iterators(ad);
1210 auto const size = ad->m_size;
1211 auto const data = packedData(ad);
1212 std::memmove(data + 1, data, sizeof *data * size);
1213 cellDup(v, data[0]);
1214 ad->m_size = size + 1;
1215 ad->m_pos = 0;
1216 return ad;
1219 ArrayData* PackedArray::ToPHPArray(ArrayData* adIn, bool copy) {
1220 assertx(checkInvariants(adIn));
1221 assertx(adIn->isPacked());
1222 if (adIn->isNotDVArray()) return adIn;
1223 assertx(adIn->isVArray());
1224 if (adIn->getSize() == 0) return staticEmptyArray();
1225 ArrayData* ad = copy ? Copy(adIn) : adIn;
1226 ad->setDVArray(ArrayData::kNotDVArray);
1227 assertx(checkInvariants(ad));
1228 return ad;
1231 ArrayData* PackedArray::ToVArray(ArrayData* adIn, bool copy) {
1232 assertx(checkInvariants(adIn));
1233 assertx(adIn->isPacked());
1234 if (RuntimeOption::EvalHackArrDVArrs) return ToVec(adIn, copy);
1235 if (adIn->isVArray()) return adIn;
1236 if (adIn->getSize() == 0) return staticEmptyVArray();
1237 ArrayData* ad = copy ? Copy(adIn) : adIn;
1238 ad->setDVArray(ArrayData::kVArray);
1239 assertx(checkInvariants(ad));
1240 return ad;
1243 ArrayData* PackedArray::ToDArray(ArrayData* adIn, bool /*copy*/) {
1244 assertx(checkInvariants(adIn));
1246 auto const size = adIn->getSize();
1247 if (size == 0) return staticEmptyDArray();
1249 DArrayInit init{size};
1250 auto const elms = packedData(adIn);
1251 for (int64_t i = 0; i < size; ++i) init.add(i, elms[i]);
1252 return init.create();
1255 ArrayData* PackedArray::ToPHPArrayVec(ArrayData* adIn, bool copy) {
1256 assertx(checkInvariants(adIn));
1257 assertx(adIn->isVecArray());
1258 ArrayData* ad = copy ? Copy(adIn) : adIn;
1259 ad->m_kind = HeaderKind::Packed;
1260 assertx(ad->isNotDVArray());
1261 assertx(checkInvariants(ad));
1262 return ad;
1265 ArrayData* PackedArray::ToVArrayVec(ArrayData* adIn, bool copy) {
1266 assertx(checkInvariants(adIn));
1267 assertx(adIn->isVecArray());
1268 if (RuntimeOption::EvalHackArrDVArrs) return adIn;
1269 if (adIn->getSize() == 0) return staticEmptyVArray();
1270 ArrayData* ad = copy ? Copy(adIn) : adIn;
1271 ad->m_kind = HeaderKind::Packed;
1272 ad->setDVArray(ArrayData::kVArray);
1273 assertx(checkInvariants(ad));
1274 return ad;
1277 ArrayData* PackedArray::ToDict(ArrayData* ad, bool copy) {
1278 assertx(checkInvariants(ad));
1279 assertx(ad->isPacked());
1281 auto mixed = [&] {
1282 switch (ArrayCommon::CheckForRefs(ad)) {
1283 case ArrayCommon::RefCheckResult::Pass:
1284 return copy ? ToMixedCopy(ad) : ToMixed(ad);
1285 case ArrayCommon::RefCheckResult::Collapse:
1286 // Unconditionally copy to remove unreferenced refs
1287 return ToMixedCopy(ad);
1288 case ArrayCommon::RefCheckResult::Fail:
1289 throwRefInvalidArrayValueException(staticEmptyDictArray());
1290 break;
1292 not_reached();
1293 }();
1294 return MixedArray::ToDictInPlace(mixed);
1297 ArrayData* PackedArray::ToDictVec(ArrayData* ad, bool copy) {
1298 assertx(checkInvariants(ad));
1299 assertx(ad->isVecArray());
1300 auto mixed = copy ? ToMixedCopy(ad) : ToMixed(ad);
1301 return MixedArray::ToDictInPlace(mixed);
1304 ArrayData* PackedArray::ToVec(ArrayData* adIn, bool copy) {
1305 assertx(checkInvariants(adIn));
1306 assertx(adIn->isPacked());
1308 auto const do_copy = [&] {
1309 // CopyPackedHelper will copy the header and m_sizeAndPos; since we pass
1310 // convertingPackedToVec = true, it can fail and we have to handle that.
1311 // All we have to do afterwards is fix the kind and refcount in the copy;
1312 // it's easiest to do that by reinitializing the whole header.
1313 auto ad = static_cast<ArrayData*>(tl_heap->objMallocIndex(sizeClass(adIn)));
1314 if (!CopyPackedHelper<true>(adIn, ad)) {
1315 tl_heap->objFreeIndex(ad, sizeClass(adIn));
1316 SystemLib::throwInvalidArgumentExceptionObject(
1317 "Vecs cannot contain references");
1319 ad->initHeader_16(
1320 HeaderKind::VecArray,
1321 OneReference,
1322 packSizeIndexAndDV(sizeClass(adIn), ArrayData::kNotDVArray)
1324 return ad;
1327 ArrayData* ad;
1328 if (copy) {
1329 ad = do_copy();
1330 } else {
1331 auto const result = ArrayCommon::CheckForRefs(adIn);
1332 if (LIKELY(result == ArrayCommon::RefCheckResult::Pass)) {
1333 adIn->m_kind = HeaderKind::VecArray;
1334 adIn->setDVArray(ArrayData::kNotDVArray);
1335 ad = adIn;
1336 } else if (result == ArrayCommon::RefCheckResult::Collapse) {
1337 ad = do_copy();
1338 } else {
1339 throwRefInvalidArrayValueException(staticEmptyVecArray());
1343 assertx(ad->isVecArray());
1344 assertx(capacity(ad) == capacity(adIn));
1345 assertx(ad->m_size == adIn->m_size);
1346 assertx(ad->m_pos == adIn->m_pos);
1347 assertx(ad->hasExactlyOneRef());
1348 assertx(checkInvariants(ad));
1349 return ad;
1352 ArrayData* PackedArray::ToVecVec(ArrayData* ad, bool) {
1353 assertx(checkInvariants(ad));
1354 assertx(ad->isVecArray());
1355 return ad;
1358 void PackedArray::OnSetEvalScalar(ArrayData* ad) {
1359 assertx(checkInvariants(ad));
1360 auto ptr = packedData(ad);
1361 auto const stop = ptr + ad->m_size;
1362 for (; ptr != stop; ++ptr) {
1363 tvAsVariant(ptr).setEvalScalar();
1367 void PackedArray::Ksort(ArrayData* ad, int /*flags*/, bool ascending) {
1368 assertx(ad->getSize() <= 1 || ascending);
1371 void PackedArray::Asort(ArrayData* ad, int, bool) {
1372 assertx(ad->getSize() <= 1);
1375 bool PackedArray::Uksort(ArrayData* ad, const Variant&) {
1376 assertx(ad->getSize() <= 1);
1377 return true;
1380 bool PackedArray::Uasort(ArrayData* ad, const Variant&) {
1381 assertx(ad->getSize() <= 1);
1382 return true;
1385 ArrayData* PackedArray::MakeUncounted(ArrayData* array,
1386 bool withApcTypedValue,
1387 PointerMap* seen) {
1388 void** seenVal = nullptr;
1389 if (seen && array->hasMultipleRefs()) {
1390 auto it = seen->find(array);
1391 assertx(it != seen->end());
1392 seenVal = &it->second;
1393 if (auto const arr = static_cast<ArrayData*>(*seenVal)) {
1394 if (arr->uncountedIncRef()) {
1395 return arr;
1399 assertx(checkInvariants(array));
1400 assertx(!array->empty());
1401 if (APCStats::IsCreated()) {
1402 APCStats::getAPCStats().addAPCUncountedBlock();
1405 auto const extra = withApcTypedValue ? sizeof(APCTypedValue) : 0;
1406 auto const size = array->m_size;
1407 auto const sizeIndex = capacityToSizeIndex(size);
1408 auto const mem = static_cast<char*>(
1409 malloc_huge(extra + sizeof(ArrayData) + size * sizeof(TypedValue))
1411 auto ad = reinterpret_cast<ArrayData*>(mem + extra);
1412 ad->initHeader_16(
1413 array->m_kind,
1414 UncountedValue,
1415 packSizeIndexAndDV(sizeIndex, array->dvArray()) |
1416 (withApcTypedValue ? ArrayData::kHasApcTv : 0)
1418 ad->m_sizeAndPos = array->m_sizeAndPos;
1420 // Do a raw copy without worrying about refcounts, and convert the values to
1421 // uncounted later.
1422 auto src = packedData(array);
1423 auto dst = packedData(ad);
1424 memcpy16_inline(dst, src, sizeof(TypedValue) * size);
1425 for (auto end = dst + size; dst < end; ++dst) {
1426 ConvertTvToUncounted(dst, seen);
1429 assertx(ad->kind() == array->kind());
1430 assertx(ad->dvArray() == array->dvArray());
1431 assertx(capacity(ad) >= size);
1432 assertx(ad->m_size == size);
1433 assertx(ad->m_pos == array->m_pos);
1434 assertx(ad->isUncounted());
1435 assertx(checkInvariants(ad));
1436 if (seenVal) *seenVal = ad;
1437 return ad;
1440 ALWAYS_INLINE
1441 bool PackedArray::VecEqualHelper(const ArrayData* ad1, const ArrayData* ad2,
1442 bool strict) {
1443 assertx(checkInvariants(ad1));
1444 assertx(checkInvariants(ad2));
1445 assertx(ad1->isVecArray());
1446 assertx(ad2->isVecArray());
1448 if (ad1 == ad2) return true;
1449 if (ad1->m_size != ad2->m_size) return false;
1451 // Prevent circular referenced objects/arrays or deep ones.
1452 check_recursion_error();
1454 auto elm1 = packedData(ad1);
1455 auto end = elm1 + ad1->m_size;
1456 auto elm2 = packedData(ad2);
1457 if (strict) {
1458 for (; elm1 < end; ++elm1, ++elm2) {
1459 if (!cellSame(*elm1, *elm2)) return false;
1461 } else {
1462 for (; elm1 < end; ++elm1, ++elm2) {
1463 if (!cellEqual(*elm1, *elm2)) return false;
1467 return true;
1470 ALWAYS_INLINE
1471 int64_t PackedArray::VecCmpHelper(const ArrayData* ad1, const ArrayData* ad2) {
1472 assertx(checkInvariants(ad1));
1473 assertx(checkInvariants(ad2));
1474 assertx(ad1->isVecArray());
1475 assertx(ad2->isVecArray());
1477 auto const size1 = ad1->m_size;
1478 auto const size2 = ad2->m_size;
1480 if (size1 < size2) return -1;
1481 if (size1 > size2) return 1;
1483 // Prevent circular referenced objects/arrays or deep ones.
1484 check_recursion_error();
1486 auto elm1 = packedData(ad1);
1487 auto end = elm1 + size1;
1488 auto elm2 = packedData(ad2);
1489 for (; elm1 < end; ++elm1, ++elm2) {
1490 auto const cmp = cellCompare(*elm1, *elm2);
1491 if (cmp != 0) return cmp;
1494 return 0;
1497 bool PackedArray::VecEqual(const ArrayData* ad1, const ArrayData* ad2) {
1498 return VecEqualHelper(ad1, ad2, false);
1501 bool PackedArray::VecNotEqual(const ArrayData* ad1, const ArrayData* ad2) {
1502 return !VecEqualHelper(ad1, ad2, false);
1505 bool PackedArray::VecSame(const ArrayData* ad1, const ArrayData* ad2) {
1506 return VecEqualHelper(ad1, ad2, true);
1509 bool PackedArray::VecNotSame(const ArrayData* ad1, const ArrayData* ad2) {
1510 return !VecEqualHelper(ad1, ad2, true);
1513 bool PackedArray::VecLt(const ArrayData* ad1, const ArrayData* ad2) {
1514 return VecCmpHelper(ad1, ad2) < 0;
1517 bool PackedArray::VecLte(const ArrayData* ad1, const ArrayData* ad2) {
1518 return VecCmpHelper(ad1, ad2) <= 0;
1521 bool PackedArray::VecGt(const ArrayData* ad1, const ArrayData* ad2) {
1522 return VecCmpHelper(ad1, ad2) > 0;
1525 bool PackedArray::VecGte(const ArrayData* ad1, const ArrayData* ad2) {
1526 return VecCmpHelper(ad1, ad2) >= 0;
1529 int64_t PackedArray::VecCmp(const ArrayData* ad1, const ArrayData* ad2) {
1530 return VecCmpHelper(ad1, ad2);
//////////////////////////////////////////////////////////////////////