Use bulk countedness checks to optimize PackedArray::Release
[hiphop-php.git] / hphp / runtime / base / packed-array.cpp
bloba80b146dedff7faa20fdb10543184d9e071e4293
1 /*
2 +----------------------------------------------------------------------+
3 | HipHop for PHP |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
16 #include "hphp/runtime/base/packed-array.h"
18 #include <algorithm>
19 #include <cstdlib>
20 #include <cstring>
22 #include <folly/Format.h>
23 #include <folly/Likely.h>
25 #include "hphp/runtime/base/apc-array.h"
26 #include "hphp/runtime/base/apc-stats.h"
27 #include "hphp/runtime/base/array-init.h"
28 #include "hphp/runtime/base/array-helpers.h"
29 #include "hphp/runtime/base/tv-val.h"
30 #include "hphp/runtime/base/mixed-array.h"
31 #include "hphp/runtime/base/runtime-error.h"
32 #include "hphp/runtime/base/request-info.h"
33 #include "hphp/runtime/base/tv-comparisons.h"
34 #include "hphp/runtime/base/tv-mutate.h"
35 #include "hphp/runtime/base/tv-refcount.h"
36 #include "hphp/runtime/base/tv-type.h"
37 #include "hphp/runtime/base/tv-variant.h"
39 #include "hphp/runtime/base/mixed-array-defs.h"
40 #include "hphp/runtime/base/packed-array-defs.h"
41 #include "hphp/runtime/base/packed-block.h"
43 namespace HPHP {
45 //////////////////////////////////////////////////////////////////////
// NOTE(review): this file is a line-numbered blob dump; closing braces of the
// structs below (and other blank/brace lines) were elided by the dump. The
// numeric prefixes on each line are artifacts of the dump, not code.
//
// Backing storage for the four canonical empty packed arrays: vec, varray,
// and their legacy-"marked" variants. Each slab is 16-byte aligned and large
// enough for an ArrayData header; the Initializer structs stamp a valid
// static empty-array header into each slab at static-init time.
std::aligned_storage<sizeof(ArrayData), 16>::type s_theEmptyVec;
std::aligned_storage<sizeof(ArrayData), 16>::type s_theEmptyVArray;
std::aligned_storage<sizeof(ArrayData), 16>::type s_theEmptyMarkedVec;
std::aligned_storage<sizeof(ArrayData), 16>::type s_theEmptyMarkedVArray;
// Empty vec: size 0, extra 0, StaticValue refcount (never freed), aux bits 0.
struct PackedArray::VecInitializer {
VecInitializer() {
auto const aux = packSizeIndexAndAuxBits(0, 0);
auto const ad = reinterpret_cast<ArrayData*>(&s_theEmptyVec);
ad->m_size = 0;
ad->m_extra = 0;
ad->initHeader_16(HeaderKind::Vec, StaticValue, aux);
assertx(checkInvariants(ad));
PackedArray::VecInitializer PackedArray::s_vec_initializer;
// Empty varray: same shape but HeaderKind::Packed. Invariants are only
// checked when HackArrDVArrs is off (varrays don't exist in that mode).
struct PackedArray::VArrayInitializer {
VArrayInitializer() {
auto const aux = packSizeIndexAndAuxBits(0, 0);
auto const ad = reinterpret_cast<ArrayData*>(&s_theEmptyVArray);
ad->m_size = 0;
ad->m_extra = 0;
ad->initHeader_16(HeaderKind::Packed, StaticValue, aux);
assertx(RuntimeOption::EvalHackArrDVArrs || checkInvariants(ad));
PackedArray::VArrayInitializer PackedArray::s_varr_initializer;
// Empty "marked" vec: as above, plus the kLegacyArray aux bit. Checked only
// when HackArrDVArrs is on (a legacy-marked vec requires that mode).
struct PackedArray::MarkedVecInitializer {
MarkedVecInitializer() {
auto const aux = packSizeIndexAndAuxBits(0, ArrayData::kLegacyArray);
auto const ad = reinterpret_cast<ArrayData*>(&s_theEmptyMarkedVec);
ad->m_size = 0;
ad->m_extra = 0;
ad->initHeader_16(HeaderKind::Vec, StaticValue, aux);
assertx(!RuntimeOption::EvalHackArrDVArrs || checkInvariants(ad));
PackedArray::MarkedVecInitializer PackedArray::s_marked_vec_initializer;
// Empty "marked" varray: Packed kind with the kLegacyArray aux bit set.
struct PackedArray::MarkedVArrayInitializer {
MarkedVArrayInitializer() {
auto const aux = packSizeIndexAndAuxBits(0, ArrayData::kLegacyArray);
auto const ad = reinterpret_cast<ArrayData*>(&s_theEmptyMarkedVArray);
ad->m_size = 0;
ad->m_extra = 0;
ad->initHeader_16(HeaderKind::Packed, StaticValue, aux);
assertx(RuntimeOption::EvalHackArrDVArrs || checkInvariants(ad));
PackedArray::MarkedVArrayInitializer PackedArray::s_marked_varr_initializer;
100 //////////////////////////////////////////////////////////////////////
namespace {
// Allocates uninitialized storage for a static (immortal) packed array big
// enough to hold ad->size() elements. Uses the low-memory allocator when
// EvalLowStaticArrays is set so the pointer fits in 32 bits; otherwise the
// uncounted heap. Caller initializes the header and payload.
inline ArrayData* alloc_packed_static(const ArrayData* ad) {
auto const size = PackedArray::capacityToSizeBytes(ad->size());
auto const ret = RuntimeOption::EvalLowStaticArrays
? low_malloc(size)
: uncounted_malloc(size);
return reinterpret_cast<ArrayData*>(reinterpret_cast<char*>(ret));
// Debug-only structural invariants for any vanilla packed-layout array:
// kind/dvarray consistency, legal size vs. capacity, legacy-bit rules, and
// (when provenance is off for this array) a zeroed m_extra. Always returns
// true; failures fire assertx in debug builds.
bool PackedArray::checkInvariants(const ArrayData* arr) {
assertx(arr->hasVanillaPackedLayout());
assertx(arr->checkCountZ());
assertx(arr->m_size <= MixedArray::MaxSize);
assertx(arr->m_size <= capacity(arr));
assertx(IMPLIES(arr->isVArray(), arr->isPackedKind()));
assertx(IMPLIES(arr->isNotDVArray(), arr->isVecKind()));
assertx(IMPLIES(arr->isLegacyArray(), arr->isHAMSafeVArray()));
assertx(!RO::EvalHackArrDVArrs || arr->isVecKind());
assertx(IMPLIES(!arrprov::arrayWantsTag(arr),
arr->m_extra == 0 &&
IMPLIES(RO::EvalArrayProvenance,
!arrprov::getTag(arr).valid())));
// This loop is too slow for normal use, but can be enabled to debug
// packed arrays.
if (false) {
for (uint32_t i = 0; i < arr->m_size; ++i) {
auto const DEBUG_ONLY tv = NvGetInt(arr, i);
assertx(tv.is_init());
assertx(tvIsPlausible(tv));
return true;
//////////////////////////////////////////////////////////////////////
// Allocates and initializes a MixedArray header sized for `neededSize'
// entries, copying m_size/m_extra from `old' and choosing Mixed vs. Dict
// kind to mirror old's varray-ness. The payload (keys/values/hash table)
// is NOT populated here — callers fill it before checkInvariants can run.
ALWAYS_INLINE
MixedArray* PackedArray::ToMixedHeader(const ArrayData* old,
size_t neededSize) {
assertx(checkInvariants(old));
auto const oldSize = old->m_size;
auto const scale = MixedArray::computeScaleFromSize(neededSize);
auto const ad = MixedArray::reqAlloc(scale);
auto const kind = old->isVArray() ? HeaderKind::Mixed : HeaderKind::Dict;
ad->initHeader_16(kind, OneReference, MixedArrayKeys::packIntsForAux());
ad->m_size = oldSize;
ad->m_extra = old->m_extra;
ad->m_scale_used = scale | uint64_t{oldSize} << 32; // used=oldSize
ad->m_nextKI = oldSize;
assertx(ad->m_size == oldSize);
assertx(ad->hasVanillaMixedLayout());
assertx(ad->isDArray() == old->isVArray());
assertx(ad->hasExactlyOneRef());
assertx(ad->m_used == oldSize);
assertx(ad->m_scale == scale);
assertx(ad->m_nextKI == oldSize);
// Can't checkInvariants yet, since we haven't populated the payload.
return ad;
* Converts a packed array to mixed, leaving the packed array in an
* empty state. You need ToMixedCopy in cases where the old array
* needs to remain un-modified (usually if `copy' is true).
* The returned array is mixed, and is guaranteed not to be isFull().
* (Note: only unset can call ToMixed when we aren't about to insert.)
MixedArray* PackedArray::ToMixed(ArrayData* old) {
auto const oldSize = old->m_size;
// Reserve oldSize + 1 so the result is never full (see contract above).
auto const ad = ToMixedHeader(old, oldSize + 1);
auto const mask = ad->mask();
auto dstData = ad->data();
auto const dstHash = ad->initHash(ad->scale());
// Move each element: key i hashes to h, elements keep positional order.
// tvCopy moves without refcount traffic — ownership transfers to `ad'.
for (uint32_t i = 0; i < oldSize; ++i) {
auto h = hash_int64(i);
*ad->findForNewInsert(dstHash, mask, h) = i;
dstData->setIntKey(i, h);
tvCopy(GetPosVal(old, i), dstData->data);
++dstData;
// The old array is left as a zombie: size 0, contents logically moved out.
old->m_size = 0;
assertx(ad->checkInvariants());
assertx(!ad->isFull());
assertx(ad->hasExactlyOneRef());
return ad;
* Convert a packed array to mixed, without moving the elements out of
* the old packed array. This effectively performs a Copy at the same
* time as converting to mixed. The returned mixed array is
* guaranteed not to be full.
MixedArray* PackedArray::ToMixedCopy(const ArrayData* old) {
assertx(checkInvariants(old));
// Reserving size + 1 guarantees the !isFull() postcondition.
auto ad = PackedArray::ToMixedCopyReserve(old, old->m_size + 1);
assertx(!ad->isFull());
return ad;
* Convert to mixed, reserving space for at least `neededSize' elems.
* `neededSize' must be at least old->size(), and may be equal to it.
MixedArray* PackedArray::ToMixedCopyReserve(const ArrayData* old,
size_t neededSize) {
assertx(neededSize >= old->m_size);
auto const ad = ToMixedHeader(old, neededSize);
auto const oldSize = old->m_size;
auto const mask = ad->mask();
auto dstData = ad->data();
auto const dstHash = ad->initHash(ad->scale());
// Unlike ToMixed, tvDup (not tvCopy) is used: elements are duplicated with
// an incref, leaving `old' fully intact.
for (uint32_t i = 0; i < oldSize; ++i) {
auto const h = hash_int64(i);
*ad->findForNewInsert(dstHash, mask, h) = i;
dstData->setIntKey(i, h);
tvDup(GetPosVal(old, i), dstData->data);
++dstData;
assertx(ad->checkInvariants());
assertx(ad->hasExactlyOneRef());
return ad;
NEVER_INLINE
// Grows a full packed array into the next doubling size class. Returns
// nullptr if that would exceed MaxSizeIndex. With copy=true the original is
// left untouched (contents increffed); with copy=false contents are moved
// and the original becomes a size-0 zombie.
ArrayData* PackedArray::Grow(ArrayData* adIn, bool copy) {
assertx(checkInvariants(adIn));
assertx(adIn->m_size == capacity(adIn));
auto const sizeIndex = sizeClass(adIn) + kSizeClassesPerDoubling;
if (UNLIKELY(sizeIndex > MaxSizeIndex)) return nullptr;
auto ad = static_cast<ArrayData*>(tl_heap->objMallocIndex(sizeIndex));
if (copy) {
// CopyPackedHelper will copy the header and m_size; since we pass
// convertingPackedToVec = false, it can't fail. All we have to do
// afterwards is fix the capacity and refcount on the copy; it's easiest
// to do that by reinitializing the whole header.
CopyPackedHelper(adIn, ad);
ad->initHeader_16(
adIn->m_kind,
OneReference,
packSizeIndexAndAuxBits(sizeIndex, adIn->auxBits())
assertx(ad->m_size == adIn->m_size);
} else {
// Copy everything from `adIn' to `ad', including header and m_size
memcpy16_inline(ad, adIn, PackedArray::capacityToSizeBytes(adIn->m_size));
ad->initHeader_16(
adIn->m_kind,
OneReference,
packSizeIndexAndAuxBits(sizeIndex, adIn->auxBits())
assertx(ad->m_size == adIn->m_size);
adIn->m_size = 0; // old is a zombie now
assertx(ad->kind() == adIn->kind());
assertx(ArrayData::dvArrayEqual(ad, adIn));
assertx(capacity(ad) > capacity(adIn));
assertx(ad->hasExactlyOneRef());
assertx(ad->m_extra == adIn->m_extra);
assertx(checkInvariants(ad));
return tagArrProv(ad, adIn);
ALWAYS_INLINE
// Returns an array safe to insert into: Grow when full, Copy when a COW is
// required, otherwise adIn itself. Note Grow may return nullptr on overflow.
ArrayData* PackedArray::PrepareForInsert(ArrayData* adIn, bool copy) {
assertx(checkInvariants(adIn));
if (adIn->m_size == capacity(adIn)) return Grow(adIn, copy);
if (copy) return Copy(adIn);
return adIn;
289 //////////////////////////////////////////////////////////////////////
/* This helper copies everything from adIn to ad, including the header
* (capacity, kind, and refcount) and m_size. It then increfs the
* contents, if needed.
* If convertingPackedToVec is false, it will always succeed (return true).
* If convertingPackedToVec is true and adIn contains a Ref, then it will
* return false. Refcounts of the contents will be left in a consistent state.
* It is the callers responsibility to free ad and throw an appropriate
* exception in this case.
* NOTE(review): the comment above predates this signature — the function is
* now void and has no convertingPackedToVec parameter; it always succeeds.
ALWAYS_INLINE
void PackedArray::CopyPackedHelper(const ArrayData* adIn, ArrayData* ad) {
// Copy everything from `adIn' to `ad', including refcount, kind and cap
auto const size = adIn->m_size;
memcpy16_inline(ad, adIn, PackedArray::capacityToSizeBytes(size));
// Copy counted types correctly
for (uint32_t i = 0; i < size; ++i) {
auto const elm = LvalUncheckedInt(ad, i);
tvIncRefGen(*elm);
NEVER_INLINE
// Request-heap copy of a packed array in the same size class. The copy gets
// refcount 1; element refcounts are bumped by CopyPackedHelper.
ArrayData* PackedArray::Copy(const ArrayData* adIn) {
assertx(checkInvariants(adIn));
auto ad = static_cast<ArrayData*>(tl_heap->objMallocIndex(sizeClass(adIn)));
// CopyPackedHelper will copy the header (including capacity and kind), and
// m_size. All we have to do afterwards is fix the refcount on the copy.
CopyPackedHelper(adIn, ad);
ad->m_count = OneReference;
assertx(ad->kind() == adIn->kind());
assertx(ad->isLegacyArray() == adIn->isLegacyArray());
assertx(capacity(ad) == capacity(adIn));
assertx(ad->m_size == adIn->m_size);
assertx(ad->m_extra == adIn->m_extra);
assertx(ad->hasExactlyOneRef());
assertx(checkInvariants(ad));
return tagArrProv(ad, adIn);
// Copies a packed array into immortal (static) memory, shrink-wrapping the
// capacity to the element count. Used when an array becomes a static value.
ArrayData* PackedArray::CopyStatic(const ArrayData* adIn) {
assertx(checkInvariants(adIn));
auto const sizeIndex = capacityToSizeIndex(adIn->m_size);
auto ad = alloc_packed_static(adIn);
// CopyPackedHelper will copy the header and m_size. All we have to do
// afterwards is fix the capacity and refcount on the copy; it's easiest to do
// that by reinitializing the whole header.
CopyPackedHelper(adIn, ad);
ad->initHeader_16(
adIn->m_kind,
StaticValue,
packSizeIndexAndAuxBits(sizeIndex, adIn->auxBits())
assertx(ad->kind() == adIn->kind());
assertx(ArrayData::dvArrayEqual(ad, adIn));
assertx(!arrprov::arrayWantsTag(ad) ||
arrprov::getTag(ad) == arrprov::getTag(adIn));
assertx(capacity(ad) >= adIn->m_size);
assertx(ad->m_size == adIn->m_size);
assertx(ad->m_extra == adIn->m_extra);
assertx(ad->isStatic());
assertx(checkInvariants(ad));
return ad;
/* This helper allocates an ArrayData and initializes the header (including
* capacity, kind, and refcount). The caller is responsible for initializing
* m_size, and initializing array entries (if any).
ALWAYS_INLINE
ArrayData* PackedArray::MakeReserveImpl(uint32_t cap, HeaderKind hk) {
auto const sizeIndex = capacityToSizeIndex(cap);
auto ad = static_cast<ArrayData*>(tl_heap->objMallocIndex(sizeIndex));
ad->initHeader_16(hk, OneReference, packSizeIndexAndAuxBits(sizeIndex, 0));
assertx(ad->m_kind == hk);
assertx(capacity(ad) >= cap);
assertx(ad->hasExactlyOneRef());
return ad;
// Makes an empty varray with room for `capacity' elements. Under
// HackArrDVArrs, varrays are really vecs (optionally legacy-marked).
ArrayData* PackedArray::MakeReserveVArray(uint32_t capacity) {
if (RuntimeOption::EvalHackArrDVArrs) {
auto const ad = MakeReserveVec(capacity);
ad->setLegacyArrayInPlace(RuntimeOption::EvalHackArrDVArrMark);
return ad;
auto ad = MakeReserveImpl(capacity, HeaderKind::Packed);
ad->m_size = 0;
ad->m_extra = 0;
assertx(ad->isPackedKind());
assertx(ad->isVArray());
assertx(ad->m_size == 0);
assertx(checkInvariants(ad));
return tagArrProv(ad);
// Makes an empty vec with room for `capacity' elements. Vecs are never
// provenance-tagged here, hence no tagArrProv.
ArrayData* PackedArray::MakeReserveVec(uint32_t capacity) {
auto ad = MakeReserveImpl(capacity, HeaderKind::Vec);
ad->m_size = 0;
ad->m_extra = 0;
assertx(ad->isVecKind());
assertx(ad->m_size == 0);
assertx(checkInvariants(ad));
return ad;
// Builds a size-`size' packed array of kind `hk' from `values', taking
// ownership of them (moves, no increfs). With reverse=true the input is
// consumed back-to-front — callers pass stack-ordered values. The fast path
// uses a bulk memcpy when elements are stored as plain TypedValues.
template<bool reverse>
ALWAYS_INLINE
ArrayData* PackedArray::MakePackedImpl(uint32_t size,
const TypedValue* values,
HeaderKind hk) {
assertx(size > 0);
auto ad = MakeReserveImpl(size, hk);
ad->m_size = size;
ad->m_extra = 0;
// Append values by moving; this function takes ownership of them.
if (reverse) {
uint32_t i = size - 1;
for (auto end = values + size; values < end; ++values, --i) {
tvCopy(*values, LvalUncheckedInt(ad, i));
} else {
if (debug) {
for (uint32_t i = 0; i < size; ++i) {
assertx(tvIsPlausible(*(values + i)));
if constexpr (stores_typed_values) {
auto const data = PackedArray::entries(ad);
memcpy16_inline(data, values, sizeof(TypedValue) * size);
} else {
for (uint32_t i = 0; i < size; ++values, ++i) {
tvCopy(*values, LvalUncheckedInt(ad, i));
assertx(ad->m_size == size);
assertx(checkInvariants(ad));
return ad;
// Stack-order constructors (<true>): values arrive in reverse. The VArray
// variants delegate to the vec variants under HackArrDVArrs, legacy-marking
// per EvalHackArrDVArrMark; otherwise they build a Packed-kind array and
// provenance-tag it.
ArrayData* PackedArray::MakeVArray(uint32_t size, const TypedValue* values) {
// Values are in reverse order since they come from the stack, which
// grows down.
if (RuntimeOption::EvalHackArrDVArrs) {
auto const ad = MakeVec(size, values);
ad->setLegacyArrayInPlace(RuntimeOption::EvalHackArrDVArrMark);
return ad;
auto ad = MakePackedImpl<true>(size, values, HeaderKind::Packed);
assertx(ad->isPackedKind());
assertx(ad->isVArray());
return tagArrProv(ad);
ArrayData* PackedArray::MakeVec(uint32_t size, const TypedValue* values) {
// Values are in reverse order since they come from the stack, which
// grows down.
auto ad = MakePackedImpl<true>(size, values, HeaderKind::Vec);
assertx(ad->isVecKind());
return ad;
// "Natural" constructors (<false>): values are already in forward order.
ArrayData* PackedArray::MakeVArrayNatural(uint32_t size, const TypedValue* values) {
if (RuntimeOption::EvalHackArrDVArrs) {
auto const ad = MakeVecNatural(size, values);
ad->setLegacyArrayInPlace(RuntimeOption::EvalHackArrDVArrMark);
return ad;
auto ad = MakePackedImpl<false>(size, values, HeaderKind::Packed);
assertx(ad->isPackedKind());
assertx(ad->isVArray());
return tagArrProv(ad);
ArrayData* PackedArray::MakeVecNatural(uint32_t size, const TypedValue* values) {
auto ad = MakePackedImpl<false>(size, values, HeaderKind::Vec);
assertx(ad->isVecKind());
return ad;
// Makes a size-`size' array whose elements are left UNINITIALIZED — the
// caller must write every slot before the array is observed. Same
// HackArrDVArrs delegation as the other varray constructors.
ArrayData* PackedArray::MakeUninitializedVArray(uint32_t size) {
if (RuntimeOption::EvalHackArrDVArrs) {
auto const ad = MakeUninitializedVec(size);
ad->setLegacyArrayInPlace(RuntimeOption::EvalHackArrDVArrMark);
return ad;
auto ad = MakeReserveImpl(size, HeaderKind::Packed);
ad->m_size = size;
ad->m_extra = 0;
assertx(ad->isPackedKind());
assertx(ad->isVArray());
assertx(ad->m_size == size);
assertx(checkInvariants(ad));
return tagArrProv(ad);
ArrayData* PackedArray::MakeUninitializedVec(uint32_t size) {
auto ad = MakeReserveImpl(size, HeaderKind::Vec);
ad->m_size = size;
ad->m_extra = 0;
assertx(ad->isVecKind());
assertx(ad->m_size == size);
assertx(checkInvariants(ad));
return ad;
// Materializes a request-local vec from an APC packed array, localizing
// each element and restoring the legacy bit.
ArrayData* PackedArray::MakeVecFromAPC(const APCArray* apc, bool isLegacy) {
assertx(apc->isPacked());
auto const apcSize = apc->size();
VecInit init{apcSize};
for (uint32_t i = 0; i < apcSize; ++i) {
init.append(apc->getValue(i)->toLocal());
auto const ad = init.create();
ad->setLegacyArrayInPlace(isLegacy);
return ad;
// Same for a varray; only valid when HackArrDVArrs is off, and the result
// carries a provenance tag derived from the APC array.
ArrayData* PackedArray::MakeVArrayFromAPC(const APCArray* apc, bool isMarked) {
assertx(!RuntimeOption::EvalHackArrDVArrs);
assertx(apc->isVArray());
auto const apcSize = apc->size();
VArrayInit init{apcSize};
for (uint32_t i = 0; i < apcSize; ++i) {
init.append(apc->getValue(i)->toLocal());
auto const ad = init.create();
ad->setLegacyArrayInPlace(isMarked);
return tagArrProv(ad, apc);
// Frees a refcounted packed array whose count has dropped to zero, decref'ing
// its contents first. This is the function the commit title refers to: in the
// PackedBlock layout it checks whole blocks of kNumItems type tags at once
// (AllTypesAreUncounted) and skips the per-element decref loop for blocks
// holding only uncounted values.
void PackedArray::Release(ArrayData* ad) {
ad->fixCountForRelease();
assertx(checkInvariants(ad));
assertx(ad->isRefCounted());
assertx(ad->hasExactlyOneRef());
if (RuntimeOption::EvalArrayProvenance) {
arrprov::clearTag(ad);
if constexpr (stores_typed_values) {
// TypedValue layout: plain per-element decref.
for (uint32_t i = 0; i < ad->m_size; ++i) {
tvDecRefGen(LvalUncheckedInt(ad, i));
} else {
// PackedBlock layout: bulk-check each full block's countedness first.
auto constexpr n = PackedBlock::kNumItems;
auto block = PackedBlock::BlockAt(ad, 0);
auto remainder = ad->size();
while (remainder >= n) {
if (!block.AllTypesAreUncounted(n)) {
for (auto i = 0; i < n; i++) {
tvDecRefGen(block[i]);
++block;
remainder -= n;
// Final partial block, if any.
if (remainder && !block.AllTypesAreUncounted(remainder)) {
auto i = 0;
do {
tvDecRefGen(block[i++]);
} while (i < remainder);
tl_heap->objFreeIndex(ad, sizeClass(ad));
AARCH64_WALKABLE_FRAME();
NEVER_INLINE
// Decrefs an uncounted (APC-shared) packed array and frees it — including
// any APCTypedValue prefix — once its uncounted refcount hits zero.
void PackedArray::ReleaseUncounted(ArrayData* ad) {
assertx(checkInvariants(ad));
if (!ad->uncountedDecRef()) return;
if (RuntimeOption::EvalArrayProvenance) {
arrprov::clearTag(ad);
for (uint32_t i = 0; i < ad->m_size; ++i) {
ReleaseUncountedTv(LvalUncheckedInt(ad, i));
if (APCStats::IsCreated()) {
APCStats::getAPCStats().removeAPCUncountedBlock();
// The allocation starts `extra' bytes before `ad' when an APCTypedValue
// header precedes the array; free from the true start of the block.
auto const extra = uncountedAllocExtra(ad, ad->hasApcTv());
auto const allocSize = extra + PackedArray::capacityToSizeBytes(ad->m_size);
uncounted_sized_free(reinterpret_cast<char*>(ad) - extra, allocSize);
595 ////////////////////////////////////////////////////////////////////////////////
// Read-side accessors. Packed arrays only have int keys 0..size-1, so all
// string-key lookups trivially miss. The unsigned size_t(k) compare also
// rejects negative keys in one branch.
TypedValue PackedArray::NvGetInt(const ArrayData* ad, int64_t k) {
assertx(checkInvariants(ad));
return LIKELY(size_t(k) < ad->m_size)
? *LvalUncheckedInt(const_cast<ArrayData*>(ad), k)
: make_tv<KindOfUninit>();
TypedValue PackedArray::NvGetStr(const ArrayData* ad, const StringData* /*s*/) {
assertx(checkInvariants(ad));
return make_tv<KindOfUninit>();
// Position lookups: a valid key is its own iterator position; misses map to
// the end position (m_size).
ssize_t PackedArray::NvGetIntPos(const ArrayData* ad, int64_t k) {
assertx(checkInvariants(ad));
return LIKELY(size_t(k) < ad->m_size) ? k : ad->m_size;
ssize_t PackedArray::NvGetStrPos(const ArrayData* ad, const StringData* k) {
assertx(checkInvariants(ad));
return ad->m_size;
TypedValue PackedArray::GetPosKey(const ArrayData* ad, ssize_t pos) {
assertx(checkInvariants(ad));
assertx(pos != ad->m_size);
return make_tv<KindOfInt64>(pos);
TypedValue PackedArray::GetPosVal(const ArrayData* ad, ssize_t pos) {
assertx(checkInvariants(ad));
assertx(pos < ad->m_size);
return *LvalUncheckedInt(const_cast<ArrayData*>(ad), pos);
bool PackedArray::ExistsInt(const ArrayData* ad, int64_t k) {
assertx(checkInvariants(ad));
return size_t(k) < ad->m_size;
bool PackedArray::ExistsStr(const ArrayData* ad, const StringData* /*s*/) {
assertx(checkInvariants(ad));
return false;
641 ///////////////////////////////////////////////////////////////////////////////
namespace {
// Shared skeleton for mutating ops keyed by int: bounds-check (OOB throws),
// COW if requested, then run `found' on the writable array.
template<typename FoundFn>
auto MutableOpInt(ArrayData* adIn, int64_t k, bool copy, FoundFn found) {
assertx(PackedArray::checkInvariants(adIn));
if (UNLIKELY(size_t(k) >= adIn->size())) {
throwOOBArrayKeyException(k, adIn);
auto const ad = copy ? PackedArray::Copy(adIn) : adIn;
return found(ad);
// Lval/Set operations. String keys are always invalid on packed arrays and
// throw; int keys route through MutableOpInt for bounds check + COW.
arr_lval PackedArray::LvalInt(ArrayData* adIn, int64_t k) {
assertx(checkInvariants(adIn));
return MutableOpInt(adIn, k, adIn->cowCheck(),
[&] (ArrayData* ad) { return arr_lval { ad, LvalUncheckedInt(ad, k) }; }
// No bounds check: k must be < capacity. Resolves the element address for
// either storage layout (flat TypedValues or PackedBlock).
tv_lval PackedArray::LvalUncheckedInt(ArrayData* ad, int64_t k) {
assertx(size_t(k) < PackedArray::capacity(ad));
return stores_typed_values ? &PackedArray::entries(ad)[k]
: PackedBlock::LvalAt(ad, k);
arr_lval PackedArray::LvalStr(ArrayData* adIn, StringData* key) {
assertx(checkInvariants(adIn));
throwInvalidArrayKeyException(key, adIn);
// Appends a KindOfNull slot in place (caller guarantees room and no COW)
// and returns an lval to it.
tv_lval PackedArray::LvalNewInPlace(ArrayData* ad) {
assertx(checkInvariants(ad));
assertx(ad->m_size < capacity(ad));
assertx(!ad->cowCheck());
auto const lval = LvalUncheckedInt(ad, ad->m_size++);
type(lval) = KindOfNull;
return lval;
ArrayData* PackedArray::SetInt(ArrayData* adIn, int64_t k, TypedValue v) {
assertx(adIn->cowCheck() || adIn->notCyclic(v));
return MutableOpInt(adIn, k, adIn->cowCheck(),
[&] (ArrayData* ad) { setElem(LvalUncheckedInt(ad, k), v); return ad; }
// Move flavor: consumes the caller's reference to v, and releases adIn when
// a COW copy was made and adIn's count drops to zero.
ArrayData* PackedArray::SetIntMove(ArrayData* adIn, int64_t k, TypedValue v) {
assertx(adIn->cowCheck() || adIn->notCyclic(v));
auto const copy = adIn->cowCheck();
return MutableOpInt(adIn, k, copy,
[&] (ArrayData* ad) {
assertx((adIn != ad) == copy);
if (copy && adIn->decReleaseCheck()) PackedArray::Release(adIn);
setElem(LvalUncheckedInt(ad, k), v, true);
return ad;
ArrayData* PackedArray::SetStr(ArrayData* adIn, StringData* k, TypedValue v) {
assertx(checkInvariants(adIn));
throwInvalidArrayKeyException(k, adIn);
709 ///////////////////////////////////////////////////////////////////////////////
// Removes int key k. Only the last element may be removed (keeping keys
// dense); beyond-the-end is a no-op; removing from the middle throws.
ArrayData* PackedArray::RemoveInt(ArrayData* adIn, int64_t k) {
assertx(checkInvariants(adIn));
// You're only allowed to remove an element at the end of the varray or
// vec (or beyond the end, which is a no-op).
if (UNLIKELY(size_t(k) >= adIn->m_size)) return adIn;
if (LIKELY(size_t(k) + 1 == adIn->m_size)) {
auto const ad = adIn->cowCheck() ? Copy(adIn) : adIn;
auto const size = ad->m_size - 1;
// Shrink first, then decref: the value is already unreachable if the
// decref re-enters.
ad->m_size = size;
tvDecRefGen(LvalUncheckedInt(ad, size));
return ad;
if (adIn->isVecKind()) {
throwVecUnsetException();
} else {
throwVarrayUnsetException();
// String keys never exist in a packed array, so removal is a no-op.
ArrayData* PackedArray::RemoveStr(ArrayData* adIn, const StringData*) {
assertx(checkInvariants(adIn));
return adIn;
// Iterator protocol: positions are simply indices 0..m_size, with m_size as
// the end sentinel (also returned by IterRewind when walking past the front).
ssize_t PackedArray::IterBegin(const ArrayData* ad) {
assertx(checkInvariants(ad));
return 0;
ssize_t PackedArray::IterLast(const ArrayData* ad) {
assertx(checkInvariants(ad));
return ad->m_size ? ad->m_size - 1 : 0;
ssize_t PackedArray::IterEnd(const ArrayData* ad) {
assertx(checkInvariants(ad));
return ad->m_size;
ssize_t PackedArray::IterAdvance(const ArrayData* ad, ssize_t pos) {
assertx(checkInvariants(ad));
if (pos < ad->m_size) {
++pos;
return pos;
ssize_t PackedArray::IterRewind(const ArrayData* ad, ssize_t pos) {
assertx(checkInvariants(ad));
if (pos > 0) {
return pos - 1;
return ad->m_size;
// Appends v. `copy' forces COW; `move' transfers the caller's reference to v
// (tvCopy) instead of duplicating it (tvDup), and releases adIn when a
// grow/copy replaced it and its refcount drops to zero.
ArrayData* PackedArray::AppendImpl(
ArrayData* adIn, TypedValue v, bool copy, bool move) {
assertx(checkInvariants(adIn));
assertx(v.m_type != KindOfUninit);
assertx(copy || adIn->notCyclic(v));
auto const ad = PrepareForInsert(adIn, copy);
if (move) {
if (ad != adIn && adIn->decReleaseCheck()) Release(adIn);
tvCopy(v, LvalUncheckedInt(ad, ad->m_size++));
} else {
tvDup(v, LvalUncheckedInt(ad, ad->m_size++));
return ad;
ArrayData* PackedArray::Append(ArrayData* adIn, TypedValue v) {
return AppendImpl(adIn, v, adIn->cowCheck());
ArrayData* PackedArray::AppendMove(ArrayData* adIn, TypedValue v) {
return AppendImpl(adIn, v, adIn->cowCheck(), true);
// Caller has proven exclusive ownership, so no COW check.
ArrayData* PackedArray::AppendInPlace(ArrayData* adIn, TypedValue v) {
assertx(!adIn->cowCheck());
return AppendImpl(adIn, v, false);
// Removes and returns (via `value') the last element; sets null on empty.
ArrayData* PackedArray::Pop(ArrayData* adIn, Variant& value) {
assertx(checkInvariants(adIn));
auto const ad = adIn->cowCheck() ? Copy(adIn) : adIn;
if (UNLIKELY(ad->m_size == 0)) {
value = uninit_null();
return ad;
// Copy the element into `value' (increfs) before shrinking + decref'ing,
// so the out-param keeps the value alive.
auto const size = ad->m_size - 1;
auto const tv = *LvalUncheckedInt(ad, size);
value = tvAsCVarRef(&tv);
ad->m_size = size;
tvDecRefGen(tv);
return ad;
// Inserts v at index 0, shifting all elements up by one. O(n): memmove in
// the TypedValue layout, a back-to-front element loop in the block layout.
ArrayData* PackedArray::Prepend(ArrayData* adIn, TypedValue v) {
assertx(checkInvariants(adIn));
auto const ad = PrepareForInsert(adIn, adIn->cowCheck());
auto const size = ad->m_size;
if constexpr (stores_typed_values) {
auto const data = PackedArray::entries(ad);
std::memmove(data + 1, data, sizeof *data * size);
} else {
for (uint32_t i = size; i > 0; --i) {
tvCopy(*LvalUncheckedInt(ad, i - 1), LvalUncheckedInt(ad, i));
tvDup(v, LvalUncheckedInt(ad, 0));
ad->m_size = size + 1;
return ad;
// Converts vec -> varray in place (after optional COW) by rewriting the
// HeaderKind; empty input short-circuits to the canonical static empty.
ArrayData* PackedArray::ToDVArray(ArrayData* adIn, bool copy) {
assertx(checkInvariants(adIn));
if (adIn->empty()) return ArrayData::CreateVArray();
auto const ad = copy ? Copy(adIn) : adIn;
ad->m_kind = HeaderKind::Packed;
if (RO::EvalArrayProvenance) arrprov::reassignTag(ad);
assertx(checkInvariants(ad));
return ad;
// Converse direction: varray -> vec. Also clears the legacy bit and any
// provenance tag, since vecs carry neither.
ArrayData* PackedArray::ToHackArr(ArrayData* adIn, bool copy) {
assertx(checkInvariants(adIn));
if (adIn->empty()) return ArrayData::CreateVec();
auto const ad = copy ? Copy(adIn) : adIn;
ad->m_kind = HeaderKind::Vec;
ad->setLegacyArrayInPlace(false);
if (RO::EvalArrayProvenance) arrprov::clearTag(ad);
assertx(checkInvariants(ad));
return ad;
// Promotes every element to its static ("eval scalar") form, in place.
void PackedArray::OnSetEvalScalar(ArrayData* ad) {
assertx(checkInvariants(ad));
auto const size = ad->m_size;
for (uint32_t i = 0; i < size; ++i) {
auto lval = LvalUncheckedInt(ad, i);
auto tv = *lval;
tvAsVariant(&tv).setEvalScalar();
tvCopy(tv, lval);
// Ksort ascending is a no-op: packed keys 0..n-1 are already sorted.
// The remaining by-key/by-callback sorts are unreachable for packed arrays.
void PackedArray::Ksort(ArrayData* ad, int /*flags*/, bool ascending) {
assertx(ascending);
assertx(ad->isVecKind() || ad->isVArray());
void PackedArray::Asort(ArrayData* ad, int, bool) {
always_assert(false);
bool PackedArray::Uksort(ArrayData* ad, const Variant&) {
always_assert(false);
bool PackedArray::Uasort(ArrayData* ad, const Variant&) {
always_assert(false);
// Builds an uncounted (APC-shareable) copy of `array', optionally with an
// APCTypedValue header allocated just before the ArrayData. `seen' enables
// sharing of already-converted multiply-referenced arrays.
ArrayData* PackedArray::MakeUncounted(ArrayData* array,
bool withApcTypedValue,
DataWalker::PointerMap* seen) {
// Reuse a previously converted uncounted copy if this shared array was
// already visited by the walker.
auto const updateSeen = seen && array->hasMultipleRefs();
if (updateSeen) {
auto it = seen->find(array);
assertx(it != seen->end());
if (auto const arr = static_cast<ArrayData*>(it->second)) {
if (arr->uncountedIncRef()) {
return arr;
assertx(checkInvariants(array));
assertx(!array->empty());
if (APCStats::IsCreated()) {
APCStats::getAPCStats().addAPCUncountedBlock();
auto const size = array->m_size;
auto const extra = withApcTypedValue ? sizeof(APCTypedValue) : 0;
auto const bytes = PackedArray::capacityToSizeBytes(size);
auto const sizeIndex = MemoryManager::size2Index(bytes);
assertx(sizeIndex <= PackedArray::MaxSizeIndex);
// The ArrayData sits `extra' bytes into the allocation so the optional
// APCTypedValue can precede it.
auto const mem = static_cast<char*>(uncounted_malloc(bytes + extra));
auto ad = reinterpret_cast<ArrayData*>(mem + extra);
ad->initHeader_16(
array->m_kind,
UncountedValue,
packSizeIndexAndAuxBits(sizeIndex, array->auxBits()) |
(withApcTypedValue ? ArrayData::kHasApcTv : 0)
ad->m_size = array->m_size;
ad->m_extra = array->m_extra;
// Do a raw copy without worrying about refcounts. Then, traverse the
// array and convert refcounted objects to their uncounted types.
memcpy16_inline(ad + 1, array + 1, bytes - sizeof(ArrayData));
for (uint32_t i = 0; i < size; i++) {
ConvertTvToUncounted(LvalUncheckedInt(ad, i), seen);
assertx(ad->kind() == array->kind());
assertx(ArrayData::dvArrayEqual(ad, array));
assertx(capacity(ad) >= size);
assertx(ad->m_size == size);
assertx(ad->m_extra == array->m_extra);
assertx(ad->isUncounted());
assertx(checkInvariants(ad));
if (updateSeen) (*seen)[array] = ad;
return tagArrProv(ad, array);
ALWAYS_INLINE
// Elementwise comparison of two vecs. strict=true uses tvSame (===),
// strict=false uses tvEqual (==). Identity and size-mismatch fast paths
// run before any element work; check_recursion_error guards deep/cyclic
// structures.
bool PackedArray::VecEqualHelper(const ArrayData* ad1, const ArrayData* ad2,
bool strict) {
assertx(checkInvariants(ad1));
assertx(checkInvariants(ad2));
assertx(ad1->isVecKind());
assertx(ad2->isVecKind());
if (ad1 == ad2) return true;
if (ad1->m_size != ad2->m_size) return false;
// Prevent circular referenced objects/arrays or deep ones.
check_recursion_error();
auto const size = ad1->m_size;
for (uint32_t i = 0; i < size; ++i) {
auto const elm1 = GetPosVal(ad1, i);
auto const elm2 = GetPosVal(ad2, i);
auto const cmp = strict ? tvSame(elm1, elm2) : tvEqual(elm1, elm2);
if (!cmp) return false;
return true;
// Public ==/!=/===/!== entry points, all thin wrappers over the helper.
bool PackedArray::VecEqual(const ArrayData* ad1, const ArrayData* ad2) {
return VecEqualHelper(ad1, ad2, false);
bool PackedArray::VecNotEqual(const ArrayData* ad1, const ArrayData* ad2) {
return !VecEqualHelper(ad1, ad2, false);
bool PackedArray::VecSame(const ArrayData* ad1, const ArrayData* ad2) {
return VecEqualHelper(ad1, ad2, true);
bool PackedArray::VecNotSame(const ArrayData* ad1, const ArrayData* ad2) {
return !VecEqualHelper(ad1, ad2, true);
977 //////////////////////////////////////////////////////////////////////
// Byte offsets (from the ArrayData base) of element i's type and data words,
// for either storage layout. Used by the JIT to address elements directly.
PackedArray::EntryOffset PackedArray::entryOffset(size_t i) {
if constexpr (stores_typed_values) {
auto const base = sizeof(ArrayData) + i * sizeof(TypedValue);
return {ptrdiff_t(base + offsetof(TypedValue, m_type)),
ptrdiff_t(base + offsetof(TypedValue, m_data))};
return PackedBlock::EntryOffset(i);
// Inverse mapping: element index for an interior pointer into ad's payload,
// or -1 when the pointer doesn't address a live element.
int64_t PackedArray::pointerToIndex(const ArrayData* ad, const void* ptr) {
const auto index = [&]() {
if constexpr (stores_typed_values) {
const auto tv = reinterpret_cast<const TypedValue*>(ptr);
return tv - PackedArray::entries(const_cast<ArrayData*>(ad));
} else {
return PackedBlock::PointerToIndex(ad, ptr);
}();
return 0 <= index && index < ad->m_size ? index : -1;
1000 //////////////////////////////////////////////////////////////////////