unrestrict-layout part 5: Eliminate set in place
hphp/runtime/base/packed-array.cpp
1 /*
2 +----------------------------------------------------------------------+
3 | HipHop for PHP |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
16 #include "hphp/runtime/base/packed-array.h"
18 #include <algorithm>
19 #include <cstdlib>
20 #include <cstring>
22 #include <folly/Format.h>
23 #include <folly/Likely.h>
25 #include "hphp/runtime/base/apc-array.h"
26 #include "hphp/runtime/base/apc-stats.h"
27 #include "hphp/runtime/base/array-init.h"
28 #include "hphp/runtime/base/array-helpers.h"
29 #include "hphp/runtime/base/tv-val.h"
30 #include "hphp/runtime/base/mixed-array.h"
31 #include "hphp/runtime/base/runtime-error.h"
32 #include "hphp/runtime/base/request-info.h"
33 #include "hphp/runtime/base/tv-comparisons.h"
34 #include "hphp/runtime/base/tv-mutate.h"
35 #include "hphp/runtime/base/tv-refcount.h"
36 #include "hphp/runtime/base/tv-type.h"
37 #include "hphp/runtime/base/tv-variant.h"
39 #include "hphp/runtime/base/mixed-array-defs.h"
40 #include "hphp/runtime/base/packed-array-defs.h"
42 namespace HPHP {
44 //////////////////////////////////////////////////////////////////////
46 std::aligned_storage<sizeof(ArrayData), 16>::type s_theEmptyVecArray;
47 std::aligned_storage<sizeof(ArrayData), 16>::type s_theEmptyVArray;
49 struct PackedArray::VecInitializer {
50 VecInitializer() {
51 auto const ad = reinterpret_cast<ArrayData*>(&s_theEmptyVecArray);
52 ad->m_sizeAndPos = 0;
53 ad->initHeader_16(
54 HeaderKind::VecArray,
55 StaticValue,
56 packSizeIndexAndAuxBits(0, ArrayData::kNotDVArray)
58 assertx(checkInvariants(ad));
61 PackedArray::VecInitializer PackedArray::s_vec_initializer;
63 struct PackedArray::VArrayInitializer {
64 VArrayInitializer() {
65 auto const ad = reinterpret_cast<ArrayData*>(&s_theEmptyVArray);
66 ad->m_sizeAndPos = 0;
67 ad->initHeader_16(
68 HeaderKind::Packed,
69 StaticValue,
70 packSizeIndexAndAuxBits(0, ArrayData::kVArray)
72 assertx(RuntimeOption::EvalHackArrDVArrs || checkInvariants(ad));
75 PackedArray::VArrayInitializer PackedArray::s_varr_initializer;
77 //////////////////////////////////////////////////////////////////////
79 namespace {
81 inline ArrayData* alloc_packed_static(size_t cap) {
82 auto size = sizeof(ArrayData) + cap * sizeof(TypedValue);
83 auto ret = RuntimeOption::EvalLowStaticArrays ? low_malloc(size)
84 : uncounted_malloc(size);
85 return static_cast<ArrayData*>(ret);
90 bool PackedArray::checkInvariants(const ArrayData* arr) {
91 assertx(arr->hasVanillaPackedLayout());
92 assertx(arr->checkCountZ());
93 assertx(arr->m_size <= MixedArray::MaxSize);
94 assertx(arr->m_size <= capacity(arr));
95 assertx(arr->m_pos >= 0 && arr->m_pos <= arr->m_size);
96 assertx(!arr->isPackedKind() || !arr->isDArray());
97 assertx(!arr->isVecArrayKind() || arr->isNotDVArray());
98 assertx(!RuntimeOption::EvalHackArrDVArrs || arr->isNotDVArray());
99 assertx(arrprov::arrayWantsTag(arr) || !arr->hasProvenanceData());
100 static_assert(ArrayData::kPackedKind == 0, "");
101 // Note that m_pos < m_size is not an invariant, because an array
102 // that grows will only adjust m_size to zero on the old array.
104 // This loop is too slow for normal use, but can be enabled to debug
105 // packed arrays.
106 if (false) {
107 for (uint32_t i = 0; i < arr->m_size; ++i) {
108 auto const DEBUG_ONLY rval = RvalPos(arr, i);
109 assertx(type(rval) != KindOfUninit);
110 assertx(tvIsPlausible(*rval));
113 return true;
116 //////////////////////////////////////////////////////////////////////
118 ALWAYS_INLINE
119 MixedArray* PackedArray::ToMixedHeader(const ArrayData* old,
120 size_t neededSize) {
121 assertx(checkInvariants(old));
123 auto const aux =
124 MixedArrayKeys::packIntsForAux() |
125 (old->isVArray() ? ArrayData::kDArray : ArrayData::kNotDVArray);
127 auto const oldSize = old->m_size;
128 auto const scale = MixedArray::computeScaleFromSize(neededSize);
129 auto const ad = MixedArray::reqAlloc(scale);
130 ad->m_sizeAndPos = oldSize | int64_t{old->m_pos} << 32;
131 ad->initHeader_16(HeaderKind::Mixed, OneReference, aux);
132 ad->m_scale_used = scale | uint64_t{oldSize} << 32; // used=oldSize
133 ad->m_nextKI = oldSize;
135 assertx(ad->m_size == oldSize);
136 assertx(ad->m_pos == old->m_pos);
137 assertx(ad->kind() == ArrayData::kMixedKind);
138 assertx(ad->isDArray() == old->isVArray());
139 assertx(ad->hasExactlyOneRef());
140 assertx(ad->m_used == oldSize);
141 assertx(ad->m_scale == scale);
142 assertx(ad->m_nextKI == oldSize);
143 // Can't checkInvariants yet, since we haven't populated the payload.
144 return ad;
148 * Converts a packed array to mixed, leaving the packed array in an
149 * empty state. You need ToMixedCopy in cases where the old array
150 * needs to remain unmodified (usually if `copy' is true).
152 * The returned array is mixed, and is guaranteed not to be isFull().
153 * (Note: only unset can call ToMixed when we aren't about to insert.)
155 MixedArray* PackedArray::ToMixed(ArrayData* old) {
156 auto const oldSize = old->m_size;
157 auto const ad = ToMixedHeader(old, oldSize + 1);
158 auto const mask = ad->mask();
159 auto dstData = ad->data();
161 auto const dstHash = ad->initHash(ad->scale());
162 for (uint32_t i = 0; i < oldSize; ++i) {
163 auto h = hash_int64(i);
164 *ad->findForNewInsert(dstHash, mask, h) = i;
165 dstData->setIntKey(i, h);
166 tvCopy(*RvalPos(old, i), dstData->data);
167 ++dstData;
169 old->m_sizeAndPos = 0;
171 assertx(ad->checkInvariants());
172 assertx(!ad->isFull());
173 assertx(ad->hasExactlyOneRef());
174 return ad;
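// Illustrative sketch (not part of the original implementation): the usual
// trigger for ToMixed/ToMixedCopy is a string-keyed write on a packed,
// non-vec array, via the entry points defined later in this file. The
// String local is an assumed convenience wrapper around StringData.
//
//   ArrayData* a = PackedArray::MakeReserve(4);            // packed layout
//   a = PackedArray::Append(a, make_tv<KindOfInt64>(1));
//   String key("name");
//   a = PackedArray::SetStrMove(a, key.get(), make_tv<KindOfInt64>(2));
//   // `a' now points at a MixedArray; the packed layout has no string keys.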
178 * Convert a packed array to mixed, without moving the elements out of
179 * the old packed array. This effectively performs a Copy at the same
180 * time as converting to mixed. The returned mixed array is
181 * guaranteed not to be full.
183 MixedArray* PackedArray::ToMixedCopy(const ArrayData* old) {
184 assertx(checkInvariants(old));
185 auto ad = PackedArray::ToMixedCopyReserve(old, old->m_size + 1);
186 assertx(!ad->isFull());
187 return ad;
191 * Convert to mixed, reserving space for at least `neededSize' elems.
192 * `neededSize' must be at least old->size(), and may be equal to it.
194 MixedArray* PackedArray::ToMixedCopyReserve(const ArrayData* old,
195 size_t neededSize) {
196 assertx(neededSize >= old->m_size);
197 auto const ad = ToMixedHeader(old, neededSize);
198 auto const oldSize = old->m_size;
199 auto const mask = ad->mask();
200 auto dstData = ad->data();
202 auto const dstHash = ad->initHash(ad->scale());
203 for (uint32_t i = 0; i < oldSize; ++i) {
204 auto const h = hash_int64(i);
205 *ad->findForNewInsert(dstHash, mask, h) = i;
206 dstData->setIntKey(i, h);
207 tvDup(*RvalPos(old, i), dstData->data);
208 ++dstData;
211 assertx(ad->checkInvariants());
212 assertx(ad->hasExactlyOneRef());
213 return ad;
216 NEVER_INLINE
217 ArrayData* PackedArray::Grow(ArrayData* adIn, bool copy) {
218 assertx(checkInvariants(adIn));
219 assertx(adIn->m_size == capacity(adIn));
221 auto const sizeIndex = sizeClass(adIn) + kSizeClassesPerDoubling;
222 if (UNLIKELY(sizeIndex > MaxSizeIndex)) return nullptr;
223 auto ad = static_cast<ArrayData*>(tl_heap->objMallocIndex(sizeIndex));
225 if (copy) {
226 // CopyPackedHelper will copy the header and m_sizeAndPos; it cannot fail.
227 // All we have to do afterwards is fix the capacity and refcount on the
228 // copy; it's easiest to do that by reinitializing the whole header.
230 CopyPackedHelper(adIn, ad);
231 ad->initHeader_16(
232 adIn->m_kind,
233 OneReference,
234 packSizeIndexAndAuxBits(sizeIndex, adIn->auxBits())
237 assertx(ad->m_size == adIn->m_size);
238 assertx(ad->m_pos == adIn->m_pos);
239 } else {
240 // Copy everything from `adIn' to `ad', including header and m_sizeAndPos
241 static_assert(sizeof(ArrayData) == 16 && sizeof(TypedValue) == 16, "");
242 static_assert(PackedArray::stores_typed_values, "");
243 memcpy16_inline(ad, adIn, (adIn->m_size + 1) * sizeof(TypedValue));
244 ad->initHeader_16(
245 adIn->m_kind,
246 OneReference,
247 packSizeIndexAndAuxBits(sizeIndex, adIn->auxBits())
250 assertx(ad->m_size == adIn->m_size);
251 assertx(ad->m_pos == adIn->m_pos);
252 adIn->m_sizeAndPos = 0; // old is a zombie now
255 ad->m_aux16 &= ~ArrayData::kHasProvenanceData;
257 assertx(ad->kind() == adIn->kind());
258 assertx(ad->dvArray() == adIn->dvArray());
259 assertx(capacity(ad) > capacity(adIn));
260 assertx(ad->hasExactlyOneRef());
261 assertx(checkInvariants(ad));
262 return tagArrProv(ad, adIn);
265 ALWAYS_INLINE
266 ArrayData* PackedArray::PrepareForInsert(ArrayData* adIn, bool copy) {
267 assertx(checkInvariants(adIn));
268 if (adIn->m_size == capacity(adIn)) return Grow(adIn, copy);
269 if (copy) return Copy(adIn);
270 return adIn;
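// Illustrative sketch (not part of the original implementation):
// PrepareForInsert folds the two reasons an insert may need a fresh
// allocation into one call, so a typical mutator body mirrors the
// AppendImpl definition further down:
//
//   auto const ad = PrepareForInsert(adIn, adIn->cowCheck());
//   tvDup(v, LvalUncheckedInt(ad, ad->m_size++));
//   return ad;   // may differ from adIn after a Grow or Copy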
273 //////////////////////////////////////////////////////////////////////
275 /* This helper copies everything from adIn to ad, including the header
276 * (capacity, kind, and refcount) and m_sizeAndPos. It then increfs the
277 * contents, if needed.
279 * It always succeeds; callers need only patch up the copy's header
280 * (refcount, size index, kind) afterwards as appropriate.
286 ALWAYS_INLINE
287 void PackedArray::CopyPackedHelper(const ArrayData* adIn, ArrayData* ad) {
288 // Copy everything from `adIn' to `ad', including refcount, kind and cap
289 auto const size = adIn->m_size;
290 static_assert(sizeof(ArrayData) == 16 && sizeof(TypedValue) == 16, "");
291 static_assert(PackedArray::stores_typed_values, "");
292 memcpy16_inline(ad, adIn, (size + 1) * 16);
293 // Clear the provenance bit if we had one set
294 ad->m_aux16 &= ~ArrayData::kHasProvenanceData;
296 // Copy counted types correctly
297 for (uint32_t i = 0; i < size; ++i) {
298 auto const elm = LvalUncheckedInt(ad, i);
299 tvIncRefGen(*elm);
303 NEVER_INLINE
304 ArrayData* PackedArray::Copy(const ArrayData* adIn) {
305 assertx(checkInvariants(adIn));
307 auto ad = static_cast<ArrayData*>(tl_heap->objMallocIndex(sizeClass(adIn)));
309 // CopyPackedHelper will copy the header (including capacity and kind), and
310 // m_sizeAndPos. All we have to do afterwards is fix the refcount on the
311 // copy.
312 CopyPackedHelper(adIn, ad);
313 ad->m_count = OneReference;
315 assertx(ad->kind() == adIn->kind());
316 assertx(ad->isLegacyArray() == adIn->isLegacyArray());
317 assertx(capacity(ad) == capacity(adIn));
318 assertx(ad->m_size == adIn->m_size);
319 assertx(ad->m_pos == adIn->m_pos);
320 assertx(ad->hasExactlyOneRef());
321 assertx(checkInvariants(ad));
322 return tagArrProv(ad, adIn);
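// Illustrative sketch (not part of the original implementation): Copy is
// the copy-on-write step. Mutators in this file only call it when
// cowCheck() reports that the array is shared, e.g.:
//
//   auto const ad = adIn->cowCheck() ? PackedArray::Copy(adIn) : adIn;
//   // mutate `ad'; other owners of `adIn' never observe the change.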
325 ArrayData* PackedArray::CopyStatic(const ArrayData* adIn) {
326 assertx(checkInvariants(adIn));
328 auto const sizeIndex = capacityToSizeIndex(adIn->m_size);
329 auto ad = alloc_packed_static(adIn->m_size);
330 // CopyPackedHelper will copy the header and m_sizeAndPos. All we have to do
331 // afterwards is fix the capacity and refcount on the copy; it's easiest to do
332 // that by reinitializing the whole header.
333 CopyPackedHelper(adIn, ad);
334 ad->initHeader_16(
335 adIn->m_kind,
336 StaticValue,
337 packSizeIndexAndAuxBits(sizeIndex, adIn->auxBits())
340 if (RuntimeOption::EvalArrayProvenance) {
341 assertx(!ad->hasProvenanceData());
342 auto const tag = arrprov::getTag(adIn);
343 if (tag.valid()) {
344 arrprov::setTag(ad, tag);
348 assertx(ad->kind() == adIn->kind());
349 assertx(ad->dvArray() == adIn->dvArray());
350 assertx(!arrprov::arrayWantsTag(ad) ||
351 arrprov::getTag(ad) == arrprov::getTag(adIn));
352 assertx(capacity(ad) >= adIn->m_size);
353 assertx(ad->m_size == adIn->m_size);
354 assertx(ad->m_pos == adIn->m_pos);
355 assertx(ad->isStatic());
356 assertx(checkInvariants(ad));
357 return ad;
360 ArrayData* PackedArray::ConvertStatic(const ArrayData* arr) {
361 assertx(arr->isVectorData());
362 assertx(!RuntimeOption::EvalHackArrDVArrs || arr->isNotDVArray());
363 assertx(!arr->isDArray());
365 auto const sizeIndex = capacityToSizeIndex(arr->m_size);
366 auto ad = alloc_packed_static(arr->m_size);
367 ad->initHeader_16(
368 HeaderKind::Packed,
369 StaticValue,
370 packSizeIndexAndAuxBits(sizeIndex, arr->auxBits())
372 ad->m_sizeAndPos = arr->m_sizeAndPos;
374 uint32_t i = 0;
375 auto pos_limit = arr->iter_end();
376 for (auto pos = arr->iter_begin(); pos != pos_limit;
377 pos = arr->iter_advance(pos), ++i) {
378 tvDup(arr->atPos(pos), LvalUncheckedInt(ad, i));
381 if (RuntimeOption::EvalArrayProvenance) {
382 assertx(!ad->hasProvenanceData());
383 auto const tag = arrprov::getTag(ad);
384 if (tag.valid()) {
385 arrprov::setTag(ad, tag);
389 assertx(ad->isPackedKind());
390 assertx(capacity(ad) >= arr->m_size);
391 assertx(ad->dvArray() == arr->dvArray());
392 assertx(!arrprov::arrayWantsTag(ad) ||
393 !!arrprov::getTag(ad) == !!arrprov::getTag(arr));
394 assertx(ad->m_size == arr->m_size);
395 assertx(ad->m_pos == arr->m_pos);
396 assertx(ad->isStatic());
397 assertx(checkInvariants(ad));
398 return ad;
401 /* This helper allocates an ArrayData and initializes the header (including
402 * capacity, kind, and refcount). The caller is responsible for initializing
403 * m_sizeAndPos, and initializing array entries (if any).
405 ALWAYS_INLINE
406 ArrayData* PackedArray::MakeReserveImpl(uint32_t cap,
407 HeaderKind hk,
408 ArrayData::DVArray dvarray) {
409 assertx(!RuntimeOption::EvalHackArrDVArrs ||
410 dvarray == ArrayData::kNotDVArray);
411 auto const sizeIndex = capacityToSizeIndex(cap);
412 auto ad = static_cast<ArrayData*>(tl_heap->objMallocIndex(sizeIndex));
413 ad->initHeader_16(
415 OneReference,
416 packSizeIndexAndAuxBits(sizeIndex, dvarray)
419 assertx(ad->m_kind == hk);
420 assertx(ad->dvArray() == dvarray);
421 assertx(capacity(ad) >= cap);
422 assertx(ad->hasExactlyOneRef());
423 return ad;
426 ArrayData* PackedArray::MakeReserve(uint32_t capacity) {
427 auto ad =
428 MakeReserveImpl(capacity, HeaderKind::Packed, ArrayData::kNotDVArray);
429 ad->m_sizeAndPos = 0;
430 assertx(ad->isPackedKind());
431 assertx(ad->isNotDVArray());
432 assertx(ad->m_size == 0);
433 assertx(ad->m_pos == 0);
434 assertx(checkInvariants(ad));
435 return ad;
438 ArrayData* PackedArray::MakeReserveVArray(uint32_t capacity) {
439 auto ad = MakeReserveImpl(capacity, HeaderKind::Packed, ArrayData::kVArray);
440 ad->m_sizeAndPos = 0;
441 assertx(ad->isPackedKind());
442 assertx(ad->isVArray());
443 assertx(ad->m_size == 0);
444 assertx(ad->m_pos == 0);
445 assertx(checkInvariants(ad));
446 return tagArrProv(ad);
449 ArrayData* PackedArray::MakeReserveVec(uint32_t capacity) {
450 auto ad =
451 MakeReserveImpl(capacity, HeaderKind::VecArray, ArrayData::kNotDVArray);
452 ad->m_sizeAndPos = 0;
453 assertx(ad->isVecArrayKind());
454 assertx(ad->m_size == 0);
455 assertx(ad->m_pos == 0);
456 assertx(checkInvariants(ad));
457 return tagArrProv(ad);
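// Illustrative sketch (not part of the original implementation): building
// a small vec with the reserve + append helpers. Append below uses tvDup,
// so the caller keeps its own reference to each appended value.
//
//   ArrayData* vec = PackedArray::MakeReserveVec(3);
//   for (int64_t i = 0; i < 3; ++i) {
//     vec = PackedArray::Append(vec, make_tv<KindOfInt64>(i * i));
//   }
//   assertx(vec->size() == 3);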
460 template<bool reverse>
461 ALWAYS_INLINE
462 ArrayData* PackedArray::MakePackedImpl(uint32_t size,
463 const TypedValue* values,
464 HeaderKind hk,
465 ArrayData::DVArray dv) {
466 assertx(size > 0);
467 auto ad = MakeReserveImpl(size, hk, dv);
468 ad->m_sizeAndPos = size; // pos = 0
470 // Append values by moving; this function takes ownership of them.
471 if (reverse) {
472 uint32_t i = size - 1;
473 for (auto end = values + size; values < end; ++values, --i) {
474 tvCopy(*values, LvalUncheckedInt(ad, i));
476 } else {
477 if (debug) {
478 for (uint32_t i = 0; i < size; ++i) {
479 assertx(tvIsPlausible(*(values + i)));
482 memcpy16_inline(packedData(ad), values, sizeof(TypedValue) * size);
485 assertx(ad->m_size == size);
486 assertx(ad->m_pos == 0);
487 assertx(checkInvariants(ad));
488 return ad;
491 ArrayData* PackedArray::MakePacked(uint32_t size, const TypedValue* values) {
492 // Values are in reverse order since they come from the stack, which
493 // grows down.
494 auto ad = MakePackedImpl<true>(size, values, HeaderKind::Packed,
495 ArrayData::kNotDVArray);
496 assertx(ad->isPackedKind());
497 assertx(ad->isNotDVArray());
498 return ad;
501 ArrayData* PackedArray::MakeVArray(uint32_t size, const TypedValue* values) {
502 // Values are in reverse order since they come from the stack, which
503 // grows down.
504 assertx(!RuntimeOption::EvalHackArrDVArrs);
505 auto ad = MakePackedImpl<true>(size, values, HeaderKind::Packed,
506 ArrayData::kVArray);
507 assertx(ad->isPackedKind());
508 assertx(ad->isVArray());
509 return tagArrProv(ad);
512 ArrayData* PackedArray::MakeVec(uint32_t size, const TypedValue* values) {
513 // Values are in reverse order since they come from the stack, which
514 // grows down.
515 auto ad = MakePackedImpl<true>(size, values, HeaderKind::VecArray,
516 ArrayData::kNotDVArray);
517 assertx(ad->isVecArrayKind());
518 return tagArrProv(ad);
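// Illustrative sketch (not part of the original implementation): because
// the VM stack grows down, `values' arrives with the most recently pushed
// element first, and the reverse=true copy restores source order.
//
//   TypedValue stack[] = { make_tv<KindOfInt64>(30),   // top of stack
//                          make_tv<KindOfInt64>(20),
//                          make_tv<KindOfInt64>(10) }; // pushed first
//   auto const ad = PackedArray::MakeVec(3, stack);
//   // index 0 holds 10, index 1 holds 20, index 2 holds 30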
521 ArrayData* PackedArray::MakePackedNatural(uint32_t size, const TypedValue* values) {
522 auto ad = MakePackedImpl<false>(size, values, HeaderKind::Packed,
523 ArrayData::kNotDVArray);
524 assertx(ad->isPackedKind());
525 assertx(ad->isNotDVArray());
526 return ad;
529 ArrayData* PackedArray::MakeVArrayNatural(uint32_t size, const TypedValue* values) {
530 if (RuntimeOption::EvalHackArrDVArrs) return MakeVecNatural(size, values);
532 auto ad = MakePackedImpl<false>(size, values, HeaderKind::Packed,
533 ArrayData::kVArray);
534 assertx(ad->isPackedKind());
535 assertx(ad->isVArray());
536 return tagArrProv(ad);
539 ArrayData* PackedArray::MakeVecNatural(uint32_t size, const TypedValue* values) {
540 auto ad = MakePackedImpl<false>(size, values, HeaderKind::VecArray,
541 ArrayData::kNotDVArray);
542 assertx(ad->isVecArrayKind());
543 return tagArrProv(ad);
546 ArrayData* PackedArray::MakeUninitialized(uint32_t size) {
547 auto ad = MakeReserveImpl(size, HeaderKind::Packed, ArrayData::kNotDVArray);
548 ad->m_sizeAndPos = size; // pos = 0
549 assertx(ad->isPackedKind());
550 assertx(ad->isNotDVArray());
551 assertx(ad->m_size == size);
552 assertx(ad->m_pos == 0);
553 assertx(checkInvariants(ad));
554 return ad;
557 ArrayData* PackedArray::MakeUninitializedVArray(uint32_t size) {
558 assertx(!RuntimeOption::EvalHackArrDVArrs);
559 auto ad = MakeReserveImpl(size, HeaderKind::Packed, ArrayData::kVArray);
560 ad->m_sizeAndPos = size; // pos = 0
561 assertx(ad->isPackedKind());
562 assertx(ad->isVArray());
563 assertx(ad->m_size == size);
564 assertx(ad->m_pos == 0);
565 assertx(checkInvariants(ad));
566 return tagArrProv(ad);
569 ArrayData* PackedArray::MakeUninitializedVec(uint32_t size) {
570 auto ad = MakeReserveImpl(size, HeaderKind::VecArray, ArrayData::kNotDVArray);
571 ad->m_sizeAndPos = size; // pos = 0
572 assertx(ad->isVecArrayKind());
573 assertx(ad->m_size == size);
574 assertx(ad->m_pos == 0);
575 assertx(checkInvariants(ad));
576 return tagArrProv(ad);
579 ArrayData* PackedArray::MakeVecFromAPC(const APCArray* apc) {
580 assertx(apc->isVec());
581 auto const apcSize = apc->size();
582 VecArrayInit init{apcSize};
583 for (uint32_t i = 0; i < apcSize; ++i) {
584 init.append(apc->getValue(i)->toLocal());
586 auto const ad = init.create();
587 return tagArrProv(ad, apc);
590 ArrayData* PackedArray::MakeVArrayFromAPC(const APCArray* apc) {
591 assertx(!RuntimeOption::EvalHackArrDVArrs);
592 assertx(apc->isVArray());
593 auto const apcSize = apc->size();
594 VArrayInit init{apcSize};
595 for (uint32_t i = 0; i < apcSize; ++i) {
596 init.append(apc->getValue(i)->toLocal());
598 auto const ad = init.create();
599 return tagArrProv(ad, apc);
602 void PackedArray::Release(ArrayData* ad) {
603 ad->fixCountForRelease();
604 assertx(checkInvariants(ad));
605 assertx(ad->isRefCounted());
606 assertx(ad->hasExactlyOneRef());
608 if (RuntimeOption::EvalArrayProvenance) {
609 arrprov::clearTag(ad);
611 for (uint32_t i = 0; i < ad->m_size; ++i) {
612 tvDecRefGen(LvalUncheckedInt(ad, i));
614 tl_heap->objFreeIndex(ad, sizeClass(ad));
615 AARCH64_WALKABLE_FRAME();
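// Illustrative sketch (not part of the original implementation): Release
// is only reached once the last reference goes away; the call-site pattern
// used throughout this file is:
//
//   if (ad->decReleaseCheck()) PackedArray::Release(ad);
//
// which decrefs every element and then returns the block to the request
// heap at its recorded size class.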
618 NEVER_INLINE
619 void PackedArray::ReleaseUncounted(ArrayData* ad) {
620 assertx(checkInvariants(ad));
621 if (!ad->uncountedDecRef()) return;
623 if (RuntimeOption::EvalArrayProvenance) {
624 arrprov::clearTag(ad);
626 for (uint32_t i = 0; i < ad->m_size; ++i) {
627 ReleaseUncountedTv(LvalUncheckedInt(ad, i));
630 if (APCStats::IsCreated()) {
631 APCStats::getAPCStats().removeAPCUncountedBlock();
634 static_assert(PackedArray::stores_typed_values, "");
635 auto const extra = (ad->hasApcTv() ? sizeof(APCTypedValue) : 0);
636 auto const allocSize = extra + sizeof(PackedArray) +
637 ad->m_size * sizeof(TypedValue);
638 uncounted_sized_free(reinterpret_cast<char*>(ad) - extra, allocSize);
641 ////////////////////////////////////////////////////////////////////////////////
643 tv_rval PackedArray::NvGetInt(const ArrayData* ad, int64_t k) {
644 assertx(checkInvariants(ad));
645 return LIKELY(size_t(k) < ad->m_size) ? RvalPos(ad, k) : nullptr;
648 tv_rval
649 PackedArray::NvGetStr(const ArrayData* ad, const StringData* /*s*/) {
650 assertx(checkInvariants(ad));
651 return nullptr;
654 ssize_t PackedArray::NvGetIntPos(const ArrayData* ad, int64_t k) {
655 assertx(checkInvariants(ad));
656 return LIKELY(size_t(k) < ad->m_size) ? k : ad->m_size;
659 ssize_t PackedArray::NvGetStrPos(const ArrayData* ad, const StringData* k) {
660 assertx(checkInvariants(ad));
661 return ad->m_size;
664 tv_rval PackedArray::NvTryGetIntVec(const ArrayData* ad, int64_t k) {
665 assertx(checkInvariants(ad));
666 assertx(ad->isVecArrayKind());
667 if (LIKELY(size_t(k) < ad->m_size)) return RvalPos(ad, k);
668 throwOOBArrayKeyException(k, ad);
671 tv_rval PackedArray::NvTryGetStrVec(const ArrayData* ad, const StringData* s) {
672 assertx(checkInvariants(ad));
673 assertx(ad->isVecArrayKind());
674 throwInvalidArrayKeyException(s, ad);
677 TypedValue PackedArray::NvGetKey(const ArrayData* ad, ssize_t pos) {
678 assertx(checkInvariants(ad));
679 assertx(pos != ad->m_size);
680 return make_tv<KindOfInt64>(pos);
683 size_t PackedArray::Vsize(const ArrayData*) {
684 // PackedArray always has a valid m_size so it's an error to get here.
685 always_assert(false);
688 tv_rval PackedArray::RvalPos(const ArrayData* ad, ssize_t pos) {
689 assertx(checkInvariants(ad));
690 assertx(pos < ad->m_size);
691 return LvalUncheckedInt(const_cast<ArrayData*>(ad), pos);
694 bool PackedArray::ExistsInt(const ArrayData* ad, int64_t k) {
695 assertx(checkInvariants(ad));
696 return size_t(k) < ad->m_size;
699 bool PackedArray::ExistsStr(const ArrayData* ad, const StringData* /*s*/) {
700 assertx(checkInvariants(ad));
701 return false;
704 ///////////////////////////////////////////////////////////////////////////////
706 namespace {
708 template<typename FoundFn, typename AppendFn, typename PromotedFn>
709 auto MutableOpInt(ArrayData* adIn, int64_t k, bool copy,
710 FoundFn found, AppendFn append, PromotedFn promoted) {
711 assertx(PackedArray::checkInvariants(adIn));
712 assertx(adIn->isPackedKind());
714 if (LIKELY(size_t(k) < adIn->getSize())) {
715 auto const ad = copy ? PackedArray::Copy(adIn) : adIn;
716 return found(ad);
719 if (size_t(k) == adIn->getSize()) {
720 if (UNLIKELY(RuntimeOption::EvalHackArrCompatCheckImplicitVarrayAppend) &&
721 adIn->isVArray()) {
722 raise_hackarr_compat_notice("Implicit append to varray");
724 return append();
727 if (UNLIKELY(RuntimeOption::EvalHackArrCompatCheckVarrayPromote) &&
728 adIn->isVArray()) {
729 raise_hackarr_compat_notice(
730 folly::sformat("varray promoting to darray: out of bounds key {}", k)
734 auto const mixed = copy ? PackedArray::ToMixedCopy(adIn)
735 : PackedArray::ToMixed(adIn);
736 return promoted(mixed);
739 template <typename PromotedFn>
740 auto MutableOpStr(ArrayData* adIn, StringData* /*k*/, bool copy,
741 PromotedFn promoted) {
742 assertx(PackedArray::checkInvariants(adIn));
743 assertx(adIn->isPackedKind());
745 if (UNLIKELY(RuntimeOption::EvalHackArrCompatCheckVarrayPromote) &&
746 adIn->isVArray()) {
747 raise_hackarr_compat_notice(
748 "varray promoting to darray: invalid key: expected int, got string"
752 auto const mixed = copy ? PackedArray::ToMixedCopy(adIn)
753 : PackedArray::ToMixed(adIn);
754 return promoted(mixed);
757 template<typename FoundFn>
758 auto MutableOpIntVec(ArrayData* adIn, int64_t k, bool copy, FoundFn found) {
759 assertx(PackedArray::checkInvariants(adIn));
760 assertx(adIn->isVecArrayKind());
762 if (UNLIKELY(size_t(k) >= adIn->getSize())) {
763 throwOOBArrayKeyException(k, adIn);
765 auto const ad = copy ? PackedArray::Copy(adIn) : adIn;
766 return found(ad);
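// Illustrative summary (not part of the original implementation): the
// MutableOpInt helper above routes every int-keyed mutation of a packed,
// non-vec array into one of three continuations:
//
//   k <  size   -> found(ad)       in-place (or COW-copied) packed write
//   k == size   -> append()        stays packed; may raise a varray notice
//   k >  size   -> promoted(mixed) escalates to MixedArray (a varray
//                                  becomes a darray)
//
// MutableOpIntVec is the vec counterpart and simply throws for any
// out-of-bounds key instead of appending or promoting.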
771 arr_lval PackedArray::LvalInt(ArrayData* adIn, int64_t k, bool copy) {
772 return MutableOpInt(adIn, k, copy,
773 [&] (ArrayData* ad) { return arr_lval { ad, LvalUncheckedInt(ad, k) }; },
774 []() -> arr_lval { throwMissingElementException("Lval"); },
775 // TODO(#2606310): Make use of our knowledge that the key is missing.
776 [&] (MixedArray* mixed) { return mixed->addLvalImpl<true>(k); }
780 arr_lval PackedArray::LvalIntVec(ArrayData* adIn, int64_t k, bool copy) {
781 return MutableOpIntVec(adIn, k, copy,
782 [&] (ArrayData* ad) { return arr_lval { ad, LvalUncheckedInt(ad, k) }; }
786 arr_lval PackedArray::LvalSilentInt(ArrayData* adIn, int64_t k, bool copy) {
787 assertx(checkInvariants(adIn));
788 if (UNLIKELY(size_t(k) >= adIn->m_size)) return {adIn, nullptr};
789 auto const ad = copy ? Copy(adIn) : adIn;
790 return arr_lval { ad, LvalUncheckedInt(ad, k) };
793 tv_lval PackedArray::LvalUncheckedInt(ArrayData* ad, int64_t k) {
794 // NOTE: We cannot check that k is less than the array's length here, because
795 // the vector extension allocates the array and uses this method to fill it.
796 assertx(size_t(k) < PackedArray::capacity(ad));
797 return &packedData(ad)[k];
800 arr_lval PackedArray::LvalStr(ArrayData* adIn, StringData* k, bool copy) {
801 return MutableOpStr(adIn, k, copy,
802 // TODO(#2606310): Make use of our knowledge that the key is missing.
803 [&] (MixedArray* mixed) { return mixed->addLvalImpl<true>(k); }
807 arr_lval
808 PackedArray::LvalStrVec(ArrayData* adIn, StringData* key, bool) {
809 assertx(checkInvariants(adIn));
810 assertx(adIn->isVecArrayKind());
811 throwInvalidArrayKeyException(key, adIn);
814 arr_lval PackedArray::LvalSilentStr(ArrayData* ad, StringData* k, bool) {
815 return arr_lval { ad, nullptr };
818 arr_lval PackedArray::LvalForceNew(ArrayData* adIn, bool copy) {
819 assertx(checkInvariants(adIn));
820 auto const ad = PrepareForInsert(adIn, copy);
821 auto lval = LvalUncheckedInt(ad, ad->m_size++);
822 type(lval) = KindOfNull;
823 return arr_lval { ad, lval };
826 ArrayData* PackedArray::SetInt(ArrayData* adIn, int64_t k, TypedValue v) {
827 auto const copy = adIn->cowCheck();
828 return MutableOpInt(adIn, k, copy,
829 [&] (ArrayData* ad) { setElem(LvalUncheckedInt(ad, k), v); return ad; },
830 [&] { return AppendImpl(adIn, v, copy); },
831 [&] (MixedArray* mixed) { return mixed->addVal(k, v); }
835 ArrayData* PackedArray::SetIntMove(ArrayData* adIn, int64_t k, TypedValue v) {
836 auto done = false;
837 auto const copy = adIn->cowCheck();
838 auto const result = MutableOpInt(adIn, k, copy,
839 [&] (ArrayData* ad) {
840 assertx((adIn != ad) == copy);
841 if (copy && adIn->decReleaseCheck()) PackedArray::Release(adIn);
842 setElem(LvalUncheckedInt(ad, k), v, true);
843 done = true;
844 return ad;
846 [&] { return AppendImpl(adIn, v, copy); },
847 [&] (MixedArray* mixed) { return mixed->addVal(k, v); }
849 if (done) return result;
850 if (adIn != result && adIn->decReleaseCheck()) PackedArray::Release(adIn);
851 tvDecRefGen(v);
852 return result;
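// Illustrative sketch (not part of the original implementation): the
// *Move variants consume both the value and the caller's reference to
// `adIn', so a caller adopts the result and performs no decrefs of its
// own:
//
//   ad = PackedArray::SetIntMove(ad, 0, make_tv<KindOfInt64>(42));
//   // On the COW path the old `ad' was already released above.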
855 ArrayData* PackedArray::SetIntVec(ArrayData* adIn, int64_t k, TypedValue v) {
856 assertx(adIn->cowCheck() || adIn->notCyclic(v));
857 return MutableOpIntVec(adIn, k, adIn->cowCheck(),
858 [&] (ArrayData* ad) { setElem(LvalUncheckedInt(ad, k), v); return ad; }
862 ArrayData* PackedArray::SetIntMoveVec(ArrayData* adIn, int64_t k, TypedValue v) {
863 assertx(adIn->cowCheck() || adIn->notCyclic(v));
864 auto const copy = adIn->cowCheck();
865 return MutableOpIntVec(adIn, k, copy,
866 [&] (ArrayData* ad) {
867 assertx((adIn != ad) == copy);
868 if (copy && adIn->decReleaseCheck()) PackedArray::Release(adIn);
869 setElem(LvalUncheckedInt(ad, k), v, true);
870 return ad;
875 ArrayData* PackedArray::SetStr(ArrayData* adIn, StringData* k, TypedValue v) {
876 return MutableOpStr(adIn, k, adIn->cowCheck(),
877 [&] (MixedArray* mixed) { return mixed->addVal(k, v); }
881 ArrayData* PackedArray::SetStrMove(ArrayData* adIn, StringData* k, TypedValue v) {
882 auto const result = SetStr(adIn, k, v);
883 assertx(result != adIn);
884 if (adIn->decReleaseCheck()) PackedArray::Release(adIn);
885 tvDecRefGen(v);
886 return result;
889 ArrayData* PackedArray::SetStrVec(ArrayData* adIn, StringData* k, TypedValue v) {
890 assertx(checkInvariants(adIn));
891 assertx(adIn->isVecArrayKind());
892 throwInvalidArrayKeyException(k, adIn);
895 ///////////////////////////////////////////////////////////////////////////////
897 ArrayData* PackedArray::RemoveImpl(ArrayData* adIn, int64_t k, bool copy) {
898 assertx(checkInvariants(adIn));
899 assertx(adIn->isPackedKind());
900 if (size_t(k) < adIn->m_size) {
901 // Escalate to mixed for correctness; unset preserves m_nextKI.
902 if (UNLIKELY(RuntimeOption::EvalHackArrCompatCheckVarrayPromote) &&
903 adIn->isVArray()) {
904 raise_hackarr_compat_notice("varray promoting to darray: removing key");
907 // TODO(#2606310): if we're removing the /last/ element, we
908 // probably could stay packed, but this needs to be verified.
909 auto const mixed = copy ? ToMixedCopy(adIn) : ToMixed(adIn);
910 return MixedArray::RemoveInt(mixed, k);
912 // Key doesn't exist---we're still packed.
913 return copy ? Copy(adIn) : adIn;
916 ArrayData* PackedArray::RemoveInt(ArrayData* adIn, int64_t k) {
917 return RemoveImpl(adIn, k, adIn->cowCheck());
920 ArrayData*
921 PackedArray::RemoveImplVec(ArrayData* adIn, int64_t k, bool copy) {
922 assertx(checkInvariants(adIn));
923 assertx(adIn->isVecArrayKind());
925 // You're only allowed to remove an element at the end of the vec (or beyond,
926 // which is a no-op).
927 if (UNLIKELY(size_t(k) >= adIn->m_size)) return adIn;
928 if (LIKELY(size_t(k) + 1 == adIn->m_size)) {
929 auto const ad = copy ? Copy(adIn) : adIn;
930 auto const size = ad->m_size - 1;
931 ad->m_sizeAndPos = size; // pos = 0
932 tvDecRefGen(LvalUncheckedInt(ad, size));
933 return ad;
935 throwVecUnsetException();
938 ArrayData* PackedArray::RemoveIntVec(ArrayData* adIn, int64_t k) {
939 return RemoveImplVec(adIn, k, adIn->cowCheck());
942 ArrayData*
943 PackedArray::RemoveStr(ArrayData* adIn, const StringData*) {
944 assertx(checkInvariants(adIn));
945 return adIn;
948 ssize_t PackedArray::IterBegin(const ArrayData* ad) {
949 assertx(checkInvariants(ad));
950 return 0;
953 ssize_t PackedArray::IterLast(const ArrayData* ad) {
954 assertx(checkInvariants(ad));
955 return ad->m_size ? ad->m_size - 1 : 0;
958 ssize_t PackedArray::IterEnd(const ArrayData* ad) {
959 assertx(checkInvariants(ad));
960 return ad->m_size;
963 ssize_t PackedArray::IterAdvance(const ArrayData* ad, ssize_t pos) {
964 assertx(checkInvariants(ad));
965 if (pos < ad->m_size) {
966 ++pos;
968 return pos;
971 ssize_t PackedArray::IterRewind(const ArrayData* ad, ssize_t pos) {
972 assertx(checkInvariants(ad));
973 if (pos > 0) {
974 return pos - 1;
976 return ad->m_size;
979 ArrayData* PackedArray::AppendImpl(ArrayData* adIn, TypedValue v, bool copy) {
980 assertx(checkInvariants(adIn));
981 assertx(v.m_type != KindOfUninit);
982 assertx(copy || adIn->notCyclic(v));
983 auto const ad = PrepareForInsert(adIn, copy);
984 tvDup(v, LvalUncheckedInt(ad, ad->m_size++));
985 return ad;
988 ArrayData* PackedArray::Append(ArrayData* adIn, TypedValue v) {
989 return AppendImpl(adIn, v, adIn->cowCheck());
992 ArrayData* PackedArray::AppendInPlace(ArrayData* adIn, TypedValue v) {
993 assertx(!adIn->cowCheck());
994 return AppendImpl(adIn, v, false);
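// Illustrative sketch (not part of the original implementation): Append
// re-checks cowCheck() on every call, while AppendInPlace asserts the
// array is unshared and skips the check. Even the in-place variant can
// return a different array if PrepareForInsert has to Grow:
//
//   ArrayData* a = PackedArray::MakeReserve(1);                  // unshared
//   a = PackedArray::AppendInPlace(a, make_tv<KindOfInt64>(1));  // fits
//   a = PackedArray::AppendInPlace(a, make_tv<KindOfInt64>(2));  // may Grow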
997 ArrayData* PackedArray::PlusEq(ArrayData* adIn, const ArrayData* elems) {
998 assertx(checkInvariants(adIn));
999 assertx(adIn->isPackedKind());
1000 if (!elems->isPHPArrayType()) throwInvalidAdditionException(elems);
1001 auto const neededSize = adIn->size() + elems->size();
1002 auto const mixed = ToMixedCopyReserve(adIn, neededSize);
1003 try {
1004 auto const ret = MixedArray::PlusEq(mixed, elems);
1005 assertx(ret == mixed);
1006 assertx(mixed->hasExactlyOneRef());
1007 return ret;
1008 } catch (...) {
1009 MixedArray::Release(mixed);
1010 throw;
1014 ArrayData* PackedArray::PlusEqVec(ArrayData* adIn, const ArrayData* /*elems*/) {
1015 assertx(checkInvariants(adIn));
1016 assertx(adIn->isVecArrayKind());
1017 throwInvalidAdditionException(adIn);
1020 ArrayData* PackedArray::Merge(ArrayData* adIn, const ArrayData* elems) {
1021 assertx(checkInvariants(adIn));
1022 auto const neededSize = adIn->m_size + elems->size();
1023 auto const ret = ToMixedCopyReserve(adIn, neededSize);
1024 ret->setDVArray(ArrayData::kNotDVArray);
1025 return MixedArray::ArrayMergeGeneric(ret, elems);
1028 ArrayData* PackedArray::Pop(ArrayData* adIn, Variant& value) {
1029 assertx(checkInvariants(adIn));
1031 auto const ad = adIn->cowCheck() ? Copy(adIn) : adIn;
1033 if (UNLIKELY(ad->m_size == 0)) {
1034 assertx(ad->m_pos == 0);
1035 value = uninit_null();
1036 return ad;
1039 auto const size = ad->m_size - 1;
1040 auto const tv = *LvalUncheckedInt(ad, size);
1041 value = tvAsCVarRef(&tv);
1042 ad->m_sizeAndPos = size; // pos = 0
1043 tvDecRefGen(tv);
1044 return ad;
1047 ArrayData* PackedArray::Dequeue(ArrayData* adIn, Variant& value) {
1048 assertx(checkInvariants(adIn));
1050 auto const ad = adIn->cowCheck() ? Copy(adIn) : adIn;
1051 if (UNLIKELY(ad->m_size == 0)) {
1052 value = uninit_null();
1053 return ad;
1056 // This is O(N), but so is Dequeue on a mixed array, because it
1057 // needs to renumber keys. So it makes sense to stay packed.
1058 auto const size = ad->m_size - 1;
1059 auto const data = packedData(ad);
1060 value = std::move(tvAsVariant(data)); // no incref+decref
1061 std::memmove(data, data + 1, size * sizeof *data);
1062 ad->m_sizeAndPos = size; // pos = 0
1063 return ad;
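// Illustrative note (not part of the original implementation): Dequeue
// shifts the surviving elements down with one memmove, so removing the
// front element of an n-element packed array is O(n) but allocation-free
// apart from any COW copy:
//
//   Variant front;
//   ad = PackedArray::Dequeue(ad, front);   // front takes the old element 0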
1066 ArrayData* PackedArray::Prepend(ArrayData* adIn, TypedValue v) {
1067 assertx(checkInvariants(adIn));
1069 auto const ad = PrepareForInsert(adIn, adIn->cowCheck());
1070 auto const size = ad->m_size;
1071 auto const data = packedData(ad);
1072 std::memmove(data + 1, data, sizeof *data * size);
1073 tvDup(v, data[0]);
1074 ad->m_size = size + 1;
1075 ad->m_pos = 0;
1076 return ad;
1079 ArrayData* PackedArray::ToPHPArray(ArrayData* adIn, bool copy) {
1080 assertx(checkInvariants(adIn));
1081 assertx(adIn->isPackedKind());
1082 if (adIn->isNotDVArray()) return adIn;
1083 assertx(adIn->isVArray());
1084 if (adIn->getSize() == 0) return ArrayData::Create();
1085 ArrayData* ad = copy ? Copy(adIn) : adIn;
1086 ad->setDVArray(ArrayData::kNotDVArray);
1087 if (RO::EvalArrayProvenance) arrprov::clearTag(ad);
1088 assertx(checkInvariants(ad));
1089 return ad;
1092 ArrayData* PackedArray::ToVArray(ArrayData* adIn, bool copy) {
1093 assertx(checkInvariants(adIn));
1094 assertx(adIn->isPackedKind());
1095 if (RuntimeOption::EvalHackArrDVArrs) return ToVec(adIn, copy);
1096 if (adIn->isVArray()) return adIn;
1097 if (adIn->getSize() == 0) return ArrayData::CreateVArray();
1098 ArrayData* ad = copy ? Copy(adIn) : adIn;
1099 ad->setDVArray(ArrayData::kVArray);
1100 if (RO::EvalArrayProvenance) arrprov::reassignTag(ad);
1101 assertx(checkInvariants(ad));
1102 return ad;
1105 ArrayData* PackedArray::ToDArray(ArrayData* adIn, bool /*copy*/) {
1106 assertx(checkInvariants(adIn));
1108 auto const size = adIn->getSize();
1109 if (size == 0) return ArrayData::CreateDArray();
1111 DArrayInit init{size};
1112 for (int64_t i = 0; i < size; ++i) init.add(i, *RvalPos(adIn, i));
1113 return init.create();
1116 ArrayData* PackedArray::ToPHPArrayVec(ArrayData* adIn, bool copy) {
1117 assertx(checkInvariants(adIn));
1118 assertx(adIn->isVecArrayKind());
1119 if (adIn->empty()) return ArrayData::Create();
1120 ArrayData* ad = copy ? Copy(adIn) : adIn;
1121 ad->m_kind = HeaderKind::Packed;
1122 assertx(ad->isNotDVArray());
1123 if (RO::EvalArrayProvenance) arrprov::clearTag(ad);
1124 ad->setLegacyArray(false);
1125 assertx(checkInvariants(ad));
1126 return ad;
1129 ArrayData* PackedArray::ToVArrayVec(ArrayData* adIn, bool copy) {
1130 assertx(checkInvariants(adIn));
1131 assertx(adIn->isVecArrayKind());
1132 if (RuntimeOption::EvalHackArrDVArrs) return adIn;
1133 if (adIn->getSize() == 0) return ArrayData::CreateVArray();
1134 ArrayData* ad = copy ? Copy(adIn) : adIn;
1135 ad->m_kind = HeaderKind::Packed;
1136 ad->setLegacyArray(false);
1137 ad->setDVArray(ArrayData::kVArray);
1138 if (RO::EvalArrayProvenance) arrprov::reassignTag(ad);
1139 assertx(checkInvariants(ad));
1140 return ad;
1143 ArrayData* PackedArray::ToDict(ArrayData* ad, bool copy) {
1144 assertx(checkInvariants(ad));
1145 assertx(ad->isPackedKind());
1147 if (ad->empty()) return ArrayData::CreateDict();
1149 auto const mixed = copy ? ToMixedCopy(ad) : ToMixed(ad);
1150 return MixedArray::ToDictInPlace(mixed);
1153 ArrayData* PackedArray::ToDictVec(ArrayData* ad, bool copy) {
1154 assertx(checkInvariants(ad));
1155 assertx(ad->isVecArrayKind());
1156 if (ad->empty()) return ArrayData::CreateDict();
1157 auto mixed = copy ? ToMixedCopy(ad) : ToMixed(ad);
1158 return MixedArray::ToDictInPlace(mixed);
1161 ArrayData* PackedArray::ToVec(ArrayData* adIn, bool copy) {
1162 assertx(checkInvariants(adIn));
1163 assertx(adIn->isPackedKind());
1165 if (adIn->empty()) return ArrayData::CreateVec();
1167 auto const do_copy = [&] {
1168 // CopyPackedHelper will copy the header and m_sizeAndPos. All we have to do
1169 // afterwards is fix the kind and refcount in the copy; it's easiest to do
1170 // that by reinitializing the whole header.
1171 auto ad = static_cast<ArrayData*>(tl_heap->objMallocIndex(sizeClass(adIn)));
1172 CopyPackedHelper(adIn, ad);
1173 ad->initHeader_16(
1174 HeaderKind::VecArray,
1175 OneReference,
1176 packSizeIndexAndAuxBits(sizeClass(adIn), ArrayData::kNotDVArray)
1178 return ad;
1181 ArrayData* ad;
1182 if (copy) {
1183 ad = do_copy();
1184 } else {
1185 adIn->m_kind = HeaderKind::VecArray;
1186 adIn->setDVArray(ArrayData::kNotDVArray);
1187 ad = adIn;
1189 if (RO::EvalArrayProvenance) arrprov::reassignTag(ad);
1191 assertx(ad->isVecArrayKind());
1192 assertx(capacity(ad) == capacity(adIn));
1193 assertx(ad->m_size == adIn->m_size);
1194 assertx(ad->m_pos == adIn->m_pos);
1195 assertx(ad->hasExactlyOneRef());
1196 assertx(checkInvariants(ad));
1197 return tagArrProv(ad);
1200 ArrayData* PackedArray::ToVecVec(ArrayData* ad, bool) {
1201 assertx(checkInvariants(ad));
1202 assertx(ad->isVecArrayKind());
1203 return ad;
1206 void PackedArray::OnSetEvalScalar(ArrayData* ad) {
1207 assertx(checkInvariants(ad));
1208 auto const size = ad->m_size;
1209 for (uint32_t i = 0; i < size; ++i) {
1210 auto lval = LvalUncheckedInt(ad, i);
1211 auto tv = *lval;
1212 tvAsVariant(&tv).setEvalScalar();
1213 tvCopy(tv, lval);
1217 void PackedArray::Ksort(ArrayData* ad, int /*flags*/, bool ascending) {
1218 assertx(ascending ||
1219 (ad->getSize() <= 1 && !(ad->isVecArrayKind() || ad->isVArray())));
1222 void PackedArray::Asort(ArrayData* ad, int, bool) {
1223 assertx(ad->getSize() <= 1 && !(ad->isVecArrayKind() || ad->isVArray()));
1226 bool PackedArray::Uksort(ArrayData* ad, const Variant&) {
1227 assertx(ad->getSize() <= 1 && !(ad->isVecArrayKind() || ad->isVArray()));
1228 return true;
1231 bool PackedArray::Uasort(ArrayData* ad, const Variant&) {
1232 assertx(ad->getSize() <= 1 && !(ad->isVecArrayKind() || ad->isVArray()));
1233 return true;
1236 ArrayData* PackedArray::MakeUncounted(ArrayData* array,
1237 bool withApcTypedValue,
1238 DataWalker::PointerMap* seen) {
1239 auto const updateSeen = seen && array->hasMultipleRefs();
1240 if (updateSeen) {
1241 auto it = seen->find(array);
1242 assertx(it != seen->end());
1243 if (auto const arr = static_cast<ArrayData*>(it->second)) {
1244 if (arr->uncountedIncRef()) {
1245 return arr;
1249 assertx(checkInvariants(array));
1250 assertx(!array->empty());
1251 if (APCStats::IsCreated()) {
1252 APCStats::getAPCStats().addAPCUncountedBlock();
1255 auto const extra = withApcTypedValue ? sizeof(APCTypedValue) : 0;
1256 auto const size = array->m_size;
1257 auto const sizeIndex = capacityToSizeIndex(size);
1258 auto const mem = static_cast<char*>(
1259 uncounted_malloc(extra + sizeof(ArrayData) + size * sizeof(TypedValue))
1261 auto ad = reinterpret_cast<ArrayData*>(mem + extra);
1262 ad->initHeader_16(
1263 array->m_kind,
1264 UncountedValue,
1265 packSizeIndexAndAuxBits(sizeIndex, array->auxBits()) |
1266 (withApcTypedValue ? ArrayData::kHasApcTv : 0)
1268 ad->m_sizeAndPos = array->m_sizeAndPos;
1270 ad->m_aux16 &= ~ArrayData::kHasProvenanceData;
1272 // Do a raw copy without worrying about refcounts, and convert the values to
1273 // uncounted later.
1274 auto src = packedData(array);
1275 auto dst = packedData(ad);
1276 memcpy16_inline(dst, src, sizeof(TypedValue) * size);
1277 for (auto end = dst + size; dst < end; ++dst) {
1278 ConvertTvToUncounted(dst, seen);
1281 assertx(ad->kind() == array->kind());
1282 assertx(ad->dvArray() == array->dvArray());
1283 assertx(capacity(ad) >= size);
1284 assertx(ad->m_size == size);
1285 assertx(ad->m_pos == array->m_pos);
1286 assertx(ad->isUncounted());
1287 assertx(checkInvariants(ad));
1288 if (updateSeen) (*seen)[array] = ad;
1289 return tagArrProv(ad, array);
1292 ALWAYS_INLINE
1293 bool PackedArray::VecEqualHelper(const ArrayData* ad1, const ArrayData* ad2,
1294 bool strict) {
1295 assertx(checkInvariants(ad1));
1296 assertx(checkInvariants(ad2));
1297 assertx(ad1->isVecArrayKind());
1298 assertx(ad2->isVecArrayKind());
1300 if (ad1 == ad2) return true;
1301 if (ad1->m_size != ad2->m_size) return false;
1303 // Guard against circularly referenced or overly deep objects/arrays.
1304 check_recursion_error();
1306 auto const size = ad1->m_size;
1307 for (uint32_t i = 0; i < size; ++i) {
1308 auto const elm1 = *RvalPos(ad1, i);
1309 auto const elm2 = *RvalPos(ad2, i);
1310 auto const cmp = strict ? tvSame(elm1, elm2) : tvEqual(elm1, elm2);
1311 if (!cmp) return false;
1314 return true;
1317 ALWAYS_INLINE
1318 int64_t PackedArray::VecCmpHelper(const ArrayData* ad1, const ArrayData* ad2) {
1319 assertx(checkInvariants(ad1));
1320 assertx(checkInvariants(ad2));
1321 assertx(ad1->isVecArrayKind());
1322 assertx(ad2->isVecArrayKind());
1324 auto const size1 = ad1->m_size;
1325 auto const size2 = ad2->m_size;
1327 if (size1 < size2) return -1;
1328 if (size1 > size2) return 1;
1330 // Guard against circularly referenced or overly deep objects/arrays.
1331 check_recursion_error();
1333 for (uint32_t i = 0; i < size1; ++i) {
1334 auto const cmp = tvCompare(*RvalPos(ad1, i), *RvalPos(ad2, i));
1335 if (cmp != 0) return cmp;
1338 return 0;
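// Illustrative sketch (not part of the original implementation): the two
// equality flavours correspond to loose and strict element comparison.
// VecEqualHelper(.., strict=false) applies tvEqual per element (so an int
// 1 and a double 1.0 compare equal), while strict=true applies tvSame,
// which also requires matching element types. Sizes must match either way.
// With v1 and v2 being any two vec ArrayData* (hypothetical here):
//
//   PackedArray::VecEqual(v1, v2);   // element-wise tvEqual
//   PackedArray::VecSame(v1, v2);    // element-wise tvSame (types too)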
1341 bool PackedArray::VecEqual(const ArrayData* ad1, const ArrayData* ad2) {
1342 return VecEqualHelper(ad1, ad2, false);
1345 bool PackedArray::VecNotEqual(const ArrayData* ad1, const ArrayData* ad2) {
1346 return !VecEqualHelper(ad1, ad2, false);
1349 bool PackedArray::VecSame(const ArrayData* ad1, const ArrayData* ad2) {
1350 return VecEqualHelper(ad1, ad2, true);
1353 bool PackedArray::VecNotSame(const ArrayData* ad1, const ArrayData* ad2) {
1354 return !VecEqualHelper(ad1, ad2, true);
1357 bool PackedArray::VecLt(const ArrayData* ad1, const ArrayData* ad2) {
1358 return VecCmpHelper(ad1, ad2) < 0;
1361 bool PackedArray::VecLte(const ArrayData* ad1, const ArrayData* ad2) {
1362 return VecCmpHelper(ad1, ad2) <= 0;
1365 bool PackedArray::VecGt(const ArrayData* ad1, const ArrayData* ad2) {
1366 return VecCmpHelper(ad1, ad2) > 0;
1369 bool PackedArray::VecGte(const ArrayData* ad1, const ArrayData* ad2) {
1370 return VecCmpHelper(ad1, ad2) >= 0;
1373 int64_t PackedArray::VecCmp(const ArrayData* ad1, const ArrayData* ad2) {
1374 return VecCmpHelper(ad1, ad2);
1377 //////////////////////////////////////////////////////////////////////