// hphp/runtime/base/mixed-array.cpp
// (scraped blob 3428a76a8d9e0270ef9330614658ceebb078b3d8; commit subject:
//  "Optimize idx/ArrayGet for OOB/undefined index on static arrays by
//   keeping a side...")
/*
  +----------------------------------------------------------------------+
  | HipHop for PHP                                                       |
  +----------------------------------------------------------------------+
  | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com)  |
  +----------------------------------------------------------------------+
  | This source file is subject to version 3.01 of the PHP license,      |
  | that is bundled with this package in the file LICENSE, and is        |
  | available through the world-wide-web at the following url:           |
  | http://www.php.net/license/3_01.txt                                  |
  | If you did not receive a copy of the PHP license and are unable to   |
  | obtain it through the world-wide-web, please send a note to          |
  | license@php.net so we can mail you a copy immediately.               |
  +----------------------------------------------------------------------+
*/
17 #include "hphp/runtime/base/mixed-array.h"
19 #include "hphp/runtime/base/apc-array.h"
20 #include "hphp/runtime/base/apc-stats.h"
21 #include "hphp/runtime/base/array-data.h"
22 #include "hphp/runtime/base/array-helpers.h"
23 #include "hphp/runtime/base/array-init.h"
24 #include "hphp/runtime/base/array-iterator.h"
25 #include "hphp/runtime/base/array-provenance.h"
26 #include "hphp/runtime/base/comparisons.h"
27 #include "hphp/runtime/base/empty-array.h"
28 #include "hphp/runtime/base/execution-context.h"
29 #include "hphp/runtime/base/tv-val.h"
30 #include "hphp/runtime/base/runtime-option.h"
31 #include "hphp/runtime/base/runtime-error.h"
32 #include "hphp/runtime/base/stats.h"
33 #include "hphp/runtime/base/str-key-table.h"
34 #include "hphp/runtime/base/tv-comparisons.h"
35 #include "hphp/runtime/base/tv-refcount.h"
36 #include "hphp/runtime/base/tv-type.h"
37 #include "hphp/runtime/base/variable-serializer.h"
39 #include "hphp/runtime/vm/member-operations.h"
41 #include "hphp/util/alloc.h"
42 #include "hphp/util/hash.h"
43 #include "hphp/util/lock.h"
44 #include "hphp/util/trace.h"
46 #include <folly/CPortability.h>
47 #include <folly/portability/Constexpr.h>
49 #include <algorithm>
50 #include <utility>
52 #include "hphp/runtime/base/mixed-array-defs.h"
53 #include "hphp/runtime/base/packed-array-defs.h"
55 namespace HPHP {
57 TRACE_SET_MOD(runtime);
59 static_assert(MixedArray::computeAllocBytes(MixedArray::SmallScale) ==
60 kEmptyMixedArraySize, "");
62 std::aligned_storage<kEmptyMixedArraySize, 16>::type s_theEmptyDictArray;
63 std::aligned_storage<kEmptyMixedArraySize, 16>::type s_theEmptyDArray;
65 struct MixedArray::Initializer {
66 Initializer() {
67 auto const ad = reinterpret_cast<MixedArray*>(&s_theEmptyDictArray);
68 ad->initHash(1);
69 ad->m_sizeAndPos = 0;
70 ad->m_scale_used = 1;
71 ad->m_nextKI = 0;
72 ad->initHeader(HeaderKind::Dict, StaticValue);
73 assertx(ad->checkInvariants());
76 MixedArray::Initializer MixedArray::s_initializer;
78 struct MixedArray::DArrayInitializer {
79 DArrayInitializer() {
80 auto const ad = reinterpret_cast<MixedArray*>(&s_theEmptyDArray);
81 ad->initHash(1);
82 ad->m_sizeAndPos = 0;
83 ad->m_scale_used = 1;
84 ad->m_nextKI = 0;
85 ad->initHeader_16(HeaderKind::Mixed, StaticValue, ArrayData::kDArray);
86 assertx(RuntimeOption::EvalHackArrDVArrs || ad->checkInvariants());
89 MixedArray::DArrayInitializer MixedArray::s_darr_initializer;
91 //////////////////////////////////////////////////////////////////////
93 ALWAYS_INLINE
94 ArrayData* MixedArray::MakeReserveImpl(uint32_t size,
95 HeaderKind hk,
96 ArrayData::DVArray dvArray) {
97 assertx(hk == HeaderKind::Mixed || hk == HeaderKind::Dict);
98 assertx(dvArray == ArrayData::kNotDVArray || dvArray == ArrayData::kDArray);
99 assertx(hk != HeaderKind::Dict || dvArray == ArrayData::kNotDVArray);
100 assertx(!RuntimeOption::EvalHackArrDVArrs ||
101 dvArray == ArrayData::kNotDVArray);
103 auto const scale = computeScaleFromSize(size);
104 auto const ad = reqAlloc(scale);
106 // Intialize the hash table first, because the header is already in L1 cache,
107 // but the hash table may not be. So let's issue the cache request ASAP.
108 ad->initHash(scale);
110 ad->m_sizeAndPos = 0; // size=0, pos=0
111 ad->initHeader_16(hk, OneReference, dvArray);
112 ad->m_scale_used = scale; // used=0
113 ad->m_nextKI = 0;
115 assertx(ad->m_kind == hk);
116 assertx(ad->dvArray() == dvArray);
117 assertx(ad->m_size == 0);
118 assertx(ad->m_pos == 0);
119 assertx(ad->hasExactlyOneRef());
120 assertx(ad->m_used == 0);
121 assertx(ad->m_nextKI == 0);
122 assertx(ad->m_scale == scale);
123 assertx(ad->checkInvariants());
124 return ad;
127 ArrayData* MixedArray::MakeReserveMixed(uint32_t size) {
128 auto ad = MakeReserveImpl(size, HeaderKind::Mixed, ArrayData::kNotDVArray);
129 assertx(ad->isMixedKind());
130 assertx(ad->isNotDVArray());
131 return ad;
134 ArrayData* MixedArray::MakeReserveDArray(uint32_t size) {
135 assertx(!RuntimeOption::EvalHackArrDVArrs);
136 auto ad = MakeReserveImpl(size, HeaderKind::Mixed, ArrayData::kDArray);
137 assertx(ad->isMixedKind());
138 assertx(ad->isDArray());
139 return tagArrProv(ad);
142 ArrayData* MixedArray::MakeReserveDict(uint32_t size) {
143 auto ad = MakeReserveImpl(size, HeaderKind::Dict, ArrayData::kNotDVArray);
144 assertx(ad->isDictKind());
145 return tagArrProv(ad);
148 ArrayData* MixedArray::MakeReserveLike(const ArrayData* other,
149 uint32_t capacity) {
150 capacity = (capacity ? capacity : other->size());
152 if (other->hasVanillaPackedLayout()) {
153 return PackedArray::MakeReserve(capacity);
154 } else {
155 return MixedArray::MakeReserveMixed(capacity);
159 ALWAYS_INLINE
160 MixedArray* MixedArray::MakeStructImpl(uint32_t size,
161 const StringData* const* keys,
162 const TypedValue* values,
163 HeaderKind hk,
164 ArrayData::DVArray dvArray) {
165 assertx(size > 0);
166 assertx(hk == HeaderKind::Mixed || hk == HeaderKind::Dict);
167 assertx(dvArray == ArrayData::kNotDVArray ||
168 dvArray == ArrayData::kDArray);
169 assertx(hk != HeaderKind::Dict || dvArray == ArrayData::kNotDVArray);
170 assertx(!RuntimeOption::EvalHackArrDVArrs ||
171 dvArray == ArrayData::kNotDVArray);
173 auto const scale = computeScaleFromSize(size);
174 auto const ad = reqAlloc(scale);
176 auto const aux = MixedArrayKeys::packStaticStrsForAux() |
177 static_cast<uint16_t>(dvArray);
179 ad->m_sizeAndPos = size; // pos=0
180 ad->initHeader_16(hk, OneReference, aux);
181 ad->m_scale_used = scale | uint64_t{size} << 32; // used=size
182 ad->m_nextKI = 0;
184 auto const table = ad->initHash(scale);
185 auto const mask = ad->mask();
186 auto const data = ad->data();
188 // Append values by moving -- Caller assumes we update refcount.
189 // Values are in reverse order since they come from the stack, which
190 // grows down.
191 for (uint32_t i = 0; i < size; i++) {
192 assertx(keys[i]->isStatic());
193 auto k = keys[i];
194 auto h = k->hash();
195 data[i].setStaticKey(const_cast<StringData*>(k), h);
196 const auto& tv = values[size - i - 1];
197 data[i].data.m_data = tv.m_data;
198 data[i].data.m_type = tv.m_type;
199 auto ei = ad->findForNewInsert(table, mask, h);
200 *ei = i;
203 assertx(ad->m_size == size);
204 assertx(ad->dvArray() == dvArray);
205 assertx(ad->m_pos == 0);
206 assertx(ad->m_kind == hk);
207 assertx(ad->m_scale == scale);
208 assertx(ad->hasExactlyOneRef());
209 assertx(ad->m_used == size);
210 assertx(ad->m_nextKI == 0);
211 assertx(ad->checkInvariants());
212 return ad;
215 MixedArray* MixedArray::MakeStruct(uint32_t size,
216 const StringData* const* keys,
217 const TypedValue* values) {
218 return MakeStructImpl(size, keys, values,
219 HeaderKind::Mixed,
220 ArrayData::kNotDVArray);
223 MixedArray* MixedArray::MakeStructDict(uint32_t size,
224 const StringData* const* keys,
225 const TypedValue* values) {
226 auto const ad = MakeStructImpl(size, keys, values,
227 HeaderKind::Dict,
228 ArrayData::kNotDVArray);
229 return asMixed(tagArrProv(ad));
232 MixedArray* MixedArray::MakeStructDArray(uint32_t size,
233 const StringData* const* keys,
234 const TypedValue* values) {
235 assertx(!RuntimeOption::EvalHackArrDVArrs);
236 auto const ad = MakeStructImpl(size, keys, values,
237 HeaderKind::Mixed,
238 ArrayData::kDArray);
239 return asMixed(tagArrProv(ad));
242 ALWAYS_INLINE
243 MixedArray* MixedArray::AllocStructImpl(uint32_t size,
244 const int32_t* hash,
245 HeaderKind hk,
246 ArrayData::DVArray dvArray) {
247 assertx(size > 0);
248 assertx(hk == HeaderKind::Mixed || hk == HeaderKind::Dict);
249 assertx(dvArray == ArrayData::kNotDVArray ||
250 dvArray == ArrayData::kDArray);
251 assertx(hk != HeaderKind::Dict || dvArray == ArrayData::kNotDVArray);
252 assertx(!RuntimeOption::EvalHackArrDVArrs ||
253 dvArray == ArrayData::kNotDVArray);
255 auto const scale = computeScaleFromSize(size);
256 auto const ad = reqAlloc(scale);
258 auto const aux = MixedArrayKeys::packStaticStrsForAux() |
259 static_cast<uint16_t>(dvArray);
261 ad->m_sizeAndPos = size; // pos=0
262 ad->initHeader_16(hk, OneReference, aux);
263 ad->m_scale_used = scale | uint64_t{size} << 32; // used=size
264 ad->m_nextKI = 0;
266 CopyHash(ad->hashTab(), hash, scale);
268 // If we don't trash the elements here, `ad` may fail checkInvariants
269 // because some of its element's type bytes happen to be tombstones.
271 // Trashing the elements isn't likely to hide real failures, because if we
272 // fail to initialize them later, any lookups will return implausible TVs.
273 if (debug) memset(ad->data(), kMixedElmFill, sizeof(MixedArrayElm) * size);
275 assertx(ad->m_size == size);
276 assertx(ad->dvArray() == dvArray);
277 assertx(ad->m_pos == 0);
278 assertx(ad->m_kind == hk);
279 assertx(ad->m_scale == scale);
280 assertx(ad->hasExactlyOneRef());
281 assertx(ad->m_used == size);
282 assertx(ad->m_nextKI == 0);
283 assertx(ad->checkInvariants());
284 return ad;
287 MixedArray* MixedArray::AllocStruct(uint32_t size, const int32_t* hash) {
288 return AllocStructImpl(size, hash, HeaderKind::Mixed, ArrayData::kNotDVArray);
291 MixedArray* MixedArray::AllocStructDict(uint32_t size, const int32_t* hash) {
292 auto const ad = AllocStructImpl(size, hash, HeaderKind::Dict,
293 ArrayData::kNotDVArray);
294 return asMixed(tagArrProv(ad));
297 MixedArray* MixedArray::AllocStructDArray(uint32_t size, const int32_t* hash) {
298 assertx(!RuntimeOption::EvalHackArrDVArrs);
299 auto const ad = AllocStructImpl(size, hash, HeaderKind::Mixed,
300 ArrayData::kDArray);
301 return asMixed(tagArrProv(ad));
304 template<HeaderKind hdr, ArrayData::DVArray dv>
305 MixedArray* MixedArray::MakeMixedImpl(uint32_t size, const TypedValue* kvs) {
306 assertx(size > 0);
308 auto const scale = computeScaleFromSize(size);
309 auto const ad = reqAlloc(scale);
311 ad->initHash(scale);
313 ad->m_sizeAndPos = size; // pos=0
314 ad->initHeader_16(hdr, OneReference, dv);
315 ad->m_scale_used = scale | uint64_t{size} << 32; // used=size
316 ad->m_nextKI = 0;
318 // Append values by moving -- no refcounts are updated.
319 auto const data = ad->data();
320 for (uint32_t i = 0; i < size; i++) {
321 auto& kTv = kvs[i * 2];
322 if (isStringType(kTv.m_type)) {
323 auto k = kTv.m_data.pstr;
324 auto h = k->hash();
325 auto ei = ad->findForInsertUpdate(k, h);
326 if (isValidPos(ei)) {
327 // it's the caller's responsibility to free kvs
328 ad->setZombie();
329 Release(ad);
330 return nullptr;
332 data[i].setStrKeyNoIncRef(k, h);
333 ad->mutableKeyTypes()->recordStr(k);
334 *ei = i;
335 } else {
336 assertx(kTv.m_type == KindOfInt64);
337 auto k = kTv.m_data.num;
338 auto h = hash_int64(k);
339 auto ei = ad->findForInsertUpdate(k, h);
340 if (isValidPos(ei)) {
341 // it's the caller's responsibility to free kvs
342 ad->setZombie();
343 Release(ad);
344 return nullptr;
346 data[i].setIntKey(k, h);
347 ad->mutableKeyTypes()->recordInt();
348 *ei = i;
350 const auto& tv = kvs[(i * 2) + 1];
351 data[i].data.m_data = tv.m_data;
352 data[i].data.m_type = tv.m_type;
355 assertx(ad->m_size == size);
356 assertx(ad->m_pos == 0);
357 assertx(ad->m_scale == scale);
358 assertx(ad->hasExactlyOneRef());
359 assertx(ad->m_used == size);
360 assertx(ad->m_nextKI == 0);
361 assertx(ad->checkInvariants());
362 return ad;
365 MixedArray* MixedArray::MakeMixed(uint32_t size, const TypedValue* kvs) {
366 auto const ad =
367 MakeMixedImpl<HeaderKind::Mixed, ArrayData::kNotDVArray>(size, kvs);
368 assertx(ad == nullptr || ad->kind() == kMixedKind);
369 assertx(ad == nullptr || ad->isNotDVArray());
370 return ad;
373 MixedArray* MixedArray::MakeDArray(uint32_t size, const TypedValue* kvs) {
374 if (RuntimeOption::EvalHackArrDVArrs) return MakeDict(size, kvs);
376 auto const ad =
377 MakeMixedImpl<HeaderKind::Mixed, ArrayData::kDArray>(size, kvs);
378 assertx(ad == nullptr || ad->kind() == kMixedKind);
379 assertx(ad == nullptr || ad->isDArray());
380 return ad ? asMixed(tagArrProv(ad)) : nullptr;
383 MixedArray* MixedArray::MakeDict(uint32_t size, const TypedValue* kvs) {
384 auto const ad =
385 MakeMixedImpl<HeaderKind::Dict, ArrayData::kNotDVArray>(size, kvs);
386 assertx(ad == nullptr || ad->kind() == kDictKind);
387 return ad ? asMixed(tagArrProv(ad)) : nullptr;
390 MixedArray* MixedArray::MakeDArrayNatural(uint32_t size,
391 const TypedValue* vals) {
392 assertx(size > 0);
394 auto const scale = computeScaleFromSize(size);
395 auto const ad = reqAlloc(scale);
397 ad->initHash(scale);
399 ad->m_sizeAndPos = size; // pos=0
400 if (RuntimeOption::EvalHackArrDVArrs) {
401 auto const aux = MixedArrayKeys::packIntsForAux() |
402 static_cast<uint16_t>(ArrayData::kNotDVArray);
403 ad->initHeader_16(HeaderKind::Dict, OneReference, aux);
404 } else {
405 auto const aux = MixedArrayKeys::packIntsForAux() |
406 static_cast<uint16_t>(ArrayData::kDArray);
407 ad->initHeader_16(HeaderKind::Mixed, OneReference, aux);
409 ad->m_scale_used = scale | uint64_t{size} << 32; // used=size
410 ad->m_nextKI = size;
412 // Append values by moving -- no refcounts are updated.
413 auto const data = ad->data();
414 for (uint32_t i = 0; i < size; i++) {
415 auto h = hash_int64(i);
416 auto ei = ad->findForInsertUpdate(i, h);
417 assertx(!isValidPos(ei));
418 data[i].setIntKey(i, h);
419 *ei = i;
421 const auto& tv = vals[i];
422 data[i].data.m_data = tv.m_data;
423 data[i].data.m_type = tv.m_type;
426 assertx(ad->m_size == size);
427 assertx(ad->m_pos == 0);
428 assertx(ad->m_scale == scale);
429 assertx(ad->hasExactlyOneRef());
430 assertx(ad->m_used == size);
431 assertx(ad->m_nextKI == size);
432 assertx(ad->checkInvariants());
433 return asMixed(tagArrProv(ad));
436 NEVER_INLINE
437 MixedArray* MixedArray::SlowCopy(MixedArray* ad, const ArrayData& old,
438 MixedArrayElm* elm, MixedArrayElm* end) {
439 assertx(ad->isRefCounted());
441 for (; elm < end; ++elm) {
442 if (elm->hasStrKey()) elm->skey->incRefCount();
443 // When we convert an element to a tombstone, we set its value type to
444 // kInvalidDataType, which is not a refcounted type.
445 tvIncRefGenUnsafe(elm->data);
447 return ad;
450 // for internal use by copyStatic() and copyMixed()
451 ALWAYS_INLINE
452 MixedArray* MixedArray::CopyMixed(const MixedArray& other,
453 AllocMode mode,
454 HeaderKind dest_hk,
455 ArrayData::DVArray dvArray) {
456 assertx(dest_hk == HeaderKind::Mixed || dest_hk == HeaderKind::Dict);
457 assertx(dvArray == ArrayData::kNotDVArray || dvArray == ArrayData::kDArray);
458 assertx(dest_hk != HeaderKind::Dict || dvArray == ArrayData::kNotDVArray);
459 if (mode == AllocMode::Static) {
460 assertx(other.m_size == other.m_used);
461 assertx(!other.keyTypes().mayIncludeCounted());
462 assertx(!other.keyTypes().mayIncludeTombstone());
465 auto const shouldCreateStrKeyTable =
466 mode == AllocMode::Static &&
467 other.keyTypes().mustBeStaticStrs() &&
468 other.size() < StrKeyTable::kStrKeyTableSize * 3 / 4;
469 auto const sizeOfStrKeyTable =
470 shouldCreateStrKeyTable ? sizeof(StrKeyTable) : 0;
471 auto const scale = other.m_scale;
472 auto const ad = mode == AllocMode::Request
473 ? reqAlloc(scale)
474 : staticAlloc(scale, arrprov::tagSize(&other) + sizeOfStrKeyTable);
475 // Copy everything including tombstones. This is a requirement for remove() to
476 // work correctly, which assumes the position is the same in the original and
477 // in the copy of the array, in case copying is needed.
478 #ifdef USE_JEMALLOC
479 // Copy elements and hashes separately, because the array may not be very
480 // full.
481 assertx(reinterpret_cast<uintptr_t>(ad) % 16 == 0);
482 assertx(reinterpret_cast<uintptr_t>(&other) % 16 == 0);
483 // Adding 24 bytes so that we can copy in 32-byte groups. This might
484 // overwrite the hash table, but won't overrun the allocated space as long as
485 // `malloc' returns multiple of 16 bytes.
486 bcopy32_inline(ad, &other,
487 sizeof(MixedArray) + sizeof(Elm) * other.m_used + 24);
488 #else
489 memcpy(ad, &other, sizeof(MixedArray) + sizeof(Elm) * other.m_used);
490 #endif
491 auto const count = mode == AllocMode::Request ? OneReference : StaticValue;
492 auto const aux =
493 other.keyTypes().packForAux() |
494 dvArray |
495 (other.isLegacyArray() ? ArrayData::kLegacyArray : 0) |
496 (shouldCreateStrKeyTable ? kHasStrKeyTable : 0);
497 ad->initHeader_16(dest_hk, count, aux);
499 // We want SlowCopy to be a tail call in the opt build, but we still want to
500 // check assertions in debug builds, so we check them in this helper.
501 auto const check = [&](MixedArray* res) {
502 assertx(res->checkInvariants());
503 assertx(res->m_kind == dest_hk);
504 assertx(res->dvArray() == dvArray);
505 assertx(res->isLegacyArray() == other.isLegacyArray());
506 assertx(res->keyTypes() == other.keyTypes());
507 assertx(res->m_size == other.m_size);
508 assertx(res->m_pos == other.m_pos);
509 assertx(res->m_used == other.m_used);
510 assertx(res->m_scale == scale);
511 return res;
514 MixedArrayElm* elm = ad->data();
515 auto const end = elm + ad->m_used;
516 if (shouldCreateStrKeyTable) {
517 auto table = ad->mutableStrKeyTable();
518 for (; elm < end; ++elm) {
519 assertx(elm->hasStrKey() && elm->strKey()->isStatic());
520 table->add(elm->strKey());
524 CopyHash(ad->hashTab(), other.hashTab(), scale);
525 if (mode == AllocMode::Static) return check(ad);
527 // Bump up refcounts as needed.
528 if (other.keyTypes().mayIncludeCounted()) {
529 return check(SlowCopy(ad, other, elm, end));
531 for (; elm < end; ++elm) {
532 assertx(IMPLIES(elm->hasStrKey(), elm->strKey()->isStatic()));
533 // When we convert an element to a tombstone, we set its key type to int
534 // and value type to kInvalidDataType, neither of which are refcounted.
535 tvIncRefGenUnsafe(elm->data);
537 return check(ad);
540 NEVER_INLINE ArrayData* MixedArray::CopyStatic(const ArrayData* in) {
541 auto a = asMixed(in);
542 assertx(a->checkInvariants());
543 auto ret = CopyMixed(*a, AllocMode::Static, in->m_kind, in->dvArray());
545 if (RuntimeOption::EvalArrayProvenance) {
546 assertx(!ret->hasProvenanceData());
547 auto const tag = arrprov::getTag(in);
548 if (tag.valid()) {
549 arrprov::setTag(ret, tag);
552 assertx(!arrprov::arrayWantsTag(ret) ||
553 !!arrprov::getTag(ret) == !!arrprov::getTag(in));
554 return ret;
557 NEVER_INLINE MixedArray* MixedArray::copyMixed() const {
558 assertx(checkInvariants());
559 auto const out = CopyMixed(*this, AllocMode::Request, m_kind, dvArray());
560 return asMixed(tagArrProv(out, this));
563 //////////////////////////////////////////////////////////////////////
565 ArrayData* MixedArray::MakeUncounted(ArrayData* array,
566 bool withApcTypedValue,
567 DataWalker::PointerMap* seen) {
568 auto const updateSeen = seen && array->hasMultipleRefs();
569 if (updateSeen) {
570 auto it = seen->find(array);
571 assertx(it != seen->end());
572 if (auto const arr = static_cast<ArrayData*>(it->second)) {
573 if (arr->uncountedIncRef()) {
574 return arr;
578 auto a = asMixed(array);
579 assertx(!a->empty());
580 auto const extra = uncountedAllocExtra(array, withApcTypedValue);
581 auto const ad = uncountedAlloc(a->scale(), extra);
582 auto const used = a->m_used;
583 // Do a raw copy first, without worrying about counted types or refcount
584 // manipulation. To copy in 32-byte chunks, we add 24 bytes to the length.
585 // This might overwrite the hash table, but won't go beyond the space
586 // allocated for the MixedArray, assuming `malloc()' always allocates
587 // multiple of 16 bytes and extra is also a multiple of 16.
588 assertx((extra & 0xf) == 0);
589 bcopy32_inline(ad, a, sizeof(MixedArray) + sizeof(Elm) * used + 24);
590 ad->m_count = UncountedValue; // after bcopy, update count
591 if (withApcTypedValue) {
592 ad->m_aux16 |= kHasApcTv;
593 } else {
594 ad->m_aux16 &= ~kHasApcTv;
596 ad->m_aux16 &= ~kHasProvenanceData;
597 assertx(ad->keyTypes() == a->keyTypes());
598 CopyHash(ad->hashTab(), a->hashTab(), a->scale());
599 ad->mutableKeyTypes()->makeStatic();
601 // Need to make sure keys and values are all uncounted.
602 auto dstElem = ad->data();
603 for (uint32_t i = 0; i < used; ++i) {
604 auto& te = dstElem[i];
605 auto const type = te.data.m_type;
606 if (UNLIKELY(isTombstone(type))) continue;
607 if (te.hasStrKey() && !te.skey->isStatic()) {
608 if (te.skey->isUncounted() && te.skey->uncountedIncRef()) {
609 ad->mutableKeyTypes()->recordNonStaticStr();
610 } else {
611 te.skey = [&] {
612 if (auto const st = lookupStaticString(te.skey)) return st;
613 ad->mutableKeyTypes()->recordNonStaticStr();
614 HeapObject** seenStr = nullptr;
615 if (seen && te.skey->hasMultipleRefs()) {
616 seenStr = &(*seen)[te.skey];
617 if (auto const st = static_cast<StringData*>(*seenStr)) {
618 if (st->uncountedIncRef()) return st;
621 auto const st = StringData::MakeUncounted(te.skey->slice());
622 if (seenStr) *seenStr = st;
623 return st;
624 }();
627 ConvertTvToUncounted(&te.data, seen);
629 if (APCStats::IsCreated()) {
630 APCStats::getAPCStats().addAPCUncountedBlock();
632 if (updateSeen) (*seen)[array] = ad;
633 assertx(ad->checkInvariants());
634 return tagArrProv(ad, array);
637 ArrayData* MixedArray::MakeDictFromAPC(const APCArray* apc) {
638 assertx(apc->isHashed());
639 auto const apcSize = apc->size();
640 DictInit init{apcSize};
641 for (uint32_t i = 0; i < apcSize; ++i) {
642 init.setValidKey(apc->getKey(i), apc->getValue(i)->toLocal());
644 auto const ad = init.create();
645 return tagArrProv(ad, apc);
648 ArrayData* MixedArray::MakeDArrayFromAPC(const APCArray* apc) {
649 assertx(!RuntimeOption::EvalHackArrDVArrs);
650 assertx(apc->isDArray());
651 auto const apcSize = apc->size();
652 DArrayInit init{apcSize};
653 for (uint32_t i = 0; i < apcSize; ++i) {
654 init.setValidKey(apc->getKey(i), apc->getValue(i)->toLocal());
656 auto const ad = init.create();
657 return tagArrProv(ad, apc);
660 //=============================================================================
661 // Destruction
663 NEVER_INLINE
664 void MixedArray::SlowRelease(MixedArray* ad) {
665 assertx(ad->isRefCounted());
666 assertx(ad->hasExactlyOneRef());
667 assertx(!ad->isZombie());
669 MixedArrayElm* elm = ad->data();
670 for (auto const end = elm + ad->m_used; elm < end; ++elm) {
671 if (elm->hasStrKey()) {
672 decRefStr(elm->skey);
673 // Keep GC from asserting on freed string keys in debug mode.
674 if (debug) elm->skey = nullptr;
676 // When we convert an element to a tombstone, we set its key type to int
677 // and value type to kInvalidDataType, neither of which are refcounted.
678 tvDecRefGen(&elm->data);
680 tl_heap->objFree(ad, ad->heapSize());
683 NEVER_INLINE
684 void MixedArray::Release(ArrayData* in) {
685 in->fixCountForRelease();
686 assertx(in->isRefCounted());
687 assertx(in->hasExactlyOneRef());
689 if (RuntimeOption::EvalArrayProvenance) {
690 arrprov::clearTag(in);
692 auto const ad = asMixed(in);
694 if (!ad->isZombie()) {
695 assertx(ad->checkInvariants());
696 // keyTypes checks are too slow to go in MixedArray::checkInvariants.
697 assertx(ad->keyTypes().checkInvariants(ad));
698 if (ad->keyTypes().mayIncludeCounted()) return SlowRelease(ad);
700 MixedArrayElm* elm = ad->data();
701 for (auto const end = elm + ad->m_used; elm < end; ++elm) {
702 // Keep the GC from asserting on freed string keys in debug mode.
703 assertx(IMPLIES(elm->hasStrKey(), elm->strKey()->isStatic()));
704 if (debug && elm->hasStrKey()) elm->skey = nullptr;
705 // When we convert an element to a tombstone, we set its key type to int
706 // and value type to kInvalidDataType, neither of which are refcounted.
707 tvDecRefGen(&elm->data);
710 tl_heap->objFree(ad, ad->heapSize());
711 AARCH64_WALKABLE_FRAME();
714 NEVER_INLINE
715 void MixedArray::ReleaseUncounted(ArrayData* in) {
716 auto const ad = asMixed(in);
717 if (!ad->uncountedDecRef()) return;
719 if (RuntimeOption::EvalArrayProvenance) {
720 arrprov::clearTag(in);
722 if (!ad->isZombie()) {
723 auto const data = ad->data();
724 auto const stop = data + ad->m_used;
726 for (auto ptr = data; ptr != stop; ++ptr) {
727 if (isTombstone(ptr->data.m_type)) continue;
728 if (ptr->hasStrKey()) {
729 assertx(!ptr->skey->isRefCounted());
730 if (ptr->skey->isUncounted()) {
731 StringData::ReleaseUncounted(ptr->skey);
734 ReleaseUncountedTv(&ptr->data);
737 auto const extra = uncountedAllocExtra(ad, ad->hasApcTv());
738 uncounted_sized_free(reinterpret_cast<char*>(ad) - extra,
739 computeAllocBytes(ad->scale()) + extra);
740 if (APCStats::IsCreated()) {
741 APCStats::getAPCStats().removeAPCUncountedBlock();
//=============================================================================

/*
 * Invariants:
 *
 *  All arrays are either in a mode, or in zombie state.  The zombie
 *  state happens if an array is "moved from"---the only legal
 *  operation on a zombie array is to decref and release it.
 *
 * All arrays (zombie or not):
 *
 *   m_scale is 2^k (1/4 of the hashtable size and 1/3 of capacity)
 *   mask is 4*scale - 1 (even power of 2 required for quadratic probe)
 *   mask == folly::nextPowTwo(capacity()) - 1;
 *
 * Zombie state:
 *
 *   m_used == UINT32_MAX
 *
 * Non-zombie:
 *
 *   m_size <= m_used; m_used <= capacity()
 *   last element cannot be a tombstone
 *   m_pos and all external iterators can't be on a tombstone
 *   m_nextKI >= highest actual int key
 *   Elm.data.m_type maybe kInvalidDataType (tombstone)
 *   hash[] maybe Tombstone
 *   static arrays cannot contain tombstones
 */
774 bool MixedArray::checkInvariants() const {
775 static_assert(ssize_t(Empty) == ssize_t(-1), "");
776 static_assert(Tombstone < 0, "");
777 static_assert((Tombstone & 1) == 0, "");
778 static_assert(sizeof(Elm) == 24, "");
779 static_assert(sizeof(ArrayData) == 2 * sizeof(uint64_t), "");
780 static_assert(
781 sizeof(MixedArray) == sizeof(ArrayData) + 2 * sizeof(uint64_t),
782 "Performance is sensitive to sizeof(MixedArray)."
783 " Make sure you changed it with good reason and then update this assert."
786 // All arrays:
787 assertx(hasVanillaMixedLayout());
788 assertx(!isMixedKind() || !isVArray());
789 assertx(!isDictKind() || isNotDVArray());
790 assertx(!RuntimeOption::EvalHackArrDVArrs || isNotDVArray());
791 assertx(checkCount());
792 assertx(m_scale >= 1 && (m_scale & (m_scale - 1)) == 0);
793 assertx(MixedArray::HashSize(m_scale) ==
794 folly::nextPowTwo<uint64_t>(capacity()));
795 assertx(arrprov::arrayWantsTag(this) || !hasProvenanceData());
797 if (isZombie()) return true;
799 // Non-zombie:
800 assertx(m_size <= m_used);
801 assertx(m_used <= capacity());
802 assertx(IMPLIES(isStatic(), m_used == m_size));
803 if (m_pos != m_used) {
804 assertx(size_t(m_pos) < m_used);
805 assertx(!isTombstone(data()[m_pos].data.m_type));
807 return true;
810 //=============================================================================
811 // Iteration.
813 size_t MixedArray::Vsize(const ArrayData*) { not_reached(); }
815 TypedValue MixedArray::GetPosVal(const ArrayData* ad, ssize_t pos) {
816 auto a = asMixed(ad);
817 assertx(a->checkInvariants());
818 assertx(pos != a->m_used);
819 auto const& e = a->data()[pos];
820 assertx(!e.isTombstone());
821 return e.data;
824 bool MixedArray::IsVectorData(const ArrayData* ad) {
825 auto a = asMixed(ad);
826 if (a->m_size == 0) {
827 // any 0-length array is "vector-like" for the sake of this function.
828 return true;
830 auto const elms = a->data();
831 int64_t i = 0;
832 for (uint32_t pos = 0, limit = a->m_used; pos < limit; ++pos) {
833 auto const& e = elms[pos];
834 if (isTombstone(e.data.m_type)) {
835 continue;
837 if (e.hasStrKey() || e.ikey != i) {
838 return false;
840 ++i;
842 return true;
845 //=============================================================================
846 // Lookup.
848 NEVER_INLINE
849 int32_t* warnUnbalanced(MixedArray* a, size_t n, int32_t* ei) {
850 if (n > size_t(RuntimeOption::MaxArrayChain)) {
851 decRefArr(a->asArrayData()); // otherwise, a leaks when exn propagates
852 raise_error("Array is too unbalanced (%lu)", n);
854 return ei;
857 MixedArray::InsertPos MixedArray::insert(int64_t k) {
858 assertx(!isFull());
859 auto h = hash_int64(k);
860 auto ei = findForInsertUpdate(k, h);
861 if (isValidPos(ei)) {
862 return InsertPos(true, data()[(int32_t)*ei].data);
864 if (k >= m_nextKI && m_nextKI >= 0) m_nextKI = static_cast<uint64_t>(k) + 1;
865 auto e = allocElm(ei);
866 e->setIntKey(k, h);
867 mutableKeyTypes()->recordInt();
868 return InsertPos(false, e->data);
871 MixedArray::InsertPos MixedArray::insert(StringData* k) {
872 assertx(!isFull());
873 auto const h = k->hash();
874 auto ei = findForInsertUpdate(k, h);
875 if (isValidPos(ei)) {
876 return InsertPos(true, data()[(int32_t)*ei].data);
878 auto e = allocElm(ei);
879 e->setStrKey(k, h);
880 mutableKeyTypes()->recordStr(k);
881 return InsertPos(false, e->data);
884 bool MixedArray::ExistsInt(const ArrayData* ad, int64_t k) {
885 return asMixed(ad)->findForExists(k, hash_int64(k));
888 bool MixedArray::ExistsStr(const ArrayData* ad, const StringData* k) {
889 return NvGetStr(ad, k).is_init();
//=============================================================================
// Append/insert/update.

/*
 * This is a streamlined copy of Variant.constructValHelper()
 * with no incref+decref because we're moving v to this array.
 */
899 ALWAYS_INLINE
900 MixedArray* MixedArray::moveVal(TypedValue& tv, TypedValue v) {
901 tv.m_type = v.m_type == KindOfUninit ? KindOfNull : v.m_type;
902 tv.m_data.num = v.m_data.num;
903 return this;
906 NEVER_INLINE MixedArray*
907 MixedArray::InsertCheckUnbalanced(MixedArray* ad,
908 int32_t* table,
909 uint32_t mask,
910 Elm* iter,
911 Elm* stop) {
912 for (uint32_t i = 0; iter != stop; ++iter, ++i) {
913 auto& e = *iter;
914 if (e.isTombstone()) continue;
915 *ad->findForNewInsertWarn(table,
916 mask,
917 e.probe()) = i;
919 return ad;
922 NEVER_INLINE
923 MixedArray* MixedArray::SlowGrow(MixedArray* ad, const ArrayData& old,
924 MixedArrayElm* elm, MixedArrayElm* end) {
925 for (; elm < end; ++elm) {
926 if (elm->hasStrKey()) elm->skey->incRefCount();
927 // When we convert an element to a tombstone, we set its value type to
928 // kInvalidDataType, which is not a refcounted type.
929 tvIncRefGenUnsafe(elm->data);
932 auto const table = ad->initHash(ad->m_scale);
933 auto const mask = MixedArray::Mask(ad->m_scale);
935 elm = ad->data();
936 if (UNLIKELY(ad->m_used >= 2000)) {
937 return InsertCheckUnbalanced(ad, table, mask, elm, end);
939 for (uint32_t i = 0; elm != end; ++elm, ++i) {
940 if (elm->isTombstone()) continue;
941 *ad->findForNewInsert(table, mask, elm->probe()) = i;
943 return ad;
// Reallocate `old` at the (strictly larger, power-of-two) scale `newScale`.
// If `copy` is true the old array is left intact (CoW); otherwise it is
// zombified and its elements are moved. Returns a fresh array with exactly
// one reference.
NEVER_INLINE
MixedArray* MixedArray::Grow(MixedArray* old, uint32_t newScale, bool copy) {
  assertx(old->m_size > 0);
  assertx(MixedArray::Capacity(newScale) >= old->m_size);
  assertx(newScale >= 1 && (newScale & (newScale - 1)) == 0);

  auto ad = reqAlloc(newScale);
  auto const oldUsed = old->m_used;
  ad->m_sizeAndPos = old->m_sizeAndPos;
  ad->initHeader_16(old->m_kind, OneReference, old->m_aux16);
  ad->m_scale_used = newScale | uint64_t{oldUsed} << 32;
  // The new allocation has neither a provenance tag nor a StrKeyTable yet.
  ad->m_aux16 &= ~(ArrayData::kHasProvenanceData |
                   ArrayData::kHasStrKeyTable);

  copyElmsNextUnsafe(ad, old, oldUsed);

  // We want SlowGrow to be a tail call in the opt build, but we still want to
  // check assertions in debug builds, so we check them in this helper.
  auto const check = [&](MixedArray* res) {
    assertx(res->checkInvariants());
    assertx(res->hasExactlyOneRef());
    assertx(res->kind() == old->kind());
    assertx(res->dvArray() == old->dvArray());
    assertx(res->isLegacyArray() == old->isLegacyArray());
    assertx(res->keyTypes() == old->keyTypes());
    assertx(res->m_size == old->m_size);
    assertx(res->m_pos == old->m_pos);
    assertx(res->m_used == oldUsed);
    assertx(res->m_scale == newScale);
    assertx(!res->hasStrKeyTable());
    return asMixed(tagArrProv(res, old));
  };

  if (copy) {
    MixedArrayElm* elm = ad->data();
    auto const end = elm + oldUsed;
    if (ad->keyTypes().mayIncludeCounted()) {
      // Counted keys need increfs too; handled in the out-of-line slow path.
      return check(SlowGrow(ad, *old, elm, end));
    }
    for (; elm < end; ++elm) {
      // When we convert an element to a tombstone, we set its key type to int
      // and value type to kInvalidDataType, neither of which are refcounted.
      tvIncRefGenUnsafe(elm->data);
    }
  } else {
    // Move semantics: the old array gives up ownership of its elements.
    old->setZombie();
  }

  auto const table = ad->initHash(newScale);

  auto iter = ad->data();
  auto const stop = iter + oldUsed;
  assertx(newScale == ad->m_scale);
  auto mask = MixedArray::Mask(newScale);

  if (UNLIKELY(oldUsed >= 2000)) {
    return check(InsertCheckUnbalanced(ad, table, mask, iter, stop));
  }
  for (uint32_t i = 0; iter != stop; ++iter, ++i) {
    auto& e = *iter;
    if (e.isTombstone()) continue;
    *ad->findForNewInsert(table, mask, e.probe()) = i;
  }
  return check(ad);
}
// Return an array that is safe to insert into: grow if full, copy if shared,
// otherwise this array unchanged.
ALWAYS_INLINE
MixedArray* MixedArray::prepareForInsert(bool copy) {
  assertx(checkInvariants());
  if (isFull()) return Grow(this, m_scale * 2, copy);
  if (copy) return copyMixed();
  return this;
}
// Remove all tombstones by sliding live elements down, then rebuild the hash
// table. If `renumber` is true, integer keys are also renumbered from 0 and
// m_pos is reset; otherwise m_pos is preserved (by key) across the move.
void MixedArray::compact(bool renumber /* = false */) {
  bool updatePosAfterCompact = false;
  ElmKey mPos;

  // Prep work before beginning the compaction process
  if (LIKELY(!renumber)) {
    if (m_pos == m_used) {
      // If m_pos is the canonical invalid position, we need to update it to
      // what the new canonical invalid position will be after compaction
      m_pos = m_size;
    } else if (m_pos != 0) {
      // Cache key for element associated with m_pos in order to
      // update m_pos after the compaction has been performed.
      // We only need to do this if m_pos is nonzero and is not
      // the canonical invalid position.
      updatePosAfterCompact = true;
      assertx(size_t(m_pos) < m_used);
      auto& e = data()[m_pos];
      mPos.hash = e.hash();
      mPos.skey = e.skey;
    }
  } else {
    m_pos = 0;
    // Set m_nextKI to 0 for now to prepare for renumbering integer keys
    m_nextKI = 0;
  }

  // Perform compaction
  auto elms = data();
  auto const mask = this->mask();
  auto const table = initHash(m_scale);
  for (uint32_t frPos = 0, toPos = 0; toPos < m_size; ++toPos, ++frPos) {
    // Skip runs of tombstones; m_size live elements are guaranteed to exist.
    while (elms[frPos].isTombstone()) {
      assertx(frPos + 1 < m_used);
      ++frPos;
    }
    auto& toE = elms[toPos];
    if (toPos != frPos) {
      toE = elms[frPos];
    }
    if (UNLIKELY(renumber && toE.hasIntKey())) {
      toE.setIntKey(m_nextKI, hash_int64(m_nextKI));
      m_nextKI++;
    }
    *findForNewInsert(table, mask, toE.probe()) = toPos;
  }

  if (updatePosAfterCompact) {
    // Update m_pos, now that compaction is complete
    m_pos = mPos.hash >= 0 ? ssize_t(find(mPos.skey, mPos.hash))
                           : ssize_t(find(mPos.ikey, mPos.hash));
    assertx(m_pos >= 0 && m_pos < m_size);
  }

  m_used = m_size;
  // Even if renumber is true, we'll leave string keys in the array untouched,
  // so the only keyTypes update we can do here is to unset the tombstone bit.
  mutableKeyTypes()->makeCompact();
  assertx(checkInvariants());
}
// Append `v` under the next available integer key (m_nextKI). Caller must
// ensure the array is not full and that m_nextKI has not overflowed.
void MixedArray::nextInsert(TypedValue v) {
  assertx(m_nextKI >= 0);
  assertx(!isFull());

  int64_t ki = m_nextKI;
  auto h = hash_int64(ki);
  // The check above enforces an invariant that allows us to always
  // know that m_nextKI is not present in the array, so it is safe
  // to use findForNewInsert()
  auto ei = findForNewInsert(h);
  assertx(!isValidPos(ei));
  // Allocate and initialize a new element.
  auto e = allocElm(ei);
  e->setIntKey(ki, h);
  mutableKeyTypes()->recordInt();
  m_nextKI = static_cast<uint64_t>(ki) + 1; // Update next free element.
  tvDup(v, e->data);
}
// Insert-or-assign `data` under key `k`. `move` controls whether the value's
// reference is transferred or duplicated (see setElem/initElem).
template <class K, bool move> ALWAYS_INLINE
ArrayData* MixedArray::update(K k, TypedValue data) {
  assertx(!isFull());
  auto p = insert(k);
  if (p.found) {
    // Key already present: overwrite the existing value.
    setElem(p.tv, data, move);
    return this;
  }
  // Fresh key: initialize the newly allocated element's value.
  initElem(p.tv, data, move);
  return this;
}
// Lval to an existing int-keyed element; throws if the key is absent.
arr_lval MixedArray::LvalInt(ArrayData* adIn, int64_t key) {
  auto const pos = asMixed(adIn)->find(key, hash_int64(key));
  if (!validPos(pos)) throwMissingElementException("Lval");
  // CoW before handing out a mutable pointer into the array.
  auto const ad = adIn->cowCheck() ? Copy(adIn) : adIn;
  auto const& elm = asMixed(ad)->data()[pos];
  assertx(elm.intKey() == key);
  return { ad, const_cast<TypedValue*>(elm.datatv()) };
}
// Lval to an existing string-keyed element; throws if the key is absent.
arr_lval MixedArray::LvalStr(ArrayData* adIn, StringData* key) {
  auto const pos = asMixed(adIn)->find(key, key->hash());
  if (!validPos(pos)) throwMissingElementException("Lval");
  // CoW before handing out a mutable pointer into the array.
  auto const ad = adIn->cowCheck() ? Copy(adIn) : adIn;
  auto const& elm = asMixed(ad)->data()[pos];
  assertx(elm.strKey()->same(key));
  return { ad, const_cast<TypedValue*>(elm.datatv()) };
}
1130 tv_lval MixedArray::LvalInPlace(ArrayData* ad, const Variant& k) {
1131 auto arr = asMixed(ad);
1132 assertx(!arr->isFull());
1133 assertx(!arr->cowCheck());
1134 return k.isInteger() ? arr->addLvalImpl(k.asInt64Val())
1135 : arr->addLvalImpl(k.asCStrRef().get());
// Like LvalInt, but returns a null lval instead of throwing when absent.
arr_lval MixedArray::LvalSilentInt(ArrayData* ad, int64_t k) {
  auto a = asMixed(ad);
  auto const pos = a->find(k, hash_int64(k));
  if (UNLIKELY(!validPos(pos))) return arr_lval { a, nullptr };
  // CoW only once we know we will hand out a mutable pointer.
  if (a->cowCheck()) a = a->copyMixed();
  return arr_lval { a, &a->data()[pos].data };
}
// Like LvalStr, but returns a null lval instead of throwing when absent.
arr_lval MixedArray::LvalSilentStr(ArrayData* ad, StringData* k) {
  auto a = asMixed(ad);
  auto const pos = a->find(k, k->hash());
  if (UNLIKELY(!validPos(pos))) return arr_lval { a, nullptr };
  // CoW only once we know we will hand out a mutable pointer.
  if (a->cowCheck()) a = a->copyMixed();
  return arr_lval { a, &a->data()[pos].data };
}
1154 ArrayData* MixedArray::SetInt(ArrayData* ad, int64_t k, TypedValue v) {
1155 assertx(ad->cowCheck() || ad->notCyclic(v));
1156 return asMixed(ad)->prepareForInsert(ad->cowCheck())->update(k, v);
1159 ArrayData* MixedArray::SetIntMove(ArrayData* ad, int64_t k, TypedValue v) {
1160 assertx(ad->cowCheck() || ad->notCyclic(v));
1161 auto const preped = asMixed(ad)->prepareForInsert(ad->cowCheck());
1162 auto const result = preped->update<int64_t, true>(k, v);
1163 if (ad != result && ad->decReleaseCheck()) MixedArray::Release(ad);
1164 return result;
1167 ArrayData* MixedArray::SetIntInPlace(ArrayData* ad, int64_t k, TypedValue v) {
1168 assertx(!ad->cowCheck());
1169 assertx(ad->notCyclic(v));
1170 return asMixed(ad)->prepareForInsert(false/*copy*/)->update(k, v);
1173 ArrayData* MixedArray::SetStr(ArrayData* ad, StringData* k, TypedValue v) {
1174 assertx(ad->cowCheck() || ad->notCyclic(v));
1175 return asMixed(ad)->prepareForInsert(ad->cowCheck())->update(k, v);
1178 ArrayData* MixedArray::SetStrMove(ArrayData* ad, StringData* k, TypedValue v) {
1179 assertx(ad->cowCheck() || ad->notCyclic(v));
1180 auto const preped = asMixed(ad)->prepareForInsert(ad->cowCheck());
1181 auto const result = preped->update<StringData*, true>(k, v);
1182 if (ad != result && ad->decReleaseCheck()) MixedArray::Release(ad);
1183 return result;
1186 ArrayData* MixedArray::SetStrInPlace(ArrayData* ad, StringData* k, TypedValue v) {
1187 assertx(!ad->cowCheck());
1188 assertx(ad->notCyclic(v));
1189 return asMixed(ad)->prepareForInsert(false/*copy*/)->update(k, v);
1192 ArrayData* MixedArray::AddInt(ArrayData* ad, int64_t k, TypedValue v, bool copy) {
1193 assertx(!ad->exists(k));
1194 return asMixed(ad)->prepareForInsert(copy)->addVal(k, v);
1197 ArrayData* MixedArray::AddStr(ArrayData* ad, StringData* k, TypedValue v, bool copy) {
1198 assertx(!ad->exists(k));
1199 return asMixed(ad)->prepareForInsert(copy)->addVal(k, v);
1202 //=============================================================================
1203 // Delete.
// After removing int key `removedKi`, possibly roll m_nextKI back so the key
// can be reused (PHP5 semantics). Rollback happens when the removed key was
// the most recently assigned one, and either `updateNext` is requested or the
// key is INT64_MAX (which must always be reusable, since m_nextKI can't
// advance past it).
void MixedArray::updateNextKI(int64_t removedKi, bool updateNext) {
  // Conform to PHP5 behavior
  // Hacky: don't remove the unsigned cast, else g++ can optimize away
  // the check for == 0x7fff..., since there is no signed int k
  // for which k-1 == 0x7fff...
  if (((uint64_t)removedKi == (uint64_t)m_nextKI-1) &&
      (removedKi >= 0) &&
      (removedKi == 0x7fffffffffffffffLL || updateNext)) {
    m_nextKI = removedKi;
  }
}
// Erase the element at `pos` without compacting: mark the hash slot as
// Tombstone, tombstone the element, and release its key/value references.
void MixedArray::eraseNoCompact(RemovePos pos) {
  assertx(pos.valid());
  // Mark the hash entry as "deleted".
  hashTab()[pos.probeIdx] = Tombstone;

  // If the internal pointer points to this element, advance it.
  Elm* elms = data();
  if (m_pos == pos.elmIdx) {
    m_pos = nextElm(elms, pos.elmIdx);
  }

  auto& e = elms[pos.elmIdx];
  // Save the value before tombstoning so we can decref it last (see below).
  auto const oldTV = e.data;
  if (e.hasStrKey()) {
    decRefStr(e.skey);
  }
  e.setTombstone();
  mutableKeyTypes()->recordTombstone();

  --m_size;
  assertx(m_used <= capacity());

  // Finally, decref the old value
  tvDecRefGen(oldTV);
}
// Remove an int key (no-op if absent), copying first when requested.
// Also rolls back m_nextKI when the removed key was the last one handed out.
ArrayData* MixedArray::RemoveIntImpl(ArrayData* ad, int64_t k, bool copy) {
  auto a = asMixed(ad);
  auto const pos = a->findForRemove(k, hash_int64(k));
  if (!pos.valid()) return a;
  if (copy) a = a->copyMixed();
  a->updateNextKI(k, false);
  a->erase(pos);
  return a;
}
1253 ArrayData* MixedArray::RemoveInt(ArrayData* ad, int64_t k) {
1254 return RemoveIntImpl(ad, k, ad->cowCheck());
// Remove a string key (no-op if absent), copying first when requested.
// Note: unlike RemoveIntImpl, m_nextKI is unaffected by string-key removal.
ArrayData*
MixedArray::RemoveStrImpl(ArrayData* ad, const StringData* key, bool copy) {
  auto a = asMixed(ad);
  auto const pos = a->findForRemove(key, key->hash());
  if (!pos.valid()) return a;
  if (copy) a = a->copyMixed();
  a->erase(pos);
  return a;
}
1267 ArrayData* MixedArray::RemoveStr(ArrayData* ad, const StringData* key) {
1268 return RemoveStrImpl(ad, key, ad->cowCheck());
1271 ArrayData* MixedArray::Copy(const ArrayData* ad) {
1272 return asMixed(ad)->copyMixed();
// Append `v` under the next integer key. If m_nextKI has gone negative (the
// int-key space is exhausted), PHP semantics are to warn and leave the array
// unchanged.
ArrayData* MixedArray::AppendImpl(ArrayData* ad, TypedValue v, bool copy) {
  assertx(copy || ad->notCyclic(v));
  auto a = asMixed(ad);
  if (UNLIKELY(a->m_nextKI < 0)) {
    raise_warning("Cannot add element to the array as the next element is "
                  "already occupied");
    return a;
  }
  a = a->prepareForInsert(copy);
  a->nextInsert(v);
  return a;
}
1288 ArrayData* MixedArray::Append(ArrayData* ad, TypedValue v) {
1289 return AppendImpl(ad, v, ad->cowCheck());
/*
 * Copy an array to a new array of mixed kind, with a particular
 * pre-reserved size. Tombstones are dropped during the copy, so element
 * positions may shift; m_pos is re-derived by key afterwards.
 */
NEVER_INLINE
MixedArray* MixedArray::CopyReserve(const MixedArray* src,
                                    size_t expectedSize) {
  auto const scale = computeScaleFromSize(expectedSize);
  auto const ad = reqAlloc(scale);
  auto const oldUsed = src->m_used;

  // The new array has no tombstones, no provenance tag, no StrKeyTable.
  auto const aux = MixedArrayKeys::compactPacked(src->m_aux16) &
                   ~(ArrayData::kHasProvenanceData |
                     ArrayData::kHasStrKeyTable);

  ad->m_sizeAndPos = src->m_sizeAndPos;
  ad->initHeader_16(src->m_kind, OneReference, aux);
  ad->m_scale = scale; // don't set m_used yet
  ad->m_nextKI = src->m_nextKI;

  auto const table = ad->initHash(scale);

  auto dstElm = ad->data();
  auto srcElm = src->data();
  auto const srcStop = src->data() + oldUsed;
  uint32_t i = 0;

  // We're not copying the tombstones over to the new array, so the
  // positions of the elements in the new array may be shifted. Cache
  // the key for element associated with src->m_pos so that we can
  // properly initialize ad->m_pos below.
  ElmKey mPos;
  bool updatePosAfterCopy = src->m_pos != 0 && src->m_pos < src->m_used;
  if (updatePosAfterCopy) {
    assertx(size_t(src->m_pos) < src->m_used);
    auto& e = srcElm[src->m_pos];
    mPos.hash = e.probe();
    mPos.skey = e.skey;
  }

  // Copy the elements
  auto mask = MixedArray::Mask(scale);
  for (; srcElm != srcStop; ++srcElm) {
    if (srcElm->isTombstone()) continue;
    tvDup(srcElm->data, dstElm->data);
    // Negative probe hash means an int key; non-negative means string.
    auto const hash = static_cast<int32_t>(srcElm->probe());
    if (hash < 0) {
      dstElm->setIntKey(srcElm->ikey, hash);
    } else {
      dstElm->setStrKey(srcElm->skey, hash);
    }
    *ad->findForNewInsert(table, mask, hash) = i;
    ++dstElm;
    ++i;
  }

  // Now that we have finished copying the elements, update ad->m_pos
  if (updatePosAfterCopy) {
    ad->m_pos = mPos.hash >= 0 ? ssize_t(ad->find(mPos.skey, mPos.hash))
                               : ssize_t(ad->find(mPos.ikey, mPos.hash));
    assertx(ad->m_pos >=0 && ad->m_pos < ad->m_size);
  } else {
    // If src->m_pos is equal to src's canonical invalid position, then
    // set ad->m_pos to ad's canonical invalid position.
    if (src->m_pos != 0)
      ad->m_pos = ad->m_size;
  }

  // Set new used value (we've removed any tombstones).
  assertx(i == dstElm - ad->data());
  ad->m_used = i;

  assertx(ad->kind() == src->kind());
  assertx(ad->dvArray() == src->dvArray());
  assertx(ad->m_size == src->m_size);
  assertx(ad->hasExactlyOneRef());
  assertx(ad->m_used <= oldUsed);
  assertx(ad->m_used == dstElm - ad->data());
  assertx(ad->m_scale == scale);
  assertx(ad->m_nextKI == src->m_nextKI);
  assertx(ad->checkInvariants());
  assertx(!ad->hasStrKeyTable());
  return ad;
}
// Generic (iterator-based) slow path for PlusEq: union semantics — existing
// keys in `ret` win; only missing keys are inserted from `elems`.
NEVER_INLINE
ArrayData* MixedArray::ArrayPlusEqGeneric(ArrayData* ad,
                                          MixedArray* ret,
                                          const ArrayData* elems,
                                          size_t neededSize) {
  assertx(ad->isPHPArrayType());
  assertx(elems->isPHPArrayType());
  assertx(ret->isMixedKind());

  for (ArrayIter it(elems); !it.end(); it.next()) {
    Variant key = it.first();
    auto const value = it.secondVal();

    if (UNLIKELY(ret->isFull())) {
      // Escalate lazily; only reached while still aliasing the input array.
      assertx(ret == ad);
      ret = CopyReserve(asMixed(ad), neededSize);
    }

    auto tv = key.asTypedValue();
    auto p = tv->m_type == KindOfInt64
      ? ret->insert(tv->m_data.num)
      : ret->insert(tv->m_data.pstr);
    if (!p.found) {
      // Only absent keys are added; existing values are left untouched.
      tvDup(value, p.tv);
    }
  }

  return ret;
}
// Note: the logic relating to how to grow in this function is coupled
// to PackedArray::PlusEq.
//
// PHP `+` operator on arrays: union, with keys already in `ad` winning.
ArrayData* MixedArray::PlusEq(ArrayData* ad, const ArrayData* elems) {
  assertx(asMixed(ad)->checkInvariants());

  if (!ad->isPHPArrayType()) throwInvalidAdditionException(ad);
  if (!elems->isPHPArrayType()) throwInvalidAdditionException(elems);

  auto const neededSize = ad->size() + elems->size();

  auto ret =
    ad->cowCheck() ? CopyReserve(asMixed(ad), neededSize) :
    asMixed(ad);

  if (UNLIKELY(!elems->hasVanillaMixedLayout())) {
    return ArrayPlusEqGeneric(ad, ret, elems, neededSize);
  }

  auto const rhs = asMixed(elems);

  auto srcElem = rhs->data();
  auto const srcStop = rhs->data() + rhs->m_used;
  for (; srcElem != srcStop; ++srcElem) {
    if (srcElem->isTombstone()) continue;

    if (UNLIKELY(ret->isFull())) {
      // Escalate lazily; only reached while still aliasing the input array.
      assertx(ret == ad);
      ret = CopyReserve(ret, neededSize);
    }

    auto const hash = srcElem->hash();
    if (srcElem->hasStrKey()) {
      // isValidPos(ei) means the key already exists: left operand wins.
      auto const ei = ret->findForInsertUpdate(srcElem->skey, hash);
      if (isValidPos(ei)) continue;
      auto e = ret->allocElm(ei);
      e->setStrKey(srcElem->skey, hash);
      ret->mutableKeyTypes()->recordStr(srcElem->skey);
      tvDup(srcElem->data, e->data);
    } else {
      auto const ei = ret->findForInsertUpdate(srcElem->ikey, hash);
      if (isValidPos(ei)) continue;
      auto e = ret->allocElm(ei);
      e->setIntKey(srcElem->ikey, hash);
      ret->mutableKeyTypes()->recordInt();
      tvDup(srcElem->data, e->data);
    }
  }

  return ret;
}
// Generic (iterator-based) slow path for Merge: int keys are renumbered via
// append; string keys overwrite existing values.
NEVER_INLINE
ArrayData* MixedArray::ArrayMergeGeneric(MixedArray* ret,
                                         const ArrayData* elems) {
  assertx(ret->isMixedKind());
  assertx(ret->isNotDVArray());

  for (ArrayIter it(elems); !it.end(); it.next()) {
    Variant key = it.first();
    auto const value = it.secondVal();
    if (key.asTypedValue()->m_type == KindOfInt64) {
      // Integer keys are discarded and re-assigned sequentially.
      ret->nextInsert(value);
    } else {
      StringData* sd = key.getStringData();
      auto const lval = ret->addLvalImpl(sd);
      assertx(value.m_type != KindOfUninit);
      tvSet(value, lval);
    }
  }
  return ret;
}
// PHP array_merge semantics: copy `ad`, then append int-keyed elements of
// `elems` (renumbered) and assign string-keyed ones (overwriting).
ArrayData* MixedArray::Merge(ArrayData* ad, const ArrayData* elems) {
  assertx(asMixed(ad)->checkInvariants());
  auto const ret = CopyReserve(asMixed(ad), ad->size() + elems->size());
  assertx(ret->hasExactlyOneRef());
  // Output is always a non-darray
  auto const aux = asMixed(ad)->keyTypes().packForAux();
  ret->initHeader_16(HeaderKind::Mixed, OneReference, aux);

  if (elems->hasVanillaMixedLayout()) {
    auto const rhs = asMixed(elems);
    auto srcElem = rhs->data();
    auto const srcStop = rhs->data() + rhs->m_used;

    for (; srcElem != srcStop; ++srcElem) {
      if (isTombstone(srcElem->data.m_type)) continue;

      if (srcElem->hasIntKey()) {
        // Int keys are renumbered by appending.
        ret->nextInsert(srcElem->data);
      } else {
        // String keys overwrite any existing value.
        auto const lval = ret->addLvalImpl(srcElem->skey);
        assertx(srcElem->data.m_type != KindOfUninit);
        tvSet(srcElem->data, lval);
      }
    }
    return ret;
  }

  if (UNLIKELY(!elems->hasVanillaPackedLayout())) {
    return ArrayMergeGeneric(ret, elems);
  }

  // Packed rhs: every element is int-keyed, so it's a straight append.
  assertx(PackedArray::checkInvariants(elems));
  auto src = packedData(elems);
  auto const srcStop = src + elems->m_size;
  for (; src != srcStop; ++src) {
    ret->nextInsert(*src);
  }

  return ret;
}
1519 // Note: currently caller is responsible for calling renumber after
1520 // this. Should refactor so we handle it (we already know things
1521 // about the array).
// Remove the last element, storing its value in `value` (null if empty).
// Resets the internal iterator to the first element (PHP5 behavior).
ArrayData* MixedArray::Pop(ArrayData* ad, Variant& value) {
  auto a = asMixed(ad);
  if (a->cowCheck()) a = a->copyMixed();
  auto elms = a->data();
  if (a->m_size) {
    ssize_t pos = IterLast(a);
    assertx(pos >= 0 && pos < a->m_used);
    auto& e = elms[pos];
    assertx(!isTombstone(e.data.m_type));
    value = tvAsCVarRef(&e.data);
    auto pos2 = e.hasStrKey() ? a->findForRemove(e.skey, e.hash())
                              : a->findForRemove(e.ikey, e.hash());
    assertx(pos2.elmIdx == pos);
    if (!e.hasStrKey()) {
      // Popping the highest int key makes it reusable by the next append.
      a->updateNextKI(e.ikey, true);
    }
    a->erase(pos2);
  } else {
    value = uninit_null();
  }
  // To conform to PHP5 behavior, the pop operation resets the array's
  // internal iterator.
  a->m_pos = a->nextElm(elms, -1);
  return a;
}
// Remove the first element, storing its value in `value` (null if empty),
// then renumber integer keys (PHP array_shift semantics).
ArrayData* MixedArray::Dequeue(ArrayData* adInput, Variant& value) {
  auto a = asMixed(adInput);
  if (a->cowCheck()) a = a->copyMixed();
  auto elms = a->data();
  if (a->m_size) {
    ssize_t pos = a->nextElm(elms, -1);
    assertx(pos >= 0 && pos < a->m_used);
    auto& e = elms[pos];
    assertx(!isTombstone(e.data.m_type));
    value = tvAsCVarRef(&e.data);
    auto pos2 = e.hasStrKey() ? a->findForRemove(e.skey, e.hash())
                              : a->findForRemove(e.ikey, e.hash());
    if (!e.hasStrKey()) {
      a->updateNextKI(e.ikey, false);
    }
    assertx(pos2.elmIdx == pos);
    a->erase(pos2);
  } else {
    value = uninit_null();
  }
  // Even if the array is empty, for PHP5 conformity we need call
  // compact() because it has side-effects that are important
  a->compact(true);
  return a;
}
// Insert `v` at the front under int key 0, then renumber all integer keys
// (PHP array_unshift semantics).
ArrayData* MixedArray::Prepend(ArrayData* adInput, TypedValue v) {
  auto a = asMixed(adInput)->prepareForInsert(adInput->cowCheck());

  auto elms = a->data();
  if (a->m_used > 0 && !isTombstone(elms[0].data.m_type)) {
    // Move the existing elements to make element 0 available.
    // memmove is required: source and destination ranges overlap.
    memmove(&elms[1], &elms[0], a->m_used * sizeof(*elms));
    ++a->m_used;
  }

  // Prepend.
  ++a->m_size;
  auto& e = elms[0];
  e.setIntKey(0, hash_int64(0));
  a->mutableKeyTypes()->recordInt();
  tvDup(v, e.data);

  // Renumber (also rebuilds the hash table, which the memmove invalidated).
  a->compact(true);
  return a;
}
// Convert a darray to a plain PHP array by clearing its dvarray bit.
// Already-plain arrays pass through; empty darrays become the static empty.
ArrayData* MixedArray::ToPHPArray(ArrayData* in, bool copy) {
  auto adIn = asMixed(in);
  assertx(adIn->isPHPArrayKind());
  if (adIn->isNotDVArray()) return adIn;
  assertx(adIn->isDArray());
  if (adIn->getSize() == 0) return ArrayData::Create();
  auto ad = copy ? adIn->copyMixed() : adIn;
  ad->setDVArray(ArrayData::kNotDVArray);
  // Plain arrays carry no provenance tag.
  if (RO::EvalArrayProvenance) arrprov::clearTag(ad);
  assertx(ad->checkInvariants());
  return ad;
}
1611 bool MixedArray::hasIntishKeys() const {
1612 auto const elms = data();
1613 for (uint32_t i = 0, limit = m_used; i < limit; ++i) {
1614 auto const& e = elms[i];
1615 if (e.isTombstone()) continue;
1616 if (e.hasStrKey()) {
1617 int64_t ignore;
1618 if (e.skey->isStrictlyInteger(ignore)) {
1619 return true;
1623 return false;
/*
 * Copy this from adIn, intish casting all the intish string keys in
 * accordance with the value of the intishCast template parameter
 * (IntishCast::Cast converts "42"-style keys to int 42; None keeps them).
 */
template <IntishCast IC>
ALWAYS_INLINE
ArrayData* MixedArray::copyWithIntishCast(MixedArray* adIn,
                                          bool asDArray /* = false */) {
  auto size = adIn->size();
  auto const elms = adIn->data();
  auto out =
    asMixed(asDArray ? MakeReserveDArray(size) : MakeReserveMixed(size));
  for (uint32_t i = 0, limit = adIn->m_used; i < limit; ++i) {
    auto const& e = elms[i];
    if (e.isTombstone()) continue;
    if (e.hasIntKey()) {
      out->update(e.ikey, e.data);
    } else {
      // Cast intish string keys per IC; note casts can collide with existing
      // int keys, in which case update() overwrites.
      if (auto const intish = tryIntishCast<IC>(e.skey)) {
        out->update(*intish, e.data);
      } else {
        out->update(e.skey, e.data);
      }
    }
  }

  assertx(out->isMixedKind());
  assertx(out->checkInvariants());
  assertx(out->hasExactlyOneRef());
  return out;
}
// Convert to a plain PHP array, casting intish string keys to ints.
ArrayData* MixedArray::ToPHPArrayIntishCast(ArrayData* in, bool copy) {
  // the input array should already be a PHP-array so we just need to
  // clear DV array bits and cast any intish strings that may appear
  auto adIn = asMixed(in);
  assertx(adIn->isPHPArrayKind());
  if (adIn->size() == 0) return ArrayData::Create();

  if (copy || adIn->hasIntishKeys()) {
    return copyWithIntishCast<IntishCast::Cast>(adIn);
  } else {
    // we don't need to CoW and there were no intish keys, so we can just
    // update dv-arrayness in place and get on with our day
    adIn->setDVArray(ArrayData::kNotDVArray);
    return adIn;
  }
}
// Convert a dict to a PHP array (or darray, if toDArray). Transforms in
// place when no copy is forced and no key conversion is needed.
template <IntishCast IC>
ALWAYS_INLINE
ArrayData* MixedArray::FromDictImpl(ArrayData* adIn,
                                    bool copy,
                                    bool toDArray) {
  assertx(adIn->isDictKind());
  auto a = asMixed(adIn);

  auto const size = a->size();

  if (!size) return toDArray ? ArrayData::CreateDArray() : ArrayData::Create();

  // If we don't necessarily have to make a copy, first scan the dict looking
  // for any int-like string keys. If we don't find any, we can transform the
  // dict in place.
  if (!copy && !a->hasIntishKeys()) {
    // No int-like string keys, so transform in place.
    a->m_kind = HeaderKind::Mixed;
    if (toDArray) a->setDVArray(ArrayData::kDArray);
    a->setLegacyArray(false);
    if (RO::EvalArrayProvenance) arrprov::reassignTag(a);
    assertx(a->checkInvariants());
    return a;
  } else {
    // Either we need to make a copy anyways, or we don't, but there are
    // int-like string keys. In either case, create the array from scratch,
    // inserting each element one-by-one, doing key conversion as necessary.
    return copyWithIntishCast<IC>(a, toDArray);
  }
}
1706 ArrayData* MixedArray::ToPHPArrayDict(ArrayData* adIn, bool copy) {
1707 auto out = FromDictImpl<IntishCast::None>(adIn, copy, false);
1708 assertx(out->isNotDVArray());
1709 assertx(!out->isLegacyArray());
1710 return out;
1713 ArrayData* MixedArray::ToPHPArrayIntishCastDict(ArrayData* adIn, bool copy) {
1714 auto out = FromDictImpl<IntishCast::Cast>(adIn, copy, false);
1715 assertx(out->isNotDVArray());
1716 assertx(!out->isLegacyArray());
1717 return out;
// Convert a mixed PHP array to a darray (or to a dict when HackArrDVArrs
// makes darrays behave as dicts).
ArrayData* MixedArray::ToDArray(ArrayData* in, bool copy) {
  auto a = asMixed(in);
  assertx(a->isMixedKind());
  if (RuntimeOption::EvalHackArrDVArrs) return ToDict(in, copy);
  if (a->isDArray()) return a;
  if (a->getSize() == 0) return ArrayData::CreateDArray();
  auto out = copy ? a->copyMixed() : a;
  out->setDVArray(ArrayData::kDArray);
  out->setLegacyArray(false);
  assertx(out->checkInvariants());
  if (RO::EvalArrayProvenance) arrprov::reassignTag(out);
  return out;
}
1734 ArrayData* MixedArray::ToDArrayDict(ArrayData* in, bool copy) {
1735 if (RuntimeOption::EvalHackArrDVArrs) return in;
1736 auto out = FromDictImpl<IntishCast::None>(in, copy, true);
1737 assertx(out->isDArray());
1738 assertx(!out->isLegacyArray());
1739 return out;
// Mutate an unshared mixed array into a dict by flipping its header kind
// and clearing its dvarray bit.
MixedArray* MixedArray::ToDictInPlace(ArrayData* ad) {
  auto a = asMixed(ad);
  assertx(a->isMixedKind());
  assertx(!a->cowCheck());
  a->m_kind = HeaderKind::Dict;
  a->setDVArray(ArrayData::kNotDVArray);
  if (RO::EvalArrayProvenance) arrprov::reassignTag(a);
  return a;
}
// Convert a mixed PHP array to a dict, copying when requested or converting
// in place otherwise.
ArrayData* MixedArray::ToDict(ArrayData* ad, bool copy) {
  auto a = asMixed(ad);
  assertx(a->isMixedKind());

  // Only an array with a pristine m_nextKI can use the shared static empty
  // dict; a nonzero m_nextKI is observable state that must be kept.
  if (a->empty() && a->m_nextKI == 0) return ArrayData::CreateDict();

  if (copy) {
    auto const out = CopyMixed(
      *a, AllocMode::Request,
      HeaderKind::Dict, ArrayData::kNotDVArray
    );
    return tagArrProv(out);
  } else {
    return tagArrProv(ToDictInPlace(a));
  }
}
1769 ArrayData* MixedArray::ToDictDict(ArrayData* ad, bool) {
1770 assertx(asMixed(ad)->checkInvariants());
1771 assertx(ad->isDictKind());
1772 return ad;
1775 ArrayData* MixedArray::Renumber(ArrayData* adIn) {
1776 auto const ad = adIn->cowCheck() ? Copy(adIn) : adIn;
1777 asMixed(ad)->compact(true);
1778 return ad;
// Prepare the array for use as an eval scalar: compact away tombstones,
// intern all string keys, and make every value a scalar.
void MixedArray::OnSetEvalScalar(ArrayData* ad) {
  auto a = asMixed(ad);
  if (UNLIKELY(a->m_size < a->m_used)) {
    // m_size < m_used implies tombstones exist; remove them first so the
    // loop below can assume every slot is live.
    a->compact(/*renumber=*/false);
  }
  a->mutableKeyTypes()->makeStatic();
  auto elm = a->data();
  for (auto const end = elm + a->m_used; elm < end; ++elm) {
    assertx(!elm->isTombstone());
    auto key = elm->skey;
    if (elm->hasStrKey() && !key->isStatic()) {
      // Swap the counted key for its static (interned) twin.
      elm->skey = makeStaticString(key);
      decRefStr(key);
    }
    tvAsVariant(&elm->data).setEvalScalar();
  }
}
1799 //////////////////////////////////////////////////////////////////////
// Shared implementation of dict ==/===. Strict comparison requires identical
// key order (a synchronized two-pointer walk that skips tombstones); the
// non-strict form only requires that every key of ad1 exists in ad2 with an
// equal (==) value — size equality makes that check symmetric.
ALWAYS_INLINE
bool MixedArray::DictEqualHelper(const ArrayData* ad1, const ArrayData* ad2,
                                 bool strict) {
  assertx(asMixed(ad1)->checkInvariants());
  assertx(asMixed(ad2)->checkInvariants());
  assertx(ad1->isDictKind());
  assertx(ad2->isDictKind());

  if (ad1 == ad2) return true;
  if (ad1->size() != ad2->size()) return false;

  // Prevent circular referenced objects/arrays or deep ones.
  check_recursion_error();

  if (strict) {
    auto const arr1 = asMixed(ad1);
    auto const arr2 = asMixed(ad2);
    auto elm1 = arr1->data();
    auto elm2 = arr2->data();
    auto i1 = arr1->m_used;
    auto i2 = arr2->m_used;
    while (i1 > 0 && i2 > 0) {
      // Skip tombstones independently on each side.
      if (UNLIKELY(elm1->isTombstone())) {
        ++elm1;
        --i1;
        continue;
      }
      if (UNLIKELY(elm2->isTombstone())) {
        ++elm2;
        --i2;
        continue;
      }
      // Keys must match in kind and value, in order.
      if (elm1->hasIntKey()) {
        if (!elm2->hasIntKey()) return false;
        if (elm1->ikey != elm2->ikey) return false;
      } else {
        assertx(elm1->hasStrKey());
        if (!elm2->hasStrKey()) return false;
        if (!same(elm1->skey, elm2->skey)) return false;
      }
      if (!tvSame(*tvAssertPlausible(&elm1->data), *tvAssertPlausible(&elm2->data))) {
        return false;
      }
      ++elm1;
      ++elm2;
      --i1;
      --i2;
    }

    // One side is exhausted; the other may only contain tombstones.
    if (!i1) {
      while (i2 > 0) {
        if (UNLIKELY(!elm2->isTombstone())) return false;
        ++elm2;
        --i2;
      }
    } else {
      assertx(!i2);
      while (i1 > 0) {
        if (UNLIKELY(!elm1->isTombstone())) return false;
        ++elm1;
        --i1;
      }
    }
  } else {
    // Non-strict: look up each of ad1's keys in ad2 and compare with ==.
    auto const arr1 = asMixed(ad1);
    auto elm = arr1->data();
    for (auto i = arr1->m_used; i--; elm++) {
      if (UNLIKELY(elm->isTombstone())) continue;
      auto const other_tv = [&] {
        if (elm->hasIntKey()) {
          return NvGetIntDict(ad2, elm->ikey);
        } else {
          assertx(elm->hasStrKey());
          return NvGetStrDict(ad2, elm->skey);
        }
      }();
      if (!other_tv.is_init() ||
          !tvEqual(tvAssertPlausible(elm->data),
                   tvAssertPlausible(other_tv))) {
        return false;
      }
    }
  }

  return true;
}
1888 bool MixedArray::DictEqual(const ArrayData* ad1, const ArrayData* ad2) {
1889 return DictEqualHelper(ad1, ad2, false);
1892 bool MixedArray::DictNotEqual(const ArrayData* ad1, const ArrayData* ad2) {
1893 return !DictEqualHelper(ad1, ad2, false);
1896 bool MixedArray::DictSame(const ArrayData* ad1, const ArrayData* ad2) {
1897 return DictEqualHelper(ad1, ad2, true);
1900 bool MixedArray::DictNotSame(const ArrayData* ad1, const ArrayData* ad2) {
1901 return !DictEqualHelper(ad1, ad2, true);
1904 //////////////////////////////////////////////////////////////////////