2 +----------------------------------------------------------------------+
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
16 #include "hphp/runtime/vm/iter.h"
20 #include <folly/Likely.h>
22 #include "hphp/runtime/base/array-data.h"
23 #include "hphp/runtime/base/collections.h"
24 #include "hphp/runtime/base/mixed-array.h"
25 #include "hphp/runtime/base/packed-array.h"
26 #include "hphp/runtime/base/builtin-functions.h"
27 #include "hphp/runtime/base/tv-refcount.h"
29 #include "hphp/runtime/base/mixed-array-defs.h"
30 #include "hphp/runtime/base/packed-array-defs.h"
31 #include "hphp/runtime/vm/vm-regs.h"
33 #include "hphp/runtime/base/bespoke/monotype-vec.h"
37 TRACE_SET_MOD(runtime
);
39 //////////////////////////////////////////////////////////////////////
43 //////////////////////////////////////////////////////////////////////
45 // We don't want JIT iterators to take up too much space on the stack.
46 static_assert(sizeof(IterImpl
) == 32, "");
47 static_assert(sizeof(Iter
) == 32, "");
56 bool isMonotypeVec(const BespokeArray
* bad
) {
57 return bespoke::isMonotypeVecLayout(bad
->layoutIndex());
60 //////////////////////////////////////////////////////////////////////
64 //////////////////////////////////////////////////////////////////////
66 std::string
show(IterSpecialization type
) {
67 if (!type
.specialized
) return "Unspecialized";
68 auto const base_const
= type
.base_const
? "BaseConst" : "BaseMutable";
69 auto const base_type
= show((IterSpecialization::BaseType
)type
.base_type
);
70 if (type
.output_key
) {
71 auto const key_types
= show((IterSpecialization::KeyTypes
)type
.key_types
);
72 return folly::format("{}::{}::{}", base_type
, base_const
, key_types
).str();
74 return folly::format("{}::{}", base_type
, base_const
).str();
78 std::string
show(IterSpecialization::BaseType type
) {
80 case IterSpecialization::Packed
: return "Packed";
81 case IterSpecialization::Mixed
: return "Mixed";
82 case IterSpecialization::Vec
: return "Vec";
83 case IterSpecialization::Dict
: return "Dict";
84 case IterSpecialization::kNumBaseTypes
: always_assert(false);
89 std::string
show(IterSpecialization::KeyTypes type
) {
91 case IterSpecialization::ArrayKey
: return "ArrayKey";
92 case IterSpecialization::Int
: return "Int";
93 case IterSpecialization::Str
: return "Str";
94 case IterSpecialization::StaticStr
: return "StaticStr";
95 case IterSpecialization::kNumKeyTypes
: always_assert(false);
100 //////////////////////////////////////////////////////////////////////
102 IterImpl::IterImpl(const ArrayData
* data
) {
106 IterImpl::IterImpl(ObjectData
* obj
) {
110 IterImpl::IterImpl(ObjectData
* obj
, NoInc
) {
114 bool IterImpl::checkInvariants(const ArrayData
* ad
/* = nullptr */) const {
115 TRACE(3, "IterImpl::checkInvariants: %lx %lx %lx %lx (ad = %lx)\n",
116 uintptr_t(m_data
), size_t(m_typeFields
), m_pos
, m_end
, uintptr_t(ad
));
118 // We can't make many assertions for iterators over objects.
119 if (!hasArrayData()) {
120 assertx(ad
== nullptr);
121 assertx(m_nextHelperIdx
== IterNextIndex::Object
);
125 // Exactly one of the ArrayData pointers {ad, m_data} should be nullptr.
126 assertx((ad
== nullptr) != (m_data
== nullptr));
127 DEBUG_ONLY
auto const arr
= ad
? ad
: m_data
;
129 // Check that array's vtable index is compatible with the array's layout.
130 if (m_nextHelperIdx
== IterNextIndex::ArrayPacked
||
131 m_nextHelperIdx
== IterNextIndex::ArrayPackedPointer
) {
132 assertx(arr
->hasVanillaPackedLayout());
133 } else if (m_nextHelperIdx
== IterNextIndex::ArrayMixed
) {
134 assertx(arr
->hasVanillaMixedLayout());
135 } else if (m_nextHelperIdx
== IterNextIndex::ArrayMixedPointer
) {
136 assertx(arr
->hasVanillaMixedLayout());
137 assertx(arr
->size() == MixedArray::asMixed(arr
)->iterLimit());
138 } else if (m_nextHelperIdx
== IterNextIndex::MonotypeVec
) {
139 assertx(!arr
->isVanilla());
140 assertx(isMonotypeVec(BespokeArray::asBespoke(arr
)));
142 // We'd like to assert the converse, too: a packed or mixed array should
143 // a next helper that makes use of its layout. However, this condition
144 // can fail for local iters: e.g. an APC array base can be promoted to a
145 // packed or mixed array, with iteration still using array-generic code.
146 // We check it for non-local iters.
147 assertx(m_nextHelperIdx
== IterNextIndex::Array
);
148 if (m_data
!= nullptr) {
149 assertx(!m_data
->hasVanillaPackedLayout());
150 assertx(!m_data
->hasVanillaMixedLayout());
154 // Check the consistency of the pos and end fields.
155 if (m_nextHelperIdx
== IterNextIndex::ArrayPackedPointer
) {
156 assertx(m_packed_elm
< m_packed_end
);
157 assertx(m_packed_end
== packedData(arr
) + arr
->size());
158 } else if (m_nextHelperIdx
== IterNextIndex::ArrayMixedPointer
) {
159 assertx(m_mixed_elm
< m_mixed_end
);
160 assertx(m_mixed_end
== MixedArray::asMixed(arr
)->data() + arr
->size());
162 assertx(m_pos
< m_end
);
163 assertx(m_end
== arr
->iter_end());
168 template <bool incRef
/* = true */>
169 void IterImpl::arrInit(const ArrayData
* arr
) {
171 if (arr
&& incRef
) arr
->incRefCount();
174 template <bool incRef
>
175 void IterImpl::objInit(ObjectData
* obj
) {
178 if (LIKELY(obj
->isCollection())) {
179 if (auto ad
= collections::asArray(obj
)) {
181 if (!incRef
) decRefObj(obj
);
184 assertx(obj
->collectionType() == CollectionType::Pair
);
185 auto arr
= collections::toArray(obj
);
186 if (!incRef
) decRefObj(obj
);
187 setArrayData(arr
.detach());
192 assertx(obj
->instanceof(SystemLib::s_HH_IteratorClass
));
194 if (incRef
) obj
->incRefCount();
196 obj
->o_invoke_few_args(s_rewind
, 0);
198 // Regardless of whether the incRef template parameter is true or false,
199 // at this point, this IterImpl "owns" a reference to the object and is
200 // responsible for dec-ref-ing it when the iterator is destroyed.
202 // Normally, the destructor takes care of this case, but we'll never invoke
203 // it if the exception is thrown before the constructor finishes, so we
204 // must manually dec-ref the object here.
211 void IterImpl::kill() {
213 // IterImpl is not POD, so we memset each POD field separately.
214 memset(&m_data
, kIterTrashFill
, sizeof(m_data
));
215 memset(&m_typeFields
, kIterTrashFill
, sizeof(m_typeFields
));
216 memset(&m_pos
, kIterTrashFill
, sizeof(m_pos
));
217 memset(&m_end
, kIterTrashFill
, sizeof(m_end
));
220 IterImpl::~IterImpl() {
221 if (hasArrayData()) {
222 const ArrayData
* ad
= getArrayData();
223 if (ad
) decRefArr(const_cast<ArrayData
*>(ad
));
227 ObjectData
* obj
= getObject();
233 bool IterImpl::endHelper() const {
234 auto obj
= getObject();
235 return !obj
->o_invoke_few_args(s_valid
, 0).toBoolean();
238 void IterImpl::nextHelper() {
239 auto obj
= getObject();
240 obj
->o_invoke_few_args(s_next
, 0);
243 Variant
IterImpl::firstHelper() {
244 auto obj
= getObject();
245 return obj
->o_invoke_few_args(s_key
, 0);
248 Variant
IterImpl::second() {
249 if (LIKELY(hasArrayData())) return getArrayData()->getValue(m_pos
);
250 return getObject()->o_invoke_few_args(s_current
, 0);
253 TypedValue
IterImpl::secondVal() const {
254 if (LIKELY(hasArrayData())) return getArrayData()->nvGetVal(m_pos
);
255 raise_fatal_error("taking reference on iterator objects");
258 //////////////////////////////////////////////////////////////////////
260 bool Iter::init(TypedValue
* base
) {
261 // Get easy cases out of the way. Class methods promote to arrays and both
262 // of them are just an IterImpl contructor. end() never throws for arrays.
263 if (isClsMethType(base
->m_type
)) {
264 new (&m_iter
) IterImpl(tvAsVariant(base
).toArray().get());
265 return !m_iter
.end();
267 if (isArrayLikeType(base
->m_type
)) {
268 new (&m_iter
) IterImpl(base
->m_data
.parr
);
269 return !m_iter
.end();
272 // Get more easy cases out of the way: non-objects are not iterable.
273 // For these cases, we warn and branch to done.
274 if (!isObjectType(base
->m_type
)) {
275 raise_warning("Invalid argument supplied for foreach()");
279 if (base
->m_data
.pobj
->isCollection()) {
280 new (&m_iter
) IterImpl(base
->m_data
.pobj
);
283 Object obj
= base
->m_data
.pobj
->iterableObject(isIterator
);
285 new (&m_iter
) IterImpl(obj
.detach(), IterImpl::noInc
);
287 Class
* ctx
= arGetContextClass(vmfp());
288 auto ctxStr
= ctx
? ctx
->nameStr() : StrNR();
289 Array
iterArray(obj
->o_toIterArray(ctxStr
));
290 ArrayData
* ad
= iterArray
.get();
291 new (&m_iter
) IterImpl(ad
);
295 // If the object was empty, or if end throws, dec-ref it and branch to done.
309 // The emitter should never generate bytecode where the iterator is at the
310 // end before IterNext is executed. checkInvariants tests this invariant.
311 assertx(m_iter
.checkInvariants());
312 assertx(m_iter
.getHelperIndex() != IterNextIndex::ArrayPackedPointer
);
313 assertx(m_iter
.getHelperIndex() != IterNextIndex::ArrayMixedPointer
);
315 // If the iterator is now at the end, dec-ref the base. (For local iterators,
316 // m_data is null, so the destructor won't free it.)
325 // We can't make any assertions about other iterator fields here, because we
326 // want memory effects to know that IterFree only reads the base (so we can
327 // eliminate storing the rest of the fields in most cases).
331 std::string
Iter::toString() const {
332 return m_iter
.hasArrayData() ? "I:Array" : "I:Iterator";
336 * For the specialized iterator helpers below, we need to peek into the raw
337 * IterImpl to manipulate its fields. We could make all of these methods
338 * friends of the Iter struct, but that involves listing them in the class.
340 * To give us more flexibility to modify these helpers, we instead create this
341 * method that exposes the underlying IterImpl by casting.
343 IterImpl
* unwrap(Iter
* iter
) { return &iter
->m_iter
; }
348 * iter_value_cell* will store a copy of the current value at the address
349 * given by 'out'. iter_value_cell* will increment the refcount of the current
350 * value if appropriate.
353 template <bool typeArray
>
354 static inline void iter_value_cell_local_impl(Iter
* iter
, TypedValue
* out
) {
355 auto const oldVal
= *out
;
356 TRACE(2, "%s: typeArray: %s, I %p, out %p\n",
357 __func__
, typeArray
? "true" : "false", iter
, out
);
358 auto& arrIter
= *unwrap(iter
);
359 assertx(typeArray
== arrIter
.hasArrayData());
361 tvDup(arrIter
.nvSecond(), *out
);
363 Variant val
= arrIter
.second();
364 tvDup(*val
.asTypedValue(), *out
);
369 template <bool typeArray
>
370 static inline void iter_key_cell_local_impl(Iter
* iter
, TypedValue
* out
) {
371 auto const oldVal
= *out
;
372 TRACE(2, "%s: I %p, out %p\n", __func__
, iter
, out
);
373 auto& arrIter
= *unwrap(iter
);
374 assertx(typeArray
== arrIter
.hasArrayData());
376 tvDup(arrIter
.nvFirst(), *out
);
378 Variant key
= arrIter
.first();
379 tvDup(*key
.asTypedValue(), *out
);
384 inline void liter_value_cell_local_impl(Iter
* iter
,
386 const ArrayData
* ad
) {
387 auto const oldVal
= *out
;
388 auto const& arrIter
= *unwrap(iter
);
389 assertx(arrIter
.hasArrayData());
390 assertx(!arrIter
.getArrayData());
391 tvDup(arrIter
.nvSecondLocal(ad
), *out
);
395 inline void liter_key_cell_local_impl(Iter
* iter
,
397 const ArrayData
* ad
) {
398 auto const oldVal
= *out
;
399 auto const& arrIter
= *unwrap(iter
);
400 assertx(arrIter
.hasArrayData());
401 assertx(!arrIter
.getArrayData());
402 tvDup(arrIter
.nvFirstLocal(ad
), *out
);
406 NEVER_INLINE
void clearOutputLocal(TypedValue
* local
) {
407 tvDecRefCountable(local
);
408 local
->m_type
= KindOfNull
;
411 // These release methods are called by the iter_next_* implementations below
412 // that know the particular layout of the array they're iterating over. We pull
413 // them out into a separate method here so that a) we can inline the destructor
414 // for each array type into these methods and b) to make the call a tail call.
416 // These methods all return false (= 0) to signify that iteration is over.
418 NEVER_INLINE
int64_t iter_next_free_packed(Iter
* iter
, ArrayData
* arr
) {
419 PackedArray::Release(arr
);
424 NEVER_INLINE
int64_t iter_next_free_mixed(Iter
* iter
, ArrayData
* arr
) {
425 MixedArray::Release(arr
);
430 NEVER_INLINE
int64_t iter_next_free_monotype_vec(
431 Iter
* iter
, bespoke::MonotypeVec
* mad
) {
432 bespoke::MonotypeVec::Release(mad
);
440 * new_iter_array creates an iterator for the specified array iff the
441 * array is not empty. If new_iter_array creates an iterator, it does
442 * not increment the refcount of the specified array. If
443 * new_iter_array does not create an iterator, it decRefs the array.
445 template <bool Local
>
447 int64_t new_iter_array_cold(Iter
* dest
, ArrayData
* arr
, TypedValue
* valOut
,
448 TypedValue
* keyOut
) {
449 TRACE(2, "%s: I %p, arr %p\n", __func__
, dest
, arr
);
451 // We are transferring ownership of the array to the iterator, therefore
452 // we do not need to adjust the refcount.
453 auto const iter
= unwrap(dest
);
455 new (iter
) IterImpl(arr
, IterImpl::local
);
456 liter_value_cell_local_impl(dest
, valOut
, arr
);
458 liter_key_cell_local_impl(dest
, keyOut
, arr
);
461 new (iter
) IterImpl(arr
, IterImpl::noInc
);
462 iter_value_cell_local_impl
<true>(dest
, valOut
);
464 iter_key_cell_local_impl
<true>(dest
, keyOut
);
469 // We did not transfer ownership of the array to an iterator, so we need
470 // to decRef the array.
471 if (!Local
) decRefArr(arr
);
475 template <IterTypeOp Type
>
476 int64_t new_iter_array(Iter
* dest
, ArrayData
* ad
, TypedValue
* valOut
) {
477 TRACE(2, "%s: I %p, ad %p\n", __func__
, dest
, ad
);
478 auto constexpr BaseConst
= Type
!= IterTypeOp::LocalBaseMutable
;
479 auto constexpr Local
= Type
!= IterTypeOp::NonLocal
;
481 auto const size
= ad
->size();
482 if (UNLIKELY(size
== 0)) {
483 if (!Local
) decRefArr(ad
);
487 if (UNLIKELY(isRefcountedType(valOut
->type()))) {
488 clearOutputLocal(valOut
);
491 // We are transferring ownership of the array to the iterator, therefore
492 // we do not need to adjust the refcount.
493 auto& aiter
= *unwrap(dest
);
494 aiter
.m_data
= Local
? nullptr : ad
;
496 if (BaseConst
&& !ad
->isVanilla()) {
497 auto const bad
= BespokeArray::asBespoke(ad
);
498 if (isMonotypeVec(bad
)) {
501 aiter
.setArrayNext(IterNextIndex::MonotypeVec
);
502 auto const mad
= bespoke::MonotypeVec::As(ad
);
503 tvDup(bespoke::MonotypeVec::GetPosVal(mad
, 0), *valOut
);
508 if (LIKELY(ad
->hasVanillaPackedLayout())) {
510 aiter
.m_packed_elm
= packedData(ad
);
511 aiter
.m_packed_end
= aiter
.m_packed_elm
+ size
;
512 aiter
.setArrayNext(IterNextIndex::ArrayPackedPointer
);
516 aiter
.setArrayNext(IterNextIndex::ArrayPacked
);
518 tvDup(PackedArray::GetPosVal(ad
, 0), *valOut
);
522 if (LIKELY(ad
->hasVanillaMixedLayout())) {
523 auto const mixed
= MixedArray::asMixed(ad
);
524 if (BaseConst
&& LIKELY(mixed
->iterLimit() == size
)) {
525 aiter
.m_mixed_elm
= mixed
->data();
526 aiter
.m_mixed_end
= aiter
.m_mixed_elm
+ size
;
527 aiter
.setArrayNext(IterNextIndex::ArrayMixedPointer
);
528 mixed
->getArrayElm(0, valOut
);
531 aiter
.m_pos
= mixed
->getIterBeginNotEmpty();
532 aiter
.m_end
= mixed
->iterLimit();
533 aiter
.setArrayNext(IterNextIndex::ArrayMixed
);
534 mixed
->getArrayElm(aiter
.m_pos
, valOut
);
538 return new_iter_array_cold
<Local
>(dest
, ad
, valOut
, nullptr);
541 IterInitArr
new_iter_array_helper(IterTypeOp type
) {
543 case IterTypeOp::LocalBaseConst
:
544 return new_iter_array
<IterTypeOp::LocalBaseConst
>;
545 case IterTypeOp::LocalBaseMutable
:
546 return new_iter_array
<IterTypeOp::LocalBaseMutable
>;
547 case IterTypeOp::NonLocal
:
548 return new_iter_array
<IterTypeOp::NonLocal
>;
550 always_assert(false);
553 template<IterTypeOp Type
>
554 int64_t new_iter_array_key(Iter
* dest
,
557 TypedValue
* keyOut
) {
558 TRACE(2, "%s: I %p, ad %p\n", __func__
, dest
, ad
);
559 auto constexpr BaseConst
= Type
!= IterTypeOp::LocalBaseMutable
;
560 auto constexpr Local
= Type
!= IterTypeOp::NonLocal
;
562 auto const size
= ad
->size();
563 if (UNLIKELY(size
== 0)) {
564 if (!Local
) decRefArr(ad
);
568 if (UNLIKELY(isRefcountedType(valOut
->type()))) {
569 clearOutputLocal(valOut
);
571 if (UNLIKELY(isRefcountedType(keyOut
->type()))) {
572 clearOutputLocal(keyOut
);
575 // We are transferring ownership of the array to the iterator, therefore
576 // we do not need to adjust the refcount.
577 auto& aiter
= *unwrap(dest
);
578 aiter
.m_data
= Local
? nullptr : ad
;
580 if (BaseConst
&& !ad
->isVanilla()) {
581 auto const bad
= BespokeArray::asBespoke(ad
);
582 if (isMonotypeVec(bad
)) {
585 aiter
.setArrayNext(IterNextIndex::MonotypeVec
);
586 auto const mad
= bespoke::MonotypeVec::As(ad
);
587 tvDup(bespoke::MonotypeVec::GetPosVal(mad
, 0), *valOut
);
588 tvCopy(make_tv
<KindOfInt64
>(0), *keyOut
);
593 if (ad
->hasVanillaPackedLayout()) {
596 aiter
.setArrayNext(IterNextIndex::ArrayPacked
);
597 tvDup(PackedArray::GetPosVal(ad
, 0), *valOut
);
598 tvCopy(make_tv
<KindOfInt64
>(0), *keyOut
);
602 if (ad
->hasVanillaMixedLayout()) {
603 auto const mixed
= MixedArray::asMixed(ad
);
604 if (BaseConst
&& LIKELY(mixed
->iterLimit() == size
)) {
605 aiter
.m_mixed_elm
= mixed
->data();
606 aiter
.m_mixed_end
= aiter
.m_mixed_elm
+ size
;
607 aiter
.setArrayNext(IterNextIndex::ArrayMixedPointer
);
608 mixed
->getArrayElm(0, valOut
, keyOut
);
611 aiter
.m_pos
= mixed
->getIterBeginNotEmpty();
612 aiter
.m_end
= mixed
->iterLimit();
613 aiter
.setArrayNext(IterNextIndex::ArrayMixed
);
614 mixed
->getArrayElm(aiter
.m_pos
, valOut
, keyOut
);
618 return new_iter_array_cold
<Local
>(dest
, ad
, valOut
, keyOut
);
621 IterInitArrKey
new_iter_array_key_helper(IterTypeOp type
) {
623 case IterTypeOp::LocalBaseConst
:
624 return new_iter_array_key
<IterTypeOp::LocalBaseConst
>;
625 case IterTypeOp::LocalBaseMutable
:
626 return new_iter_array_key
<IterTypeOp::LocalBaseMutable
>;
627 case IterTypeOp::NonLocal
:
628 return new_iter_array_key
<IterTypeOp::NonLocal
>;
630 always_assert(false);
634 FreeObj() : m_obj(0) {}
635 void operator=(ObjectData
* obj
) { m_obj
= obj
; }
636 ~FreeObj() { if (UNLIKELY(m_obj
!= nullptr)) decRefObj(m_obj
); }
642 * new_iter_object_any creates an iterator for the specified object if the
643 * object is iterable and it is non-empty (has properties). If
644 * new_iter_object_any creates an iterator, it does not increment the refcount
645 * of the specified object. If new_iter_object does not create an iterator,
646 * it decRefs the object.
648 * If exceptions are thrown, new_iter_object_any takes care of decRefing the
651 static int64_t new_iter_object_any(Iter
* dest
, ObjectData
* obj
, Class
* ctx
,
652 TypedValue
* valOut
, TypedValue
* keyOut
) {
653 auto const iter
= unwrap(dest
);
654 auto object_base
= true;
657 if (obj
->isIterator()) {
658 TRACE(2, "%s: I %p, obj %p, ctx %p, collection or Iterator\n",
659 __func__
, dest
, obj
, ctx
);
660 new (iter
) IterImpl(obj
, IterImpl::noInc
);
662 bool isIteratorAggregate
;
664 * We are not going to transfer ownership of obj to the iterator,
665 * so arrange to decRef it later. The actual decRef has to happen
666 * after the call to arr().end() below, because both can have visible side
667 * effects (calls to valid()). Similarly it has to happen before the
668 * iter_*_cell_local_impl calls below, because they call current() and
669 * key() (hence the explicit scope around FreeObj fo;)
673 Object itObj
= obj
->iterableObject(isIteratorAggregate
, false);
674 if (isIteratorAggregate
) {
675 TRACE(2, "%s: I %p, obj %p, ctx %p, IteratorAggregate\n",
676 __func__
, dest
, obj
, ctx
);
677 new (iter
) IterImpl(itObj
.detach(), IterImpl::noInc
);
679 TRACE(2, "%s: I %p, obj %p, ctx %p, iterate as array\n",
680 __func__
, dest
, obj
, ctx
);
681 auto ctxStr
= ctx
? ctx
->nameStr() : StrNR();
682 Array
iterArray(itObj
->o_toIterArray(ctxStr
));
683 ArrayData
* ad
= iterArray
.get();
684 new (iter
) IterImpl(ad
);
690 // Iterator was empty; call the destructor on the iterator we just
702 iter_value_cell_local_impl
<false>(dest
, valOut
);
704 iter_key_cell_local_impl
<false>(dest
, keyOut
);
707 iter_value_cell_local_impl
<true>(dest
, valOut
);
709 iter_key_cell_local_impl
<true>(dest
, keyOut
);
715 int64_t new_iter_object(Iter
* dest
, ObjectData
* obj
, Class
* ctx
,
716 TypedValue
* valOut
, TypedValue
* keyOut
) {
717 TRACE(2, "%s: I %p, obj %p, ctx %p, collection or Iterator or Object\n",
718 __func__
, dest
, obj
, ctx
);
719 if (UNLIKELY(!obj
->isCollection())) {
720 return new_iter_object_any(dest
, obj
, ctx
, valOut
, keyOut
);
722 auto constexpr Type
= IterTypeOp::NonLocal
;
724 if (auto ad
= collections::asArray(obj
)) {
727 return keyOut
? new_iter_array_key
<Type
>(dest
, ad
, valOut
, keyOut
)
728 : new_iter_array
<Type
>(dest
, ad
, valOut
);
731 assertx(obj
->collectionType() == CollectionType::Pair
);
732 auto arr
= collections::toArray(obj
);
734 return keyOut
? new_iter_array_key
<Type
>(dest
, arr
.detach(), valOut
, keyOut
)
735 : new_iter_array
<Type
>(dest
, arr
.detach(), valOut
);
738 // Generic next implementation for non-local iterators. This method is used for
739 // both value and key-value iterators; for value iterators, keyOut is nullptr.
740 // The result is false (= 0) if iteration is done, or true (= 1) otherwise.
742 int64_t iter_next_cold(Iter
* iter
, TypedValue
* valOut
, TypedValue
* keyOut
) {
743 auto const ai
= unwrap(iter
);
744 assertx(ai
->hasArrayData() || !ai
->getObject()->isCollection());
747 // The IterImpl destructor will decRef the array
751 if (ai
->hasArrayData()) {
752 iter_value_cell_local_impl
<true>(iter
, valOut
);
754 iter_key_cell_local_impl
<true>(iter
, keyOut
);
757 iter_value_cell_local_impl
<false>(iter
, valOut
);
759 iter_key_cell_local_impl
<false>(iter
, keyOut
);
765 // Generic next implementation for non-local iterators. This method is used for
766 // both value and key-value iterators; for value iterators, keyOut is nullptr.
767 // The result is false (= 0) if iteration is done, or true (= 1) otherwise.
769 // Since local iterators are always over arrays, we take an ArrayData here.
771 int64_t liter_next_cold(Iter
* iter
,
774 TypedValue
* keyOut
) {
775 auto const ai
= unwrap(iter
);
776 assertx(ai
->hasArrayData());
777 assertx(!ai
->getArrayData());
778 if (ai
->nextLocal(ad
)) {
782 liter_value_cell_local_impl(iter
, valOut
, ad
);
783 if (keyOut
) liter_key_cell_local_impl(iter
, keyOut
, ad
);
787 ///////////////////////////////////////////////////////////////////////////////
788 // IterNext/IterNextK helpers
792 // Destroy the given local. Does not do refcounting ops.
793 NEVER_INLINE
void destroyOutputLocal(TypedValue
* out
) {
794 destructorForType(type(out
))(val(out
).pcnt
);
797 // Dec-ref the given local, and destroy it if we dec-ref to zero.
798 ALWAYS_INLINE
void decRefOutputLocal(TypedValue
* out
) {
799 if (isRefcountedType(type(out
)) && val(out
).pcnt
->decReleaseCheck()) {
800 destroyOutputLocal(out
);
804 // Store `tv` to the given local, dec-ref-ing and releasing the old val.
805 ALWAYS_INLINE
void setOutputLocal(TypedValue tv
, TypedValue
* out
) {
806 decRefOutputLocal(out
);
812 // "virtual" method implementation of *IterNext* for ArrayPackedPointer.
813 // We don't use this iteration mode for key-value iterators, so it's simple.
815 // See iter.h for the meaning of a "local" iterator. At this point,
816 // we have the base, but we only dec-ref it when non-local iters hit the end.
818 // The result is false (= 0) if iteration is done, or true (= 1) otherwise.
819 template<bool HasKey
, bool Local
>
820 int64_t iter_next_packed_pointer(
821 Iter
* it
, TypedValue
* valOut
, TypedValue
* keyOut
, ArrayData
* arr
) {
822 always_assert(!HasKey
);
823 auto& iter
= *unwrap(it
);
824 auto const elm
= iter
.m_packed_elm
+ 1;
825 if (elm
== iter
.m_packed_end
) {
826 if (!Local
&& arr
->decReleaseCheck()) {
827 return iter_next_free_packed(it
, arr
);
833 iter
.m_packed_elm
= elm
;
834 setOutputLocal(*elm
, valOut
);
838 // "virtual" method implementation of *IterNext* for ArrayMixedPointer.
839 // Since we know the base is mixed and free of tombstones, we can simply
840 // increment the element pointer and compare it to the end pointer.
842 // HasKey is true for key-value iters. HasKey is true iff keyOut != nullptr.
843 // See iter.h for the meaning of a "local" iterator. At this point,
844 // we have the base, but we only dec-ref it when non-local iters hit the end.
846 // The result is false (= 0) if iteration is done, or true (= 1) otherwise.
847 template<bool HasKey
, bool Local
>
848 int64_t iter_next_mixed_pointer(Iter
* it
, TypedValue
* valOut
,
849 TypedValue
* keyOut
, ArrayData
* arr
) {
850 auto& iter
= *unwrap(it
);
851 auto const elm
= iter
.m_mixed_elm
+ 1;
852 if (elm
== iter
.m_mixed_end
) {
853 if (!Local
&& arr
->decReleaseCheck()) {
854 return iter_next_free_mixed(it
, arr
);
860 iter
.m_mixed_elm
= elm
;
861 setOutputLocal(*elm
->datatv(), valOut
);
862 if (HasKey
) setOutputLocal(elm
->getKey(), keyOut
);
868 // "virtual" method implementation of *IterNext* for ArrayPacked iterators.
869 // Since we know the array is packed, we just need to increment the position
870 // and do a bounds check. The key is the position; for the value, we index.
872 // HasKey is true for key-value iters. HasKey is true iff keyOut != nullptr.
873 // See iter.h for the meaning of a "local" iterator. At this point,
874 // we have the base, but we only dec-ref it when non-local iters hit the end.
876 // The result is false (= 0) if iteration is done, or true (= 1) otherwise.
877 template<bool HasKey
, bool Local
>
878 int64_t iter_next_packed_impl(Iter
* it
, TypedValue
* valOut
,
879 TypedValue
* keyOut
, ArrayData
* ad
) {
880 auto& iter
= *unwrap(it
);
881 assertx(PackedArray::checkInvariants(ad
));
883 ssize_t pos
= iter
.getPos() + 1;
884 if (UNLIKELY(pos
== iter
.getEnd())) {
885 if (!Local
&& ad
->decReleaseCheck()) {
886 return iter_next_free_packed(it
, ad
);
893 setOutputLocal(PackedArray::GetPosVal(ad
, pos
), valOut
);
894 if constexpr (HasKey
) setOutputLocal(make_tv
<KindOfInt64
>(pos
), keyOut
);
898 // "virtual" method implementation of *IterNext* for ArrayMixed iterators.
899 // Since we know the array is mixed, we can do "while (elm[pos].isTombstone())"
900 // inline here, and we can use MixedArray helpers to extract the key and value.
902 // HasKey is true for key-value iters. HasKey is true iff keyOut != nullptr.
903 // See iter.h for the meaning of a "local" iterator. At this point,
904 // we have the base, but we only dec-ref it when non-local iters hit the end.
906 // The result is false (= 0) if iteration is done, or true (= 1) otherwise.
907 template<bool HasKey
, bool Local
>
908 int64_t iter_next_mixed_impl(Iter
* it
, TypedValue
* valOut
,
909 TypedValue
* keyOut
, ArrayData
* arrData
) {
910 auto& iter
= *unwrap(it
);
911 ssize_t pos
= iter
.getPos();
912 auto const arr
= MixedArray::asMixed(arrData
);
915 if ((++pos
) == iter
.getEnd()) {
916 if (!Local
&& arr
->decReleaseCheck()) {
917 return iter_next_free_mixed(it
, arr
);
922 } while (UNLIKELY(arr
->isTombstone(pos
)));
925 decRefOutputLocal(valOut
);
927 if constexpr (HasKey
) {
928 decRefOutputLocal(keyOut
);
929 arr
->getArrayElm(pos
, valOut
, keyOut
);
931 arr
->getArrayElm(pos
, valOut
);
936 // "virtual" method implementation of *IterNext* for MonotypeVec iterators.
937 // See iter_next_packed_impl for docs for template args and return value.
938 template<bool HasKey
, bool Local
>
939 int64_t iter_next_monotype_vec(Iter
* it
, TypedValue
* valOut
,
940 TypedValue
* keyOut
, ArrayData
* ad
) {
941 auto& iter
= *unwrap(it
);
942 auto const mad
= bespoke::MonotypeVec::As(ad
);
944 ssize_t pos
= iter
.getPos() + 1;
945 if (UNLIKELY(pos
== iter
.getEnd())) {
946 if (!Local
&& mad
->decReleaseCheck()) {
947 return iter_next_free_monotype_vec(it
, mad
);
954 setOutputLocal(bespoke::MonotypeVec::GetPosVal(mad
, pos
), valOut
);
955 if constexpr (HasKey
) setOutputLocal(make_tv
<KindOfInt64
>(pos
), keyOut
);
961 int64_t iterNextArray(Iter
* it
, TypedValue
* valOut
) {
962 TRACE(2, "iterNextArray: I %p\n", it
);
963 return iter_next_cold(it
, valOut
, nullptr);
966 int64_t literNextArray(Iter
* it
, TypedValue
* valOut
, ArrayData
* ad
) {
967 TRACE(2, "literNextArray: I %p\n", it
);
968 return liter_next_cold(it
, ad
, valOut
, nullptr);
971 int64_t iterNextKArray(Iter
* it
, TypedValue
* valOut
, TypedValue
* keyOut
) {
972 TRACE(2, "iterNextKArray: I %p\n", it
);
973 return iter_next_cold(it
, valOut
, keyOut
);
976 int64_t literNextKArray(Iter
* it
, TypedValue
* valOut
, TypedValue
* keyOut
, ArrayData
* ad
) {
977 TRACE(2, "literNextKArray: I %p\n", it
);
978 return liter_next_cold(it
, ad
, valOut
, keyOut
);
981 int64_t iterNextObject(Iter
* it
, TypedValue
* valOut
) {
982 TRACE(2, "iterNextObject: I %p\n", it
);
983 // We can't just put the address of iter_next_cold in the table
984 // below right now because we need to get a nullptr into the third
985 // argument register for it.
986 return iter_next_cold(it
, valOut
, nullptr);
989 int64_t literNextObject(Iter
*, TypedValue
*, ArrayData
*) {
990 always_assert(false);
992 int64_t literNextKObject(Iter
*, TypedValue
*, TypedValue
*, ArrayData
*) {
993 always_assert(false);
997 * This macro takes a name (e.g. ArrayPacked) and a helper that's templated
998 * on <bool HasKey, bool Local> (e.g. iter_next_packed_impl) and produces the
999 * four helpers that we'll call from the iter_next dispatch methods below.
// Takes a name (e.g. ArrayPacked) and a helper templated on
// <bool HasKey, bool Local> (e.g. iter_next_packed_impl) and stamps out the
// four wrappers used by the iter_next dispatch tables below.
// NOTE(review): the closing braces of each wrapper were missing from the
// garbled extraction; restored.
#define VTABLE_METHODS(name, fn)                                           \
  int64_t iterNext##name(Iter* it, TypedValue* valOut) {                   \
    TRACE(2, "iterNext" #name ": I %p\n", it);                             \
    auto const ad = const_cast<ArrayData*>(unwrap(it)->getArrayData());    \
    return fn<false, false>(it, valOut, nullptr, ad);                      \
  }                                                                        \
  int64_t literNext##name(Iter* it, TypedValue* valOut, ArrayData* ad) {   \
    TRACE(2, "literNext" #name ": I %p\n", it);                            \
    return fn<false, true>(it, valOut, nullptr, ad);                       \
  }                                                                        \
  int64_t iterNextK##name(                                                 \
      Iter* it, TypedValue* valOut, TypedValue* keyOut) {                  \
    TRACE(2, "iterNextK" #name ": I %p\n", it);                            \
    auto const ad = const_cast<ArrayData*>(unwrap(it)->getArrayData());    \
    return fn<true, false>(it, valOut, keyOut, ad);                        \
  }                                                                        \
  int64_t literNextK##name(                                                \
      Iter* it, TypedValue* valOut, TypedValue* keyOut, ArrayData* ad) {   \
    TRACE(2, "literNextK" #name ": I %p\n", it);                           \
    return fn<true, true>(it, valOut, keyOut, ad);                         \
  }
1023 VTABLE_METHODS(ArrayPacked, iter_next_packed_impl);
1024 VTABLE_METHODS(ArrayMixed
, iter_next_mixed_impl
);
1025 VTABLE_METHODS(ArrayPackedPointer
, iter_next_packed_pointer
);
1026 VTABLE_METHODS(ArrayMixedPointer
, iter_next_mixed_pointer
);
1027 VTABLE_METHODS(MonotypeVec
, iter_next_monotype_vec
);
1029 #undef VTABLE_METHODS
1031 using IterNextHelper
= int64_t (*)(Iter
*, TypedValue
*);
1032 using IterNextKHelper
= int64_t (*)(Iter
*, TypedValue
*, TypedValue
*);
1033 using LIterNextHelper
= int64_t (*)(Iter
*, TypedValue
*, ArrayData
*);
1034 using LIterNextKHelper
= int64_t (*)(Iter
*, TypedValue
*, TypedValue
*, ArrayData
*);
1036 const IterNextHelper g_iterNextHelpers
[] = {
1037 &iterNextArrayPacked
,
1038 &iterNextArrayMixed
,
1041 &iterNextArrayPackedPointer
,
1042 &iterNextArrayMixedPointer
,
1043 &iterNextMonotypeVec
,
1046 const IterNextKHelper g_iterNextKHelpers
[] = {
1047 &iterNextKArrayPacked
,
1048 &iterNextKArrayMixed
,
1050 &iter_next_cold
, // iterNextKObject
1051 &iterNextKArrayPackedPointer
,
1052 &iterNextKArrayMixedPointer
,
1053 &iterNextKMonotypeVec
,
1056 const LIterNextHelper g_literNextHelpers
[] = {
1057 &literNextArrayPacked
,
1058 &literNextArrayMixed
,
1061 &literNextArrayPackedPointer
,
1062 &literNextArrayMixedPointer
,
1063 &literNextMonotypeVec
,
1066 const LIterNextKHelper g_literNextKHelpers
[] = {
1067 &literNextKArrayPacked
,
1068 &literNextKArrayMixed
,
1071 &literNextKArrayPackedPointer
,
1072 &literNextKArrayMixedPointer
,
1073 &literNextKMonotypeVec
,
1076 int64_t iter_next_ind(Iter
* iter
, TypedValue
* valOut
) {
1077 TRACE(2, "iter_next_ind: I %p\n", iter
);
1078 assertx(unwrap(iter
)->checkInvariants());
1079 assertx(tvIsPlausible(*valOut
));
1080 auto const index
= unwrap(iter
)->getHelperIndex();
1081 IterNextHelper helper
= g_iterNextHelpers
[static_cast<uint32_t>(index
)];
1082 return helper(iter
, valOut
);
1085 int64_t iter_next_key_ind(Iter
* iter
, TypedValue
* valOut
, TypedValue
* keyOut
) {
1086 TRACE(2, "iter_next_key_ind: I %p\n", iter
);
1087 assertx(unwrap(iter
)->checkInvariants());
1088 assertx(tvIsPlausible(*valOut
));
1089 assertx(tvIsPlausible(*keyOut
));
1090 auto const index
= unwrap(iter
)->getHelperIndex();
1091 IterNextKHelper helper
= g_iterNextKHelpers
[static_cast<uint32_t>(index
)];
1092 return helper(iter
, valOut
, keyOut
);
1095 int64_t liter_next_ind(Iter
* iter
, TypedValue
* valOut
, ArrayData
* ad
) {
1096 TRACE(2, "liter_next_ind: I %p\n", iter
);
1097 assertx(unwrap(iter
)->checkInvariants(ad
));
1098 assertx(tvIsPlausible(*valOut
));
1099 auto const index
= unwrap(iter
)->getHelperIndex();
1100 LIterNextHelper helper
= g_literNextHelpers
[static_cast<uint32_t>(index
)];
1101 return helper(iter
, valOut
, ad
);
1104 int64_t liter_next_key_ind(Iter
* iter
,
1108 TRACE(2, "liter_next_key_ind: I %p\n", iter
);
1109 assertx(unwrap(iter
)->checkInvariants(ad
));
1110 assertx(tvIsPlausible(*valOut
));
1111 assertx(tvIsPlausible(*keyOut
));
1112 auto const index
= unwrap(iter
)->getHelperIndex();
1113 LIterNextKHelper helper
= g_literNextKHelpers
[static_cast<uint32_t>(index
)];
1114 return helper(iter
, valOut
, keyOut
, ad
);
1117 ///////////////////////////////////////////////////////////////////////////////