2 +----------------------------------------------------------------------+
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
16 #include "hphp/hhbbc/interp.h"
23 #include <folly/Optional.h>
24 #include <folly/gen/Base.h>
25 #include <folly/gen/String.h>
27 #include "hphp/util/hash-set.h"
28 #include "hphp/util/trace.h"
29 #include "hphp/runtime/base/array-init.h"
30 #include "hphp/runtime/base/array-iterator.h"
31 #include "hphp/runtime/base/collections.h"
32 #include "hphp/runtime/base/static-string-table.h"
33 #include "hphp/runtime/base/tv-arith.h"
34 #include "hphp/runtime/base/tv-comparisons.h"
35 #include "hphp/runtime/base/tv-conversions.h"
36 #include "hphp/runtime/base/type-structure.h"
37 #include "hphp/runtime/base/type-structure-helpers.h"
38 #include "hphp/runtime/base/type-structure-helpers-defs.h"
39 #include "hphp/runtime/vm/runtime.h"
40 #include "hphp/runtime/vm/unit-util.h"
42 #include "hphp/runtime/ext/hh/ext_hh.h"
44 #include "hphp/hhbbc/analyze.h"
45 #include "hphp/hhbbc/bc.h"
46 #include "hphp/hhbbc/cfg.h"
47 #include "hphp/hhbbc/class-util.h"
48 #include "hphp/hhbbc/eval-cell.h"
49 #include "hphp/hhbbc/index.h"
50 #include "hphp/hhbbc/interp-state.h"
51 #include "hphp/hhbbc/optimize.h"
52 #include "hphp/hhbbc/representation.h"
53 #include "hphp/hhbbc/type-builtins.h"
54 #include "hphp/hhbbc/type-ops.h"
55 #include "hphp/hhbbc/type-system.h"
56 #include "hphp/hhbbc/unit-util.h"
57 #include "hphp/hhbbc/wide-func.h"
59 #include "hphp/hhbbc/interp-internal.h"
61 namespace HPHP
{ namespace HHBBC
{
63 //////////////////////////////////////////////////////////////////////
67 const StaticString
s_PHP_Incomplete_Class("__PHP_Incomplete_Class");
68 const StaticString
s_IMemoizeParam("HH\\IMemoizeParam");
69 const StaticString
s_getInstanceKey("getInstanceKey");
70 const StaticString
s_Closure("Closure");
71 const StaticString
s_this("HH\\this");
73 bool poppable(Op op
) {
87 case Op::NewDictArray
:
95 void interpStep(ISS
& env
, const Bytecode
& bc
);
97 void record(ISS
& env
, const Bytecode
& bc
) {
98 if (bc
.srcLoc
!= env
.srcLoc
) {
100 tmp
.srcLoc
= env
.srcLoc
;
101 return record(env
, tmp
);
104 if (!env
.replacedBcs
.size() &&
105 env
.unchangedBcs
< env
.blk
.hhbcs
.size() &&
106 bc
== env
.blk
.hhbcs
[env
.unchangedBcs
]) {
111 ITRACE(2, " => {}\n", show(env
.ctx
.func
, bc
));
112 env
.replacedBcs
.push_back(bc
);
115 // The number of pops as seen by interp.
116 uint32_t numPop(const Bytecode
& bc
) {
117 if (bc
.op
== Op::CGetL2
) return 1;
121 // The number of pushes as seen by interp.
122 uint32_t numPush(const Bytecode
& bc
) {
123 if (bc
.op
== Op::CGetL2
) return 2;
127 void reprocess(ISS
& env
) {
128 env
.reprocess
= true;
131 ArrayData
** add_elem_array(ISS
& env
) {
132 auto const idx
= env
.trackedElems
.back().idx
;
133 if (idx
< env
.unchangedBcs
) {
134 auto const DEBUG_ONLY
& bc
= env
.blk
.hhbcs
[idx
];
135 assertx(bc
.op
== Op::Concat
);
138 assertx(idx
>= env
.unchangedBcs
);
139 auto& bc
= env
.replacedBcs
[idx
- env
.unchangedBcs
];
140 auto arr
= [&] () -> const ArrayData
** {
142 case Op::Array
: return &bc
.Array
.arr1
;
143 case Op::Dict
: return &bc
.Dict
.arr1
;
144 case Op::Keyset
: return &bc
.Keyset
.arr1
;
145 case Op::Vec
: return &bc
.Vec
.arr1
;
146 case Op::Concat
: return nullptr;
147 default: not_reached();
150 return const_cast<ArrayData
**>(arr
);
153 bool start_add_elem(ISS
& env
, Type
& ty
, Op op
) {
154 auto value
= tvNonStatic(ty
);
155 if (!value
|| !isArrayLikeType(value
->m_type
)) return false;
157 if (op
== Op::AddElemC
) {
158 reduce(env
, bc::PopC
{}, bc::PopC
{}, bc::PopC
{});
160 reduce(env
, bc::PopC
{}, bc::PopC
{});
162 env
.trackedElems
.emplace_back(
163 env
.state
.stack
.size(),
164 env
.unchangedBcs
+ env
.replacedBcs
.size()
167 auto const arr
= value
->m_data
.parr
;
168 env
.replacedBcs
.push_back(
170 if (arr
->isKeysetType()) {
171 return bc::Keyset
{ arr
};
173 if (arr
->isVecType()) {
174 return bc::Vec
{ arr
};
176 if (arr
->isDictType()) {
177 return bc::Dict
{ arr
};
179 if (arr
->isPHPArrayType()) {
180 return bc::Array
{ arr
};
186 env
.replacedBcs
.back().srcLoc
= env
.srcLoc
;
187 ITRACE(2, "(addelem* -> {}\n",
188 show(env
.ctx
.func
, env
.replacedBcs
.back()));
189 push(env
, std::move(ty
));
195 * Alter the saved add_elem array in a way that preserves its provenance tag
196 * or adds a new one if applicable (i.e. the array is a vec or dict)
198 * The `mutate` parameter should be callable with an ArrayData** pointing to the
199 * add_elem array cached in the interp state and should write to it directly.
201 template <typename Fn
>
202 bool mutate_add_elem_array(ISS
& env
, ProvTag loc
, Fn
&& mutate
) {
203 auto const arr
= add_elem_array(env
);
204 if (!arr
) return false;
206 assertx(!RuntimeOption::EvalArrayProvenance
|| loc
.valid());
208 if (!RuntimeOption::EvalArrayProvenance
) {
213 // We need to propagate the provenance info in case we promote *arr from
214 // static to counted (or if its representation changes in some other way)...
215 auto const tag
= ProvTag::FromSArr(*arr
);
219 // ...which means we'll have to setTag if
220 // - the array still needs a tag AND
222 // - the array had no tag coming into this op OR
223 // - the set op cleared the provenance bit somehow
224 // (representation changed or we CoWed a static array)
225 if (arrprov::arrayWantsTag(*arr
)) {
226 if (tag
== ProvTag::NoTag
) {
227 arrprov::setTag(*arr
, loc
.get());
228 } else if (!arrprov::getTag(*arr
).valid()) {
229 arrprov::setTag(*arr
, tag
.get());
233 // Make sure that, if provenance is enabled and the array wants a tag, we
234 // definitely assigned one leaving this op.
235 assertx(!loc
.valid() ||
236 !arrprov::arrayWantsTag(*arr
) ||
237 arrprov::getTag(*arr
).valid());
241 void finish_tracked_elem(ISS
& env
) {
242 auto const arr
= add_elem_array(env
);
243 env
.trackedElems
.pop_back();
244 if (arr
) ArrayData::GetScalarArray(arr
);
247 void finish_tracked_elems(ISS
& env
, size_t depth
) {
248 while (!env
.trackedElems
.empty() && env
.trackedElems
.back().depth
>= depth
) {
249 finish_tracked_elem(env
);
253 uint32_t id_from_slot(ISS
& env
, int slot
) {
254 auto const id
= (env
.state
.stack
.end() - (slot
+ 1))->id
;
255 assertx(id
== StackElem::NoId
||
256 id
< env
.unchangedBcs
+ env
.replacedBcs
.size());
260 const Bytecode
* op_from_id(ISS
& env
, uint32_t id
) {
261 if (id
== StackElem::NoId
) return nullptr;
262 if (id
< env
.unchangedBcs
) return &env
.blk
.hhbcs
[id
];
263 auto const off
= id
- env
.unchangedBcs
;
264 assertx(off
< env
.replacedBcs
.size());
265 return &env
.replacedBcs
[off
];
268 void ensure_mutable(ISS
& env
, uint32_t id
) {
269 if (id
< env
.unchangedBcs
) {
270 auto const delta
= env
.unchangedBcs
- id
;
271 env
.replacedBcs
.resize(env
.replacedBcs
.size() + delta
);
272 for (auto i
= env
.replacedBcs
.size(); i
-- > delta
; ) {
273 env
.replacedBcs
[i
] = std::move(env
.replacedBcs
[i
- delta
]);
275 for (auto i
= 0; i
< delta
; i
++) {
276 env
.replacedBcs
[i
] = env
.blk
.hhbcs
[id
+ i
];
278 env
.unchangedBcs
= id
;
283 * Turn the instruction that wrote the slot'th element from the top of
284 * the stack into a Nop, adjusting the stack appropriately. If its the
285 * previous instruction, just rewind.
287 int kill_by_slot(ISS
& env
, int slot
) {
288 auto const id
= id_from_slot(env
, slot
);
289 assertx(id
!= StackElem::NoId
);
290 auto const sz
= env
.state
.stack
.size();
291 // if its the last bytecode we processed, we can rewind and avoid
292 // the reprocess overhead.
293 if (id
== env
.unchangedBcs
+ env
.replacedBcs
.size() - 1) {
295 return env
.state
.stack
.size() - sz
;
297 ensure_mutable(env
, id
);
298 auto& bc
= env
.replacedBcs
[id
- env
.unchangedBcs
];
299 auto const pop
= numPop(bc
);
300 auto const push
= numPush(bc
);
301 ITRACE(2, "kill_by_slot: slot={}, id={}, was {}\n",
302 slot
, id
, show(env
.ctx
.func
, bc
));
303 bc
= bc_with_loc(bc
.srcLoc
, bc::Nop
{});
304 env
.state
.stack
.kill(pop
, push
, id
);
306 return env
.state
.stack
.size() - sz
;
310 * Check whether an instruction can be inserted immediately after the
311 * slot'th stack entry was written. This is only possible if slot was
312 * the last thing written by the instruction that wrote it (ie some
313 * bytecodes push more than one value - there's no way to insert a
314 * bytecode that will write *between* those values on the stack).
316 bool can_insert_after_slot(ISS
& env
, int slot
) {
317 auto const it
= env
.state
.stack
.end() - (slot
+ 1);
318 if (it
->id
== StackElem::NoId
) return false;
319 if (auto const next
= it
.next_elem(1)) {
320 return next
->id
!= it
->id
;
326 * Insert a sequence of bytecodes after the instruction that wrote the
327 * slot'th element from the top of the stack.
329 * The entire sequence pops numPop, and pushes numPush stack
330 * elements. Only the last bytecode can push anything onto the stack,
331 * and the types it pushes are pointed to by types (if you have more
332 * than one bytecode that pushes, call this more than once).
334 void insert_after_slot(ISS
& env
, int slot
,
335 int numPop
, int numPush
, const Type
* types
,
336 const BytecodeVec
& bcs
) {
337 assertx(can_insert_after_slot(env
, slot
));
338 auto const id
= id_from_slot(env
, slot
);
339 assertx(id
!= StackElem::NoId
);
340 ensure_mutable(env
, id
+ 1);
341 env
.state
.stack
.insert_after(numPop
, numPush
, types
, bcs
.size(), id
);
342 env
.replacedBcs
.insert(env
.replacedBcs
.begin() + (id
+ 1 - env
.unchangedBcs
),
343 bcs
.begin(), bcs
.end());
344 using namespace folly::gen
;
345 ITRACE(2, "insert_after_slot: slot={}, id={} [{}]\n",
348 map([&] (const Bytecode
& bc
) { return show(env
.ctx
.func
, bc
); }) |
349 unsplit
<std::string
>(", "));
352 Bytecode
& mutate_last_op(ISS
& env
) {
353 assertx(will_reduce(env
));
355 if (!env
.replacedBcs
.size()) {
356 assertx(env
.unchangedBcs
);
357 env
.replacedBcs
.push_back(env
.blk
.hhbcs
[--env
.unchangedBcs
]);
359 return env
.replacedBcs
.back();
363 * Can be used to replace one op with another when rewind/reduce isn't
364 * safe (eg to change a SetL to a PopL - its not safe to rewind/reduce
365 * because the SetL changed both the Type and the equiv of its local).
367 void replace_last_op(ISS
& env
, Bytecode
&& bc
) {
368 auto& last
= mutate_last_op(env
);
369 auto const newPush
= numPush(bc
);
370 auto const oldPush
= numPush(last
);
371 auto const newPops
= numPop(bc
);
372 auto const oldPops
= numPop(last
);
374 assertx(newPush
<= oldPush
);
375 assertx(newPops
<= oldPops
);
377 if (newPush
!= oldPush
|| newPops
!= oldPops
) {
378 env
.state
.stack
.rewind(oldPops
- newPops
, oldPush
- newPush
);
380 ITRACE(2, "(replace: {}->{}\n",
381 show(env
.ctx
.func
, last
), show(env
.ctx
.func
, bc
));
382 last
= bc_with_loc(last
.srcLoc
, bc
);
387 //////////////////////////////////////////////////////////////////////
389 const Bytecode
* op_from_slot(ISS
& env
, int slot
, int prev
/* = 0 */) {
390 if (!will_reduce(env
)) return nullptr;
391 auto const id
= id_from_slot(env
, slot
);
392 if (id
== StackElem::NoId
) return nullptr;
393 if (id
< prev
) return nullptr;
394 return op_from_id(env
, id
- prev
);
397 const Bytecode
* last_op(ISS
& env
, int idx
/* = 0 */) {
398 if (!will_reduce(env
)) return nullptr;
400 if (env
.replacedBcs
.size() > idx
) {
401 return &env
.replacedBcs
[env
.replacedBcs
.size() - idx
- 1];
404 idx
-= env
.replacedBcs
.size();
405 if (env
.unchangedBcs
> idx
) {
406 return &env
.blk
.hhbcs
[env
.unchangedBcs
- idx
- 1];
412 * Assuming bc was just interped, rewind to the state immediately
413 * before it was interped.
415 * This is rarely what you want. Its used for constprop, where the
416 * bytecode has been interped, but not yet committed to the bytecode
417 * stream. We want to undo its effects, the spit out pops for its
418 * inputs, and commit a constant-generating bytecode.
420 void rewind(ISS
& env
, const Bytecode
& bc
) {
421 ITRACE(2, "(rewind: {}\n", show(env
.ctx
.func
, bc
));
422 env
.state
.stack
.rewind(numPop(bc
), numPush(bc
));
426 * Used for peephole opts. Will undo the *stack* effects of the last n
427 * committed byte codes, and remove them from the bytecode stream, in
428 * preparation for writing out an optimized replacement sequence.
430 * WARNING: Does not undo other changes to state, such as local types,
431 * local equivalency, and thisType. Take care when rewinding such
434 void rewind(ISS
& env
, int n
) {
436 while (env
.replacedBcs
.size()) {
437 rewind(env
, env
.replacedBcs
.back());
438 env
.replacedBcs
.pop_back();
442 rewind(env
, env
.blk
.hhbcs
[--env
.unchangedBcs
]);
446 void impl_vec(ISS
& env
, bool reduce
, BytecodeVec
&& bcs
) {
447 if (!will_reduce(env
)) reduce
= false;
450 using namespace folly::gen
;
451 ITRACE(2, "(reduce: {}\n",
453 map([&] (const Bytecode
& bc
) { return show(env
.ctx
.func
, bc
); }) |
454 unsplit
<std::string
>(", "));
456 auto ef
= !env
.flags
.reduced
|| env
.flags
.effectFree
;
458 for (auto const& bc
: bcs
) {
460 env
.flags
.jmpDest
== NoBlockId
&&
461 "you can't use impl with branching opcodes before last position"
464 if (!env
.flags
.effectFree
) ef
= false;
465 if (env
.state
.unreachable
|| env
.flags
.jmpDest
!= NoBlockId
) break;
467 env
.flags
.effectFree
= ef
;
468 } else if (!env
.flags
.reduced
) {
471 env
.flags
.reduced
= true;
476 SCOPE_EXIT
{ env
.analyzeDepth
--; };
478 // We should be at the start of a bytecode.
479 assertx(env
.flags
.wasPEI
&&
480 !env
.flags
.canConstProp
&&
481 !env
.flags
.effectFree
);
483 env
.flags
.wasPEI
= false;
484 env
.flags
.canConstProp
= true;
485 env
.flags
.effectFree
= true;
487 for (auto const& bc
: bcs
) {
488 assert(env
.flags
.jmpDest
== NoBlockId
&&
489 "you can't use impl with branching opcodes before last position");
491 auto const wasPEI
= env
.flags
.wasPEI
;
492 auto const canConstProp
= env
.flags
.canConstProp
;
493 auto const effectFree
= env
.flags
.effectFree
;
495 ITRACE(3, " (impl {}\n", show(env
.ctx
.func
, bc
));
496 env
.flags
.wasPEI
= true;
497 env
.flags
.canConstProp
= false;
498 env
.flags
.effectFree
= false;
499 default_dispatch(env
, bc
);
501 if (env
.flags
.canConstProp
) {
503 if (env
.flags
.effectFree
&& !env
.flags
.wasPEI
) return;
504 auto stk
= env
.state
.stack
.end();
505 for (auto i
= bc
.numPush(); i
--; ) {
507 if (!is_scalar(stk
->type
)) return;
509 env
.flags
.effectFree
= true;
510 env
.flags
.wasPEI
= false;
514 // If any of the opcodes in the impl list said they could throw,
515 // then the whole thing could throw.
516 env
.flags
.wasPEI
= env
.flags
.wasPEI
|| wasPEI
;
517 env
.flags
.canConstProp
= env
.flags
.canConstProp
&& canConstProp
;
518 env
.flags
.effectFree
= env
.flags
.effectFree
&& effectFree
;
519 if (env
.state
.unreachable
|| env
.flags
.jmpDest
!= NoBlockId
) break;
523 LocalId
equivLocalRange(ISS
& env
, const LocalRange
& range
) {
524 auto bestRange
= range
.first
;
525 auto equivFirst
= findLocEquiv(env
, range
.first
);
526 if (equivFirst
== NoLocalId
) return bestRange
;
528 if (equivFirst
< bestRange
) {
529 auto equivRange
= [&] {
530 // local equivalency includes differing by Uninit, so we need
531 // to check the types.
532 if (peekLocRaw(env
, equivFirst
) != peekLocRaw(env
, range
.first
)) {
536 for (uint32_t i
= 1; i
< range
.count
; ++i
) {
537 if (!locsAreEquiv(env
, equivFirst
+ i
, range
.first
+ i
) ||
538 peekLocRaw(env
, equivFirst
+ i
) !=
539 peekLocRaw(env
, range
.first
+ i
)) {
548 bestRange
= equivFirst
;
551 equivFirst
= findLocEquiv(env
, equivFirst
);
552 assert(equivFirst
!= NoLocalId
);
553 } while (equivFirst
!= range
.first
);
558 SString
getNameFromType(const Type
& t
) {
559 if (!t
.subtypeOf(BStr
)) return nullptr;
560 if (is_specialized_string(t
)) return sval_of(t
);
564 //////////////////////////////////////////////////////////////////////
569 resolveTSStaticallyImpl(ISS
& env
, hphp_fast_set
<SArray
>& seenTs
, SArray ts
,
570 const php::Class
* declaringCls
);
573 resolveTSListStatically(ISS
& env
, hphp_fast_set
<SArray
>& seenTs
,
574 SArray tsList
, const php::Class
* declaringCls
) {
575 auto arr
= Array::attach(const_cast<ArrayData
*>(tsList
));
576 for (auto i
= 0; i
< arr
.size(); i
++) {
577 auto elemArr
= arr
[i
].getArrayData();
578 auto elem
= resolveTSStaticallyImpl(env
, seenTs
, elemArr
, declaringCls
);
579 if (!elem
) return nullptr;
580 arr
.set(i
, Variant(elem
));
586 resolveTSStaticallyImpl(ISS
& env
, hphp_fast_set
<SArray
>& seenTs
, SArray ts
,
587 const php::Class
* declaringCls
) {
588 if (seenTs
.contains(ts
)) return nullptr;
590 SCOPE_EXIT
{ seenTs
.erase(ts
); };
592 auto const addModifiers
= [&](ArrayData
* result
) {
593 auto a
= Array::attach(result
);
594 if (is_ts_like(ts
) && !is_ts_like(a
.get())) {
595 a
.set(s_like
, make_tv
<KindOfBoolean
>(true));
597 if (is_ts_nullable(ts
) && !is_ts_nullable(a
.get())) {
598 a
.set(s_nullable
, make_tv
<KindOfBoolean
>(true));
600 if (is_ts_soft(ts
) && !is_ts_soft(a
.get())) {
601 a
.set(s_soft
, make_tv
<KindOfBoolean
>(true));
605 auto const finish
= [&](const ArrayData
* result
) {
606 auto r
= const_cast<ArrayData
*>(result
);
607 ArrayData::GetScalarArray(&r
);
610 switch (get_ts_kind(ts
)) {
611 case TypeStructure::Kind::T_int
:
612 case TypeStructure::Kind::T_bool
:
613 case TypeStructure::Kind::T_float
:
614 case TypeStructure::Kind::T_string
:
615 case TypeStructure::Kind::T_num
:
616 case TypeStructure::Kind::T_arraykey
:
617 case TypeStructure::Kind::T_void
:
618 case TypeStructure::Kind::T_null
:
619 case TypeStructure::Kind::T_nothing
:
620 case TypeStructure::Kind::T_noreturn
:
621 case TypeStructure::Kind::T_mixed
:
622 case TypeStructure::Kind::T_dynamic
:
623 case TypeStructure::Kind::T_nonnull
:
624 case TypeStructure::Kind::T_resource
:
626 case TypeStructure::Kind::T_typevar
:
627 if (ts
->exists(s_name
.get()) &&
628 get_ts_name(ts
)->equal(s_wildcard
.get())) {
632 case TypeStructure::Kind::T_dict
:
633 case TypeStructure::Kind::T_vec
:
634 case TypeStructure::Kind::T_keyset
:
635 case TypeStructure::Kind::T_vec_or_dict
:
636 case TypeStructure::Kind::T_arraylike
: {
637 if (!ts
->exists(s_generic_types
)) return finish(ts
);
638 auto const generics
= get_ts_generic_types(ts
);
640 resolveTSListStatically(env
, seenTs
, generics
, declaringCls
);
641 if (!rgenerics
) return nullptr;
642 auto result
= const_cast<ArrayData
*>(ts
);
643 return finish(result
->set(s_generic_types
.get(), Variant(rgenerics
)));
645 case TypeStructure::Kind::T_class
:
646 case TypeStructure::Kind::T_interface
:
647 case TypeStructure::Kind::T_xhp
:
648 case TypeStructure::Kind::T_enum
:
649 // Generics for these must have been resolved already as we'd never set
650 // the TS Kind to be one of these until resolution
652 case TypeStructure::Kind::T_tuple
: {
653 auto const elems
= get_ts_elem_types(ts
);
654 auto relems
= resolveTSListStatically(env
, seenTs
, elems
, declaringCls
);
655 if (!relems
) return nullptr;
656 auto result
= const_cast<ArrayData
*>(ts
);
657 return finish(result
->set(s_elem_types
.get(), Variant(relems
)));
659 case TypeStructure::Kind::T_shape
:
660 // TODO(T31677864): We can also optimize this but shapes could have
661 // optional fields or they could allow unknown fields, so this one is
662 // slightly more tricky
664 case TypeStructure::Kind::T_unresolved
: {
665 assertx(ts
->exists(s_classname
));
666 auto result
= const_cast<ArrayData
*>(ts
);
667 if (ts
->exists(s_generic_types
)) {
668 auto const generics
= get_ts_generic_types(ts
);
670 resolveTSListStatically(env
, seenTs
, generics
, declaringCls
);
671 if (!rgenerics
) return nullptr;
672 result
= result
->set(s_generic_types
.get(), Variant(rgenerics
));
674 auto const rcls
= env
.index
.resolve_class(env
.ctx
, get_ts_classname(ts
));
675 if (!rcls
|| !rcls
->resolved()) return nullptr;
676 auto const attrs
= rcls
->cls()->attrs
;
677 auto const kind
= [&] {
678 if (attrs
& AttrEnum
) return TypeStructure::Kind::T_enum
;
679 if (attrs
& AttrTrait
) return TypeStructure::Kind::T_trait
;
680 if (attrs
& AttrInterface
) return TypeStructure::Kind::T_interface
;
681 return TypeStructure::Kind::T_class
;
683 return finish(result
->set(s_kind
.get(),
684 Variant(static_cast<uint8_t>(kind
))));
686 case TypeStructure::Kind::T_typeaccess
: {
687 auto const accList
= get_ts_access_list(ts
);
688 auto const size
= accList
->size();
689 auto clsName
= get_ts_root_name(ts
);
690 auto checkNoOverrideOnFirst
= false;
692 if (clsName
->isame(s_self
.get())) {
693 clsName
= declaringCls
->name
;
694 } else if (clsName
->isame(s_parent
.get()) && declaringCls
->parentName
) {
695 clsName
= declaringCls
->parentName
;
696 } else if (clsName
->isame(s_this
.get())) {
697 clsName
= declaringCls
->name
;
698 checkNoOverrideOnFirst
= true;
701 ArrayData
* typeCnsVal
= nullptr;
702 for (auto i
= 0; i
< size
; i
++) {
703 auto const rcls
= env
.index
.resolve_class(env
.ctx
, clsName
);
704 if (!rcls
|| !rcls
->resolved()) return nullptr;
705 auto const cnsName
= accList
->at(i
);
706 if (!tvIsString(&cnsName
)) return nullptr;
707 auto const cnst
= env
.index
.lookup_class_const_ptr(env
.ctx
, *rcls
,
710 if (!cnst
|| !cnst
->val
|| !cnst
->isTypeconst
||
711 !tvIsHAMSafeDArray(&*cnst
->val
)) {
714 if (checkNoOverrideOnFirst
&& i
== 0 && !cnst
->isNoOverride
) {
717 typeCnsVal
= resolveTSStaticallyImpl(env
, seenTs
,
718 cnst
->val
->m_data
.parr
, cnst
->cls
);
719 if (!typeCnsVal
) return nullptr;
720 if (i
== size
- 1) break;
721 auto const kind
= get_ts_kind(typeCnsVal
);
722 if (kind
!= TypeStructure::Kind::T_class
&&
723 kind
!= TypeStructure::Kind::T_interface
) {
726 clsName
= get_ts_classname(typeCnsVal
);
728 if (!typeCnsVal
) return nullptr;
729 return finish(addModifiers(typeCnsVal
));
731 case TypeStructure::Kind::T_fun
: {
732 auto rreturn
= resolveTSStaticallyImpl(env
, seenTs
,
733 get_ts_return_type(ts
),
735 if (!rreturn
) return nullptr;
736 auto rparams
= resolveTSListStatically(env
, seenTs
,
737 get_ts_param_types(ts
),
739 if (!rparams
) return nullptr;
740 auto result
= const_cast<ArrayData
*>(ts
)
741 ->set(s_return_type
.get(), Variant(rreturn
))
742 ->set(s_param_types
.get(), Variant(rparams
));
743 auto const variadic
= get_ts_variadic_type_opt(ts
);
746 resolveTSStaticallyImpl(env
, seenTs
, variadic
, declaringCls
);
747 if (!rvariadic
) return nullptr;
748 result
= result
->set(s_variadic_type
.get(), Variant(rvariadic
));
750 return finish(result
);
752 case TypeStructure::Kind::T_array
:
753 case TypeStructure::Kind::T_darray
:
754 case TypeStructure::Kind::T_varray
:
755 case TypeStructure::Kind::T_varray_or_darray
:
756 case TypeStructure::Kind::T_reifiedtype
:
757 case TypeStructure::Kind::T_trait
:
766 resolveTSStatically(ISS
& env
, SArray ts
, const php::Class
* declaringCls
) {
767 hphp_fast_set
<SArray
> seenTs
;
768 return resolveTSStaticallyImpl(env
, seenTs
, ts
, declaringCls
);
771 //////////////////////////////////////////////////////////////////////
773 namespace interp_step
{
775 void in(ISS
& env
, const bc::Nop
&) { reduce(env
); }
777 void in(ISS
& env
, const bc::PopC
&) {
778 if (auto const last
= last_op(env
)) {
779 if (poppable(last
->op
)) {
783 if (last
->op
== Op::This
) {
784 // can't rewind This because it removed null from thisType (so
785 // CheckThis at this point is a no-op) - and note that it must
786 // have *been* nullable, or we'd have turned it into a
787 // `BareThis NeverNull`
788 replace_last_op(env
, bc::CheckThis
{});
791 if (last
->op
== Op::SetL
) {
792 // can't rewind a SetL because it changes local state
793 replace_last_op(env
, bc::PopL
{ last
->SetL
.loc1
});
796 if (last
->op
== Op::CGetL2
) {
797 auto loc
= last
->CGetL2
.nloc1
;
799 return reduce(env
, bc::PopC
{}, bc::CGetL
{ loc
});
807 void in(ISS
& env
, const bc::PopU
&) {
808 if (auto const last
= last_op(env
)) {
809 if (last
->op
== Op::NullUninit
) {
814 effect_free(env
); popU(env
);
817 void in(ISS
& env
, const bc::PopU2
&) {
819 auto equiv
= topStkEquiv(env
);
820 auto val
= popC(env
);
822 push(env
, std::move(val
), equiv
!= StackDupId
? equiv
: NoLocalId
);
825 void in(ISS
& env
, const bc::PopFrame
& op
) {
828 std::vector
<std::pair
<Type
, LocalId
>> vals
{op
.arg1
};
829 for (auto i
= op
.arg1
; i
> 0; --i
) {
830 vals
[i
- 1] = {popC(env
), topStkEquiv(env
)};
832 for (uint32_t i
= 0; i
< 3; i
++) popU(env
);
833 for (auto& p
: vals
) {
835 env
, std::move(p
.first
), p
.second
!= StackDupId
? p
.second
: NoLocalId
);
839 void in(ISS
& env
, const bc::EntryNop
&) { effect_free(env
); }
841 void in(ISS
& env
, const bc::Dup
& /*op*/) {
843 auto equiv
= topStkEquiv(env
);
844 auto val
= popC(env
);
845 push(env
, val
, equiv
);
846 push(env
, std::move(val
), StackDupId
);
849 void in(ISS
& env
, const bc::AssertRATL
& op
) {
850 mayReadLocal(env
, op
.loc1
);
854 void in(ISS
& env
, const bc::AssertRATStk
&) {
858 void in(ISS
& env
, const bc::BreakTraceHint
&) { effect_free(env
); }
860 void in(ISS
& env
, const bc::CGetCUNop
&) {
862 auto const t
= popCU(env
);
863 push(env
, remove_uninit(t
));
866 void in(ISS
& env
, const bc::UGetCUNop
&) {
872 void in(ISS
& env
, const bc::Null
&) {
874 push(env
, TInitNull
);
877 void in(ISS
& env
, const bc::NullUninit
&) {
882 void in(ISS
& env
, const bc::True
&) {
887 void in(ISS
& env
, const bc::False
&) {
892 void in(ISS
& env
, const bc::Int
& op
) {
894 push(env
, ival(op
.arg1
));
897 void in(ISS
& env
, const bc::Double
& op
) {
899 push(env
, dval(op
.dbl1
));
902 void in(ISS
& env
, const bc::String
& op
) {
904 push(env
, sval(op
.str1
));
907 void in(ISS
& env
, const bc::Array
& op
) {
908 assert(op
.arr1
->isPHPArrayType());
909 assertx(!RuntimeOption::EvalHackArrDVArrs
|| op
.arr1
->isNotDVArray());
911 push(env
, aval(op
.arr1
));
914 void in(ISS
& env
, const bc::Vec
& op
) {
915 assert(op
.arr1
->isVecType());
917 push(env
, vec_val(op
.arr1
));
920 void in(ISS
& env
, const bc::Dict
& op
) {
921 assert(op
.arr1
->isDictType());
923 push(env
, dict_val(op
.arr1
));
926 void in(ISS
& env
, const bc::Keyset
& op
) {
927 assert(op
.arr1
->isKeysetType());
929 push(env
, keyset_val(op
.arr1
));
932 void in(ISS
& env
, const bc::NewDictArray
& op
) {
934 push(env
, op
.arg1
== 0 ? dict_empty() : some_dict_empty());
937 void in(ISS
& env
, const bc::NewVArray
& op
) {
938 assertx(!RuntimeOption::EvalHackArrDVArrs
);
939 auto elems
= std::vector
<Type
>{};
940 elems
.reserve(op
.arg1
);
941 for (auto i
= uint32_t{0}; i
< op
.arg1
; ++i
) {
942 elems
.push_back(std::move(topC(env
, op
.arg1
- i
- 1)));
944 discard(env
, op
.arg1
);
945 push(env
, arr_packed_varray(std::move(elems
), provTagHere(env
)));
950 void in(ISS
& env
, const bc::NewDArray
& op
) {
951 assertx(!RuntimeOption::EvalHackArrDVArrs
);
953 auto const tag
= provTagHere(env
);
954 push(env
, op
.arg1
== 0 ? aempty_darray(tag
) : some_aempty_darray(tag
));
957 void in(ISS
& env
, const bc::NewRecord
& op
) {
958 discard(env
, op
.keys
.size());
959 auto const rrec
= env
.index
.resolve_record(op
.str1
);
960 push(env
, rrec
? exactRecord(*rrec
) : TRecord
);
963 void in(ISS
& env
, const bc::NewStructDArray
& op
) {
964 assertx(!RuntimeOption::EvalHackArrDVArrs
);
965 auto map
= MapElems
{};
966 for (auto it
= op
.keys
.end(); it
!= op
.keys
.begin(); ) {
967 map
.emplace_front(make_tv
<KindOfPersistentString
>(*--it
), popC(env
));
969 push(env
, arr_map_darray(std::move(map
), provTagHere(env
)));
974 void in(ISS
& env
, const bc::NewStructDict
& op
) {
975 auto map
= MapElems
{};
976 for (auto it
= op
.keys
.end(); it
!= op
.keys
.begin(); ) {
977 map
.emplace_front(make_tv
<KindOfPersistentString
>(*--it
), popC(env
));
979 push(env
, dict_map(std::move(map
)));
984 void in(ISS
& env
, const bc::NewVec
& op
) {
985 auto elems
= std::vector
<Type
>{};
986 elems
.reserve(op
.arg1
);
987 for (auto i
= uint32_t{0}; i
< op
.arg1
; ++i
) {
988 elems
.push_back(std::move(topC(env
, op
.arg1
- i
- 1)));
990 discard(env
, op
.arg1
);
993 push(env
, vec(std::move(elems
)));
996 void in(ISS
& env
, const bc::NewKeysetArray
& op
) {
998 auto map
= MapElems
{};
1002 auto mayThrow
= false;
1003 for (auto i
= uint32_t{0}; i
< op
.arg1
; ++i
) {
1004 auto k
= disect_strict_key(popC(env
));
1005 mayThrow
|= k
.mayThrow
;
1006 if (k
.type
== TBottom
) {
1011 if (auto const v
= k
.tv()) {
1012 map
.emplace_front(*v
, k
.type
);
1017 ty
|= std::move(k
.type
);
1019 if (!mayThrow
) effect_free(env
);
1021 push(env
, keyset_map(std::move(map
)));
1022 if (!mayThrow
) constprop(env
);
1024 push(env
, keyset_n(ty
));
1031 void in(ISS
& env
, const bc::AddElemC
& /*op*/) {
1032 auto const v
= topC(env
, 0);
1033 auto const k
= topC(env
, 1);
1035 auto inTy
= (env
.state
.stack
.end() - 3).unspecialize();
1037 auto const tag
= provTagHere(env
);
1039 auto outTy
= [&] (Type ty
) ->
1040 folly::Optional
<std::pair
<Type
,ThrowMode
>> {
1041 if (ty
.subtypeOf(BArr
)) {
1042 return array_set(std::move(ty
), k
, v
, tag
);
1044 if (ty
.subtypeOf(BDict
)) {
1045 return dict_set(std::move(ty
), k
, v
);
1050 if (outTy
&& outTy
->second
== ThrowMode::None
&& will_reduce(env
)) {
1051 if (!env
.trackedElems
.empty() &&
1052 env
.trackedElems
.back().depth
+ 3 == env
.state
.stack
.size()) {
1053 auto const handled
= [&] {
1054 if (!k
.subtypeOf(BArrKey
)) return false;
1056 if (!ktv
) return false;
1058 if (!vtv
) return false;
1059 return mutate_add_elem_array(env
, tag
, [&](ArrayData
** arr
) {
1060 *arr
= (*arr
)->set(*ktv
, *vtv
);
1064 (env
.state
.stack
.end() - 3)->type
= std::move(outTy
->first
);
1065 reduce(env
, bc::PopC
{}, bc::PopC
{});
1066 ITRACE(2, "(addelem* -> {}\n",
1068 env
.replacedBcs
[env
.trackedElems
.back().idx
- env
.unchangedBcs
]));
1072 if (start_add_elem(env
, outTy
->first
, Op::AddElemC
)) {
1079 finish_tracked_elems(env
, env
.state
.stack
.size());
1082 return push(env
, union_of(TArr
, TDict
));
1085 if (outTy
->first
.subtypeOf(BBottom
)) {
1087 } else if (outTy
->second
== ThrowMode::None
) {
1091 push(env
, std::move(outTy
->first
));
// Interprets AddNewElemC: append the value on top of the eval stack to the
// array-like beneath it, computing the best known result type.
// NOTE(review): this region has extraction gaps — several original
// statements (including the pop of the appended value and closing braces)
// are not visible here; confirm against the upstream file.
void in(ISS& env, const bc::AddNewElemC&) {
  // Drop specialization from the array operand two slots down the stack.
  auto inTy = (env.state.stack.end() - 2).unspecialize();
  // Provenance tag to attach to any array this op materializes.
  auto const tag = provTagHere(env);
  // Compute the appended-to type for each supported array-like kind;
  // folly::none means we can't model the append.
  auto outTy = [&] (Type ty) -> folly::Optional<Type> {
    if (ty.subtypeOf(BArr)) {
      return array_newelem(std::move(ty), std::move(v), tag).first;
    if (ty.subtypeOf(BVec)) {
      return vec_newelem(std::move(ty), std::move(v)).first;
    if (ty.subtypeOf(BKeyset)) {
      return keyset_newelem(std::move(ty), std::move(v)).first;
  if (outTy && will_reduce(env)) {
    if (!env.trackedElems.empty() &&
        env.trackedElems.back().depth + 2 == env.state.stack.size()) {
      // We are extending a tracked constant-array construction sequence:
      // try to fold the append directly into the materialized array.
      auto const handled = [&] {
        if (!vtv) return false;
        return mutate_add_elem_array(env, tag, [&](ArrayData** arr) {
          *arr = (*arr)->append(*vtv);
      // Update the stack slot's type in place and drop the value push.
      (env.state.stack.end() - 2)->type = std::move(*outTy);
      reduce(env, bc::PopC {});
      ITRACE(2, "(addelem* -> {}\n",
             env.replacedBcs[env.trackedElems.back().idx - env.unchangedBcs]));
    // Otherwise, possibly begin a new tracked add-elem sequence.
    if (start_add_elem(env, *outTy, Op::AddNewElemC)) {
    // Couldn't track: flush any tracked elems at this depth.
    finish_tracked_elems(env, env.state.stack.size());
    return push(env, TInitCell);
  if (outTy->subtypeOf(BBottom)) {
  push(env, std::move(*outTy));
// NewCol pushes an empty collection object whose kind is given by the
// bytecode's subop; the exact builtin class is statically known, so we can
// push an objExact type.
void in(ISS& env, const bc::NewCol& op) {
  auto const type = static_cast<CollectionType>(op.subop1);
  auto const name = collections::typeToString(type);
  push(env, objExact(env.index.builtin_class(name)));
// NewPair consumes two cells and pushes an HH\Pair with an exactly known
// object type.
void in(ISS& env, const bc::NewPair& /*op*/) {
  popC(env); popC(env);
  auto const name = collections::typeToString(CollectionType::Pair);
  push(env, objExact(env.index.builtin_class(name)));
// ColFromArray converts an array-like into a collection of the kind given
// by the subop. Vector kinds take a vec source; Map/Set kinds take a dict.
// When the source already has the matching layout the conversion is
// effect-free. Pair cannot be built this way (asserted below).
void in(ISS& env, const bc::ColFromArray& op) {
  auto const src = popC(env);
  auto const type = static_cast<CollectionType>(op.subop1);
  assertx(type != CollectionType::Pair);
  if (type == CollectionType::Vector || type == CollectionType::ImmVector) {
    if (src.subtypeOf(TVec)) effect_free(env);
    // NOTE(review): an "} else {" from the original appears to be missing
    // in this extraction.
    assertx(type == CollectionType::Map ||
            type == CollectionType::ImmMap ||
            type == CollectionType::Set ||
            type == CollectionType::ImmSet);
    if (src.subtypeOf(TDict)) effect_free(env);
  auto const name = collections::typeToString(type);
  push(env, objExact(env.index.builtin_class(name)));
// CnsE looks up a global constant by name via the index. A strict subtype
// of TInitCell means the index narrowed the value; a miss may trigger
// autoload at runtime (and can therefore throw).
void in(ISS& env, const bc::CnsE& op) {
  auto t = env.index.lookup_constant(env.ctx, op.str1);
  if (t.strictSubtypeOf(TInitCell)) {
    // constprop will take care of nothrow *if* its a constant; and if
    // its not, we might trigger autoload.
  push(env, std::move(t));
// ClsCns reads a class constant from the class on top of the stack. When
// the class type is specialized, the instruction can reduce to ClsCnsD on
// the statically known class name.
void in(ISS& env, const bc::ClsCns& op) {
  auto const& t1 = topC(env);
  if (is_specialized_cls(t1)) {
    auto const dcls = dcls_of(t1);
    auto const finish = [&] {
      reduce(env, bc::PopC { },
             bc::ClsCnsD { op.str1, dcls.cls.name() });
    // An exact class is unambiguous.
    if (dcls.type == DCls::Exact) return finish();
    // A subclass constraint is still safe if the constant can't be
    // overridden in subclasses.
    // NOTE(review): the constant-name argument of this call is on a line
    // missing from this extraction.
    auto const cnst = env.index.lookup_class_const_ptr(env.ctx, dcls.cls,
    if (cnst && cnst->isNoOverride) return finish();
  push(env, TInitCell);
// ClsCnsD reads a class constant from a statically named class. If the
// class resolves, push the index's type for the constant; otherwise fall
// back to TInitCell (the lookup may autoload / throw).
void in(ISS& env, const bc::ClsCnsD& op) {
  if (auto const rcls = env.index.resolve_class(env.ctx, op.str2)) {
    auto t = env.index.lookup_class_constant(env.ctx, *rcls, op.str1, false);
    push(env, std::move(t));
  push(env, TInitCell);
1221 void in(ISS
& env
, const bc::File
&) { effect_free(env
); push(env
, TSStr
); }
1222 void in(ISS
& env
, const bc::Dir
&) { effect_free(env
); push(env
, TSStr
); }
1223 void in(ISS
& env
, const bc::Method
&) { effect_free(env
); push(env
, TSStr
); }
1225 void in(ISS
& env
, const bc::FuncCred
&) { effect_free(env
); push(env
, TObj
); }
// ClassName converts a class on the stack into its name string. With an
// exactly known class this reduces to pushing the literal name; otherwise
// the op is nothrow whenever the operand is definitely a class.
void in(ISS& env, const bc::ClassName& op) {
  auto const ty = topC(env);
  if (is_specialized_cls(ty)) {
    auto const dcls = dcls_of(ty);
    if (dcls.type == DCls::Exact) {
      // NOTE(review): the reduce(...) call wrapping this String push is on
      // lines missing from this extraction.
      bc::String { dcls.cls.name() });
  if (ty.subtypeOf(TCls)) nothrow(env);
// Shared implementation for Concat and ConcatN over n stack operands.
// Attempts, in order: folding literal runs into static strings, killing
// redundant prior concat ops, and tracking a two-operand concat so later
// concats can be merged. Falls back to recording Concat/ConcatN bytecodes.
// NOTE(review): this function has many extraction gaps (loop/branch bodies
// and closing braces are missing); confirm details against upstream.
void concatHelper(ISS& env, uint32_t n) {
  auto changed = false;
  auto side_effects = false;
  if (will_reduce(env)) {
    // Produce the static string for slot i concatenated with `next`
    // (or nullptr if the slot isn't a foldable literal).
    auto litstr = [&] (SString next, uint32_t i) -> SString {
      auto const t = topC(env, i);
      auto const v = tv(t);
      if (!v) return nullptr;
      // Only fold scalar types whose string conversion cannot raise.
      if (!isStringType(v->m_type) &&
          v->m_type != KindOfNull &&
          v->m_type != KindOfBoolean &&
          v->m_type != KindOfInt64 &&
          v->m_type != KindOfDouble) {
      auto const cell = eval_cell_value(
      auto const s = makeStaticString(
        StringData::Make(tvAsCVarRef(&*v).toString().get(), next) :
        tvAsCVarRef(&*v).toString().get());
      return make_tv<KindOfString>(s);
      if (!cell) return nullptr;
      return cell->m_data.pstr;
    // Replace `num` operands starting at `slot` with the folded literal.
    auto fold = [&] (uint32_t slot, uint32_t num, SString result) {
      auto const cell = make_tv<KindOfPersistentString>(result);
      auto const ty = from_cell(cell);
      BytecodeVec bcs{num, bc::PopC{}};
      if (num > 1) bcs.push_back(gen_constant(cell));
      reduce(env, std::move(bcs));
      insert_after_slot(env, slot, num, num > 1 ? 1 : 0, &ty, bcs);
    // Objects/arrays/resources may have observable __toString effects:
    // they block reordering and folding.
    for (auto i = 0; i < n; i++) {
      if (topC(env, i).couldBe(BObj | BArrLike | BRes)) {
        side_effects = true;
    if (!side_effects) {
      for (auto i = 0; i < n; i++) {
        auto const tracked = !env.trackedElems.empty() &&
          env.trackedElems.back().depth + i + 1 == env.state.stack.size();
        if (tracked) finish_tracked_elems(env, env.trackedElems.back().depth);
        auto const prev = op_from_slot(env, i);
        if (!prev) continue;
        if ((prev->op == Op::Concat && tracked) || prev->op == Op::ConcatN) {
          auto const extra = kill_by_slot(env, i);
      SString result = nullptr;
      // In order to collapse literals, we need to be able to insert
      // pops, and a constant after the sequence that generated the
      // literals. We can always insert after the last instruction
      // though, and we only need to check the first slot of a
      auto const next = !i || result || can_insert_after_slot(env, i) ?
        litstr(result, i) : nullptr;
      if (next == staticEmptyString()) {
        fold(i - nlit, nlit, result);
      if (nlit > 1) fold(i - nlit, nlit, result);
  // A plain two-way concat with no side effects can be tracked so a later
  // concat of its result can be merged into a ConcatN.
  if (n == 2 && !side_effects && will_reduce(env)) {
    env.trackedElems.emplace_back(
      env.state.stack.size(),
      env.unchangedBcs + env.replacedBcs.size()
  // Normalize a non-string top-of-stack through CastString first.
  if (!topC(env).subtypeOf(BStr)) {
    return reduce(env, bc::CastString{});
  // We can't reduce the emitted concats, or we'll end up with
  // infinite recursion.
  env.flags.wasPEI = true;
  env.flags.effectFree = false;
  env.flags.canConstProp = false;
  auto concat = [&] (uint32_t num) {
    record(env, bc::Concat{});
    record(env, bc::ConcatN{num});
  if (n > 1) concat(n);
1384 void in(ISS
& env
, const bc::Concat
& /*op*/) {
1385 concatHelper(env
, 2);
1388 void in(ISS
& env
, const bc::ConcatN
& op
) {
1389 if (op
.arg1
== 2) return reduce(env
, bc::Concat
{});
1390 concatHelper(env
, op
.arg1
);
// Shared interpreter for binary arithmetic bytecodes: pop both operands
// (t1 is the topmost, i.e. the right-hand side) and push fun(lhs, rhs).
// NOTE(review): an additional statement from the original (before the
// pops) is missing in this extraction — confirm upstream.
template <class Op, class Fun>
void arithImpl(ISS& env, const Op& /*op*/, Fun fun) {
  auto const t1 = popC(env);
  auto const t2 = popC(env);
  push(env, fun(t2, t1));
1401 void in(ISS
& env
, const bc::Add
& op
) { arithImpl(env
, op
, typeAdd
); }
1402 void in(ISS
& env
, const bc::Sub
& op
) { arithImpl(env
, op
, typeSub
); }
1403 void in(ISS
& env
, const bc::Mul
& op
) { arithImpl(env
, op
, typeMul
); }
1404 void in(ISS
& env
, const bc::Div
& op
) { arithImpl(env
, op
, typeDiv
); }
1405 void in(ISS
& env
, const bc::Mod
& op
) { arithImpl(env
, op
, typeMod
); }
1406 void in(ISS
& env
, const bc::Pow
& op
) { arithImpl(env
, op
, typePow
); }
1407 void in(ISS
& env
, const bc::BitAnd
& op
) { arithImpl(env
, op
, typeBitAnd
); }
1408 void in(ISS
& env
, const bc::BitOr
& op
) { arithImpl(env
, op
, typeBitOr
); }
1409 void in(ISS
& env
, const bc::BitXor
& op
) { arithImpl(env
, op
, typeBitXor
); }
1410 void in(ISS
& env
, const bc::AddO
& op
) { arithImpl(env
, op
, typeAddO
); }
1411 void in(ISS
& env
, const bc::SubO
& op
) { arithImpl(env
, op
, typeSubO
); }
1412 void in(ISS
& env
, const bc::MulO
& op
) { arithImpl(env
, op
, typeMulO
); }
1413 void in(ISS
& env
, const bc::Shl
& op
) { arithImpl(env
, op
, typeShl
); }
1414 void in(ISS
& env
, const bc::Shr
& op
) { arithImpl(env
, op
, typeShr
); }
// BitNot: when the operand is a known constant, evaluate ~v at analysis
// time and push the folded value; otherwise only TInitCell is known.
void in(ISS& env, const bc::BitNot& /*op*/) {
  auto const t = popC(env);
  auto const v = tv(t);
  // NOTE(review): the eval_cell body is on lines missing from this
  // extraction.
  auto cell = eval_cell([&] {
  if (cell) return push(env, std::move(*cell));
  push(env, TInitCell);
// Shared analysis for Same/NSame (===/!==). Returns the best known result
// type plus a bool saying whether the comparison might raise.
template<bool NSame>
std::pair<Type,bool> resolveSame(ISS& env) {
  auto const l1 = topStkEquiv(env, 0);
  auto const t1 = topC(env, 0);
  auto const l2 = topStkEquiv(env, 1);
  auto const t2 = topC(env, 1);

  // EvalHackArrCompatNotices will notice on === and !== between PHP arrays and
  // Hack arrays. We can't really do better than this in general because of
  // arrays inside these arrays.
  auto warningsEnabled =
    (RuntimeOption::EvalHackArrCompatNotices ||
     RuntimeOption::EvalEmitClsMethPointers);

  auto const result = [&] {
    auto const v1 = tv(t1);
    auto const v2 = tv(t2);

    // Two references to the same value compare === — unless both sides
    // could be a NaN double (NaN !== NaN).
    if (l1 == StackDupId ||
        (l1 == l2 && l1 != NoLocalId) ||
        (l1 <= MaxLocalId && l2 <= MaxLocalId && locsAreEquiv(env, l1, l2))) {
      if (!t1.couldBe(BDbl) || !t2.couldBe(BDbl) ||
          (v1 && (v1->m_type != KindOfDouble || !std::isnan(v1->m_data.dbl))) ||
          (v2 && (v2->m_type != KindOfDouble || !std::isnan(v2->m_data.dbl)))) {
        return NSame ? TFalse : TTrue;

    // Both operands constant: fold the comparison now.
    if (auto r = eval_cell_value([&]{ return tvSame(*v2, *v1); })) {
      // we wouldn't get here if cellSame raised a warning
      warningsEnabled = false;
      return r != NSame ? TTrue : TFalse;

    return NSame ? typeNSame(t1, t2) : typeSame(t1, t2);

  // A definitively-false === (or true !==) can't have raised.
  if (warningsEnabled && result == (NSame ? TFalse : TTrue)) {
    warningsEnabled = false;
  return { result, warningsEnabled && compare_might_raise(t1, t2) };
// Implementation of Same/NSame. Recognizes "null === x" patterns produced
// by a preceding Null push and reduces them to IsTypeC/IsTypeL null
// checks (negated via Not for NSame); otherwise defers to resolveSame.
template<bool Negate>
void sameImpl(ISS& env) {
  if (auto const last = last_op(env)) {
    if (last->op == Op::Null) {
      // Null; Same  ==>  IsTypeC Null
      reduce(env, bc::IsTypeC { IsTypeOp::Null });
      if (Negate) reduce(env, bc::Not {});
    if (auto const prev = last_op(env, 1)) {
      if (prev->op == Op::Null &&
          (last->op == Op::CGetL || last->op == Op::CGetL2 ||
           last->op == Op::CGetQuietL)) {
        // Null; CGetL*; Same  ==>  IsTypeL <loc> Null
        auto const loc = [&]() {
          if (last->op == Op::CGetL) {
            return last->CGetL.nloc1;
          } else if (last->op == Op::CGetL2) {
            return last->CGetL2.nloc1;
          } else if (last->op == Op::CGetQuietL) {
            return NamedLocal{kInvalidLocalName, last->CGetQuietL.loc1};
          always_assert(false);
        reduce(env, bc::IsTypeL { loc, IsTypeOp::Null });
        if (Negate) reduce(env, bc::Not {});
  auto pair = resolveSame<Negate>(env);
  push(env, std::move(pair.first));
// Refines local/stack types across a conditional jump whose condition was
// a Same/NSame. Returns true if the refinement was performed (so the
// caller can fix up its flags). NOTE(review): extraction gaps exist in
// several branch bodies below; confirm details against upstream.
template<class JmpOp>
bool sameJmpImpl(ISS& env, Op sameOp, const JmpOp& jmp) {
  const StackElem* elems[2];
  env.state.stack.peek(2, elems, 1);

  auto const loc0 = elems[1]->equivLoc;
  auto const loc1 = elems[0]->equivLoc;
  // If loc0 == loc1, either they're both NoLocalId, so there's
  // nothing for us to deduce, or both stack elements are the same
  // value, so the only thing we could deduce is that they are or are
  // not NaN. But we don't track that, so just bail.
  if (loc0 == loc1 || loc0 == StackDupId) return false;

  auto const ty0 = elems[1]->type;
  auto const ty1 = elems[0]->type;
  auto const val0 = tv(ty0);
  auto const val1 = tv(ty1);

  assertx(!val0 || !val1);
  // If one side has no location and no constant, and the other side's
  // type already subsumes it, there's nothing to refine.
  if ((loc0 == NoLocalId && !val0 && ty1.subtypeOf(ty0)) ||
      (loc1 == NoLocalId && !val1 && ty0.subtypeOf(ty1))) {

  // Same currently lies about the distinction between Func/Cls/Str
  if (ty0.couldBe(BCls) && ty1.couldBe(BStr)) return false;
  if (ty1.couldBe(BCls) && ty0.couldBe(BStr)) return false;

  // We need to loosen provenance here because it doesn't affect same / equal.
  auto isect = intersection_of(loosen_provenance(ty0), loosen_provenance(ty1));

  // Unfortunately, floating point negative zero and positive zero are
  // different, but are identical using as far as Same is concerened. We should
  // avoid refining a value to 0.0 because it compares identically to 0.0
  if (isect.couldBe(dval(0.0)) || isect.couldBe(dval(-0.0))) {
    isect = union_of(isect, TDbl);

  auto handle_same = [&] {
    // Currently dce uses equivalency to prove that something isn't
    // the last reference - so we can only assert equivalency here if
    // we know that won't be affected. Its irrelevant for uncounted
    // things, and for TObj and TRes, $x === $y iff $x and $y refer to
    if (loc0 <= MaxLocalId &&
        (ty0.subtypeOf(BObj | BRes | BPrim) ||
         ty1.subtypeOf(BObj | BRes | BPrim) ||
         (ty0.subtypeOf(BUnc) && ty1.subtypeOf(BUnc)))) {
      if (loc1 == StackDupId) {
        setStkLocal(env, loc0, 0);
      } else if (loc1 <= MaxLocalId && !locsAreEquiv(env, loc0, loc1)) {
        // Rewire the equivalence chain so loc joins loc1's class.
        auto const other = findLocEquiv(env, loc);
        if (other == NoLocalId) break;
        killLocEquiv(env, loc);
        addLocEquiv(env, loc, loc1);
        addLocEquiv(env, loc, loc1);
    // Both sides are the same value: refine to the intersection type,
    // re-adding Uninit where the original could have been uninit.
    return refineLocation(env, loc1 != NoLocalId ? loc1 : loc0, [&] (Type ty) {
      auto const needsUninit =
        ty.couldBe(BUninit) &&
        !isect.couldBe(BUninit) &&
        isect.couldBe(BInitNull);
      auto ret = ty.subtypeOf(BUnc) ? isect : loosen_staticness(isect);
      if (needsUninit) ret = union_of(std::move(ret), TUninit);

  // When the values differ and one side is null or a known bool, the other
  // side's type can be refined on that branch.
  auto handle_differ_side = [&] (LocalId location, const Type& ty) {
    if (!ty.subtypeOf(BInitNull) && !ty.strictSubtypeOf(TBool)) return true;
    return refineLocation(env, location, [&] (Type t) {
      if (ty.subtypeOf(BNull)) {
        t = remove_uninit(std::move(t));
        if (is_opt(t)) t = unopt(std::move(t));
      } else if (ty.strictSubtypeOf(TBool) && t.subtypeOf(BBool)) {
        return ty == TFalse ? TTrue : TFalse;

  auto handle_differ = [&] {
    (loc0 == NoLocalId || handle_differ_side(loc0, ty1)) &&
    (loc1 == NoLocalId || handle_differ_side(loc1, ty0));

  // Which outcome (same vs differ) flows to the jump target.
  auto const sameIsJmpTarget =
    (sameOp == Op::Same) == (JmpOp::op == Op::JmpNZ);

  auto save = env.state;
  auto const target_reachable = sameIsJmpTarget ?
    handle_same() : handle_differ();
  if (!target_reachable) jmp_nevertaken(env);
  // swap, so we can restore this state if the branch is always taken.
  env.state.swap(save);
  if (!(sameIsJmpTarget ? handle_differ() : handle_same())) {
    jmp_setdest(env, jmp.target1);
    env.state.copy_from(std::move(save));
  } else if (target_reachable) {
    env.propagate(jmp.target1, &save);
1635 bc::JmpNZ
invertJmp(const bc::JmpZ
& jmp
) { return bc::JmpNZ
{ jmp
.target1
}; }
1636 bc::JmpZ
invertJmp(const bc::JmpNZ
& jmp
) { return bc::JmpZ
{ jmp
.target1
}; }
1640 void in(ISS
& env
, const bc::Same
&) { sameImpl
<false>(env
); }
1641 void in(ISS
& env
, const bc::NSame
&) { sameImpl
<true>(env
); }
// Shared implementation for comparison ops producing a bool: pop both
// operands and, when both are known constants, fold to TTrue/TFalse.
// NOTE(review): the template header (template<class Fun>) and the
// non-constant fallthrough are on lines missing from this extraction.
void binOpBoolImpl(ISS& env, Fun fun) {
  auto const t1 = popC(env);
  auto const t2 = popC(env);
  auto const v1 = tv(t1);
  auto const v2 = tv(t2);
  if (auto r = eval_cell_value([&]{ return fun(*v2, *v1); })) {
    return push(env, *r ? TTrue : TFalse);
  // TODO_4: evaluate when these can throw, non-constant type stuff.
// Like binOpBoolImpl but for comparisons producing an int64 (Cmp):
// constant operands fold to an ival.
// NOTE(review): the template header is on a line missing from this
// extraction.
void binOpInt64Impl(ISS& env, Fun fun) {
  auto const t1 = popC(env);
  auto const t2 = popC(env);
  auto const v1 = tv(t1);
  auto const v2 = tv(t2);
  if (auto r = eval_cell_value([&]{ return ival(fun(*v2, *v1)); })) {
    return push(env, std::move(*r));
  // TODO_4: evaluate when these can throw, non-constant type stuff.
// Eq (==): if the stronger Same analysis already proves the operands
// identical, the result is true without evaluating ==; otherwise fall back
// to constant folding via tvEqual.
void in(ISS& env, const bc::Eq&) {
  auto rs = resolveSame<false>(env);
  if (rs.first == TTrue) {
    // rs.second is the might-raise flag from resolveSame.
    if (!rs.second) constprop(env);
    return push(env, TTrue);
  binOpBoolImpl(env, [&] (TypedValue c1, TypedValue c2) { return tvEqual(c1, c2); });
// Neq (!=): dual of Eq — identical operands make != definitely false.
void in(ISS& env, const bc::Neq&) {
  auto rs = resolveSame<false>(env);
  if (rs.first == TTrue) {
    if (!rs.second) constprop(env);
    return push(env, TFalse);
  binOpBoolImpl(env, [&] (TypedValue c1, TypedValue c2) { return !tvEqual(c1, c2); });
1693 void in(ISS
& env
, const bc::Lt
&) {
1694 binOpBoolImpl(env
, [&] (TypedValue c1
, TypedValue c2
) { return tvLess(c1
, c2
); });
1696 void in(ISS
& env
, const bc::Gt
&) {
1697 binOpBoolImpl(env
, [&] (TypedValue c1
, TypedValue c2
) { return tvGreater(c1
, c2
); });
1699 void in(ISS
& env
, const bc::Lte
&) { binOpBoolImpl(env
, tvLessOrEqual
); }
1700 void in(ISS
& env
, const bc::Gte
&) { binOpBoolImpl(env
, tvGreaterOrEqual
); }
1702 void in(ISS
& env
, const bc::Cmp
&) {
1703 binOpInt64Impl(env
, [&] (TypedValue c1
, TypedValue c2
) { return tvCompare(c1
, c2
); });
1706 void in(ISS
& env
, const bc::Xor
&) {
1707 binOpBoolImpl(env
, [&] (TypedValue c1
, TypedValue c2
) {
1708 return tvToBool(c1
) ^ tvToBool(c2
);
// Shared bool-cast logic for CastBool and Not; `negate` selects the Not
// behavior. When the emptiness of the input type is statically decided we
// can push a constant bool.
void castBoolImpl(ISS& env, const Type& t, bool negate) {
  auto const e = emptiness(t);
    case Emptiness::Empty:
    case Emptiness::NonEmpty:
      return push(env, (e == Emptiness::Empty) == negate ? TTrue : TFalse);
    case Emptiness::Maybe:
1728 void in(ISS
& env
, const bc::Not
&) {
1729 castBoolImpl(env
, popC(env
), true);
1732 void in(ISS
& env
, const bc::CastBool
&) {
1733 auto const t
= topC(env
);
1734 if (t
.subtypeOf(BBool
)) return reduce(env
);
1735 castBoolImpl(env
, popC(env
), false);
// CastInt: a no-op on ints; nothrow unless the operand could be an object
// (objects may warn on int conversion); folds known constant values.
// NOTE(review): extraction gaps — the pop of the operand and the final
// non-constant push are on lines missing from this view.
void in(ISS& env, const bc::CastInt&) {
  auto const t = topC(env);
  if (t.subtypeOf(BInt)) return reduce(env);
  // Objects can raise a warning about converting to int.
  if (!t.couldBe(BObj)) nothrow(env);
  if (auto const v = tv(t)) {
    auto cell = eval_cell([&] {
      return make_tv<KindOfInt64>(tvToInt(*v));
    if (cell) return push(env, std::move(*cell));
// Handle a casting operation, where "target" is the type being casted to. If
// "fn" is provided, it will be called to cast any constant inputs. If "elide"
// is set to true, if the source type is the same as the destination, the cast
// will be optimized away.
void castImpl(ISS& env, Type target, void(*fn)(TypedValue*)) {
  auto const t = topC(env);
  if (t.subtypeOf(target)) return reduce(env);

  // Provenance-carrying casts in PSF functions need the runtime tag, so we
  // must not constant-fold them here.
  auto const needsRuntimeProvenance =
    RO::EvalArrayProvenance &&
    env.ctx.func->attrs & AttrProvenanceSkipFrame &&
    target.subtypeOf(kProvBits);

  if (fn && !needsRuntimeProvenance) {
    if (auto val = tv(t)) {
      // Legacy dvarrays may raise a notice on cast. In order to simplify the
      // rollout of these notices, we don't const-fold casts on these arrays.
      auto const may_raise_notice = [&]{
        if (!tvIsArrayLike(*val)) return false;
        auto const ad = val->m_data.parr;
        if (!ad->isLegacyArray()) return false;
        return (ad->isDArray() && target == TDict) ||
               (ad->isVArray() && target == TVec);
      if (!may_raise_notice) {
        if (auto result = eval_cell([&] { fn(&*val); return *val; })) {
  push(env, std::move(target));
1790 void in(ISS
& env
, const bc::CastDouble
&) {
1791 castImpl(env
, TDbl
, tvCastToDoubleInPlace
);
1794 void in(ISS
& env
, const bc::CastString
&) {
1795 castImpl(env
, TStr
, tvCastToStringInPlace
);
1798 void in(ISS
& env
, const bc::CastDict
&) {
1799 castImpl(env
, TDict
, tvCastToDictInPlace
);
1802 void in(ISS
& env
, const bc::CastVec
&) {
1803 castImpl(env
, TVec
, tvCastToVecInPlace
);
1806 void in(ISS
& env
, const bc::CastKeyset
&) {
1807 castImpl(env
, TKeyset
, tvCastToKeysetInPlace
);
1810 void in(ISS
& env
, const bc::CastVArray
&) {
1811 assertx(!RuntimeOption::EvalHackArrDVArrs
);
1812 arrprov::TagOverride tag_override
{provTagHere(env
).get()};
1813 castImpl(env
, TVArr
, tvCastToVArrayInPlace
);
1816 void in(ISS
& env
, const bc::CastDArray
&) {
1817 assertx(!RuntimeOption::EvalHackArrDVArrs
);
1818 arrprov::TagOverride tag_override
{provTagHere(env
).get()};
1819 castImpl(env
, TDArr
, tvCastToDArrayInPlace
);
// DblAsBits reinterprets a double's bit pattern as an int64. An operand
// that cannot be a double yields 0; a known double constant is folded by
// retagging the TypedValue in place.
void in(ISS& env, const bc::DblAsBits&) {
  auto const ty = popC(env);
  if (!ty.couldBe(BDbl)) return push(env, ival(0));
  if (auto val = tv(ty)) {
    assertx(isDoubleType(val->m_type));
    val->m_type = KindOfInt64;
    push(env, from_cell(*val));
// Print handler.
// NOTE(review): the body is entirely missing from this extraction —
// confirm behavior (consumes the value, produces output) upstream.
void in(ISS& env, const bc::Print& /*op*/) {
1844 void in(ISS
& env
, const bc::Clone
& /*op*/) {
1845 auto val
= popC(env
);
1846 if (!val
.subtypeOf(BObj
)) {
1847 val
= is_opt(val
) ? unopt(std::move(val
)) : TObj
;
1849 push(env
, std::move(val
));
1852 void in(ISS
& env
, const bc::Exit
&) { popC(env
); push(env
, TInitNull
); }
1853 void in(ISS
& env
, const bc::Fatal
&) { popC(env
); }
1855 void in(ISS
& /*env*/, const bc::JmpNS
&) {
1856 always_assert(0 && "blocks should not contain JmpNS instructions");
1859 void in(ISS
& /*env*/, const bc::Jmp
&) {
1860 always_assert(0 && "blocks should not contain Jmp instructions");
// Select pops (cond, true-val, false-val). When the condition's emptiness
// is statically decided we can reduce to pops plus the chosen value;
// otherwise the result is the union of both branches.
void in(ISS& env, const bc::Select& op) {
  auto const cond = topC(env);
  auto const t = topC(env, 1);
  auto const f = topC(env, 2);
  switch (emptiness(cond)) {
    case Emptiness::Maybe:
      push(env, union_of(t, f));
    case Emptiness::NonEmpty:
    case Emptiness::Empty:
      return reduce(env, bc::PopC {}, bc::PopC {});
// Refines a location's type across a conditional jump whose condition was
// an is-type check (IsTypeC/IsTypeL/IssetL). Returns true when refinement
// happened. NOTE(review): the remaining signature parameters (typeOp,
// location, op, jmp) are on lines missing from this extraction.
template<class JmpOp>
bool isTypeHelper(ISS& env,
  // Scalar checks don't map to a single refinable type.
  if (typeOp == IsTypeOp::Scalar) {

  // For IsTypeC the value lives on the stack; recover its equivalent
  // local (if any) so we can refine it.
  auto const val = [&] {
    if (op != Op::IsTypeC) return locRaw(env, location);
    const StackElem* elem;
    env.state.stack.peek(1, &elem, 1);
    location = elem->equivLoc;
  if (location == NoLocalId || !val.subtypeOf(BCell)) return false;

  // If the type could be ClsMeth and Arr/Vec, skip location refining.
  // Otherwise, refine location based on the testType.
  auto testTy = type_of_istype(typeOp);

  assertx(val.couldBe(testTy) &&
          (!val.subtypeOf(testTy) || val.subtypeOf(BObj)));

  if (op == Op::IsTypeC) {
    if (!is_type_might_raise(testTy, val)) nothrow(env);
  } else if (op == Op::IssetL) {
  } else if (!locCouldBeUninit(env, location) &&
             !is_type_might_raise(testTy, val)) {

  // Which branch corresponds to the check passing.
  auto const negate = (jmp.op == Op::JmpNZ) == (op != Op::IssetL);
  auto const was_true = [&] (Type t) {
    if (testTy.subtypeOf(BNull)) return intersection_of(t, TNull);
    assertx(!testTy.couldBe(BNull));
    return intersection_of(t, testTy);
  auto const was_false = [&] (Type t) {
    auto tinit = remove_uninit(t);
    if (testTy.subtypeOf(BNull)) {
      return is_opt(tinit) ? unopt(tinit) : tinit;
    if (is_opt(tinit)) {
      assertx(!testTy.couldBe(BNull));
      if (unopt(tinit).subtypeOf(testTy)) return TNull;

  auto const pre = [&] (Type t) {
    return negate ? was_true(std::move(t)) : was_false(std::move(t));

  auto const post = [&] (Type t) {
    return negate ? was_false(std::move(t)) : was_true(std::move(t));

  refineLocation(env, location, pre, jmp.target1, post);
// If the current function is a memoize wrapper, return the inferred return type
// of the function being wrapped along with if the wrapped function is effect
std::pair<Type, bool> memoizeImplRetType(ISS& env) {
  always_assert(env.ctx.func->isMemoizeWrapper);

  // Lookup the wrapped function. This should always resolve to a precise
  // function but we don't rely on it.
  auto const memo_impl_func = [&] {
    if (env.ctx.func->cls) {
      auto const clsTy = selfClsExact(env);
      return env.index.resolve_method(
        clsTy ? *clsTy : TCls,
        memoize_impl_name(env.ctx.func)
    return env.index.resolve_func(env.ctx, memoize_impl_name(env.ctx.func));

  // Infer the return type of the wrapped function, taking into account the
  // types of the parameters for context sensitive types.
  auto const numArgs = env.ctx.func->params.size();
  CompactVector<Type> args{numArgs};
  for (auto i = LocalId{0}; i < numArgs; ++i) {
    args[i] = locAsCell(env, i);

  // Determine the context the wrapped function will be called on.
  auto const ctxType = [&]() -> Type {
    if (env.ctx.func->cls) {
      if (env.ctx.func->attrs & AttrStatic) {
        // The class context for static methods is the method's class,
        // if LSB is not specified.
        env.ctx.func->isMemoizeWrapperLSB ?
        return clsTy ? *clsTy : TCls;
    return thisTypeNonNull(env);

  auto retTy = env.index.lookup_return_type(
  auto const effectFree = env.index.is_effect_free(memo_impl_func);
  // Regardless of anything we know the return type will be an InitCell (this is
  // a requirement of memoize functions).
  if (!retTy.subtypeOf(BInitCell)) return { TInitCell, effectFree };
  return { retTy, effectFree };
// Refines a location's type across a conditional jump whose condition was
// an InstanceOfD check. Returns true when refinement happened.
template<class JmpOp>
bool instanceOfJmpImpl(ISS& env,
                       const bc::InstanceOfD& inst,
  const StackElem* elem;
  env.state.stack.peek(1, &elem, 1);

  auto const locId = elem->equivLoc;
  // Interfaces that non-objects can satisfy make the check unrefinable.
  if (locId == NoLocalId || interface_supports_non_objects(inst.str1)) {
  auto const rcls = env.index.resolve_class(env.ctx, inst.str1);
  if (!rcls) return false;

  auto const val = elem->type;
  auto const instTy = subObj(*rcls);
  assertx(!val.subtypeOf(instTy) && val.couldBe(instTy));

  // If we have an optional type, whose unopt is guaranteed to pass
  // the instanceof check, then failing to pass implies it was null.
  auto const fail_implies_null = is_opt(val) && unopt(val).subtypeOf(instTy);

  auto const negate = jmp.op == Op::JmpNZ;
  auto const result = [&] (Type t, bool pass) {
    return pass ? instTy : fail_implies_null ? TNull : t;
  auto const pre  = [&] (Type t) { return result(t, negate); };
  auto const post = [&] (Type t) { return result(t, !negate); };
  refineLocation(env, locId, pre, jmp.target1, post);
// Refines a location across a conditional jump whose condition was an
// IsTypeStructC check on a statically known type structure. Only the
// class-like type-structure kinds are handled — for those the refinement
// matches what InstanceOfD would do.
template<class JmpOp>
bool isTypeStructCJmpImpl(ISS& env,
                          const bc::IsTypeStructC& inst,
  const StackElem* elems[2];
  env.state.stack.peek(2, elems, 1);

  auto const locId = elems[0]->equivLoc;
  if (locId == NoLocalId) return false;

  auto const a = tv(elems[1]->type);
  if (!a) return false;
  // if it wasn't valid, the JmpOp wouldn't be reachable
  assertx(isValidTSType(*a, false));

  auto const is_nullable_ts = is_ts_nullable(a->m_data.parr);
  auto const ts_kind = get_ts_kind(a->m_data.parr);
  // type_of_type_structure does not resolve these types. It is important we
  // do resolve them here, or we may have issues when we reduce the checks to
  // InstanceOfD checks. This logic performs the same exact refinement as
  // instanceOfD will.
  if (is_nullable_ts ||
      (ts_kind != TypeStructure::Kind::T_class &&
       ts_kind != TypeStructure::Kind::T_interface &&
       ts_kind != TypeStructure::Kind::T_xhp &&
       ts_kind != TypeStructure::Kind::T_unresolved)) {

  auto const clsName = get_ts_classname(a->m_data.parr);
  auto const rcls = env.index.resolve_class(env.ctx, clsName);
  // Enums and non-object-supporting interfaces can't be refined this way.
    !rcls->resolved() ||
    rcls->cls()->attrs & AttrEnum ||
    interface_supports_non_objects(clsName)) {

  auto const val = elems[0]->type;
  auto const instTy = subObj(*rcls);
  if (val.subtypeOf(instTy) || !val.couldBe(instTy)) {

  // If we have an optional type, whose unopt is guaranteed to pass
  // the instanceof check, then failing to pass implies it was null.
  auto const fail_implies_null = is_opt(val) && unopt(val).subtypeOf(instTy);

  auto const negate = jmp.op == Op::JmpNZ;
  auto const result = [&] (Type t, bool pass) {
    return pass ? instTy : fail_implies_null ? TNull : t;
  auto const pre  = [&] (Type t) { return result(t, negate); };
  auto const post = [&] (Type t) { return result(t, !negate); };
  refineLocation(env, locId, pre, jmp.target1, post);
// Shared implementation of JmpZ/JmpNZ. Tries, in order: deciding the
// branch statically from the condition's emptiness, eliding jumps to the
// fallthrough block, and folding the preceding comparison/is-type/
// instanceof op into a branch-sensitive type refinement. Otherwise
// refines the condition's location along both edges.
// NOTE(review): the `fix` lambda's header and several argument lists are
// on lines missing from this extraction.
template<class JmpOp>
void jmpImpl(ISS& env, const JmpOp& op) {
  auto const Negate = std::is_same<JmpOp, bc::JmpNZ>::value;
  auto const location = topStkEquiv(env);
  auto const e = emptiness(topC(env));
  // Branch always taken: reduce to a pop and redirect the fallthrough.
  if (e == (Negate ? Emptiness::NonEmpty : Emptiness::Empty)) {
    reduce(env, bc::PopC {});
    return jmp_setdest(env, op.target1);

  // Branch never taken, or both edges reach the same real block.
  if (e == (Negate ? Emptiness::Empty : Emptiness::NonEmpty) ||
      (next_real_block(env.ctx.func, env.blk.fallthrough) ==
       next_real_block(env.ctx.func, op.target1))) {
    return reduce(env, bc::PopC {});

  // Fix-up used after a helper folded the preceding op: re-issue the PopC
  // while preserving any jmpDest the helper established.
  if (env.flags.jmpDest == NoBlockId) return;
  auto const jmpDest = env.flags.jmpDest;
  env.flags.jmpDest = NoBlockId;
  reduce(env, bc::PopC {});
  env.flags.jmpDest = jmpDest;

  if (auto const last = last_op(env)) {
    if (last->op == Op::Not) {
      return reduce(env, invertJmp(op));
    if (last->op == Op::Same || last->op == Op::NSame) {
      if (sameJmpImpl(env, last->op, op)) return fix();
    } else if (last->op == Op::IssetL) {
      if (isTypeHelper(env,
    } else if (last->op == Op::IsTypeL) {
      if (isTypeHelper(env,
                       last->IsTypeL.subop2,
                       last->IsTypeL.nloc1.id,
    } else if (last->op == Op::IsTypeC) {
      if (isTypeHelper(env,
                       last->IsTypeC.subop1,
    } else if (last->op == Op::InstanceOfD) {
      if (instanceOfJmpImpl(env, last->InstanceOfD, op)) return fix();
    } else if (last->op == Op::IsTypeStructC) {
      if (isTypeStructCJmpImpl(env, last->IsTypeStructC, op)) return fix();

  if (location == NoLocalId) return env.propagate(op.target1, &env.state);

  refineLocation(env, location,
                 Negate ? assert_nonemptiness : assert_emptiness,
                 Negate ? assert_emptiness : assert_nonemptiness);
2187 void in(ISS
& env
, const bc::JmpNZ
& op
) { jmpImpl(env
, op
); }
2188 void in(ISS
& env
, const bc::JmpZ
& op
) { jmpImpl(env
, op
); }
// Switch: with a known constant selector the branch can be decided now
// (either by direct indexing for unbounded switches, or by comparing
// against each case value). Otherwise propagate state to every target.
void in(ISS& env, const bc::Switch& op) {
  auto v = tv(topC(env));

  auto go = [&] (BlockId blk) {
    reduce(env, bc::PopC {});
    return jmp_setdest(env, blk);

  auto num_elems = op.targets.size();
  if (op.subop1 == SwitchKind::Unbounded) {
    // In-range int selects its target directly.
    if (v->m_type == KindOfInt64 &&
        v->m_data.num >= 0 && v->m_data.num < num_elems) {
      return go(op.targets[v->m_data.num]);
    assertx(num_elems > 2);
    // Bounded switch: compare against arg2 + i; the final target is the
    // default.
    for (auto i = size_t{}; ; i++) {
      if (i == num_elems) {
        return go(op.targets.back());
      auto match = eval_cell_value([&] {
        return tvEqual(*v, static_cast<int64_t>(op.arg2 + i));
        return go(op.targets[i]);

  // Unknown selector: all edges are possible.
  forEachTakenEdge(op, [&] (BlockId id) {
    env.propagate(id, &env.state);
// SSwitch (string switch): with a known constant selector, compare against
// each case string (a null case key is the default) and jump directly.
// Otherwise propagate state to every target.
void in(ISS& env, const bc::SSwitch& op) {
  auto v = tv(topC(env));

  for (auto& kv : op.targets) {
    auto match = eval_cell_value([&] {
      return !kv.first || tvEqual(*v, kv.first);
      reduce(env, bc::PopC {});
      return jmp_setdest(env, kv.second);

  forEachTakenEdge(op, [&] (BlockId id) {
    env.propagate(id, &env.state);
2250 void in(ISS
& env
, const bc::RetC
& /*op*/) {
2251 auto const locEquiv
= topStkLocal(env
);
2252 doRet(env
, popC(env
), false);
2253 if (locEquiv
!= NoLocalId
&& locEquiv
< env
.ctx
.func
->params
.size()) {
2254 env
.flags
.retParam
= locEquiv
;
2257 void in(ISS
& env
, const bc::RetM
& op
) {
2258 std::vector
<Type
> ret(op
.arg1
);
2259 for (int i
= 0; i
< op
.arg1
; i
++) {
2260 ret
[op
.arg1
- i
- 1] = popC(env
);
2262 doRet(env
, vec(std::move(ret
)), false);
// RetCSuspended: only valid in non-generator async functions; the
// effective return type is the returned wait handle's inner type.
// NOTE(review): the doRet call wrapping the expression below is on lines
// missing from this extraction.
void in(ISS& env, const bc::RetCSuspended&) {
  always_assert(env.ctx.func->isAsync && !env.ctx.func->isGenerator);

  auto const t = popC(env);
    is_specialized_wait_handle(t) ? wait_handle_inner(t) : TInitCell,
// Throw handler.
// NOTE(review): the body is missing from this extraction — confirm
// (consumes the exception object, terminates the block) upstream.
void in(ISS& env, const bc::Throw& /*op*/) {
2280 void in(ISS
& env
, const bc::ThrowNonExhaustiveSwitch
& /*op*/) {}
2282 void in(ISS
& env
, const bc::RaiseClassStringConversionWarning
& /*op*/) {}
// ChainFaults handler.
// NOTE(review): the body is missing from this extraction — confirm
// upstream.
void in(ISS& env, const bc::ChainFaults&) {
// NativeImpl: return type comes from the native function's signature, with
// a special case for collection methods documented to return $this.
// NOTE(review): extraction gaps before the first branch — confirm
// upstream.
void in(ISS& env, const bc::NativeImpl&) {
  if (is_collection_method_returning_this(env.ctx.cls, env.ctx.func)) {
    auto const resCls = env.index.builtin_class(env.ctx.cls->name);
    return doRet(env, objExact(resCls), true);

  if (env.ctx.func->nativeInfo) {
    return doRet(env, native_function_return_type(env.ctx.func), true);
  doRet(env, TInitCell, true);
2302 void in(ISS
& env
, const bc::CGetL
& op
) {
2303 if (locIsThis(env
, op
.nloc1
.id
)) {
2304 auto const& ty
= peekLocRaw(env
, op
.nloc1
.id
);
2305 if (!ty
.subtypeOf(BInitNull
)) {
2306 auto const subop
= ty
.couldBe(BUninit
) ?
2307 BareThisOp::Notice
: ty
.couldBe(BNull
) ?
2308 BareThisOp::NoNotice
: BareThisOp::NeverNull
;
2309 return reduce(env
, bc::BareThis
{ subop
});
2312 if (auto const last
= last_op(env
)) {
2313 if (last
->op
== Op::PopL
&&
2314 op
.nloc1
.id
== last
->PopL
.loc1
) {
2317 setLocRaw(env
, op
.nloc1
.id
, TCell
);
2318 return reduce(env
, bc::SetL
{ op
.nloc1
.id
});
2321 if (!peekLocCouldBeUninit(env
, op
.nloc1
.id
)) {
2322 auto const minLocEquiv
= findMinLocEquiv(env
, op
.nloc1
.id
, false);
2323 auto const loc
= minLocEquiv
!= NoLocalId
? minLocEquiv
: op
.nloc1
.id
;
2324 return reduce(env
, bc::CGetQuietL
{ loc
});
2326 mayReadLocal(env
, op
.nloc1
.id
);
2327 push(env
, locAsCell(env
, op
.nloc1
.id
), op
.nloc1
.id
);
2330 void in(ISS
& env
, const bc::CGetQuietL
& op
) {
2331 if (locIsThis(env
, op
.loc1
)) {
2332 return reduce(env
, bc::BareThis
{ BareThisOp::NoNotice
});
2334 if (auto const last
= last_op(env
)) {
2335 if (last
->op
== Op::PopL
&&
2336 op
.loc1
== last
->PopL
.loc1
) {
2339 setLocRaw(env
, op
.loc1
, TCell
);
2340 return reduce(env
, bc::SetL
{ op
.loc1
});
2343 auto const minLocEquiv
= findMinLocEquiv(env
, op
.loc1
, true);
2344 if (minLocEquiv
!= NoLocalId
) {
2345 return reduce(env
, bc::CGetQuietL
{ minLocEquiv
});
2350 mayReadLocal(env
, op
.loc1
);
2351 push(env
, locAsCell(env
, op
.loc1
), op
.loc1
);
2354 void in(ISS
& env
, const bc::CUGetL
& op
) {
2355 auto ty
= locRaw(env
, op
.loc1
);
2356 if (ty
.subtypeOf(BUninit
)) {
2357 return reduce(env
, bc::NullUninit
{});
2360 if (!ty
.couldBe(BUninit
)) constprop(env
);
2361 if (!ty
.subtypeOf(BCell
)) ty
= TCell
;
2362 push(env
, std::move(ty
), op
.loc1
);
2365 void in(ISS
& env
, const bc::PushL
& op
) {
2366 if (auto val
= tv(peekLocRaw(env
, op
.loc1
))) {
2367 return reduce(env
, bc::UnsetL
{ op
.loc1
}, gen_constant(*val
));
2370 auto const minLocEquiv
= findMinLocEquiv(env
, op
.loc1
, false);
2371 if (minLocEquiv
!= NoLocalId
) {
2372 return reduce(env
, bc::CGetQuietL
{ minLocEquiv
}, bc::UnsetL
{ op
.loc1
});
2375 if (auto const last
= last_op(env
)) {
2376 if (last
->op
== Op::PopL
&&
2377 last
->PopL
.loc1
== op
.loc1
) {
2378 // rewind is ok, because we're just going to unset the local
2379 // (and note the unset can't be a no-op because the PopL set it
2380 // to an InitCell). But its possible that before the PopL, the
2381 // local *was* unset, so maybe would have killed the no-op. The
2382 // only way to fix that is to reprocess the block with the new
2383 // instruction sequence and see what happens.
2386 return reduce(env
, bc::UnsetL
{ op
.loc1
});
2390 impl(env
, bc::CGetQuietL
{ op
.loc1
}, bc::UnsetL
{ op
.loc1
});
2393 void in(ISS
& env
, const bc::CGetL2
& op
) {
2394 if (auto const last
= last_op(env
)) {
2395 if ((poppable(last
->op
) && !numPop(*last
)) ||
2396 ((last
->op
== Op::CGetL
|| last
->op
== Op::CGetQuietL
) &&
2397 !peekLocCouldBeUninit(env
, op
.nloc1
.id
))) {
2398 auto const other
= *last
;
2400 return reduce(env
, bc::CGetL
{ op
.nloc1
}, other
);
2404 if (!peekLocCouldBeUninit(env
, op
.nloc1
.id
)) {
2405 auto const minLocEquiv
= findMinLocEquiv(env
, op
.nloc1
.id
, false);
2406 if (minLocEquiv
!= NoLocalId
) {
2407 return reduce(env
, bc::CGetL2
{ { kInvalidLocalName
, minLocEquiv
} });
2411 mayReadLocal(env
, op
.nloc1
.id
);
2412 auto loc
= locAsCell(env
, op
.nloc1
.id
);
2413 auto topEquiv
= topStkLocal(env
);
2414 auto top
= popT(env
);
2415 push(env
, std::move(loc
), op
.nloc1
.id
);
2416 push(env
, std::move(top
), topEquiv
);
2419 void in(ISS
& env
, const bc::CGetG
&) { popC(env
); push(env
, TInitCell
); }
2421 void in(ISS
& env
, const bc::CGetS
& op
) {
2422 auto const tcls
= popC(env
);
2423 auto const tname
= popC(env
);
2424 auto const vname
= tv(tname
);
2425 auto const self
= selfCls(env
);
2427 if (vname
&& vname
->m_type
== KindOfPersistentString
&&
2428 self
&& tcls
.subtypeOf(*self
)) {
2429 if (auto ty
= selfPropAsCell(env
, vname
->m_data
.pstr
)) {
2430 // Only nothrow when we know it's a private declared property (and thus
2431 // accessible here), class initialization won't throw, and its not a
2432 // LateInit prop (which will throw if not initialized).
2433 if (!classInitMightRaise(env
, tcls
) &&
2434 !isMaybeLateInitSelfProp(env
, vname
->m_data
.pstr
)) {
2437 // We can only constprop here if we know for sure this is exactly the
2438 // correct class. The reason for this is that you could have a LSB
2439 // class attempting to access a private static in a derived class with
2440 // the same name as a private static in this class, which is supposed to
2441 // fatal at runtime (for an example see test/quick/static_sprop2.php).
2442 auto const selfExact
= selfClsExact(env
);
2443 if (selfExact
&& tcls
.subtypeOf(*selfExact
)) constprop(env
);
2446 if (ty
->subtypeOf(BBottom
)) unreachable(env
);
2447 return push(env
, std::move(*ty
));
2451 auto indexTy
= env
.index
.lookup_public_static(env
.ctx
, tcls
, tname
);
2452 if (indexTy
.subtypeOf(BInitCell
)) {
2454 * Constant propagation here can change when we invoke autoload.
2455 * It's safe not to check anything about private or protected static
2456 * properties, because you can't override a public static property with
2457 * a private or protected one---if the index gave us back a constant type,
2458 * it's because it found a public static and it must be the property this
2459 * would have read dynamically.
2461 if (!classInitMightRaise(env
, tcls
) &&
2462 !env
.index
.lookup_public_static_maybe_late_init(tcls
, tname
)) {
2465 if (indexTy
.subtypeOf(BBottom
)) unreachable(env
);
2466 return push(env
, std::move(indexTy
));
2469 push(env
, TInitCell
);
2472 void in(ISS
& env
, const bc::ClassGetC
& op
) {
2473 auto const t
= topC(env
);
2475 if (t
.subtypeOf(BCls
)) return reduce(env
, bc::Nop
{});
2478 if (t
.subtypeOf(BObj
)) {
2480 push(env
, objcls(t
));
2484 if (auto const clsname
= getNameFromType(t
)) {
2485 if (auto const rcls
= env
.index
.resolve_class(env
.ctx
, clsname
)) {
2486 if (rcls
->cls()) effect_free(env
);
2487 push(env
, clsExact(*rcls
));
2495 void in(ISS
& env
, const bc::ClassGetTS
& op
) {
2496 // TODO(T31677864): implement real optimizations
2497 auto const ts
= popC(env
);
2498 auto const requiredTSType
= RuntimeOption::EvalHackArrDVArrs
? BDict
: BDArr
;
2499 if (!ts
.couldBe(requiredTSType
)) {
2504 auto const& genericsType
=
2505 RuntimeOption::EvalHackArrDVArrs
? TVec
: TVArr
;
2508 push(env
, opt(genericsType
));
2511 void in(ISS
& env
, const bc::AKExists
& /*op*/) {
2512 auto const base
= popC(env
);
2513 auto const key
= popC(env
);
2515 // Bases other than array-like or object will raise a warning and return
2517 if (!base
.couldBeAny(TArr
, TVec
, TDict
, TKeyset
, TObj
)) {
2518 return push(env
, TFalse
);
2521 // Push the returned type and annotate effects appropriately, taking into
2522 // account if the base might be null. Allowing for a possibly null base lets
2523 // us capture more cases.
2524 auto const finish
= [&] (const Type
& t
, bool mayThrow
) {
2525 if (base
.couldBe(BInitNull
)) return push(env
, union_of(t
, TFalse
));
2530 if (base
.subtypeOf(BBottom
)) unreachable(env
);
2531 return push(env
, t
);
2534 // Helper for Hack arrays. "validKey" is the set of key types which can return
2535 // a value from AKExists. "silentKey" is the set of key types which will
2536 // silently return false (anything else throws). The Hack array elem functions
2537 // will treat values of "silentKey" as throwing, so we must identify those
2538 // cases and deal with them.
2539 auto const hackArr
= [&] (std::pair
<Type
, ThrowMode
> elem
,
2540 const Type
& validKey
,
2541 const Type
& silentKey
) {
2542 switch (elem
.second
) {
2543 case ThrowMode::None
:
2544 assertx(key
.subtypeOf(validKey
));
2545 return finish(TTrue
, false);
2546 case ThrowMode::MaybeMissingElement
:
2547 assertx(key
.subtypeOf(validKey
));
2548 return finish(TBool
, false);
2549 case ThrowMode::MissingElement
:
2550 assertx(key
.subtypeOf(validKey
));
2551 return finish(TFalse
, false);
2552 case ThrowMode::MaybeBadKey
:
2553 assertx(key
.couldBe(validKey
));
2555 elem
.first
.subtypeOf(BBottom
) ? TFalse
: TBool
,
2556 !key
.subtypeOf(BArrKeyCompat
)
2558 case ThrowMode::BadOperation
:
2559 assertx(!key
.couldBe(validKey
));
2560 return finish(key
.couldBe(silentKey
) ? TFalse
: TBottom
, true);
2564 // Vecs will throw for any key other than Int or Str, and will silently
2565 // return false for Str.
2566 if (base
.subtypeOrNull(BVec
)) {
2567 if (key
.subtypeOf(BStr
)) return finish(TFalse
, false);
2568 return hackArr(vec_elem(base
, key
, TBottom
), TInt
, TStr
);
2571 // Dicts and keysets will throw for any key other than Int or Str.
2572 if (base
.subtypeOfAny(TOptDict
, TOptKeyset
)) {
2573 auto const elem
= base
.subtypeOrNull(BDict
)
2574 ? dict_elem(base
, key
, TBottom
)
2575 : keyset_elem(base
, key
, TBottom
);
2576 return hackArr(elem
, TArrKeyCompat
, TBottom
);
2579 if (base
.subtypeOrNull(BArr
)) {
2580 // Unlike Idx, AKExists will transform a null key on arrays into the static
2581 // empty string, so we don't need to do any fixups here.
2582 auto const elem
= array_elem(base
, key
, TBottom
);
2583 switch (elem
.second
) {
2584 case ThrowMode::None
: return finish(TTrue
, false);
2585 case ThrowMode::MaybeMissingElement
: return finish(TBool
, false);
2586 case ThrowMode::MissingElement
: return finish(TFalse
, false);
2587 case ThrowMode::MaybeBadKey
:
2588 return finish(elem
.first
.subtypeOf(BBottom
) ? TFalse
: TBool
, true);
2589 case ThrowMode::BadOperation
: always_assert(false);
2593 // Objects or other unions of possible bases
2598 s_implicit_context_set("HH\\ImplicitContext::set"),
2599 s_implicit_context_genSet("HH\\ImplicitContext::genSet");
2601 void in(ISS
& env
, const bc::GetMemoKeyL
& op
) {
2602 auto const& func
= env
.ctx
.func
;
2603 auto const name
= folly::to
<std::string
>(
2604 func
&& func
->cls
? func
->cls
->name
->data() : "",
2605 func
&& func
->cls
? "::" : "",
2606 func
? func
->name
->data() : "");
2607 always_assert(func
->isMemoizeWrapper
||
2608 name
== s_implicit_context_set
.get()->toCppString() ||
2609 name
== s_implicit_context_genSet
.get()->toCppString());
2611 auto const rclsIMemoizeParam
= env
.index
.builtin_class(s_IMemoizeParam
.get());
2612 auto const tyIMemoizeParam
= subObj(rclsIMemoizeParam
);
2614 auto const inTy
= locAsCell(env
, op
.nloc1
.id
);
2616 // If the local could be uninit, we might raise a warning (as
2617 // usual). Converting an object to a memo key might invoke PHP code if it has
2618 // the IMemoizeParam interface, and if it doesn't, we'll throw.
2619 if (!locCouldBeUninit(env
, op
.nloc1
.id
) &&
2620 !inTy
.couldBeAny(TObj
, TArr
, TVec
, TDict
)) {
2621 nothrow(env
); constprop(env
);
2624 // If type constraints are being enforced and the local being turned into a
2625 // memo key is a parameter, then we can possibly using the type constraint to
2626 // infer a more efficient memo key mode.
2627 using MK
= MemoKeyConstraint
;
2628 folly::Optional
<res::Class
> resolvedCls
;
2629 auto const mkc
= [&] {
2630 if (op
.nloc1
.id
>= env
.ctx
.func
->params
.size()) return MK::None
;
2631 auto tc
= env
.ctx
.func
->params
[op
.nloc1
.id
].typeConstraint
;
2632 if (tc
.type() == AnnotType::Object
) {
2633 auto res
= env
.index
.resolve_type_name(tc
.typeName());
2634 if (res
.type
!= AnnotType::Object
) {
2635 tc
.resolveType(res
.type
, res
.nullable
|| tc
.isNullable());
2637 resolvedCls
= env
.index
.resolve_class(env
.ctx
, tc
.typeName());
2640 return memoKeyConstraintFromTC(tc
);
2643 // Use the type-constraint to reduce this operation to a more efficient memo
2644 // mode. Some of the modes can be reduced to simple bytecode operations
2645 // inline. Even with the type-constraints, we still need to check the inferred
2646 // type of the local. Something may have possibly clobbered the local between
2647 // the type-check and this op.
2650 // Always an int, so the key is always an identity mapping
2651 if (inTy
.subtypeOf(BInt
)) return reduce(env
, bc::CGetL
{ op
.nloc1
});
2654 // Always a bool, so the key is the bool cast to an int
2655 if (inTy
.subtypeOf(BBool
)) {
2656 return reduce(env
, bc::CGetL
{ op
.nloc1
}, bc::CastInt
{});
2660 // Always a string, so the key is always an identity mapping
2661 if (inTy
.subtypeOf(BStr
)) return reduce(env
, bc::CGetL
{ op
.nloc1
});
2664 // Either an int or string, so the key can be an identity mapping
2665 if (inTy
.subtypeOf(BArrKey
)) return reduce(env
, bc::CGetL
{ op
.nloc1
});
2668 // A nullable string. The key will either be the string or the integer
2670 if (inTy
.subtypeOrNull(BStr
)) {
2673 bc::CGetL
{ op
.nloc1
},
2675 bc::IsTypeL
{ op
.nloc1
, IsTypeOp::Null
},
2681 // A nullable int. The key will either be the integer, or the static empty
2683 if (inTy
.subtypeOrNull(BInt
)) {
2686 bc::CGetL
{ op
.nloc1
},
2687 bc::String
{ staticEmptyString() },
2688 bc::IsTypeL
{ op
.nloc1
, IsTypeOp::Null
},
2693 case MK::BoolOrNull
:
2694 // A nullable bool. The key will either be 0, 1, or 2.
2695 if (inTy
.subtypeOrNull(BBool
)) {
2698 bc::CGetL
{ op
.nloc1
},
2701 bc::IsTypeL
{ op
.nloc1
, IsTypeOp::Null
},
2707 // The double will be converted (losslessly) to an integer.
2708 if (inTy
.subtypeOf(BDbl
)) {
2709 return reduce(env
, bc::CGetL
{ op
.nloc1
}, bc::DblAsBits
{});
2713 // A nullable double. The key will be an integer, or the static empty
2715 if (inTy
.subtypeOrNull(BDbl
)) {
2718 bc::CGetL
{ op
.nloc1
},
2720 bc::String
{ staticEmptyString() },
2721 bc::IsTypeL
{ op
.nloc1
, IsTypeOp::Null
},
2727 // An object. If the object is definitely known to implement IMemoizeParam
2728 // we can simply call that method, casting the output to ensure its always
2729 // a string (which is what the generic mode does). If not, it will use the
2730 // generic mode, which can handle collections or classes which don't
2731 // implement getInstanceKey.
2733 resolvedCls
->mustBeSubtypeOf(rclsIMemoizeParam
) &&
2734 inTy
.subtypeOf(tyIMemoizeParam
)) {
2737 bc::CGetL
{ op
.nloc1
},
2740 bc::FCallObjMethodD
{
2742 staticEmptyString(),
2743 ObjMethodOp::NullThrows
,
2744 s_getInstanceKey
.get()
2750 case MK::ObjectOrNull
:
2751 // An object or null. We can use the null safe version of a function call
2752 // when invoking getInstanceKey and then select from the result of that,
2753 // or the integer 0. This might seem wasteful, but the JIT does a good job
2754 // inlining away the call in the null case.
2756 resolvedCls
->mustBeSubtypeOf(rclsIMemoizeParam
) &&
2757 inTy
.subtypeOf(opt(tyIMemoizeParam
))) {
2760 bc::CGetL
{ op
.nloc1
},
2763 bc::FCallObjMethodD
{
2765 staticEmptyString(),
2766 ObjMethodOp::NullSafe
,
2767 s_getInstanceKey
.get()
2771 bc::IsTypeL
{ op
.nloc1
, IsTypeOp::Null
},
2780 // No type constraint, or one that isn't usuable. Use the generic memoization
2781 // scheme which can handle any type:
2783 if (auto const val
= tv(inTy
)) {
2784 auto const key
= eval_cell(
2785 [&]{ return HHVM_FN(serialize_memoize_param
)(*val
); }
2787 if (key
) return push(env
, *key
);
2790 // Integer keys are always mapped to themselves
2791 if (inTy
.subtypeOf(BInt
)) return reduce(env
, bc::CGetL
{ op
.nloc1
});
2792 if (inTy
.subtypeOrNull(BInt
)) {
2795 bc::CGetL
{ op
.nloc1
},
2796 bc::String
{ s_nullMemoKey
.get() },
2797 bc::IsTypeL
{ op
.nloc1
, IsTypeOp::Null
},
2801 if (inTy
.subtypeOf(BBool
)) {
2804 bc::String
{ s_falseMemoKey
.get() },
2805 bc::String
{ s_trueMemoKey
.get() },
2806 bc::CGetL
{ op
.nloc1
},
2811 // A memo key can be an integer if the input might be an integer, and is a
2812 // string otherwise. Booleans and nulls are always static strings.
2814 if (inTy
.subtypeOrNull(BBool
)) return TSStr
;
2815 if (inTy
.couldBe(BInt
)) return union_of(TInt
, TStr
);
2818 push(env
, std::move(keyTy
));
2821 void in(ISS
& env
, const bc::IssetL
& op
) {
2822 if (locIsThis(env
, op
.loc1
)) {
2824 bc::BareThis
{ BareThisOp::NoNotice
},
2825 bc::IsTypeC
{ IsTypeOp::Null
},
2830 auto const loc
= locAsCell(env
, op
.loc1
);
2831 if (loc
.subtypeOf(BNull
)) return push(env
, TFalse
);
2832 if (!loc
.couldBe(BNull
)) return push(env
, TTrue
);
2836 void in(ISS
& env
, const bc::IsUnsetL
& op
) {
2839 auto const loc
= locAsCell(env
, op
.loc1
);
2840 if (loc
.subtypeOf(BUninit
)) return push(env
, TTrue
);
2841 if (!loc
.couldBe(BUninit
)) return push(env
, TFalse
);
2845 void in(ISS
& env
, const bc::IssetS
& op
) {
2846 auto const tcls
= popC(env
);
2847 auto const tname
= popC(env
);
2848 auto const vname
= tv(tname
);
2849 auto const self
= selfCls(env
);
2851 if (self
&& tcls
.subtypeOf(*self
) &&
2852 vname
&& vname
->m_type
== KindOfPersistentString
) {
2853 if (auto const t
= selfPropAsCell(env
, vname
->m_data
.pstr
)) {
2854 if (isMaybeLateInitSelfProp(env
, vname
->m_data
.pstr
)) {
2855 if (!classInitMightRaise(env
, tcls
)) constprop(env
);
2856 return push(env
, t
->subtypeOf(BBottom
) ? TFalse
: TBool
);
2858 if (t
->subtypeOf(BNull
)) {
2859 if (!classInitMightRaise(env
, tcls
)) constprop(env
);
2860 return push(env
, TFalse
);
2862 if (!t
->couldBe(BNull
)) {
2863 if (!classInitMightRaise(env
, tcls
)) constprop(env
);
2864 return push(env
, TTrue
);
2869 auto const indexTy
= env
.index
.lookup_public_static(env
.ctx
, tcls
, tname
);
2870 if (indexTy
.subtypeOf(BInitCell
)) {
2871 // See the comments in CGetS about constprop for public statics.
2872 if (!classInitMightRaise(env
, tcls
)) {
2875 if (env
.index
.lookup_public_static_maybe_late_init(tcls
, tname
)) {
2876 return push(env
, indexTy
.subtypeOf(BBottom
) ? TFalse
: TBool
);
2878 if (indexTy
.subtypeOf(BNull
)) { return push(env
, TFalse
); }
2879 if (!indexTy
.couldBe(BNull
)) { return push(env
, TTrue
); }
2885 void in(ISS
& env
, const bc::IssetG
&) { popC(env
); push(env
, TBool
); }
2887 void isTypeImpl(ISS
& env
, const Type
& locOrCell
, const Type
& test
) {
2888 if (locOrCell
.subtypeOf(test
)) return push(env
, TTrue
);
2889 if (!locOrCell
.couldBe(test
)) return push(env
, TFalse
);
2893 void isTypeObj(ISS
& env
, const Type
& ty
) {
2894 if (!ty
.couldBe(BObj
)) return push(env
, TFalse
);
2895 if (ty
.subtypeOf(BObj
)) {
2896 auto const incompl
= objExact(
2897 env
.index
.builtin_class(s_PHP_Incomplete_Class
.get()));
2898 if (!ty
.couldBe(incompl
)) return push(env
, TTrue
);
2899 if (ty
.subtypeOf(incompl
)) return push(env
, TFalse
);
2905 void isTypeLImpl(ISS
& env
, const Op
& op
) {
2906 auto const loc
= locAsCell(env
, op
.nloc1
.id
);
2907 if (!locCouldBeUninit(env
, op
.nloc1
.id
) &&
2908 !is_type_might_raise(op
.subop2
, loc
)) {
2913 switch (op
.subop2
) {
2914 case IsTypeOp::Scalar
: return push(env
, TBool
);
2915 case IsTypeOp::Obj
: return isTypeObj(env
, loc
);
2916 default: return isTypeImpl(env
, loc
, type_of_istype(op
.subop2
));
2921 void isTypeCImpl(ISS
& env
, const Op
& op
) {
2922 auto const t1
= popC(env
);
2923 if (!is_type_might_raise(op
.subop1
, t1
)) {
2928 switch (op
.subop1
) {
2929 case IsTypeOp::Scalar
: return push(env
, TBool
);
2930 case IsTypeOp::Obj
: return isTypeObj(env
, t1
);
2931 default: return isTypeImpl(env
, t1
, type_of_istype(op
.subop1
));
2935 void in(ISS
& env
, const bc::IsTypeC
& op
) { isTypeCImpl(env
, op
); }
2936 void in(ISS
& env
, const bc::IsTypeL
& op
) { isTypeLImpl(env
, op
); }
2938 void in(ISS
& env
, const bc::InstanceOfD
& op
) {
2939 auto t1
= topC(env
);
2940 // Note: InstanceOfD can do autoload if the type might be a type
2941 // alias, so it's not nothrow unless we know it's an object type.
2942 if (auto const rcls
= env
.index
.resolve_class(env
.ctx
, op
.str1
)) {
2943 auto result
= [&] (const Type
& r
) {
2945 if (r
!= TBool
) constprop(env
);
2949 if (!interface_supports_non_objects(rcls
->name())) {
2950 auto testTy
= subObj(*rcls
);
2951 if (t1
.subtypeOf(testTy
)) return result(TTrue
);
2952 if (!t1
.couldBe(testTy
)) return result(TFalse
);
2954 t1
= unopt(std::move(t1
));
2955 if (t1
.subtypeOf(testTy
)) {
2956 return reduce(env
, bc::IsTypeC
{ IsTypeOp::Null
}, bc::Not
{});
2959 return result(TBool
);
2966 void in(ISS
& env
, const bc::InstanceOf
& /*op*/) {
2967 auto const t1
= topC(env
);
2968 auto const v1
= tv(t1
);
2969 if (v1
&& v1
->m_type
== KindOfPersistentString
) {
2970 return reduce(env
, bc::PopC
{},
2971 bc::InstanceOfD
{ v1
->m_data
.pstr
});
2974 if (t1
.subtypeOf(BObj
) && is_specialized_obj(t1
)) {
2975 auto const dobj
= dobj_of(t1
);
2976 switch (dobj
.type
) {
2980 return reduce(env
, bc::PopC
{},
2981 bc::InstanceOfD
{ dobj
.cls
.name() });
2992 bool isValidTypeOpForIsAs(const IsTypeOp
& op
) {
2994 case IsTypeOp::Null
:
2995 case IsTypeOp::Bool
:
3003 case IsTypeOp::Dict
:
3004 case IsTypeOp::Keyset
:
3005 case IsTypeOp::VArray
:
3006 case IsTypeOp::DArray
:
3007 case IsTypeOp::ArrLike
:
3008 case IsTypeOp::Scalar
:
3009 case IsTypeOp::ClsMeth
:
3010 case IsTypeOp::Func
:
3011 case IsTypeOp::PHPArr
:
3012 case IsTypeOp::Class
:
3018 void isTypeStructImpl(ISS
& env
, SArray inputTS
) {
3019 auto const resolvedTS
= resolveTSStatically(env
, inputTS
, env
.ctx
.cls
);
3020 auto const ts
= resolvedTS
? resolvedTS
: inputTS
;
3021 auto const t
= loosen_likeness(topC(env
, 1)); // operand to is/as
3023 bool may_raise
= true;
3024 auto result
= [&] (const Type
& out
) {
3025 popC(env
); // type structure
3026 popC(env
); // operand to is/as
3028 if (!may_raise
) nothrow(env
);
3029 return push(env
, out
);
3033 const folly::Optional
<Type
> type
,
3034 const folly::Optional
<Type
> deopt
= folly::none
3036 if (!type
|| is_type_might_raise(*type
, t
)) return result(TBool
);
3037 auto test
= type
.value();
3038 if (t
.subtypeOf(test
)) return result(TTrue
);
3039 if (!t
.couldBe(test
) && (!deopt
|| !t
.couldBe(deopt
.value()))) {
3040 return result(TFalse
);
3042 auto const op
= type_to_istypeop(test
);
3043 if (!op
|| !isValidTypeOpForIsAs(op
.value())) return result(TBool
);
3044 return reduce(env
, bc::PopC
{}, bc::IsTypeC
{ *op
});
3047 auto const is_nullable_ts
= is_ts_nullable(ts
);
3048 auto const is_definitely_null
= t
.subtypeOf(BNull
);
3049 auto const is_definitely_not_null
= !t
.couldBe(BNull
);
3051 if (is_nullable_ts
&& is_definitely_null
) return result(TTrue
);
3053 auto const ts_type
= type_of_type_structure(env
.index
, env
.ctx
, ts
);
3055 if (is_nullable_ts
&& !is_definitely_not_null
&& ts_type
== folly::none
) {
3056 // Ts is nullable and we know that t could be null but we dont know for sure
3057 // Also we didn't get a type out of the type structure
3058 return result(TBool
);
3061 if (ts_type
&& !is_type_might_raise(*ts_type
, t
)) may_raise
= false;
3062 switch (get_ts_kind(ts
)) {
3063 case TypeStructure::Kind::T_int
:
3064 case TypeStructure::Kind::T_bool
:
3065 case TypeStructure::Kind::T_float
:
3066 case TypeStructure::Kind::T_string
:
3067 case TypeStructure::Kind::T_num
:
3068 case TypeStructure::Kind::T_arraykey
:
3069 case TypeStructure::Kind::T_keyset
:
3070 case TypeStructure::Kind::T_void
:
3071 case TypeStructure::Kind::T_null
:
3072 return check(ts_type
);
3073 case TypeStructure::Kind::T_tuple
:
3074 return check(ts_type
, TVec
);
3075 case TypeStructure::Kind::T_shape
:
3076 return check(ts_type
, TDict
);
3077 case TypeStructure::Kind::T_dict
:
3078 return check(ts_type
, TDArr
);
3079 case TypeStructure::Kind::T_vec
:
3080 return check(ts_type
, TVArr
);
3081 case TypeStructure::Kind::T_nothing
:
3082 case TypeStructure::Kind::T_noreturn
:
3083 return result(TFalse
);
3084 case TypeStructure::Kind::T_mixed
:
3085 case TypeStructure::Kind::T_dynamic
:
3086 return result(TTrue
);
3087 case TypeStructure::Kind::T_nonnull
:
3088 if (is_definitely_null
) return result(TFalse
);
3089 if (is_definitely_not_null
) return result(TTrue
);
3092 bc::IsTypeC
{ IsTypeOp::Null
},
3094 case TypeStructure::Kind::T_class
:
3095 case TypeStructure::Kind::T_interface
:
3096 case TypeStructure::Kind::T_xhp
: {
3097 auto clsname
= get_ts_classname(ts
);
3098 auto const rcls
= env
.index
.resolve_class(env
.ctx
, clsname
);
3099 if (!rcls
|| !rcls
->resolved() || (ts
->exists(s_generic_types
) &&
3100 (rcls
->cls()->hasReifiedGenerics
||
3101 !isTSAllWildcards(ts
)))) {
3102 // If it is a reified class or has non wildcard generics,
3104 return result(TBool
);
3106 return reduce(env
, bc::PopC
{}, bc::InstanceOfD
{ clsname
});
3108 case TypeStructure::Kind::T_unresolved
: {
3109 auto classname
= get_ts_classname(ts
);
3110 auto const has_generics
= ts
->exists(s_generic_types
);
3111 if (!has_generics
&& classname
->isame(s_this
.get())) {
3112 return reduce(env
, bc::PopC
{}, bc::IsLateBoundCls
{});
3114 auto const rcls
= env
.index
.resolve_class(env
.ctx
, classname
);
3115 // We can only reduce to instance of if we know for sure that this class
3116 // can be resolved since instanceof undefined class does not throw
3117 if (!rcls
|| !rcls
->resolved() || rcls
->cls()->attrs
& AttrEnum
) {
3118 return result(TBool
);
3121 (rcls
->cls()->hasReifiedGenerics
|| !isTSAllWildcards(ts
))) {
3122 // If it is a reified class or has non wildcard generics,
3124 return result(TBool
);
3126 return reduce(env
, bc::PopC
{}, bc::InstanceOfD
{ rcls
->name() });
3128 case TypeStructure::Kind::T_enum
:
3129 case TypeStructure::Kind::T_resource
:
3130 case TypeStructure::Kind::T_vec_or_dict
:
3131 case TypeStructure::Kind::T_arraylike
:
3132 // TODO(T29232862): implement
3133 return result(TBool
);
3134 case TypeStructure::Kind::T_typeaccess
:
3135 case TypeStructure::Kind::T_array
:
3136 case TypeStructure::Kind::T_darray
:
3137 case TypeStructure::Kind::T_varray
:
3138 case TypeStructure::Kind::T_varray_or_darray
:
3139 case TypeStructure::Kind::T_reifiedtype
:
3140 return result(TBool
);
3141 case TypeStructure::Kind::T_fun
:
3142 case TypeStructure::Kind::T_typevar
:
3143 case TypeStructure::Kind::T_trait
:
3144 // We will error on these at the JIT
3145 return result(TBool
);
3151 bool canReduceToDontResolveList(SArray tsList
, bool checkArrays
);
3153 bool canReduceToDontResolve(SArray ts
, bool checkArrays
) {
3154 auto const checkGenerics
= [&](SArray arr
) {
3155 if (!ts
->exists(s_generic_types
)) return true;
3156 return canReduceToDontResolveList(get_ts_generic_types(ts
), true);
3158 switch (get_ts_kind(ts
)) {
3159 case TypeStructure::Kind::T_int
:
3160 case TypeStructure::Kind::T_bool
:
3161 case TypeStructure::Kind::T_float
:
3162 case TypeStructure::Kind::T_string
:
3163 case TypeStructure::Kind::T_num
:
3164 case TypeStructure::Kind::T_arraykey
:
3165 case TypeStructure::Kind::T_void
:
3166 case TypeStructure::Kind::T_null
:
3167 case TypeStructure::Kind::T_nothing
:
3168 case TypeStructure::Kind::T_noreturn
:
3169 case TypeStructure::Kind::T_mixed
:
3170 case TypeStructure::Kind::T_dynamic
:
3171 case TypeStructure::Kind::T_nonnull
:
3172 case TypeStructure::Kind::T_resource
:
3174 // Following have generic parameters that may need to be resolved
3175 case TypeStructure::Kind::T_dict
:
3176 case TypeStructure::Kind::T_vec
:
3177 case TypeStructure::Kind::T_keyset
:
3178 case TypeStructure::Kind::T_vec_or_dict
:
3179 case TypeStructure::Kind::T_arraylike
:
3180 return !checkArrays
|| checkGenerics(ts
);
3181 case TypeStructure::Kind::T_class
:
3182 case TypeStructure::Kind::T_interface
:
3183 case TypeStructure::Kind::T_xhp
:
3184 case TypeStructure::Kind::T_enum
:
3185 return isTSAllWildcards(ts
) || checkGenerics(ts
);
3186 case TypeStructure::Kind::T_tuple
:
3187 return canReduceToDontResolveList(get_ts_elem_types(ts
), checkArrays
);
3188 case TypeStructure::Kind::T_fun
: {
3189 auto const variadicType
= get_ts_variadic_type_opt(ts
);
3190 return canReduceToDontResolve(get_ts_return_type(ts
), checkArrays
)
3191 && canReduceToDontResolveList(get_ts_param_types(ts
), checkArrays
)
3192 && (!variadicType
|| canReduceToDontResolve(variadicType
, checkArrays
));
3194 case TypeStructure::Kind::T_shape
:
3195 // We cannot skip resolution on shapes since shapes contain "value" field
3196 // which resolution removes.
3198 // Following needs to be resolved
3199 case TypeStructure::Kind::T_unresolved
:
3200 case TypeStructure::Kind::T_typeaccess
:
3201 // Following cannot be used in is/as expressions, we need to error on them
3202 // Currently erroring happens as a part of the resolving phase,
3203 // so keep resolving them
3204 case TypeStructure::Kind::T_array
:
3205 case TypeStructure::Kind::T_darray
:
3206 case TypeStructure::Kind::T_varray
:
3207 case TypeStructure::Kind::T_varray_or_darray
:
3208 case TypeStructure::Kind::T_reifiedtype
:
3209 case TypeStructure::Kind::T_typevar
:
3210 case TypeStructure::Kind::T_trait
:
3216 bool canReduceToDontResolveList(SArray tsList
, bool checkArrays
) {
3221 assertx(isArrayLikeType(v
.m_type
));
3222 result
&= canReduceToDontResolve(v
.m_data
.parr
, checkArrays
);
3223 // when result is false, we can short circuit
3232 void in(ISS
& env
, const bc::IsLateBoundCls
& op
) {
3233 auto const cls
= env
.ctx
.cls
;
3234 if (cls
&& !(cls
->attrs
& AttrTrait
)) effect_free(env
);
3236 return push(env
, TBool
);
3239 void in(ISS
& env
, const bc::IsTypeStructC
& op
) {
3240 auto const requiredTSType
= RuntimeOption::EvalHackArrDVArrs
? BDict
: BDArr
;
3241 if (!topC(env
).couldBe(requiredTSType
)) {
3244 return unreachable(env
);
3246 auto const a
= tv(topC(env
));
3247 if (!a
|| !isValidTSType(*a
, false)) {
3250 return push(env
, TBool
);
3252 if (op
.subop1
== TypeStructResolveOp::Resolve
&&
3253 canReduceToDontResolve(a
->m_data
.parr
, false)) {
3254 return reduce(env
, bc::IsTypeStructC
{ TypeStructResolveOp::DontResolve
});
3256 isTypeStructImpl(env
, a
->m_data
.parr
);
3259 void in(ISS
& env
, const bc::ThrowAsTypeStructException
& op
) {
3266 void in(ISS
& env
, const bc::CombineAndResolveTypeStruct
& op
) {
3267 assertx(op
.arg1
> 0);
3269 auto const requiredTSType
= RuntimeOption::EvalHackArrDVArrs
? BDict
: BDArr
;
3270 auto const first
= tv(topC(env
));
3271 if (first
&& isValidTSType(*first
, false)) {
3272 auto const ts
= first
->m_data
.parr
;
3273 // Optimize single input that does not need any combination
3275 if (canReduceToDontResolve(ts
, true)) return reduce(env
);
3276 if (auto const resolved
= resolveTSStatically(env
, ts
, env
.ctx
.cls
)) {
3277 return RuntimeOption::EvalHackArrDVArrs
3278 ? reduce(env
, bc::PopC
{}, bc::Dict
{ resolved
})
3279 : reduce(env
, bc::PopC
{}, bc::Array
{ resolved
});
3282 // Optimize double input that needs a single combination and looks of the
3283 // form ?T, @T or ~T
3284 if (op
.arg1
== 2 && get_ts_kind(ts
) == TypeStructure::Kind::T_reifiedtype
) {
3285 BytecodeVec instrs
{ bc::PopC
{} };
3286 auto const tv_true
= gen_constant(make_tv
<KindOfBoolean
>(true));
3287 if (ts
->exists(s_like
.get())) {
3288 instrs
.push_back(gen_constant(make_tv
<KindOfString
>(s_like
.get())));
3289 instrs
.push_back(tv_true
);
3290 instrs
.push_back(bc::AddElemC
{});
3292 if (ts
->exists(s_nullable
.get())) {
3293 instrs
.push_back(gen_constant(make_tv
<KindOfString
>(s_nullable
.get())));
3294 instrs
.push_back(tv_true
);
3295 instrs
.push_back(bc::AddElemC
{});
3297 if (ts
->exists(s_soft
.get())) {
3298 instrs
.push_back(gen_constant(make_tv
<KindOfString
>(s_soft
.get())));
3299 instrs
.push_back(tv_true
);
3300 instrs
.push_back(bc::AddElemC
{});
3302 return reduce(env
, std::move(instrs
));
3306 for (int i
= 0; i
< op
.arg1
; ++i
) {
3307 auto const t
= popC(env
);
3308 valid
&= t
.couldBe(requiredTSType
);
3310 if (!valid
) return unreachable(env
);
3312 push(env
, Type
{requiredTSType
});
3315 void in(ISS
& env
, const bc::RecordReifiedGeneric
& op
) {
3316 // TODO(T31677864): implement real optimizations
3317 auto const t
= popC(env
);
3318 auto const required
= RuntimeOption::EvalHackArrDVArrs
? BVec
: BVArr
;
3319 if (!t
.couldBe(required
)) return unreachable(env
);
3320 if (t
.subtypeOf(required
)) nothrow(env
);
3321 push(env
, RuntimeOption::EvalHackArrDVArrs
? TSVec
: TSVArr
);
3324 void in(ISS
& env
, const bc::CheckReifiedGenericMismatch
& op
) {
3325 // TODO(T31677864): implement real optimizations
3332 * If the value on the top of the stack is known to be equivalent to the local
3333 * its being moved/copied to, return folly::none without modifying any
3334 * state. Otherwise, pop the stack value, perform the set, and return a pair
3335 * giving the value's type, and any other local its known to be equivalent to.
3337 template <typename Set
>
3338 folly::Optional
<std::pair
<Type
, LocalId
>> moveToLocImpl(ISS
& env
,
3340 if (auto const prev
= last_op(env
, 1)) {
3341 if (prev
->op
== Op::CGetL2
&&
3342 prev
->CGetL2
.nloc1
.id
== op
.loc1
&&
3343 last_op(env
)->op
== Op::Concat
) {
3345 reduce(env
, bc::SetOpL
{ op
.loc1
, SetOpOp::ConcatEqual
});
3350 auto equivLoc
= topStkEquiv(env
);
3351 // If the local could be a Ref, don't record equality because the stack
3352 // element and the local won't actually have the same type.
3353 if (equivLoc
== StackThisId
&& env
.state
.thisLoc
!= NoLocalId
) {
3354 if (env
.state
.thisLoc
== op
.loc1
||
3355 locsAreEquiv(env
, env
.state
.thisLoc
, op
.loc1
)) {
3358 equivLoc
= env
.state
.thisLoc
;
3361 if (!is_volatile_local(env
.ctx
.func
, op
.loc1
)) {
3362 if (equivLoc
<= MaxLocalId
) {
3363 if (equivLoc
== op
.loc1
||
3364 locsAreEquiv(env
, equivLoc
, op
.loc1
)) {
3365 // We allow equivalency to ignore Uninit, so we need to check
3367 if (peekLocRaw(env
, op
.loc1
) == topC(env
)) {
3371 } else if (equivLoc
== NoLocalId
) {
3374 if (!any(env
.collect
.opts
& CollectionOpts::Speculating
)) {
3378 equivLoc
= NoLocalId
;
3381 auto val
= popC(env
);
3382 setLoc(env
, op
.loc1
, val
);
3383 if (equivLoc
== StackThisId
) {
3384 assertx(env
.state
.thisLoc
== NoLocalId
);
3385 equivLoc
= env
.state
.thisLoc
= op
.loc1
;
3387 if (equivLoc
== StackDupId
) {
3388 setStkLocal(env
, op
.loc1
);
3389 } else if (equivLoc
!= op
.loc1
&& equivLoc
!= NoLocalId
) {
3390 addLocEquiv(env
, op
.loc1
, equivLoc
);
3392 return { std::make_pair(std::move(val
), equivLoc
) };
3397 void in(ISS
& env
, const bc::PopL
& op
) {
3398 // If the same value is already in the local, do nothing but pop
3399 // it. Otherwise, the set has been done by moveToLocImpl.
3400 if (!moveToLocImpl(env
, op
)) return reduce(env
, bc::PopC
{});
3403 void in(ISS
& env
, const bc::SetL
& op
) {
3404 // If the same value is already in the local, do nothing because SetL keeps
3405 // the value on the stack. If it isn't, we need to push it back onto the stack
3406 // because moveToLocImpl popped it.
3407 if (auto p
= moveToLocImpl(env
, op
)) {
3408 push(env
, std::move(p
->first
), p
->second
);
3414 void in(ISS
& env
, const bc::SetG
&) {
3415 auto t1
= popC(env
);
3417 push(env
, std::move(t1
));
3420 void in(ISS
& env
, const bc::SetS
& op
) {
3421 auto const t1
= loosen_likeness(popC(env
));
3422 auto const tcls
= popC(env
);
3423 auto const tname
= popC(env
);
3424 auto const vname
= tv(tname
);
3425 auto const self
= selfCls(env
);
3427 if (vname
&& vname
->m_type
== KindOfPersistentString
&&
3428 canSkipMergeOnConstProp(env
, tcls
, vname
->m_data
.pstr
)) {
3434 if (!self
|| tcls
.couldBe(*self
)) {
3435 if (vname
&& vname
->m_type
== KindOfPersistentString
) {
3436 mergeSelfProp(env
, vname
->m_data
.pstr
, t1
);
3438 mergeEachSelfPropRaw(env
, [&] (Type
) { return t1
; });
3442 env
.collect
.publicSPropMutations
.merge(env
.index
, env
.ctx
, tcls
, tname
, t1
);
3444 push(env
, std::move(t1
));
3447 void in(ISS
& env
, const bc::SetOpL
& op
) {
3448 auto const t1
= popC(env
);
3449 auto const v1
= tv(t1
);
3450 auto const loc
= locAsCell(env
, op
.loc1
);
3451 auto const locVal
= tv(loc
);
3453 // Can't constprop at this eval_cell, because of the effects on
3455 auto resultTy
= eval_cell([&] {
3456 TypedValue c
= *locVal
;
3457 TypedValue rhs
= *v1
;
3458 setopBody(&c
, op
.subop2
, &rhs
);
3461 if (!resultTy
) resultTy
= TInitCell
;
3463 // We may have inferred a TSStr or TSArr with a value here, but
3464 // at runtime it will not be static. For now just throw that
3465 // away. TODO(#3696042): should be able to loosen_staticness here.
3466 if (resultTy
->subtypeOf(BStr
)) resultTy
= TStr
;
3467 else if (resultTy
->subtypeOf(BArr
)) resultTy
= TArr
;
3468 else if (resultTy
->subtypeOf(BVec
)) resultTy
= TVec
;
3469 else if (resultTy
->subtypeOf(BDict
)) resultTy
= TDict
;
3470 else if (resultTy
->subtypeOf(BKeyset
)) resultTy
= TKeyset
;
3472 setLoc(env
, op
.loc1
, *resultTy
);
3473 push(env
, *resultTy
);
3477 auto resultTy
= typeSetOp(op
.subop2
, loc
, t1
);
3478 setLoc(env
, op
.loc1
, resultTy
);
3479 push(env
, std::move(resultTy
));
3482 void in(ISS
& env
, const bc::SetOpG
&) {
3483 popC(env
); popC(env
);
3484 push(env
, TInitCell
);
3487 void in(ISS
& env
, const bc::SetOpS
& op
) {
3489 auto const tcls
= popC(env
);
3490 auto const tname
= popC(env
);
3491 auto const vname
= tv(tname
);
3492 auto const self
= selfCls(env
);
3494 if (vname
&& vname
->m_type
== KindOfPersistentString
&&
3495 canSkipMergeOnConstProp(env
, tcls
, vname
->m_data
.pstr
)) {
3501 if (!self
|| tcls
.couldBe(*self
)) {
3502 if (vname
&& vname
->m_type
== KindOfPersistentString
) {
3503 mergeSelfProp(env
, vname
->m_data
.pstr
, TInitCell
);
3509 env
.collect
.publicSPropMutations
.merge(
3510 env
.index
, env
.ctx
, tcls
, tname
, TInitCell
3513 push(env
, TInitCell
);
3516 void in(ISS
& env
, const bc::IncDecL
& op
) {
3517 auto loc
= locAsCell(env
, op
.nloc1
.id
);
3518 auto newT
= typeIncDec(op
.subop2
, loc
);
3519 auto const pre
= isPre(op
.subop2
);
3521 // If it's a non-numeric string, this may cause it to exceed the max length.
3522 if (!locCouldBeUninit(env
, op
.nloc1
.id
) &&
3523 !loc
.couldBe(BStr
)) {
3527 if (!pre
) push(env
, std::move(loc
));
3528 setLoc(env
, op
.nloc1
.id
, newT
);
3529 if (pre
) push(env
, std::move(newT
));
3532 void in(ISS
& env
, const bc::IncDecG
&) { popC(env
); push(env
, TInitCell
); }
3534 void in(ISS
& env
, const bc::IncDecS
& op
) {
3535 auto const tcls
= popC(env
);
3536 auto const tname
= popC(env
);
3537 auto const vname
= tv(tname
);
3538 auto const self
= selfCls(env
);
3540 if (vname
&& vname
->m_type
== KindOfPersistentString
&&
3541 canSkipMergeOnConstProp(env
, tcls
, vname
->m_data
.pstr
)) {
3547 if (!self
|| tcls
.couldBe(*self
)) {
3548 if (vname
&& vname
->m_type
== KindOfPersistentString
) {
3549 mergeSelfProp(env
, vname
->m_data
.pstr
, TInitCell
);
3555 env
.collect
.publicSPropMutations
.merge(
3556 env
.index
, env
.ctx
, tcls
, tname
, TInitCell
3559 push(env
, TInitCell
);
3562 void in(ISS
& env
, const bc::UnsetL
& op
) {
3563 if (locRaw(env
, op
.loc1
).subtypeOf(TUninit
)) {
3566 if (any(env
.collect
.opts
& CollectionOpts::Speculating
)) {
3571 setLocRaw(env
, op
.loc1
, TUninit
);
3574 void in(ISS
& env
, const bc::UnsetG
& /*op*/) {
3575 auto const t1
= popC(env
);
3576 if (!t1
.couldBe(BObj
| BRes
)) nothrow(env
);
3579 bool fcallCanSkipRepack(ISS
& env
, const FCallArgs
& fca
, const res::Func
& func
) {
3580 // Can't skip repack if potentially calling a function with too many args.
3581 if (fca
.numArgs() > func
.minNonVariadicParams()) return false;
3582 // Repack not needed if not unpacking and not having too many arguments.
3583 if (!fca
.hasUnpack()) return true;
3584 // Can't skip repack if unpack args are in a wrong position.
3585 if (fca
.numArgs() != func
.maxNonVariadicParams()) return false;
3587 // Repack not needed if unpack args have the correct type.
3588 auto const unpackArgs
= topC(env
, fca
.hasGenerics() ? 1 : 0);
3589 return unpackArgs
.subtypeOf(RuntimeOption::EvalHackArrDVArrs
? BVec
: BVArr
);
3592 template<class FCallWithFCA
>
3593 bool fcallOptimizeChecks(
3595 const FCallArgs
& fca
,
3596 const res::Func
& func
,
3597 FCallWithFCA fcallWithFCA
3599 auto const numOut
= env
.index
.lookup_num_inout_params(env
.ctx
, func
);
3600 if (fca
.enforceInOut() && numOut
== fca
.numRets() - 1) {
3602 for (auto i
= 0; i
< fca
.numArgs(); ++i
) {
3603 auto const kind
= env
.index
.lookup_param_prep(env
.ctx
, func
, i
);
3604 if (kind
== PrepKind::Unknown
) {
3609 if (kind
!= (fca
.isInOut(i
) ? PrepKind::InOut
: PrepKind::Val
)) {
3610 // The function/method may not exist, in which case we should raise a
3611 // different error. Just defer the checks to the runtime.
3612 if (!func
.exactFunc()) return false;
3615 auto const exCls
= makeStaticString("InvalidArgumentException");
3616 auto const err
= makeStaticString(formatParamInOutMismatch(
3617 func
.name()->data(), i
, !fca
.isInOut(i
)));
3621 bc::NewObjD
{ exCls
},
3626 bc::FCallCtor
{ FCallArgs(1), staticEmptyString() },
3636 // Optimize away the runtime inout-ness check.
3637 reduce(env
, fcallWithFCA(fca
.withoutInOut()));
3642 // Infer whether the callee supports async eager return.
3643 if (fca
.asyncEagerTarget() != NoBlockId
) {
3644 auto const status
= env
.index
.supports_async_eager_return(func
);
3645 if (status
&& !*status
) {
3646 reduce(env
, fcallWithFCA(fca
.withoutAsyncEagerTarget()));
3651 if (!fca
.skipRepack() && fcallCanSkipRepack(env
, fca
, func
)) {
3652 reduce(env
, fcallWithFCA(fca
.withoutRepack()));
3661 const FCallArgs
& fca
,
3662 const res::Func
& func
,
3665 uint32_t numExtraInputs
3667 auto const foldableFunc
= func
.exactFunc();
3668 if (!foldableFunc
) return false;
3669 if (!shouldAttemptToFold(env
, foldableFunc
, fca
, context
, maybeDynamic
)) {
3673 assertx(!fca
.hasUnpack() && !fca
.hasGenerics() && fca
.numRets() == 1);
3674 assertx(options
.ConstantFoldBuiltins
);
3676 auto tried_lookup
= false;
3678 if (foldableFunc
->attrs
& AttrBuiltin
&&
3679 foldableFunc
->attrs
& AttrIsFoldable
) {
3680 auto ret
= const_fold(env
, fca
.numArgs(), numExtraInputs
, *foldableFunc
,
3682 return ret
? *ret
: TBottom
;
3684 CompactVector
<Type
> args(fca
.numArgs());
3685 auto const firstArgPos
= numExtraInputs
+ fca
.numInputs() - 1;
3686 for (auto i
= uint32_t{0}; i
< fca
.numArgs(); ++i
) {
3687 auto const& arg
= topT(env
, firstArgPos
- i
);
3688 auto const isScalar
= is_scalar(arg
);
3690 (env
.index
.func_depends_on_arg(foldableFunc
, i
) ||
3691 !arg
.subtypeOf(BInitCell
))) {
3694 args
[i
] = isScalar
? scalarize(arg
) : arg
;
3697 tried_lookup
= true;
3698 return env
.index
.lookup_foldable_return_type(
3699 env
.ctx
, foldableFunc
, context
, std::move(args
));
3702 if (auto v
= tv(ty
)) {
3704 for (uint32_t i
= 0; i
< numExtraInputs
; ++i
) repl
.push_back(bc::PopC
{});
3705 for (uint32_t i
= 0; i
< fca
.numArgs(); ++i
) repl
.push_back(bc::PopC
{});
3706 repl
.push_back(bc::PopU
{});
3707 repl
.push_back(bc::PopU
{});
3708 if (topT(env
, fca
.numArgs() + 2 + numExtraInputs
).subtypeOf(TInitCell
)) {
3709 repl
.push_back(bc::PopC
{});
3711 assertx(topT(env
, fca
.numArgs() + 2 + numExtraInputs
).subtypeOf(TUninit
));
3712 repl
.push_back(bc::PopU
{});
3714 repl
.push_back(gen_constant(*v
));
3715 reduce(env
, std::move(repl
));
3720 env
.collect
.unfoldableFuncs
.emplace(foldableFunc
, env
.bid
);
3725 Type
typeFromWH(Type t
) {
3726 if (!t
.couldBe(BObj
)) {
3727 // Exceptions will be thrown if a non-object is awaited.
3731 // Throw away non-obj component.
3734 // If we aren't even sure this is a wait handle, there's nothing we can
3736 if (!is_specialized_wait_handle(t
)) {
3740 return wait_handle_inner(t
);
3743 void pushCallReturnType(ISS
& env
, Type
&& ty
, const FCallArgs
& fca
) {
3744 if (ty
== TBottom
) {
3745 // The callee function never returns. It might throw, or loop forever.
3748 auto const numRets
= fca
.numRets();
3750 assertx(fca
.asyncEagerTarget() == NoBlockId
);
3751 for (auto i
= uint32_t{0}; i
< numRets
- 1; ++i
) popU(env
);
3752 if (is_specialized_vec(ty
)) {
3753 for (int32_t i
= 1; i
< numRets
; i
++) {
3754 push(env
, vec_elem(ty
, ival(i
)).first
);
3756 push(env
, vec_elem(ty
, ival(0)).first
);
3758 for (int32_t i
= 0; i
< numRets
; i
++) push(env
, TInitCell
);
3762 if (fca
.asyncEagerTarget() != NoBlockId
) {
3763 push(env
, typeFromWH(ty
));
3764 assertx(topC(env
) != TBottom
);
3765 env
.propagate(fca
.asyncEagerTarget(), &env
.state
);
3768 return push(env
, std::move(ty
));
3771 const StaticString s_defined
{ "defined" };
3772 const StaticString s_function_exists
{ "function_exists" };
3774 template<class FCallWithFCA
>
3775 void fcallKnownImpl(
3777 const FCallArgs
& fca
,
3778 const res::Func
& func
,
3781 uint32_t numExtraInputs
,
3782 FCallWithFCA fcallWithFCA
3784 auto const numArgs
= fca
.numArgs();
3785 auto returnType
= [&] {
3786 CompactVector
<Type
> args(numArgs
);
3787 auto const firstArgPos
= numExtraInputs
+ fca
.numInputs() - 1;
3788 for (auto i
= uint32_t{0}; i
< numArgs
; ++i
) {
3789 args
[i
] = topCV(env
, firstArgPos
- i
);
3792 auto ty
= fca
.hasUnpack()
3793 ? env
.index
.lookup_return_type(env
.ctx
, func
)
3794 : env
.index
.lookup_return_type(env
.ctx
, args
, context
, func
);
3796 ty
= union_of(std::move(ty
), TInitNull
);
3801 if (fca
.asyncEagerTarget() != NoBlockId
&& typeFromWH(returnType
) == TBottom
) {
3802 // Kill the async eager target if the function never returns.
3803 reduce(env
, fcallWithFCA(std::move(fca
.withoutAsyncEagerTarget())));
3807 if (func
.name()->isame(s_function_exists
.get()) &&
3808 (numArgs
== 1 || numArgs
== 2) &&
3809 !fca
.hasUnpack() && !fca
.hasGenerics()) {
3810 handle_function_exists(env
, topT(env
, numExtraInputs
+ numArgs
- 1));
3813 for (auto i
= uint32_t{0}; i
< numExtraInputs
; ++i
) popC(env
);
3814 if (fca
.hasGenerics()) popC(env
);
3815 if (fca
.hasUnpack()) popC(env
);
3816 for (auto i
= uint32_t{0}; i
< numArgs
; ++i
) popCV(env
);
3820 pushCallReturnType(env
, std::move(returnType
), fca
);
3823 void fcallUnknownImpl(ISS
& env
, const FCallArgs
& fca
) {
3824 if (fca
.hasGenerics()) popC(env
);
3825 if (fca
.hasUnpack()) popC(env
);
3826 auto const numArgs
= fca
.numArgs();
3827 auto const numRets
= fca
.numRets();
3828 for (auto i
= uint32_t{0}; i
< numArgs
; ++i
) popCV(env
);
3832 if (fca
.asyncEagerTarget() != NoBlockId
) {
3833 assertx(numRets
== 1);
3834 push(env
, TInitCell
);
3835 env
.propagate(fca
.asyncEagerTarget(), &env
.state
);
3838 for (auto i
= uint32_t{0}; i
< numRets
- 1; ++i
) popU(env
);
3839 for (auto i
= uint32_t{0}; i
< numRets
; ++i
) push(env
, TInitCell
);
3842 void in(ISS
& env
, const bc::FCallFuncD
& op
) {
3843 auto const rfunc
= env
.index
.resolve_func(env
.ctx
, op
.str2
);
3845 if (op
.fca
.hasGenerics()) {
3846 auto const tsList
= topC(env
);
3847 if (!tsList
.couldBe(RuntimeOption::EvalHackArrDVArrs
? BVec
: BVArr
)) {
3848 return unreachable(env
);
3851 if (!rfunc
.couldHaveReifiedGenerics()) {
3855 bc::FCallFuncD
{ op
.fca
.withoutGenerics(), op
.str2
}
3860 auto const updateBC
= [&] (FCallArgs fca
) {
3861 return bc::FCallFuncD
{ std::move(fca
), op
.str2
};
3864 if (fcallOptimizeChecks(env
, op
.fca
, rfunc
, updateBC
) ||
3865 fcallTryFold(env
, op
.fca
, rfunc
, TBottom
, false, 0)) {
3869 if (auto const func
= rfunc
.exactFunc()) {
3870 if (can_emit_builtin(env
, func
, op
.fca
)) {
3871 return finish_builtin(env
, func
, op
.fca
);
3875 fcallKnownImpl(env
, op
.fca
, rfunc
, TBottom
, false, 0, updateBC
);
3880 void fcallFuncUnknown(ISS
& env
, const bc::FCallFunc
& op
) {
3882 fcallUnknownImpl(env
, op
.fca
);
3885 void fcallFuncClsMeth(ISS
& env
, const bc::FCallFunc
& op
) {
3886 assertx(topC(env
).subtypeOf(BClsMeth
));
3888 // TODO: optimize me
3889 fcallFuncUnknown(env
, op
);
3892 void fcallFuncFunc(ISS
& env
, const bc::FCallFunc
& op
) {
3893 assertx(topC(env
).subtypeOf(BFunc
));
3895 // TODO: optimize me
3896 fcallFuncUnknown(env
, op
);
3899 void fcallFuncObj(ISS
& env
, const bc::FCallFunc
& op
) {
3900 assertx(topC(env
).subtypeOf(BObj
));
3902 // TODO: optimize me
3903 fcallFuncUnknown(env
, op
);
3906 void fcallFuncStr(ISS
& env
, const bc::FCallFunc
& op
) {
3907 assertx(topC(env
).subtypeOf(BStr
));
3908 auto funcName
= getNameFromType(topC(env
));
3909 if (!funcName
) return fcallFuncUnknown(env
, op
);
3911 funcName
= normalizeNS(funcName
);
3912 if (!isNSNormalized(funcName
) || !notClassMethodPair(funcName
)) {
3913 return fcallFuncUnknown(env
, op
);
3916 auto const rfunc
= env
.index
.resolve_func(env
.ctx
, funcName
);
3917 if (!rfunc
.mightCareAboutDynCalls()) {
3918 return reduce(env
, bc::PopC
{}, bc::FCallFuncD
{ op
.fca
, funcName
});
3921 auto const updateBC
= [&] (FCallArgs fca
) {
3922 return bc::FCallFunc
{ std::move(fca
) };
3925 if (fcallOptimizeChecks(env
, op
.fca
, rfunc
, updateBC
)) return;
3926 fcallKnownImpl(env
, op
.fca
, rfunc
, TBottom
, false, 1, updateBC
);
3931 void in(ISS
& env
, const bc::FCallFunc
& op
) {
3932 auto const callable
= topC(env
);
3933 if (callable
.subtypeOf(BFunc
)) return fcallFuncFunc(env
, op
);
3934 if (callable
.subtypeOf(BClsMeth
)) return fcallFuncClsMeth(env
, op
);
3935 if (callable
.subtypeOf(BObj
)) return fcallFuncObj(env
, op
);
3936 if (callable
.subtypeOf(BStr
)) return fcallFuncStr(env
, op
);
3937 fcallFuncUnknown(env
, op
);
3940 void in(ISS
& env
, const bc::ResolveFunc
& op
) {
3944 void in(ISS
& env
, const bc::ResolveMethCaller
& op
) {
3949 void in(ISS
& env
, const bc::ResolveRFunc
& op
) {
3951 push(env
, TFuncLike
);
3954 void in(ISS
& env
, const bc::ResolveObjMethod
& op
) {
3957 if (RuntimeOption::EvalHackArrDVArrs
) {
3966 Type
ctxCls(ISS
& env
) {
3967 auto const s
= selfCls(env
);
3968 return setctx(s
? *s
: TCls
);
3971 Type
specialClsRefToCls(ISS
& env
, SpecialClsRef ref
) {
3972 if (!env
.ctx
.cls
) return TCls
;
3973 auto const op
= [&]()-> folly::Optional
<Type
> {
3975 case SpecialClsRef::Static
: return ctxCls(env
);
3976 case SpecialClsRef::Self
: return selfClsExact(env
);
3977 case SpecialClsRef::Parent
: return parentClsExact(env
);
3979 always_assert(false);
3981 return op
? *op
: TCls
;
3984 template<bool reifiedVersion
= false>
3985 void resolveClsMethodSImpl(ISS
& env
, SpecialClsRef ref
, LSString meth_name
) {
3986 auto const clsTy
= specialClsRefToCls(env
, ref
);
3987 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, meth_name
);
3988 if (is_specialized_cls(clsTy
) && dcls_of(clsTy
).type
== DCls::Exact
&&
3989 !rfunc
.couldHaveReifiedGenerics()) {
3990 auto const clsName
= dcls_of(clsTy
).cls
.name();
3991 return reduce(env
, bc::ResolveClsMethodD
{ clsName
, meth_name
});
3993 if (reifiedVersion
) popC(env
);
3994 if (!reifiedVersion
|| !rfunc
.couldHaveReifiedGenerics()) {
3995 push(env
, TClsMeth
);
3997 push(env
, TClsMethLike
);
4003 void in(ISS
& env
, const bc::ResolveClsMethod
& op
) {
4005 push(env
, TClsMeth
);
4008 void in(ISS
& env
, const bc::ResolveClsMethodD
& op
) {
4009 push(env
, TClsMeth
);
4012 void in(ISS
& env
, const bc::ResolveClsMethodS
& op
) {
4013 resolveClsMethodSImpl
<false>(env
, op
.subop1
, op
.str2
);
4016 void in(ISS
& env
, const bc::ResolveRClsMethod
&) {
4019 push(env
, TClsMethLike
);
4022 void in(ISS
& env
, const bc::ResolveRClsMethodD
&) {
4024 push(env
, TClsMethLike
);
4027 void in(ISS
& env
, const bc::ResolveRClsMethodS
& op
) {
4028 resolveClsMethodSImpl
<true>(env
, op
.subop1
, op
.str2
);
4031 void in(ISS
& env
, const bc::ResolveClass
& op
) {
4033 auto cls
= env
.index
.resolve_class(env
.ctx
, op
.str1
);
4034 if (cls
&& cls
->resolved()) {
4035 push(env
, clsExact(*cls
));
4037 // If the class is not resolved,
4038 // it might not be unique or it might not be a valid classname.
4039 push(env
, TArrKeyCompat
);
4043 void in(ISS
& env
, const bc::LazyClass
&) {
4044 // TODO: T70712990: Specialize HHBBC types for lazy classes
4045 push(env
, TLazyCls
);
4050 Context
getCallContext(const ISS
& env
, const FCallArgs
& fca
) {
4051 if (auto const name
= fca
.context()) {
4052 auto const rcls
= env
.index
.resolve_class(env
.ctx
, name
);
4053 if (rcls
&& rcls
->cls()) {
4054 return Context
{ env
.ctx
.unit
, env
.ctx
.func
, rcls
->cls() };
4056 return Context
{ env
.ctx
.unit
, env
.ctx
.func
, nullptr };
4061 void fcallObjMethodNullsafe(ISS
& env
, const FCallArgs
& fca
, bool extraInput
) {
4063 if (extraInput
) repl
.push_back(bc::PopC
{});
4064 if (fca
.hasGenerics()) repl
.push_back(bc::PopC
{});
4065 if (fca
.hasUnpack()) repl
.push_back(bc::PopC
{});
4066 auto const numArgs
= fca
.numArgs();
4067 for (uint32_t i
= 0; i
< numArgs
; ++i
) {
4068 assertx(topC(env
, repl
.size()).subtypeOf(BInitCell
));
4069 repl
.push_back(bc::PopC
{});
4071 repl
.push_back(bc::PopU
{});
4072 repl
.push_back(bc::PopU
{});
4073 repl
.push_back(bc::PopC
{});
4074 auto const numRets
= fca
.numRets();
4075 for (uint32_t i
= 0; i
< numRets
- 1; ++i
) {
4076 repl
.push_back(bc::PopU
{});
4078 repl
.push_back(bc::Null
{});
4080 reduce(env
, std::move(repl
));
4083 template <typename Op
, class UpdateBC
>
4084 void fcallObjMethodImpl(ISS
& env
, const Op
& op
, SString methName
, bool dynamic
,
4085 bool extraInput
, UpdateBC updateBC
) {
4086 auto const nullThrows
= op
.subop3
== ObjMethodOp::NullThrows
;
4087 auto const inputPos
= op
.fca
.numInputs() + (extraInput
? 3 : 2);
4088 auto const input
= topC(env
, inputPos
);
4089 auto const location
= topStkEquiv(env
, inputPos
);
4090 auto const mayCallMethod
= input
.couldBe(BObj
);
4091 auto const mayUseNullsafe
= !nullThrows
&& input
.couldBe(BNull
);
4092 auto const mayThrowNonObj
= !input
.subtypeOf(nullThrows
? BObj
: BOptObj
);
4094 auto const refineLoc
= [&] {
4095 if (location
== NoLocalId
) return;
4096 if (!refineLocation(env
, location
, [&] (Type t
) {
4097 if (nullThrows
) return intersection_of(t
, TObj
);
4098 if (!t
.couldBe(BUninit
)) return intersection_of(t
, TOptObj
);
4099 if (!t
.couldBe(BObj
)) return intersection_of(t
, TNull
);
4106 auto const unknown
= [&] {
4107 if (extraInput
) popC(env
);
4108 fcallUnknownImpl(env
, op
.fca
);
4112 if (!mayCallMethod
&& !mayUseNullsafe
) {
4113 // This FCallObjMethodD may only throw, make sure it's not optimized away.
4119 if (!mayCallMethod
&& !mayThrowNonObj
) {
4120 // Null input, this may only return null, so do that.
4121 return fcallObjMethodNullsafe(env
, op
.fca
, extraInput
);
4124 if (!mayCallMethod
) {
4125 // May only return null, but can't fold as we may still throw.
4126 assertx(mayUseNullsafe
&& mayThrowNonObj
);
4130 auto const ctx
= getCallContext(env
, op
.fca
);
4131 auto const ctxTy
= intersection_of(input
, TObj
);
4132 auto const clsTy
= objcls(ctxTy
);
4133 auto const rfunc
= env
.index
.resolve_method(ctx
, clsTy
, methName
);
4135 auto const canFold
= !mayUseNullsafe
&& !mayThrowNonObj
;
4136 if (fcallOptimizeChecks(env
, op
.fca
, rfunc
, updateBC
) ||
4137 (canFold
&& fcallTryFold(env
, op
.fca
, rfunc
, ctxTy
, dynamic
,
4138 extraInput
? 1 : 0))) {
4142 if (rfunc
.exactFunc() && op
.str2
->empty()) {
4143 return reduce(env
, updateBC(op
.fca
, rfunc
.exactFunc()->cls
->name
));
4146 fcallKnownImpl(env
, op
.fca
, rfunc
, ctxTy
, mayUseNullsafe
, extraInput
? 1 : 0,
4153 void in(ISS
& env
, const bc::FCallObjMethodD
& op
) {
4154 if (op
.fca
.hasGenerics()) {
4155 auto const tsList
= topC(env
);
4156 if (!tsList
.couldBe(RuntimeOption::EvalHackArrDVArrs
? BVec
: BVArr
)) {
4157 return unreachable(env
);
4160 auto const input
= topC(env
, op
.fca
.numInputs() + 2);
4161 auto const clsTy
= objcls(intersection_of(input
, TObj
));
4162 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, op
.str4
);
4163 if (!rfunc
.couldHaveReifiedGenerics()) {
4167 bc::FCallObjMethodD
{
4168 op
.fca
.withoutGenerics(), op
.str2
, op
.subop3
, op
.str4
}
4173 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4174 if (!clsHint
) clsHint
= op
.str2
;
4175 return bc::FCallObjMethodD
{ std::move(fca
), clsHint
, op
.subop3
, op
.str4
};
4177 fcallObjMethodImpl(env
, op
, op
.str4
, false, false, updateBC
);
4180 void in(ISS
& env
, const bc::FCallObjMethod
& op
) {
4181 auto const methName
= getNameFromType(topC(env
));
4184 fcallUnknownImpl(env
, op
.fca
);
4188 auto const input
= topC(env
, op
.fca
.numInputs() + 3);
4189 auto const clsTy
= objcls(intersection_of(input
, TObj
));
4190 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, methName
);
4191 if (!rfunc
.mightCareAboutDynCalls()) {
4195 bc::FCallObjMethodD
{ op
.fca
, op
.str2
, op
.subop3
, methName
}
4199 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4200 if (!clsHint
) clsHint
= op
.str2
;
4201 return bc::FCallObjMethod
{ std::move(fca
), clsHint
, op
.subop3
};
4203 fcallObjMethodImpl(env
, op
, methName
, true, true, updateBC
);
4208 template <typename Op
, class UpdateBC
>
4209 void fcallClsMethodImpl(ISS
& env
, const Op
& op
, Type clsTy
, SString methName
,
4210 bool dynamic
, uint32_t numExtraInputs
,
4211 UpdateBC updateBC
) {
4212 auto const ctx
= getCallContext(env
, op
.fca
);
4213 auto const rfunc
= env
.index
.resolve_method(ctx
, clsTy
, methName
);
4215 if (fcallOptimizeChecks(env
, op
.fca
, rfunc
, updateBC
) ||
4216 fcallTryFold(env
, op
.fca
, rfunc
, clsTy
, dynamic
, numExtraInputs
)) {
4220 if (rfunc
.exactFunc() && op
.str2
->empty()) {
4221 return reduce(env
, updateBC(op
.fca
, rfunc
.exactFunc()->cls
->name
));
4224 fcallKnownImpl(env
, op
.fca
, rfunc
, clsTy
, false /* nullsafe */,
4225 numExtraInputs
, updateBC
);
4230 void in(ISS
& env
, const bc::FCallClsMethodD
& op
) {
4231 auto const rcls
= env
.index
.resolve_class(env
.ctx
, op
.str3
);
4232 auto const clsTy
= rcls
? clsExact(*rcls
) : TCls
;
4233 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, op
.str4
);
4235 if (op
.fca
.hasGenerics() && !rfunc
.couldHaveReifiedGenerics()) {
4239 bc::FCallClsMethodD
{
4240 op
.fca
.withoutGenerics(), op
.str2
, op
.str3
, op
.str4
}
4244 if (auto const func
= rfunc
.exactFunc()) {
4245 assertx(func
->cls
!= nullptr);
4246 if (func
->cls
->name
->same(op
.str3
) && can_emit_builtin(env
, func
, op
.fca
)) {
4247 // When we use FCallBuiltin to call a static method, the litstr method
4248 // name will be a fully qualified cls::fn (e.g. "HH\Map::fromItems").
4250 // As a result, we can only do this optimization if the name of the
4251 // builtin function's class matches this op's class name immediate.
4252 return finish_builtin(env
, func
, op
.fca
);
4256 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4257 if (!clsHint
) clsHint
= op
.str2
;
4258 return bc::FCallClsMethodD
{ std::move(fca
), clsHint
, op
.str3
, op
.str4
};
4260 fcallClsMethodImpl(env
, op
, clsTy
, op
.str4
, false, 0, updateBC
);
4263 void in(ISS
& env
, const bc::FCallClsMethod
& op
) {
4264 auto const methName
= getNameFromType(topC(env
, 1));
4268 fcallUnknownImpl(env
, op
.fca
);
4272 auto const clsTy
= topC(env
);
4273 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, methName
);
4274 auto const skipLogAsDynamicCall
=
4275 !RuntimeOption::EvalLogKnownMethodsAsDynamicCalls
&&
4276 op
.subop3
== IsLogAsDynamicCallOp::DontLogAsDynamicCall
;
4277 if (is_specialized_cls(clsTy
) && dcls_of(clsTy
).type
== DCls::Exact
&&
4278 (!rfunc
.mightCareAboutDynCalls() || skipLogAsDynamicCall
)) {
4279 auto const clsName
= dcls_of(clsTy
).cls
.name();
4284 bc::FCallClsMethodD
{ op
.fca
, op
.str2
, clsName
, methName
}
4288 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4289 if (!clsHint
) clsHint
= op
.str2
;
4290 return bc::FCallClsMethod
{ std::move(fca
), clsHint
, op
.subop3
};
4292 fcallClsMethodImpl(env
, op
, clsTy
, methName
, true, 2, updateBC
);
4297 template <typename Op
, class UpdateBC
>
4298 void fcallClsMethodSImpl(ISS
& env
, const Op
& op
, SString methName
, bool dynamic
,
4299 bool extraInput
, UpdateBC updateBC
) {
4300 auto const clsTy
= specialClsRefToCls(env
, op
.subop3
);
4301 if (is_specialized_cls(clsTy
) && dcls_of(clsTy
).type
== DCls::Exact
&&
4302 !dynamic
&& op
.subop3
== SpecialClsRef::Static
) {
4303 auto const clsName
= dcls_of(clsTy
).cls
.name();
4304 reduce(env
, bc::FCallClsMethodD
{ op
.fca
, op
.str2
, clsName
, methName
});
4308 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, methName
);
4310 if (fcallOptimizeChecks(env
, op
.fca
, rfunc
, updateBC
) ||
4311 fcallTryFold(env
, op
.fca
, rfunc
, ctxCls(env
), dynamic
,
4312 extraInput
? 1 : 0)) {
4316 if (rfunc
.exactFunc() && op
.str2
->empty()) {
4317 return reduce(env
, updateBC(op
.fca
, rfunc
.exactFunc()->cls
->name
));
4320 fcallKnownImpl(env
, op
.fca
, rfunc
, ctxCls(env
), false /* nullsafe */,
4321 extraInput
? 1 : 0, updateBC
);
4326 void in(ISS
& env
, const bc::FCallClsMethodSD
& op
) {
4327 if (op
.fca
.hasGenerics()) {
4328 auto const clsTy
= specialClsRefToCls(env
, op
.subop3
);
4329 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, op
.str4
);
4330 if (!rfunc
.couldHaveReifiedGenerics()) {
4334 bc::FCallClsMethodSD
{
4335 op
.fca
.withoutGenerics(), op
.str2
, op
.subop3
, op
.str4
}
4340 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4341 if (!clsHint
) clsHint
= op
.str2
;
4342 return bc::FCallClsMethodSD
{ std::move(fca
), clsHint
, op
.subop3
, op
.str4
};
4344 fcallClsMethodSImpl(env
, op
, op
.str4
, false, false, updateBC
);
4347 void in(ISS
& env
, const bc::FCallClsMethodS
& op
) {
4348 auto const methName
= getNameFromType(topC(env
));
4351 fcallUnknownImpl(env
, op
.fca
);
4355 auto const clsTy
= specialClsRefToCls(env
, op
.subop3
);
4356 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, methName
);
4357 if (!rfunc
.mightCareAboutDynCalls() && !rfunc
.couldHaveReifiedGenerics()) {
4361 bc::FCallClsMethodSD
{ op
.fca
, op
.str2
, op
.subop3
, methName
}
4365 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4366 if (!clsHint
) clsHint
= op
.str2
;
4367 return bc::FCallClsMethodS
{ std::move(fca
), clsHint
, op
.subop3
};
4369 fcallClsMethodSImpl(env
, op
, methName
, true, true, updateBC
);
4374 void newObjDImpl(ISS
& env
, const StringData
* className
, bool rflavor
) {
4375 auto const rcls
= env
.index
.resolve_class(env
.ctx
, className
);
4377 if (rflavor
) popC(env
);
4381 if (rflavor
&& !rcls
->couldHaveReifiedGenerics()) {
4382 return reduce(env
, bc::PopC
{}, bc::NewObjD
{ className
});
4384 auto const isCtx
= !rcls
->couldBeOverriden() && env
.ctx
.cls
&&
4385 rcls
->same(env
.index
.resolve_class(env
.ctx
.cls
));
4386 if (rflavor
) popC(env
);
4387 push(env
, setctx(objExact(*rcls
), isCtx
));
4392 void in(ISS
& env
, const bc::NewObjD
& op
) { newObjDImpl(env
, op
.str1
, false); }
4393 void in(ISS
& env
, const bc::NewObjRD
& op
) { newObjDImpl(env
, op
.str1
, true); }
4395 void in(ISS
& env
, const bc::NewObjS
& op
) {
4396 auto const cls
= specialClsRefToCls(env
, op
.subop1
);
4397 if (!is_specialized_cls(cls
)) {
4402 auto const dcls
= dcls_of(cls
);
4403 auto const exact
= dcls
.type
== DCls::Exact
;
4404 if (exact
&& !dcls
.cls
.couldHaveReifiedGenerics() &&
4405 (!dcls
.cls
.couldBeOverriden() || equivalently_refined(cls
, unctx(cls
)))) {
4406 return reduce(env
, bc::NewObjD
{ dcls
.cls
.name() });
4409 push(env
, toobj(cls
));
4412 void in(ISS
& env
, const bc::NewObj
& op
) {
4413 auto const cls
= topC(env
);
4414 if (!is_specialized_cls(cls
)) {
4420 auto const dcls
= dcls_of(cls
);
4421 auto const exact
= dcls
.type
== DCls::Exact
;
4422 if (exact
&& !dcls
.cls
.mightCareAboutDynConstructs()) {
4426 bc::NewObjD
{ dcls
.cls
.name() }
4431 push(env
, toobj(cls
));
4434 void in(ISS
& env
, const bc::NewObjR
& op
) {
4435 auto const generics
= topC(env
);
4436 auto const cls
= topC(env
, 1);
4438 if (generics
.subtypeOf(BInitNull
)) {
4446 if (!is_specialized_cls(cls
)) {
4453 auto const dcls
= dcls_of(cls
);
4454 auto const exact
= dcls
.type
== DCls::Exact
;
4455 if (exact
&& !dcls
.cls
.couldHaveReifiedGenerics()) {
4465 push(env
, toobj(cls
));
4470 bool objMightHaveConstProps(const Type
& t
) {
4471 assertx(t
.subtypeOf(BObj
));
4472 assertx(is_specialized_obj(t
));
4473 auto const dobj
= dobj_of(t
);
4474 switch (dobj
.type
) {
4476 return dobj
.cls
.couldHaveConstProp();
4478 return dobj
.cls
.derivedCouldHaveConstProp();
4485 void in(ISS
& env
, const bc::FCallCtor
& op
) {
4486 auto const obj
= topC(env
, op
.fca
.numInputs() + 2);
4487 assertx(op
.fca
.numRets() == 1);
4489 if (!is_specialized_obj(obj
)) {
4490 return fcallUnknownImpl(env
, op
.fca
);
4493 if (op
.fca
.lockWhileUnwinding() && !objMightHaveConstProps(obj
)) {
4495 env
, bc::FCallCtor
{ op
.fca
.withoutLockWhileUnwinding(), op
.str2
}
4499 auto const dobj
= dobj_of(obj
);
4500 auto const exact
= dobj
.type
== DObj::Exact
;
4501 auto const rfunc
= env
.index
.resolve_ctor(env
.ctx
, dobj
.cls
, exact
);
4503 return fcallUnknownImpl(env
, op
.fca
);
4506 auto const updateFCA
= [&] (FCallArgs
&& fca
) {
4507 return bc::FCallCtor
{ std::move(fca
), op
.str2
};
4510 auto const canFold
= obj
.subtypeOf(BObj
);
4511 if (fcallOptimizeChecks(env
, op
.fca
, *rfunc
, updateFCA
) ||
4512 (canFold
&& fcallTryFold(env
, op
.fca
, *rfunc
,
4513 obj
, false /* dynamic */, 0))) {
4517 if (rfunc
->exactFunc() && op
.str2
->empty()) {
4518 // We've found the exact func that will be called, set the hint.
4519 return reduce(env
, bc::FCallCtor
{ op
.fca
, rfunc
->exactFunc()->cls
->name
});
4522 fcallKnownImpl(env
, op
.fca
, *rfunc
, obj
, false /* nullsafe */, 0,
4526 void in(ISS
& env
, const bc::LockObj
& op
) {
4527 auto const t
= topC(env
);
4530 return push(env
, t
);
4532 if (!t
.subtypeOf(BObj
)) return bail();
4533 if (!is_specialized_obj(t
) || objMightHaveConstProps(t
)) {
4542 // baseLoc is NoLocalId for non-local iterators.
4543 void iterInitImpl(ISS
& env
, IterArgs ita
, BlockId target
, LocalId baseLoc
) {
4544 auto const local
= baseLoc
!= NoLocalId
;
4545 auto const sourceLoc
= local
? baseLoc
: topStkLocal(env
);
4546 auto const base
= local
? locAsCell(env
, baseLoc
) : topC(env
);
4547 auto ity
= iter_types(base
);
4549 auto const fallthrough
= [&] {
4550 auto const baseCannotBeObject
= !base
.couldBe(BObj
);
4551 setIter(env
, ita
.iterId
, LiveIter
{ ity
, sourceLoc
, NoLocalId
, env
.bid
,
4552 false, baseCannotBeObject
});
4553 // Do this after setting the iterator, in case it clobbers the base local
4555 setLoc(env
, ita
.valId
, std::move(ity
.value
));
4557 setLoc(env
, ita
.keyId
, std::move(ity
.key
));
4558 setIterKey(env
, ita
.iterId
, ita
.keyId
);
4562 assert(iterIsDead(env
, ita
.iterId
));
4564 if (!ity
.mayThrowOnInit
) {
4565 if (ity
.count
== IterTypes::Count::Empty
&& will_reduce(env
)) {
4569 reduce(env
, bc::PopC
{});
4571 return jmp_setdest(env
, target
);
4576 if (!local
) popC(env
);
4578 switch (ity
.count
) {
4579 case IterTypes::Count::Empty
:
4580 mayReadLocal(env
, ita
.valId
);
4581 if (ita
.hasKey()) mayReadLocal(env
, ita
.keyId
);
4582 jmp_setdest(env
, target
);
4584 case IterTypes::Count::Single
:
4585 case IterTypes::Count::NonEmpty
:
4587 return jmp_nevertaken(env
);
4588 case IterTypes::Count::ZeroOrOne
:
4589 case IterTypes::Count::Any
:
4590 // Take the branch before setting locals if the iter is already
4591 // empty, but after popping. Similar for the other IterInits
4593 env
.propagate(target
, &env
.state
);
4597 always_assert(false);
4600 // baseLoc is NoLocalId for non-local iterators.
4601 void iterNextImpl(ISS
& env
, IterArgs ita
, BlockId target
, LocalId baseLoc
) {
4602 auto const curVal
= peekLocRaw(env
, ita
.valId
);
4603 auto const curKey
= ita
.hasKey() ? peekLocRaw(env
, ita
.keyId
) : TBottom
;
4605 auto noThrow
= false;
4606 auto const noTaken
= match
<bool>(
4607 env
.state
.iters
[ita
.iterId
],
4609 always_assert(false && "IterNext on dead iter");
4612 [&] (const LiveIter
& ti
) {
4613 if (!ti
.types
.mayThrowOnNext
) noThrow
= true;
4614 if (ti
.baseLocal
!= NoLocalId
) hasInvariantIterBase(env
);
4615 switch (ti
.types
.count
) {
4616 case IterTypes::Count::Single
:
4617 case IterTypes::Count::ZeroOrOne
:
4619 case IterTypes::Count::NonEmpty
:
4620 case IterTypes::Count::Any
:
4621 setLoc(env
, ita
.valId
, ti
.types
.value
);
4623 setLoc(env
, ita
.keyId
, ti
.types
.key
);
4624 setIterKey(env
, ita
.iterId
, ita
.keyId
);
4627 case IterTypes::Count::Empty
:
4628 always_assert(false);
4634 if (noTaken
&& noThrow
&& will_reduce(env
)) {
4635 auto const iterId
= safe_cast
<IterId
>(ita
.iterId
);
4636 return baseLoc
== NoLocalId
4637 ? reduce(env
, bc::IterFree
{ iterId
})
4638 : reduce(env
, bc::LIterFree
{ iterId
, baseLoc
});
4641 mayReadLocal(env
, baseLoc
);
4642 mayReadLocal(env
, ita
.valId
);
4643 if (ita
.hasKey()) mayReadLocal(env
, ita
.keyId
);
4645 if (noThrow
) nothrow(env
);
4648 jmp_nevertaken(env
);
4649 freeIter(env
, ita
.iterId
);
4653 env
.propagate(target
, &env
.state
);
4655 freeIter(env
, ita
.iterId
);
4656 setLocRaw(env
, ita
.valId
, curVal
);
4657 if (ita
.hasKey()) setLocRaw(env
, ita
.keyId
, curKey
);
4662 void in(ISS
& env
, const bc::IterInit
& op
) {
4663 iterInitImpl(env
, op
.ita
, op
.target2
, NoLocalId
);
4666 void in(ISS
& env
, const bc::LIterInit
& op
) {
4667 iterInitImpl(env
, op
.ita
, op
.target3
, op
.loc2
);
4670 void in(ISS
& env
, const bc::IterNext
& op
) {
4671 iterNextImpl(env
, op
.ita
, op
.target2
, NoLocalId
);
4674 void in(ISS
& env
, const bc::LIterNext
& op
) {
4675 iterNextImpl(env
, op
.ita
, op
.target3
, op
.loc2
);
4678 void in(ISS
& env
, const bc::IterFree
& op
) {
4679 // IterFree is used for weak iterators too, so we can't assert !iterIsDead.
4680 auto const isNop
= match
<bool>(
4681 env
.state
.iters
[op
.iter1
],
4685 [&] (const LiveIter
& ti
) {
4686 if (ti
.baseLocal
!= NoLocalId
) hasInvariantIterBase(env
);
4691 if (isNop
&& will_reduce(env
)) return reduce(env
);
4694 freeIter(env
, op
.iter1
);
4697 void in(ISS
& env
, const bc::LIterFree
& op
) {
4699 mayReadLocal(env
, op
.loc2
);
4700 freeIter(env
, op
.iter1
);
4704 * Any include/require (or eval) op kills all locals, and private properties.
4706 void inclOpImpl(ISS
& env
) {
4711 push(env
, TInitCell
);
// All include/require variants (and Eval) are modeled identically by
// inclOpImpl; the result of the inclusion is unknown, so it pushes
// TInitCell. See inclOpImpl above for the shared state effects.
void in(ISS& env, const bc::Incl&)     { inclOpImpl(env); }
void in(ISS& env, const bc::InclOnce&) { inclOpImpl(env); }
void in(ISS& env, const bc::Req&)      { inclOpImpl(env); }
void in(ISS& env, const bc::ReqOnce&)  { inclOpImpl(env); }
void in(ISS& env, const bc::ReqDoc&)   { inclOpImpl(env); }
void in(ISS& env, const bc::Eval&)     { inclOpImpl(env); }
4721 void in(ISS
& env
, const bc::This
&) {
4722 if (thisAvailable(env
)) {
4723 return reduce(env
, bc::BareThis
{ BareThisOp::NeverNull
});
4725 auto const ty
= thisTypeNonNull(env
);
4726 push(env
, ty
, StackThisId
);
4727 setThisAvailable(env
);
4728 if (ty
.subtypeOf(BBottom
)) unreachable(env
);
4731 void in(ISS
& env
, const bc::LateBoundCls
& op
) {
4732 if (env
.ctx
.cls
) effect_free(env
);
4733 auto const ty
= selfCls(env
);
4734 push(env
, setctx(ty
? *ty
: TCls
));
4737 void in(ISS
& env
, const bc::CheckThis
&) {
4738 if (thisAvailable(env
)) {
4741 setThisAvailable(env
);
4744 void in(ISS
& env
, const bc::BareThis
& op
) {
4745 if (thisAvailable(env
)) {
4746 if (op
.subop1
!= BareThisOp::NeverNull
) {
4747 return reduce(env
, bc::BareThis
{ BareThisOp::NeverNull
});
4751 auto const ty
= thisType(env
);
4752 switch (op
.subop1
) {
4753 case BareThisOp::Notice
:
4755 case BareThisOp::NoNotice
:
4758 case BareThisOp::NeverNull
:
4759 setThisAvailable(env
);
4760 if (!env
.state
.unreachable
) effect_free(env
);
4761 return push(env
, ty
, StackThisId
);
4764 push(env
, ty
, StackThisId
);
4768 * Amongst other things, we use this to mark units non-persistent.
4770 void in(ISS
& env
, const bc::OODeclExists
& op
) {
4771 auto flag
= popC(env
);
4772 auto name
= popC(env
);
4774 if (!name
.strictSubtypeOf(TStr
)) return TBool
;
4775 auto const v
= tv(name
);
4776 if (!v
) return TBool
;
4777 auto rcls
= env
.index
.resolve_class(env
.ctx
, v
->m_data
.pstr
);
4778 if (!rcls
|| !rcls
->cls()) return TBool
;
4779 auto const mayExist
= [&] () -> bool {
4780 switch (op
.subop1
) {
4781 case OODeclExistsOp::Class
:
4782 return !(rcls
->cls()->attrs
& (AttrInterface
| AttrTrait
));
4783 case OODeclExistsOp::Interface
:
4784 return rcls
->cls()->attrs
& AttrInterface
;
4785 case OODeclExistsOp::Trait
:
4786 return rcls
->cls()->attrs
& AttrTrait
;
4790 auto unit
= rcls
->cls()->unit
;
4791 auto canConstProp
= [&] {
4792 // Its generally not safe to constprop this, because of
4793 // autoload. We're safe if its part of systemlib, or a
4794 // superclass of the current context.
4795 if (is_systemlib_part(*unit
)) return true;
4796 if (!env
.ctx
.cls
) return false;
4797 auto thisClass
= env
.index
.resolve_class(env
.ctx
.cls
);
4798 return thisClass
.mustBeSubtypeOf(*rcls
);
4800 if (canConstProp()) {
4802 return mayExist
? TTrue
: TFalse
;
4804 // At this point, if it mayExist, we still don't know that it
4805 // *does* exist, but if not we know that it either doesn't
4806 // exist, or it doesn't have the right type.
4807 return mayExist
? TBool
: TFalse
;
4812 bool couldBeMocked(const Type
& t
) {
4813 if (is_specialized_cls(t
)) {
4814 return dcls_of(t
).cls
.couldBeMocked();
4815 } else if (is_specialized_obj(t
)) {
4816 return dobj_of(t
).cls
.couldBeMocked();
4818 // In practice this should not occur since this is used mostly on the result
4819 // of looked up type constraints.
4824 using TCVec
= std::vector
<const TypeConstraint
*>;
4826 void in(ISS
& env
, const bc::VerifyParamType
& op
) {
4827 IgnoreUsedParams _
{env
};
4829 if (env
.ctx
.func
->isMemoizeImpl
) {
4830 // a MemoizeImpl's params have already been checked by the wrapper
4834 auto const& pinfo
= env
.ctx
.func
->params
[op
.loc1
];
4835 // Generally we won't know anything about the params, but
4836 // analyze_func_inline does - and this can help with effect-free analysis
4837 TCVec tcs
= {&pinfo
.typeConstraint
};
4838 for (auto const& t
: pinfo
.upperBounds
) tcs
.push_back(&t
);
4839 if (std::all_of(std::begin(tcs
), std::end(tcs
),
4840 [&](const TypeConstraint
* tc
) {
4841 return env
.index
.satisfies_constraint(env
.ctx
,
4842 locAsCell(env
, op
.loc1
),
4845 if (!locAsCell(env
, op
.loc1
).couldBe(BCls
)) {
4851 * We assume that if this opcode doesn't throw, the parameter was of the
4855 for (auto const& constraint
: tcs
) {
4856 if (constraint
->hasConstraint() && !constraint
->isTypeVar() &&
4857 !constraint
->isTypeConstant()) {
4858 auto t
= env
.index
.lookup_constraint(env
.ctx
, *constraint
);
4859 if (constraint
->isThis() && couldBeMocked(t
)) {
4860 t
= unctx(std::move(t
));
4862 FTRACE(2, " {} ({})\n", constraint
->fullName(), show(t
));
4863 tcT
= intersection_of(std::move(tcT
), std::move(t
));
4864 if (tcT
.subtypeOf(BBottom
)) unreachable(env
);
4867 if (tcT
!= TTop
) setLoc(env
, op
.loc1
, std::move(tcT
));
4870 void in(ISS
& env
, const bc::VerifyParamTypeTS
& op
) {
4871 auto const a
= topC(env
);
4872 auto const requiredTSType
= RuntimeOption::EvalHackArrDVArrs
? BDict
: BDArr
;
4873 if (!a
.couldBe(requiredTSType
)) {
4878 auto const constraint
= env
.ctx
.func
->params
[op
.loc1
].typeConstraint
;
4879 // TODO(T31677864): We are being extremely pessimistic here, relax it
4880 if (!env
.ctx
.func
->isReified
&&
4881 (!env
.ctx
.cls
|| !env
.ctx
.cls
->hasReifiedGenerics
) &&
4882 !env
.index
.could_have_reified_type(env
.ctx
, constraint
)) {
4883 return reduce(env
, bc::PopC
{}, bc::VerifyParamType
{ op
.loc1
});
4886 if (auto const inputTS
= tv(a
)) {
4887 if (!isValidTSType(*inputTS
, false)) {
4892 auto const resolvedTS
=
4893 resolveTSStatically(env
, inputTS
->m_data
.parr
, env
.ctx
.cls
);
4894 if (resolvedTS
&& resolvedTS
!= inputTS
->m_data
.parr
) {
4895 reduce(env
, bc::PopC
{});
4896 RuntimeOption::EvalHackArrDVArrs
? reduce(env
, bc::Dict
{ resolvedTS
})
4897 : reduce(env
, bc::Array
{ resolvedTS
});
4898 reduce(env
, bc::VerifyParamTypeTS
{ op
.loc1
});
4905 void verifyRetImpl(ISS
& env
, const TCVec
& tcs
,
4906 bool reduce_this
, bool ts_flavor
) {
4907 // If it is the ts flavor, then second thing on the stack, otherwise first
4908 auto stackT
= topC(env
, (int)ts_flavor
);
4909 auto const stackEquiv
= topStkEquiv(env
, (int)ts_flavor
);
4911 // If there is no return type constraint, or if the return type
4912 // constraint is a typevar, or if the top of stack is the same or a
4913 // subtype of the type constraint, then this is a no-op, unless
4914 // reified types could be involved.
4915 if (std::all_of(std::begin(tcs
), std::end(tcs
),
4916 [&](const TypeConstraint
* tc
) {
4917 return env
.index
.satisfies_constraint(env
.ctx
, stackT
, *tc
);
4920 // we wouldn't get here if reified types were definitely not
4921 // involved, so just bail.
4924 push(env
, std::move(stackT
), stackEquiv
);
4930 std::vector
<Type
> constraintTypes
;
4931 auto dont_reduce
= false;
4933 for (auto const& constraint
: tcs
) {
4934 // When the constraint is not soft.
4935 // We can safely assume that either VerifyRetTypeC will
4936 // throw or it will produce a value whose type is compatible with the
4937 // return type constraint.
4938 auto tcT
= remove_uninit(env
.index
.lookup_constraint(env
.ctx
, *constraint
));
4939 constraintTypes
.push_back(tcT
);
4941 // In some circumstances, verifyRetType can modify the type. If it
4942 // does that we can't reduce even when we know it succeeds.
4943 // VerifyRetType will convert a TCls to a TStr implicitly
4944 // (and possibly warn)
4945 if (tcT
.couldBe(BStr
) && stackT
.couldBe(BCls
)) {
4950 // VerifyRetType will convert TClsMeth to TVec/TVArr/TArr implicitly
4951 if (stackT
.couldBe(BClsMeth
)) {
4952 if (tcT
.couldBe(BVec
)) {
4956 if (tcT
.couldBe(BVArr
)) {
4960 if (tcT
.couldBe(TArr
)) {
4966 // If the constraint is soft, then there are no optimizations we can safely
4967 // do here, so just leave the top of stack as is.
4968 if (constraint
->isSoft() ||
4969 (RuntimeOption::EvalEnforceGenericsUB
< 2 &&
4970 constraint
->isUpperBound()))
4972 if (ts_flavor
) popC(env
);
4974 push(env
, std::move(stackT
), stackEquiv
);
4979 // In cases where we have a `this` hint where stackT is an TOptObj known to
4980 // be this, we can replace the check with a non null check. These cases are
4981 // likely from a BareThis that could return Null. Since the runtime will
4982 // split these translations, it will rarely in practice return null.
4986 std::all_of(std::begin(tcs
), std::end(tcs
),
4987 [&](const TypeConstraint
* constraint
) {
4988 return constraint
->isThis() &&
4989 !constraint
->isNullable() &&
4990 env
.index
.satisfies_constraint(
4991 env
.ctx
, unopt(stackT
), *constraint
);
4996 return reduce(env
, bc::PopC
{}, bc::VerifyRetNonNullC
{});
4998 return reduce(env
, bc::VerifyRetNonNullC
{});
5001 auto retT
= std::move(stackT
);
5002 for (auto& tcT
: constraintTypes
) {
5003 retT
= intersection_of(std::move(tcT
), std::move(retT
));
5004 if (retT
.subtypeOf(BBottom
)) {
5006 if (ts_flavor
) popC(env
); // the type structure
5011 if (ts_flavor
) popC(env
); // the type structure
5013 push(env
, std::move(retT
));
5016 void in(ISS
& env
, const bc::VerifyOutType
& op
) {
5018 auto const& pinfo
= env
.ctx
.func
->params
[op
.arg1
];
5019 tcs
.push_back(&pinfo
.typeConstraint
);
5020 for (auto const& t
: pinfo
.upperBounds
) tcs
.push_back(&t
);
5021 verifyRetImpl(env
, tcs
, false, false);
5024 void in(ISS
& env
, const bc::VerifyRetTypeC
& /*op*/) {
5026 tcs
.push_back(&env
.ctx
.func
->retTypeConstraint
);
5027 for (auto const& t
: env
.ctx
.func
->returnUBs
) tcs
.push_back(&t
);
5028 verifyRetImpl(env
, tcs
, true, false);
5031 void in(ISS
& env
, const bc::VerifyRetTypeTS
& /*op*/) {
5032 auto const a
= topC(env
);
5033 auto const requiredTSType
= RuntimeOption::EvalHackArrDVArrs
? BDict
: BDArr
;
5034 if (!a
.couldBe(requiredTSType
)) {
5039 auto const constraint
= env
.ctx
.func
->retTypeConstraint
;
5040 // TODO(T31677864): We are being extremely pessimistic here, relax it
5041 if (!env
.ctx
.func
->isReified
&&
5042 (!env
.ctx
.cls
|| !env
.ctx
.cls
->hasReifiedGenerics
) &&
5043 !env
.index
.could_have_reified_type(env
.ctx
, constraint
)) {
5044 return reduce(env
, bc::PopC
{}, bc::VerifyRetTypeC
{});
5046 if (auto const inputTS
= tv(a
)) {
5047 if (!isValidTSType(*inputTS
, false)) {
5052 auto const resolvedTS
=
5053 resolveTSStatically(env
, inputTS
->m_data
.parr
, env
.ctx
.cls
);
5054 if (resolvedTS
&& resolvedTS
!= inputTS
->m_data
.parr
) {
5055 reduce(env
, bc::PopC
{});
5056 RuntimeOption::EvalHackArrDVArrs
? reduce(env
, bc::Dict
{ resolvedTS
})
5057 : reduce(env
, bc::Array
{ resolvedTS
});
5058 reduce(env
, bc::VerifyRetTypeTS
{});
5062 TCVec tcs
{&constraint
};
5063 for (auto const& t
: env
.ctx
.func
->returnUBs
) tcs
.push_back(&t
);
5064 verifyRetImpl(env
, tcs
, true, true);
5067 void in(ISS
& env
, const bc::VerifyRetNonNullC
& /*op*/) {
5068 auto const constraint
= env
.ctx
.func
->retTypeConstraint
;
5069 if (constraint
.isSoft()) {
5073 auto stackT
= topC(env
);
5075 if (!stackT
.couldBe(BInitNull
)) {
5080 if (stackT
.subtypeOf(BNull
)) return unreachable(env
);
5082 auto const equiv
= topStkEquiv(env
);
5084 if (is_opt(stackT
)) stackT
= unopt(std::move(stackT
));
5087 push(env
, stackT
, equiv
);
5090 void in(ISS
& env
, const bc::Self
& op
) {
5091 auto const self
= selfClsExact(env
);
5100 void in(ISS
& env
, const bc::Parent
& op
) {
5101 auto const parent
= parentClsExact(env
);
5110 void in(ISS
& env
, const bc::CreateCl
& op
) {
5111 auto const nargs
= op
.arg1
;
5112 auto const clsPair
= env
.index
.resolve_closure_class(env
.ctx
, op
.arg2
);
5115 * Every closure should have a unique allocation site, but we may see it
5116 * multiple times in a given round of analyzing this function. Each time we
5117 * may have more information about the used variables; the types should only
5118 * possibly grow. If it's already there we need to merge the used vars in
5119 * with what we saw last time.
5122 CompactVector
<Type
> usedVars(nargs
);
5123 for (auto i
= uint32_t{0}; i
< nargs
; ++i
) {
5124 usedVars
[nargs
- i
- 1] = unctx(popCU(env
));
5126 merge_closure_use_vars_into(
5127 env
.collect
.closureUseTypes
,
5133 // Closure classes can be cloned and rescoped at runtime, so it's not safe to
5134 // assert the exact type of closure objects. The best we can do is assert
5135 // that it's a subclass of Closure.
5136 auto const closure
= env
.index
.builtin_class(s_Closure
.get());
5138 return push(env
, subObj(closure
));
5141 void in(ISS
& env
, const bc::CreateCont
& /*op*/) {
5142 // First resume is always next() which pushes null.
5143 push(env
, TInitNull
);
// ContEnter/ContRaise (resuming a generator with a value / an exception,
// per the opcode names): abstractly, each consumes one cell and can
// produce any initialized value, so push TInitCell.
void in(ISS& env, const bc::ContEnter&) { popC(env); push(env, TInitCell); }
void in(ISS& env, const bc::ContRaise&) { popC(env); push(env, TInitCell); }
5149 void in(ISS
& env
, const bc::Yield
&) {
5151 push(env
, TInitCell
);
5154 void in(ISS
& env
, const bc::YieldK
&) {
5157 push(env
, TInitCell
);
// Generator introspection opcodes. ContCheck has no stack effect in the
// abstract interpreter. The others push a conservative result type:
// ContValid is a boolean; the key, current value, and return value of a
// generator can be anything, hence TInitCell.
void in(ISS& /*env*/, const bc::ContCheck&) {}
void in(ISS& env, const bc::ContValid&)     { push(env, TBool); }
void in(ISS& env, const bc::ContKey&)       { push(env, TInitCell); }
void in(ISS& env, const bc::ContCurrent&)   { push(env, TInitCell); }
void in(ISS& env, const bc::ContGetReturn&) { push(env, TInitCell); }
5166 void pushTypeFromWH(ISS
& env
, Type t
) {
5167 auto inner
= typeFromWH(t
);
5168 // The next opcode is unreachable if awaiting a non-object or WaitH<Bottom>.
5169 if (inner
.subtypeOf(BBottom
)) unreachable(env
);
5170 push(env
, std::move(inner
));
5173 void in(ISS
& env
, const bc::WHResult
&) {
5174 pushTypeFromWH(env
, popC(env
));
5177 void in(ISS
& env
, const bc::Await
&) {
5178 pushTypeFromWH(env
, popC(env
));
5181 void in(ISS
& env
, const bc::AwaitAll
& op
) {
5182 auto const equiv
= equivLocalRange(env
, op
.locrange
);
5183 if (equiv
!= op
.locrange
.first
) {
5186 bc::AwaitAll
{LocalRange
{equiv
, op
.locrange
.count
}}
5190 for (uint32_t i
= 0; i
< op
.locrange
.count
; ++i
) {
5191 mayReadLocal(env
, op
.locrange
.first
+ i
);
5194 push(env
, TInitNull
);
5199 void idxImpl(ISS
& env
, bool arraysOnly
) {
5200 auto const def
= popC(env
);
5201 auto const key
= popC(env
);
5202 auto const base
= popC(env
);
5204 if (key
.subtypeOf(BInitNull
)) {
5205 // A null key, regardless of whether we're ArrayIdx or Idx will always
5206 // silently return the default value, regardless of the base type.
5209 return push(env
, def
);
5212 // Push the returned type and annotate effects appropriately, taking into
5213 // account if the base might be null. Allowing for a possibly null base lets
5214 // us capture more cases.
5215 auto const finish
= [&] (const Type
& t
, bool canThrow
) {
5216 // A null base will raise if we're ArrayIdx. For Idx, it will silently
5217 // return the default value.
5218 auto const baseMaybeNull
= base
.couldBe(BInitNull
);
5219 if (!canThrow
&& (!arraysOnly
|| !baseMaybeNull
)) {
5223 if (!arraysOnly
&& baseMaybeNull
) return push(env
, union_of(t
, def
));
5224 if (t
.subtypeOf(BBottom
)) unreachable(env
);
5225 return push(env
, t
);
5229 // If ArrayIdx, we'll raise an error for anything other than array-like and
5230 // null. This op is only terminal if null isn't possible.
5231 if (!base
.couldBe(BArr
| BVec
| BDict
| BKeyset
| BClsMeth
)) {
5232 return finish(key
.couldBe(BInitNull
) ? def
: TBottom
, true);
5235 !base
.couldBe(BArr
| BVec
| BDict
| BKeyset
| BStr
| BObj
| BClsMeth
)) {
5236 // Otherwise, any strange bases for Idx will just return the default value
5238 return finish(def
, false);
5241 // Helper for Hack arrays. "validKey" is the set key types which can return a
5242 // value from Idx. "silentKey" is the set of key types which will silently
5243 // return null (anything else throws). The Hack array elem functions will
5244 // treat values of "silentKey" as throwing, so we must identify those cases
5245 // and deal with them.
5246 auto const hackArr
= [&] (std::pair
<Type
, ThrowMode
> elem
,
5247 const Type
& validKey
,
5248 const Type
& silentKey
) {
5249 switch (elem
.second
) {
5250 case ThrowMode::None
:
5251 case ThrowMode::MaybeMissingElement
:
5252 case ThrowMode::MissingElement
:
5253 assertx(key
.subtypeOf(validKey
));
5254 return finish(elem
.first
, false);
5255 case ThrowMode::MaybeBadKey
:
5256 assertx(key
.couldBe(validKey
));
5257 if (key
.couldBe(silentKey
)) elem
.first
|= def
;
5258 return finish(elem
.first
, !key
.subtypeOf(BOptArrKeyCompat
));
5259 case ThrowMode::BadOperation
:
5260 assertx(!key
.couldBe(validKey
));
5261 return finish(key
.couldBe(silentKey
) ? def
: TBottom
, true);
5265 if (base
.subtypeOrNull(BVec
)) {
5266 // Vecs will throw for any key other than Int, Str, or Null, and will
5267 // silently return the default value for the latter two.
5268 if (key
.subtypeOrNull(BStr
)) return finish(def
, false);
5269 return hackArr(vec_elem(base
, key
, def
), TInt
, TOptStr
);
5272 if (base
.subtypeOfAny(TOptDict
, TOptKeyset
)) {
5273 // Dicts and keysets will throw for any key other than Int, Str, or Null,
5274 // and will silently return the default value for Null.
5275 auto const elem
= base
.subtypeOrNull(BDict
)
5276 ? dict_elem(base
, key
, def
)
5277 : keyset_elem(base
, key
, def
);
5278 return hackArr(elem
, TArrKeyCompat
, TInitNull
);
5281 if (base
.subtypeOrNull(BArr
)) {
5282 // A possibly null key is more complicated for arrays. array_elem() will
5283 // transform a null key into an empty string (matching the semantics of
5284 // array access), but that's not what Idx does. So, attempt to remove
5285 // nullish from the key first. If we can't, it just means we'll get a more
5286 // conservative value.
5287 auto maybeNull
= false;
5288 auto const fixedKey
= [&]{
5289 if (key
.couldBe(TInitNull
)) {
5291 if (is_nullish(key
)) return unnullish(key
);
5296 auto elem
= array_elem(base
, fixedKey
, def
);
5297 // If the key was null, Idx will return the default value, so add to the
5299 if (maybeNull
) elem
.first
|= def
;
5301 switch (elem
.second
) {
5302 case ThrowMode::None
:
5303 case ThrowMode::MaybeMissingElement
:
5304 case ThrowMode::MissingElement
:
5305 return finish(elem
.first
, false);
5306 case ThrowMode::MaybeBadKey
:
5307 return finish(elem
.first
, true);
5308 case ThrowMode::BadOperation
:
5309 always_assert(false);
5313 if (!arraysOnly
&& base
.subtypeOrNull(BStr
)) {
5314 // Idx on a string always produces a string or the default value (without
5316 return finish(union_of(TStr
, def
), false);
5319 // Objects or other unions of possible bases
5320 push(env
, TInitCell
);
// Idx and ArrayIdx share idxImpl; the boolean selects ArrayIdx's
// arrays-only behavior (non-array-like, non-null bases raise instead of
// silently yielding the default — see idxImpl above).
void in(ISS& env, const bc::Idx&)      { idxImpl(env, false); }
void in(ISS& env, const bc::ArrayIdx&) { idxImpl(env, true); }
5328 void in(ISS
& env
, const bc::CheckProp
&) {
5329 if (env
.ctx
.cls
->attrs
& AttrNoOverride
) {
5330 return reduce(env
, bc::False
{});
5336 void in(ISS
& env
, const bc::InitProp
& op
) {
5337 auto const t
= topC(env
);
5338 switch (op
.subop2
) {
5339 case InitPropOp::Static
:
5340 mergeSelfProp(env
, op
.str1
, t
);
5341 env
.collect
.publicSPropMutations
.merge(
5342 env
.index
, env
.ctx
, *env
.ctx
.cls
, sval(op
.str1
), t
, true
5345 case InitPropOp::NonStatic
:
5346 mergeThisProp(env
, op
.str1
, t
);
5350 for (auto& prop
: env
.ctx
.func
->cls
->properties
) {
5351 if (prop
.name
!= op
.str1
) continue;
5353 ITRACE(1, "InitProp: {} = {}\n", op
.str1
, show(t
));
5355 if (env
.index
.satisfies_constraint(env
.ctx
, t
, prop
.typeConstraint
) &&
5356 std::all_of(prop
.ubs
.begin(), prop
.ubs
.end(),
5357 [&](TypeConstraint ub
) {
5358 applyFlagsToUB(ub
, prop
.typeConstraint
);
5359 return env
.index
.satisfies_constraint(env
.ctx
, t
, ub
);
5361 prop
.attrs
|= AttrInitialSatisfiesTC
;
5363 badPropInitialValue(env
);
5364 prop
.attrs
= (Attr
)(prop
.attrs
& ~AttrInitialSatisfiesTC
);
5367 auto const v
= tv(t
);
5368 if (v
|| !could_contain_objects(t
)) {
5369 prop
.attrs
= (Attr
)(prop
.attrs
& ~AttrDeepInit
);
5372 env
.index
.update_static_prop_init_val(env
.ctx
.func
->cls
, op
.str1
);
5373 return reduce(env
, bc::PopC
{});
5380 void in(ISS
& env
, const bc::Silence
& op
) {
5382 switch (op
.subop2
) {
5383 case SilenceOp::Start
:
5384 setLoc(env
, op
.loc1
, TInt
);
5386 case SilenceOp::End
:
5387 locRaw(env
, op
.loc1
);
5394 template <typename Op
, typename Rebind
>
5395 bool memoGetImpl(ISS
& env
, const Op
& op
, Rebind
&& rebind
) {
5396 always_assert(env
.ctx
.func
->isMemoizeWrapper
);
5397 always_assert(op
.locrange
.first
+ op
.locrange
.count
5398 <= env
.ctx
.func
->locals
.size());
5400 if (will_reduce(env
)) {
5401 // If we can use an equivalent, earlier range, then use that instead.
5402 auto const equiv
= equivLocalRange(env
, op
.locrange
);
5403 if (equiv
!= op
.locrange
.first
) {
5404 reduce(env
, rebind(LocalRange
{ equiv
, op
.locrange
.count
}));
5409 auto retTy
= memoizeImplRetType(env
);
5411 // MemoGet can raise if we give a non arr-key local, or if we're in a method
5412 // and $this isn't available.
5413 auto allArrKey
= true;
5414 for (uint32_t i
= 0; i
< op
.locrange
.count
; ++i
) {
5415 allArrKey
&= locRaw(env
, op
.locrange
.first
+ i
).subtypeOf(BArrKey
);
5418 (!env
.ctx
.func
->cls
||
5419 (env
.ctx
.func
->attrs
& AttrStatic
) ||
5420 thisAvailable(env
))) {
5421 if (will_reduce(env
)) {
5422 if (retTy
.first
.subtypeOf(BBottom
)) {
5424 jmp_setdest(env
, op
.target1
);
5427 // deal with constprop manually; otherwise we will propagate the
5428 // taken edge and *then* replace the MemoGet with a constant.
5430 if (auto v
= tv(retTy
.first
)) {
5431 reduce(env
, gen_constant(*v
));
5439 if (retTy
.first
== TBottom
) {
5440 jmp_setdest(env
, op
.target1
);
5444 env
.propagate(op
.target1
, &env
.state
);
5445 push(env
, std::move(retTy
.first
));
5451 void in(ISS
& env
, const bc::MemoGet
& op
) {
5454 [&] (const LocalRange
& l
) { return bc::MemoGet
{ op
.target1
, l
}; }
5458 void in(ISS
& env
, const bc::MemoGetEager
& op
) {
5459 always_assert(env
.ctx
.func
->isAsync
&& !env
.ctx
.func
->isGenerator
);
5461 auto const reduced
= memoGetImpl(
5463 [&] (const LocalRange
& l
) {
5464 return bc::MemoGetEager
{ op
.target1
, op
.target2
, l
};
5467 if (reduced
) return;
5469 env
.propagate(op
.target2
, &env
.state
);
5470 auto const t
= popC(env
);
5473 is_specialized_wait_handle(t
) ? wait_handle_inner(t
) : TInitCell
5479 template <typename Op
>
5480 void memoSetImpl(ISS
& env
, const Op
& op
) {
5481 always_assert(env
.ctx
.func
->isMemoizeWrapper
);
5482 always_assert(op
.locrange
.first
+ op
.locrange
.count
5483 <= env
.ctx
.func
->locals
.size());
5485 // If we can use an equivalent, earlier range, then use that instead.
5486 auto const equiv
= equivLocalRange(env
, op
.locrange
);
5487 if (equiv
!= op
.locrange
.first
) {
5490 Op
{ LocalRange
{ equiv
, op
.locrange
.count
} }
5494 // MemoSet can raise if we give a non arr-key local, or if we're in a method
5495 // and $this isn't available.
5496 auto allArrKey
= true;
5497 for (uint32_t i
= 0; i
< op
.locrange
.count
; ++i
) {
5498 allArrKey
&= locRaw(env
, op
.locrange
.first
+ i
).subtypeOf(BArrKey
);
5501 (!env
.ctx
.func
->cls
||
5502 (env
.ctx
.func
->attrs
& AttrStatic
) ||
5503 thisAvailable(env
))) {
5506 push(env
, popC(env
));
5511 void in(ISS
& env
, const bc::MemoSet
& op
) {
5512 memoSetImpl(env
, op
);
5515 void in(ISS
& env
, const bc::MemoSetEager
& op
) {
5516 always_assert(env
.ctx
.func
->isAsync
&& !env
.ctx
.func
->isGenerator
);
5517 memoSetImpl(env
, op
);
5524 //////////////////////////////////////////////////////////////////////
5526 void dispatch(ISS
& env
, const Bytecode
& op
) {
5527 #define O(opcode, ...) case Op::opcode: interp_step::in(env, op.opcode); return;
5528 switch (op
.op
) { OPCODES
}
5533 //////////////////////////////////////////////////////////////////////
5535 void interpStep(ISS
& env
, const Bytecode
& bc
) {
5536 ITRACE(2, " {} ({})\n",
5537 show(env
.ctx
.func
, bc
),
5538 env
.unchangedBcs
+ env
.replacedBcs
.size());
5541 // If there are throw exit edges, make a copy of the state (except
5542 // stacks) in case we need to propagate across throw exits (if
5544 if (!env
.stateBefore
&& env
.blk
.throwExit
!= NoBlockId
) {
5545 env
.stateBefore
.emplace(with_throwable_only(env
.index
, env
.state
));
5550 default_dispatch(env
, bc
);
5552 if (env
.flags
.reduced
) return;
5554 auto const_prop
= [&] {
5555 if (!options
.ConstantProp
|| !env
.flags
.canConstProp
) return false;
5557 auto const numPushed
= bc
.numPush();
5558 TinyVector
<TypedValue
> cells
;
5561 while (i
< numPushed
) {
5562 auto const v
= tv(topT(env
, i
));
5563 if (!v
) return false;
5564 cells
.push_back(*v
);
5568 if (env
.flags
.wasPEI
) {
5569 ITRACE(2, " nothrow (due to constprop)\n");
5570 env
.flags
.wasPEI
= false;
5572 if (!env
.flags
.effectFree
) {
5573 ITRACE(2, " effect_free (due to constprop)\n");
5574 env
.flags
.effectFree
= true;
5579 auto const numPop
= bc
.numPop();
5580 for (auto j
= 0; j
< numPop
; j
++) {
5581 auto const flavor
= bc
.popFlavor(j
);
5582 if (flavor
== Flavor::C
) {
5583 interpStep(env
, bc::PopC
{});
5584 } else if (flavor
== Flavor::U
) {
5585 interpStep(env
, bc::PopU
{});
5587 assertx(flavor
== Flavor::CU
);
5588 auto const& popped
= topT(env
);
5589 if (popped
.subtypeOf(BUninit
)) {
5590 interpStep(env
, bc::PopU
{});
5592 assertx(popped
.subtypeOf(BInitCell
));
5593 interpStep(env
, bc::PopC
{});
5599 push(env
, from_cell(cells
[i
]));
5600 record(env
, gen_constant(cells
[i
]));
5609 assertx(!env
.flags
.effectFree
|| !env
.flags
.wasPEI
);
5610 if (env
.flags
.wasPEI
) {
5611 ITRACE(2, " PEI.\n");
5612 if (env
.stateBefore
) {
5613 env
.propagate(env
.blk
.throwExit
, &*env
.stateBefore
);
5616 env
.stateBefore
.reset();
5621 void interpOne(ISS
& env
, const Bytecode
& bc
) {
5622 env
.srcLoc
= bc
.srcLoc
;
5623 interpStep(env
, bc
);
// Speculatively interpret every bytecode of interp.blk (without committing
// any state propagation) to see whether the block funnels into a single
// known successor block.  Returns that successor's BlockId when found.
// NOTE(review): this extraction appears to have dropped a number of
// original source lines (e.g. the per-bytecode dispatch call and several
// bail-out returns); comments below describe only what is visible here.
5626 BlockId
speculate(Interp
& interp
) {
// Lowest stack depth observed while stepping; a discovered jump target
// only counts if the stack is back at this depth when the branch is hit.
5627 auto low_water
= interp
.state
.stack
.size();
// Mark the collection as speculating for the duration of the walk...
5629 interp
.collect
.opts
= interp
.collect
.opts
| CollectionOpts::Speculating
;
// ...and remove the flag again.  NOTE(review): in the original this
// removal is presumably deferred (scope guard) or separated by dropped
// lines; as written here it would immediately undo the line above --
// confirm against upstream.
5631 interp
.collect
.opts
= interp
.collect
.opts
- CollectionOpts::Speculating
;
// Build an ISS whose propagate callback merely records failure: any state
// propagation during speculation means we must bail out.
5634 auto failed
= false;
5635 ISS env
{ interp
, [&] (BlockId
, const State
*) { failed
= true; } };
5637 FTRACE(4, " Speculate B{}\n", interp
.bid
);
// Walk each bytecode of the block being speculated.
5638 for (auto const& bc
: interp
.blk
->hhbcs
) {
5639 assertx(!interp
.state
.unreachable
);
// Effective number of cells this instruction reaches below the top:
// CGetL2 reaches one deeper than numPop() reports, Dup one less.
5640 auto const numPop
= bc
.numPop() +
5641 (bc
.op
== Op::CGetL2
? 1 :
5642 bc
.op
== Op::Dup
? -1 : 0);
// Track the minimum depth the stack reaches during the block.
5643 if (interp
.state
.stack
.size() - numPop
< low_water
) {
5644 low_water
= interp
.state
.stack
.size() - numPop
;
// Bail-out path: propagation happened during the step, so clear any
// member-instruction state and give up.  NOTE(review): the dispatch call
// and the `return NoBlockId` appear to be among the dropped lines.
5649 env
.collect
.mInstrState
.clear();
5650 FTRACE(3, " Bailing from speculate because propagate was called\n");
// Every speculated instruction must be effect-free, or we bail as well.
5654 auto const& flags
= env
.flags
;
5655 if (!flags
.effectFree
) {
5656 env
.collect
.mInstrState
.clear();
5657 FTRACE(3, " Bailing from speculate because not effect free\n");
// An effect-free block cannot have returned.
5661 assertx(!flags
.returned
);
// If the last instruction produced a jump target and the stack is back at
// its low-water depth, that target is the block's single successor.
5663 if (flags
.jmpDest
!= NoBlockId
&& interp
.state
.stack
.size() == low_water
) {
5664 FTRACE(2, " Speculate found target block {}\n", flags
.jmpDest
);
5665 return flags
.jmpDest
;
// Otherwise the stack must be clean; leftover cells mean the result is
// unusable.  (The FTRACE head for this message was lost in extraction.)
5669 if (interp
.state
.stack
.size() != low_water
) {
5671 " Bailing from speculate because the speculated block "
5672 "left items on the stack\n");
// Without a fallthrough edge there is no successor to report.
5676 if (interp
.blk
->fallthrough
== NoBlockId
) {
5678 " Bailing from speculate because there was no fallthrough");
// Success: the block's fallthrough is its single known successor.
5682 FTRACE(2, " Speculate found fallthrough block {}\n",
5683 interp
.blk
->fallthrough
);
5685 return interp
.blk
->fallthrough
;
// Starting from block `orig`, repeatedly speculate through successor
// blocks (when dead-block removal is enabled) to find the ultimate jump
// target, and optionally rewrite the taken edge of the last emitted
// instruction to point there.  Returns the (possibly updated) target id.
// NOTE(review): a number of original lines are missing from this
// extraction (the declaration of `target`, the loop header around the
// chasing code, the switch cases, the Interp construction head, etc.);
// comments below describe only what is visible.
5688 BlockId
speculateHelper(ISS
& env
, BlockId orig
, bool updateTaken
) {
5689 assertx(orig
!= NoBlockId
);
// Only worthwhile when we are allowed to reduce/rewrite this block.
5691 if (!will_reduce(env
)) return orig
;
// Whether the last emitted instruction is (non-call) control flow; if so,
// we must not change the stack shape across the speculated edge.
5693 auto const last
= last_op(env
);
5694 bool endsInControlFlow
= last
&& instrIsNonCallControlFlow(last
->op
);
// Chase jump targets while dead-block removal is on, using a compact copy
// of the current state so speculation cannot disturb env.state.
5698 if (options
.RemoveDeadBlocks
) {
5699 State temp
{env
.state
, State::Compact
{}};
5701 auto const& func
= env
.ctx
.func
;
5702 auto const targetBlk
= func
.blocks()[target
].get();
// Stop chasing unless the target block has multiple predecessors.
5703 if (!targetBlk
->multiPred
) break;
// Only chase through targets whose final opcode is one of an allowed set
// (the switch's case labels were lost in extraction).
5704 auto const ok
= [&] {
5705 switch (targetBlk
->hhbcs
.back().op
) {
// Arguments to what appears to be an Interp construction over the target
// block using the scratch state (construction head lost in extraction).
5719 env
.index
, env
.ctx
, env
.collect
, target
, targetBlk
, temp
// Remember the stack depth, then speculate into the target block.
5722 auto const old_size
= temp
.stack
.size();
5723 auto const new_target
= speculate(interp
);
5724 if (new_target
== NoBlockId
) break;
// Cells consumed by the speculated block; must be non-negative, and must
// be zero when our last instruction is control flow (stack shape matters).
5726 const ssize_t delta
= old_size
- temp
.stack
.size();
5727 assertx(delta
>= 0);
5728 if (delta
&& endsInControlFlow
) break;
// Commit: adopt the new target and keep the scratch stack compact.
5731 target
= new_target
;
5732 temp
.stack
.compact();
// If requested, retarget the taken edge(s) of the final control-flow
// instruction to the block we ended up at.
5736 if (endsInControlFlow
&& updateTaken
) {
5738 auto needsUpdate
= target
!= orig
;
5743 if (bid
!= orig
) needsUpdate
= true;
5748 auto& bc
= mutate_last_op(env
);
// Edge visitor: edges that pointed at `orig` now point at `target`; other
// edges become NoBlockId.  NOTE(review): the enclosing edge-iteration call
// appears to have been dropped here -- confirm upstream.
5751 [&] (BlockId
& bid
) {
5752 bid
= bid
== orig
? target
: NoBlockId
;
// Pop the value the branch would have consumed, choosing PopC or PopU
// based on the type of the cell on top of the stack.
5759 auto const& popped
= topT(env
);
5760 if (popped
.subtypeOf(BInitCell
)) {
5761 interpStep(env
, bc::PopC
{});
5763 assertx(popped
.subtypeOf(BUninit
));
5764 interpStep(env
, bc::PopU
{});
5773 //////////////////////////////////////////////////////////////////////
// Drive the interpreter over every bytecode of interp.blk starting from
// state `in`, invoking `propagate` for discovered successor states, and
// return the accumulated RunFlags (including block-update info).  The
// block may be re-interpreted from scratch ("reprocess") after in-place
// bytecode rewrites.
// NOTE(review): several original lines are missing from this extraction
// (the outer retry loop header, the per-instruction loop over `idx`, the
// interpretation call, and some closing braces/returns); comments below
// describe only what is visible.
5775 RunFlags
run(Interp
& interp
, const State
& in
, PropagateFn propagate
) {
5777 FTRACE(2, "out {}{}\n",
5778 state_string(*interp
.ctx
.func
, interp
.state
, interp
.collect
),
5779 property_state_string(interp
.collect
.props
));
5782 auto env
= ISS
{ interp
, propagate
};
5783 auto ret
= RunFlags
{};
// Package the final fallthrough plus the unchanged/replaced bytecode
// bookkeeping into the returned RunFlags.  NOTE(review): the lambda's
// trailing `return ret;`-style code appears to be among the dropped lines.
5784 auto finish
= [&] (BlockId fallthrough
) {
5785 ret
.updateInfo
.fallthrough
= fallthrough
;
5786 ret
.updateInfo
.unchangedBcs
= env
.unchangedBcs
;
5787 ret
.updateInfo
.replacedBcs
= std::move(env
.replacedBcs
);
// Retry bookkeeping: replacement bytecodes accumulated so far, the offset
// where the original block's bytecodes stop being used, the total length,
// and the fallthrough block to use.
5791 BytecodeVec retryBcs
;
5792 auto retryOffset
= interp
.blk
->hhbcs
.size();
5793 auto size
= retryOffset
;
5794 BlockId retryFallthrough
= interp
.blk
->fallthrough
;
// End of one pass: flush tracked elems; if nothing requested reprocessing
// we are done, otherwise restart interpretation over the mutated block.
5799 finish_tracked_elems(env
, 0);
5800 if (!env
.reprocess
) break;
5801 FTRACE(2, " Reprocess mutated block {}\n", interp
.bid
);
5802 assertx(env
.unchangedBcs
< retryOffset
|| env
.replacedBcs
.size());
// Reset for the retry: keep the unchanged prefix, take ownership of the
// replacement bytecodes, and restart from the input state.
5803 retryOffset
= env
.unchangedBcs
;
5804 retryBcs
= std::move(env
.replacedBcs
);
5805 env
.unchangedBcs
= 0;
5806 env
.state
.copy_from(in
);
5807 env
.reprocess
= false;
5808 env
.replacedBcs
.clear();
5809 size
= retryOffset
+ retryBcs
.size();
// Select the next bytecode: original block contents below retryOffset,
// replacement bytecodes at and above it.
5814 auto const& bc
= idx
< retryOffset
?
5815 interp
.blk
->hhbcs
[idx
] : retryBcs
[idx
- retryOffset
];
// Once any instruction is not effect-free the whole collection isn't; if
// the caller only cares about effect-free blocks, bail out immediately.
5819 auto const& flags
= env
.flags
;
5820 if (interp
.collect
.effectFree
&& !flags
.effectFree
) {
5821 interp
.collect
.effectFree
= false;
5822 if (any(interp
.collect
.opts
& CollectionOpts::EffectFreeOnly
)) {
5823 env
.collect
.mInstrState
.clear();
5824 FTRACE(2, " Bailing because not effect free\n");
5825 return finish(NoBlockId
);
// The instruction returned from the function: it must be the block's last
// and the block can have no fallthrough.  Record the returned type/param.
5829 if (flags
.returned
) {
5830 always_assert(idx
== size
);
5831 if (env
.reprocess
) continue;
5833 always_assert(interp
.blk
->fallthrough
== NoBlockId
);
5834 assertx(!ret
.returned
);
5835 FTRACE(2, " returned {}\n", show(*flags
.returned
));
5836 ret
.retParam
= flags
.retParam
;
5837 ret
.returned
= flags
.returned
;
5838 return finish(NoBlockId
);
// The instruction resolved to a single jump destination (must be the
// block's last instruction).
5841 if (flags
.jmpDest
!= NoBlockId
) {
5842 always_assert(idx
== size
);
// Decide whether the destination is reached as a fallthrough: either the
// branch was never taken, or the taken branch leaves no trailing
// (non-call) control-flow instruction behind.
5843 auto const hasFallthrough
= [&] {
5844 if (flags
.jmpDest
!= interp
.blk
->fallthrough
) {
5845 FTRACE(2, " <took branch; no fallthrough>\n");
5846 auto const last
= last_op(env
);
5847 return !last
|| !instrIsNonCallControlFlow(last
->op
);
5849 FTRACE(2, " <branch never taken>\n");
5853 if (hasFallthrough
) retryFallthrough
= flags
.jmpDest
;
5854 if (env
.reprocess
) continue;
5855 finish_tracked_elems(env
, 0);
// Chase the destination through speculatable successor blocks (updating
// the taken edge), then propagate the current state to it.
5856 auto const newDest
= speculateHelper(env
, flags
.jmpDest
, true);
5857 propagate(newDest
, &interp
.state
);
5858 return finish(hasFallthrough
? newDest
: NoBlockId
);
// The state became unreachable after this instruction: nothing below it in
// the block can execute.
5861 if (interp
.state
.unreachable
) {
5862 if (env
.reprocess
) {
5866 FTRACE(2, " <bytecode fallthrough is unreachable>\n");
5867 finish_tracked_elems(env
, 0);
5868 return finish(NoBlockId
);
// Normal end of block: speculate through the fallthrough edge (without
// updating any taken edges) and propagate the resulting state.
5872 FTRACE(2, " <end block>\n");
5873 if (retryFallthrough
!= NoBlockId
) {
5874 retryFallthrough
= speculateHelper(env
, retryFallthrough
, false);
5875 propagate(retryFallthrough
, &interp
.state
);
5877 return finish(retryFallthrough
);
// Interpret a single bytecode with a no-op propagation callback and return
// the resulting per-instruction flags.
// NOTE(review): the `return env.flags;`-style tail (and possibly other
// lines) appears to be missing from this extraction.
5880 StepFlags
step(Interp
& interp
, const Bytecode
& op
) {
// Propagation does nothing during a single-step interpretation.
5881 auto noop
= [] (BlockId
, const State
*) {};
5882 ISS env
{ interp
, noop
};
5884 default_dispatch(env
, op
);
// If the state became unreachable, drop any member-instruction state.
5885 if (env
.state
.unreachable
) {
5886 env
.collect
.mInstrState
.clear();
// Single-step interpretation must not leave tracked array elements behind.
5888 assertx(env
.trackedElems
.empty());
// Common dispatch wrapper: before interpreting `op`, finish any tracked
// (in-construction) array elements the instruction could observe, then
// handle terminal/unreachable cleanup afterwards.
// NOTE(review): the switch header for `pops`, the actual dispatch call,
// and several branch bodies appear to be missing from this extraction.
5892 void default_dispatch(ISS
& env
, const Bytecode
& op
) {
5893 if (!env
.trackedElems
.empty()) {
// Number of stack cells whose tracked elements must be finished before
// the instruction runs; AddNewElemC keeps its base alive (numPop - 1).
5894 auto const pops
= [&] () -> uint32_t {
5897 case Op::AddNewElemC
:
5898 return numPop(op
) - 1;
5907 finish_tracked_elems(env
, env
.state
.stack
.size() - pops
);
// Terminal instruction (TF flag) with no recorded jump destination...
5910 if (instrFlags(op
.op
) & TF
&& env
.flags
.jmpDest
== NoBlockId
) {
// ...otherwise, if the state became unreachable, clear the
// member-instruction state.
5912 } else if (env
.state
.unreachable
) {
5913 env
.collect
.mInstrState
.clear();
5917 folly::Optional
<Type
> thisType(const Index
& index
, Context ctx
) {
5918 return thisTypeFromContext(index
, ctx
);
5921 //////////////////////////////////////////////////////////////////////