/*
   +----------------------------------------------------------------------+
   | HipHop for PHP                                                       |
   +----------------------------------------------------------------------+
   | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
   +----------------------------------------------------------------------+
   | This source file is subject to version 3.01 of the PHP license,      |
   | that is bundled with this package in the file LICENSE, and is        |
   | available through the world-wide-web at the following url:           |
   | http://www.php.net/license/3_01.txt                                  |
   | If you did not receive a copy of the PHP license and are unable to   |
   | obtain it through the world-wide-web, please send a note to          |
   | license@php.net so we can mail you a copy immediately.               |
   +----------------------------------------------------------------------+
*/
16 #include "hphp/hhbbc/interp.h"
23 #include <folly/gen/Base.h>
24 #include <folly/gen/String.h>
26 #include "hphp/util/hash-set.h"
27 #include "hphp/util/trace.h"
28 #include "hphp/runtime/base/array-init.h"
29 #include "hphp/runtime/base/array-iterator.h"
30 #include "hphp/runtime/base/collections.h"
31 #include "hphp/runtime/base/static-string-table.h"
32 #include "hphp/runtime/base/tv-arith.h"
33 #include "hphp/runtime/base/tv-comparisons.h"
34 #include "hphp/runtime/base/tv-conversions.h"
35 #include "hphp/runtime/base/type-structure.h"
36 #include "hphp/runtime/base/type-structure-helpers.h"
37 #include "hphp/runtime/base/type-structure-helpers-defs.h"
38 #include "hphp/runtime/vm/runtime.h"
39 #include "hphp/runtime/vm/unit-util.h"
41 #include "hphp/runtime/ext/hh/ext_hh.h"
43 #include "hphp/hhbbc/analyze.h"
44 #include "hphp/hhbbc/bc.h"
45 #include "hphp/hhbbc/cfg.h"
46 #include "hphp/hhbbc/class-util.h"
47 #include "hphp/hhbbc/eval-cell.h"
48 #include "hphp/hhbbc/index.h"
49 #include "hphp/hhbbc/interp-state.h"
50 #include "hphp/hhbbc/optimize.h"
51 #include "hphp/hhbbc/representation.h"
52 #include "hphp/hhbbc/type-builtins.h"
53 #include "hphp/hhbbc/type-ops.h"
54 #include "hphp/hhbbc/type-structure.h"
55 #include "hphp/hhbbc/type-system.h"
56 #include "hphp/hhbbc/unit-util.h"
57 #include "hphp/hhbbc/wide-func.h"
59 #include "hphp/hhbbc/stats.h"
61 #include "hphp/hhbbc/interp-internal.h"
63 namespace HPHP
{ namespace HHBBC
{
65 //////////////////////////////////////////////////////////////////////
69 const StaticString
s_MethCallerHelper("__SystemLib\\MethCallerHelper");
70 const StaticString
s_PHP_Incomplete_Class("__PHP_Incomplete_Class");
71 const StaticString
s_IMemoizeParam("HH\\IMemoizeParam");
72 const StaticString
s_getInstanceKey("getInstanceKey");
73 const StaticString
s_Closure("Closure");
74 const StaticString
s_this("HH\\this");
76 bool poppable(Op op
) {
88 case Op::NewDictArray
:
97 void interpStep(ISS
& env
, const Bytecode
& bc
);
99 void record(ISS
& env
, const Bytecode
& bc
) {
100 if (bc
.srcLoc
!= env
.srcLoc
) {
102 tmp
.srcLoc
= env
.srcLoc
;
103 return record(env
, tmp
);
106 if (!env
.replacedBcs
.size() &&
107 env
.unchangedBcs
< env
.blk
.hhbcs
.size() &&
108 bc
== env
.blk
.hhbcs
[env
.unchangedBcs
]) {
113 ITRACE(2, " => {}\n", show(env
.ctx
.func
, bc
));
114 env
.replacedBcs
.push_back(bc
);
117 // The number of pops as seen by interp.
118 uint32_t numPop(const Bytecode
& bc
) {
119 if (bc
.op
== Op::CGetL2
) return 1;
123 // The number of pushes as seen by interp.
124 uint32_t numPush(const Bytecode
& bc
) {
125 if (bc
.op
== Op::CGetL2
) return 2;
129 void reprocess(ISS
& env
) {
130 env
.reprocess
= true;
133 ArrayData
** add_elem_array(ISS
& env
) {
134 auto const idx
= env
.trackedElems
.back().idx
;
135 if (idx
< env
.unchangedBcs
) {
136 auto const DEBUG_ONLY
& bc
= env
.blk
.hhbcs
[idx
];
137 assertx(bc
.op
== Op::Concat
);
140 assertx(idx
>= env
.unchangedBcs
);
141 auto& bc
= env
.replacedBcs
[idx
- env
.unchangedBcs
];
142 auto arr
= [&] () -> const ArrayData
** {
144 case Op::Vec
: return &bc
.Vec
.arr1
;
145 case Op::Dict
: return &bc
.Dict
.arr1
;
146 case Op::Keyset
: return &bc
.Keyset
.arr1
;
147 case Op::Concat
: return nullptr;
148 default: not_reached();
151 return const_cast<ArrayData
**>(arr
);
154 bool start_add_elem(ISS
& env
, Type
& ty
, Op op
) {
155 auto value
= tvNonStatic(ty
);
156 if (!value
|| !isArrayLikeType(value
->m_type
)) return false;
158 if (op
== Op::AddElemC
) {
159 reduce(env
, bc::PopC
{}, bc::PopC
{}, bc::PopC
{});
161 reduce(env
, bc::PopC
{}, bc::PopC
{});
163 env
.trackedElems
.emplace_back(
164 env
.state
.stack
.size(),
165 env
.unchangedBcs
+ env
.replacedBcs
.size()
168 auto const arr
= value
->m_data
.parr
;
169 env
.replacedBcs
.push_back(
171 if (arr
->isVecType()) return bc::Vec
{ arr
};
172 if (arr
->isDictType()) return bc::Dict
{ arr
};
173 if (arr
->isKeysetType()) return bc::Keyset
{ arr
};
174 always_assert(false);
177 env
.replacedBcs
.back().srcLoc
= env
.srcLoc
;
178 ITRACE(2, "(addelem* -> {}\n",
179 show(env
.ctx
.func
, env
.replacedBcs
.back()));
180 push(env
, std::move(ty
));
186 * Alter the saved add_elem array in a way that preserves its provenance tag
187 * or adds a new one if applicable (i.e. the array is a vec or dict)
189 * The `mutate` parameter should be callable with an ArrayData** pointing to the
190 * add_elem array cached in the interp state and should write to it directly.
192 template <typename Fn
>
193 bool mutate_add_elem_array(ISS
& env
, Fn
&& mutate
) {
194 auto const arr
= add_elem_array(env
);
195 if (!arr
) return false;
200 void finish_tracked_elem(ISS
& env
) {
201 auto const arr
= add_elem_array(env
);
202 env
.trackedElems
.pop_back();
203 if (arr
) ArrayData::GetScalarArray(arr
);
206 void finish_tracked_elems(ISS
& env
, size_t depth
) {
207 while (!env
.trackedElems
.empty() && env
.trackedElems
.back().depth
>= depth
) {
208 finish_tracked_elem(env
);
212 uint32_t id_from_slot(ISS
& env
, int slot
) {
213 auto const id
= (env
.state
.stack
.end() - (slot
+ 1))->id
;
214 assertx(id
== StackElem::NoId
||
215 id
< env
.unchangedBcs
+ env
.replacedBcs
.size());
219 const Bytecode
* op_from_id(ISS
& env
, uint32_t id
) {
220 if (id
== StackElem::NoId
) return nullptr;
221 if (id
< env
.unchangedBcs
) return &env
.blk
.hhbcs
[id
];
222 auto const off
= id
- env
.unchangedBcs
;
223 assertx(off
< env
.replacedBcs
.size());
224 return &env
.replacedBcs
[off
];
227 void ensure_mutable(ISS
& env
, uint32_t id
) {
228 if (id
< env
.unchangedBcs
) {
229 auto const delta
= env
.unchangedBcs
- id
;
230 env
.replacedBcs
.resize(env
.replacedBcs
.size() + delta
);
231 for (auto i
= env
.replacedBcs
.size(); i
-- > delta
; ) {
232 env
.replacedBcs
[i
] = std::move(env
.replacedBcs
[i
- delta
]);
234 for (auto i
= 0; i
< delta
; i
++) {
235 env
.replacedBcs
[i
] = env
.blk
.hhbcs
[id
+ i
];
237 env
.unchangedBcs
= id
;
242 * Turn the instruction that wrote the slot'th element from the top of
243 * the stack into a Nop, adjusting the stack appropriately. If its the
244 * previous instruction, just rewind.
246 int kill_by_slot(ISS
& env
, int slot
) {
248 auto const id
= id_from_slot(env
, slot
);
249 assertx(id
!= StackElem::NoId
);
250 auto const sz
= env
.state
.stack
.size();
251 // if its the last bytecode we processed, we can rewind and avoid
252 // the reprocess overhead.
253 if (id
== env
.unchangedBcs
+ env
.replacedBcs
.size() - 1) {
255 return env
.state
.stack
.size() - sz
;
257 ensure_mutable(env
, id
);
258 auto& bc
= env
.replacedBcs
[id
- env
.unchangedBcs
];
259 auto const pop
= numPop(bc
);
260 auto const push
= numPush(bc
);
261 ITRACE(2, "kill_by_slot: slot={}, id={}, was {}\n",
262 slot
, id
, show(env
.ctx
.func
, bc
));
263 bc
= bc_with_loc(bc
.srcLoc
, bc::Nop
{});
264 env
.state
.stack
.kill(pop
, push
, id
);
266 return env
.state
.stack
.size() - sz
;
270 * Check whether an instruction can be inserted immediately after the
271 * slot'th stack entry was written. This is only possible if slot was
272 * the last thing written by the instruction that wrote it (ie some
273 * bytecodes push more than one value - there's no way to insert a
274 * bytecode that will write *between* those values on the stack).
276 bool can_insert_after_slot(ISS
& env
, int slot
) {
277 auto const it
= env
.state
.stack
.end() - (slot
+ 1);
278 if (it
->id
== StackElem::NoId
) return false;
279 if (auto const next
= it
.next_elem(1)) {
280 return next
->id
!= it
->id
;
286 * Insert a sequence of bytecodes after the instruction that wrote the
287 * slot'th element from the top of the stack.
289 * The entire sequence pops numPop, and pushes numPush stack
290 * elements. Only the last bytecode can push anything onto the stack,
291 * and the types it pushes are pointed to by types (if you have more
292 * than one bytecode that pushes, call this more than once).
294 void insert_after_slot(ISS
& env
, int slot
,
295 int numPop
, int numPush
, const Type
* types
,
296 const BytecodeVec
& bcs
) {
297 assertx(can_insert_after_slot(env
, slot
));
299 auto const id
= id_from_slot(env
, slot
);
300 assertx(id
!= StackElem::NoId
);
301 ensure_mutable(env
, id
+ 1);
302 env
.state
.stack
.insert_after(numPop
, numPush
, types
, bcs
.size(), id
);
303 env
.replacedBcs
.insert(env
.replacedBcs
.begin() + (id
+ 1 - env
.unchangedBcs
),
304 bcs
.begin(), bcs
.end());
305 using namespace folly::gen
;
306 ITRACE(2, "insert_after_slot: slot={}, id={} [{}]\n",
309 map([&] (const Bytecode
& bc
) { return show(env
.ctx
.func
, bc
); }) |
310 unsplit
<std::string
>(", "));
313 Bytecode
& mutate_last_op(ISS
& env
) {
314 assertx(will_reduce(env
));
316 if (!env
.replacedBcs
.size()) {
317 assertx(env
.unchangedBcs
);
318 env
.replacedBcs
.push_back(env
.blk
.hhbcs
[--env
.unchangedBcs
]);
320 return env
.replacedBcs
.back();
324 * Can be used to replace one op with another when rewind/reduce isn't
325 * safe (eg to change a SetL to a PopL - its not safe to rewind/reduce
326 * because the SetL changed both the Type and the equiv of its local).
328 void replace_last_op(ISS
& env
, Bytecode
&& bc
) {
329 auto& last
= mutate_last_op(env
);
330 auto const newPush
= numPush(bc
);
331 auto const oldPush
= numPush(last
);
332 auto const newPops
= numPop(bc
);
333 auto const oldPops
= numPop(last
);
335 assertx(newPush
<= oldPush
);
336 assertx(newPops
<= oldPops
);
338 if (newPush
!= oldPush
|| newPops
!= oldPops
) {
340 env
.state
.stack
.rewind(oldPops
- newPops
, oldPush
- newPush
);
342 ITRACE(2, "(replace: {}->{}\n",
343 show(env
.ctx
.func
, last
), show(env
.ctx
.func
, bc
));
344 last
= bc_with_loc(last
.srcLoc
, bc
);
349 //////////////////////////////////////////////////////////////////////
351 const Bytecode
* op_from_slot(ISS
& env
, int slot
, int prev
/* = 0 */) {
352 if (!will_reduce(env
)) return nullptr;
353 auto const id
= id_from_slot(env
, slot
);
354 if (id
== StackElem::NoId
) return nullptr;
355 if (id
< prev
) return nullptr;
356 return op_from_id(env
, id
- prev
);
359 const Bytecode
* last_op(ISS
& env
, int idx
/* = 0 */) {
360 if (!will_reduce(env
)) return nullptr;
362 if (env
.replacedBcs
.size() > idx
) {
363 return &env
.replacedBcs
[env
.replacedBcs
.size() - idx
- 1];
366 idx
-= env
.replacedBcs
.size();
367 if (env
.unchangedBcs
> idx
) {
368 return &env
.blk
.hhbcs
[env
.unchangedBcs
- idx
- 1];
374 * Assuming bc was just interped, rewind to the state immediately
375 * before it was interped.
377 * This is rarely what you want. Its used for constprop, where the
378 * bytecode has been interped, but not yet committed to the bytecode
379 * stream. We want to undo its effects, the spit out pops for its
380 * inputs, and commit a constant-generating bytecode.
382 void rewind(ISS
& env
, const Bytecode
& bc
) {
384 ITRACE(2, "(rewind: {}\n", show(env
.ctx
.func
, bc
));
385 env
.state
.stack
.rewind(numPop(bc
), numPush(bc
));
389 * Used for peephole opts. Will undo the *stack* effects of the last n
390 * committed byte codes, and remove them from the bytecode stream, in
391 * preparation for writing out an optimized replacement sequence.
393 * WARNING: Does not undo other changes to state, such as local types,
394 * local equivalency, and thisType. Take care when rewinding such
397 void rewind(ISS
& env
, int n
) {
400 while (env
.replacedBcs
.size()) {
401 rewind(env
, env
.replacedBcs
.back());
402 env
.replacedBcs
.pop_back();
406 rewind(env
, env
.blk
.hhbcs
[--env
.unchangedBcs
]);
410 void impl_vec(ISS
& env
, bool reduce
, BytecodeVec
&& bcs
) {
411 if (!will_reduce(env
)) reduce
= false;
414 using namespace folly::gen
;
415 ITRACE(2, "(reduce: {}\n",
417 map([&] (const Bytecode
& bc
) { return show(env
.ctx
.func
, bc
); }) |
418 unsplit
<std::string
>(", "));
420 auto ef
= !env
.flags
.reduced
|| env
.flags
.effectFree
;
422 for (auto const& bc
: bcs
) {
424 env
.flags
.jmpDest
== NoBlockId
&&
425 "you can't use impl with branching opcodes before last position"
428 if (!env
.flags
.effectFree
) ef
= false;
429 if (env
.state
.unreachable
|| env
.flags
.jmpDest
!= NoBlockId
) break;
431 env
.flags
.effectFree
= ef
;
432 } else if (!env
.flags
.reduced
) {
435 env
.flags
.reduced
= true;
440 SCOPE_EXIT
{ env
.analyzeDepth
--; };
442 // We should be at the start of a bytecode.
443 assertx(env
.flags
.wasPEI
&&
444 !env
.flags
.canConstProp
&&
445 !env
.flags
.effectFree
);
447 env
.flags
.wasPEI
= false;
448 env
.flags
.canConstProp
= true;
449 env
.flags
.effectFree
= true;
451 for (auto const& bc
: bcs
) {
452 assertx(env
.flags
.jmpDest
== NoBlockId
&&
453 "you can't use impl with branching opcodes before last position");
455 auto const wasPEI
= env
.flags
.wasPEI
;
456 auto const canConstProp
= env
.flags
.canConstProp
;
457 auto const effectFree
= env
.flags
.effectFree
;
459 ITRACE(3, " (impl {}\n", show(env
.ctx
.func
, bc
));
460 env
.flags
.wasPEI
= true;
461 env
.flags
.canConstProp
= false;
462 env
.flags
.effectFree
= false;
463 default_dispatch(env
, bc
);
465 if (env
.flags
.canConstProp
) {
467 if (env
.flags
.effectFree
&& !env
.flags
.wasPEI
) return;
468 auto stk
= env
.state
.stack
.end();
469 for (auto i
= bc
.numPush(); i
--; ) {
471 if (!is_scalar(stk
->type
)) return;
473 env
.flags
.effectFree
= true;
474 env
.flags
.wasPEI
= false;
478 // If any of the opcodes in the impl list said they could throw,
479 // then the whole thing could throw.
480 env
.flags
.wasPEI
= env
.flags
.wasPEI
|| wasPEI
;
481 env
.flags
.canConstProp
= env
.flags
.canConstProp
&& canConstProp
;
482 env
.flags
.effectFree
= env
.flags
.effectFree
&& effectFree
;
483 if (env
.state
.unreachable
|| env
.flags
.jmpDest
!= NoBlockId
) break;
487 LocalId
equivLocalRange(ISS
& env
, const LocalRange
& range
) {
488 auto bestRange
= range
.first
;
489 auto equivFirst
= findLocEquiv(env
, range
.first
);
490 if (equivFirst
== NoLocalId
) return bestRange
;
492 if (equivFirst
< bestRange
) {
493 auto equivRange
= [&] {
494 // local equivalency includes differing by Uninit, so we need
495 // to check the types.
496 if (peekLocRaw(env
, equivFirst
) != peekLocRaw(env
, range
.first
)) {
500 for (uint32_t i
= 1; i
< range
.count
; ++i
) {
501 if (!locsAreEquiv(env
, equivFirst
+ i
, range
.first
+ i
) ||
502 peekLocRaw(env
, equivFirst
+ i
) !=
503 peekLocRaw(env
, range
.first
+ i
)) {
512 bestRange
= equivFirst
;
515 equivFirst
= findLocEquiv(env
, equivFirst
);
516 assertx(equivFirst
!= NoLocalId
);
517 } while (equivFirst
!= range
.first
);
522 SString
getNameFromType(const Type
& t
) {
523 if (!t
.subtypeOf(BStr
) && !t
.subtypeOf(BLazyCls
)) return nullptr;
524 if (is_specialized_string(t
)) return sval_of(t
);
525 if (is_specialized_lazycls(t
)) return lazyclsval_of(t
);
529 //////////////////////////////////////////////////////////////////////
534 * Very simple check to see if the top level class is reified or not
535 * If not we can reduce a VerifyTypeTS to a regular VerifyType
537 bool shouldReduceToNonReifiedVerifyType(ISS
& env
, SArray ts
) {
538 if (get_ts_kind(ts
) != TypeStructure::Kind::T_unresolved
) return false;
539 auto const clsName
= get_ts_classname(ts
);
540 auto const rcls
= env
.index
.resolve_class(env
.ctx
, clsName
);
541 if (rcls
&& rcls
->resolved()) return !rcls
->cls()->hasReifiedGenerics
;
542 // Type aliases cannot have reified generics
543 return env
.index
.lookup_type_alias(clsName
) != nullptr;
548 //////////////////////////////////////////////////////////////////////
550 namespace interp_step
{
552 void in(ISS
& env
, const bc::Nop
&) { reduce(env
); }
554 void in(ISS
& env
, const bc::PopC
&) {
555 if (auto const last
= last_op(env
)) {
556 if (poppable(last
->op
)) {
560 if (last
->op
== Op::This
) {
561 // can't rewind This because it removed null from thisType (so
562 // CheckThis at this point is a no-op) - and note that it must
563 // have *been* nullable, or we'd have turned it into a
564 // `BareThis NeverNull`
565 replace_last_op(env
, bc::CheckThis
{});
568 if (last
->op
== Op::SetL
) {
569 // can't rewind a SetL because it changes local state
570 replace_last_op(env
, bc::PopL
{ last
->SetL
.loc1
});
573 if (last
->op
== Op::CGetL2
) {
574 auto loc
= last
->CGetL2
.nloc1
;
576 return reduce(env
, bc::PopC
{}, bc::CGetL
{ loc
});
584 void in(ISS
& env
, const bc::PopU
&) {
585 if (auto const last
= last_op(env
)) {
586 if (last
->op
== Op::NullUninit
) {
591 effect_free(env
); popU(env
);
594 void in(ISS
& env
, const bc::PopU2
&) {
596 auto equiv
= topStkEquiv(env
);
597 auto val
= popC(env
);
599 push(env
, std::move(val
), equiv
!= StackDupId
? equiv
: NoLocalId
);
602 void in(ISS
& env
, const bc::EntryNop
&) { effect_free(env
); }
604 void in(ISS
& env
, const bc::Dup
& /*op*/) {
606 auto equiv
= topStkEquiv(env
);
607 auto val
= popC(env
);
608 push(env
, val
, equiv
);
609 push(env
, std::move(val
), StackDupId
);
612 void in(ISS
& env
, const bc::AssertRATL
& op
) {
613 mayReadLocal(env
, op
.loc1
);
617 void in(ISS
& env
, const bc::AssertRATStk
&) {
621 void in(ISS
& env
, const bc::BreakTraceHint
&) { effect_free(env
); }
623 void in(ISS
& env
, const bc::CGetCUNop
&) {
625 auto const t
= popCU(env
);
626 push(env
, remove_uninit(t
));
629 void in(ISS
& env
, const bc::UGetCUNop
&) {
635 void in(ISS
& env
, const bc::Null
&) {
637 push(env
, TInitNull
);
640 void in(ISS
& env
, const bc::NullUninit
&) {
645 void in(ISS
& env
, const bc::True
&) {
650 void in(ISS
& env
, const bc::False
&) {
655 void in(ISS
& env
, const bc::Int
& op
) {
657 push(env
, ival(op
.arg1
));
660 void in(ISS
& env
, const bc::Double
& op
) {
662 push(env
, dval(op
.dbl1
));
665 void in(ISS
& env
, const bc::String
& op
) {
667 push(env
, sval(op
.str1
));
670 void in(ISS
& env
, const bc::Vec
& op
) {
671 assertx(op
.arr1
->isVecType());
673 push(env
, vec_val(op
.arr1
));
676 void in(ISS
& env
, const bc::Dict
& op
) {
677 assertx(op
.arr1
->isDictType());
679 push(env
, dict_val(op
.arr1
));
682 void in(ISS
& env
, const bc::Keyset
& op
) {
683 assertx(op
.arr1
->isKeysetType());
685 push(env
, keyset_val(op
.arr1
));
688 void in(ISS
& env
, const bc::NewDictArray
& op
) {
690 push(env
, op
.arg1
== 0 ? dict_empty() : some_dict_empty());
693 void in(ISS
& env
, const bc::NewStructDict
& op
) {
694 auto map
= MapElems
{};
695 for (auto it
= op
.keys
.end(); it
!= op
.keys
.begin(); ) {
697 make_tv
<KindOfPersistentString
>(*--it
),
698 MapElem::SStrKey(popC(env
))
701 push(env
, dict_map(std::move(map
)));
706 void in(ISS
& env
, const bc::NewVec
& op
) {
707 auto elems
= std::vector
<Type
>{};
708 elems
.reserve(op
.arg1
);
709 for (auto i
= uint32_t{0}; i
< op
.arg1
; ++i
) {
710 elems
.push_back(std::move(topC(env
, op
.arg1
- i
- 1)));
712 discard(env
, op
.arg1
);
715 push(env
, vec(std::move(elems
)));
718 void in(ISS
& env
, const bc::NewKeysetArray
& op
) {
719 assertx(op
.arg1
> 0);
720 auto map
= MapElems
{};
724 auto effectful
= false;
725 for (auto i
= uint32_t{0}; i
< op
.arg1
; ++i
) {
726 auto [key
, promotion
] = promote_classlike_to_key(popC(env
));
728 auto const keyValid
= key
.subtypeOf(BArrKey
);
729 if (!keyValid
) key
= intersection_of(std::move(key
), TArrKey
);
730 if (key
.is(BBottom
)) {
737 if (auto const v
= tv(key
)) {
738 map
.emplace_front(*v
, MapElem::KeyFromType(key
, key
));
744 ty
|= std::move(key
);
745 effectful
|= !keyValid
|| (promotion
== Promotion::YesMightThrow
);
754 push(env
, keyset_map(std::move(map
)));
756 push(env
, keyset_n(ty
));
764 void in(ISS
& env
, const bc::AddElemC
&) {
765 auto const v
= topC(env
, 0);
766 auto const [k
, promotion
] = promote_classlike_to_key(topC(env
, 1));
767 auto const promoteMayThrow
= (promotion
== Promotion::YesMightThrow
);
769 auto inTy
= (env
.state
.stack
.end() - 3).unspecialize();
770 // Unspecialize modifies the stack location
771 if (env
.undo
) env
.undo
->onStackWrite(env
.state
.stack
.size() - 3, inTy
);
773 auto outTy
= [&] (const Type
& key
) -> Optional
<Type
> {
774 if (!key
.subtypeOf(BArrKey
)) return std::nullopt
;
775 if (inTy
.subtypeOf(BDict
)) {
776 auto const r
= array_like_set(std::move(inTy
), key
, v
);
777 if (!r
.second
) return r
.first
;
782 if (outTy
&& !promoteMayThrow
&& will_reduce(env
)) {
783 if (!env
.trackedElems
.empty() &&
784 env
.trackedElems
.back().depth
+ 3 == env
.state
.stack
.size()) {
785 auto const handled
= [&] (const Type
& key
) {
786 if (!key
.subtypeOf(BArrKey
)) return false;
788 if (!ktv
) return false;
790 if (!vtv
) return false;
791 return mutate_add_elem_array(env
, [&](ArrayData
** arr
) {
792 *arr
= (*arr
)->setMove(*ktv
, *vtv
);
796 (env
.state
.stack
.end() - 3)->type
= std::move(*outTy
);
797 reduce(env
, bc::PopC
{}, bc::PopC
{});
798 ITRACE(2, "(addelem* -> {}\n",
800 env
.replacedBcs
[env
.trackedElems
.back().idx
- env
.unchangedBcs
]));
804 if (start_add_elem(env
, *outTy
, Op::AddElemC
)) return;
809 finish_tracked_elems(env
, env
.state
.stack
.size());
811 if (!outTy
) return push(env
, TInitCell
);
813 if (outTy
->subtypeOf(BBottom
)) {
815 } else if (!promoteMayThrow
) {
819 push(env
, std::move(*outTy
));
822 void in(ISS
& env
, const bc::AddNewElemC
&) {
824 auto inTy
= (env
.state
.stack
.end() - 2).unspecialize();
825 // Unspecialize modifies the stack location
826 if (env
.undo
) env
.undo
->onStackWrite(env
.state
.stack
.size() - 2, inTy
);
828 auto outTy
= [&] () -> Optional
<Type
> {
829 if (inTy
.subtypeOf(BVec
| BKeyset
)) {
830 auto const r
= array_like_newelem(std::move(inTy
), v
);
831 if (!r
.second
) return r
.first
;
836 if (outTy
&& will_reduce(env
)) {
837 if (!env
.trackedElems
.empty() &&
838 env
.trackedElems
.back().depth
+ 2 == env
.state
.stack
.size()) {
839 auto const handled
= [&] {
841 if (!vtv
) return false;
842 return mutate_add_elem_array(env
, [&](ArrayData
** arr
) {
843 *arr
= (*arr
)->appendMove(*vtv
);
847 (env
.state
.stack
.end() - 2)->type
= std::move(*outTy
);
848 reduce(env
, bc::PopC
{});
849 ITRACE(2, "(addelem* -> {}\n",
851 env
.replacedBcs
[env
.trackedElems
.back().idx
- env
.unchangedBcs
]));
855 if (start_add_elem(env
, *outTy
, Op::AddNewElemC
)) {
862 finish_tracked_elems(env
, env
.state
.stack
.size());
864 if (!outTy
) return push(env
, TInitCell
);
866 if (outTy
->is(BBottom
)) {
871 push(env
, std::move(*outTy
));
874 void in(ISS
& env
, const bc::NewCol
& op
) {
875 auto const type
= static_cast<CollectionType
>(op
.subop1
);
876 auto const name
= collections::typeToString(type
);
877 push(env
, objExact(env
.index
.builtin_class(name
)));
881 void in(ISS
& env
, const bc::NewPair
& /*op*/) {
882 popC(env
); popC(env
);
883 auto const name
= collections::typeToString(CollectionType::Pair
);
884 push(env
, objExact(env
.index
.builtin_class(name
)));
888 void in(ISS
& env
, const bc::ColFromArray
& op
) {
889 auto const src
= popC(env
);
890 auto const type
= static_cast<CollectionType
>(op
.subop1
);
891 assertx(type
!= CollectionType::Pair
);
892 if (type
== CollectionType::Vector
|| type
== CollectionType::ImmVector
) {
893 if (src
.subtypeOf(TVec
)) effect_free(env
);
895 assertx(type
== CollectionType::Map
||
896 type
== CollectionType::ImmMap
||
897 type
== CollectionType::Set
||
898 type
== CollectionType::ImmSet
);
899 if (src
.subtypeOf(TDict
)) effect_free(env
);
901 auto const name
= collections::typeToString(type
);
902 push(env
, objExact(env
.index
.builtin_class(name
)));
905 void in(ISS
& env
, const bc::CnsE
& op
) {
906 auto t
= env
.index
.lookup_constant(env
.ctx
, op
.str1
);
907 if (t
.strictSubtypeOf(TInitCell
)) {
908 // constprop will take care of nothrow *if* its a constant; and if
909 // its not, we might trigger autoload.
912 push(env
, std::move(t
));
917 void clsCnsImpl(ISS
& env
, const Type
& cls
, const Type
& name
) {
918 if (!cls
.couldBe(BCls
) || !name
.couldBe(BStr
)) {
924 auto lookup
= env
.index
.lookup_class_constant(env
.ctx
, cls
, name
);
925 if (lookup
.found
== TriBool::No
) {
931 if (cls
.subtypeOf(BCls
) &&
932 name
.subtypeOf(BStr
) &&
933 lookup
.found
== TriBool::Yes
&&
934 !lookup
.mightThrow
) {
939 push(env
, std::move(lookup
.ty
));
944 void in(ISS
& env
, const bc::ClsCns
& op
) {
945 auto const cls
= topC(env
);
947 if (cls
.subtypeOf(BCls
) && is_specialized_cls(cls
)) {
948 auto const dcls
= dcls_of(cls
);
949 if (dcls
.type
== DCls::Exact
) {
950 return reduce(env
, bc::PopC
{}, bc::ClsCnsD
{ op
.str1
, dcls
.cls
.name() });
955 clsCnsImpl(env
, cls
, sval(op
.str1
));
958 void in(ISS
& env
, const bc::ClsCnsL
& op
) {
959 auto const cls
= topC(env
);
960 auto const name
= locRaw(env
, op
.loc1
);
962 if (name
.subtypeOf(BStr
) && is_specialized_string(name
)) {
963 return reduce(env
, bc::ClsCns
{ sval_of(name
) });
967 clsCnsImpl(env
, cls
, name
);
970 void in(ISS
& env
, const bc::ClsCnsD
& op
) {
971 auto const rcls
= env
.index
.resolve_class(env
.ctx
, op
.str2
);
972 if (!rcls
|| !rcls
->resolved()) {
973 push(env
, TInitCell
);
976 clsCnsImpl(env
, clsExact(*rcls
), sval(op
.str1
));
979 void in(ISS
& env
, const bc::File
&) { effect_free(env
); push(env
, TSStr
); }
980 void in(ISS
& env
, const bc::Dir
&) { effect_free(env
); push(env
, TSStr
); }
981 void in(ISS
& env
, const bc::Method
&) { effect_free(env
); push(env
, TSStr
); }
983 void in(ISS
& env
, const bc::FuncCred
&) { effect_free(env
); push(env
, TObj
); }
985 void in(ISS
& env
, const bc::ClassName
& op
) {
986 auto const ty
= topC(env
);
987 if (ty
.subtypeOf(BCls
) && is_specialized_cls(ty
)) {
988 auto const dcls
= dcls_of(ty
);
989 if (dcls
.type
== DCls::Exact
) {
992 bc::String
{ dcls
.cls
.name() });
1000 void in(ISS
& env
, const bc::LazyClassFromClass
&) {
1001 auto const ty
= topC(env
);
1002 if (ty
.subtypeOf(BCls
) && is_specialized_cls(ty
)) {
1003 auto const dcls
= dcls_of(ty
);
1004 if (dcls
.type
== DCls::Exact
) {
1007 bc::LazyClass
{ dcls
.cls
.name() });
1012 push(env
, TLazyCls
);
1015 void concatHelper(ISS
& env
, uint32_t n
) {
1016 auto changed
= false;
1017 auto side_effects
= false;
1018 if (will_reduce(env
)) {
1019 auto litstr
= [&] (SString next
, uint32_t i
) -> SString
{
1020 auto const t
= topC(env
, i
);
1021 auto const v
= tv(t
);
1022 if (!v
) return nullptr;
1023 if (!isStringType(v
->m_type
) && !isIntType(v
->m_type
)) return nullptr;
1024 auto const cell
= eval_cell_value(
1026 auto const s
= makeStaticString(
1028 StringData::Make(tvAsCVarRef(&*v
).toString().get(), next
) :
1029 tvAsCVarRef(&*v
).toString().get());
1030 return make_tv
<KindOfString
>(s
);
1033 if (!cell
) return nullptr;
1034 return cell
->m_data
.pstr
;
1037 auto fold
= [&] (uint32_t slot
, uint32_t num
, SString result
) {
1038 auto const cell
= make_tv
<KindOfPersistentString
>(result
);
1039 auto const ty
= from_cell(cell
);
1040 BytecodeVec bcs
{num
, bc::PopC
{}};
1041 if (num
> 1) bcs
.push_back(gen_constant(cell
));
1043 reduce(env
, std::move(bcs
));
1045 insert_after_slot(env
, slot
, num
, num
> 1 ? 1 : 0, &ty
, bcs
);
1052 for (auto i
= 0; i
< n
; i
++) {
1053 if (!topC(env
, i
).subtypeOf(BArrKey
)) {
1054 side_effects
= true;
1059 if (!side_effects
) {
1060 for (auto i
= 0; i
< n
; i
++) {
1061 auto const tracked
= !env
.trackedElems
.empty() &&
1062 env
.trackedElems
.back().depth
+ i
+ 1 == env
.state
.stack
.size();
1063 if (tracked
) finish_tracked_elems(env
, env
.trackedElems
.back().depth
);
1064 auto const prev
= op_from_slot(env
, i
);
1065 if (!prev
) continue;
1066 if ((prev
->op
== Op::Concat
&& tracked
) || prev
->op
== Op::ConcatN
) {
1067 auto const extra
= kill_by_slot(env
, i
);
1075 SString result
= nullptr;
1079 // In order to collapse literals, we need to be able to insert
1080 // pops, and a constant after the sequence that generated the
1081 // literals. We can always insert after the last instruction
1082 // though, and we only need to check the first slot of a
1084 auto const next
= !i
|| result
|| can_insert_after_slot(env
, i
) ?
1085 litstr(result
, i
) : nullptr;
1086 if (next
== staticEmptyString()) {
1088 // don't fold away empty strings if the concat could trigger exceptions
1089 if (i
== 0 && !topC(env
, 1).subtypeOf(BArrKey
)) break;
1090 if (n
== 2 && i
== 1 && !topC(env
, 0).subtypeOf(BArrKey
)) break;
1098 fold(i
- nlit
, nlit
, result
);
1108 if (nlit
> 1) fold(i
- nlit
, nlit
, result
);
1113 if (n
== 2 && !side_effects
&& will_reduce(env
)) {
1114 env
.trackedElems
.emplace_back(
1115 env
.state
.stack
.size(),
1116 env
.unchangedBcs
+ env
.replacedBcs
.size()
1124 if (!topC(env
).subtypeOf(BStr
)) {
1125 return reduce(env
, bc::CastString
{});
1131 // We can't reduce the emitted concats, or we'll end up with
1132 // infinite recursion.
1133 env
.flags
.wasPEI
= true;
1134 env
.flags
.effectFree
= false;
1135 env
.flags
.canConstProp
= false;
1137 auto concat
= [&] (uint32_t num
) {
1141 record(env
, bc::Concat
{});
1143 record(env
, bc::ConcatN
{ num
});
1151 if (n
> 1) concat(n
);
1154 void in(ISS
& env
, const bc::Concat
& /*op*/) {
1155 concatHelper(env
, 2);
1158 void in(ISS
& env
, const bc::ConcatN
& op
) {
1159 if (op
.arg1
== 2) return reduce(env
, bc::Concat
{});
1160 concatHelper(env
, op
.arg1
);
1163 template <class Op
, class Fun
>
1164 void arithImpl(ISS
& env
, const Op
& /*op*/, Fun fun
) {
1166 auto const t1
= popC(env
);
1167 auto const t2
= popC(env
);
1168 push(env
, fun(t2
, t1
));
1171 void in(ISS
& env
, const bc::Add
& op
) { arithImpl(env
, op
, typeAdd
); }
1172 void in(ISS
& env
, const bc::Sub
& op
) { arithImpl(env
, op
, typeSub
); }
1173 void in(ISS
& env
, const bc::Mul
& op
) { arithImpl(env
, op
, typeMul
); }
1174 void in(ISS
& env
, const bc::Div
& op
) { arithImpl(env
, op
, typeDiv
); }
1175 void in(ISS
& env
, const bc::Mod
& op
) { arithImpl(env
, op
, typeMod
); }
1176 void in(ISS
& env
, const bc::Pow
& op
) { arithImpl(env
, op
, typePow
); }
1177 void in(ISS
& env
, const bc::BitAnd
& op
) { arithImpl(env
, op
, typeBitAnd
); }
1178 void in(ISS
& env
, const bc::BitOr
& op
) { arithImpl(env
, op
, typeBitOr
); }
1179 void in(ISS
& env
, const bc::BitXor
& op
) { arithImpl(env
, op
, typeBitXor
); }
1180 void in(ISS
& env
, const bc::AddO
& op
) { arithImpl(env
, op
, typeAddO
); }
1181 void in(ISS
& env
, const bc::SubO
& op
) { arithImpl(env
, op
, typeSubO
); }
1182 void in(ISS
& env
, const bc::MulO
& op
) { arithImpl(env
, op
, typeMulO
); }
1183 void in(ISS
& env
, const bc::Shl
& op
) { arithImpl(env
, op
, typeShl
); }
1184 void in(ISS
& env
, const bc::Shr
& op
) { arithImpl(env
, op
, typeShr
); }
1186 void in(ISS
& env
, const bc::BitNot
& /*op*/) {
1187 auto const t
= popC(env
);
1188 auto const v
= tv(t
);
1189 if (!t
.couldBe(BInt
| BStr
| BSStr
| BLazyCls
| BCls
)) {
1190 return push(env
, TBottom
);
1195 auto cell
= eval_cell([&] {
1200 if (cell
) return push(env
, std::move(*cell
));
1202 push(env
, TInitCell
);
// Compute the result type of a Same/NSame comparison of the top two stack
// elements, plus a bool saying whether the comparison might raise. Uses stack
// equivalence (locals / StackDupId) to prove identity, with a carve-out for
// NaN: a value identical to itself still compares un-Same if it is NaN.
// NOTE(review): several original lines are elided in this excerpt, so braces
// below do not balance.
template<bool NSame>
std::pair<Type,bool> resolveSame(ISS& env) {
  auto const l1 = topStkEquiv(env, 0);
  auto const t1 = topC(env, 0);
  auto const l2 = topStkEquiv(env, 1);
  auto const t2 = topC(env, 1);
  auto warningsEnabled =
    (RuntimeOption::EvalEmitClsMethPointers ||
     RuntimeOption::EvalRaiseClassConversionWarning);
  auto const result = [&] {
    auto const v1 = tv(t1);
    auto const v2 = tv(t2);
    // The two stack elements are provably the same value if one is a dup of
    // the other, or both are equivalent to the same local.
    if (l1 == StackDupId ||
        (l1 == l2 && l1 != NoLocalId) ||
        (l1 <= MaxLocalId && l2 <= MaxLocalId && locsAreEquiv(env, l1, l2))) {
      // Identical values are Same unless they might be a NaN double.
      if (!t1.couldBe(BDbl) || !t2.couldBe(BDbl) ||
          (v1 && (v1->m_type != KindOfDouble || !std::isnan(v1->m_data.dbl))) ||
          (v2 && (v2->m_type != KindOfDouble || !std::isnan(v2->m_data.dbl)))) {
        return NSame ? TFalse : TTrue;
    // Both operands constant: fold the comparison.
    if (auto r = eval_cell_value([&]{ return tvSame(*v2, *v1); })) {
      // we wouldn't get here if cellSame raised a warning
      warningsEnabled = false;
      return r != NSame ? TTrue : TFalse;
    return NSame ? typeNSame(t1, t2) : typeSame(t1, t2);
  if (warningsEnabled && result == (NSame ? TFalse : TTrue)) {
    warningsEnabled = false;
  return { result, warningsEnabled && compare_might_raise(t1, t2) };
// Shared implementation of Same / NSame (Negate selects NSame). First tries
// strength reductions: comparing against a literal Null becomes an
// IsTypeC/IsTypeL null check (plus a Not for the negated form). Otherwise
// falls back to resolveSame and pushes its result type.
// NOTE(review): several original lines are elided in this excerpt, so braces
// below do not balance.
template<bool Negate>
void sameImpl(ISS& env) {
  if (auto const last = last_op(env)) {
    // Null just pushed: "x === null" is an IsTypeC Null check.
    if (last->op == Op::Null) {
      reduce(env, bc::IsTypeC { IsTypeOp::Null });
      if (Negate) reduce(env, bc::Not {});
    if (auto const prev = last_op(env, 1)) {
      // Null followed by a local read: turn into IsTypeL on that local.
      if (prev->op == Op::Null &&
          (last->op == Op::CGetL || last->op == Op::CGetL2 ||
           last->op == Op::CGetQuietL)) {
        auto const loc = [&]() {
          if (last->op == Op::CGetL) {
            return last->CGetL.nloc1;
          } else if (last->op == Op::CGetL2) {
            return last->CGetL2.nloc1;
          } else if (last->op == Op::CGetQuietL) {
            return NamedLocal{kInvalidLocalName, last->CGetQuietL.loc1};
          always_assert(false);
        reduce(env, bc::IsTypeL { loc, IsTypeOp::Null });
        if (Negate) reduce(env, bc::Not {});
  auto pair = resolveSame<Negate>(env);
  push(env, std::move(pair.first));
// Refine location types across a Same/NSame followed by a conditional jump.
// Computes the state for the "values are identical" arm and the "values
// differ" arm separately, propagates each to the appropriate successor, and
// returns whether the pattern was handled.
// NOTE(review): several original lines are elided in this excerpt, so braces
// below do not balance.
template<class JmpOp>
bool sameJmpImpl(ISS& env, Op sameOp, const JmpOp& jmp) {
  const StackElem* elems[2];
  env.state.stack.peek(2, elems, 1);
  auto const loc0 = elems[1]->equivLoc;
  auto const loc1 = elems[0]->equivLoc;
  // If loc0 == loc1, either they're both NoLocalId, so there's
  // nothing for us to deduce, or both stack elements are the same
  // value, so the only thing we could deduce is that they are or are
  // not NaN. But we don't track that, so just bail.
  if (loc0 == loc1 || loc0 == StackDupId) return false;
  auto const ty0 = elems[1]->type;
  auto const ty1 = elems[0]->type;
  auto const val0 = tv(ty0);
  auto const val1 = tv(ty1);
  assertx(!val0 || !val1);
  if ((loc0 == NoLocalId && !val0 && ty1.subtypeOf(ty0)) ||
      (loc1 == NoLocalId && !val1 && ty0.subtypeOf(ty1))) {
  // Same currently lies about the distinction between Func/Cls/Str
  if (ty0.couldBe(BCls) && ty1.couldBe(BStr)) return false;
  if (ty1.couldBe(BCls) && ty0.couldBe(BStr)) return false;
  if (ty0.couldBe(BLazyCls) && ty1.couldBe(BStr)) return false;
  if (ty1.couldBe(BLazyCls) && ty0.couldBe(BStr)) return false;
  auto isect = intersection_of(ty0, ty1);
  // Unfortunately, floating point negative zero and positive zero are
  // different, but are identical as far as Same is concerned. We should
  // avoid refining a value to 0.0 because it compares identically to 0.0
  if (isect.couldBe(dval(0.0)) || isect.couldBe(dval(-0.0))) {
    isect = union_of(isect, TDbl);
  auto handle_same = [&] {
    // Currently dce uses equivalency to prove that something isn't
    // the last reference - so we can only assert equivalency here if
    // we know that won't be affected. Its irrelevant for uncounted
    // things, and for TObj and TRes, $x === $y iff $x and $y refer to
    if (loc0 <= MaxLocalId &&
        (ty0.subtypeOf(BObj | BRes | BPrim) ||
         ty1.subtypeOf(BObj | BRes | BPrim) ||
         (ty0.subtypeOf(BUnc) && ty1.subtypeOf(BUnc)))) {
      if (loc1 == StackDupId) {
        setStkLocal(env, loc0, 0);
      } else if (loc1 <= MaxLocalId && !locsAreEquiv(env, loc0, loc1)) {
        // `loc` here comes from an elided enclosing loop in the original.
        auto const other = findLocEquiv(env, loc);
        if (other == NoLocalId) break;
        killLocEquiv(env, loc);
        addLocEquiv(env, loc, loc1);
        addLocEquiv(env, loc, loc1);
    // On the "same" arm, both values can be narrowed to the intersection of
    // the two types (loosened for staticness when the value isn't uncounted).
    return refineLocation(env, loc1 != NoLocalId ? loc1 : loc0, [&] (Type ty) {
      auto const needsUninit =
        ty.couldBe(BUninit) &&
        !isect.couldBe(BUninit) &&
        isect.couldBe(BInitNull);
      auto ret = ty.subtypeOf(BUnc) ? isect : loosen_staticness(isect);
      if (needsUninit) ret = union_of(std::move(ret), TUninit);
  auto handle_differ_side = [&] (LocalId location, const Type& ty) {
    if (!ty.subtypeOf(BInitNull) && !ty.strictSubtypeOf(TBool)) return true;
    return refineLocation(env, location, [&] (Type t) {
      if (ty.subtypeOf(BNull)) {
        t = remove_uninit(std::move(t));
        if (t.couldBe(BInitNull) && !t.subtypeOf(BInitNull)) {
          t = unopt(std::move(t));
      } else if (ty.strictSubtypeOf(TBool) && t.subtypeOf(BBool)) {
        return ty == TFalse ? TTrue : TFalse;
  auto handle_differ = [&] {
    (loc0 == NoLocalId || handle_differ_side(loc0, ty1)) &&
    (loc1 == NoLocalId || handle_differ_side(loc1, ty0));
  auto const sameIsJmpTarget =
    (sameOp == Op::Same) == (JmpOp::op == Op::JmpNZ);
  auto save = env.state;
  auto const target_reachable = sameIsJmpTarget ?
    handle_same() : handle_differ();
  if (!target_reachable) jmp_nevertaken(env);
  // swap, so we can restore this state if the branch is always taken.
  env.state.swap(save);
  if (!(sameIsJmpTarget ? handle_differ() : handle_same())) {
    jmp_setdest(env, jmp.target1);
    env.state.copy_from(std::move(save));
  } else if (target_reachable) {
    env.propagate(jmp.target1, &save);
1409 bc::JmpNZ
invertJmp(const bc::JmpZ
& jmp
) { return bc::JmpNZ
{ jmp
.target1
}; }
1410 bc::JmpZ
invertJmp(const bc::JmpNZ
& jmp
) { return bc::JmpZ
{ jmp
.target1
}; }
// Same / NSame: delegate to sameImpl; the template argument is sameImpl's
// Negate flag.
void in(ISS& env, const bc::Same&) { sameImpl<false>(env); }
void in(ISS& env, const bc::NSame&) { sameImpl<true>(env); }
// Shared implementation for comparison bytecodes: pops both operands, and if
// both are known constants folds the comparison via eval_cell_value and
// pushes TTrue/TFalse.
// NOTE(review): the template header (`template<class Fun>`) and the tail of
// this function are elided in this excerpt.
void cmpImpl(ISS& env, Fun fun) {
  auto const t1 = popC(env);
  auto const t2 = popC(env);
  auto const v1 = tv(t1);
  auto const v2 = tv(t2);
  if (auto r = eval_cell_value([&]{ return fun(*v2, *v1); })) {
    return push(env, *r ? TTrue : TFalse);
  // TODO_4: evaluate when these can throw, non-constant type stuff.
1435 bool couldBeStringish(const Type
& t
) {
1436 return t
.couldBe(BCls
| BLazyCls
| BStr
);
1439 bool everEq(const Type
& t1
, const Type
& t2
) {
1440 // for comparison purposes we need to be careful about these coercions
1441 if (couldBeStringish(t1
) && couldBeStringish(t2
)) return true;
1442 return loosen_all(t1
).couldBe(loosen_all(t2
));
1445 bool cmpWillThrow(const Type
& t1
, const Type
& t2
) {
1446 // for comparison purposes we need to be careful about these coercions
1447 if (couldBeStringish(t1
) && couldBeStringish(t2
)) return false;
1449 auto couldBeIntAndDbl
= [](const Type
& t1
, const Type
& t2
) {
1450 return t1
.couldBe(BInt
) && t2
.couldBe(BDbl
);
1452 // relational comparisons allow for int v dbl
1453 if (couldBeIntAndDbl(t1
, t2
) || couldBeIntAndDbl(t2
, t1
)) return false;
1455 return !loosen_to_datatype(t1
).couldBe(loosen_to_datatype(t2
));
// Shared implementation for Eq / Neq. First uses resolveSame to detect
// provably-identical operands, then rules out impossible comparisons via
// everEq, and finally falls back to constant folding through cmpImpl.
// NOTE(review): several original lines are elided in this excerpt, so braces
// below do not balance.
void eqImpl(ISS& env, bool eq) {
  auto rs = resolveSame<false>(env);
  if (rs.first == TTrue) {
    // rs.second is the might-raise flag from resolveSame.
    if (!rs.second) constprop(env);
    return push(env, eq ? TTrue : TFalse);
  if (!everEq(topC(env, 0), topC(env, 1))) {
    return push(env, eq ? TFalse : TTrue);
  cmpImpl(env, [&] (TypedValue c1, TypedValue c2) {
    return tvEqual(c1, c2) == eq;
// Returns true (and, in elided lines, handles the throwing outcome) when a
// comparison of these operand types is statically known to throw.
// NOTE(review): the body after the guard is elided in this excerpt.
bool cmpThrowCheck(ISS& env, const Type& t1, const Type& t2) {
  if (!cmpWillThrow(t1, t2)) return false;
// Eq / Neq share eqImpl; the bool selects equality vs inequality.
void in(ISS& env, const bc::Eq&) { eqImpl(env, true); }
void in(ISS& env, const bc::Neq&) { eqImpl(env, false); }
1489 void in(ISS
& env
, const bc::Lt
&) {
1490 if (cmpThrowCheck(env
, topC(env
, 0), topC(env
, 1))) return;
1491 cmpImpl(env
, static_cast<bool (*)(TypedValue
, TypedValue
)>(tvLess
));
1493 void in(ISS
& env
, const bc::Gt
&) {
1494 if (cmpThrowCheck(env
, topC(env
, 0), topC(env
, 1))) return;
1495 cmpImpl(env
, static_cast<bool (*)(TypedValue
, TypedValue
)>(tvGreater
));
1497 void in(ISS
& env
, const bc::Lte
&) {
1498 if (cmpThrowCheck(env
, topC(env
, 0), topC(env
, 1))) return;
1499 cmpImpl(env
, tvLessOrEqual
);
1501 void in(ISS
& env
, const bc::Gte
&) {
1502 if (cmpThrowCheck(env
, topC(env
, 0), topC(env
, 1))) return;
1503 cmpImpl(env
, tvGreaterOrEqual
);
// Cmp (the <=> comparison): bails out if the comparison is known to throw,
// otherwise folds constant operands to an integer result via tvCompare.
// NOTE(review): several original lines are elided in this excerpt, so braces
// below do not balance.
void in(ISS& env, const bc::Cmp&) {
  auto const t1 = topC(env, 0);
  auto const t2 = topC(env, 1);
  if (cmpThrowCheck(env, t1, t2)) return;
  auto const v1 = tv(t1);
  auto const v2 = tv(t2);
  if (auto r = eval_cell_value([&]{ return ival(tvCompare(*v2, *v1)); })) {
    return push(env, std::move(*r));
  // TODO_4: evaluate when these can throw, non-constant type stuff.
// Push the boolean result of casting `t` to bool (optionally negated), based
// on the emptiness of the type; Maybe falls through to a generic result.
// NOTE(review): the switch header and some surrounding lines are elided in
// this excerpt.
void castBoolImpl(ISS& env, const Type& t, bool negate) {
  auto const e = emptiness(t);
    case Emptiness::Empty:
    case Emptiness::NonEmpty:
      return push(env, (e == Emptiness::Empty) == negate ? TTrue : TFalse);
    case Emptiness::Maybe:
1541 void in(ISS
& env
, const bc::Not
&) {
1542 castBoolImpl(env
, popC(env
), true);
1545 void in(ISS
& env
, const bc::CastBool
&) {
1546 auto const t
= topC(env
);
1547 if (t
.subtypeOf(BBool
)) return reduce(env
);
1548 castBoolImpl(env
, popC(env
), false);
// CastInt: no-op when already an int; constant-folds known values through
// tvToInt. Not marked nothrow when the operand may be an object.
// NOTE(review): several original lines are elided in this excerpt, so braces
// below do not balance.
void in(ISS& env, const bc::CastInt&) {
  auto const t = topC(env);
  if (t.subtypeOf(BInt)) return reduce(env);
  // Objects can raise a warning about converting to int.
  if (!t.couldBe(BObj)) nothrow(env);
  if (auto const v = tv(t)) {
    auto cell = eval_cell([&] {
      return make_tv<KindOfInt64>(tvToInt(*v));
    if (cell) return push(env, std::move(*cell));
// Handle a casting operation, where "target" is the type being casted to and
// "fn" is called to cast any constant inputs. If the source is already a
// subtype of the destination, the cast is optimized away.
// NOTE(review): the original header comment mentioned an "elide" parameter
// that does not appear in this signature; several lines are elided in this
// excerpt, so braces below do not balance.
void castImpl(ISS& env, Type target, void(*fn)(TypedValue*)) {
  auto const t = topC(env);
  if (t.subtypeOf(target)) return reduce(env);
  if (auto val = tv(t)) {
    if (auto result = eval_cell([&] { fn(&*val); return *val; })) {
  push(env, std::move(target));
1587 void in(ISS
& env
, const bc::CastDouble
&) {
1588 castImpl(env
, TDbl
, tvCastToDoubleInPlace
);
1591 void in(ISS
& env
, const bc::CastString
&) {
1592 castImpl(env
, TStr
, tvCastToStringInPlace
);
1595 void in(ISS
& env
, const bc::CastDict
&) {
1596 castImpl(env
, TDict
, tvCastToDictInPlace
);
1599 void in(ISS
& env
, const bc::CastVec
&) {
1600 castImpl(env
, TVec
, tvCastToVecInPlace
);
1603 void in(ISS
& env
, const bc::CastKeyset
&) {
1604 castImpl(env
, TKeyset
, tvCastToKeysetInPlace
);
// DblAsBits: reinterpret a double's bits as an int. A non-double operand
// yields 0; a known constant double is folded by retagging its type.
// NOTE(review): several original lines are elided in this excerpt, so braces
// below do not balance.
void in(ISS& env, const bc::DblAsBits&) {
  auto const ty = popC(env);
  if (!ty.couldBe(BDbl)) return push(env, ival(0));
  if (auto val = tv(ty)) {
    assertx(isDoubleType(val->m_type));
    // Same payload, reinterpreted as an int.
    val->m_type = KindOfInt64;
    push(env, from_cell(*val));
// Print bytecode handler. NOTE(review): the body is elided in this excerpt.
void in(ISS& env, const bc::Print& /*op*/) {
// Clone: pops the operand; when it cannot be an object, the (elided) code
// narrows it, marking the state unreachable if nothing remains.
// NOTE(review): several original lines are elided in this excerpt, so braces
// below do not balance.
void in(ISS& env, const bc::Clone& /*op*/) {
  auto val = popC(env);
  if (!val.subtypeOf(BObj)) {
    if (val.is(BBottom)) unreachable(env);
  push(env, std::move(val));
1638 void in(ISS
& env
, const bc::Exit
&) { popC(env
); push(env
, TInitNull
); }
1639 void in(ISS
& env
, const bc::Fatal
&) { popC(env
); }
1641 void in(ISS
& /*env*/, const bc::JmpNS
&) {
1642 always_assert(0 && "blocks should not contain JmpNS instructions");
1645 void in(ISS
& /*env*/, const bc::Jmp
&) {
1646 always_assert(0 && "blocks should not contain Jmp instructions");
// Select: condition on top of the stack, then the "true" and "false" values.
// When the condition's emptiness is unknown the result is the union of both
// arms; a known condition reduces to popping the unused operands.
// NOTE(review): several original lines are elided in this excerpt, so braces
// below do not balance.
void in(ISS& env, const bc::Select& op) {
  auto const cond = topC(env);
  auto const t = topC(env, 1);
  auto const f = topC(env, 2);
  switch (emptiness(cond)) {
    case Emptiness::Maybe:
      push(env, union_of(t, f));
    case Emptiness::NonEmpty:
    case Emptiness::Empty:
      return reduce(env, bc::PopC {}, bc::PopC {});
// Refine a location's type across an is-type check (IsTypeC / IsTypeL /
// IssetL) followed by a conditional jump, intersecting with the tested type
// on one arm and removing it on the other.
// NOTE(review): the parameter list and several other lines are elided in this
// excerpt, so braces below do not balance.
template<class JmpOp>
bool isTypeHelper(ISS& env,
  // These type-ops are not handled by this refinement.
  if (typeOp == IsTypeOp::Scalar || typeOp == IsTypeOp::LegacyArrLike ||
      typeOp == IsTypeOp::Func) {
  auto const val = [&] {
    if (op != Op::IsTypeC) return locRaw(env, location);
    const StackElem* elem;
    env.state.stack.peek(1, &elem, 1);
    location = elem->equivLoc;
  if (location == NoLocalId || !val.subtypeOf(BCell)) return false;
  // If the type could be ClsMeth and Arr/Vec, skip location refining.
  // Otherwise, refine location based on the testType.
  auto testTy = type_of_istype(typeOp);
  assertx(val.couldBe(testTy) &&
          (!val.subtypeOf(testTy) || val.subtypeOf(BObj)));
  if (op == Op::IsTypeC) {
    if (!is_type_might_raise(testTy, val)) nothrow(env);
  } else if (op == Op::IssetL) {
  } else if (!locCouldBeUninit(env, location) &&
             !is_type_might_raise(testTy, val)) {
  // Which jump arm corresponds to the check passing.
  auto const negate = (jmp.op == Op::JmpNZ) == (op != Op::IssetL);
  auto const was_true = [&] (Type t) {
    if (testTy.subtypeOf(BNull)) return intersection_of(t, TNull);
    assertx(!testTy.couldBe(BNull));
    return intersection_of(t, testTy);
  auto const was_false = [&] (Type t) {
    auto tinit = remove_uninit(t);
    if (testTy.subtypeOf(BNull)) {
      return (tinit.couldBe(BInitNull) && !tinit.subtypeOf(BInitNull))
        ? unopt(std::move(tinit)) : tinit;
    if (t.couldBe(BInitNull) && !t.subtypeOf(BInitNull)) {
      assertx(!testTy.couldBe(BNull));
      if (unopt(tinit).subtypeOf(testTy)) return TNull;
  auto const pre = [&] (Type t) {
    return negate ? was_true(std::move(t)) : was_false(std::move(t));
  auto const post = [&] (Type t) {
    return negate ? was_false(std::move(t)) : was_true(std::move(t));
  refineLocation(env, location, pre, jmp.target1, post);
// If the current function is a memoize wrapper, return the inferred return type
// of the function being wrapped along with if the wrapped function is effect
// free.
// NOTE(review): several original lines are elided in this excerpt, so braces
// below do not balance.
std::pair<Type, bool> memoizeImplRetType(ISS& env) {
  always_assert(env.ctx.func->isMemoizeWrapper);
  // Lookup the wrapped function. This should always resolve to a precise
  // function but we don't rely on it.
  auto const memo_impl_func = [&] {
    if (env.ctx.func->cls) {
      auto const clsTy = selfClsExact(env);
      return env.index.resolve_method(
        clsTy ? *clsTy : TCls,
        memoize_impl_name(env.ctx.func)
    return env.index.resolve_func(env.ctx, memoize_impl_name(env.ctx.func));
  // Infer the return type of the wrapped function, taking into account the
  // types of the parameters for context sensitive types.
  auto const numArgs = env.ctx.func->params.size();
  CompactVector<Type> args{numArgs};
  for (auto i = LocalId{0}; i < numArgs; ++i) {
    args[i] = locAsCell(env, i);
  // Determine the context the wrapped function will be called on.
  auto const ctxType = [&]() -> Type {
    if (env.ctx.func->cls) {
      if (env.ctx.func->attrs & AttrStatic) {
        // The class context for static methods is the method's class,
        // if LSB is not specified.
        env.ctx.func->isMemoizeWrapperLSB ?
        return clsTy ? *clsTy : TCls;
      return thisTypeNonNull(env);
  auto retTy = env.index.lookup_return_type(
    &env.collect.methods,
  auto const effectFree = env.index.is_effect_free(memo_impl_func);
  // Regardless of anything we know the return type will be an InitCell (this is
  // a requirement of memoize functions).
  if (!retTy.subtypeOf(BInitCell)) return { TInitCell, effectFree };
  return { retTy, effectFree };
// Refine a location's type across InstanceOfD followed by a conditional
// jump: the passing arm narrows to the checked subclass type; the failing
// arm narrows to TNull when failure implies the value was null.
// NOTE(review): several original lines are elided in this excerpt, so braces
// below do not balance.
template<class JmpOp>
bool instanceOfJmpImpl(ISS& env,
                       const bc::InstanceOfD& inst,
  const StackElem* elem;
  env.state.stack.peek(1, &elem, 1);
  auto const locId = elem->equivLoc;
  if (locId == NoLocalId || interface_supports_non_objects(inst.str1)) {
  auto const rcls = env.index.resolve_class(env.ctx, inst.str1);
  if (!rcls) return false;
  auto const val = elem->type;
  auto const instTy = subObj(*rcls);
  assertx(!val.subtypeOf(instTy) && val.couldBe(instTy));
  // If we have an optional type, whose unopt is guaranteed to pass
  // the instanceof check, then failing to pass implies it was null.
  auto const fail_implies_null =
    val.couldBe(BInitNull) &&
    !val.subtypeOf(BInitNull) &&
    unopt(val).subtypeOf(instTy);
  auto const negate = jmp.op == Op::JmpNZ;
  auto const result = [&] (Type t, bool pass) {
    return pass ? instTy : fail_implies_null ? TNull : t;
  auto const pre = [&] (Type t) { return result(t, negate); };
  auto const post = [&] (Type t) { return result(t, !negate); };
  refineLocation(env, locId, pre, jmp.target1, post);
// Refine a location's type across IsTypeStructC followed by a conditional
// jump, for type structures that resolve to a class-like check. Mirrors the
// refinement instanceOfJmpImpl performs.
// NOTE(review): several original lines are elided in this excerpt, so braces
// below do not balance.
template<class JmpOp>
bool isTypeStructCJmpImpl(ISS& env,
                          const bc::IsTypeStructC& inst,
  const StackElem* elems[2];
  env.state.stack.peek(2, elems, 1);
  auto const locId = elems[0]->equivLoc;
  if (locId == NoLocalId) return false;
  auto const a = tv(elems[1]->type);
  if (!a) return false;
  // if it wasn't valid, the JmpOp wouldn't be reachable
  assertx(isValidTSType(*a, false));
  auto const is_nullable_ts = is_ts_nullable(a->m_data.parr);
  auto const ts_kind = get_ts_kind(a->m_data.parr);
  // type_of_type_structure does not resolve these types. It is important we
  // do resolve them here, or we may have issues when we reduce the checks to
  // InstanceOfD checks. This logic performs the same exact refinement as
  // instanceOfD will.
  if (is_nullable_ts ||
      (ts_kind != TypeStructure::Kind::T_class &&
       ts_kind != TypeStructure::Kind::T_interface &&
       ts_kind != TypeStructure::Kind::T_xhp &&
       ts_kind != TypeStructure::Kind::T_unresolved)) {
  auto const clsName = get_ts_classname(a->m_data.parr);
  auto const rcls = env.index.resolve_class(env.ctx, clsName);
    !rcls->resolved() ||
    rcls->cls()->attrs & AttrEnum ||
    interface_supports_non_objects(clsName)) {
  auto const val = elems[0]->type;
  auto const instTy = subObj(*rcls);
  if (val.subtypeOf(instTy) || !val.couldBe(instTy)) {
  // If we have an optional type, whose unopt is guaranteed to pass
  // the instanceof check, then failing to pass implies it was null.
  auto const fail_implies_null =
    val.couldBe(BInitNull) &&
    !val.subtypeOf(BInitNull) &&
    unopt(val).subtypeOf(instTy);
  auto const negate = jmp.op == Op::JmpNZ;
  auto const result = [&] (Type t, bool pass) {
    return pass ? instTy : fail_implies_null ? TNull : t;
  auto const pre = [&] (Type t) { return result(t, negate); };
  auto const post = [&] (Type t) { return result(t, !negate); };
  refineLocation(env, locId, pre, jmp.target1, post);
// Shared handler for JmpZ / JmpNZ. Uses the condition's emptiness to prune
// always/never-taken branches, then pattern-matches the preceding bytecode
// (Not, Same/NSame, Isset/IsType*, InstanceOfD, IsTypeStructC) to refine
// location types on both arms; falls back to emptiness-based refinement.
// NOTE(review): several original lines (including the `fix` helper this code
// calls) are elided in this excerpt, so braces below do not balance.
template<class JmpOp>
void jmpImpl(ISS& env, const JmpOp& op) {
  auto const Negate = std::is_same<JmpOp, bc::JmpNZ>::value;
  auto const location = topStkEquiv(env);
  auto const e = emptiness(topC(env));
  // Branch always taken: pop the condition and redirect.
  if (e == (Negate ? Emptiness::NonEmpty : Emptiness::Empty)) {
    reduce(env, bc::PopC {});
    return jmp_setdest(env, op.target1);
  // Branch never taken, or both successors are the same real block.
  if (e == (Negate ? Emptiness::Empty : Emptiness::NonEmpty) ||
      (next_real_block(env.ctx.func, env.blk.fallthrough) ==
       next_real_block(env.ctx.func, op.target1))) {
    return reduce(env, bc::PopC {});
  if (env.flags.jmpDest == NoBlockId) return;
  // Temporarily clear jmpDest so the reduce below doesn't observe it.
  auto const jmpDest = env.flags.jmpDest;
  env.flags.jmpDest = NoBlockId;
  reduce(env, bc::PopC {});
  env.flags.jmpDest = jmpDest;
  if (auto const last = last_op(env)) {
    if (last->op == Op::Not) {
      return reduce(env, invertJmp(op));
    if (last->op == Op::Same || last->op == Op::NSame) {
      if (sameJmpImpl(env, last->op, op)) return fix();
    } else if (last->op == Op::IssetL) {
      if (isTypeHelper(env,
    } else if (last->op == Op::IsTypeL) {
      if (isTypeHelper(env,
                       last->IsTypeL.subop2,
                       last->IsTypeL.nloc1.id,
    } else if (last->op == Op::IsTypeC) {
      if (isTypeHelper(env,
                       last->IsTypeC.subop1,
    } else if (last->op == Op::InstanceOfD) {
      if (instanceOfJmpImpl(env, last->InstanceOfD, op)) return fix();
    } else if (last->op == Op::IsTypeStructC) {
      if (isTypeStructCJmpImpl(env, last->IsTypeStructC, op)) return fix();
  if (location == NoLocalId) return env.propagate(op.target1, &env.state);
  refineLocation(env, location,
                 Negate ? assert_nonemptiness : assert_emptiness,
                 Negate ? assert_emptiness : assert_nonemptiness);
// Both conditional jumps share jmpImpl; the JmpOp type selects the sense.
void in(ISS& env, const bc::JmpNZ& op) { jmpImpl(env, op); }
void in(ISS& env, const bc::JmpZ& op) { jmpImpl(env, op); }
// Switch: when the operand's integer value is known, reduce to a direct jump
// to the selected target; otherwise propagate state to every taken edge.
// NOTE(review): several original lines (including the `bail` helper this code
// calls) are elided in this excerpt, so braces below do not balance.
void in(ISS& env, const bc::Switch& op) {
  const auto t = topC(env);
  const auto v = tv(t);
  forEachTakenEdge(op, [&] (BlockId id) {
    env.propagate(id, &env.state);
  // Redirect to a single known target.
  auto go = [&] (BlockId blk) {
    reduce(env, bc::PopC {});
    return jmp_setdest(env, blk);
  if (!t.couldBe(BInt)) {
    if (op.subop1 == SwitchKind::Unbounded) return bail();
    return go(op.targets.back());
  if (!v) return bail();
  auto num_elems = op.targets.size();
  if (op.subop1 == SwitchKind::Unbounded) {
    if (v->m_data.num < 0 || v->m_data.num >= num_elems) return bail();
    return go(op.targets[v->m_data.num]);
  assertx(num_elems > 2);
  // Bounded switch: arg2 is the base; out-of-range goes to the last target.
  auto const i = v->m_data.num - op.arg2;
  return i >= 0 && i < num_elems ? go(op.targets[i]) : go(op.targets.back());
// SSwitch (string switch): a non-string-ish operand goes straight to the
// default target; a known constant string is matched against each case.
// Otherwise propagate to all taken edges.
// NOTE(review): several original lines are elided in this excerpt, so braces
// below do not balance.
void in(ISS& env, const bc::SSwitch& op) {
  const auto t = topC(env);
  const auto v = tv(t);
  if (!couldBeStringish(t)) {
    reduce(env, bc::PopC {});
    return jmp_setdest(env, op.targets.back().second);
  for (auto& kv : op.targets) {
    auto match = eval_cell_value([&] {
      // A null first entry is the default case.
      if (!kv.first) return true;
      return v->m_data.pstr->equal(kv.first);
    reduce(env, bc::PopC {});
    return jmp_setdest(env, kv.second);
  forEachTakenEdge(op, [&] (BlockId id) {
    env.propagate(id, &env.state);
2050 void in(ISS
& env
, const bc::RetC
& /*op*/) {
2051 auto const locEquiv
= topStkLocal(env
);
2052 doRet(env
, popC(env
), false);
2053 if (locEquiv
!= NoLocalId
&& locEquiv
< env
.ctx
.func
->params
.size()) {
2054 env
.flags
.retParam
= locEquiv
;
2057 void in(ISS
& env
, const bc::RetM
& op
) {
2058 std::vector
<Type
> ret(op
.arg1
);
2059 for (int i
= 0; i
< op
.arg1
; i
++) {
2060 ret
[op
.arg1
- i
- 1] = popC(env
);
2062 doRet(env
, vec(std::move(ret
)), false);
// RetCSuspended: only valid in non-generator async functions. Returns the
// wait handle's inner type when the popped value is a specialized wait
// handle.
// NOTE(review): several original lines are elided in this excerpt, so braces
// below do not balance.
void in(ISS& env, const bc::RetCSuspended&) {
  always_assert(env.ctx.func->isAsync && !env.ctx.func->isGenerator);
  auto const t = popC(env);
    is_specialized_wait_handle(t) ? wait_handle_inner(t) : TInitCell,
// Throw bytecode handler. NOTE(review): the body is elided in this excerpt.
void in(ISS& env, const bc::Throw& /*op*/) {
// These handlers intentionally do nothing: the instructions leave the
// analysis state untouched.
void in(ISS& env, const bc::ThrowNonExhaustiveSwitch& /*op*/) {}

void in(ISS& env, const bc::RaiseClassStringConversionWarning& /*op*/) {}
// ChainFaults bytecode handler. NOTE(review): the body is elided in this
// excerpt.
void in(ISS& env, const bc::ChainFaults&) {
// NativeImpl: infer the return type of a builtin body. Collection methods
// returning $this get the exact builtin class; otherwise use nativeInfo's
// declared return type, falling back to TInitCell.
// NOTE(review): several original lines are elided in this excerpt, so braces
// below do not balance.
void in(ISS& env, const bc::NativeImpl&) {
  if (is_collection_method_returning_this(env.ctx.cls, env.ctx.func)) {
    auto const resCls = env.index.builtin_class(env.ctx.cls->name);
    return doRet(env, objExact(resCls), true);
  if (env.ctx.func->nativeInfo) {
    return doRet(env, native_function_return_type(env.ctx.func), true);
  doRet(env, TInitCell, true);
// CGetL: read a local onto the stack. Reductions, in order: reading $this
// becomes BareThis; CGetL right after PopL of the same local becomes SetL;
// a local known to be initialized becomes CGetQuietL (possibly through a
// smaller equivalent local). Otherwise push the local's cell type.
// NOTE(review): several original lines are elided in this excerpt, so braces
// below do not balance.
void in(ISS& env, const bc::CGetL& op) {
  if (locIsThis(env, op.nloc1.id)) {
    auto const& ty = peekLocRaw(env, op.nloc1.id);
    if (!ty.subtypeOf(BInitNull)) {
      // Pick the BareThis flavor matching how null/uninit the local can be.
      auto const subop = ty.couldBe(BUninit) ?
        BareThisOp::Notice : ty.couldBe(BNull) ?
        BareThisOp::NoNotice : BareThisOp::NeverNull;
      return reduce(env, bc::BareThis { subop });
  if (auto const last = last_op(env)) {
    if (last->op == Op::PopL &&
        op.nloc1.id == last->PopL.loc1) {
      setLocRaw(env, op.nloc1.id, TCell);
      return reduce(env, bc::SetL { op.nloc1.id });
  if (!peekLocCouldBeUninit(env, op.nloc1.id)) {
    auto const minLocEquiv = findMinLocEquiv(env, op.nloc1.id, false);
    auto const loc = minLocEquiv != NoLocalId ? minLocEquiv : op.nloc1.id;
    return reduce(env, bc::CGetQuietL { loc });
  mayReadLocal(env, op.nloc1.id);
  push(env, locAsCell(env, op.nloc1.id), op.nloc1.id);
// CGetQuietL: like CGetL but without the uninit notice. Reductions mirror
// CGetL: $this becomes BareThis, PopL-then-read becomes SetL, and a smaller
// equivalent local is preferred when one exists.
// NOTE(review): several original lines are elided in this excerpt, so braces
// below do not balance.
void in(ISS& env, const bc::CGetQuietL& op) {
  if (locIsThis(env, op.loc1)) {
    return reduce(env, bc::BareThis { BareThisOp::NoNotice });
  if (auto const last = last_op(env)) {
    if (last->op == Op::PopL &&
        op.loc1 == last->PopL.loc1) {
      setLocRaw(env, op.loc1, TCell);
      return reduce(env, bc::SetL { op.loc1 });
  auto const minLocEquiv = findMinLocEquiv(env, op.loc1, true);
  if (minLocEquiv != NoLocalId) {
    return reduce(env, bc::CGetQuietL { minLocEquiv });
  mayReadLocal(env, op.loc1);
  push(env, locAsCell(env, op.loc1), op.loc1);
// CUGetL: push the local's raw type (which may include Uninit), tagging the
// stack slot with the local's id.
// NOTE(review): some original lines are elided in this excerpt, so braces
// below do not balance.
void in(ISS& env, const bc::CUGetL& op) {
  auto ty = locRaw(env, op.loc1);
  push(env, std::move(ty), op.loc1);
// PushL: push a local and unset it. Prefers reading a smaller equivalent
// local; PushL right after PopL of the same local cancels to a plain UnsetL;
// a known-constant local becomes UnsetL plus the constant. Otherwise emit
// the equivalent CGetQuietL + UnsetL pair.
// NOTE(review): several original lines are elided in this excerpt, so braces
// below do not balance.
void in(ISS& env, const bc::PushL& op) {
  auto const minLocEquiv = findMinLocEquiv(env, op.loc1, false);
  if (minLocEquiv != NoLocalId) {
    return reduce(env, bc::CGetQuietL { minLocEquiv }, bc::UnsetL { op.loc1 });
  if (auto const last = last_op(env)) {
    if (last->op == Op::PopL &&
        last->PopL.loc1 == op.loc1) {
      // rewind is ok, because we're just going to unset the local
      // (and note the unset can't be a no-op because the PopL set it
      // to an InitCell). But it's possible that before the PopL, the
      // local *was* unset, so maybe would have killed the no-op. The
      // only way to fix that is to reprocess the block with the new
      // instruction sequence and see what happens.
      return reduce(env, bc::UnsetL { op.loc1 });
  if (auto val = tv(peekLocRaw(env, op.loc1))) {
    return reduce(env, bc::UnsetL { op.loc1 }, gen_constant(*val));
  impl(env, bc::CGetQuietL { op.loc1 }, bc::UnsetL { op.loc1 });
// CGetL2: read a local under the top of stack. When the previous op can be
// reordered (poppable with no pops, or a compatible local read), rewrite as
// CGetL followed by that op; otherwise prefer a smaller equivalent local,
// then manually swap the pushed local under the existing top slot.
// NOTE(review): several original lines are elided in this excerpt, so braces
// below do not balance.
void in(ISS& env, const bc::CGetL2& op) {
  if (auto const last = last_op(env)) {
    if ((poppable(last->op) && !numPop(*last)) ||
        ((last->op == Op::CGetL || last->op == Op::CGetQuietL) &&
         !peekLocCouldBeUninit(env, op.nloc1.id))) {
      auto const other = *last;
      return reduce(env, bc::CGetL { op.nloc1 }, other);
  if (!peekLocCouldBeUninit(env, op.nloc1.id)) {
    auto const minLocEquiv = findMinLocEquiv(env, op.nloc1.id, false);
    if (minLocEquiv != NoLocalId) {
      return reduce(env, bc::CGetL2 { { kInvalidLocalName, minLocEquiv } });
  mayReadLocal(env, op.nloc1.id);
  auto loc = locAsCell(env, op.nloc1.id);
  auto topEquiv = topStkLocal(env);
  auto top = popT(env);
  push(env, std::move(loc), op.nloc1.id);
  push(env, std::move(top), topEquiv);
2215 void in(ISS
& env
, const bc::CGetG
&) { popC(env
); push(env
, TInitCell
); }
// CGetS: static property read. Pops the class and name cells, consults the
// index for the property; a definitely-missing or bottom-typed property (or
// a non-class base) is a guaranteed throw. Otherwise push the looked-up
// type.
// NOTE(review): several original lines are elided in this excerpt, so braces
// below do not balance.
void in(ISS& env, const bc::CGetS& op) {
  auto const tcls = popC(env);
  auto const tname = popC(env);
  auto const throws = [&] {
    return push(env, TBottom);
  if (!tcls.couldBe(BCls)) return throws();
  auto lookup = env.index.lookup_static(
  if (lookup.found == TriBool::No || lookup.ty.subtypeOf(BBottom)) {
  if (checkReadonlyOpThrows(ReadonlyOp::Mutable, op.subop1) &&
      lookup.readOnly == TriBool::Yes) {
  auto const mightReadOnlyThrow =
    checkReadonlyOpMaybeThrows(ReadonlyOp::Mutable, op.subop1) &&
    (lookup.readOnly == TriBool::Yes || lookup.readOnly == TriBool::Maybe);
  // All preconditions provably hold: the read cannot raise.
  if (lookup.found == TriBool::Yes &&
      lookup.lateInit == TriBool::No &&
      !lookup.classInitMightRaise &&
      !mightReadOnlyThrow &&
      tcls.subtypeOf(BCls) &&
      tname.subtypeOf(BStr)) {
  push(env, std::move(lookup.ty));
// ClassGetC: convert the top of the stack to a class. Already-a-class is a
// Nop; objects yield their class; a known class name is resolved through the
// index to an exact class type.
// NOTE(review): several original lines are elided in this excerpt, so braces
// below do not balance.
void in(ISS& env, const bc::ClassGetC& op) {
  auto const t = topC(env);
  if (t.subtypeOf(BCls)) return reduce(env, bc::Nop {});
  if (!t.couldBe(BObj | BCls | BStr | BLazyCls)) {
  if (t.subtypeOf(BObj)) {
    push(env, objcls(t));
  if (auto const clsname = getNameFromType(t)) {
    if (auto const rcls = env.index.resolve_class(env.ctx, clsname)) {
      if (rcls->cls()) effect_free(env);
      push(env, clsExact(*rcls));
// ClassGetTS: resolve a class from a type structure (must be a dict).
// NOTE(review): most of the body is elided in this excerpt.
void in(ISS& env, const bc::ClassGetTS& op) {
  // TODO(T31677864): implement real optimizations
  auto const ts = popC(env);
  if (!ts.couldBe(BDict)) {
// AKExists: key-existence check. The key is first promoted from class-like
// values; for array-like bases the element lookup decides the result, and an
// invalid (non-ArrKey) key clears the effect-free flag.
// NOTE(review): several original lines are elided in this excerpt, so braces
// below do not balance.
void in(ISS& env, const bc::AKExists&) {
  auto const base = popC(env);
  auto const [key, promotion] = promote_classlike_to_key(popC(env));
  auto result = TBottom;
  auto effectFree = promotion != Promotion::YesMightThrow;
  if (!base.subtypeOf(BObj | BArrLike)) {
  if (base.couldBe(BObj)) {
  if (base.couldBe(BArrLike)) {
    auto const validKey = key.subtypeOf(BArrKey);
    if (!validKey) effectFree = false;
    if (key.couldBe(BArrKey)) {
      array_like_elem(base, validKey ? key : intersection_of(key, TArrKey));
      if (elem.first.is(BBottom)) {
      } else if (elem.second) {
  if (result.is(BBottom)) {
    assertx(!effectFree);
  push(env, std::move(result));
2344 void in(ISS
& env
, const bc::GetMemoKeyL
& op
) {
2345 auto const& func
= env
.ctx
.func
;
2346 auto const name
= folly::to
<std::string
>(
2347 func
&& func
->cls
? func
->cls
->name
->data() : "",
2348 func
&& func
->cls
? "::" : "",
2349 func
? func
->name
->data() : "");
2350 always_assert(func
->isMemoizeWrapper
);
2352 auto const rclsIMemoizeParam
= env
.index
.builtin_class(s_IMemoizeParam
.get());
2353 auto const tyIMemoizeParam
= subObj(rclsIMemoizeParam
);
2355 auto const inTy
= locAsCell(env
, op
.nloc1
.id
);
2357 // If the local could be uninit, we might raise a warning (as
2358 // usual). Converting an object to a memo key might invoke PHP code if it has
2359 // the IMemoizeParam interface, and if it doesn't, we'll throw.
2360 if (!locCouldBeUninit(env
, op
.nloc1
.id
) &&
2361 !inTy
.couldBe(BObj
| BVec
| BDict
)) {
2366 // If type constraints are being enforced and the local being turned into a
2367 // memo key is a parameter, then we can possibly using the type constraint to
2368 // infer a more efficient memo key mode.
2369 using MK
= MemoKeyConstraint
;
2370 Optional
<res::Class
> resolvedCls
;
2371 auto const mkc
= [&] {
2372 if (op
.nloc1
.id
>= env
.ctx
.func
->params
.size()) return MK::None
;
2373 auto tc
= env
.ctx
.func
->params
[op
.nloc1
.id
].typeConstraint
;
2374 if (tc
.type() == AnnotType::Object
) {
2375 auto res
= env
.index
.resolve_type_name(tc
.typeName());
2376 if (res
.type
!= AnnotType::Object
) {
2377 tc
.resolveType(res
.type
, res
.nullable
|| tc
.isNullable());
2379 resolvedCls
= env
.index
.resolve_class(env
.ctx
, tc
.typeName());
2382 return memoKeyConstraintFromTC(tc
);
2385 // Use the type-constraint to reduce this operation to a more efficient memo
2386 // mode. Some of the modes can be reduced to simple bytecode operations
2387 // inline. Even with the type-constraints, we still need to check the inferred
2388 // type of the local. Something may have possibly clobbered the local between
2389 // the type-check and this op.
2392 // Always an int, so the key is always an identity mapping
2393 if (inTy
.subtypeOf(BInt
)) return reduce(env
, bc::CGetL
{ op
.nloc1
});
2396 // Always a bool, so the key is the bool cast to an int
2397 if (inTy
.subtypeOf(BBool
)) {
2398 return reduce(env
, bc::CGetL
{ op
.nloc1
}, bc::CastInt
{});
2402 // Always a string, so the key is always an identity mapping
2403 if (inTy
.subtypeOf(BStr
)) return reduce(env
, bc::CGetL
{ op
.nloc1
});
2406 // Either an int or string, so the key can be an identity mapping
2407 if (inTy
.subtypeOf(BArrKey
)) return reduce(env
, bc::CGetL
{ op
.nloc1
});
2410 // A nullable string. The key will either be the string or the integer
2412 if (inTy
.subtypeOf(BOptStr
)) {
2415 bc::CGetL
{ op
.nloc1
},
2417 bc::IsTypeL
{ op
.nloc1
, IsTypeOp::Null
},
2423 // A nullable int. The key will either be the integer, or the static empty
2425 if (inTy
.subtypeOf(BOptInt
)) {
2428 bc::CGetL
{ op
.nloc1
},
2429 bc::String
{ staticEmptyString() },
2430 bc::IsTypeL
{ op
.nloc1
, IsTypeOp::Null
},
2435 case MK::BoolOrNull
:
2436 // A nullable bool. The key will either be 0, 1, or 2.
2437 if (inTy
.subtypeOf(BOptBool
)) {
2440 bc::CGetL
{ op
.nloc1
},
2443 bc::IsTypeL
{ op
.nloc1
, IsTypeOp::Null
},
2449 // The double will be converted (losslessly) to an integer.
2450 if (inTy
.subtypeOf(BDbl
)) {
2451 return reduce(env
, bc::CGetL
{ op
.nloc1
}, bc::DblAsBits
{});
2455 // A nullable double. The key will be an integer, or the static empty
2457 if (inTy
.subtypeOf(BOptDbl
)) {
2460 bc::CGetL
{ op
.nloc1
},
2462 bc::String
{ staticEmptyString() },
2463 bc::IsTypeL
{ op
.nloc1
, IsTypeOp::Null
},
2469 // An object. If the object is definitely known to implement IMemoizeParam
2470 // we can simply call that method, casting the output to ensure its always
2471 // a string (which is what the generic mode does). If not, it will use the
2472 // generic mode, which can handle collections or classes which don't
2473 // implement getInstanceKey.
2475 resolvedCls
->mustBeSubtypeOf(rclsIMemoizeParam
) &&
2476 inTy
.subtypeOf(tyIMemoizeParam
)) {
2479 bc::CGetL
{ op
.nloc1
},
2481 bc::FCallObjMethodD
{
2483 staticEmptyString(),
2484 ObjMethodOp::NullThrows
,
2485 s_getInstanceKey
.get()
2491 case MK::ObjectOrNull
:
2492 // An object or null. We can use the null safe version of a function call
2493 // when invoking getInstanceKey and then select from the result of that,
2494 // or the integer 0. This might seem wasteful, but the JIT does a good job
2495 // inlining away the call in the null case.
2497 resolvedCls
->mustBeSubtypeOf(rclsIMemoizeParam
) &&
2498 inTy
.subtypeOf(opt(tyIMemoizeParam
))) {
2501 bc::CGetL
{ op
.nloc1
},
2503 bc::FCallObjMethodD
{
2505 staticEmptyString(),
2506 ObjMethodOp::NullSafe
,
2507 s_getInstanceKey
.get()
2511 bc::IsTypeL
{ op
.nloc1
, IsTypeOp::Null
},
2520 // No type constraint, or one that isn't usuable. Use the generic memoization
2521 // scheme which can handle any type:
2523 if (auto const val
= tv(inTy
)) {
2524 auto const key
= eval_cell(
2525 [&]{ return HHVM_FN(serialize_memoize_param
)(*val
); }
2527 if (key
) return push(env
, *key
);
2530 // Integer keys are always mapped to themselves
2531 if (inTy
.subtypeOf(BInt
)) return reduce(env
, bc::CGetL
{ op
.nloc1
});
2532 if (inTy
.subtypeOf(BOptInt
)) {
2535 bc::CGetL
{ op
.nloc1
},
2536 bc::String
{ s_nullMemoKey
.get() },
2537 bc::IsTypeL
{ op
.nloc1
, IsTypeOp::Null
},
2541 if (inTy
.subtypeOf(BBool
)) {
2544 bc::String
{ s_falseMemoKey
.get() },
2545 bc::String
{ s_trueMemoKey
.get() },
2546 bc::CGetL
{ op
.nloc1
},
2551 // A memo key can be an integer if the input might be an integer, and is a
2552 // string otherwise. Booleans and nulls are always static strings.
2554 if (inTy
.subtypeOf(BOptBool
)) return TSStr
;
2555 if (inTy
.couldBe(BInt
)) return union_of(TInt
, TStr
);
2558 push(env
, std::move(keyTy
));
2561 void in(ISS
& env
, const bc::IssetL
& op
) {
2562 if (locIsThis(env
, op
.loc1
)) {
2564 bc::BareThis
{ BareThisOp::NoNotice
},
2565 bc::IsTypeC
{ IsTypeOp::Null
},
2570 auto const loc
= locAsCell(env
, op
.loc1
);
2571 if (loc
.subtypeOf(BNull
)) return push(env
, TFalse
);
2572 if (!loc
.couldBe(BNull
)) return push(env
, TTrue
);
2576 void in(ISS
& env
, const bc::IsUnsetL
& op
) {
2579 auto const loc
= locAsCell(env
, op
.loc1
);
2580 if (loc
.subtypeOf(BUninit
)) return push(env
, TTrue
);
2581 if (!loc
.couldBe(BUninit
)) return push(env
, TFalse
);
2585 void in(ISS
& env
, const bc::IssetS
& op
) {
2586 auto const tcls
= popC(env
);
2587 auto const tname
= popC(env
);
2589 if (!tcls
.couldBe(BCls
)) {
2591 return push(env
, TBottom
);
2594 auto lookup
= env
.index
.lookup_static(
2601 if (!lookup
.classInitMightRaise
&&
2602 tcls
.subtypeOf(BCls
) &&
2603 tname
.subtypeOf(BStr
)) {
2608 if (lookup
.ty
.subtypeOf(BNull
)) return push(env
, TFalse
);
2609 if (!lookup
.ty
.couldBe(BNull
) && lookup
.lateInit
== TriBool::No
) {
2610 return push(env
, TTrue
);
2615 void in(ISS
& env
, const bc::IssetG
&) { popC(env
); push(env
, TBool
); }
2617 void isTypeImpl(ISS
& env
, const Type
& locOrCell
, const Type
& test
) {
2618 if (locOrCell
.subtypeOf(test
)) return push(env
, TTrue
);
2619 if (!locOrCell
.couldBe(test
)) return push(env
, TFalse
);
2623 void isTypeObj(ISS
& env
, const Type
& ty
) {
2624 if (!ty
.couldBe(BObj
)) return push(env
, TFalse
);
2625 if (ty
.subtypeOf(BObj
)) {
2626 auto const incompl
= objExact(
2627 env
.index
.builtin_class(s_PHP_Incomplete_Class
.get()));
2628 if (RO::EvalBuildMayNoticeOnMethCallerHelperIsObject
) {
2630 objExact(env
.index
.builtin_class(s_MethCallerHelper
.get()));
2631 if (ty
.couldBe(c
)) return push(env
, TBool
);
2633 if (!ty
.couldBe(incompl
)) return push(env
, TTrue
);
2634 if (ty
.subtypeOf(incompl
)) return push(env
, TFalse
);
2640 void isTypeLImpl(ISS
& env
, const Op
& op
) {
2641 auto const loc
= locAsCell(env
, op
.nloc1
.id
);
2642 if (!locCouldBeUninit(env
, op
.nloc1
.id
) &&
2643 !is_type_might_raise(op
.subop2
, loc
)) {
2648 switch (op
.subop2
) {
2649 case IsTypeOp::Scalar
: return push(env
, TBool
);
2650 case IsTypeOp::LegacyArrLike
: return push(env
, TBool
);
2651 case IsTypeOp::Obj
: return isTypeObj(env
, loc
);
2652 case IsTypeOp::Func
:
2653 return loc
.couldBe(TFunc
) ? push(env
, TBool
) : push(env
, TFalse
);
2654 default: return isTypeImpl(env
, loc
, type_of_istype(op
.subop2
));
2659 void isTypeCImpl(ISS
& env
, const Op
& op
) {
2660 auto const t1
= popC(env
);
2661 if (!is_type_might_raise(op
.subop1
, t1
)) {
2666 switch (op
.subop1
) {
2667 case IsTypeOp::Scalar
: return push(env
, TBool
);
2668 case IsTypeOp::LegacyArrLike
: return push(env
, TBool
);
2669 case IsTypeOp::Obj
: return isTypeObj(env
, t1
);
2670 case IsTypeOp::Func
:
2671 return t1
.couldBe(TFunc
) ? push(env
, TBool
) : push(env
, TFalse
);
2672 default: return isTypeImpl(env
, t1
, type_of_istype(op
.subop1
));
2676 void in(ISS
& env
, const bc::IsTypeC
& op
) { isTypeCImpl(env
, op
); }
2677 void in(ISS
& env
, const bc::IsTypeL
& op
) { isTypeLImpl(env
, op
); }
2679 void in(ISS
& env
, const bc::InstanceOfD
& op
) {
2680 auto t1
= topC(env
);
2681 // Note: InstanceOfD can do autoload if the type might be a type
2682 // alias, so it's not nothrow unless we know it's an object type.
2683 if (auto const rcls
= env
.index
.resolve_class(env
.ctx
, op
.str1
)) {
2684 auto result
= [&] (const Type
& r
) {
2686 if (r
!= TBool
) constprop(env
);
2690 if (!interface_supports_non_objects(rcls
->name())) {
2691 auto const testTy
= subObj(*rcls
);
2692 if (t1
.subtypeOf(testTy
)) return result(TTrue
);
2693 if (!t1
.couldBe(testTy
)) return result(TFalse
);
2694 if (t1
.couldBe(BInitNull
) && !t1
.subtypeOf(BInitNull
)) {
2695 t1
= unopt(std::move(t1
));
2696 if (t1
.subtypeOf(testTy
)) {
2697 return reduce(env
, bc::IsTypeC
{ IsTypeOp::Null
}, bc::Not
{});
2700 return result(TBool
);
2707 void in(ISS
& env
, const bc::InstanceOf
& /*op*/) {
2708 auto const t1
= topC(env
);
2709 auto const v1
= tv(t1
);
2710 if (v1
&& v1
->m_type
== KindOfPersistentString
) {
2711 return reduce(env
, bc::PopC
{},
2712 bc::InstanceOfD
{ v1
->m_data
.pstr
});
2715 if (t1
.subtypeOf(BObj
) && is_specialized_obj(t1
)) {
2716 auto const dobj
= dobj_of(t1
);
2717 switch (dobj
.type
) {
2721 return reduce(env
, bc::PopC
{},
2722 bc::InstanceOfD
{ dobj
.cls
.name() });
2731 void in(ISS
& env
, const bc::IsLateBoundCls
& op
) {
2732 auto const cls
= env
.ctx
.cls
;
2733 if (cls
&& !(cls
->attrs
& AttrTrait
)) effect_free(env
);
2735 return push(env
, TBool
);
2740 bool isValidTypeOpForIsAs(const IsTypeOp
& op
) {
2742 case IsTypeOp::Null
:
2743 case IsTypeOp::Bool
:
2751 case IsTypeOp::Dict
:
2752 case IsTypeOp::Keyset
:
2753 case IsTypeOp::ArrLike
:
2754 case IsTypeOp::LegacyArrLike
:
2755 case IsTypeOp::Scalar
:
2756 case IsTypeOp::ClsMeth
:
2757 case IsTypeOp::Func
:
2758 case IsTypeOp::Class
:
2764 void isTypeStructImpl(ISS
& env
, SArray inputTS
) {
2765 auto const ts
= inputTS
;
2766 auto const t
= loosen_likeness(topC(env
, 1)); // operand to is/as
2768 bool may_raise
= true;
2769 auto result
= [&] (const Type
& out
) {
2770 popC(env
); // type structure
2771 popC(env
); // operand to is/as
2773 if (!may_raise
) nothrow(env
);
2774 return push(env
, out
);
2778 const Optional
<Type
> type
,
2779 const Optional
<Type
> deopt
= std::nullopt
2781 if (!type
|| is_type_might_raise(*type
, t
)) return result(TBool
);
2782 auto test
= type
.value();
2783 if (t
.subtypeOf(test
)) return result(TTrue
);
2784 if (!t
.couldBe(test
) && (!deopt
|| !t
.couldBe(deopt
.value()))) {
2785 return result(TFalse
);
2787 auto const op
= type_to_istypeop(test
);
2788 if (!op
|| !isValidTypeOpForIsAs(op
.value())) return result(TBool
);
2789 return reduce(env
, bc::PopC
{}, bc::IsTypeC
{ *op
});
2792 auto const is_nullable_ts
= is_ts_nullable(ts
);
2793 auto const is_definitely_null
= t
.subtypeOf(BNull
);
2794 auto const is_definitely_not_null
= !t
.couldBe(BNull
);
2796 if (is_nullable_ts
&& is_definitely_null
) return result(TTrue
);
2798 auto const ts_type
= type_of_type_structure(env
.index
, env
.ctx
, ts
);
2800 if (is_nullable_ts
&& !is_definitely_not_null
&& ts_type
== std::nullopt
) {
2801 // Ts is nullable and we know that t could be null but we dont know for sure
2802 // Also we didn't get a type out of the type structure
2803 return result(TBool
);
2806 if (ts_type
&& !is_type_might_raise(*ts_type
, t
)) may_raise
= false;
2807 switch (get_ts_kind(ts
)) {
2808 case TypeStructure::Kind::T_int
:
2809 case TypeStructure::Kind::T_bool
:
2810 case TypeStructure::Kind::T_float
:
2811 case TypeStructure::Kind::T_string
:
2812 case TypeStructure::Kind::T_num
:
2813 case TypeStructure::Kind::T_arraykey
:
2814 case TypeStructure::Kind::T_keyset
:
2815 case TypeStructure::Kind::T_void
:
2816 case TypeStructure::Kind::T_null
:
2817 return check(ts_type
);
2818 case TypeStructure::Kind::T_tuple
:
2819 return check(ts_type
, TVec
);
2820 case TypeStructure::Kind::T_shape
:
2821 return check(ts_type
, TDict
);
2822 case TypeStructure::Kind::T_dict
:
2823 return check(ts_type
);
2824 case TypeStructure::Kind::T_vec
:
2825 return check(ts_type
);
2826 case TypeStructure::Kind::T_nothing
:
2827 case TypeStructure::Kind::T_noreturn
:
2828 return result(TFalse
);
2829 case TypeStructure::Kind::T_mixed
:
2830 case TypeStructure::Kind::T_dynamic
:
2831 return result(TTrue
);
2832 case TypeStructure::Kind::T_nonnull
:
2833 if (is_definitely_null
) return result(TFalse
);
2834 if (is_definitely_not_null
) return result(TTrue
);
2837 bc::IsTypeC
{ IsTypeOp::Null
},
2839 case TypeStructure::Kind::T_class
:
2840 case TypeStructure::Kind::T_interface
:
2841 case TypeStructure::Kind::T_xhp
: {
2842 auto clsname
= get_ts_classname(ts
);
2843 auto const rcls
= env
.index
.resolve_class(env
.ctx
, clsname
);
2844 if (!rcls
|| !rcls
->resolved() || (ts
->exists(s_generic_types
) &&
2845 (rcls
->cls()->hasReifiedGenerics
||
2846 !isTSAllWildcards(ts
)))) {
2847 // If it is a reified class or has non wildcard generics,
2849 return result(TBool
);
2851 return reduce(env
, bc::PopC
{}, bc::InstanceOfD
{ clsname
});
2853 case TypeStructure::Kind::T_unresolved
: {
2854 auto classname
= get_ts_classname(ts
);
2855 auto const has_generics
= ts
->exists(s_generic_types
);
2856 if (!has_generics
&& classname
->isame(s_this
.get())) {
2857 return reduce(env
, bc::PopC
{}, bc::IsLateBoundCls
{});
2859 auto const rcls
= env
.index
.resolve_class(env
.ctx
, classname
);
2860 // We can only reduce to instance of if we know for sure that this class
2861 // can be resolved since instanceof undefined class does not throw
2862 if (!rcls
|| !rcls
->resolved() || rcls
->cls()->attrs
& AttrEnum
) {
2863 return result(TBool
);
2866 (rcls
->cls()->hasReifiedGenerics
|| !isTSAllWildcards(ts
))) {
2867 // If it is a reified class or has non wildcard generics,
2869 return result(TBool
);
2871 return reduce(env
, bc::PopC
{}, bc::InstanceOfD
{ rcls
->name() });
2873 case TypeStructure::Kind::T_enum
:
2874 case TypeStructure::Kind::T_resource
:
2875 case TypeStructure::Kind::T_vec_or_dict
:
2876 case TypeStructure::Kind::T_any_array
:
2877 // TODO(T29232862): implement
2878 return result(TBool
);
2879 case TypeStructure::Kind::T_typeaccess
:
2880 case TypeStructure::Kind::T_darray
:
2881 case TypeStructure::Kind::T_varray
:
2882 case TypeStructure::Kind::T_varray_or_darray
:
2883 case TypeStructure::Kind::T_reifiedtype
:
2884 return result(TBool
);
2885 case TypeStructure::Kind::T_fun
:
2886 case TypeStructure::Kind::T_typevar
:
2887 case TypeStructure::Kind::T_trait
:
2888 // We will error on these at the JIT
2889 return result(TBool
);
2895 const StaticString
s_hh_type_structure_no_throw("HH\\type_structure_no_throw");
2899 void in(ISS
& env
, const bc::IsTypeStructC
& op
) {
2900 if (!topC(env
).couldBe(BDict
)) {
2903 return unreachable(env
);
2905 auto const a
= tv(topC(env
));
2906 if (!a
|| !isValidTSType(*a
, false)) {
2909 return push(env
, TBool
);
2911 if (op
.subop1
== TypeStructResolveOp::Resolve
) {
2912 if (auto const ts
= resolve_type_structure(env
, a
->m_data
.parr
).sarray()) {
2917 bc::IsTypeStructC
{ TypeStructResolveOp::DontResolve
}
2920 if (auto const val
= get_ts_this_type_access(a
->m_data
.parr
)) {
2921 // Convert `$x is this::T` into
2922 // `$x is type_structure_no_throw(static::class, 'T')`
2923 // to take advantage of the caching that comes with the type_structure
2929 bc::LateBoundCls
{},
2931 bc::FCallFuncD
{FCallArgs(2), s_hh_type_structure_no_throw
.get()},
2932 bc::IsTypeStructC
{ TypeStructResolveOp::DontResolve
}
2936 isTypeStructImpl(env
, a
->m_data
.parr
);
2939 void in(ISS
& env
, const bc::ThrowAsTypeStructException
& op
) {
2945 void in(ISS
& env
, const bc::CombineAndResolveTypeStruct
& op
) {
2946 assertx(op
.arg1
> 0);
2948 auto const first
= tv(topC(env
));
2949 if (first
&& isValidTSType(*first
, false)) {
2950 auto const ts
= first
->m_data
.parr
;
2951 // Optimize single input that does not need any combination
2953 if (auto const r
= resolve_type_structure(env
, ts
).sarray()) {
2962 // Optimize double input that needs a single combination and looks of the
2963 // form ?T, @T or ~T
2964 if (op
.arg1
== 2 && get_ts_kind(ts
) == TypeStructure::Kind::T_reifiedtype
) {
2965 BytecodeVec instrs
{ bc::PopC
{} };
2966 auto const tv_true
= gen_constant(make_tv
<KindOfBoolean
>(true));
2967 if (ts
->exists(s_like
.get())) {
2968 instrs
.push_back(gen_constant(make_tv
<KindOfString
>(s_like
.get())));
2969 instrs
.push_back(tv_true
);
2970 instrs
.push_back(bc::AddElemC
{});
2972 if (ts
->exists(s_nullable
.get())) {
2973 instrs
.push_back(gen_constant(make_tv
<KindOfString
>(s_nullable
.get())));
2974 instrs
.push_back(tv_true
);
2975 instrs
.push_back(bc::AddElemC
{});
2977 if (ts
->exists(s_soft
.get())) {
2978 instrs
.push_back(gen_constant(make_tv
<KindOfString
>(s_soft
.get())));
2979 instrs
.push_back(tv_true
);
2980 instrs
.push_back(bc::AddElemC
{});
2982 return reduce(env
, std::move(instrs
));
2986 for (int i
= 0; i
< op
.arg1
; ++i
) {
2987 auto const t
= popC(env
);
2988 valid
&= t
.couldBe(BDict
);
2990 if (!valid
) return unreachable(env
);
2995 void in(ISS
& env
, const bc::RecordReifiedGeneric
& op
) {
2996 // TODO(T31677864): implement real optimizations
2997 auto const t
= popC(env
);
2998 if (!t
.couldBe(BVec
)) return unreachable(env
);
2999 if (t
.subtypeOf(BVec
)) nothrow(env
);
3003 void in(ISS
& env
, const bc::CheckReifiedGenericMismatch
& op
) {
3004 auto const location
= topStkEquiv(env
, 0);
3007 if (location
== NoLocalId
) return;
3008 auto const ok
= refineLocation(
3011 return get_type_of_reified_list(env
.ctx
.cls
->userAttributes
);
3014 if (!ok
) unreachable(env
);
3020 * If the value on the top of the stack is known to be equivalent to the local
3021 * its being moved/copied to, return std::nullopt without modifying any
3022 * state. Otherwise, pop the stack value, perform the set, and return a pair
3023 * giving the value's type, and any other local its known to be equivalent to.
3025 template <typename Set
>
3026 Optional
<std::pair
<Type
, LocalId
>> moveToLocImpl(ISS
& env
,
3028 if (auto const prev
= last_op(env
, 1)) {
3029 if (prev
->op
== Op::CGetL2
&&
3030 prev
->CGetL2
.nloc1
.id
== op
.loc1
&&
3031 last_op(env
)->op
== Op::Concat
) {
3033 reduce(env
, bc::SetOpL
{ op
.loc1
, SetOpOp::ConcatEqual
});
3034 return std::nullopt
;
3038 auto equivLoc
= topStkEquiv(env
);
3039 // If the local could be a Ref, don't record equality because the stack
3040 // element and the local won't actually have the same type.
3041 if (equivLoc
== StackThisId
&& env
.state
.thisLoc
!= NoLocalId
) {
3042 if (env
.state
.thisLoc
== op
.loc1
||
3043 locsAreEquiv(env
, env
.state
.thisLoc
, op
.loc1
)) {
3044 return std::nullopt
;
3046 equivLoc
= env
.state
.thisLoc
;
3049 if (!is_volatile_local(env
.ctx
.func
, op
.loc1
)) {
3050 if (equivLoc
<= MaxLocalId
) {
3051 if (equivLoc
== op
.loc1
||
3052 locsAreEquiv(env
, equivLoc
, op
.loc1
)) {
3053 // We allow equivalency to ignore Uninit, so we need to check
3055 if (peekLocRaw(env
, op
.loc1
) == topC(env
)) {
3056 return std::nullopt
;
3059 } else if (equivLoc
== NoLocalId
) {
3062 if (!any(env
.collect
.opts
& CollectionOpts::Speculating
)) {
3066 equivLoc
= NoLocalId
;
3069 auto val
= popC(env
);
3070 setLoc(env
, op
.loc1
, val
);
3071 if (equivLoc
== StackThisId
) {
3072 assertx(env
.state
.thisLoc
== NoLocalId
);
3073 equivLoc
= env
.state
.thisLoc
= op
.loc1
;
3075 if (equivLoc
== StackDupId
) {
3076 setStkLocal(env
, op
.loc1
);
3077 } else if (equivLoc
!= op
.loc1
&& equivLoc
!= NoLocalId
) {
3078 addLocEquiv(env
, op
.loc1
, equivLoc
);
3080 return { std::make_pair(std::move(val
), equivLoc
) };
3085 void in(ISS
& env
, const bc::PopL
& op
) {
3086 // If the same value is already in the local, do nothing but pop
3087 // it. Otherwise, the set has been done by moveToLocImpl.
3088 if (!moveToLocImpl(env
, op
)) return reduce(env
, bc::PopC
{});
3091 void in(ISS
& env
, const bc::SetL
& op
) {
3092 // If the same value is already in the local, do nothing because SetL keeps
3093 // the value on the stack. If it isn't, we need to push it back onto the stack
3094 // because moveToLocImpl popped it.
3095 if (auto p
= moveToLocImpl(env
, op
)) {
3096 push(env
, std::move(p
->first
), p
->second
);
3102 void in(ISS
& env
, const bc::SetG
&) {
3103 auto t1
= popC(env
);
3105 push(env
, std::move(t1
));
3108 void in(ISS
& env
, const bc::SetS
& op
) {
3109 auto const val
= popC(env
);
3110 auto const tcls
= popC(env
);
3111 auto const tname
= popC(env
);
3113 auto const throws
= [&] {
3115 return push(env
, TBottom
);
3118 if (!tcls
.couldBe(BCls
)) return throws();
3120 auto merge
= env
.index
.merge_static_type(
3122 env
.collect
.publicSPropMutations
,
3129 checkReadonlyOpThrows(ReadonlyOp::Readonly
, op
.subop1
)
3132 if (merge
.throws
== TriBool::Yes
|| merge
.adjusted
.subtypeOf(BBottom
)) {
3136 if (merge
.throws
== TriBool::No
&&
3137 tcls
.subtypeOf(BCls
) &&
3138 tname
.subtypeOf(BStr
)) {
3142 push(env
, std::move(merge
.adjusted
));
3145 void in(ISS
& env
, const bc::SetOpL
& op
) {
3146 auto const t1
= popC(env
);
3147 auto const loc
= locAsCell(env
, op
.loc1
);
3149 auto resultTy
= typeSetOp(op
.subop2
, loc
, t1
);
3150 setLoc(env
, op
.loc1
, resultTy
);
3151 push(env
, std::move(resultTy
));
3154 void in(ISS
& env
, const bc::SetOpG
&) {
3155 popC(env
); popC(env
);
3156 push(env
, TInitCell
);
3159 void in(ISS
& env
, const bc::SetOpS
& op
) {
3160 auto const rhs
= popC(env
);
3161 auto const tcls
= popC(env
);
3162 auto const tname
= popC(env
);
3164 auto const throws
= [&] {
3166 return push(env
, TBottom
);
3169 if (!tcls
.couldBe(BCls
)) return throws();
3171 auto const lookup
= env
.index
.lookup_static(
3178 if (lookup
.found
== TriBool::No
|| lookup
.ty
.subtypeOf(BBottom
)) {
3182 auto const newTy
= typeSetOp(op
.subop1
, lookup
.ty
, rhs
);
3183 if (newTy
.subtypeOf(BBottom
)) return throws();
3185 auto merge
= env
.index
.merge_static_type(
3187 env
.collect
.publicSPropMutations
,
3194 if (merge
.throws
== TriBool::Yes
|| merge
.adjusted
.subtypeOf(BBottom
)) {
3198 // NB: Unlike IncDecS, SetOpS pushes the post-TypeConstraint
3199 // adjustment value.
3200 push(env
, std::move(merge
.adjusted
));
3203 void in(ISS
& env
, const bc::IncDecL
& op
) {
3204 auto loc
= locAsCell(env
, op
.nloc1
.id
);
3205 auto newT
= typeIncDec(op
.subop2
, loc
);
3207 if (newT
.subtypeOf(BBottom
)) {
3209 return push(env
, TBottom
);
3212 if (!locCouldBeUninit(env
, op
.nloc1
.id
) && loc
.subtypeOf(BNum
)) nothrow(env
);
3214 auto const pre
= isPre(op
.subop2
);
3215 if (!pre
) push(env
, std::move(loc
));
3216 setLoc(env
, op
.nloc1
.id
, newT
);
3217 if (pre
) push(env
, std::move(newT
));
3220 void in(ISS
& env
, const bc::IncDecG
&) { popC(env
); push(env
, TInitCell
); }
3222 void in(ISS
& env
, const bc::IncDecS
& op
) {
3223 auto const tcls
= popC(env
);
3224 auto const tname
= popC(env
);
3225 auto const pre
= isPre(op
.subop1
);
3227 auto const throws
= [&] {
3229 return push(env
, TBottom
);
3232 if (!tcls
.couldBe(BCls
)) return throws();
3234 auto lookup
= env
.index
.lookup_static(
3241 if (lookup
.found
== TriBool::No
|| lookup
.ty
.subtypeOf(BBottom
)) {
3245 auto newTy
= typeIncDec(op
.subop1
, lookup
.ty
);
3246 if (newTy
.subtypeOf(BBottom
)) return throws();
3248 auto const merge
= env
.index
.merge_static_type(
3250 env
.collect
.publicSPropMutations
,
3257 if (merge
.throws
== TriBool::Yes
|| merge
.adjusted
.subtypeOf(BBottom
)) {
3261 if (lookup
.found
== TriBool::Yes
&&
3262 lookup
.lateInit
== TriBool::No
&&
3263 !lookup
.classInitMightRaise
&&
3264 merge
.throws
== TriBool::No
&&
3265 tcls
.subtypeOf(BCls
) &&
3266 tname
.subtypeOf(BStr
) &&
3267 lookup
.ty
.subtypeOf(BNum
)) {
3271 // NB: IncDecS pushes the value pre-TypeConstraint modification
3272 push(env
, pre
? std::move(newTy
) : std::move(lookup
.ty
));
3275 void in(ISS
& env
, const bc::UnsetL
& op
) {
3276 if (locRaw(env
, op
.loc1
).subtypeOf(TUninit
)) {
3280 if (auto const last
= last_op(env
)) {
3281 // No point in popping into the local if we're just going to
3282 // immediately unset it.
3283 if (last
->op
== Op::PopL
&&
3284 last
->PopL
.loc1
== op
.loc1
) {
3287 setLocRaw(env
, op
.loc1
, TCell
);
3288 return reduce(env
, bc::PopC
{}, bc::UnsetL
{ op
.loc1
});
3292 if (any(env
.collect
.opts
& CollectionOpts::Speculating
)) {
3297 setLocRaw(env
, op
.loc1
, TUninit
);
3300 void in(ISS
& env
, const bc::UnsetG
& /*op*/) {
3301 auto const t1
= popC(env
);
3302 if (!t1
.couldBe(BObj
| BRes
)) nothrow(env
);
3305 bool fcallCanSkipRepack(ISS
& env
, const FCallArgs
& fca
, const res::Func
& func
) {
3306 // Can't skip repack if potentially calling a function with too many args.
3307 if (fca
.numArgs() > func
.minNonVariadicParams()) return false;
3308 // Repack not needed if not unpacking and not having too many arguments.
3309 if (!fca
.hasUnpack()) return true;
3310 // Can't skip repack if unpack args are in a wrong position.
3311 if (fca
.numArgs() != func
.maxNonVariadicParams()) return false;
3313 // Repack not needed if unpack args have the correct type.
3314 auto const unpackArgs
= topC(env
, fca
.hasGenerics() ? 1 : 0);
3315 return unpackArgs
.subtypeOf(BVec
);
3318 bool coeffectRulesMatch(ISS
& env
,
3319 const FCallArgs
& fca
,
3320 const res::Func
& func
,
3321 uint32_t numExtraInputs
,
3322 const CoeffectRule
& caller
,
3323 const CoeffectRule
& callee
) {
3324 if (caller
.m_type
!= callee
.m_type
) return false;
3325 switch (caller
.m_type
) {
3326 case CoeffectRule::Type::CCThis
: {
3327 if (caller
.m_name
!= callee
.m_name
||
3328 caller
.m_types
!= callee
.m_types
) {
3331 if (!thisAvailable(env
)) return false;
3332 auto const loc
= topStkEquiv(env
, fca
.numInputs() + numExtraInputs
+ 1);
3333 return loc
== StackThisId
|| (loc
<= MaxLocalId
&& locIsThis(env
, loc
));
3335 case CoeffectRule::Type::CCParam
:
3336 if (caller
.m_name
!= callee
.m_name
) return false;
3338 case CoeffectRule::Type::FunParam
: {
3339 if (fca
.hasUnpack()) return false;
3340 if (fca
.numArgs() <= callee
.m_index
) return false;
3341 auto const l1
= caller
.m_index
;
3342 auto const l2
= topStkEquiv(env
, fca
.numInputs() - callee
.m_index
- 1);
3344 (l1
<= MaxLocalId
&&
3346 locsAreEquiv(env
, l1
, l2
));
3348 case CoeffectRule::Type::CCReified
:
3349 // TODO: optimize these
3351 case CoeffectRule::Type::ClosureParentScope
:
3352 case CoeffectRule::Type::GeneratorThis
:
3353 case CoeffectRule::Type::Caller
:
3354 case CoeffectRule::Type::Invalid
:
3360 bool fcallCanSkipCoeffectsCheck(ISS
& env
,
3361 const FCallArgs
& fca
,
3362 const res::Func
& func
,
3363 uint32_t numExtraInputs
) {
3364 auto const requiredCoeffectsOpt
= func
.requiredCoeffects();
3365 if (!requiredCoeffectsOpt
) return false;
3366 auto const required
= *requiredCoeffectsOpt
;
3367 auto const provided
=
3368 RuntimeCoeffects::fromValue(env
.ctx
.func
->requiredCoeffects
.value() |
3369 env
.ctx
.func
->coeffectEscapes
.value());
3370 if (!provided
.canCall(required
)) return false;
3371 auto const calleeRules
= func
.coeffectRules();
3372 // If we couldn't tell whether callee has rules or not, punt.
3373 if (!calleeRules
) return false;
3374 if (calleeRules
->empty()) return true;
3375 if (calleeRules
->size() == 1 && (*calleeRules
)[0].isCaller()) return true;
3376 auto const callerRules
= env
.ctx
.func
->coeffectRules
;
3377 return std::is_permutation(callerRules
.begin(), callerRules
.end(),
3378 calleeRules
->begin(), calleeRules
->end(),
3379 [&] (const CoeffectRule
& a
,
3380 const CoeffectRule
& b
) {
3381 return coeffectRulesMatch(env
, fca
, func
,
3387 template<typename FCallWithFCA
>
3388 bool fcallOptimizeChecks(
3390 const FCallArgs
& fca
,
3391 const res::Func
& func
,
3392 FCallWithFCA fcallWithFCA
,
3393 Optional
<uint32_t> inOutNum
,
3395 uint32_t numExtraInputs
3397 // Don't optimize away in-out checks if we might use the null safe
3398 // operator. If we do so, we need the in-out bits to shuffle the
3400 if (!maybeNullsafe
&& fca
.enforceInOut()) {
3401 if (inOutNum
== fca
.numRets() - 1) {
3403 for (auto i
= 0; i
< fca
.numArgs(); ++i
) {
3404 auto const kind
= env
.index
.lookup_param_prep(env
.ctx
, func
, i
);
3405 if (kind
.inOut
== TriBool::Maybe
) {
3410 if (yesOrNo(fca
.isInOut(i
)) != kind
.inOut
) {
3411 // The function/method may not exist, in which case we should raise a
3412 // different error. Just defer the checks to the runtime.
3413 if (!func
.exactFunc()) return false;
3416 auto const exCls
= makeStaticString("InvalidArgumentException");
3417 auto const err
= makeStaticString(formatParamInOutMismatch(
3418 func
.name()->data(), i
, !fca
.isInOut(i
)));
3422 bc::NewObjD
{ exCls
},
3426 bc::FCallCtor
{ FCallArgs(1), staticEmptyString() },
3436 // Optimize away the runtime inout-ness check.
3437 reduce(env
, fcallWithFCA(fca
.withoutInOut()));
3443 if (fca
.enforceReadonly()) {
3445 for (auto i
= 0; i
< fca
.numArgs(); ++i
) {
3446 if (!fca
.isReadonly(i
)) continue;
3447 auto const kind
= env
.index
.lookup_param_prep(env
.ctx
, func
, i
);
3448 if (kind
.readonly
== TriBool::Maybe
) {
3453 if (kind
.readonly
!= TriBool::Yes
) {
3454 // The function/method may not exist, in which case we should raise a
3455 // different error. Just defer the checks to the runtime.
3456 if (!func
.exactFunc()) return false;
3463 // Optimize away the runtime readonly-ness check.
3464 reduce(env
, fcallWithFCA(fca
.withoutReadonly()));
3469 if (fca
.enforceMutableReturn()) {
3470 if (env
.index
.lookup_return_readonly(env
.ctx
, func
) == TriBool::No
) {
3471 reduce(env
, fcallWithFCA(fca
.withoutEnforceMutableReturn()));
3476 if (fca
.enforceReadonlyThis()) {
3477 if (env
.index
.lookup_readonly_this(env
.ctx
, func
) == TriBool::Yes
) {
3478 reduce(env
, fcallWithFCA(fca
.withoutEnforceReadonlyThis()));
3483 // Infer whether the callee supports async eager return.
3484 if (fca
.asyncEagerTarget() != NoBlockId
) {
3485 auto const status
= env
.index
.supports_async_eager_return(func
);
3486 if (status
&& !*status
) {
3487 reduce(env
, fcallWithFCA(fca
.withoutAsyncEagerTarget()));
3492 if (!fca
.skipRepack() && fcallCanSkipRepack(env
, fca
, func
)) {
3493 reduce(env
, fcallWithFCA(fca
.withoutRepack()));
3497 if (!fca
.skipCoeffectsCheck() &&
3498 fcallCanSkipCoeffectsCheck(env
, fca
, func
, numExtraInputs
)) {
3499 reduce(env
, fcallWithFCA(fca
.withoutCoeffectsCheck()));
3508 const FCallArgs
& fca
,
3509 const res::Func
& func
,
3512 uint32_t numExtraInputs
3514 auto const foldableFunc
= func
.exactFunc();
3515 if (!foldableFunc
) return false;
3516 if (!shouldAttemptToFold(env
, foldableFunc
, fca
, context
, maybeDynamic
)) {
3520 assertx(!fca
.hasUnpack() && !fca
.hasGenerics() && fca
.numRets() == 1);
3521 assertx(options
.ConstantFoldBuiltins
);
3523 auto const finish
= [&] (Type ty
) {
3524 auto const v
= tv(ty
);
3525 if (!v
) return false;
3527 for (uint32_t i
= 0; i
< numExtraInputs
; ++i
) repl
.push_back(bc::PopC
{});
3528 for (uint32_t i
= 0; i
< fca
.numArgs(); ++i
) repl
.push_back(bc::PopC
{});
3529 repl
.push_back(bc::PopU
{});
3530 if (topT(env
, fca
.numArgs() + 1 + numExtraInputs
).subtypeOf(TInitCell
)) {
3531 repl
.push_back(bc::PopC
{});
3533 assertx(topT(env
, fca
.numArgs() + 1 + numExtraInputs
).subtypeOf(TUninit
));
3534 repl
.push_back(bc::PopU
{});
3536 repl
.push_back(gen_constant(*v
));
3537 reduce(env
, std::move(repl
));
3541 if (foldableFunc
->attrs
& AttrBuiltin
&&
3542 foldableFunc
->attrs
& AttrIsFoldable
) {
3543 auto ret
= const_fold(env
, fca
.numArgs(), numExtraInputs
, *foldableFunc
,
3545 if (!ret
) return false;
3546 return finish(std::move(*ret
));
3549 CompactVector
<Type
> args(fca
.numArgs());
3550 auto const firstArgPos
= numExtraInputs
+ fca
.numInputs() - 1;
3551 for (auto i
= uint32_t{0}; i
< fca
.numArgs(); ++i
) {
3552 auto const& arg
= topT(env
, firstArgPos
- i
);
3553 auto const isScalar
= is_scalar(arg
);
3555 (env
.index
.func_depends_on_arg(foldableFunc
, i
) ||
3556 !arg
.subtypeOf(BInitCell
))) {
3559 args
[i
] = isScalar
? scalarize(arg
) : arg
;
3562 auto calleeCtx
= CallContext
{
3567 if (env
.collect
.unfoldableFuncs
.count(calleeCtx
)) return false;
3569 if (finish(env
.index
.lookup_foldable_return_type(env
.ctx
, calleeCtx
))) {
3572 env
.collect
.unfoldableFuncs
.emplace(std::move(calleeCtx
));
3576 Type
typeFromWH(Type t
) {
3577 if (!t
.couldBe(BObj
)) {
3578 // Exceptions will be thrown if a non-object is awaited.
3582 // Throw away non-obj component.
3585 // If we aren't even sure this is a wait handle, there's nothing we can
3587 if (!is_specialized_wait_handle(t
)) {
3591 return wait_handle_inner(t
);
3594 void pushCallReturnType(ISS
& env
,
3596 const FCallArgs
& fca
,
3598 std::vector
<Type
> inOuts
) {
3599 auto const numRets
= fca
.numRets();
3601 assertx(fca
.asyncEagerTarget() == NoBlockId
);
3602 assertx(IMPLIES(nullsafe
, inOuts
.size() == numRets
- 1));
3604 for (auto i
= uint32_t{0}; i
< numRets
- 1; ++i
) popU(env
);
3605 if (!ty
.couldBe(BVecN
)) {
3606 // Function cannot have an in-out args match, so call will
3609 for (int32_t i
= 0; i
< numRets
; i
++) push(env
, TBottom
);
3610 return unreachable(env
);
3612 // We'll only hit the nullsafe null case, so the outputs are the
3614 for (auto& t
: inOuts
) push(env
, std::move(t
));
3615 push(env
, TInitNull
);
3619 // If we might use the nullsafe operator, we need to union in the
3620 // null case (which means the inout args are unchanged).
3621 if (is_specialized_array_like(ty
)) {
3622 for (int32_t i
= 1; i
< numRets
; i
++) {
3623 auto elem
= array_like_elem(ty
, ival(i
)).first
;
3624 if (nullsafe
) elem
|= inOuts
[i
-1];
3625 push(env
, std::move(elem
));
3630 ? opt(array_like_elem(ty
, ival(0)).first
)
3631 : array_like_elem(ty
, ival(0)).first
3634 for (int32_t i
= 0; i
< numRets
; ++i
) push(env
, TInitCell
);
3638 if (fca
.asyncEagerTarget() != NoBlockId
) {
3639 assertx(!ty
.is(BBottom
));
3640 push(env
, typeFromWH(ty
));
3641 assertx(!topC(env
).subtypeOf(BBottom
));
3642 env
.propagate(fca
.asyncEagerTarget(), &env
.state
);
3645 if (nullsafe
) ty
= opt(std::move(ty
));
3646 if (ty
.is(BBottom
)) {
3647 // The callee function never returns. It might throw, or loop
3650 return unreachable(env
);
3652 return push(env
, std::move(ty
));
3655 const StaticString s_defined
{ "defined" };
3656 const StaticString s_function_exists
{ "function_exists" };
3658 template<typename FCallWithFCA
>
3659 void fcallKnownImpl(
3661 const FCallArgs
& fca
,
3662 const res::Func
& func
,
3665 uint32_t numExtraInputs
,
3666 FCallWithFCA fcallWithFCA
,
3667 Optional
<uint32_t> inOutNum
3669 auto const numArgs
= fca
.numArgs();
3670 auto returnType
= [&] {
3671 CompactVector
<Type
> args(numArgs
);
3672 auto const firstArgPos
= numExtraInputs
+ fca
.numInputs() - 1;
3673 for (auto i
= uint32_t{0}; i
< numArgs
; ++i
) {
3674 args
[i
] = topCV(env
, firstArgPos
- i
);
3677 return fca
.hasUnpack()
3678 ? env
.index
.lookup_return_type(env
.ctx
, &env
.collect
.methods
, func
)
3679 : env
.index
.lookup_return_type(
3680 env
.ctx
, &env
.collect
.methods
, args
, context
, func
3684 // If there's a caller/callee inout mismatch, then the call will
3686 if (fca
.enforceInOut()) {
3687 if (inOutNum
&& (*inOutNum
+ 1 != fca
.numRets())) {
3688 returnType
= TBottom
;
3692 if (fca
.asyncEagerTarget() != NoBlockId
&& typeFromWH(returnType
) == TBottom
) {
3693 // Kill the async eager target if the function never returns.
3694 reduce(env
, fcallWithFCA(std::move(fca
.withoutAsyncEagerTarget())));
3698 if (func
.name()->isame(s_function_exists
.get()) &&
3699 (numArgs
== 1 || numArgs
== 2) &&
3700 !fca
.hasUnpack() && !fca
.hasGenerics()) {
3701 handle_function_exists(env
, topT(env
, numExtraInputs
+ numArgs
- 1));
3704 for (auto i
= uint32_t{0}; i
< numExtraInputs
; ++i
) popC(env
);
3705 if (fca
.hasGenerics()) popC(env
);
3706 if (fca
.hasUnpack()) popC(env
);
3707 std::vector
<Type
> inOuts
;
3708 for (auto i
= uint32_t{0}; i
< numArgs
; ++i
) {
3709 if (nullsafe
&& fca
.isInOut(numArgs
- i
- 1)) {
3710 inOuts
.emplace_back(popCV(env
));
3717 pushCallReturnType(env
, std::move(returnType
),
3718 fca
, nullsafe
, std::move(inOuts
));
3721 void fcallUnknownImpl(ISS
& env
,
3722 const FCallArgs
& fca
,
3723 const Type
& retTy
= TInitCell
) {
3724 if (fca
.hasGenerics()) popC(env
);
3725 if (fca
.hasUnpack()) popC(env
);
3726 auto const numArgs
= fca
.numArgs();
3727 auto const numRets
= fca
.numRets();
3728 for (auto i
= uint32_t{0}; i
< numArgs
; ++i
) popCV(env
);
3731 if (fca
.asyncEagerTarget() != NoBlockId
) {
3732 assertx(numRets
== 1);
3733 assertx(!retTy
.is(BBottom
));
3735 env
.propagate(fca
.asyncEagerTarget(), &env
.state
);
3738 for (auto i
= uint32_t{0}; i
< numRets
- 1; ++i
) popU(env
);
3739 for (auto i
= uint32_t{0}; i
< numRets
; ++i
) push(env
, retTy
);
3742 void in(ISS
& env
, const bc::FCallFuncD
& op
) {
3743 auto const rfunc
= env
.index
.resolve_func(env
.ctx
, op
.str2
);
3745 if (op
.fca
.hasGenerics()) {
3746 auto const tsList
= topC(env
);
3747 if (!tsList
.couldBe(BVec
)) {
3748 return unreachable(env
);
3751 if (!rfunc
.couldHaveReifiedGenerics()) {
3755 bc::FCallFuncD
{ op
.fca
.withoutGenerics(), op
.str2
}
3760 auto const updateBC
= [&] (FCallArgs fca
) {
3761 return bc::FCallFuncD
{ std::move(fca
), op
.str2
};
3764 auto const numInOut
= op
.fca
.enforceInOut()
3765 ? env
.index
.lookup_num_inout_params(env
.ctx
, rfunc
)
3768 if (fcallOptimizeChecks(env
, op
.fca
, rfunc
, updateBC
, numInOut
, false, 0) ||
3769 fcallTryFold(env
, op
.fca
, rfunc
, TBottom
, false, 0)) {
3773 if (auto const func
= rfunc
.exactFunc()) {
3774 if (optimize_builtin(env
, func
, op
.fca
)) return;
3777 fcallKnownImpl(env
, op
.fca
, rfunc
, TBottom
, false, 0, updateBC
, numInOut
);
3782 void fcallFuncUnknown(ISS
& env
, const bc::FCallFunc
& op
) {
3784 fcallUnknownImpl(env
, op
.fca
);
3787 void fcallFuncClsMeth(ISS
& env
, const bc::FCallFunc
& op
) {
3788 assertx(topC(env
).subtypeOf(BClsMeth
));
3790 // TODO: optimize me
3791 fcallFuncUnknown(env
, op
);
3794 void fcallFuncFunc(ISS
& env
, const bc::FCallFunc
& op
) {
3795 assertx(topC(env
).subtypeOf(BFunc
));
3797 // TODO: optimize me
3798 fcallFuncUnknown(env
, op
);
3801 void fcallFuncObj(ISS
& env
, const bc::FCallFunc
& op
) {
3802 assertx(topC(env
).subtypeOf(BObj
));
3804 // TODO: optimize me
3805 fcallFuncUnknown(env
, op
);
3808 void fcallFuncStr(ISS
& env
, const bc::FCallFunc
& op
) {
3809 assertx(topC(env
).subtypeOf(BStr
));
3810 auto funcName
= getNameFromType(topC(env
));
3811 if (!funcName
) return fcallFuncUnknown(env
, op
);
3813 funcName
= normalizeNS(funcName
);
3814 if (!isNSNormalized(funcName
) || !notClassMethodPair(funcName
)) {
3815 return fcallFuncUnknown(env
, op
);
3818 auto const rfunc
= env
.index
.resolve_func(env
.ctx
, funcName
);
3819 if (!rfunc
.mightCareAboutDynCalls()) {
3820 return reduce(env
, bc::PopC
{}, bc::FCallFuncD
{ op
.fca
, funcName
});
3823 auto const updateBC
= [&] (FCallArgs fca
) {
3824 return bc::FCallFunc
{ std::move(fca
) };
3827 auto const numInOut
= op
.fca
.enforceInOut()
3828 ? env
.index
.lookup_num_inout_params(env
.ctx
, rfunc
)
3831 if (fcallOptimizeChecks(env
, op
.fca
, rfunc
, updateBC
, numInOut
, false, 0)) {
3834 fcallKnownImpl(env
, op
.fca
, rfunc
, TBottom
, false, 1, updateBC
, numInOut
);
3839 void in(ISS
& env
, const bc::FCallFunc
& op
) {
3840 auto const callable
= topC(env
);
3841 if (callable
.subtypeOf(BFunc
)) return fcallFuncFunc(env
, op
);
3842 if (callable
.subtypeOf(BClsMeth
)) return fcallFuncClsMeth(env
, op
);
3843 if (callable
.subtypeOf(BObj
)) return fcallFuncObj(env
, op
);
3844 if (callable
.subtypeOf(BStr
)) return fcallFuncStr(env
, op
);
3845 fcallFuncUnknown(env
, op
);
3848 void in(ISS
& env
, const bc::ResolveFunc
& op
) {
3852 void in(ISS
& env
, const bc::ResolveMethCaller
& op
) {
3857 void in(ISS
& env
, const bc::ResolveRFunc
& op
) {
3859 push(env
, union_of(TFunc
, TRFunc
));
3864 Type
ctxCls(ISS
& env
) {
3865 auto const s
= selfCls(env
);
3866 return setctx(s
? *s
: TCls
);
3869 Type
specialClsRefToCls(ISS
& env
, SpecialClsRef ref
) {
3870 if (!env
.ctx
.cls
) return TCls
;
3871 auto const op
= [&]()-> Optional
<Type
> {
3873 case SpecialClsRef::Static
: return ctxCls(env
);
3874 case SpecialClsRef::Self
: return selfClsExact(env
);
3875 case SpecialClsRef::Parent
: return parentClsExact(env
);
3877 always_assert(false);
3879 return op
? *op
: TCls
;
3882 template<bool reifiedVersion
= false>
3883 void resolveClsMethodSImpl(ISS
& env
, SpecialClsRef ref
, LSString meth_name
) {
3884 auto const clsTy
= specialClsRefToCls(env
, ref
);
3885 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, meth_name
);
3886 if (is_specialized_cls(clsTy
) && dcls_of(clsTy
).type
== DCls::Exact
&&
3887 !rfunc
.couldHaveReifiedGenerics()) {
3888 auto const clsName
= dcls_of(clsTy
).cls
.name();
3889 return reduce(env
, bc::ResolveClsMethodD
{ clsName
, meth_name
});
3891 if (reifiedVersion
) popC(env
);
3892 if (!reifiedVersion
|| !rfunc
.couldHaveReifiedGenerics()) {
3893 push(env
, TClsMeth
);
3895 push(env
, union_of(TClsMeth
, TRClsMeth
));
3901 void in(ISS
& env
, const bc::ResolveClsMethod
& op
) {
3903 push(env
, TClsMeth
);
3906 void in(ISS
& env
, const bc::ResolveClsMethodD
& op
) {
3907 push(env
, TClsMeth
);
3910 void in(ISS
& env
, const bc::ResolveClsMethodS
& op
) {
3911 resolveClsMethodSImpl
<false>(env
, op
.subop1
, op
.str2
);
3914 void in(ISS
& env
, const bc::ResolveRClsMethod
&) {
3917 push(env
, union_of(TClsMeth
, TRClsMeth
));
3920 void in(ISS
& env
, const bc::ResolveRClsMethodD
&) {
3922 push(env
, union_of(TClsMeth
, TRClsMeth
));
3925 void in(ISS
& env
, const bc::ResolveRClsMethodS
& op
) {
3926 resolveClsMethodSImpl
<true>(env
, op
.subop1
, op
.str2
);
3929 void in(ISS
& env
, const bc::ResolveClass
& op
) {
3931 auto cls
= env
.index
.resolve_class(env
.ctx
, op
.str1
);
3932 if (cls
&& cls
->resolved()) {
3933 push(env
, clsExact(*cls
));
3935 // If the class is not resolved,
3936 // it might not be unique or it might not be a valid classname.
3937 push(env
, union_of(TArrKey
, TCls
, TLazyCls
));
3941 void in(ISS
& env
, const bc::LazyClass
& op
) {
3943 push(env
, lazyclsval(op
.str1
));
3949 s_DynamicContextOverrideUnsafe("__SystemLib\\DynamicContextOverrideUnsafe");
3951 bool isBadContext(const FCallArgs
& fca
) {
3952 return fca
.context() &&
3953 fca
.context()->isame(s_DynamicContextOverrideUnsafe
.get());
3956 Context
getCallContext(const ISS
& env
, const FCallArgs
& fca
) {
3957 if (auto const name
= fca
.context()) {
3958 auto const rcls
= env
.index
.resolve_class(env
.ctx
, name
);
3959 if (rcls
&& rcls
->cls()) {
3960 return Context
{ env
.ctx
.unit
, env
.ctx
.func
, rcls
->cls() };
3962 return Context
{ env
.ctx
.unit
, env
.ctx
.func
, nullptr };
3967 void fcallObjMethodNullsafeNoFold(ISS
& env
,
3968 const FCallArgs
& fca
,
3970 assertx(fca
.asyncEagerTarget() == NoBlockId
);
3971 if (extraInput
) popC(env
);
3972 if (fca
.hasGenerics()) popC(env
);
3973 if (fca
.hasUnpack()) popC(env
);
3974 auto const numArgs
= fca
.numArgs();
3975 auto const numRets
= fca
.numRets();
3976 std::vector
<Type
> inOuts
;
3977 for (auto i
= uint32_t{0}; i
< numArgs
; ++i
) {
3978 if (fca
.enforceInOut() && fca
.isInOut(numArgs
- i
- 1)) {
3979 inOuts
.emplace_back(popCV(env
));
3986 for (auto i
= uint32_t{0}; i
< numRets
- 1; ++i
) popU(env
);
3987 assertx(inOuts
.size() == numRets
- 1);
3988 for (auto& t
: inOuts
) push(env
, std::move(t
));
3989 push(env
, TInitNull
);
3992 void fcallObjMethodNullsafe(ISS
& env
, const FCallArgs
& fca
, bool extraInput
) {
3993 // Don't fold if there's inout arguments. We could, in principal,
3994 // fold away the inout case like we do below, but we don't have the
3995 // bytecodes necessary to shuffle the stack.
3996 if (fca
.enforceInOut()) {
3997 for (uint32_t i
= 0; i
< fca
.numArgs(); ++i
) {
3998 if (fca
.isInOut(i
)) {
3999 return fcallObjMethodNullsafeNoFold(env
, fca
, extraInput
);
4005 if (extraInput
) repl
.push_back(bc::PopC
{});
4006 if (fca
.hasGenerics()) repl
.push_back(bc::PopC
{});
4007 if (fca
.hasUnpack()) repl
.push_back(bc::PopC
{});
4009 auto const numArgs
= fca
.numArgs();
4010 for (uint32_t i
= 0; i
< numArgs
; ++i
) {
4011 assertx(topC(env
, repl
.size()).subtypeOf(BInitCell
));
4012 repl
.push_back(bc::PopC
{});
4014 repl
.push_back(bc::PopU
{});
4015 repl
.push_back(bc::PopC
{});
4016 assertx(fca
.numRets() == 1);
4017 repl
.push_back(bc::Null
{});
4019 reduce(env
, std::move(repl
));
4022 template <typename Op
, class UpdateBC
>
4023 void fcallObjMethodImpl(ISS
& env
, const Op
& op
, SString methName
, bool dynamic
,
4024 bool extraInput
, UpdateBC updateBC
) {
4025 auto const nullThrows
= op
.subop3
== ObjMethodOp::NullThrows
;
4026 auto const inputPos
= op
.fca
.numInputs() + (extraInput
? 2 : 1);
4027 auto const input
= topC(env
, inputPos
);
4028 auto const location
= topStkEquiv(env
, inputPos
);
4029 auto const mayCallMethod
= input
.couldBe(BObj
);
4030 auto const mayUseNullsafe
= !nullThrows
&& input
.couldBe(BNull
);
4031 auto const mayThrowNonObj
= !input
.subtypeOf(nullThrows
? BObj
: BOptObj
);
4033 auto const refineLoc
= [&] {
4034 if (location
== NoLocalId
) return;
4035 if (!refineLocation(env
, location
, [&] (Type t
) {
4036 if (nullThrows
) return intersection_of(t
, TObj
);
4037 if (!t
.couldBe(BUninit
)) return intersection_of(t
, TOptObj
);
4038 if (!t
.couldBe(BObj
)) return intersection_of(t
, TNull
);
4045 auto const throws
= [&] {
4046 if (op
.fca
.asyncEagerTarget() != NoBlockId
) {
4047 // Kill the async eager target if the function never returns.
4048 return reduce(env
, updateBC(op
.fca
.withoutAsyncEagerTarget()));
4050 if (extraInput
) popC(env
);
4051 fcallUnknownImpl(env
, op
.fca
, TBottom
);
4055 if (!mayCallMethod
&& !mayUseNullsafe
) {
4056 // This FCallObjMethodD may only throw
4060 if (!mayCallMethod
&& !mayThrowNonObj
) {
4061 // Null input, this may only return null, so do that.
4062 return fcallObjMethodNullsafe(env
, op
.fca
, extraInput
);
4065 if (!mayCallMethod
) {
4066 // May only return null, but can't fold as we may still throw.
4067 assertx(mayUseNullsafe
&& mayThrowNonObj
);
4068 if (op
.fca
.asyncEagerTarget() != NoBlockId
) {
4069 return reduce(env
, updateBC(op
.fca
.withoutAsyncEagerTarget()));
4071 return fcallObjMethodNullsafeNoFold(env
, op
.fca
, extraInput
);
4074 if (isBadContext(op
.fca
)) return throws();
4076 auto const ctx
= getCallContext(env
, op
.fca
);
4077 auto const ctxTy
= input
.couldBe(BObj
)
4078 ? intersection_of(input
, TObj
)
4080 auto const clsTy
= objcls(ctxTy
);
4081 auto const rfunc
= env
.index
.resolve_method(ctx
, clsTy
, methName
);
4083 auto const numInOut
= op
.fca
.enforceInOut()
4084 ? env
.index
.lookup_num_inout_params(env
.ctx
, rfunc
)
4087 auto const canFold
= !mayUseNullsafe
&& !mayThrowNonObj
;
4088 auto const numExtraInputs
= extraInput
? 1 : 0;
4089 if (fcallOptimizeChecks(env
, op
.fca
, rfunc
, updateBC
,
4090 numInOut
, mayUseNullsafe
, numExtraInputs
) ||
4091 (canFold
&& fcallTryFold(env
, op
.fca
, rfunc
, ctxTy
, dynamic
,
4096 if (rfunc
.exactFunc() && op
.str2
->empty()) {
4097 return reduce(env
, updateBC(op
.fca
, rfunc
.exactFunc()->cls
->name
));
4100 fcallKnownImpl(env
, op
.fca
, rfunc
, ctxTy
, mayUseNullsafe
, extraInput
? 1 : 0,
4101 updateBC
, numInOut
);
4107 void in(ISS
& env
, const bc::FCallObjMethodD
& op
) {
4108 if (op
.fca
.hasGenerics()) {
4109 auto const tsList
= topC(env
);
4110 if (!tsList
.couldBe(BVec
)) {
4111 return unreachable(env
);
4114 auto const input
= topC(env
, op
.fca
.numInputs() + 1);
4115 auto const clsTy
= input
.couldBe(BObj
)
4116 ? objcls(intersection_of(input
, TObj
))
4118 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, op
.str4
);
4119 if (!rfunc
.couldHaveReifiedGenerics()) {
4123 bc::FCallObjMethodD
{
4124 op
.fca
.withoutGenerics(), op
.str2
, op
.subop3
, op
.str4
}
4129 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4130 if (!clsHint
) clsHint
= op
.str2
;
4131 return bc::FCallObjMethodD
{ std::move(fca
), clsHint
, op
.subop3
, op
.str4
};
4133 fcallObjMethodImpl(env
, op
, op
.str4
, false, false, updateBC
);
4136 void in(ISS
& env
, const bc::FCallObjMethod
& op
) {
4137 auto const methName
= getNameFromType(topC(env
));
4140 fcallUnknownImpl(env
, op
.fca
);
4144 auto const input
= topC(env
, op
.fca
.numInputs() + 2);
4145 auto const clsTy
= input
.couldBe(BObj
)
4146 ? objcls(intersection_of(input
, TObj
))
4148 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, methName
);
4149 if (!rfunc
.mightCareAboutDynCalls()) {
4153 bc::FCallObjMethodD
{ op
.fca
, op
.str2
, op
.subop3
, methName
}
4157 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4158 if (!clsHint
) clsHint
= op
.str2
;
4159 return bc::FCallObjMethod
{ std::move(fca
), clsHint
, op
.subop3
};
4161 fcallObjMethodImpl(env
, op
, methName
, true, true, updateBC
);
4166 template <typename Op
, class UpdateBC
>
4167 void fcallClsMethodImpl(ISS
& env
, const Op
& op
, Type clsTy
, SString methName
,
4168 bool dynamic
, uint32_t numExtraInputs
,
4169 UpdateBC updateBC
) {
4170 if (isBadContext(op
.fca
)) {
4171 for (auto i
= uint32_t{0}; i
< numExtraInputs
; ++i
) popC(env
);
4172 fcallUnknownImpl(env
, op
.fca
);
4177 auto const ctx
= getCallContext(env
, op
.fca
);
4178 auto const rfunc
= env
.index
.resolve_method(ctx
, clsTy
, methName
);
4180 auto const numInOut
= op
.fca
.enforceInOut()
4181 ? env
.index
.lookup_num_inout_params(env
.ctx
, rfunc
)
4184 if (fcallOptimizeChecks(env
, op
.fca
, rfunc
, updateBC
, numInOut
, false,
4186 fcallTryFold(env
, op
.fca
, rfunc
, clsTy
, dynamic
, numExtraInputs
)) {
4190 if (rfunc
.exactFunc() && op
.str2
->empty()) {
4191 return reduce(env
, updateBC(op
.fca
, rfunc
.exactFunc()->cls
->name
));
4194 fcallKnownImpl(env
, op
.fca
, rfunc
, clsTy
, false /* nullsafe */,
4195 numExtraInputs
, updateBC
, numInOut
);
4200 void in(ISS
& env
, const bc::FCallClsMethodD
& op
) {
4201 auto const rcls
= env
.index
.resolve_class(env
.ctx
, op
.str3
);
4202 auto const clsTy
= rcls
? clsExact(*rcls
) : TCls
;
4203 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, op
.str4
);
4205 if (op
.fca
.hasGenerics() && !rfunc
.couldHaveReifiedGenerics()) {
4209 bc::FCallClsMethodD
{
4210 op
.fca
.withoutGenerics(), op
.str2
, op
.str3
, op
.str4
}
4214 if (auto const func
= rfunc
.exactFunc()) {
4215 assertx(func
->cls
!= nullptr);
4216 if (func
->cls
->name
->same(op
.str3
) &&
4217 optimize_builtin(env
, func
, op
.fca
)) {
4218 // When we use FCallBuiltin to call a static method, the litstr method
4219 // name will be a fully qualified cls::fn (e.g. "HH\Map::fromItems").
4221 // As a result, we can only do this optimization if the name of the
4222 // builtin function's class matches this op's class name immediate.
4227 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4228 if (!clsHint
) clsHint
= op
.str2
;
4229 return bc::FCallClsMethodD
{ std::move(fca
), clsHint
, op
.str3
, op
.str4
};
4231 fcallClsMethodImpl(env
, op
, clsTy
, op
.str4
, false, 0, updateBC
);
4234 void in(ISS
& env
, const bc::FCallClsMethod
& op
) {
4235 auto const methName
= getNameFromType(topC(env
, 1));
4239 fcallUnknownImpl(env
, op
.fca
);
4243 auto const clsTy
= topC(env
);
4244 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, methName
);
4245 auto const skipLogAsDynamicCall
=
4246 !RuntimeOption::EvalLogKnownMethodsAsDynamicCalls
&&
4247 op
.subop3
== IsLogAsDynamicCallOp::DontLogAsDynamicCall
;
4248 if (is_specialized_cls(clsTy
) && dcls_of(clsTy
).type
== DCls::Exact
&&
4249 (!rfunc
.mightCareAboutDynCalls() || skipLogAsDynamicCall
)) {
4250 auto const clsName
= dcls_of(clsTy
).cls
.name();
4255 bc::FCallClsMethodD
{ op
.fca
, op
.str2
, clsName
, methName
}
4259 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4260 if (!clsHint
) clsHint
= op
.str2
;
4261 return bc::FCallClsMethod
{ std::move(fca
), clsHint
, op
.subop3
};
4263 fcallClsMethodImpl(env
, op
, clsTy
, methName
, true, 2, updateBC
);
4268 template <typename Op
, class UpdateBC
>
4269 void fcallClsMethodSImpl(ISS
& env
, const Op
& op
, SString methName
, bool dynamic
,
4270 bool extraInput
, UpdateBC updateBC
) {
4271 auto const clsTy
= specialClsRefToCls(env
, op
.subop3
);
4272 if (is_specialized_cls(clsTy
) && dcls_of(clsTy
).type
== DCls::Exact
&&
4273 !dynamic
&& op
.subop3
== SpecialClsRef::Static
) {
4274 auto const clsName
= dcls_of(clsTy
).cls
.name();
4275 reduce(env
, bc::FCallClsMethodD
{ op
.fca
, op
.str2
, clsName
, methName
});
4279 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, methName
);
4281 auto const numInOut
= op
.fca
.enforceInOut()
4282 ? env
.index
.lookup_num_inout_params(env
.ctx
, rfunc
)
4285 auto const numExtraInputs
= extraInput
? 1 : 0;
4286 if (fcallOptimizeChecks(env
, op
.fca
, rfunc
, updateBC
, numInOut
, false,
4288 fcallTryFold(env
, op
.fca
, rfunc
, ctxCls(env
), dynamic
,
4293 if (rfunc
.exactFunc() && op
.str2
->empty()) {
4294 return reduce(env
, updateBC(op
.fca
, rfunc
.exactFunc()->cls
->name
));
4297 fcallKnownImpl(env
, op
.fca
, rfunc
, ctxCls(env
), false /* nullsafe */,
4298 extraInput
? 1 : 0, updateBC
, numInOut
);
4303 void in(ISS
& env
, const bc::FCallClsMethodSD
& op
) {
4304 if (op
.fca
.hasGenerics()) {
4305 auto const clsTy
= specialClsRefToCls(env
, op
.subop3
);
4306 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, op
.str4
);
4307 if (!rfunc
.couldHaveReifiedGenerics()) {
4311 bc::FCallClsMethodSD
{
4312 op
.fca
.withoutGenerics(), op
.str2
, op
.subop3
, op
.str4
}
4317 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4318 if (!clsHint
) clsHint
= op
.str2
;
4319 return bc::FCallClsMethodSD
{ std::move(fca
), clsHint
, op
.subop3
, op
.str4
};
4321 fcallClsMethodSImpl(env
, op
, op
.str4
, false, false, updateBC
);
4324 void in(ISS
& env
, const bc::FCallClsMethodS
& op
) {
4325 auto const methName
= getNameFromType(topC(env
));
4328 fcallUnknownImpl(env
, op
.fca
);
4332 auto const clsTy
= specialClsRefToCls(env
, op
.subop3
);
4333 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, methName
);
4334 if (!rfunc
.mightCareAboutDynCalls() && !rfunc
.couldHaveReifiedGenerics()) {
4338 bc::FCallClsMethodSD
{ op
.fca
, op
.str2
, op
.subop3
, methName
}
4342 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4343 if (!clsHint
) clsHint
= op
.str2
;
4344 return bc::FCallClsMethodS
{ std::move(fca
), clsHint
, op
.subop3
};
4346 fcallClsMethodSImpl(env
, op
, methName
, true, true, updateBC
);
4351 void newObjDImpl(ISS
& env
, const StringData
* className
, bool rflavor
) {
4352 auto const rcls
= env
.index
.resolve_class(env
.ctx
, className
);
4354 if (rflavor
) popC(env
);
4358 if (rflavor
&& !rcls
->couldHaveReifiedGenerics()) {
4359 return reduce(env
, bc::PopC
{}, bc::NewObjD
{ className
});
4361 auto const isCtx
= !rcls
->couldBeOverriden() && env
.ctx
.cls
&&
4362 rcls
->same(env
.index
.resolve_class(env
.ctx
.cls
));
4363 if (rflavor
) popC(env
);
4364 push(env
, setctx(objExact(*rcls
), isCtx
));
4369 void in(ISS
& env
, const bc::NewObjD
& op
) { newObjDImpl(env
, op
.str1
, false); }
4370 void in(ISS
& env
, const bc::NewObjRD
& op
) { newObjDImpl(env
, op
.str1
, true); }
4372 void in(ISS
& env
, const bc::NewObjS
& op
) {
4373 auto const cls
= specialClsRefToCls(env
, op
.subop1
);
4374 if (!is_specialized_cls(cls
)) {
4379 auto const dcls
= dcls_of(cls
);
4380 auto const exact
= dcls
.type
== DCls::Exact
;
4381 if (exact
&& !dcls
.cls
.couldHaveReifiedGenerics() &&
4382 (!dcls
.cls
.couldBeOverriden() || equivalently_refined(cls
, unctx(cls
)))) {
4383 return reduce(env
, bc::NewObjD
{ dcls
.cls
.name() });
4386 push(env
, toobj(cls
));
4389 void in(ISS
& env
, const bc::NewObj
& op
) {
4390 auto const cls
= topC(env
);
4391 if (!cls
.subtypeOf(BCls
) || !is_specialized_cls(cls
)) {
4397 auto const dcls
= dcls_of(cls
);
4398 auto const exact
= dcls
.type
== DCls::Exact
;
4399 if (exact
&& !dcls
.cls
.mightCareAboutDynConstructs()) {
4403 bc::NewObjD
{ dcls
.cls
.name() }
4408 push(env
, toobj(cls
));
4411 void in(ISS
& env
, const bc::NewObjR
& op
) {
4412 auto const generics
= topC(env
);
4413 auto const cls
= topC(env
, 1);
4415 if (generics
.subtypeOf(BInitNull
)) {
4423 if (!cls
.subtypeOf(BCls
) || !is_specialized_cls(cls
)) {
4430 auto const dcls
= dcls_of(cls
);
4431 auto const exact
= dcls
.type
== DCls::Exact
;
4432 if (exact
&& !dcls
.cls
.couldHaveReifiedGenerics()) {
4442 push(env
, toobj(cls
));
4447 bool objMightHaveConstProps(const Type
& t
) {
4448 assertx(t
.subtypeOf(BObj
));
4449 assertx(is_specialized_obj(t
));
4450 auto const dobj
= dobj_of(t
);
4451 switch (dobj
.type
) {
4453 return dobj
.cls
.couldHaveConstProp();
4455 return dobj
.cls
.derivedCouldHaveConstProp();
4462 void in(ISS
& env
, const bc::FCallCtor
& op
) {
4463 auto const obj
= topC(env
, op
.fca
.numInputs() + 1);
4464 assertx(op
.fca
.numRets() == 1);
4466 if (!is_specialized_obj(obj
)) {
4467 return fcallUnknownImpl(env
, op
.fca
);
4470 if (op
.fca
.lockWhileUnwinding() && !objMightHaveConstProps(obj
)) {
4472 env
, bc::FCallCtor
{ op
.fca
.withoutLockWhileUnwinding(), op
.str2
}
4476 auto const dobj
= dobj_of(obj
);
4477 auto const exact
= dobj
.type
== DObj::Exact
;
4478 auto const rfunc
= env
.index
.resolve_ctor(env
.ctx
, dobj
.cls
, exact
);
4480 return fcallUnknownImpl(env
, op
.fca
);
4483 auto const updateFCA
= [&] (FCallArgs
&& fca
) {
4484 return bc::FCallCtor
{ std::move(fca
), op
.str2
};
4487 auto const numInOut
= op
.fca
.enforceInOut()
4488 ? env
.index
.lookup_num_inout_params(env
.ctx
, *rfunc
)
4491 auto const canFold
= obj
.subtypeOf(BObj
);
4492 if (fcallOptimizeChecks(env
, op
.fca
, *rfunc
, updateFCA
, numInOut
, false, 0) ||
4493 (canFold
&& fcallTryFold(env
, op
.fca
, *rfunc
,
4494 obj
, false /* dynamic */, 0))) {
4498 if (rfunc
->exactFunc() && op
.str2
->empty()) {
4499 // We've found the exact func that will be called, set the hint.
4500 return reduce(env
, bc::FCallCtor
{ op
.fca
, rfunc
->exactFunc()->cls
->name
});
4503 fcallKnownImpl(env
, op
.fca
, *rfunc
, obj
, false /* nullsafe */, 0,
4504 updateFCA
, numInOut
);
4507 void in(ISS
& env
, const bc::LockObj
& op
) {
4508 auto const t
= topC(env
);
4511 return push(env
, t
);
4513 if (!t
.subtypeOf(BObj
)) return bail();
4514 if (!is_specialized_obj(t
) || objMightHaveConstProps(t
)) {
4523 // baseLoc is NoLocalId for non-local iterators.
4524 void iterInitImpl(ISS
& env
, IterArgs ita
, BlockId target
, LocalId baseLoc
) {
4525 auto const local
= baseLoc
!= NoLocalId
;
4526 auto const sourceLoc
= local
? baseLoc
: topStkLocal(env
);
4527 auto const base
= local
? locAsCell(env
, baseLoc
) : topC(env
);
4528 auto ity
= iter_types(base
);
4530 auto const fallthrough
= [&] {
4531 auto const baseCannotBeObject
= !base
.couldBe(BObj
);
4532 setIter(env
, ita
.iterId
, LiveIter
{ ity
, sourceLoc
, NoLocalId
, env
.bid
,
4533 false, baseCannotBeObject
});
4534 // Do this after setting the iterator, in case it clobbers the base local
4536 setLoc(env
, ita
.valId
, std::move(ity
.value
));
4538 setLoc(env
, ita
.keyId
, std::move(ity
.key
));
4539 setIterKey(env
, ita
.iterId
, ita
.keyId
);
4543 assertx(iterIsDead(env
, ita
.iterId
));
4545 if (!ity
.mayThrowOnInit
) {
4546 if (ity
.count
== IterTypes::Count::Empty
&& will_reduce(env
)) {
4550 reduce(env
, bc::PopC
{});
4552 return jmp_setdest(env
, target
);
4557 if (!local
) popC(env
);
4559 switch (ity
.count
) {
4560 case IterTypes::Count::Empty
:
4561 mayReadLocal(env
, ita
.valId
);
4562 if (ita
.hasKey()) mayReadLocal(env
, ita
.keyId
);
4563 jmp_setdest(env
, target
);
4565 case IterTypes::Count::Single
:
4566 case IterTypes::Count::NonEmpty
:
4568 return jmp_nevertaken(env
);
4569 case IterTypes::Count::ZeroOrOne
:
4570 case IterTypes::Count::Any
:
4571 // Take the branch before setting locals if the iter is already
4572 // empty, but after popping. Similar for the other IterInits
4574 env
.propagate(target
, &env
.state
);
4578 always_assert(false);
4581 // baseLoc is NoLocalId for non-local iterators.
4582 void iterNextImpl(ISS
& env
, IterArgs ita
, BlockId target
, LocalId baseLoc
) {
4583 auto const curVal
= peekLocRaw(env
, ita
.valId
);
4584 auto const curKey
= ita
.hasKey() ? peekLocRaw(env
, ita
.keyId
) : TBottom
;
4586 auto noThrow
= false;
4587 auto const noTaken
= match
<bool>(
4588 env
.state
.iters
[ita
.iterId
],
4590 always_assert(false && "IterNext on dead iter");
4593 [&] (const LiveIter
& ti
) {
4594 if (!ti
.types
.mayThrowOnNext
) noThrow
= true;
4595 if (ti
.baseLocal
!= NoLocalId
) hasInvariantIterBase(env
);
4596 switch (ti
.types
.count
) {
4597 case IterTypes::Count::Single
:
4598 case IterTypes::Count::ZeroOrOne
:
4600 case IterTypes::Count::NonEmpty
:
4601 case IterTypes::Count::Any
:
4602 setLoc(env
, ita
.valId
, ti
.types
.value
);
4604 setLoc(env
, ita
.keyId
, ti
.types
.key
);
4605 setIterKey(env
, ita
.iterId
, ita
.keyId
);
4608 case IterTypes::Count::Empty
:
4609 always_assert(false);
4615 if (noTaken
&& noThrow
&& will_reduce(env
)) {
4616 auto const iterId
= safe_cast
<IterId
>(ita
.iterId
);
4617 return baseLoc
== NoLocalId
4618 ? reduce(env
, bc::IterFree
{ iterId
})
4619 : reduce(env
, bc::LIterFree
{ iterId
, baseLoc
});
4622 mayReadLocal(env
, baseLoc
);
4623 mayReadLocal(env
, ita
.valId
);
4624 if (ita
.hasKey()) mayReadLocal(env
, ita
.keyId
);
4626 if (noThrow
) nothrow(env
);
4629 jmp_nevertaken(env
);
4630 freeIter(env
, ita
.iterId
);
4634 env
.propagate(target
, &env
.state
);
4636 freeIter(env
, ita
.iterId
);
4637 setLocRaw(env
, ita
.valId
, curVal
);
4638 if (ita
.hasKey()) setLocRaw(env
, ita
.keyId
, curKey
);
4643 void in(ISS
& env
, const bc::IterInit
& op
) {
4644 iterInitImpl(env
, op
.ita
, op
.target2
, NoLocalId
);
4647 void in(ISS
& env
, const bc::LIterInit
& op
) {
4648 iterInitImpl(env
, op
.ita
, op
.target3
, op
.loc2
);
4651 void in(ISS
& env
, const bc::IterNext
& op
) {
4652 iterNextImpl(env
, op
.ita
, op
.target2
, NoLocalId
);
4655 void in(ISS
& env
, const bc::LIterNext
& op
) {
4656 iterNextImpl(env
, op
.ita
, op
.target3
, op
.loc2
);
4659 void in(ISS
& env
, const bc::IterFree
& op
) {
4660 // IterFree is used for weak iterators too, so we can't assert !iterIsDead.
4661 auto const isNop
= match
<bool>(
4662 env
.state
.iters
[op
.iter1
],
4666 [&] (const LiveIter
& ti
) {
4667 if (ti
.baseLocal
!= NoLocalId
) hasInvariantIterBase(env
);
4672 if (isNop
&& will_reduce(env
)) return reduce(env
);
4675 freeIter(env
, op
.iter1
);
4678 void in(ISS
& env
, const bc::LIterFree
& op
) {
4680 mayReadLocal(env
, op
.loc2
);
4681 freeIter(env
, op
.iter1
);
4685 * Any include/require (or eval) op kills all locals, and private properties.
4687 void inclOpImpl(ISS
& env
) {
4691 killPrivateStatics(env
);
4692 push(env
, TInitCell
);
4695 void in(ISS
& env
, const bc::Incl
&) { inclOpImpl(env
); }
4696 void in(ISS
& env
, const bc::InclOnce
&) { inclOpImpl(env
); }
4697 void in(ISS
& env
, const bc::Req
&) { inclOpImpl(env
); }
4698 void in(ISS
& env
, const bc::ReqOnce
&) { inclOpImpl(env
); }
4699 void in(ISS
& env
, const bc::ReqDoc
&) { inclOpImpl(env
); }
4700 void in(ISS
& env
, const bc::Eval
&) { inclOpImpl(env
); }
4702 void in(ISS
& env
, const bc::This
&) {
4703 if (thisAvailable(env
)) {
4704 return reduce(env
, bc::BareThis
{ BareThisOp::NeverNull
});
4706 auto const ty
= thisTypeNonNull(env
);
4707 push(env
, ty
, StackThisId
);
4708 setThisAvailable(env
);
4709 if (ty
.subtypeOf(BBottom
)) unreachable(env
);
4712 void in(ISS
& env
, const bc::LateBoundCls
& op
) {
4713 if (env
.ctx
.cls
) effect_free(env
);
4714 auto const ty
= selfCls(env
);
4715 push(env
, setctx(ty
? *ty
: TCls
));
4718 void in(ISS
& env
, const bc::CheckThis
&) {
4719 if (thisAvailable(env
)) {
4722 setThisAvailable(env
);
4725 void in(ISS
& env
, const bc::BareThis
& op
) {
4726 if (thisAvailable(env
)) {
4727 if (op
.subop1
!= BareThisOp::NeverNull
) {
4728 return reduce(env
, bc::BareThis
{ BareThisOp::NeverNull
});
4732 auto const ty
= thisType(env
);
4733 switch (op
.subop1
) {
4734 case BareThisOp::Notice
:
4736 case BareThisOp::NoNotice
:
4739 case BareThisOp::NeverNull
:
4740 setThisAvailable(env
);
4741 if (!env
.state
.unreachable
) effect_free(env
);
4742 return push(env
, ty
, StackThisId
);
4745 push(env
, ty
, StackThisId
);
// Amongst other things, we use this to mark units non-persistent.
// (Surviving fragment of a larger block comment.)
// Interp handler for OODeclExists (class_exists / interface_exists /
// trait_exists).
void in(ISS& env, const bc::OODeclExists& op) {
  auto flag = popC(env);
  auto name = popC(env);
  // [review] line 4754 missing — presumably the start of the push/lambda the
  // `return TBool` statements below belong to.
    // Without a known string name we can't resolve anything statically.
    if (!name.strictSubtypeOf(TStr)) return TBool;
    auto const v = tv(name);
    if (!v) return TBool;
    auto rcls = env.index.resolve_class(env.ctx, v->m_data.pstr);
    if (!rcls || !rcls->cls()) return TFalse;
    // Whether the resolved class matches the flavor the opcode asks about.
    auto const exist = [&] () -> bool {
      switch (op.subop1) {
        case OODeclExistsOp::Class:
          return !(rcls->cls()->attrs & (AttrInterface | AttrTrait));
        case OODeclExistsOp::Interface:
          return rcls->cls()->attrs & AttrInterface;
        case OODeclExistsOp::Trait:
          return rcls->cls()->attrs & AttrTrait;
    // [review] lines 4768-4771 missing (switch / lambda closers).
    return exist ? TTrue : TFalse;
  // [review] remainder of the function (after 4772) missing from this
  // extract; `flag` is unused in the visible portion.
// Whether the class underlying a specialized Cls/Obj type could be mocked.
bool couldBeMocked(const Type& t) {
  if (is_specialized_cls(t)) {
    return dcls_of(t).cls.couldBeMocked();
  } else if (is_specialized_obj(t)) {
    return dobj_of(t).cls.couldBeMocked();
  // In practice this should not occur since this is used mostly on the result
  // of looked up type constraints.
  // [review] the fallback return and closing braces (after 4784) are missing
  // from this extract.
// A list of non-owning pointers to the type constraints (declared constraint
// plus upper bounds) that a value must satisfy.
using TCVec = std::vector<const TypeConstraint*>;
// Interp handler for VerifyParamType: check parameter local op.loc1 against
// its declared constraint and upper bounds, then narrow the local's type.
void in(ISS& env, const bc::VerifyParamType& op) {
  IgnoreUsedParams _{env};
  if (env.ctx.func->isMemoizeImpl) {
    // a MemoizeImpl's params have already been checked by the wrapper
  // [review] lines 4796-4798 missing (presumably the memoize-case reduce and
  // its closing brace).
  auto const& pinfo = env.ctx.func->params[op.loc1];
  // Generally we won't know anything about the params, but
  // analyze_func_inline does - and this can help with effect-free analysis
  TCVec tcs = {&pinfo.typeConstraint};
  for (auto const& t : pinfo.upperBounds) tcs.push_back(&t);
  if (std::all_of(std::begin(tcs), std::end(tcs),
                  [&](const TypeConstraint* tc) {
                    return env.index.satisfies_constraint(env.ctx,
                      locAsCell(env, op.loc1),
  // [review] lines 4808-4809 missing (rest of the all_of call and its
  // then-branch opener).
  if (!locAsCell(env, op.loc1).couldBe(BCls)) {
  // [review] lines 4811-4815 missing.
  // We assume that if this opcode doesn't throw, the parameter was of the
  // (surviving fragment of a block comment; its remainder is missing)
  for (auto const& constraint : tcs) {
    if (constraint->hasConstraint() && !constraint->isTypeVar() &&
        !constraint->isTypeConstant()) {
      auto t = env.index.lookup_constraint(env.ctx, *constraint);
      // A mockable `this` constraint may be satisfied by a different class,
      // so drop context-exactness.
      if (constraint->isThis() && couldBeMocked(t)) {
        t = unctx(std::move(t));
      FTRACE(2, " {} ({})\n", constraint->fullName(), show(t));
      tcT = intersection_of(std::move(tcT), std::move(t));
      if (tcT.subtypeOf(BBottom)) unreachable(env);
  // [review] tcT's declaration (before 4828) and several closing braces are
  // missing from this extract.
  if (tcT != TTop) setLoc(env, op.loc1, std::move(tcT));
// Interp handler for VerifyParamTypeTS: like VerifyParamType, but the
// constraint is a (possibly reified) type structure on top of the stack.
void in(ISS& env, const bc::VerifyParamTypeTS& op) {
  auto const a = topC(env);
  if (!a.couldBe(BDict)) {
  // [review] lines 4838-4841 missing (non-dict handling).
  auto const constraint = env.ctx.func->params[op.loc1].typeConstraint;
  // TODO(T31677864): We are being extremely pessimistic here, relax it
  if (!env.ctx.func->isReified &&
      (!env.ctx.cls || !env.ctx.cls->hasReifiedGenerics) &&
      !env.index.could_have_reified_type(env.ctx, constraint)) {
    // No reified generics can be involved: drop the TS and use the plain
    // VerifyParamType check.
    return reduce(env, bc::PopC {}, bc::VerifyParamType { op.loc1 });
  if (auto const inputTS = tv(a)) {
    if (!isValidTSType(*inputTS, false)) {
    // [review] lines 4852-4855 missing.
    auto const resolvedTS =
      resolve_type_structure(env, inputTS->m_data.parr).sarray();
    if (resolvedTS && resolvedTS != inputTS->m_data.parr) {
      // Replace the input TS literal with its statically resolved form.
      reduce(env, bc::PopC {});
      reduce(env, bc::Dict { resolvedTS });
      reduce(env, bc::VerifyParamTypeTS { op.loc1 });
    // [review] lines 4862-4863 missing (presumably return + brace).
    if (shouldReduceToNonReifiedVerifyType(env, inputTS->m_data.parr)) {
      return reduce(env, bc::PopC {}, bc::VerifyParamType { op.loc1 });
  // If the TS was just built from a Dict literal by
  // CombineAndResolveTypeStruct, inspect that literal directly.
  if (auto const last = last_op(env)) {
    if (last->op == Op::CombineAndResolveTypeStruct) {
      if (auto const last2 = last_op(env, 1)) {
        if (last2->op == Op::Dict &&
            shouldReduceToNonReifiedVerifyType(env, last2->Dict.arr1)) {
          return reduce(env, bc::PopC {}, bc::VerifyParamType { op.loc1 });
  // [review] function tail (after 4873) missing from this extract.
// Shared implementation for VerifyRet*/VerifyOut checks. `tcs` is the set of
// constraints to check; `ts_flavor` means a type structure sits on top of the
// stack above the value being checked.
void verifyRetImpl(ISS& env, const TCVec& tcs,
                   bool reduce_this, bool ts_flavor) {
  // If it is the ts flavor, then second thing on the stack, otherwise first
  auto stackT = topC(env, (int)ts_flavor);
  auto const stackEquiv = topStkEquiv(env, (int)ts_flavor);
  // If there is no return type constraint, or if the return type
  // constraint is a typevar, or if the top of stack is the same or a
  // subtype of the type constraint, then this is a no-op, unless
  // reified types could be involved.
  if (std::all_of(std::begin(tcs), std::end(tcs),
                  [&](const TypeConstraint* tc) {
                    return env.index.satisfies_constraint(env.ctx, stackT, *tc);
  // [review] lines 4894-4895 missing (rest of the all_of and branch opener).
    // we wouldn't get here if reified types were definitely not
    // involved, so just bail.
  // [review] lines 4898-4899 missing.
    push(env, std::move(stackT), stackEquiv);
  // [review] lines 4901-4905 missing.
  std::vector<Type> constraintTypes;
  auto dont_reduce = false;
  for (auto const& constraint : tcs) {
    // When the constraint is not soft.
    // We can safely assume that either VerifyRetTypeC will
    // throw or it will produce a value whose type is compatible with the
    // return type constraint.
    auto tcT = remove_uninit(env.index.lookup_constraint(env.ctx, *constraint));
    constraintTypes.push_back(tcT);
    // In some circumstances, verifyRetType can modify the type. If it
    // does that we can't reduce even when we know it succeeds.
    // VerifyRetType will convert a TCls to a TStr implicitly
    // (and possibly warn)
    if (tcT.couldBe(BStr) && stackT.couldBe(BCls | BLazyCls)) {
    // [review] lines 4922-4925 missing (presumably widening + dont_reduce).
    // If the constraint is soft, then there are no optimizations we can safely
    // do here, so just leave the top of stack as is.
    if (constraint->isSoft() ||
        (RuntimeOption::EvalEnforceGenericsUB < 2 &&
         constraint->isUpperBound()))
    // [review] line 4931 missing (branch opener).
      if (ts_flavor) popC(env);
      // [review] line 4933 missing.
      push(env, std::move(stackT), stackEquiv);
  // [review] lines 4935-4938 missing.
  // In cases where we have a `this` hint where stackT is an TOptObj known to
  // be this, we can replace the check with a non null check. These cases are
  // likely from a BareThis that could return Null. Since the runtime will
  // split these translations, it will rarely in practice return null.
  // [review] lines 4943-4944 missing (the start of this condition; it uses
  // reduce_this per the surviving comment — confirm against upstream).
      stackT.couldBe(BInitNull) &&
      !stackT.subtypeOf(BInitNull) &&
      std::all_of(std::begin(tcs), std::end(tcs),
                  [&](const TypeConstraint* constraint) {
                    return constraint->isThis() &&
                           !constraint->isNullable() &&
                           env.index.satisfies_constraint(
                             env.ctx, unopt(stackT), *constraint);
  // [review] lines 4953-4956 missing.
      return reduce(env, bc::PopC {}, bc::VerifyRetNonNullC {});
    // [review] line 4958 missing.
      return reduce(env, bc::VerifyRetNonNullC {});
  // [review] lines 4960-4961 missing.
  auto retT = std::move(stackT);
  for (auto& tcT : constraintTypes) {
    retT = intersection_of(std::move(tcT), std::move(retT));
    if (retT.subtypeOf(BBottom)) {
      // [review] line 4966 missing (presumably unreachable(env)).
      if (ts_flavor) popC(env); // the type structure
  // [review] lines 4968-4971 missing.
  if (ts_flavor) popC(env); // the type structure
  // [review] line 4973 missing.
  push(env, std::move(retT));
// Interp handler for VerifyOutType: check outparam op.arg1 via verifyRetImpl.
void in(ISS& env, const bc::VerifyOutType& op) {
  // [review] line 4978 missing — presumably the `TCVec tcs;` declaration used
  // below.
  auto const& pinfo = env.ctx.func->params[op.arg1];
  tcs.push_back(&pinfo.typeConstraint);
  for (auto const& t : pinfo.upperBounds) tcs.push_back(&t);
  verifyRetImpl(env, tcs, false, false);
  // [review] closing brace missing from this extract.
// Interp handler for VerifyRetTypeC: check the return value against the
// function's return constraint and upper bounds.
void in(ISS& env, const bc::VerifyRetTypeC& /*op*/) {
  // [review] line 4986 missing — presumably the `TCVec tcs;` declaration.
  tcs.push_back(&env.ctx.func->retTypeConstraint);
  for (auto const& t : env.ctx.func->returnUBs) tcs.push_back(&t);
  verifyRetImpl(env, tcs, true, false);
  // [review] closing brace missing from this extract.
// Interp handler for VerifyRetTypeTS: VerifyRetTypeC plus a reified type
// structure on top of the stack.
void in(ISS& env, const bc::VerifyRetTypeTS& /*op*/) {
  auto const a = topC(env);
  if (!a.couldBe(BDict)) {
  // [review] lines 4995-4998 missing (non-dict handling).
  auto const constraint = env.ctx.func->retTypeConstraint;
  // TODO(T31677864): We are being extremely pessimistic here, relax it
  if (!env.ctx.func->isReified &&
      (!env.ctx.cls || !env.ctx.cls->hasReifiedGenerics) &&
      !env.index.could_have_reified_type(env.ctx, constraint)) {
    // No reified generics can be involved: degrade to VerifyRetTypeC.
    return reduce(env, bc::PopC {}, bc::VerifyRetTypeC {});
  if (auto const inputTS = tv(a)) {
    if (!isValidTSType(*inputTS, false)) {
    // [review] lines 5008-5011 missing.
    auto const resolvedTS =
      resolve_type_structure(env, inputTS->m_data.parr).sarray();
    if (resolvedTS && resolvedTS != inputTS->m_data.parr) {
      // Replace the input TS with its statically resolved form.
      reduce(env, bc::PopC {});
      reduce(env, bc::Dict { resolvedTS });
      reduce(env, bc::VerifyRetTypeTS {});
    // [review] lines 5018-5019 missing (presumably return + brace).
    if (shouldReduceToNonReifiedVerifyType(env, inputTS->m_data.parr)) {
      return reduce(env, bc::PopC {}, bc::VerifyRetTypeC {});
  // If the TS was just built from a Dict literal by
  // CombineAndResolveTypeStruct, inspect that literal directly.
  if (auto const last = last_op(env)) {
    if (last->op == Op::CombineAndResolveTypeStruct) {
      if (auto const last2 = last_op(env, 1)) {
        if (last2->op == Op::Dict &&
            shouldReduceToNonReifiedVerifyType(env, last2->Dict.arr1)) {
          return reduce(env, bc::PopC {}, bc::VerifyRetTypeC {});
  // [review] closing braces (5030-5033) missing here.
  TCVec tcs {&constraint};
  for (auto const& t : env.ctx.func->returnUBs) tcs.push_back(&t);
  verifyRetImpl(env, tcs, true, true);
  // [review] closing brace missing from this extract.
// Interp handler for VerifyRetNonNullC: assert the return value is non-null.
void in(ISS& env, const bc::VerifyRetNonNullC& /*op*/) {
  auto const constraint = env.ctx.func->retTypeConstraint;
  // A soft constraint can't justify narrowing.
  if (constraint.isSoft()) {
  // [review] lines 5042-5044 missing (presumably return + brace).
  auto stackT = topC(env);
  if (!stackT.couldBe(BInitNull)) {
  // [review] lines 5048-5051 missing (already-non-null fast path).
  // A definitely-null value always fails the check.
  if (stackT.subtypeOf(BNull)) return unreachable(env);
  auto const equiv = topStkEquiv(env);
  stackT = unopt(std::move(stackT));
  // [review] lines 5057-5058 missing (presumably popC).
  push(env, stackT, equiv);
  // [review] closing brace missing from this extract.
// Interp handler for Self.
void in(ISS& env, const bc::Self& op) {
  auto const self = selfClsExact(env);
  // [review] the remainder of this function (original lines 5064-5070) is
  // missing from this extract.
// Interp handler for Parent.
void in(ISS& env, const bc::Parent& op) {
  auto const parent = parentClsExact(env);
  // [review] the remainder of this function (original lines 5074-5080) is
  // missing from this extract.
// Interp handler for CreateCl: create a closure object capturing `arg1` used
// variables from the stack.
void in(ISS& env, const bc::CreateCl& op) {
  auto const nargs = op.arg1;
  auto const clsPair = env.index.resolve_closure_class(env.ctx, op.arg2);
  // * Every closure should have a unique allocation site, but we may see it
  // * multiple times in a given round of analyzing this function. Each time we
  // * may have more information about the used variables; the types should only
  // * possibly grow. If it's already there we need to merge the used vars in
  // * with what we saw last time.
  CompactVector<Type> usedVars(nargs);
  // Stack order is the reverse of use-var order.
  for (auto i = uint32_t{0}; i < nargs; ++i) {
    usedVars[nargs - i - 1] = unctx(popCU(env));
  merge_closure_use_vars_into(
    env.collect.closureUseTypes,
  // [review] lines 5100-5104 missing (rest of the merge call and closers).
  // Closure classes can be cloned and rescoped at runtime, so it's not safe to
  // assert the exact type of closure objects. The best we can do is assert
  // that it's a subclass of Closure.
  auto const closure = env.index.builtin_class(s_Closure.get());
  return push(env, subObj(closure));
  // [review] closing brace missing; `clsPair` unused in the visible portion.
// Interp handler for CreateCont.
void in(ISS& env, const bc::CreateCont& /*op*/) {
  // First resume is always next() which pushes null.
  push(env, TInitNull);
  // [review] closing brace missing from this extract.
5118 void in(ISS
& env
, const bc::ContEnter
&) { popC(env
); push(env
, TInitCell
); }
5119 void in(ISS
& env
, const bc::ContRaise
&) { popC(env
); push(env
, TInitCell
); }
// Interp handler for Yield: the value sent back on resume is unknown.
void in(ISS& env, const bc::Yield&) {
  // [review] line 5122 missing (presumably popping the yielded value).
  push(env, TInitCell);
// Interp handler for YieldK (yield with key).
void in(ISS& env, const bc::YieldK&) {
  // [review] lines 5127-5128 missing (presumably popping key and value).
  push(env, TInitCell);
  // [review] closing brace missing from this extract.
5132 void in(ISS
& /*env*/, const bc::ContCheck
&) {}
5133 void in(ISS
& env
, const bc::ContValid
&) { push(env
, TBool
); }
5134 void in(ISS
& env
, const bc::ContKey
&) { push(env
, TInitCell
); }
5135 void in(ISS
& env
, const bc::ContCurrent
&) { push(env
, TInitCell
); }
5136 void in(ISS
& env
, const bc::ContGetReturn
&) { push(env
, TInitCell
); }
// Push the type produced by awaiting a wait-handle of type t.
void pushTypeFromWH(ISS& env, Type t) {
  auto inner = typeFromWH(t);
  // The next opcode is unreachable if awaiting a non-object or WaitH<Bottom>.
  if (inner.subtypeOf(BBottom)) unreachable(env);
  push(env, std::move(inner));
  // [review] closing brace missing from this extract.
// Interp handler for WHResult: extract the result type from the wait-handle
// on top of the stack.
void in(ISS& env, const bc::WHResult&) {
  pushTypeFromWH(env, popC(env));
  // [review] closing brace missing from this extract.
// Interp handler for Await: same stack effect as WHResult.
void in(ISS& env, const bc::Await&) {
  pushTypeFromWH(env, popC(env));
  // [review] closing brace missing from this extract.
// Interp handler for AwaitAll over a local range.
void in(ISS& env, const bc::AwaitAll& op) {
  // Prefer an equivalent, earlier local range if one exists.
  auto const equiv = equivLocalRange(env, op.locrange);
  if (equiv != op.locrange.first) {
    // [review] lines 5156-5157 missing (presumably the reduce call this
    // rebuilt bytecode is an argument of).
      bc::AwaitAll {LocalRange {equiv, op.locrange.count}}
  // [review] lines 5159-5161 missing.
  for (uint32_t i = 0; i < op.locrange.count; ++i) {
    mayReadLocal(env, op.locrange.first + i);
  // [review] lines 5164-5165 missing.
  push(env, TInitNull);
  // [review] closing brace missing from this extract.
// Interp handler for SetImplicitContextByIndex.
void in(ISS& env, const bc::SetImplicitContextByIndex&) {
  // [review] the entire body (original lines 5170-5172) is missing from this
  // extract.
// Interp handler for Idx: idx(base, key, default).
void in(ISS& env, const bc::Idx&) {
  auto const def = popC(env);
  auto const [key, promotion] = promote_classlike_to_key(popC(env));
  auto const base = popC(env);
  assertx(!def.is(BBottom));
  // Class-to-key promotion may raise, which ends effect-freedom.
  auto effectFree = promotion != Promotion::YesMightThrow;
  auto result = TBottom;
  auto const finish = [&] {
    if (result.is(BBottom)) {
      assertx(!effectFree);
    // [review] lines 5187-5192 missing (rest of the finish lambda).
    push(env, std::move(result));
  // [review] lines 5194-5195 missing.
  // A null key selects the default.
  if (key.couldBe(BNull)) result |= def;
  if (key.subtypeOf(BNull)) return finish();
  if (!base.subtypeOf(BArrLike | BObj | BStr)) result |= def;
  if (base.couldBe(BArrLike)) {
    if (!key.subtypeOf(BOptArrKey)) effectFree = false;
    if (key.couldBe(BArrKey)) {
      auto elem = array_like_elem(
        // [review] line 5205 missing (presumably the base argument).
        key.subtypeOf(BArrKey) ? key : intersection_of(key, TArrKey)
      result |= std::move(elem.first);
      // Element may be absent: the default remains possible.
      if (!elem.second) result |= def;
  // [review] lines 5210-5211 missing (closers).
  if (base.couldBe(BObj)) {
    result |= TInitCell;
  // [review] lines 5214-5215 missing.
  if (base.couldBe(BStr)) {
    // [review] lines 5217-5218 missing.
    if (!key.subtypeOf(BOptArrKey)) effectFree = false;
  // [review] function tail (after 5219) missing from this extract.
// Interp handler for ArrayIdx: like Idx but restricted to array-likes.
void in(ISS& env, const bc::ArrayIdx&) {
  auto def = popC(env);
  auto const [key, promotion] = promote_classlike_to_key(popC(env));
  auto const base = popC(env);
  assertx(!def.is(BBottom));
  // Class-to-key promotion may raise, which ends effect-freedom.
  auto effectFree = promotion != Promotion::YesMightThrow;
  auto result = TBottom;
  auto const finish = [&] {
    if (result.is(BBottom)) {
      assertx(!effectFree);
    // [review] lines 5238-5243 missing (rest of the finish lambda).
    push(env, std::move(result));
  // [review] lines 5245-5246 missing.
  // A null key selects the default.
  if (key.couldBe(BNull)) result |= def;
  if (key.subtypeOf(BNull)) return finish();
  if (!base.subtypeOf(BArrLike)) effectFree = false;
  if (!base.couldBe(BArrLike)) return finish();
  if (!key.subtypeOf(BOptArrKey)) effectFree = false;
  if (!key.couldBe(BArrKey)) return finish();
  auto elem = array_like_elem(
    // [review] line 5257 missing (presumably the base argument).
    key.subtypeOf(BArrKey) ? key : intersection_of(key, TArrKey)
  result |= std::move(elem.first);
  // Element may be absent: fall back to the default.
  if (!elem.second) result |= std::move(def);
  // [review] function tail (after 5261) missing from this extract.
// Shared handler for ArrayMarkLegacy / ArrayUnmarkLegacy; `legacy` selects
// marking vs unmarking.
void implArrayMarkLegacy(ISS& env, bool legacy) {
  auto const recursive = popC(env);
  auto const value = popC(env);
  // If both operands are known constants, evaluate the mark statically.
  if (auto const tv_recursive = tv(recursive)) {
    if (auto const tv_value = tv(value)) {
      if (tvIsBool(*tv_recursive)) {
        auto const result = eval_cell([&]{
          return val(*tv_recursive).num
            ? arrprov::markTvRecursively(*tv_value, legacy)
            : arrprov::markTvShallow(*tv_value, legacy);
  // [review] lines 5277-5287 missing (use of `result` and closers).
  // TODO(kshaunak): We could add some type info here.
  push(env, TInitCell);
  // [review] closing brace missing from this extract.
// Interp handler for ArrayMarkLegacy.
void in(ISS& env, const bc::ArrayMarkLegacy&) {
  implArrayMarkLegacy(env, true);
  // [review] closing brace missing from this extract.
// Interp handler for ArrayUnmarkLegacy.
void in(ISS& env, const bc::ArrayUnmarkLegacy&) {
  implArrayMarkLegacy(env, false);
  // [review] closing brace missing from this extract.
// Interp handler for CheckProp.
void in(ISS& env, const bc::CheckProp&) {
  if (env.ctx.cls->attrs & AttrNoOverride) {
    return reduce(env, bc::False {});
  // [review] lines 5304-5307 missing (non-NoOverride path and closers).
// Interp handler for InitProp: record the initial value (top of stack) for a
// static or non-static property named op.str1.
void in(ISS& env, const bc::InitProp& op) {
  auto const t = topC(env);
  switch (op.subop2) {
    case InitPropOp::Static:
      env.index.merge_static_type(
        // [review] line 5314 missing (an argument of merge_static_type).
        env.collect.publicSPropMutations,
        // [review] line 5316 missing.
        clsExact(env.index.resolve_class(env.ctx.cls)),
      // [review] lines 5318-5323 missing (rest of the call + break).
    case InitPropOp::NonStatic:
      mergeThisProp(env, op.str1, t);
  // [review] lines 5326-5328 missing (break / end of switch).
  for (auto& prop : env.ctx.func->cls->properties) {
    if (prop.name != op.str1) continue;
    ITRACE(1, "InitProp: {} = {}\n", op.str1, show(t));
    // Record whether the initial value satisfies the property's constraint
    // and every upper bound.
    if (env.index.satisfies_constraint(env.ctx, t, prop.typeConstraint) &&
        std::all_of(prop.ubs.begin(), prop.ubs.end(),
                    [&](TypeConstraint ub) {
                      applyFlagsToUB(ub, prop.typeConstraint);
                      return env.index.satisfies_constraint(env.ctx, t, ub);
    // [review] line 5339 missing (close of all_of / branch opener).
      prop.attrs |= AttrInitialSatisfiesTC;
    // [review] line 5341 missing (presumably the else opener).
      badPropInitialValue(env);
      prop.attrs = (Attr)(prop.attrs & ~AttrInitialSatisfiesTC);
    // [review] lines 5344-5346 missing.
    auto const v = tv(t);
    // A known constant, or a value that can't contain objects, needs no
    // deep-init treatment.
    if (v || !could_contain_objects(t)) {
      prop.attrs = (Attr)(prop.attrs & ~AttrDeepInit);
    // [review] lines 5350-5351 missing.
      env.index.update_static_prop_init_val(env.ctx.func->cls, op.str1);
      return reduce(env, bc::PopC {});
  // [review] function tail (after 5353) missing from this extract.
// Interp handler for Silence (error-suppression @-operator support).
void in(ISS& env, const bc::Silence& op) {
  // [review] line 5361 missing (presumably nothrow/effect_free marking).
  switch (op.subop2) {
    case SilenceOp::Start:
      // The local stores the saved error-reporting level as an int.
      setLoc(env, op.loc1, TInt);
      // [review] line 5365 missing (presumably break).
    case SilenceOp::End:
      locRaw(env, op.loc1);
  // [review] function tail (after 5367) missing from this extract.
// Shared implementation of MemoGet / MemoGetEager. `rebind` rebuilds the
// opcode with a different local range. Returns whether the op was reduced —
// assumption from the bool return type and call sites; confirm upstream.
template <typename Op, typename Rebind>
bool memoGetImpl(ISS& env, const Op& op, Rebind&& rebind) {
  always_assert(env.ctx.func->isMemoizeWrapper);
  always_assert(op.locrange.first + op.locrange.count
                <= env.ctx.func->locals.size());
  if (will_reduce(env)) {
    // If we can use an equivalent, earlier range, then use that instead.
    auto const equiv = equivLocalRange(env, op.locrange);
    if (equiv != op.locrange.first) {
      reduce(env, rebind(LocalRange { equiv, op.locrange.count }));
  // [review] lines 5385-5388 missing (return true + closers).
  auto retTy = memoizeImplRetType(env);
  // MemoGet can raise if we give a non arr-key local, or if we're in a method
  // and $this isn't available.
  auto allArrKey = true;
  for (uint32_t i = 0; i < op.locrange.count; ++i) {
    allArrKey &= locRaw(env, op.locrange.first + i).subtypeOf(BArrKey);
  // [review] lines 5396-5397 missing (loop close and the start of the
  // condition the next three lines belong to).
      (!env.ctx.func->cls ||
       (env.ctx.func->attrs & AttrStatic) ||
       thisAvailable(env))) {
    if (will_reduce(env)) {
      if (retTy.first.subtypeOf(BBottom)) {
        // [review] line 5403 missing.
        jmp_setdest(env, op.target1);
      // [review] lines 5405-5406 missing.
      // deal with constprop manually; otherwise we will propagate the
      // taken edge and *then* replace the MemoGet with a constant.
      // [review] line 5409 missing.
      if (auto v = tv(retTy.first)) {
        reduce(env, gen_constant(*v));
  // [review] lines 5412-5418 missing.
  if (retTy.first == TBottom) {
    jmp_setdest(env, op.target1);
  // [review] lines 5421-5423 missing.
  env.propagate(op.target1, &env.state);
  push(env, std::move(retTy.first));
  // [review] function tail (after 5425) missing from this extract.
// Interp handler for MemoGet: delegates to memoGetImpl with a rebinder that
// rebuilds MemoGet over a different local range.
void in(ISS& env, const bc::MemoGet& op) {
  // [review] lines 5432-5433 missing — the memoGetImpl call that this lambda
  // is an argument of.
    [&] (const LocalRange& l) { return bc::MemoGet { op.target1, l }; }
  // [review] function tail missing from this extract.
// Interp handler for MemoGetEager (async memoize wrappers only).
void in(ISS& env, const bc::MemoGetEager& op) {
  always_assert(env.ctx.func->isAsync && !env.ctx.func->isGenerator);
  auto const reduced = memoGetImpl(
    // [review] line 5442 missing (presumably env, op arguments).
    [&] (const LocalRange& l) {
      return bc::MemoGetEager { op.target1, op.target2, l };
  // [review] lines 5445-5446 missing (lambda/call closers).
  if (reduced) return;
  // [review] line 5448 missing.
  env.propagate(op.target2, &env.state);
  auto const t = popC(env);
  // [review] lines 5451-5452 missing (the push this expression feeds).
    is_specialized_wait_handle(t) ? wait_handle_inner(t) : TInitCell
  // [review] function tail missing from this extract.
// Shared implementation of MemoSet / MemoSetEager.
template <typename Op>
void memoSetImpl(ISS& env, const Op& op) {
  always_assert(env.ctx.func->isMemoizeWrapper);
  always_assert(op.locrange.first + op.locrange.count
                <= env.ctx.func->locals.size());
  // If we can use an equivalent, earlier range, then use that instead.
  auto const equiv = equivLocalRange(env, op.locrange);
  if (equiv != op.locrange.first) {
    // [review] lines 5468-5469 missing (the reduce call this rebuilt op is an
    // argument of).
      Op { LocalRange { equiv, op.locrange.count } }
  // [review] lines 5471-5473 missing.
  // MemoSet can raise if we give a non arr-key local, or if we're in a method
  // and $this isn't available.
  auto allArrKey = true;
  for (uint32_t i = 0; i < op.locrange.count; ++i) {
    allArrKey &= locRaw(env, op.locrange.first + i).subtypeOf(BArrKey);
  // [review] lines 5479-5480 missing (loop close and the start of the
  // condition the next three lines belong to).
      (!env.ctx.func->cls ||
       (env.ctx.func->attrs & AttrStatic) ||
       thisAvailable(env))) {
  // [review] lines 5484-5485 missing (presumably nothrow marking).
  push(env, popC(env));
  // [review] function tail (after 5486) missing from this extract.
// Interp handler for MemoSet.
void in(ISS& env, const bc::MemoSet& op) {
  memoSetImpl(env, op);
  // [review] closing brace missing from this extract.
// Interp handler for MemoSetEager (async memoize wrappers only).
void in(ISS& env, const bc::MemoSetEager& op) {
  always_assert(env.ctx.func->isAsync && !env.ctx.func->isGenerator);
  memoSetImpl(env, op);
  // [review] closing brace missing from this extract.
//////////////////////////////////////////////////////////////////////

// Dispatch one bytecode to its interp_step::in handler via the OPCODES
// x-macro table.
void dispatch(ISS& env, const Bytecode& op) {
#define O(opcode, ...) case Op::opcode: interp_step::in(env, op.opcode); return;
  switch (op.op) { OPCODES }
  // [review] the #undef O / unreachable marker and closing brace (after
  // original 5508) are missing from this extract.
//////////////////////////////////////////////////////////////////////

// Interpret a single bytecode: dispatch it, then optionally replace it with
// pops + constants when constant propagation applies, and propagate state to
// the throw exit if the op may raise.
void interpStep(ISS& env, const Bytecode& bc) {
  ITRACE(2, " {} ({})\n",
         show(env.ctx.func, bc),
         env.unchangedBcs + env.replacedBcs.size());
  // [review] lines 5519-5520 missing (presumably a trace bump).
  // If there are throw exit edges, make a copy of the state (except
  // stacks) in case we need to propagate across throw exits (if
  // [review] line 5523 missing (rest of this comment).
  if (!env.stateBefore && env.blk.throwExit != NoBlockId) {
    env.stateBefore.emplace(with_throwable_only(env.index, env.state));
  // [review] lines 5526-5529 missing.
  default_dispatch(env, bc);
  // A reduced op has already been rewritten; nothing more to do.
  if (env.flags.reduced) return;
  // Returns whether all pushed values are known constants, in which case the
  // op can be replaced by pops + constant pushes.
  auto const_prop = [&] {
    if (!options.ConstantProp || !env.flags.canConstProp) return false;
    auto const numPushed = bc.numPush();
    TinyVector<TypedValue> cells;
    // [review] lines 5539-5540 missing (presumably the index declaration).
    while (i < numPushed) {
      auto const v = tv(topT(env, i));
      if (!v) return false;
      cells.push_back(*v);
    // [review] lines 5545-5547 missing (increment / loop close).
    if (env.flags.wasPEI) {
      ITRACE(2, " nothrow (due to constprop)\n");
      env.flags.wasPEI = false;
    // [review] line 5551 missing.
    if (!env.flags.effectFree) {
      ITRACE(2, " effect_free (due to constprop)\n");
      env.flags.effectFree = true;
  // [review] lines 5555-5556 missing.
    // If we're doing inline interp, don't actually perform the
    // constprop. If we do, we can infer static types that won't
    // actually exist at runtime.
    if (any(env.collect.opts & CollectionOpts::Inlining)) {
      ITRACE(2, " inlining, skipping actual constprop\n");
    // [review] lines 5562-5566 missing.
    auto const numPop = bc.numPop();
    for (auto j = 0; j < numPop; j++) {
      auto const flavor = bc.popFlavor(j);
      if (flavor == Flavor::C) {
        interpStep(env, bc::PopC {});
      } else if (flavor == Flavor::U) {
        interpStep(env, bc::PopU {});
      // [review] line 5574 missing (presumably the else opener).
        assertx(flavor == Flavor::CU);
        auto const& popped = topT(env);
        if (popped.subtypeOf(BUninit)) {
          interpStep(env, bc::PopU {});
        // [review] line 5579 missing (presumably the else opener).
          assertx(popped.subtypeOf(BInitCell));
          interpStep(env, bc::PopC {});
    // [review] lines 5582-5586 missing (closers and the start of the
    // constant-push loop the next two lines belong to).
      push(env, from_cell(cells[i]));
      record(env, gen_constant(cells[i]));
  // [review] lines 5589-5596 missing (end of the lambda and its invocation).
  assertx(!env.flags.effectFree || !env.flags.wasPEI);
  if (env.flags.wasPEI) {
    ITRACE(2, " PEI.\n");
    // Propagate the pre-instruction state across the throw edge.
    if (env.stateBefore) {
      env.propagate(env.blk.throwExit, &*env.stateBefore);
  // [review] lines 5602-5603 missing.
    env.stateBefore.reset();
  // [review] function tail (after 5604) missing from this extract.
// Interpret one bytecode, tracking its source location.
void interpOne(ISS& env, const Bytecode& bc) {
  env.srcLoc = bc.srcLoc;
  interpStep(env, bc);
  // [review] closing brace missing from this extract.
// Speculatively interpret a block to see where control flow ends up, bailing
// (NoBlockId, per the visible returns) if anything non-effect-free happens.
BlockId speculate(Interp& interp) {
  auto low_water = interp.state.stack.size();
  interp.collect.opts = interp.collect.opts | CollectionOpts::Speculating;
  // [review] line 5618 missing (presumably a scope-exit opener for the
  // restore below).
  interp.collect.opts = interp.collect.opts - CollectionOpts::Speculating;
  // [review] lines 5620-5621 missing.
  auto failed = false;
  // Any propagate during speculation means we must give up.
  ISS env { interp, [&] (BlockId, const State*) { failed = true; } };
  FTRACE(4, " Speculate B{}\n", interp.bid);
  for (auto const& bc : interp.blk->hhbcs) {
    assertx(!interp.state.unreachable);
    // Account for ops whose pops don't match numPop directly.
    auto const numPop = bc.numPop() +
      (bc.op == Op::CGetL2 ? 1 :
       bc.op == Op::Dup ? -1 : 0);
    if (interp.state.stack.size() - numPop < low_water) {
      low_water = interp.state.stack.size() - numPop;
    // [review] lines 5633-5636 missing (presumably interpOne + failed check).
      env.collect.mInstrState.clear();
      FTRACE(3, " Bailing from speculate because propagate was called\n");
    // [review] lines 5639-5641 missing.
    auto const& flags = env.flags;
    if (!flags.effectFree) {
      env.collect.mInstrState.clear();
      FTRACE(3, " Bailing from speculate because not effect free\n");
    // [review] lines 5646-5648 missing.
    assertx(!flags.returned);
    if (flags.jmpDest != NoBlockId && interp.state.stack.size() == low_water) {
      FTRACE(2, " Speculate found target block {}\n", flags.jmpDest);
      return flags.jmpDest;
  // [review] lines 5654-5656 missing (loop close).
  if (interp.state.stack.size() != low_water) {
    // [review] line 5658 missing (the FTRACE these literals belong to).
      " Bailing from speculate because the speculated block "
      "left items on the stack\n");
  // [review] lines 5661-5663 missing.
  if (interp.blk->fallthrough == NoBlockId) {
    // [review] line 5665 missing (the FTRACE this literal belongs to).
      " Bailing from speculate because there was no fallthrough");
  // [review] lines 5667-5669 missing.
  FTRACE(2, " Speculate found fallthrough block {}\n",
         interp.blk->fallthrough);
  return interp.blk->fallthrough;
  // [review] closing brace missing from this extract.
// Follow single-purpose successor blocks from `orig` via speculate(), and (if
// updateTaken) retarget the branch in the last op accordingly.
BlockId speculateHelper(ISS& env, BlockId orig, bool updateTaken) {
  assertx(orig != NoBlockId);
  if (!will_reduce(env)) return orig;
  auto const last = last_op(env);
  bool endsInControlFlow = last && instrIsNonCallControlFlow(last->op);
  // [review] lines 5683-5685 missing (presumably `target`/pop-count
  // declarations).
  if (options.RemoveDeadBlocks) {
    State temp{env.state, State::Compact{}};
    // [review] line 5688 missing (presumably the loop opener).
      auto const& func = env.ctx.func;
      auto const targetBlk = func.blocks()[target].get();
      // Only blocks with a single predecessor may be folded through.
      if (!targetBlk->multiPred) break;
      auto const ok = [&] {
        switch (targetBlk->hhbcs.back().op) {
  // [review] lines 5694-5706 missing (switch cases, lambda close, and the
  // Interp construction these arguments belong to).
        env.index, env.ctx, env.collect, target, targetBlk, temp
  // [review] lines 5708-5709 missing.
      auto const old_size = temp.stack.size();
      auto const new_target = speculate(interp);
      if (new_target == NoBlockId) break;
      // [review] line 5713 missing.
      const ssize_t delta = old_size - temp.stack.size();
      assertx(delta >= 0);
      // A branch op cannot absorb extra pops from the folded block.
      if (delta && endsInControlFlow) break;
      // [review] lines 5717-5718 missing (presumably pop accounting).
      target = new_target;
      temp.stack.compact();
  // [review] lines 5721-5723 missing (loop/branch closers).
  if (endsInControlFlow && updateTaken) {
    // [review] line 5725 missing.
    auto needsUpdate = target != orig;
    // [review] lines 5727-5730 missing (the edge walk this check belongs to).
        if (bid != orig) needsUpdate = true;
    // [review] lines 5732-5735 missing.
      auto& bc = mutate_last_op(env);
      // [review] lines 5737-5738 missing (the edge-rewriting call this lambda
      // is an argument of).
        [&] (BlockId& bid) {
          bid = bid == orig ? target : NoBlockId;
  // [review] lines 5741-5746 missing (closers and the pop loop the lines
  // below belong to).
    auto const& popped = topT(env);
    if (popped.subtypeOf(BInitCell)) {
      interpStep(env, bc::PopC {});
    // [review] line 5750 missing (presumably the else opener).
      assertx(popped.subtypeOf(BUninit));
      interpStep(env, bc::PopU {});
  // [review] function tail (after 5752) missing from this extract.
//////////////////////////////////////////////////////////////////////

// Interpret an entire block, retrying from the start when the block's
// bytecode is mutated (env.reprocess), and report control-flow results and
// bytecode updates via RunFlags.
RunFlags run(Interp& interp, const State& in, PropagateFn propagate) {
  // [review] line 5764 missing (presumably the scope-exit opener for the
  // trace below).
  FTRACE(2, "out {}{}\n",
         state_string(*interp.ctx.func, interp.state, interp.collect),
         property_state_string(interp.collect.props));
  // [review] lines 5768-5769 missing.
  auto env = ISS { interp, propagate };
  auto ret = RunFlags {};
  // Package up the result, moving out the replaced-bytecode info.
  auto finish = [&] (BlockId fallthrough) {
    ret.updateInfo.fallthrough = fallthrough;
    ret.updateInfo.unchangedBcs = env.unchangedBcs;
    ret.updateInfo.replacedBcs = std::move(env.replacedBcs);
  // [review] lines 5776-5778 missing (return + lambda close).
  BytecodeVec retryBcs;
  auto retryOffset = interp.blk->hhbcs.size();
  auto size = retryOffset;
  BlockId retryFallthrough = interp.blk->fallthrough;
  // [review] lines 5783-5786 missing (the retry/index loops the code below
  // runs inside).
    finish_tracked_elems(env, 0);
    if (!env.reprocess) break;
    FTRACE(2, " Reprocess mutated block {}\n", interp.bid);
    assertx(env.unchangedBcs < retryOffset || env.replacedBcs.size());
    // [review] line 5791 missing.
    // Restart interpretation over the unchanged prefix plus the replacement
    // bytecodes.
    retryOffset = env.unchangedBcs;
    retryBcs = std::move(env.replacedBcs);
    env.unchangedBcs = 0;
    env.state.copy_from(in);
    env.reprocess = false;
    env.replacedBcs.clear();
    size = retryOffset + retryBcs.size();
  // [review] lines 5799-5802 missing.
    // Bytecode idx comes from the original block before retryOffset, and from
    // the replacement vector after it.
    auto const& bc = idx < retryOffset ?
      interp.blk->hhbcs[idx] : retryBcs[idx - retryOffset];
    // [review] lines 5805-5807 missing (presumably the interpOne call).
    auto const& flags = env.flags;
    // [review] line 5809 missing.
    if (flags.wasPEI) ret.noThrow = false;
    // [review] line 5811 missing.
    if (interp.collect.effectFree && !flags.effectFree) {
      interp.collect.effectFree = false;
      if (any(interp.collect.opts & CollectionOpts::EffectFreeOnly)) {
        env.collect.mInstrState.clear();
        FTRACE(2, "  Bailing because not effect free\n");
        return finish(NoBlockId);
    // [review] lines 5818-5820 missing.
    if (flags.returned) {
      always_assert(idx == size);
      if (env.reprocess) continue;
      // [review] line 5824 missing.
      always_assert(interp.blk->fallthrough == NoBlockId);
      assertx(!ret.returned);
      FTRACE(2, "  returned {}\n", show(*flags.returned));
      ret.retParam = flags.retParam;
      ret.returned = flags.returned;
      return finish(NoBlockId);
    // [review] lines 5831-5832 missing.
    if (flags.jmpDest != NoBlockId) {
      always_assert(idx == size);
      auto const hasFallthrough = [&] {
        if (flags.jmpDest != interp.blk->fallthrough) {
          FTRACE(2, " <took branch; no fallthrough>\n");
          auto const last = last_op(env);
          return !last || !instrIsNonCallControlFlow(last->op);
        // [review] line 5840 missing (presumably the else opener).
          FTRACE(2, " <branch never taken>\n");
      // [review] lines 5842-5844 missing (lambda close / invocation).
      if (hasFallthrough) retryFallthrough = flags.jmpDest;
      if (env.reprocess) continue;
      finish_tracked_elems(env, 0);
      auto const newDest = speculateHelper(env, flags.jmpDest, true);
      propagate(newDest, &interp.state);
      return finish(hasFallthrough ? newDest : NoBlockId);
    // [review] lines 5851-5852 missing.
    if (interp.state.unreachable) {
      if (env.reprocess) {
      // [review] lines 5855-5857 missing.
      FTRACE(2, " <bytecode fallthrough is unreachable>\n");
      finish_tracked_elems(env, 0);
      return finish(NoBlockId);
  // [review] lines 5861-5863 missing (loop closers).
  FTRACE(2, " <end block>\n");
  if (retryFallthrough != NoBlockId) {
    retryFallthrough = speculateHelper(env, retryFallthrough, false);
    propagate(retryFallthrough, &interp.state);
  // [review] line 5868 missing.
  return finish(retryFallthrough);
  // [review] closing brace missing from this extract.
// Interpret a single bytecode with no state propagation, returning the
// resulting flags (per the StepFlags return type).
StepFlags step(Interp& interp, const Bytecode& op) {
  auto noop = [] (BlockId, const State*) {};
  ISS env { interp, noop };
  // [review] line 5875 missing.
  default_dispatch(env, op);
  if (env.state.unreachable) {
    env.collect.mInstrState.clear();
  // [review] line 5879 missing.
  assertx(env.trackedElems.empty());
  // [review] lines 5881-5882 missing (presumably `return env.flags;` and the
  // closing brace).
// Dispatch wrapper that first flushes tracked (in-progress) array elements
// the incoming op would invalidate, then handles terminal/unreachable ops.
void default_dispatch(ISS& env, const Bytecode& op) {
  if (!env.trackedElems.empty()) {
    // How many of this op's pops touch tracked stack slots.
    auto const pops = [&] () -> uint32_t {
    // [review] lines 5887-5888 missing (presumably the switch opener).
        case Op::AddNewElemC:
          return numPop(op) - 1;
    // [review] lines 5891-5898 missing (other cases / default / lambda
    // close).
    finish_tracked_elems(env, env.state.stack.size() - pops);
  // [review] lines 5900-5901 missing (presumably the dispatch(env, op)
  // call).
  if (instrFlags(op.op) & TF && env.flags.jmpDest == NoBlockId) {
  // [review] line 5903 missing (the terminal-op branch body).
  } else if (env.state.unreachable) {
    env.collect.mInstrState.clear();
  // [review] function tail (after 5905) missing from this extract.
// Public entry point: the type of $this for ctx, if known.
Optional<Type> thisType(const Index& index, Context ctx) {
  return thisTypeFromContext(index, ctx);
// [review] closing brace missing from this extract.

//////////////////////////////////////////////////////////////////////