2 +----------------------------------------------------------------------+
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
16 #include "hphp/hhbbc/interp.h"
23 #include <folly/gen/Base.h>
24 #include <folly/gen/String.h>
26 #include "hphp/util/hash-set.h"
27 #include "hphp/util/trace.h"
28 #include "hphp/runtime/base/array-init.h"
29 #include "hphp/runtime/base/array-iterator.h"
30 #include "hphp/runtime/base/collections.h"
31 #include "hphp/runtime/base/static-string-table.h"
32 #include "hphp/runtime/base/tv-arith.h"
33 #include "hphp/runtime/base/tv-comparisons.h"
34 #include "hphp/runtime/base/tv-conversions.h"
35 #include "hphp/runtime/base/type-structure.h"
36 #include "hphp/runtime/base/type-structure-helpers.h"
37 #include "hphp/runtime/base/type-structure-helpers-defs.h"
38 #include "hphp/runtime/vm/runtime.h"
39 #include "hphp/runtime/vm/unit-util.h"
41 #include "hphp/runtime/ext/hh/ext_hh.h"
43 #include "hphp/hhbbc/analyze.h"
44 #include "hphp/hhbbc/bc.h"
45 #include "hphp/hhbbc/cfg.h"
46 #include "hphp/hhbbc/class-util.h"
47 #include "hphp/hhbbc/eval-cell.h"
48 #include "hphp/hhbbc/index.h"
49 #include "hphp/hhbbc/interp-state.h"
50 #include "hphp/hhbbc/optimize.h"
51 #include "hphp/hhbbc/representation.h"
52 #include "hphp/hhbbc/type-builtins.h"
53 #include "hphp/hhbbc/type-ops.h"
54 #include "hphp/hhbbc/type-structure.h"
55 #include "hphp/hhbbc/type-system.h"
56 #include "hphp/hhbbc/unit-util.h"
57 #include "hphp/hhbbc/wide-func.h"
59 #include "hphp/hhbbc/stats.h"
61 #include "hphp/hhbbc/interp-internal.h"
63 namespace HPHP::HHBBC
{
65 //////////////////////////////////////////////////////////////////////
69 const StaticString
s_MethCallerHelper("__SystemLib\\MethCallerHelper");
70 const StaticString
s_PHP_Incomplete_Class("__PHP_Incomplete_Class");
71 const StaticString
s_IMemoizeParam("HH\\IMemoizeParam");
72 const StaticString
s_getInstanceKey("getInstanceKey");
73 const StaticString
s_Closure("Closure");
74 const StaticString
s_this("HH\\this");
76 bool poppable(Op op
) {
88 case Op::NewDictArray
:
97 void interpStep(ISS
& env
, const Bytecode
& bc
);
99 void record(ISS
& env
, const Bytecode
& bc
) {
100 if (bc
.srcLoc
!= env
.srcLoc
) {
102 tmp
.srcLoc
= env
.srcLoc
;
103 return record(env
, tmp
);
106 if (!env
.replacedBcs
.size() &&
107 env
.unchangedBcs
< env
.blk
.hhbcs
.size() &&
108 bc
== env
.blk
.hhbcs
[env
.unchangedBcs
]) {
113 ITRACE(2, " => {}\n", show(env
.ctx
.func
, bc
));
114 env
.replacedBcs
.push_back(bc
);
117 // The number of pops as seen by interp.
118 uint32_t numPop(const Bytecode
& bc
) {
119 if (bc
.op
== Op::CGetL2
) return 1;
123 // The number of pushes as seen by interp.
124 uint32_t numPush(const Bytecode
& bc
) {
125 if (bc
.op
== Op::CGetL2
) return 2;
129 void reprocess(ISS
& env
) {
130 env
.reprocess
= true;
133 ArrayData
** add_elem_array(ISS
& env
) {
134 auto const idx
= env
.trackedElems
.back().idx
;
135 if (idx
< env
.unchangedBcs
) {
136 auto const DEBUG_ONLY
& bc
= env
.blk
.hhbcs
[idx
];
137 assertx(bc
.op
== Op::Concat
);
140 assertx(idx
>= env
.unchangedBcs
);
141 auto& bc
= env
.replacedBcs
[idx
- env
.unchangedBcs
];
142 auto arr
= [&] () -> const ArrayData
** {
144 case Op::Vec
: return &bc
.Vec
.arr1
;
145 case Op::Dict
: return &bc
.Dict
.arr1
;
146 case Op::Keyset
: return &bc
.Keyset
.arr1
;
147 case Op::Concat
: return nullptr;
148 default: not_reached();
151 return const_cast<ArrayData
**>(arr
);
154 bool start_add_elem(ISS
& env
, Type
& ty
, Op op
) {
155 auto value
= tvNonStatic(ty
);
156 if (!value
|| !isArrayLikeType(value
->m_type
)) return false;
158 if (op
== Op::AddElemC
) {
159 reduce(env
, bc::PopC
{}, bc::PopC
{}, bc::PopC
{});
161 reduce(env
, bc::PopC
{}, bc::PopC
{});
163 env
.trackedElems
.emplace_back(
164 env
.state
.stack
.size(),
165 env
.unchangedBcs
+ env
.replacedBcs
.size()
168 auto const arr
= value
->m_data
.parr
;
169 env
.replacedBcs
.push_back(
171 if (arr
->isVecType()) return bc::Vec
{ arr
};
172 if (arr
->isDictType()) return bc::Dict
{ arr
};
173 if (arr
->isKeysetType()) return bc::Keyset
{ arr
};
174 always_assert(false);
177 env
.replacedBcs
.back().srcLoc
= env
.srcLoc
;
178 ITRACE(2, "(addelem* -> {}\n",
179 show(env
.ctx
.func
, env
.replacedBcs
.back()));
180 push(env
, std::move(ty
));
186 * Alter the saved add_elem array in a way that preserves its provenance tag
187 * or adds a new one if applicable (i.e. the array is a vec or dict)
189 * The `mutate` parameter should be callable with an ArrayData** pointing to the
190 * add_elem array cached in the interp state and should write to it directly.
192 template <typename Fn
>
193 bool mutate_add_elem_array(ISS
& env
, Fn
&& mutate
) {
194 auto const arr
= add_elem_array(env
);
195 if (!arr
) return false;
200 void finish_tracked_elem(ISS
& env
) {
201 auto const arr
= add_elem_array(env
);
202 env
.trackedElems
.pop_back();
203 if (arr
) ArrayData::GetScalarArray(arr
);
206 void finish_tracked_elems(ISS
& env
, size_t depth
) {
207 while (!env
.trackedElems
.empty() && env
.trackedElems
.back().depth
>= depth
) {
208 finish_tracked_elem(env
);
212 uint32_t id_from_slot(ISS
& env
, int slot
) {
213 auto const id
= (env
.state
.stack
.end() - (slot
+ 1))->id
;
214 assertx(id
== StackElem::NoId
||
215 id
< env
.unchangedBcs
+ env
.replacedBcs
.size());
219 const Bytecode
* op_from_id(ISS
& env
, uint32_t id
) {
220 if (id
== StackElem::NoId
) return nullptr;
221 if (id
< env
.unchangedBcs
) return &env
.blk
.hhbcs
[id
];
222 auto const off
= id
- env
.unchangedBcs
;
223 assertx(off
< env
.replacedBcs
.size());
224 return &env
.replacedBcs
[off
];
227 void ensure_mutable(ISS
& env
, uint32_t id
) {
228 if (id
< env
.unchangedBcs
) {
229 auto const delta
= env
.unchangedBcs
- id
;
230 env
.replacedBcs
.resize(env
.replacedBcs
.size() + delta
);
231 for (auto i
= env
.replacedBcs
.size(); i
-- > delta
; ) {
232 env
.replacedBcs
[i
] = std::move(env
.replacedBcs
[i
- delta
]);
234 for (auto i
= 0; i
< delta
; i
++) {
235 env
.replacedBcs
[i
] = env
.blk
.hhbcs
[id
+ i
];
237 env
.unchangedBcs
= id
;
242 * Turn the instruction that wrote the slot'th element from the top of
243 * the stack into a Nop, adjusting the stack appropriately. If its the
244 * previous instruction, just rewind.
246 int kill_by_slot(ISS
& env
, int slot
) {
248 auto const id
= id_from_slot(env
, slot
);
249 assertx(id
!= StackElem::NoId
);
250 auto const sz
= env
.state
.stack
.size();
251 // if its the last bytecode we processed, we can rewind and avoid
252 // the reprocess overhead.
253 if (id
== env
.unchangedBcs
+ env
.replacedBcs
.size() - 1) {
255 return env
.state
.stack
.size() - sz
;
257 ensure_mutable(env
, id
);
258 auto& bc
= env
.replacedBcs
[id
- env
.unchangedBcs
];
259 auto const pop
= numPop(bc
);
260 auto const push
= numPush(bc
);
261 ITRACE(2, "kill_by_slot: slot={}, id={}, was {}\n",
262 slot
, id
, show(env
.ctx
.func
, bc
));
263 bc
= bc_with_loc(bc
.srcLoc
, bc::Nop
{});
264 env
.state
.stack
.kill(pop
, push
, id
);
266 return env
.state
.stack
.size() - sz
;
270 * Check whether an instruction can be inserted immediately after the
271 * slot'th stack entry was written. This is only possible if slot was
272 * the last thing written by the instruction that wrote it (ie some
273 * bytecodes push more than one value - there's no way to insert a
274 * bytecode that will write *between* those values on the stack).
276 bool can_insert_after_slot(ISS
& env
, int slot
) {
277 auto const it
= env
.state
.stack
.end() - (slot
+ 1);
278 if (it
->id
== StackElem::NoId
) return false;
279 if (auto const next
= it
.next_elem(1)) {
280 return next
->id
!= it
->id
;
286 * Insert a sequence of bytecodes after the instruction that wrote the
287 * slot'th element from the top of the stack.
289 * The entire sequence pops numPop, and pushes numPush stack
290 * elements. Only the last bytecode can push anything onto the stack,
291 * and the types it pushes are pointed to by types (if you have more
292 * than one bytecode that pushes, call this more than once).
294 void insert_after_slot(ISS
& env
, int slot
,
295 int numPop
, int numPush
, const Type
* types
,
296 const BytecodeVec
& bcs
) {
297 assertx(can_insert_after_slot(env
, slot
));
299 auto const id
= id_from_slot(env
, slot
);
300 assertx(id
!= StackElem::NoId
);
301 ensure_mutable(env
, id
+ 1);
302 env
.state
.stack
.insert_after(numPop
, numPush
, types
, bcs
.size(), id
);
303 env
.replacedBcs
.insert(env
.replacedBcs
.begin() + (id
+ 1 - env
.unchangedBcs
),
304 bcs
.begin(), bcs
.end());
305 using namespace folly::gen
;
306 ITRACE(2, "insert_after_slot: slot={}, id={} [{}]\n",
309 map([&] (const Bytecode
& bc
) { return show(env
.ctx
.func
, bc
); }) |
310 unsplit
<std::string
>(", "));
313 Bytecode
& mutate_last_op(ISS
& env
) {
314 assertx(will_reduce(env
));
316 if (!env
.replacedBcs
.size()) {
317 assertx(env
.unchangedBcs
);
318 env
.replacedBcs
.push_back(env
.blk
.hhbcs
[--env
.unchangedBcs
]);
320 return env
.replacedBcs
.back();
324 * Can be used to replace one op with another when rewind/reduce isn't
325 * safe (eg to change a SetL to a PopL - its not safe to rewind/reduce
326 * because the SetL changed both the Type and the equiv of its local).
328 void replace_last_op(ISS
& env
, Bytecode
&& bc
) {
329 auto& last
= mutate_last_op(env
);
330 auto const newPush
= numPush(bc
);
331 auto const oldPush
= numPush(last
);
332 auto const newPops
= numPop(bc
);
333 auto const oldPops
= numPop(last
);
335 assertx(newPush
<= oldPush
);
336 assertx(newPops
<= oldPops
);
338 if (newPush
!= oldPush
|| newPops
!= oldPops
) {
340 env
.state
.stack
.rewind(oldPops
- newPops
, oldPush
- newPush
);
342 ITRACE(2, "(replace: {}->{}\n",
343 show(env
.ctx
.func
, last
), show(env
.ctx
.func
, bc
));
344 last
= bc_with_loc(last
.srcLoc
, bc
);
349 //////////////////////////////////////////////////////////////////////
351 const Bytecode
* op_from_slot(ISS
& env
, int slot
, int prev
/* = 0 */) {
352 if (!will_reduce(env
)) return nullptr;
353 auto const id
= id_from_slot(env
, slot
);
354 if (id
== StackElem::NoId
) return nullptr;
355 if (id
< prev
) return nullptr;
356 return op_from_id(env
, id
- prev
);
359 const Bytecode
* last_op(ISS
& env
, int idx
/* = 0 */) {
360 if (!will_reduce(env
)) return nullptr;
362 if (env
.replacedBcs
.size() > idx
) {
363 return &env
.replacedBcs
[env
.replacedBcs
.size() - idx
- 1];
366 idx
-= env
.replacedBcs
.size();
367 if (env
.unchangedBcs
> idx
) {
368 return &env
.blk
.hhbcs
[env
.unchangedBcs
- idx
- 1];
374 * Assuming bc was just interped, rewind to the state immediately
375 * before it was interped.
377 * This is rarely what you want. Its used for constprop, where the
378 * bytecode has been interped, but not yet committed to the bytecode
379 * stream. We want to undo its effects, the spit out pops for its
380 * inputs, and commit a constant-generating bytecode.
382 void rewind(ISS
& env
, const Bytecode
& bc
) {
384 ITRACE(2, "(rewind: {}\n", show(env
.ctx
.func
, bc
));
385 env
.state
.stack
.rewind(numPop(bc
), numPush(bc
));
389 * Used for peephole opts. Will undo the *stack* effects of the last n
390 * committed byte codes, and remove them from the bytecode stream, in
391 * preparation for writing out an optimized replacement sequence.
393 * WARNING: Does not undo other changes to state, such as local types,
394 * local equivalency, and thisType. Take care when rewinding such
397 void rewind(ISS
& env
, int n
) {
400 while (env
.replacedBcs
.size()) {
401 rewind(env
, env
.replacedBcs
.back());
402 env
.replacedBcs
.pop_back();
406 rewind(env
, env
.blk
.hhbcs
[--env
.unchangedBcs
]);
410 void impl_vec(ISS
& env
, bool reduce
, BytecodeVec
&& bcs
) {
411 if (!will_reduce(env
)) reduce
= false;
414 using namespace folly::gen
;
415 ITRACE(2, "(reduce: {}\n",
417 map([&] (const Bytecode
& bc
) { return show(env
.ctx
.func
, bc
); }) |
418 unsplit
<std::string
>(", "));
420 auto ef
= !env
.flags
.reduced
|| env
.flags
.effectFree
;
422 for (auto const& bc
: bcs
) {
424 env
.flags
.jmpDest
== NoBlockId
&&
425 "you can't use impl with branching opcodes before last position"
428 if (!env
.flags
.effectFree
) ef
= false;
429 if (env
.state
.unreachable
|| env
.flags
.jmpDest
!= NoBlockId
) break;
431 env
.flags
.effectFree
= ef
;
432 } else if (!env
.flags
.reduced
) {
435 env
.flags
.reduced
= true;
440 SCOPE_EXIT
{ env
.analyzeDepth
--; };
442 // We should be at the start of a bytecode.
443 assertx(env
.flags
.wasPEI
&&
444 !env
.flags
.canConstProp
&&
445 !env
.flags
.effectFree
);
447 env
.flags
.wasPEI
= false;
448 env
.flags
.canConstProp
= true;
449 env
.flags
.effectFree
= true;
451 for (auto const& bc
: bcs
) {
452 assertx(env
.flags
.jmpDest
== NoBlockId
&&
453 "you can't use impl with branching opcodes before last position");
455 auto const wasPEI
= env
.flags
.wasPEI
;
456 auto const canConstProp
= env
.flags
.canConstProp
;
457 auto const effectFree
= env
.flags
.effectFree
;
459 ITRACE(3, " (impl {}\n", show(env
.ctx
.func
, bc
));
460 env
.flags
.wasPEI
= true;
461 env
.flags
.canConstProp
= false;
462 env
.flags
.effectFree
= false;
463 default_dispatch(env
, bc
);
465 if (env
.flags
.canConstProp
) {
467 if (env
.flags
.effectFree
&& !env
.flags
.wasPEI
) return;
468 auto stk
= env
.state
.stack
.end();
469 for (auto i
= bc
.numPush(); i
--; ) {
471 if (!is_scalar(stk
->type
)) return;
473 env
.flags
.effectFree
= true;
474 env
.flags
.wasPEI
= false;
478 // If any of the opcodes in the impl list said they could throw,
479 // then the whole thing could throw.
480 env
.flags
.wasPEI
= env
.flags
.wasPEI
|| wasPEI
;
481 env
.flags
.canConstProp
= env
.flags
.canConstProp
&& canConstProp
;
482 env
.flags
.effectFree
= env
.flags
.effectFree
&& effectFree
;
483 if (env
.state
.unreachable
|| env
.flags
.jmpDest
!= NoBlockId
) break;
487 LocalId
equivLocalRange(ISS
& env
, const LocalRange
& range
) {
488 auto bestRange
= range
.first
;
489 auto equivFirst
= findLocEquiv(env
, range
.first
);
490 if (equivFirst
== NoLocalId
) return bestRange
;
492 if (equivFirst
< bestRange
) {
493 auto equivRange
= [&] {
494 // local equivalency includes differing by Uninit, so we need
495 // to check the types.
496 if (peekLocRaw(env
, equivFirst
) != peekLocRaw(env
, range
.first
)) {
500 for (uint32_t i
= 1; i
< range
.count
; ++i
) {
501 if (!locsAreEquiv(env
, equivFirst
+ i
, range
.first
+ i
) ||
502 peekLocRaw(env
, equivFirst
+ i
) !=
503 peekLocRaw(env
, range
.first
+ i
)) {
512 bestRange
= equivFirst
;
515 equivFirst
= findLocEquiv(env
, equivFirst
);
516 assertx(equivFirst
!= NoLocalId
);
517 } while (equivFirst
!= range
.first
);
522 SString
getNameFromType(const Type
& t
) {
523 if (!t
.subtypeOf(BStr
) && !t
.subtypeOf(BLazyCls
)) return nullptr;
524 if (is_specialized_string(t
)) return sval_of(t
);
525 if (is_specialized_lazycls(t
)) return lazyclsval_of(t
);
529 //////////////////////////////////////////////////////////////////////
534 * Very simple check to see if the top level class is reified or not
535 * If not we can reduce a VerifyTypeTS to a regular VerifyType
537 bool shouldReduceToNonReifiedVerifyType(ISS
& env
, SArray ts
) {
538 if (get_ts_kind(ts
) != TypeStructure::Kind::T_unresolved
) return false;
539 auto const clsName
= get_ts_classname(ts
);
540 auto const rcls
= env
.index
.resolve_class(env
.ctx
, clsName
);
541 if (rcls
&& rcls
->resolved()) return !rcls
->cls()->hasReifiedGenerics
;
542 // Type aliases cannot have reified generics
543 return env
.index
.lookup_type_alias(clsName
) != nullptr;
548 //////////////////////////////////////////////////////////////////////
550 namespace interp_step
{
552 void in(ISS
& env
, const bc::Nop
&) { reduce(env
); }
554 void in(ISS
& env
, const bc::PopC
&) {
555 if (auto const last
= last_op(env
)) {
556 if (poppable(last
->op
)) {
560 if (last
->op
== Op::This
) {
561 // can't rewind This because it removed null from thisType (so
562 // CheckThis at this point is a no-op) - and note that it must
563 // have *been* nullable, or we'd have turned it into a
564 // `BareThis NeverNull`
565 replace_last_op(env
, bc::CheckThis
{});
568 if (last
->op
== Op::SetL
) {
569 // can't rewind a SetL because it changes local state
570 replace_last_op(env
, bc::PopL
{ last
->SetL
.loc1
});
573 if (last
->op
== Op::CGetL2
) {
574 auto loc
= last
->CGetL2
.nloc1
;
576 return reduce(env
, bc::PopC
{}, bc::CGetL
{ loc
});
584 void in(ISS
& env
, const bc::PopU
&) {
585 if (auto const last
= last_op(env
)) {
586 if (last
->op
== Op::NullUninit
) {
591 effect_free(env
); popU(env
);
594 void in(ISS
& env
, const bc::PopU2
&) {
596 auto equiv
= topStkEquiv(env
);
597 auto val
= popC(env
);
599 push(env
, std::move(val
), equiv
!= StackDupId
? equiv
: NoLocalId
);
602 void in(ISS
& env
, const bc::EntryNop
&) { effect_free(env
); }
604 void in(ISS
& env
, const bc::Dup
& /*op*/) {
606 auto equiv
= topStkEquiv(env
);
607 auto val
= popC(env
);
608 push(env
, val
, equiv
);
609 push(env
, std::move(val
), StackDupId
);
612 void in(ISS
& env
, const bc::AssertRATL
& op
) {
613 mayReadLocal(env
, op
.loc1
);
617 void in(ISS
& env
, const bc::AssertRATStk
&) {
621 void in(ISS
& env
, const bc::BreakTraceHint
&) { effect_free(env
); }
623 void in(ISS
& env
, const bc::CGetCUNop
&) {
625 auto const t
= popCU(env
);
626 push(env
, remove_uninit(t
));
629 void in(ISS
& env
, const bc::UGetCUNop
&) {
635 void in(ISS
& env
, const bc::Null
&) {
637 push(env
, TInitNull
);
640 void in(ISS
& env
, const bc::NullUninit
&) {
645 void in(ISS
& env
, const bc::True
&) {
650 void in(ISS
& env
, const bc::False
&) {
655 void in(ISS
& env
, const bc::Int
& op
) {
657 push(env
, ival(op
.arg1
));
660 void in(ISS
& env
, const bc::Double
& op
) {
662 push(env
, dval(op
.dbl1
));
665 void in(ISS
& env
, const bc::String
& op
) {
667 push(env
, sval(op
.str1
));
670 void in(ISS
& env
, const bc::Vec
& op
) {
671 assertx(op
.arr1
->isVecType());
673 push(env
, vec_val(op
.arr1
));
676 void in(ISS
& env
, const bc::Dict
& op
) {
677 assertx(op
.arr1
->isDictType());
679 push(env
, dict_val(op
.arr1
));
682 void in(ISS
& env
, const bc::Keyset
& op
) {
683 assertx(op
.arr1
->isKeysetType());
685 push(env
, keyset_val(op
.arr1
));
688 void in(ISS
& env
, const bc::NewDictArray
& op
) {
690 push(env
, op
.arg1
== 0 ? dict_empty() : some_dict_empty());
693 void in(ISS
& env
, const bc::NewStructDict
& op
) {
694 auto map
= MapElems
{};
695 for (auto it
= op
.keys
.end(); it
!= op
.keys
.begin(); ) {
697 make_tv
<KindOfPersistentString
>(*--it
),
698 MapElem::SStrKey(popC(env
))
701 push(env
, dict_map(std::move(map
)));
706 void in(ISS
& env
, const bc::NewVec
& op
) {
707 auto elems
= std::vector
<Type
>{};
708 elems
.reserve(op
.arg1
);
709 for (auto i
= uint32_t{0}; i
< op
.arg1
; ++i
) {
710 elems
.push_back(std::move(topC(env
, op
.arg1
- i
- 1)));
712 discard(env
, op
.arg1
);
715 push(env
, vec(std::move(elems
)));
718 void in(ISS
& env
, const bc::NewKeysetArray
& op
) {
719 assertx(op
.arg1
> 0);
720 auto map
= MapElems
{};
724 auto effectful
= false;
725 for (auto i
= uint32_t{0}; i
< op
.arg1
; ++i
) {
726 auto [key
, promotion
] = promote_classlike_to_key(popC(env
));
728 auto const keyValid
= key
.subtypeOf(BArrKey
);
729 if (!keyValid
) key
= intersection_of(std::move(key
), TArrKey
);
730 if (key
.is(BBottom
)) {
737 if (auto const v
= tv(key
)) {
738 map
.emplace_front(*v
, MapElem::KeyFromType(key
, key
));
744 ty
|= std::move(key
);
745 effectful
|= !keyValid
|| (promotion
== Promotion::YesMightThrow
);
754 push(env
, keyset_map(std::move(map
)));
756 push(env
, keyset_n(ty
));
764 void in(ISS
& env
, const bc::AddElemC
&) {
765 auto const v
= topC(env
, 0);
766 auto const [k
, promotion
] = promote_classlike_to_key(topC(env
, 1));
767 auto const promoteMayThrow
= (promotion
== Promotion::YesMightThrow
);
769 auto inTy
= (env
.state
.stack
.end() - 3).unspecialize();
770 // Unspecialize modifies the stack location
771 if (env
.undo
) env
.undo
->onStackWrite(env
.state
.stack
.size() - 3, inTy
);
773 auto outTy
= [&] (const Type
& key
) -> Optional
<Type
> {
774 if (!key
.subtypeOf(BArrKey
)) return std::nullopt
;
775 if (inTy
.subtypeOf(BDict
)) {
776 auto const r
= array_like_set(std::move(inTy
), key
, v
);
777 if (!r
.second
) return r
.first
;
782 if (outTy
&& !promoteMayThrow
&& will_reduce(env
)) {
783 if (!env
.trackedElems
.empty() &&
784 env
.trackedElems
.back().depth
+ 3 == env
.state
.stack
.size()) {
785 auto const handled
= [&] (const Type
& key
) {
786 if (!key
.subtypeOf(BArrKey
)) return false;
788 if (!ktv
) return false;
790 if (!vtv
) return false;
791 return mutate_add_elem_array(env
, [&](ArrayData
** arr
) {
792 *arr
= (*arr
)->setMove(*ktv
, *vtv
);
796 (env
.state
.stack
.end() - 3)->type
= std::move(*outTy
);
797 reduce(env
, bc::PopC
{}, bc::PopC
{});
798 ITRACE(2, "(addelem* -> {}\n",
800 env
.replacedBcs
[env
.trackedElems
.back().idx
- env
.unchangedBcs
]));
804 if (start_add_elem(env
, *outTy
, Op::AddElemC
)) return;
809 finish_tracked_elems(env
, env
.state
.stack
.size());
811 if (!outTy
) return push(env
, TInitCell
);
813 if (outTy
->subtypeOf(BBottom
)) {
815 } else if (!promoteMayThrow
) {
819 push(env
, std::move(*outTy
));
822 void in(ISS
& env
, const bc::AddNewElemC
&) {
824 auto inTy
= (env
.state
.stack
.end() - 2).unspecialize();
825 // Unspecialize modifies the stack location
826 if (env
.undo
) env
.undo
->onStackWrite(env
.state
.stack
.size() - 2, inTy
);
828 auto outTy
= [&] () -> Optional
<Type
> {
829 if (inTy
.subtypeOf(BVec
| BKeyset
)) {
830 auto const r
= array_like_newelem(std::move(inTy
), v
);
831 if (!r
.second
) return r
.first
;
836 if (outTy
&& will_reduce(env
)) {
837 if (!env
.trackedElems
.empty() &&
838 env
.trackedElems
.back().depth
+ 2 == env
.state
.stack
.size()) {
839 auto const handled
= [&] {
841 if (!vtv
) return false;
842 return mutate_add_elem_array(env
, [&](ArrayData
** arr
) {
843 *arr
= (*arr
)->appendMove(*vtv
);
847 (env
.state
.stack
.end() - 2)->type
= std::move(*outTy
);
848 reduce(env
, bc::PopC
{});
849 ITRACE(2, "(addelem* -> {}\n",
851 env
.replacedBcs
[env
.trackedElems
.back().idx
- env
.unchangedBcs
]));
855 if (start_add_elem(env
, *outTy
, Op::AddNewElemC
)) {
862 finish_tracked_elems(env
, env
.state
.stack
.size());
864 if (!outTy
) return push(env
, TInitCell
);
866 if (outTy
->is(BBottom
)) {
871 push(env
, std::move(*outTy
));
874 void in(ISS
& env
, const bc::NewCol
& op
) {
875 auto const type
= static_cast<CollectionType
>(op
.subop1
);
876 auto const name
= collections::typeToString(type
);
877 push(env
, objExact(env
.index
.builtin_class(name
)));
881 void in(ISS
& env
, const bc::NewPair
& /*op*/) {
882 popC(env
); popC(env
);
883 auto const name
= collections::typeToString(CollectionType::Pair
);
884 push(env
, objExact(env
.index
.builtin_class(name
)));
888 void in(ISS
& env
, const bc::ColFromArray
& op
) {
889 auto const src
= popC(env
);
890 auto const type
= static_cast<CollectionType
>(op
.subop1
);
891 assertx(type
!= CollectionType::Pair
);
892 if (type
== CollectionType::Vector
|| type
== CollectionType::ImmVector
) {
893 if (src
.subtypeOf(TVec
)) effect_free(env
);
895 assertx(type
== CollectionType::Map
||
896 type
== CollectionType::ImmMap
||
897 type
== CollectionType::Set
||
898 type
== CollectionType::ImmSet
);
899 if (src
.subtypeOf(TDict
)) effect_free(env
);
901 auto const name
= collections::typeToString(type
);
902 push(env
, objExact(env
.index
.builtin_class(name
)));
905 void in(ISS
& env
, const bc::CnsE
& op
) {
906 auto t
= env
.index
.lookup_constant(env
.ctx
, op
.str1
);
907 if (t
.strictSubtypeOf(TInitCell
)) {
908 // constprop will take care of nothrow *if* its a constant; and if
909 // its not, we might trigger autoload.
912 push(env
, std::move(t
));
917 void clsCnsImpl(ISS
& env
, const Type
& cls
, const Type
& name
) {
918 if (!cls
.couldBe(BCls
) || !name
.couldBe(BStr
)) {
924 auto lookup
= env
.index
.lookup_class_constant(env
.ctx
, cls
, name
);
925 if (lookup
.found
== TriBool::No
) {
931 if (cls
.subtypeOf(BCls
) &&
932 name
.subtypeOf(BStr
) &&
933 lookup
.found
== TriBool::Yes
&&
934 !lookup
.mightThrow
) {
939 push(env
, std::move(lookup
.ty
));
944 void in(ISS
& env
, const bc::ClsCns
& op
) {
945 auto const cls
= topC(env
);
947 if (cls
.subtypeOf(BCls
) && is_specialized_cls(cls
)) {
948 auto const& dcls
= dcls_of(cls
);
949 if (dcls
.isExact()) {
951 env
, bc::PopC
{}, bc::ClsCnsD
{ op
.str1
, dcls
.cls().name() }
957 clsCnsImpl(env
, cls
, sval(op
.str1
));
960 void in(ISS
& env
, const bc::ClsCnsL
& op
) {
961 auto const cls
= topC(env
);
962 auto const name
= locRaw(env
, op
.loc1
);
964 if (name
.subtypeOf(BStr
) && is_specialized_string(name
)) {
965 return reduce(env
, bc::ClsCns
{ sval_of(name
) });
969 clsCnsImpl(env
, cls
, name
);
972 void in(ISS
& env
, const bc::ClsCnsD
& op
) {
973 auto const rcls
= env
.index
.resolve_class(env
.ctx
, op
.str2
);
974 if (!rcls
|| !rcls
->resolved()) {
975 push(env
, TInitCell
);
978 clsCnsImpl(env
, clsExact(*rcls
), sval(op
.str1
));
981 void in(ISS
& env
, const bc::File
&) {
982 if (!options
.SourceRootForFileBC
) {
984 return push(env
, TSStr
);
987 auto filename
= env
.ctx
.func
->originalFilename
988 ? env
.ctx
.func
->originalFilename
989 : env
.ctx
.func
->unit
->filename
;
990 if (!FileUtil::isAbsolutePath(filename
->slice())) {
991 filename
= makeStaticString(
992 *options
.SourceRootForFileBC
+ filename
->toCppString()
996 push(env
, sval(filename
));
999 void in(ISS
& env
, const bc::Dir
&) {
1000 if (!options
.SourceRootForFileBC
) {
1002 return push(env
, TSStr
);
1005 auto filename
= env
.ctx
.func
->originalFilename
1006 ? env
.ctx
.func
->originalFilename
1007 : env
.ctx
.func
->unit
->filename
;
1008 if (!FileUtil::isAbsolutePath(filename
->slice())) {
1009 filename
= makeStaticString(
1010 *options
.SourceRootForFileBC
+ filename
->toCppString()
1014 push(env
, sval(makeStaticString(FileUtil::dirname(StrNR
{filename
}))));
1017 void in(ISS
& env
, const bc::Method
&) {
1018 auto const fullName
= [&] () -> const StringData
* {
1019 if (!env
.ctx
.func
->cls
) return env
.ctx
.func
->name
;
1020 return makeStaticString(
1021 folly::sformat("{}::{}", env
.ctx
.func
->cls
->name
, env
.ctx
.func
->name
)
1025 push(env
, sval(fullName
));
1028 void in(ISS
& env
, const bc::FuncCred
&) { effect_free(env
); push(env
, TObj
); }
1030 void in(ISS
& env
, const bc::ClassName
& op
) {
1031 auto const ty
= topC(env
);
1032 if (ty
.subtypeOf(BCls
) && is_specialized_cls(ty
)) {
1033 auto const& dcls
= dcls_of(ty
);
1034 if (dcls
.isExact()) {
1037 bc::String
{ dcls
.cls().name() });
1045 void in(ISS
& env
, const bc::LazyClassFromClass
&) {
1046 auto const ty
= topC(env
);
1047 if (ty
.subtypeOf(BCls
) && is_specialized_cls(ty
)) {
1048 auto const& dcls
= dcls_of(ty
);
1049 if (dcls
.isExact()) {
1052 bc::LazyClass
{ dcls
.cls().name() });
1057 push(env
, TLazyCls
);
1060 void concatHelper(ISS
& env
, uint32_t n
) {
1061 auto changed
= false;
1062 auto side_effects
= false;
1063 if (will_reduce(env
)) {
1064 auto litstr
= [&] (SString next
, uint32_t i
) -> SString
{
1065 auto const t
= topC(env
, i
);
1066 auto const v
= tv(t
);
1067 if (!v
) return nullptr;
1068 if (!isStringType(v
->m_type
) && !isIntType(v
->m_type
)) return nullptr;
1069 auto const cell
= eval_cell_value(
1071 auto const s
= makeStaticString(
1073 StringData::Make(tvAsCVarRef(&*v
).toString().get(), next
) :
1074 tvAsCVarRef(&*v
).toString().get());
1075 return make_tv
<KindOfString
>(s
);
1078 if (!cell
) return nullptr;
1079 return cell
->m_data
.pstr
;
1082 auto fold
= [&] (uint32_t slot
, uint32_t num
, SString result
) {
1083 auto const cell
= make_tv
<KindOfPersistentString
>(result
);
1084 auto const ty
= from_cell(cell
);
1085 BytecodeVec bcs
{num
, bc::PopC
{}};
1086 if (num
> 1) bcs
.push_back(gen_constant(cell
));
1088 reduce(env
, std::move(bcs
));
1090 insert_after_slot(env
, slot
, num
, num
> 1 ? 1 : 0, &ty
, bcs
);
1097 for (auto i
= 0; i
< n
; i
++) {
1098 if (!topC(env
, i
).subtypeOf(BArrKey
)) {
1099 side_effects
= true;
1104 if (!side_effects
) {
1105 for (auto i
= 0; i
< n
; i
++) {
1106 auto const tracked
= !env
.trackedElems
.empty() &&
1107 env
.trackedElems
.back().depth
+ i
+ 1 == env
.state
.stack
.size();
1108 if (tracked
) finish_tracked_elems(env
, env
.trackedElems
.back().depth
);
1109 auto const prev
= op_from_slot(env
, i
);
1110 if (!prev
) continue;
1111 if ((prev
->op
== Op::Concat
&& tracked
) || prev
->op
== Op::ConcatN
) {
1112 auto const extra
= kill_by_slot(env
, i
);
1120 SString result
= nullptr;
1124 // In order to collapse literals, we need to be able to insert
1125 // pops, and a constant after the sequence that generated the
1126 // literals. We can always insert after the last instruction
1127 // though, and we only need to check the first slot of a
1129 auto const next
= !i
|| result
|| can_insert_after_slot(env
, i
) ?
1130 litstr(result
, i
) : nullptr;
1131 if (next
== staticEmptyString()) {
1133 // don't fold away empty strings if the concat could trigger exceptions
1134 if (i
== 0 && !topC(env
, 1).subtypeOf(BArrKey
)) break;
1135 if (n
== 2 && i
== 1 && !topC(env
, 0).subtypeOf(BArrKey
)) break;
1143 fold(i
- nlit
, nlit
, result
);
1153 if (nlit
> 1) fold(i
- nlit
, nlit
, result
);
1158 if (n
== 2 && !side_effects
&& will_reduce(env
)) {
1159 env
.trackedElems
.emplace_back(
1160 env
.state
.stack
.size(),
1161 env
.unchangedBcs
+ env
.replacedBcs
.size()
1169 if (!topC(env
).subtypeOf(BStr
)) {
1170 return reduce(env
, bc::CastString
{});
1176 // We can't reduce the emitted concats, or we'll end up with
1177 // infinite recursion.
1178 env
.flags
.wasPEI
= true;
1179 env
.flags
.effectFree
= false;
1180 env
.flags
.canConstProp
= false;
1182 auto concat
= [&] (uint32_t num
) {
1186 record(env
, bc::Concat
{});
1188 record(env
, bc::ConcatN
{ num
});
1196 if (n
> 1) concat(n
);
1199 void in(ISS
& env
, const bc::Concat
& /*op*/) {
1200 concatHelper(env
, 2);
1203 void in(ISS
& env
, const bc::ConcatN
& op
) {
1204 if (op
.arg1
== 2) return reduce(env
, bc::Concat
{});
1205 concatHelper(env
, op
.arg1
);
1208 template <class Op
, class Fun
>
1209 void arithImpl(ISS
& env
, const Op
& /*op*/, Fun fun
) {
1211 auto const t1
= popC(env
);
1212 auto const t2
= popC(env
);
1213 push(env
, fun(t2
, t1
));
// Binary arithmetic, bitwise, and shift opcodes. Each delegates to
// arithImpl, which pops the two operand cells and pushes the result type
// computed by the corresponding type-arithmetic helper (the helper is
// called as fun(t2, t1), i.e. second-from-top op top-of-stack).
void in(ISS& env, const bc::Add& op)    { arithImpl(env, op, typeAdd); }
void in(ISS& env, const bc::Sub& op)    { arithImpl(env, op, typeSub); }
void in(ISS& env, const bc::Mul& op)    { arithImpl(env, op, typeMul); }
void in(ISS& env, const bc::Div& op)    { arithImpl(env, op, typeDiv); }
void in(ISS& env, const bc::Mod& op)    { arithImpl(env, op, typeMod); }
void in(ISS& env, const bc::Pow& op)    { arithImpl(env, op, typePow); }
void in(ISS& env, const bc::BitAnd& op) { arithImpl(env, op, typeBitAnd); }
void in(ISS& env, const bc::BitOr& op)  { arithImpl(env, op, typeBitOr); }
void in(ISS& env, const bc::BitXor& op) { arithImpl(env, op, typeBitXor); }
// The *O opcodes map to the corresponding *O type helpers; their exact
// overflow semantics live in those helpers, not here.
void in(ISS& env, const bc::AddO& op)   { arithImpl(env, op, typeAddO); }
void in(ISS& env, const bc::SubO& op)   { arithImpl(env, op, typeSubO); }
void in(ISS& env, const bc::MulO& op)   { arithImpl(env, op, typeMulO); }
void in(ISS& env, const bc::Shl& op)    { arithImpl(env, op, typeShl); }
void in(ISS& env, const bc::Shr& op)    { arithImpl(env, op, typeShr); }
1231 void in(ISS
& env
, const bc::BitNot
& /*op*/) {
1232 auto const t
= popC(env
);
1233 auto const v
= tv(t
);
1234 if (!t
.couldBe(BInt
| BStr
| BSStr
| BLazyCls
| BCls
)) {
1235 return push(env
, TBottom
);
1240 auto cell
= eval_cell([&] {
1245 if (cell
) return push(env
, std::move(*cell
));
1247 push(env
, TInitCell
);
1252 template<bool NSame
>
1253 std::pair
<Type
,bool> resolveSame(ISS
& env
) {
1254 auto const l1
= topStkEquiv(env
, 0);
1255 auto const t1
= topC(env
, 0);
1256 auto const l2
= topStkEquiv(env
, 1);
1257 auto const t2
= topC(env
, 1);
1259 auto warningsEnabled
=
1260 (RuntimeOption::EvalEmitClsMethPointers
||
1261 RuntimeOption::EvalRaiseClassConversionWarning
);
1263 auto const result
= [&] {
1264 auto const v1
= tv(t1
);
1265 auto const v2
= tv(t2
);
1267 if (l1
== StackDupId
||
1268 (l1
== l2
&& l1
!= NoLocalId
) ||
1269 (l1
<= MaxLocalId
&& l2
<= MaxLocalId
&& locsAreEquiv(env
, l1
, l2
))) {
1270 if (!t1
.couldBe(BDbl
) || !t2
.couldBe(BDbl
) ||
1271 (v1
&& (v1
->m_type
!= KindOfDouble
|| !std::isnan(v1
->m_data
.dbl
))) ||
1272 (v2
&& (v2
->m_type
!= KindOfDouble
|| !std::isnan(v2
->m_data
.dbl
)))) {
1273 return NSame
? TFalse
: TTrue
;
1278 if (auto r
= eval_cell_value([&]{ return tvSame(*v2
, *v1
); })) {
1279 // we wouldn't get here if cellSame raised a warning
1280 warningsEnabled
= false;
1281 return r
!= NSame
? TTrue
: TFalse
;
1285 return NSame
? typeNSame(t1
, t2
) : typeSame(t1
, t2
);
1288 if (warningsEnabled
&& result
== (NSame
? TFalse
: TTrue
)) {
1289 warningsEnabled
= false;
1291 return { result
, warningsEnabled
&& compare_might_raise(t1
, t2
) };
1294 template<bool Negate
>
1295 void sameImpl(ISS
& env
) {
1296 if (auto const last
= last_op(env
)) {
1297 if (last
->op
== Op::Null
) {
1299 reduce(env
, bc::IsTypeC
{ IsTypeOp::Null
});
1300 if (Negate
) reduce(env
, bc::Not
{});
1303 if (auto const prev
= last_op(env
, 1)) {
1304 if (prev
->op
== Op::Null
&&
1305 (last
->op
== Op::CGetL
|| last
->op
== Op::CGetL2
||
1306 last
->op
== Op::CGetQuietL
)) {
1307 auto const loc
= [&]() {
1308 if (last
->op
== Op::CGetL
) {
1309 return last
->CGetL
.nloc1
;
1310 } else if (last
->op
== Op::CGetL2
) {
1311 return last
->CGetL2
.nloc1
;
1312 } else if (last
->op
== Op::CGetQuietL
) {
1313 return NamedLocal
{kInvalidLocalName
, last
->CGetQuietL
.loc1
};
1315 always_assert(false);
1318 reduce(env
, bc::IsTypeL
{ loc
, IsTypeOp::Null
});
1319 if (Negate
) reduce(env
, bc::Not
{});
1325 auto pair
= resolveSame
<Negate
>(env
);
1333 push(env
, std::move(pair
.first
));
1336 template<class JmpOp
>
1337 bool sameJmpImpl(ISS
& env
, Op sameOp
, const JmpOp
& jmp
) {
1338 const StackElem
* elems
[2];
1339 env
.state
.stack
.peek(2, elems
, 1);
1341 auto const loc0
= elems
[1]->equivLoc
;
1342 auto const loc1
= elems
[0]->equivLoc
;
1343 // If loc0 == loc1, either they're both NoLocalId, so there's
1344 // nothing for us to deduce, or both stack elements are the same
1345 // value, so the only thing we could deduce is that they are or are
1346 // not NaN. But we don't track that, so just bail.
1347 if (loc0
== loc1
|| loc0
== StackDupId
) return false;
1349 auto const ty0
= elems
[1]->type
;
1350 auto const ty1
= elems
[0]->type
;
1351 auto const val0
= tv(ty0
);
1352 auto const val1
= tv(ty1
);
1354 assertx(!val0
|| !val1
);
1355 if ((loc0
== NoLocalId
&& !val0
&& ty1
.subtypeOf(ty0
)) ||
1356 (loc1
== NoLocalId
&& !val1
&& ty0
.subtypeOf(ty1
))) {
1360 // Same currently lies about the distinction between Func/Cls/Str
1361 if (ty0
.couldBe(BCls
) && ty1
.couldBe(BStr
)) return false;
1362 if (ty1
.couldBe(BCls
) && ty0
.couldBe(BStr
)) return false;
1363 if (ty0
.couldBe(BLazyCls
) && ty1
.couldBe(BStr
)) return false;
1364 if (ty1
.couldBe(BLazyCls
) && ty0
.couldBe(BStr
)) return false;
1366 auto isect
= intersection_of(ty0
, ty1
);
1368 // Unfortunately, floating point negative zero and positive zero are
1369 // different, but are identical using as far as Same is concerened. We should
1370 // avoid refining a value to 0.0 because it compares identically to 0.0
1371 if (isect
.couldBe(dval(0.0)) || isect
.couldBe(dval(-0.0))) {
1372 isect
= union_of(isect
, TDbl
);
1377 auto handle_same
= [&] {
1378 // Currently dce uses equivalency to prove that something isn't
1379 // the last reference - so we can only assert equivalency here if
1380 // we know that won't be affected. Its irrelevant for uncounted
1381 // things, and for TObj and TRes, $x === $y iff $x and $y refer to
1383 if (loc0
<= MaxLocalId
&&
1384 (ty0
.subtypeOf(BObj
| BRes
| BPrim
) ||
1385 ty1
.subtypeOf(BObj
| BRes
| BPrim
) ||
1386 (ty0
.subtypeOf(BUnc
) && ty1
.subtypeOf(BUnc
)))) {
1387 if (loc1
== StackDupId
) {
1388 setStkLocal(env
, loc0
, 0);
1389 } else if (loc1
<= MaxLocalId
&& !locsAreEquiv(env
, loc0
, loc1
)) {
1392 auto const other
= findLocEquiv(env
, loc
);
1393 if (other
== NoLocalId
) break;
1394 killLocEquiv(env
, loc
);
1395 addLocEquiv(env
, loc
, loc1
);
1398 addLocEquiv(env
, loc
, loc1
);
1401 return refineLocation(env
, loc1
!= NoLocalId
? loc1
: loc0
, [&] (Type ty
) {
1402 auto const needsUninit
=
1403 ty
.couldBe(BUninit
) &&
1404 !isect
.couldBe(BUninit
) &&
1405 isect
.couldBe(BInitNull
);
1406 auto ret
= ty
.subtypeOf(BUnc
) ? isect
: loosen_staticness(isect
);
1407 if (needsUninit
) ret
= union_of(std::move(ret
), TUninit
);
1413 auto handle_differ_side
= [&] (LocalId location
, const Type
& ty
) {
1414 if (!ty
.subtypeOf(BInitNull
) && !ty
.strictSubtypeOf(TBool
)) return true;
1415 return refineLocation(env
, location
, [&] (Type t
) {
1416 if (ty
.subtypeOf(BNull
)) {
1417 t
= remove_uninit(std::move(t
));
1418 if (t
.couldBe(BInitNull
) && !t
.subtypeOf(BInitNull
)) {
1419 t
= unopt(std::move(t
));
1422 } else if (ty
.strictSubtypeOf(TBool
) && t
.subtypeOf(BBool
)) {
1423 return ty
== TFalse
? TTrue
: TFalse
;
1429 auto handle_differ
= [&] {
1431 (loc0
== NoLocalId
|| handle_differ_side(loc0
, ty1
)) &&
1432 (loc1
== NoLocalId
|| handle_differ_side(loc1
, ty0
));
1435 auto const sameIsJmpTarget
=
1436 (sameOp
== Op::Same
) == (JmpOp::op
== Op::JmpNZ
);
1438 auto save
= env
.state
;
1439 auto const target_reachable
= sameIsJmpTarget
?
1440 handle_same() : handle_differ();
1441 if (!target_reachable
) jmp_nevertaken(env
);
1442 // swap, so we can restore this state if the branch is always taken.
1443 env
.state
.swap(save
);
1444 if (!(sameIsJmpTarget
? handle_differ() : handle_same())) {
1445 jmp_setdest(env
, jmp
.target1
);
1446 env
.state
.copy_from(std::move(save
));
1447 } else if (target_reachable
) {
1448 env
.propagate(jmp
.target1
, &save
);
1454 bc::JmpNZ
invertJmp(const bc::JmpZ
& jmp
) { return bc::JmpNZ
{ jmp
.target1
}; }
1455 bc::JmpZ
invertJmp(const bc::JmpNZ
& jmp
) { return bc::JmpZ
{ jmp
.target1
}; }
// Same / NSame: identity (non-)equality. Both share sameImpl; the template
// argument selects whether the result sense is negated.
void in(ISS& env, const bc::Same&)  { sameImpl<false>(env); }
void in(ISS& env, const bc::NSame&) { sameImpl<true>(env); }
1463 void cmpImpl(ISS
& env
, Fun fun
) {
1464 auto const t1
= popC(env
);
1465 auto const t2
= popC(env
);
1466 auto const v1
= tv(t1
);
1467 auto const v2
= tv(t2
);
1469 if (auto r
= eval_cell_value([&]{ return fun(*v2
, *v1
); })) {
1471 return push(env
, *r
? TTrue
: TFalse
);
1474 // TODO_4: evaluate when these can throw, non-constant type stuff.
// True if the type could be a string or something that coerces to one in
// comparisons (a class or lazy class).
bool couldBeStringish(const Type& t) {
  return t.couldBe(BCls | BLazyCls | BStr);
}
// Whether values of these two types could ever compare equal.
bool everEq(const Type& t1, const Type& t2) {
  // for comparison purposes we need to be careful about these coercions:
  // stringish values (Cls/LazyCls/Str) may coerce and compare equal even
  // when the loosened types don't intersect.
  if (couldBeStringish(t1) && couldBeStringish(t2)) return true;
  return loosen_all(t1).couldBe(loosen_all(t2));
}
// Whether a relational comparison of these two types will definitely throw
// (i.e. the operand types can never be validly compared).
bool cmpWillThrow(const Type& t1, const Type& t2) {
  // for comparison purposes we need to be careful about these coercions:
  // stringish operands can coerce to comparable values, so no throw.
  if (couldBeStringish(t1) && couldBeStringish(t2)) return false;

  auto couldBeIntAndDbl = [](const Type& t1, const Type& t2) {
    return t1.couldBe(BInt) && t2.couldBe(BDbl);
  };
  // relational comparisons allow for int v dbl
  if (couldBeIntAndDbl(t1, t2) || couldBeIntAndDbl(t2, t1)) return false;

  // Otherwise the comparison throws exactly when the datatype-loosened
  // types can never intersect.
  return !loosen_to_datatype(t1).couldBe(loosen_to_datatype(t2));
}
1503 void eqImpl(ISS
& env
, bool eq
) {
1504 auto rs
= resolveSame
<false>(env
);
1505 if (rs
.first
== TTrue
) {
1506 if (!rs
.second
) constprop(env
);
1508 return push(env
, eq
? TTrue
: TFalse
);
1511 if (!everEq(topC(env
, 0), topC(env
, 1))) {
1513 return push(env
, eq
? TFalse
: TTrue
);
1516 cmpImpl(env
, [&] (TypedValue c1
, TypedValue c2
) {
1517 return tvEqual(c1
, c2
) == eq
;
1521 bool cmpThrowCheck(ISS
& env
, const Type
& t1
, const Type
& t2
) {
1522 if (!cmpWillThrow(t1
, t2
)) return false;
// Eq / Neq: loose (non-)equality; eqImpl's bool selects the result sense.
void in(ISS& env, const bc::Eq&)  { eqImpl(env, true); }
void in(ISS& env, const bc::Neq&) { eqImpl(env, false); }
// Relational comparison opcodes. Each first checks whether the comparison
// must throw (cmpThrowCheck); otherwise delegates to cmpImpl with the
// runtime comparison function. The static_casts pick out the plain
// (TypedValue, TypedValue) form of tvLess/tvGreater — presumably those
// names are overloaded (TODO confirm); tvLessOrEqual/tvGreaterOrEqual
// need no disambiguation.
void in(ISS& env, const bc::Lt&) {
  if (cmpThrowCheck(env, topC(env, 0), topC(env, 1))) return;
  cmpImpl(env, static_cast<bool (*)(TypedValue, TypedValue)>(tvLess));
}

void in(ISS& env, const bc::Gt&) {
  if (cmpThrowCheck(env, topC(env, 0), topC(env, 1))) return;
  cmpImpl(env, static_cast<bool (*)(TypedValue, TypedValue)>(tvGreater));
}

void in(ISS& env, const bc::Lte&) {
  if (cmpThrowCheck(env, topC(env, 0), topC(env, 1))) return;
  cmpImpl(env, tvLessOrEqual);
}

void in(ISS& env, const bc::Gte&) {
  if (cmpThrowCheck(env, topC(env, 0), topC(env, 1))) return;
  cmpImpl(env, tvGreaterOrEqual);
}
1551 void in(ISS
& env
, const bc::Cmp
&) {
1552 auto const t1
= topC(env
, 0);
1553 auto const t2
= topC(env
, 1);
1554 if (cmpThrowCheck(env
, t1
, t2
)) return;
1557 auto const v1
= tv(t1
);
1558 auto const v2
= tv(t2
);
1560 if (auto r
= eval_cell_value([&]{ return ival(tvCompare(*v2
, *v1
)); })) {
1562 return push(env
, std::move(*r
));
1566 // TODO_4: evaluate when these can throw, non-constant type stuff.
1570 void castBoolImpl(ISS
& env
, const Type
& t
, bool negate
) {
1574 auto const e
= emptiness(t
);
1576 case Emptiness::Empty
:
1577 case Emptiness::NonEmpty
:
1578 return push(env
, (e
== Emptiness::Empty
) == negate
? TTrue
: TFalse
);
1579 case Emptiness::Maybe
:
// Not: boolean negation — pop the cell and push its negated boolification
// (castBoolImpl with negate = true).
void in(ISS& env, const bc::Not&) {
  castBoolImpl(env, popC(env), true);
}
// CastBool: if the value is already a bool the cast is a no-op and the
// instruction reduces away; otherwise compute its boolification.
void in(ISS& env, const bc::CastBool&) {
  auto const t = topC(env);
  if (t.subtypeOf(BBool)) return reduce(env);
  castBoolImpl(env, popC(env), false);
}
1596 void in(ISS
& env
, const bc::CastInt
&) {
1597 auto const t
= topC(env
);
1598 if (t
.subtypeOf(BInt
)) return reduce(env
);
1601 // Objects can raise a warning about converting to int.
1602 if (!t
.couldBe(BObj
)) nothrow(env
);
1603 if (auto const v
= tv(t
)) {
1604 auto cell
= eval_cell([&] {
1605 return make_tv
<KindOfInt64
>(tvToInt(*v
));
1607 if (cell
) return push(env
, std::move(*cell
));
1612 // Handle a casting operation, where "target" is the type being casted to. If
1613 // "fn" is provided, it will be called to cast any constant inputs. If "elide"
1614 // is set to true, if the source type is the same as the destination, the cast
1615 // will be optimized away.
1616 void castImpl(ISS
& env
, Type target
, void(*fn
)(TypedValue
*)) {
1617 auto const t
= topC(env
);
1618 if (t
.subtypeOf(target
)) return reduce(env
);
1622 if (auto val
= tv(t
)) {
1623 if (auto result
= eval_cell([&] { fn(&*val
); return *val
; })) {
1629 push(env
, std::move(target
));
// The remaining cast opcodes all go through castImpl with the target type
// and the runtime in-place cast function (used to fold constant inputs).
void in(ISS& env, const bc::CastDouble&) {
  castImpl(env, TDbl, tvCastToDoubleInPlace);
}

void in(ISS& env, const bc::CastString&) {
  castImpl(env, TStr, tvCastToStringInPlace);
}

void in(ISS& env, const bc::CastDict&) {
  castImpl(env, TDict, tvCastToDictInPlace);
}

void in(ISS& env, const bc::CastVec&) {
  castImpl(env, TVec, tvCastToVecInPlace);
}

void in(ISS& env, const bc::CastKeyset&) {
  castImpl(env, TKeyset, tvCastToKeysetInPlace);
}
1652 void in(ISS
& env
, const bc::DblAsBits
&) {
1656 auto const ty
= popC(env
);
1657 if (!ty
.couldBe(BDbl
)) return push(env
, ival(0));
1659 if (auto val
= tv(ty
)) {
1660 assertx(isDoubleType(val
->m_type
));
1661 val
->m_type
= KindOfInt64
;
1662 push(env
, from_cell(*val
));
1669 void in(ISS
& env
, const bc::Print
& /*op*/) {
1674 void in(ISS
& env
, const bc::Clone
& /*op*/) {
1675 auto val
= popC(env
);
1676 if (!val
.subtypeOf(BObj
)) {
1678 if (val
.is(BBottom
)) unreachable(env
);
1680 push(env
, std::move(val
));
// Exit pops its message operand and pushes null (its stack result);
// Fatal only pops — it never produces a value.
void in(ISS& env, const bc::Exit&)  { popC(env); push(env, TInitNull); }
void in(ISS& env, const bc::Fatal&) { popC(env); }
// Unconditional jumps never appear inside a block at this stage —
// NOTE(review): presumably they are represented as block boundaries/edges
// in the CFG instead; confirm against the block builder.
void in(ISS& /*env*/, const bc::JmpNS&) {
  always_assert(0 && "blocks should not contain JmpNS instructions");
}

void in(ISS& /*env*/, const bc::Jmp&) {
  always_assert(0 && "blocks should not contain Jmp instructions");
}
1694 void in(ISS
& env
, const bc::Select
& op
) {
1695 auto const cond
= topC(env
);
1696 auto const t
= topC(env
, 1);
1697 auto const f
= topC(env
, 2);
1702 switch (emptiness(cond
)) {
1703 case Emptiness::Maybe
:
1705 push(env
, union_of(t
, f
));
1707 case Emptiness::NonEmpty
:
1711 case Emptiness::Empty
:
1712 return reduce(env
, bc::PopC
{}, bc::PopC
{});
1719 template<class JmpOp
>
1720 bool isTypeHelper(ISS
& env
,
1725 if (typeOp
== IsTypeOp::Scalar
|| typeOp
== IsTypeOp::LegacyArrLike
||
1726 typeOp
== IsTypeOp::Func
) {
1730 auto const val
= [&] {
1731 if (op
!= Op::IsTypeC
) return locRaw(env
, location
);
1732 const StackElem
* elem
;
1733 env
.state
.stack
.peek(1, &elem
, 1);
1734 location
= elem
->equivLoc
;
1738 if (location
== NoLocalId
|| !val
.subtypeOf(BCell
)) return false;
1740 // If the type could be ClsMeth and Arr/Vec, skip location refining.
1741 // Otherwise, refine location based on the testType.
1742 auto testTy
= type_of_istype(typeOp
);
1744 assertx(val
.couldBe(testTy
) &&
1745 (!val
.subtypeOf(testTy
) || val
.subtypeOf(BObj
)));
1749 if (op
== Op::IsTypeC
) {
1750 if (!is_type_might_raise(testTy
, val
)) nothrow(env
);
1751 } else if (op
== Op::IssetL
) {
1753 } else if (!locCouldBeUninit(env
, location
) &&
1754 !is_type_might_raise(testTy
, val
)) {
1758 auto const negate
= (jmp
.op
== Op::JmpNZ
) == (op
!= Op::IssetL
);
1759 auto const was_true
= [&] (Type t
) {
1760 if (testTy
.subtypeOf(BNull
)) return intersection_of(t
, TNull
);
1761 assertx(!testTy
.couldBe(BNull
));
1762 return intersection_of(t
, testTy
);
1764 auto const was_false
= [&] (Type t
) {
1765 auto tinit
= remove_uninit(t
);
1766 if (testTy
.subtypeOf(BNull
)) {
1767 return (tinit
.couldBe(BInitNull
) && !tinit
.subtypeOf(BInitNull
))
1768 ? unopt(std::move(tinit
)) : tinit
;
1770 if (t
.couldBe(BInitNull
) && !t
.subtypeOf(BInitNull
)) {
1771 assertx(!testTy
.couldBe(BNull
));
1772 if (unopt(tinit
).subtypeOf(testTy
)) return TNull
;
1777 auto const pre
= [&] (Type t
) {
1778 return negate
? was_true(std::move(t
)) : was_false(std::move(t
));
1781 auto const post
= [&] (Type t
) {
1782 return negate
? was_false(std::move(t
)) : was_true(std::move(t
));
1785 refineLocation(env
, location
, pre
, jmp
.target1
, post
);
1789 // If the current function is a memoize wrapper, return the inferred return type
1790 // of the function being wrapped along with if the wrapped function is effect
1792 std::pair
<Type
, bool> memoizeImplRetType(ISS
& env
) {
1793 always_assert(env
.ctx
.func
->isMemoizeWrapper
);
1795 // Lookup the wrapped function. This should always resolve to a precise
1796 // function but we don't rely on it.
1797 auto const memo_impl_func
= [&] {
1798 if (env
.ctx
.func
->cls
) {
1799 auto const clsTy
= selfClsExact(env
);
1800 return env
.index
.resolve_method(
1802 clsTy
? *clsTy
: TCls
,
1803 memoize_impl_name(env
.ctx
.func
)
1806 return env
.index
.resolve_func(env
.ctx
, memoize_impl_name(env
.ctx
.func
));
1809 // Infer the return type of the wrapped function, taking into account the
1810 // types of the parameters for context sensitive types.
1811 auto const numArgs
= env
.ctx
.func
->params
.size();
1812 CompactVector
<Type
> args
{numArgs
};
1813 for (auto i
= LocalId
{0}; i
< numArgs
; ++i
) {
1814 args
[i
] = locAsCell(env
, i
);
1817 // Determine the context the wrapped function will be called on.
1818 auto const ctxType
= [&]() -> Type
{
1819 if (env
.ctx
.func
->cls
) {
1820 if (env
.ctx
.func
->attrs
& AttrStatic
) {
1821 // The class context for static methods is the method's class,
1822 // if LSB is not specified.
1824 env
.ctx
.func
->isMemoizeWrapperLSB
?
1827 return clsTy
? *clsTy
: TCls
;
1829 return thisTypeNonNull(env
);
1835 auto retTy
= env
.index
.lookup_return_type(
1837 &env
.collect
.methods
,
1842 auto const effectFree
= env
.index
.is_effect_free(
1846 // Regardless of anything we know the return type will be an InitCell (this is
1847 // a requirement of memoize functions).
1848 if (!retTy
.subtypeOf(BInitCell
)) return { TInitCell
, effectFree
};
1849 return { retTy
, effectFree
};
1852 template<class JmpOp
>
1853 bool instanceOfJmpImpl(ISS
& env
,
1854 const bc::InstanceOfD
& inst
,
1857 const StackElem
* elem
;
1858 env
.state
.stack
.peek(1, &elem
, 1);
1860 auto const locId
= elem
->equivLoc
;
1861 if (locId
== NoLocalId
|| interface_supports_non_objects(inst
.str1
)) {
1864 auto const rcls
= env
.index
.resolve_class(env
.ctx
, inst
.str1
);
1865 if (!rcls
) return false;
1867 auto const val
= elem
->type
;
1868 auto const instTy
= subObj(*rcls
);
1869 assertx(!val
.subtypeOf(instTy
) && val
.couldBe(instTy
));
1871 // If we have an optional type, whose unopt is guaranteed to pass
1872 // the instanceof check, then failing to pass implies it was null.
1873 auto const fail_implies_null
=
1874 val
.couldBe(BInitNull
) &&
1875 !val
.subtypeOf(BInitNull
) &&
1876 unopt(val
).subtypeOf(instTy
);
1879 auto const negate
= jmp
.op
== Op::JmpNZ
;
1880 auto const result
= [&] (Type t
, bool pass
) {
1881 return pass
? instTy
: fail_implies_null
? TNull
: t
;
1883 auto const pre
= [&] (Type t
) { return result(t
, negate
); };
1884 auto const post
= [&] (Type t
) { return result(t
, !negate
); };
1885 refineLocation(env
, locId
, pre
, jmp
.target1
, post
);
1889 template<class JmpOp
>
1890 bool isTypeStructCJmpImpl(ISS
& env
,
1891 const bc::IsTypeStructC
& inst
,
1894 const StackElem
* elems
[2];
1895 env
.state
.stack
.peek(2, elems
, 1);
1897 auto const locId
= elems
[0]->equivLoc
;
1898 if (locId
== NoLocalId
) return false;
1900 auto const a
= tv(elems
[1]->type
);
1901 if (!a
) return false;
1902 // if it wasn't valid, the JmpOp wouldn't be reachable
1903 assertx(isValidTSType(*a
, false));
1905 auto const is_nullable_ts
= is_ts_nullable(a
->m_data
.parr
);
1906 auto const ts_kind
= get_ts_kind(a
->m_data
.parr
);
1907 // type_of_type_structure does not resolve these types. It is important we
1908 // do resolve them here, or we may have issues when we reduce the checks to
1909 // InstanceOfD checks. This logic performs the same exact refinement as
1910 // instanceOfD will.
1911 if (is_nullable_ts
||
1912 (ts_kind
!= TypeStructure::Kind::T_class
&&
1913 ts_kind
!= TypeStructure::Kind::T_interface
&&
1914 ts_kind
!= TypeStructure::Kind::T_xhp
&&
1915 ts_kind
!= TypeStructure::Kind::T_unresolved
)) {
1919 auto const clsName
= get_ts_classname(a
->m_data
.parr
);
1920 auto const rcls
= env
.index
.resolve_class(env
.ctx
, clsName
);
1922 !rcls
->resolved() ||
1923 rcls
->cls()->attrs
& AttrEnum
||
1924 interface_supports_non_objects(clsName
)) {
1928 auto const val
= elems
[0]->type
;
1929 auto const instTy
= subObj(*rcls
);
1930 if (val
.subtypeOf(instTy
) || !val
.couldBe(instTy
)) {
1934 // If we have an optional type, whose unopt is guaranteed to pass
1935 // the instanceof check, then failing to pass implies it was null.
1936 auto const fail_implies_null
=
1937 val
.couldBe(BInitNull
) &&
1938 !val
.subtypeOf(BInitNull
) &&
1939 unopt(val
).subtypeOf(instTy
);
1943 auto const negate
= jmp
.op
== Op::JmpNZ
;
1944 auto const result
= [&] (Type t
, bool pass
) {
1945 return pass
? instTy
: fail_implies_null
? TNull
: t
;
1947 auto const pre
= [&] (Type t
) { return result(t
, negate
); };
1948 auto const post
= [&] (Type t
) { return result(t
, !negate
); };
1949 refineLocation(env
, locId
, pre
, jmp
.target1
, post
);
1953 template<class JmpOp
>
1954 void jmpImpl(ISS
& env
, const JmpOp
& op
) {
1955 auto const Negate
= std::is_same
<JmpOp
, bc::JmpNZ
>::value
;
1956 auto const location
= topStkEquiv(env
);
1957 auto const e
= emptiness(topC(env
));
1958 if (e
== (Negate
? Emptiness::NonEmpty
: Emptiness::Empty
)) {
1959 reduce(env
, bc::PopC
{});
1960 return jmp_setdest(env
, op
.target1
);
1963 if (e
== (Negate
? Emptiness::Empty
: Emptiness::NonEmpty
) ||
1964 (next_real_block(env
.ctx
.func
, env
.blk
.fallthrough
) ==
1965 next_real_block(env
.ctx
.func
, op
.target1
))) {
1966 return reduce(env
, bc::PopC
{});
1970 if (env
.flags
.jmpDest
== NoBlockId
) return;
1971 auto const jmpDest
= env
.flags
.jmpDest
;
1972 env
.flags
.jmpDest
= NoBlockId
;
1974 reduce(env
, bc::PopC
{});
1975 env
.flags
.jmpDest
= jmpDest
;
1978 if (auto const last
= last_op(env
)) {
1979 if (last
->op
== Op::Not
) {
1981 return reduce(env
, invertJmp(op
));
1983 if (last
->op
== Op::Same
|| last
->op
== Op::NSame
) {
1984 if (sameJmpImpl(env
, last
->op
, op
)) return fix();
1985 } else if (last
->op
== Op::IssetL
) {
1986 if (isTypeHelper(env
,
1993 } else if (last
->op
== Op::IsTypeL
) {
1994 if (isTypeHelper(env
,
1995 last
->IsTypeL
.subop2
,
1996 last
->IsTypeL
.nloc1
.id
,
2001 } else if (last
->op
== Op::IsTypeC
) {
2002 if (isTypeHelper(env
,
2003 last
->IsTypeC
.subop1
,
2009 } else if (last
->op
== Op::InstanceOfD
) {
2010 if (instanceOfJmpImpl(env
, last
->InstanceOfD
, op
)) return fix();
2011 } else if (last
->op
== Op::IsTypeStructC
) {
2012 if (isTypeStructCJmpImpl(env
, last
->IsTypeStructC
, op
)) return fix();
2019 if (location
== NoLocalId
) return env
.propagate(op
.target1
, &env
.state
);
2021 refineLocation(env
, location
,
2022 Negate
? assert_nonemptiness
: assert_emptiness
,
2024 Negate
? assert_emptiness
: assert_nonemptiness
);
// Conditional jumps: both senses share jmpImpl, which distinguishes them
// via the JmpOp template parameter.
void in(ISS& env, const bc::JmpNZ& op) { jmpImpl(env, op); }
void in(ISS& env, const bc::JmpZ& op)  { jmpImpl(env, op); }
2033 void in(ISS
& env
, const bc::Switch
& op
) {
2034 const auto t
= topC(env
);
2035 const auto v
= tv(t
);
2039 forEachTakenEdge(op
, [&] (BlockId id
) {
2040 env
.propagate(id
, &env
.state
);
2044 auto go
= [&] (BlockId blk
) {
2045 reduce(env
, bc::PopC
{});
2046 return jmp_setdest(env
, blk
);
2049 if (!t
.couldBe(BInt
)) {
2050 if (op
.subop1
== SwitchKind::Unbounded
) return bail();
2051 return go(op
.targets
.back());
2054 if (!v
) return bail();
2056 auto num_elems
= op
.targets
.size();
2057 if (op
.subop1
== SwitchKind::Unbounded
) {
2058 if (v
->m_data
.num
< 0 || v
->m_data
.num
>= num_elems
) return bail();
2059 return go(op
.targets
[v
->m_data
.num
]);
2062 assertx(num_elems
> 2);
2064 auto const i
= v
->m_data
.num
- op
.arg2
;
2065 return i
>= 0 && i
< num_elems
? go(op
.targets
[i
]) : go(op
.targets
.back());
2068 void in(ISS
& env
, const bc::SSwitch
& op
) {
2069 const auto t
= topC(env
);
2070 const auto v
= tv(t
);
2072 if (!couldBeStringish(t
)) {
2073 reduce(env
, bc::PopC
{});
2074 return jmp_setdest(env
, op
.targets
.back().second
);
2078 for (auto& kv
: op
.targets
) {
2079 auto match
= eval_cell_value([&] {
2080 if (!kv
.first
) return true;
2081 return v
->m_data
.pstr
->equal(kv
.first
);
2086 reduce(env
, bc::PopC
{});
2087 return jmp_setdest(env
, kv
.second
);
2093 forEachTakenEdge(op
, [&] (BlockId id
) {
2094 env
.propagate(id
, &env
.state
);
// RetC: return the top of stack. If the returned value is known to be a
// parameter local (stack-local equivalence points at a param slot), record
// that param id in flags.retParam for callers of the analysis.
void in(ISS& env, const bc::RetC& /*op*/) {
  auto const locEquiv = topStkLocal(env);
  doRet(env, popC(env), false);
  if (locEquiv != NoLocalId && locEquiv < env.ctx.func->params.size()) {
    env.flags.retParam = locEquiv;
  }
}
// RetM: multi-value return of op.arg1 cells. The cells are popped into the
// vector in reverse (index arg1-i-1), and the result is returned as a vec
// of those types.
void in(ISS& env, const bc::RetM& op) {
  std::vector<Type> ret(op.arg1);
  for (int i = 0; i < op.arg1; i++) {
    ret[op.arg1 - i - 1] = popC(env);
  }
  doRet(env, vec(std::move(ret)), false);
}
2113 void in(ISS
& env
, const bc::RetCSuspended
&) {
2114 always_assert(env
.ctx
.func
->isAsync
&& !env
.ctx
.func
->isGenerator
);
2116 auto const t
= popC(env
);
2119 is_specialized_wait_handle(t
) ? wait_handle_inner(t
) : TInitCell
,
2124 void in(ISS
& env
, const bc::Throw
& /*op*/) {
// These opcodes have no stack effect to model here; the interpreter
// treats them as no-ops for type-inference purposes.
void in(ISS& env, const bc::ThrowNonExhaustiveSwitch& /*op*/) {}

void in(ISS& env, const bc::RaiseClassStringConversionWarning& /*op*/) {}
2132 void in(ISS
& env
, const bc::ChainFaults
&) {
// NativeImpl: return-type inference for builtin bodies.
void in(ISS& env, const bc::NativeImpl&) {
  // Collection methods that return $this produce an exact object of the
  // enclosing builtin class.
  if (is_collection_method_returning_this(env.ctx.cls, env.ctx.func)) {
    auto const resCls = env.index.builtin_class(env.ctx.cls->name);
    return doRet(env, objExact(resCls), true);
  }

  // Otherwise use the declared native return type if one is available.
  if (env.ctx.func->nativeInfo) {
    return doRet(env, native_function_return_type(env.ctx.func), true);
  }
  doRet(env, TInitCell, true);
}
2150 void in(ISS
& env
, const bc::CGetL
& op
) {
2151 if (locIsThis(env
, op
.nloc1
.id
)) {
2152 auto const& ty
= peekLocRaw(env
, op
.nloc1
.id
);
2153 if (!ty
.subtypeOf(BInitNull
)) {
2154 auto const subop
= ty
.couldBe(BUninit
) ?
2155 BareThisOp::Notice
: ty
.couldBe(BNull
) ?
2156 BareThisOp::NoNotice
: BareThisOp::NeverNull
;
2157 return reduce(env
, bc::BareThis
{ subop
});
2160 if (auto const last
= last_op(env
)) {
2161 if (last
->op
== Op::PopL
&&
2162 op
.nloc1
.id
== last
->PopL
.loc1
) {
2165 setLocRaw(env
, op
.nloc1
.id
, TCell
);
2166 return reduce(env
, bc::SetL
{ op
.nloc1
.id
});
2169 if (!peekLocCouldBeUninit(env
, op
.nloc1
.id
)) {
2170 auto const minLocEquiv
= findMinLocEquiv(env
, op
.nloc1
.id
, false);
2171 auto const loc
= minLocEquiv
!= NoLocalId
? minLocEquiv
: op
.nloc1
.id
;
2172 return reduce(env
, bc::CGetQuietL
{ loc
});
2174 mayReadLocal(env
, op
.nloc1
.id
);
2175 push(env
, locAsCell(env
, op
.nloc1
.id
), op
.nloc1
.id
);
2178 void in(ISS
& env
, const bc::CGetQuietL
& op
) {
2179 if (locIsThis(env
, op
.loc1
)) {
2180 return reduce(env
, bc::BareThis
{ BareThisOp::NoNotice
});
2182 if (auto const last
= last_op(env
)) {
2183 if (last
->op
== Op::PopL
&&
2184 op
.loc1
== last
->PopL
.loc1
) {
2187 setLocRaw(env
, op
.loc1
, TCell
);
2188 return reduce(env
, bc::SetL
{ op
.loc1
});
2191 auto const minLocEquiv
= findMinLocEquiv(env
, op
.loc1
, true);
2192 if (minLocEquiv
!= NoLocalId
) {
2193 return reduce(env
, bc::CGetQuietL
{ minLocEquiv
});
2198 mayReadLocal(env
, op
.loc1
);
2199 push(env
, locAsCell(env
, op
.loc1
), op
.loc1
);
2202 void in(ISS
& env
, const bc::CUGetL
& op
) {
2203 auto ty
= locRaw(env
, op
.loc1
);
2206 push(env
, std::move(ty
), op
.loc1
);
2209 void in(ISS
& env
, const bc::PushL
& op
) {
2210 auto const minLocEquiv
= findMinLocEquiv(env
, op
.loc1
, false);
2211 if (minLocEquiv
!= NoLocalId
) {
2212 return reduce(env
, bc::CGetQuietL
{ minLocEquiv
}, bc::UnsetL
{ op
.loc1
});
2215 if (auto const last
= last_op(env
)) {
2216 if (last
->op
== Op::PopL
&&
2217 last
->PopL
.loc1
== op
.loc1
) {
2218 // rewind is ok, because we're just going to unset the local
2219 // (and note the unset can't be a no-op because the PopL set it
2220 // to an InitCell). But its possible that before the PopL, the
2221 // local *was* unset, so maybe would have killed the no-op. The
2222 // only way to fix that is to reprocess the block with the new
2223 // instruction sequence and see what happens.
2226 return reduce(env
, bc::UnsetL
{ op
.loc1
});
2230 if (auto val
= tv(peekLocRaw(env
, op
.loc1
))) {
2231 return reduce(env
, bc::UnsetL
{ op
.loc1
}, gen_constant(*val
));
2234 impl(env
, bc::CGetQuietL
{ op
.loc1
}, bc::UnsetL
{ op
.loc1
});
2237 void in(ISS
& env
, const bc::CGetL2
& op
) {
2238 if (auto const last
= last_op(env
)) {
2239 if ((poppable(last
->op
) && !numPop(*last
)) ||
2240 ((last
->op
== Op::CGetL
|| last
->op
== Op::CGetQuietL
) &&
2241 !peekLocCouldBeUninit(env
, op
.nloc1
.id
))) {
2242 auto const other
= *last
;
2244 return reduce(env
, bc::CGetL
{ op
.nloc1
}, other
);
2248 if (!peekLocCouldBeUninit(env
, op
.nloc1
.id
)) {
2249 auto const minLocEquiv
= findMinLocEquiv(env
, op
.nloc1
.id
, false);
2250 if (minLocEquiv
!= NoLocalId
) {
2251 return reduce(env
, bc::CGetL2
{ { kInvalidLocalName
, minLocEquiv
} });
2255 mayReadLocal(env
, op
.nloc1
.id
);
2256 auto loc
= locAsCell(env
, op
.nloc1
.id
);
2257 auto topEquiv
= topStkLocal(env
);
2258 auto top
= popT(env
);
2259 push(env
, std::move(loc
), op
.nloc1
.id
);
2260 push(env
, std::move(top
), topEquiv
);
// CGetG: global read — nothing is known about globals, so the result is
// any initialized cell.
void in(ISS& env, const bc::CGetG&) { popC(env); push(env, TInitCell); }
2265 void in(ISS
& env
, const bc::CGetS
& op
) {
2266 auto const tcls
= popC(env
);
2267 auto const tname
= popC(env
);
2269 auto const throws
= [&] {
2271 return push(env
, TBottom
);
2274 if (!tcls
.couldBe(BCls
)) return throws();
2276 auto lookup
= env
.index
.lookup_static(
2283 if (lookup
.found
== TriBool::No
|| lookup
.ty
.subtypeOf(BBottom
)) {
2287 auto const mustBeMutable
= ReadonlyOp::Mutable
== op
.subop1
;
2288 if (mustBeMutable
&& lookup
.readOnly
== TriBool::Yes
) {
2291 auto const mightReadOnlyThrow
= mustBeMutable
&& lookup
.readOnly
== TriBool::Maybe
;
2293 if (lookup
.found
== TriBool::Yes
&&
2294 lookup
.lateInit
== TriBool::No
&&
2295 !lookup
.classInitMightRaise
&&
2296 !mightReadOnlyThrow
&&
2297 tcls
.subtypeOf(BCls
) &&
2298 tname
.subtypeOf(BStr
)) {
2303 push(env
, std::move(lookup
.ty
));
2306 void in(ISS
& env
, const bc::ClassGetC
& op
) {
2307 auto const t
= topC(env
);
2309 if (t
.subtypeOf(BCls
)) return reduce(env
, bc::Nop
{});
2312 if (!t
.couldBe(BObj
| BCls
| BStr
| BLazyCls
)) {
2318 if (t
.subtypeOf(BObj
)) {
2320 push(env
, objcls(t
));
2324 if (auto const clsname
= getNameFromType(t
)) {
2325 if (auto const rcls
= env
.index
.resolve_class(env
.ctx
, clsname
)) {
2326 if (rcls
->cls()) effect_free(env
);
2327 push(env
, clsExact(*rcls
));
2335 void in(ISS
& env
, const bc::ClassGetTS
& op
) {
2336 // TODO(T31677864): implement real optimizations
2337 auto const ts
= popC(env
);
2338 if (!ts
.couldBe(BDict
)) {
2348 void in(ISS
& env
, const bc::AKExists
&) {
2349 auto const base
= popC(env
);
2350 auto const [key
, promotion
] = promote_classlike_to_key(popC(env
));
2352 auto result
= TBottom
;
2353 auto effectFree
= promotion
!= Promotion::YesMightThrow
;
2355 if (!base
.subtypeOf(BObj
| BArrLike
)) {
2360 if (base
.couldBe(BObj
)) {
2364 if (base
.couldBe(BArrLike
)) {
2365 auto const validKey
= key
.subtypeOf(BArrKey
);
2366 if (!validKey
) effectFree
= false;
2367 if (key
.couldBe(BArrKey
)) {
2369 array_like_elem(base
, validKey
? key
: intersection_of(key
, TArrKey
));
2370 if (elem
.first
.is(BBottom
)) {
2372 } else if (elem
.second
) {
2380 if (result
.is(BBottom
)) {
2381 assertx(!effectFree
);
2388 push(env
, std::move(result
));
2391 void in(ISS
& env
, const bc::GetMemoKeyL
& op
) {
2392 auto const& func
= env
.ctx
.func
;
2393 auto const name
= folly::to
<std::string
>(
2394 func
&& func
->cls
? func
->cls
->name
->data() : "",
2395 func
&& func
->cls
? "::" : "",
2396 func
? func
->name
->data() : "");
2397 always_assert(func
->isMemoizeWrapper
);
2399 auto const rclsIMemoizeParam
= env
.index
.builtin_class(s_IMemoizeParam
.get());
2400 auto const tyIMemoizeParam
= subObj(rclsIMemoizeParam
);
2402 auto const inTy
= locAsCell(env
, op
.nloc1
.id
);
2404 // If the local could be uninit, we might raise a warning (as
2405 // usual). Converting an object to a memo key might invoke PHP code if it has
2406 // the IMemoizeParam interface, and if it doesn't, we'll throw.
2407 if (!locCouldBeUninit(env
, op
.nloc1
.id
) &&
2408 !inTy
.couldBe(BObj
| BVec
| BDict
)) {
2413 // If type constraints are being enforced and the local being turned into a
2414 // memo key is a parameter, then we can possibly using the type constraint to
2415 // infer a more efficient memo key mode.
2416 using MK
= MemoKeyConstraint
;
2417 Optional
<res::Class
> resolvedCls
;
2418 auto const mkc
= [&] {
2419 if (op
.nloc1
.id
>= env
.ctx
.func
->params
.size()) return MK::None
;
2420 auto tc
= env
.ctx
.func
->params
[op
.nloc1
.id
].typeConstraint
;
2421 if (tc
.isUnresolved()) {
2422 auto res
= env
.index
.resolve_type_name(tc
.typeName());
2423 if (res
.type
!= AnnotType::Unresolved
) {
2424 auto const typeName
= res
.type
== AnnotType::Object
2425 ? res
.value
->name() : nullptr;
2426 tc
.resolveType(res
.type
, res
.nullable
, typeName
);
2429 if (tc
.isObject()) {
2430 resolvedCls
= env
.index
.resolve_class(env
.ctx
, tc
.clsName());
2432 return memoKeyConstraintFromTC(tc
);
2435 // Use the type-constraint to reduce this operation to a more efficient memo
2436 // mode. Some of the modes can be reduced to simple bytecode operations
2437 // inline. Even with the type-constraints, we still need to check the inferred
2438 // type of the local. Something may have possibly clobbered the local between
2439 // the type-check and this op.
2442 // Always an int, so the key is always an identity mapping
2443 if (inTy
.subtypeOf(BInt
)) return reduce(env
, bc::CGetL
{ op
.nloc1
});
2446 // Always a bool, so the key is the bool cast to an int
2447 if (inTy
.subtypeOf(BBool
)) {
2448 return reduce(env
, bc::CGetL
{ op
.nloc1
}, bc::CastInt
{});
2452 // Always a string, so the key is always an identity mapping
2453 if (inTy
.subtypeOf(BStr
)) return reduce(env
, bc::CGetL
{ op
.nloc1
});
2456 // Either an int or string, so the key can be an identity mapping
2457 if (inTy
.subtypeOf(BArrKey
)) return reduce(env
, bc::CGetL
{ op
.nloc1
});
2460 // A nullable string. The key will either be the string or the integer
2462 if (inTy
.subtypeOf(BOptStr
)) {
2465 bc::CGetL
{ op
.nloc1
},
2467 bc::IsTypeL
{ op
.nloc1
, IsTypeOp::Null
},
2473 // A nullable int. The key will either be the integer, or the static empty
2475 if (inTy
.subtypeOf(BOptInt
)) {
2478 bc::CGetL
{ op
.nloc1
},
2479 bc::String
{ staticEmptyString() },
2480 bc::IsTypeL
{ op
.nloc1
, IsTypeOp::Null
},
2485 case MK::BoolOrNull
:
2486 // A nullable bool. The key will either be 0, 1, or 2.
2487 if (inTy
.subtypeOf(BOptBool
)) {
2490 bc::CGetL
{ op
.nloc1
},
2493 bc::IsTypeL
{ op
.nloc1
, IsTypeOp::Null
},
2499 // The double will be converted (losslessly) to an integer.
2500 if (inTy
.subtypeOf(BDbl
)) {
2501 return reduce(env
, bc::CGetL
{ op
.nloc1
}, bc::DblAsBits
{});
2505 // A nullable double. The key will be an integer, or the static empty
2507 if (inTy
.subtypeOf(BOptDbl
)) {
2510 bc::CGetL
{ op
.nloc1
},
2512 bc::String
{ staticEmptyString() },
2513 bc::IsTypeL
{ op
.nloc1
, IsTypeOp::Null
},
2519 // An object. If the object is definitely known to implement IMemoizeParam
2520 // we can simply call that method, casting the output to ensure its always
2521 // a string (which is what the generic mode does). If not, it will use the
2522 // generic mode, which can handle collections or classes which don't
2523 // implement getInstanceKey.
2525 resolvedCls
->subSubtypeOf(rclsIMemoizeParam
) &&
2526 inTy
.subtypeOf(tyIMemoizeParam
)) {
2529 bc::CGetL
{ op
.nloc1
},
2531 bc::FCallObjMethodD
{
2533 staticEmptyString(),
2534 ObjMethodOp::NullThrows
,
2535 s_getInstanceKey
.get()
2541 case MK::ObjectOrNull
:
2542 // An object or null. We can use the null safe version of a function call
2543 // when invoking getInstanceKey and then select from the result of that,
2544 // or the integer 0. This might seem wasteful, but the JIT does a good job
2545 // inlining away the call in the null case.
2547 resolvedCls
->subSubtypeOf(rclsIMemoizeParam
) &&
2548 inTy
.subtypeOf(opt(tyIMemoizeParam
))) {
2551 bc::CGetL
{ op
.nloc1
},
2553 bc::FCallObjMethodD
{
2555 staticEmptyString(),
2556 ObjMethodOp::NullSafe
,
2557 s_getInstanceKey
.get()
2561 bc::IsTypeL
{ op
.nloc1
, IsTypeOp::Null
},
2570 // No type constraint, or one that isn't usuable. Use the generic memoization
2571 // scheme which can handle any type:
2573 if (auto const val
= tv(inTy
)) {
2574 auto const key
= eval_cell(
2575 [&]{ return HHVM_FN(serialize_memoize_param
)(*val
); }
2577 if (key
) return push(env
, *key
);
2580 // Integer keys are always mapped to themselves
2581 if (inTy
.subtypeOf(BInt
)) return reduce(env
, bc::CGetL
{ op
.nloc1
});
2582 if (inTy
.subtypeOf(BOptInt
)) {
2585 bc::CGetL
{ op
.nloc1
},
2586 bc::String
{ s_nullMemoKey
.get() },
2587 bc::IsTypeL
{ op
.nloc1
, IsTypeOp::Null
},
2591 if (inTy
.subtypeOf(BBool
)) {
2594 bc::String
{ s_falseMemoKey
.get() },
2595 bc::String
{ s_trueMemoKey
.get() },
2596 bc::CGetL
{ op
.nloc1
},
2601 // A memo key can be an integer if the input might be an integer, and is a
2602 // string otherwise. Booleans and nulls are always static strings.
2604 if (inTy
.subtypeOf(BOptBool
)) return TSStr
;
2605 if (inTy
.couldBe(BInt
)) return union_of(TInt
, TStr
);
2608 push(env
, std::move(keyTy
));
2611 void in(ISS
& env
, const bc::IssetL
& op
) {
2612 if (locIsThis(env
, op
.loc1
)) {
2614 bc::BareThis
{ BareThisOp::NoNotice
},
2615 bc::IsTypeC
{ IsTypeOp::Null
},
2620 auto const loc
= locAsCell(env
, op
.loc1
);
2621 if (loc
.subtypeOf(BNull
)) return push(env
, TFalse
);
2622 if (!loc
.couldBe(BNull
)) return push(env
, TTrue
);
2626 void in(ISS
& env
, const bc::IsUnsetL
& op
) {
2629 auto const loc
= locAsCell(env
, op
.loc1
);
2630 if (loc
.subtypeOf(BUninit
)) return push(env
, TTrue
);
2631 if (!loc
.couldBe(BUninit
)) return push(env
, TFalse
);
2635 void in(ISS
& env
, const bc::IssetS
& op
) {
2636 auto const tcls
= popC(env
);
2637 auto const tname
= popC(env
);
2639 if (!tcls
.couldBe(BCls
)) {
2641 return push(env
, TBottom
);
2644 auto lookup
= env
.index
.lookup_static(
2651 if (!lookup
.classInitMightRaise
&&
2652 tcls
.subtypeOf(BCls
) &&
2653 tname
.subtypeOf(BStr
)) {
2658 if (lookup
.ty
.subtypeOf(BNull
)) return push(env
, TFalse
);
2659 if (!lookup
.ty
.couldBe(BNull
) && lookup
.lateInit
== TriBool::No
) {
2660 return push(env
, TTrue
);
2665 void in(ISS
& env
, const bc::IssetG
&) { popC(env
); push(env
, TBool
); }
2667 void isTypeImpl(ISS
& env
, const Type
& locOrCell
, const Type
& test
) {
2668 if (locOrCell
.subtypeOf(test
)) return push(env
, TTrue
);
2669 if (!locOrCell
.couldBe(test
)) return push(env
, TFalse
);
2673 void isTypeObj(ISS
& env
, const Type
& ty
) {
2674 if (!ty
.couldBe(BObj
)) return push(env
, TFalse
);
2675 if (ty
.subtypeOf(BObj
)) {
2676 auto const incompl
= objExact(
2677 env
.index
.builtin_class(s_PHP_Incomplete_Class
.get()));
2678 if (RO::EvalBuildMayNoticeOnMethCallerHelperIsObject
) {
2680 objExact(env
.index
.builtin_class(s_MethCallerHelper
.get()));
2681 if (ty
.couldBe(c
)) return push(env
, TBool
);
2683 if (!ty
.couldBe(incompl
)) return push(env
, TTrue
);
2684 if (ty
.subtypeOf(incompl
)) return push(env
, TFalse
);
2690 void isTypeLImpl(ISS
& env
, const Op
& op
) {
2691 auto const loc
= locAsCell(env
, op
.nloc1
.id
);
2692 if (!locCouldBeUninit(env
, op
.nloc1
.id
) &&
2693 !is_type_might_raise(op
.subop2
, loc
)) {
2698 switch (op
.subop2
) {
2699 case IsTypeOp::Scalar
: return push(env
, TBool
);
2700 case IsTypeOp::LegacyArrLike
: return push(env
, TBool
);
2701 case IsTypeOp::Obj
: return isTypeObj(env
, loc
);
2702 case IsTypeOp::Func
:
2703 return loc
.couldBe(TFunc
) ? push(env
, TBool
) : push(env
, TFalse
);
2704 default: return isTypeImpl(env
, loc
, type_of_istype(op
.subop2
));
2709 void isTypeCImpl(ISS
& env
, const Op
& op
) {
2710 auto const t1
= popC(env
);
2711 if (!is_type_might_raise(op
.subop1
, t1
)) {
2716 switch (op
.subop1
) {
2717 case IsTypeOp::Scalar
: return push(env
, TBool
);
2718 case IsTypeOp::LegacyArrLike
: return push(env
, TBool
);
2719 case IsTypeOp::Obj
: return isTypeObj(env
, t1
);
2720 case IsTypeOp::Func
:
2721 return t1
.couldBe(TFunc
) ? push(env
, TBool
) : push(env
, TFalse
);
2722 default: return isTypeImpl(env
, t1
, type_of_istype(op
.subop1
));
2726 void in(ISS
& env
, const bc::IsTypeC
& op
) { isTypeCImpl(env
, op
); }
2727 void in(ISS
& env
, const bc::IsTypeL
& op
) { isTypeLImpl(env
, op
); }
2729 void in(ISS
& env
, const bc::InstanceOfD
& op
) {
2730 auto t1
= topC(env
);
2731 // Note: InstanceOfD can do autoload if the type might be a type
2732 // alias, so it's not nothrow unless we know it's an object type.
2733 if (auto const rcls
= env
.index
.resolve_class(env
.ctx
, op
.str1
)) {
2734 auto result
= [&] (const Type
& r
) {
2736 if (r
!= TBool
) constprop(env
);
2740 if (!interface_supports_non_objects(rcls
->name())) {
2741 auto const testTy
= subObj(*rcls
);
2742 if (t1
.subtypeOf(testTy
)) return result(TTrue
);
2743 if (!t1
.couldBe(testTy
)) return result(TFalse
);
2744 if (t1
.couldBe(BInitNull
) && !t1
.subtypeOf(BInitNull
)) {
2745 t1
= unopt(std::move(t1
));
2746 if (t1
.subtypeOf(testTy
)) {
2747 return reduce(env
, bc::IsTypeC
{ IsTypeOp::Null
}, bc::Not
{});
2750 return result(TBool
);
2757 void in(ISS
& env
, const bc::InstanceOf
& /*op*/) {
2758 auto const t1
= topC(env
);
2759 auto const v1
= tv(t1
);
2760 if (v1
&& v1
->m_type
== KindOfPersistentString
) {
2761 return reduce(env
, bc::PopC
{},
2762 bc::InstanceOfD
{ v1
->m_data
.pstr
});
2765 if (t1
.subtypeOf(BObj
) && is_specialized_obj(t1
)) {
2766 auto const& dobj
= dobj_of(t1
);
2767 if (dobj
.isExact()) {
2768 return reduce(env
, bc::PopC
{},
2769 bc::InstanceOfD
{ dobj
.cls().name() });
2778 void in(ISS
& env
, const bc::IsLateBoundCls
& op
) {
2779 auto const cls
= env
.ctx
.cls
;
2780 if (cls
&& !(cls
->attrs
& AttrTrait
)) effect_free(env
);
2782 return push(env
, TBool
);
2787 bool isValidTypeOpForIsAs(const IsTypeOp
& op
) {
2789 case IsTypeOp::Null
:
2790 case IsTypeOp::Bool
:
2798 case IsTypeOp::Dict
:
2799 case IsTypeOp::Keyset
:
2800 case IsTypeOp::ArrLike
:
2801 case IsTypeOp::LegacyArrLike
:
2802 case IsTypeOp::Scalar
:
2803 case IsTypeOp::ClsMeth
:
2804 case IsTypeOp::Func
:
2805 case IsTypeOp::Class
:
2811 void isTypeStructImpl(ISS
& env
, SArray inputTS
) {
2812 auto const ts
= inputTS
;
2813 auto const t
= loosen_likeness(topC(env
, 1)); // operand to is/as
2815 bool may_raise
= true;
2816 auto result
= [&] (const Type
& out
) {
2817 popC(env
); // type structure
2818 popC(env
); // operand to is/as
2820 if (!may_raise
) nothrow(env
);
2821 return push(env
, out
);
2825 const Optional
<Type
> type
,
2826 const Optional
<Type
> deopt
= std::nullopt
2828 if (!type
|| is_type_might_raise(*type
, t
)) return result(TBool
);
2829 auto test
= type
.value();
2830 if (t
.subtypeOf(test
)) return result(TTrue
);
2831 if (!t
.couldBe(test
) && (!deopt
|| !t
.couldBe(deopt
.value()))) {
2832 return result(TFalse
);
2834 auto const op
= type_to_istypeop(test
);
2835 if (!op
|| !isValidTypeOpForIsAs(op
.value())) return result(TBool
);
2836 return reduce(env
, bc::PopC
{}, bc::IsTypeC
{ *op
});
2839 auto const is_nullable_ts
= is_ts_nullable(ts
);
2840 auto const is_definitely_null
= t
.subtypeOf(BNull
);
2841 auto const is_definitely_not_null
= !t
.couldBe(BNull
);
2843 if (is_nullable_ts
&& is_definitely_null
) return result(TTrue
);
2845 auto const ts_type
= type_of_type_structure(env
.index
, env
.ctx
, ts
);
2847 if (is_nullable_ts
&& !is_definitely_not_null
&& ts_type
== std::nullopt
) {
2848 // Ts is nullable and we know that t could be null but we dont know for sure
2849 // Also we didn't get a type out of the type structure
2850 return result(TBool
);
2853 if (ts_type
&& !is_type_might_raise(*ts_type
, t
)) may_raise
= false;
2854 switch (get_ts_kind(ts
)) {
2855 case TypeStructure::Kind::T_int
:
2856 case TypeStructure::Kind::T_bool
:
2857 case TypeStructure::Kind::T_float
:
2858 case TypeStructure::Kind::T_string
:
2859 case TypeStructure::Kind::T_num
:
2860 case TypeStructure::Kind::T_arraykey
:
2861 case TypeStructure::Kind::T_keyset
:
2862 case TypeStructure::Kind::T_void
:
2863 case TypeStructure::Kind::T_null
:
2864 return check(ts_type
);
2865 case TypeStructure::Kind::T_tuple
:
2866 return check(ts_type
, TVec
);
2867 case TypeStructure::Kind::T_shape
:
2868 return check(ts_type
, TDict
);
2869 case TypeStructure::Kind::T_dict
:
2870 return check(ts_type
);
2871 case TypeStructure::Kind::T_vec
:
2872 return check(ts_type
);
2873 case TypeStructure::Kind::T_nothing
:
2874 case TypeStructure::Kind::T_noreturn
:
2875 return result(TFalse
);
2876 case TypeStructure::Kind::T_mixed
:
2877 case TypeStructure::Kind::T_dynamic
:
2878 return result(TTrue
);
2879 case TypeStructure::Kind::T_nonnull
:
2880 if (is_definitely_null
) return result(TFalse
);
2881 if (is_definitely_not_null
) return result(TTrue
);
2884 bc::IsTypeC
{ IsTypeOp::Null
},
2886 case TypeStructure::Kind::T_class
:
2887 case TypeStructure::Kind::T_interface
:
2888 case TypeStructure::Kind::T_xhp
: {
2889 auto clsname
= get_ts_classname(ts
);
2890 auto const rcls
= env
.index
.resolve_class(env
.ctx
, clsname
);
2891 if (!rcls
|| !rcls
->resolved() || (ts
->exists(s_generic_types
) &&
2892 (rcls
->cls()->hasReifiedGenerics
||
2893 !isTSAllWildcards(ts
)))) {
2894 // If it is a reified class or has non wildcard generics,
2896 return result(TBool
);
2898 return reduce(env
, bc::PopC
{}, bc::InstanceOfD
{ clsname
});
2900 case TypeStructure::Kind::T_unresolved
: {
2901 auto classname
= get_ts_classname(ts
);
2902 auto const has_generics
= ts
->exists(s_generic_types
);
2903 if (!has_generics
&& classname
->isame(s_this
.get())) {
2904 return reduce(env
, bc::PopC
{}, bc::IsLateBoundCls
{});
2906 auto const rcls
= env
.index
.resolve_class(env
.ctx
, classname
);
2907 // We can only reduce to instance of if we know for sure that this class
2908 // can be resolved since instanceof undefined class does not throw
2909 if (!rcls
|| !rcls
->resolved() || rcls
->cls()->attrs
& AttrEnum
) {
2910 return result(TBool
);
2913 (rcls
->cls()->hasReifiedGenerics
|| !isTSAllWildcards(ts
))) {
2914 // If it is a reified class or has non wildcard generics,
2916 return result(TBool
);
2918 return reduce(env
, bc::PopC
{}, bc::InstanceOfD
{ rcls
->name() });
2920 case TypeStructure::Kind::T_enum
:
2921 case TypeStructure::Kind::T_resource
:
2922 case TypeStructure::Kind::T_vec_or_dict
:
2923 case TypeStructure::Kind::T_any_array
:
2924 // TODO(T29232862): implement
2925 return result(TBool
);
2926 case TypeStructure::Kind::T_typeaccess
:
2927 case TypeStructure::Kind::T_darray
:
2928 case TypeStructure::Kind::T_varray
:
2929 case TypeStructure::Kind::T_varray_or_darray
:
2930 case TypeStructure::Kind::T_reifiedtype
:
2931 return result(TBool
);
2932 case TypeStructure::Kind::T_fun
:
2933 case TypeStructure::Kind::T_typevar
:
2934 case TypeStructure::Kind::T_trait
:
2935 // We will error on these at the JIT
2936 return result(TBool
);
2942 const StaticString
s_hh_type_structure_no_throw("HH\\type_structure_no_throw");
2946 void in(ISS
& env
, const bc::IsTypeStructC
& op
) {
2947 if (!topC(env
).couldBe(BDict
)) {
2950 return unreachable(env
);
2952 auto const a
= tv(topC(env
));
2953 if (!a
|| !isValidTSType(*a
, false)) {
2956 return push(env
, TBool
);
2958 if (op
.subop1
== TypeStructResolveOp::Resolve
) {
2959 if (auto const ts
= resolve_type_structure(env
, a
->m_data
.parr
).sarray()) {
2964 bc::IsTypeStructC
{ TypeStructResolveOp::DontResolve
}
2967 if (auto const val
= get_ts_this_type_access(a
->m_data
.parr
)) {
2968 // Convert `$x is this::T` into
2969 // `$x is type_structure_no_throw(static::class, 'T')`
2970 // to take advantage of the caching that comes with the type_structure
2976 bc::LateBoundCls
{},
2978 bc::FCallFuncD
{FCallArgs(2), s_hh_type_structure_no_throw
.get()},
2979 bc::IsTypeStructC
{ TypeStructResolveOp::DontResolve
}
2983 isTypeStructImpl(env
, a
->m_data
.parr
);
2986 void in(ISS
& env
, const bc::ThrowAsTypeStructException
& op
) {
2992 void in(ISS
& env
, const bc::CombineAndResolveTypeStruct
& op
) {
2993 assertx(op
.arg1
> 0);
2995 auto const first
= tv(topC(env
));
2996 if (first
&& isValidTSType(*first
, false)) {
2997 auto const ts
= first
->m_data
.parr
;
2998 // Optimize single input that does not need any combination
3000 if (auto const r
= resolve_type_structure(env
, ts
).sarray()) {
3009 // Optimize double input that needs a single combination and looks of the
3010 // form ?T, @T or ~T
3011 if (op
.arg1
== 2 && get_ts_kind(ts
) == TypeStructure::Kind::T_reifiedtype
) {
3012 BytecodeVec instrs
{ bc::PopC
{} };
3013 auto const tv_true
= gen_constant(make_tv
<KindOfBoolean
>(true));
3014 if (ts
->exists(s_like
.get())) {
3015 instrs
.push_back(gen_constant(make_tv
<KindOfString
>(s_like
.get())));
3016 instrs
.push_back(tv_true
);
3017 instrs
.push_back(bc::AddElemC
{});
3019 if (ts
->exists(s_nullable
.get())) {
3020 instrs
.push_back(gen_constant(make_tv
<KindOfString
>(s_nullable
.get())));
3021 instrs
.push_back(tv_true
);
3022 instrs
.push_back(bc::AddElemC
{});
3024 if (ts
->exists(s_soft
.get())) {
3025 instrs
.push_back(gen_constant(make_tv
<KindOfString
>(s_soft
.get())));
3026 instrs
.push_back(tv_true
);
3027 instrs
.push_back(bc::AddElemC
{});
3029 return reduce(env
, std::move(instrs
));
3033 for (int i
= 0; i
< op
.arg1
; ++i
) {
3034 auto const t
= popC(env
);
3035 valid
&= t
.couldBe(BDict
);
3037 if (!valid
) return unreachable(env
);
3042 void in(ISS
& env
, const bc::RecordReifiedGeneric
& op
) {
3043 // TODO(T31677864): implement real optimizations
3044 auto const t
= popC(env
);
3045 if (!t
.couldBe(BVec
)) return unreachable(env
);
3046 if (t
.subtypeOf(BVec
)) nothrow(env
);
3050 void in(ISS
& env
, const bc::CheckClsReifiedGenericMismatch
& op
) {
3051 auto const location
= topStkEquiv(env
, 0);
3054 if (location
== NoLocalId
) return;
3055 auto const ok
= refineLocation(
3058 return get_type_of_reified_list(env
.ctx
.cls
->userAttributes
);
3061 if (!ok
) unreachable(env
);
3064 void in(ISS
& env
, const bc::ClassHasReifiedGenerics
& op
) {
3065 // TODO(T121050961) Optimize for lazy classes too
3066 auto const cls
= popC(env
);
3067 if (!cls
.couldBe(BCls
)) {
3069 return push(env
, TBottom
);
3071 if (!cls
.subtypeOf(BCls
)) {
3077 auto const t
= [&] {
3078 if (!is_specialized_cls(cls
) || !dcls_of(cls
).isExact()) {
3081 auto const& dcls
= dcls_of(cls
);
3082 if (!dcls
.cls().couldHaveReifiedGenerics()) {
3085 if (dcls
.cls().mustHaveReifiedGenerics()) {
3093 void in(ISS
& env
, const bc::HasReifiedParent
& op
) {
3094 // TODO(T121050961) Optimize for lazy classes too
3095 auto const cls
= popC(env
);
3096 if (!cls
.couldBe(BCls
)) {
3098 return push(env
, TBottom
);
3100 if (!cls
.subtypeOf(BCls
)) {
3106 auto const t
= [&] {
3107 if (!is_specialized_cls(cls
) || !dcls_of(cls
).isExact()) {
3110 auto const& dcls
= dcls_of(cls
);
3111 if (!dcls
.cls().couldHaveReifiedParent()) {
3114 if (dcls
.cls().mustHaveReifiedParent()) {
3125 * If the value on the top of the stack is known to be equivalent to the local
3126 * its being moved/copied to, return std::nullopt without modifying any
3127 * state. Otherwise, pop the stack value, perform the set, and return a pair
3128 * giving the value's type, and any other local its known to be equivalent to.
3130 template <typename Set
>
3131 Optional
<std::pair
<Type
, LocalId
>> moveToLocImpl(ISS
& env
,
3133 if (auto const prev
= last_op(env
, 1)) {
3134 if (prev
->op
== Op::CGetL2
&&
3135 prev
->CGetL2
.nloc1
.id
== op
.loc1
&&
3136 last_op(env
)->op
== Op::Concat
) {
3138 reduce(env
, bc::SetOpL
{ op
.loc1
, SetOpOp::ConcatEqual
});
3139 return std::nullopt
;
3143 auto equivLoc
= topStkEquiv(env
);
3144 // If the local could be a Ref, don't record equality because the stack
3145 // element and the local won't actually have the same type.
3146 if (equivLoc
== StackThisId
&& env
.state
.thisLoc
!= NoLocalId
) {
3147 if (env
.state
.thisLoc
== op
.loc1
||
3148 locsAreEquiv(env
, env
.state
.thisLoc
, op
.loc1
)) {
3149 return std::nullopt
;
3151 equivLoc
= env
.state
.thisLoc
;
3154 if (!is_volatile_local(env
.ctx
.func
, op
.loc1
)) {
3155 if (equivLoc
<= MaxLocalId
) {
3156 if (equivLoc
== op
.loc1
||
3157 locsAreEquiv(env
, equivLoc
, op
.loc1
)) {
3158 // We allow equivalency to ignore Uninit, so we need to check
3160 if (peekLocRaw(env
, op
.loc1
) == topC(env
)) {
3161 return std::nullopt
;
3164 } else if (equivLoc
== NoLocalId
) {
3167 if (!any(env
.collect
.opts
& CollectionOpts::Speculating
)) {
3171 equivLoc
= NoLocalId
;
3174 auto val
= popC(env
);
3175 setLoc(env
, op
.loc1
, val
);
3176 if (equivLoc
== StackThisId
) {
3177 assertx(env
.state
.thisLoc
== NoLocalId
);
3178 equivLoc
= env
.state
.thisLoc
= op
.loc1
;
3180 if (equivLoc
== StackDupId
) {
3181 setStkLocal(env
, op
.loc1
);
3182 } else if (equivLoc
!= op
.loc1
&& equivLoc
!= NoLocalId
) {
3183 addLocEquiv(env
, op
.loc1
, equivLoc
);
3185 return { std::make_pair(std::move(val
), equivLoc
) };
3190 void in(ISS
& env
, const bc::PopL
& op
) {
3191 // If the same value is already in the local, do nothing but pop
3192 // it. Otherwise, the set has been done by moveToLocImpl.
3193 if (!moveToLocImpl(env
, op
)) return reduce(env
, bc::PopC
{});
3196 void in(ISS
& env
, const bc::SetL
& op
) {
3197 // If the same value is already in the local, do nothing because SetL keeps
3198 // the value on the stack. If it isn't, we need to push it back onto the stack
3199 // because moveToLocImpl popped it.
3200 if (auto p
= moveToLocImpl(env
, op
)) {
3201 push(env
, std::move(p
->first
), p
->second
);
3207 void in(ISS
& env
, const bc::SetG
&) {
3208 auto t1
= popC(env
);
3210 push(env
, std::move(t1
));
3213 void in(ISS
& env
, const bc::SetS
& op
) {
3214 auto const val
= popC(env
);
3215 auto const tcls
= popC(env
);
3216 auto const tname
= popC(env
);
3218 auto const throws
= [&] {
3220 return push(env
, TBottom
);
3223 if (!tcls
.couldBe(BCls
)) return throws();
3225 auto merge
= env
.index
.merge_static_type(
3227 env
.collect
.publicSPropMutations
,
3234 ReadonlyOp::Readonly
== op
.subop1
3237 if (merge
.throws
== TriBool::Yes
|| merge
.adjusted
.subtypeOf(BBottom
)) {
3241 if (merge
.throws
== TriBool::No
&&
3242 tcls
.subtypeOf(BCls
) &&
3243 tname
.subtypeOf(BStr
)) {
3247 push(env
, std::move(merge
.adjusted
));
3250 void in(ISS
& env
, const bc::SetOpL
& op
) {
3251 auto const t1
= popC(env
);
3252 auto const loc
= locAsCell(env
, op
.loc1
);
3254 auto resultTy
= typeSetOp(op
.subop2
, loc
, t1
);
3255 setLoc(env
, op
.loc1
, resultTy
);
3256 push(env
, std::move(resultTy
));
3259 void in(ISS
& env
, const bc::SetOpG
&) {
3260 popC(env
); popC(env
);
3261 push(env
, TInitCell
);
3264 void in(ISS
& env
, const bc::SetOpS
& op
) {
3265 auto const rhs
= popC(env
);
3266 auto const tcls
= popC(env
);
3267 auto const tname
= popC(env
);
3269 auto const throws
= [&] {
3271 return push(env
, TBottom
);
3274 if (!tcls
.couldBe(BCls
)) return throws();
3276 auto const lookup
= env
.index
.lookup_static(
3283 if (lookup
.found
== TriBool::No
|| lookup
.ty
.subtypeOf(BBottom
)) {
3287 auto const newTy
= typeSetOp(op
.subop1
, lookup
.ty
, rhs
);
3288 if (newTy
.subtypeOf(BBottom
)) return throws();
3290 auto merge
= env
.index
.merge_static_type(
3292 env
.collect
.publicSPropMutations
,
3299 if (merge
.throws
== TriBool::Yes
|| merge
.adjusted
.subtypeOf(BBottom
)) {
3303 // NB: Unlike IncDecS, SetOpS pushes the post-TypeConstraint
3304 // adjustment value.
3305 push(env
, std::move(merge
.adjusted
));
3308 void in(ISS
& env
, const bc::IncDecL
& op
) {
3309 auto loc
= locAsCell(env
, op
.nloc1
.id
);
3310 auto newT
= typeIncDec(op
.subop2
, loc
);
3312 if (newT
.subtypeOf(BBottom
)) {
3314 return push(env
, TBottom
);
3317 if (!locCouldBeUninit(env
, op
.nloc1
.id
) && loc
.subtypeOf(BNum
)) nothrow(env
);
3319 auto const pre
= isPre(op
.subop2
);
3320 if (!pre
) push(env
, std::move(loc
));
3321 setLoc(env
, op
.nloc1
.id
, newT
);
3322 if (pre
) push(env
, std::move(newT
));
3325 void in(ISS
& env
, const bc::IncDecG
&) { popC(env
); push(env
, TInitCell
); }
3327 void in(ISS
& env
, const bc::IncDecS
& op
) {
3328 auto const tcls
= popC(env
);
3329 auto const tname
= popC(env
);
3330 auto const pre
= isPre(op
.subop1
);
3332 auto const throws
= [&] {
3334 return push(env
, TBottom
);
3337 if (!tcls
.couldBe(BCls
)) return throws();
3339 auto lookup
= env
.index
.lookup_static(
3346 if (lookup
.found
== TriBool::No
|| lookup
.ty
.subtypeOf(BBottom
)) {
3350 auto newTy
= typeIncDec(op
.subop1
, lookup
.ty
);
3351 if (newTy
.subtypeOf(BBottom
)) return throws();
3353 auto const merge
= env
.index
.merge_static_type(
3355 env
.collect
.publicSPropMutations
,
3362 if (merge
.throws
== TriBool::Yes
|| merge
.adjusted
.subtypeOf(BBottom
)) {
3366 if (lookup
.found
== TriBool::Yes
&&
3367 lookup
.lateInit
== TriBool::No
&&
3368 !lookup
.classInitMightRaise
&&
3369 merge
.throws
== TriBool::No
&&
3370 tcls
.subtypeOf(BCls
) &&
3371 tname
.subtypeOf(BStr
) &&
3372 lookup
.ty
.subtypeOf(BNum
)) {
3376 // NB: IncDecS pushes the value pre-TypeConstraint modification
3377 push(env
, pre
? std::move(newTy
) : std::move(lookup
.ty
));
3380 void in(ISS
& env
, const bc::UnsetL
& op
) {
3381 if (locRaw(env
, op
.loc1
).subtypeOf(TUninit
)) {
3385 if (auto const last
= last_op(env
)) {
3386 // No point in popping into the local if we're just going to
3387 // immediately unset it.
3388 if (last
->op
== Op::PopL
&&
3389 last
->PopL
.loc1
== op
.loc1
) {
3392 setLocRaw(env
, op
.loc1
, TCell
);
3393 return reduce(env
, bc::PopC
{}, bc::UnsetL
{ op
.loc1
});
3397 if (any(env
.collect
.opts
& CollectionOpts::Speculating
)) {
3402 setLocRaw(env
, op
.loc1
, TUninit
);
3405 void in(ISS
& env
, const bc::UnsetG
& /*op*/) {
3406 auto const t1
= popC(env
);
3407 if (!t1
.couldBe(BObj
| BRes
)) nothrow(env
);
3410 bool fcallCanSkipRepack(ISS
& env
, const FCallArgs
& fca
, const res::Func
& func
) {
3411 // Can't skip repack if potentially calling a function with too many args.
3412 if (fca
.numArgs() > func
.minNonVariadicParams()) return false;
3413 // Repack not needed if not unpacking and not having too many arguments.
3414 if (!fca
.hasUnpack()) return true;
3415 // Can't skip repack if unpack args are in a wrong position.
3416 if (fca
.numArgs() != func
.maxNonVariadicParams()) return false;
3418 // Repack not needed if unpack args have the correct type.
3419 auto const unpackArgs
= topC(env
, fca
.hasGenerics() ? 1 : 0);
3420 return unpackArgs
.subtypeOf(BVec
);
3423 bool coeffectRulesMatch(ISS
& env
,
3424 const FCallArgs
& fca
,
3425 const res::Func
& func
,
3426 uint32_t numExtraInputs
,
3427 const CoeffectRule
& caller
,
3428 const CoeffectRule
& callee
) {
3429 if (caller
.m_type
!= callee
.m_type
) return false;
3430 switch (caller
.m_type
) {
3431 case CoeffectRule::Type::CCThis
: {
3432 if (caller
.m_name
!= callee
.m_name
||
3433 caller
.m_types
!= callee
.m_types
) {
3436 if (!thisAvailable(env
)) return false;
3437 auto const loc
= topStkEquiv(env
, fca
.numInputs() + numExtraInputs
+ 1);
3438 return loc
== StackThisId
|| (loc
<= MaxLocalId
&& locIsThis(env
, loc
));
3440 case CoeffectRule::Type::CCParam
:
3441 if (caller
.m_name
!= callee
.m_name
) return false;
3443 case CoeffectRule::Type::FunParam
: {
3444 if (fca
.hasUnpack()) return false;
3445 if (fca
.numArgs() <= callee
.m_index
) return false;
3446 auto const l1
= caller
.m_index
;
3447 auto const l2
= topStkEquiv(env
, fca
.numInputs() - callee
.m_index
- 1);
3449 (l1
<= MaxLocalId
&&
3451 locsAreEquiv(env
, l1
, l2
));
3453 case CoeffectRule::Type::CCReified
:
3454 // TODO: optimize these
3456 case CoeffectRule::Type::ClosureParentScope
:
3457 case CoeffectRule::Type::GeneratorThis
:
3458 case CoeffectRule::Type::Caller
:
3459 case CoeffectRule::Type::Invalid
:
3465 bool fcallCanSkipCoeffectsCheck(ISS
& env
,
3466 const FCallArgs
& fca
,
3467 const res::Func
& func
,
3468 uint32_t numExtraInputs
) {
3469 auto const requiredCoeffectsOpt
= func
.requiredCoeffects();
3470 if (!requiredCoeffectsOpt
) return false;
3471 auto const required
= *requiredCoeffectsOpt
;
3472 auto const provided
=
3473 RuntimeCoeffects::fromValue(env
.ctx
.func
->requiredCoeffects
.value() |
3474 env
.ctx
.func
->coeffectEscapes
.value());
3475 if (!provided
.canCall(required
)) return false;
3476 auto const calleeRules
= func
.coeffectRules();
3477 // If we couldn't tell whether callee has rules or not, punt.
3478 if (!calleeRules
) return false;
3479 if (calleeRules
->empty()) return true;
3480 if (calleeRules
->size() == 1 && (*calleeRules
)[0].isCaller()) return true;
3481 auto const callerRules
= env
.ctx
.func
->coeffectRules
;
3482 return std::is_permutation(callerRules
.begin(), callerRules
.end(),
3483 calleeRules
->begin(), calleeRules
->end(),
3484 [&] (const CoeffectRule
& a
,
3485 const CoeffectRule
& b
) {
3486 return coeffectRulesMatch(env
, fca
, func
,
3492 template<typename FCallWithFCA
>
3493 bool fcallOptimizeChecks(
3495 const FCallArgs
& fca
,
3496 const res::Func
& func
,
3497 FCallWithFCA fcallWithFCA
,
3498 Optional
<uint32_t> inOutNum
,
3500 uint32_t numExtraInputs
3502 // Don't optimize away in-out checks if we might use the null safe
3503 // operator. If we do so, we need the in-out bits to shuffle the
3505 if (!maybeNullsafe
&& fca
.enforceInOut()) {
3506 if (inOutNum
== fca
.numRets() - 1) {
3508 for (auto i
= 0; i
< fca
.numArgs(); ++i
) {
3509 auto const kind
= env
.index
.lookup_param_prep(env
.ctx
, func
, i
);
3510 if (kind
.inOut
== TriBool::Maybe
) {
3515 if (yesOrNo(fca
.isInOut(i
)) != kind
.inOut
) {
3516 // The function/method may not exist, in which case we should raise a
3517 // different error. Just defer the checks to the runtime.
3518 if (!func
.exactFunc()) return false;
3521 auto const exCls
= makeStaticString("InvalidArgumentException");
3522 auto const err
= makeStaticString(formatParamInOutMismatch(
3523 func
.name()->data(), i
, !fca
.isInOut(i
)));
3527 bc::NewObjD
{ exCls
},
3531 bc::FCallCtor
{ FCallArgs(1), staticEmptyString() },
3541 // Optimize away the runtime inout-ness check.
3542 reduce(env
, fcallWithFCA(fca
.withoutInOut()));
3548 if (fca
.enforceReadonly()) {
3550 for (auto i
= 0; i
< fca
.numArgs(); ++i
) {
3551 if (!fca
.isReadonly(i
)) continue;
3552 auto const kind
= env
.index
.lookup_param_prep(env
.ctx
, func
, i
);
3553 if (kind
.readonly
== TriBool::Maybe
) {
3558 if (kind
.readonly
!= TriBool::Yes
) {
3559 // The function/method may not exist, in which case we should raise a
3560 // different error. Just defer the checks to the runtime.
3561 if (!func
.exactFunc()) return false;
3568 // Optimize away the runtime readonly-ness check.
3569 reduce(env
, fcallWithFCA(fca
.withoutReadonly()));
3574 if (fca
.enforceMutableReturn()) {
3575 if (env
.index
.lookup_return_readonly(env
.ctx
, func
) == TriBool::No
) {
3576 reduce(env
, fcallWithFCA(fca
.withoutEnforceMutableReturn()));
3581 if (fca
.enforceReadonlyThis()) {
3582 if (env
.index
.lookup_readonly_this(env
.ctx
, func
) == TriBool::Yes
) {
3583 reduce(env
, fcallWithFCA(fca
.withoutEnforceReadonlyThis()));
3588 // Infer whether the callee supports async eager return.
3589 if (fca
.asyncEagerTarget() != NoBlockId
) {
3590 if (env
.index
.supports_async_eager_return(func
) == TriBool::No
) {
3591 reduce(env
, fcallWithFCA(fca
.withoutAsyncEagerTarget()));
3596 if (!fca
.skipRepack() && fcallCanSkipRepack(env
, fca
, func
)) {
3597 reduce(env
, fcallWithFCA(fca
.withoutRepack()));
3601 if (!fca
.skipCoeffectsCheck() &&
3602 fcallCanSkipCoeffectsCheck(env
, fca
, func
, numExtraInputs
)) {
3603 reduce(env
, fcallWithFCA(fca
.withoutCoeffectsCheck()));
3612 const FCallArgs
& fca
,
3613 const res::Func
& func
,
3616 uint32_t numExtraInputs
3618 auto const foldableFunc
= func
.exactFunc();
3619 if (!foldableFunc
) return false;
3620 if (!shouldAttemptToFold(env
, foldableFunc
, fca
, context
, maybeDynamic
)) {
3624 assertx(!fca
.hasUnpack() && !fca
.hasGenerics() && fca
.numRets() == 1);
3625 assertx(options
.ConstantFoldBuiltins
);
3627 auto const finish
= [&] (Type ty
) {
3628 auto const v
= tv(ty
);
3629 if (!v
) return false;
3631 for (uint32_t i
= 0; i
< numExtraInputs
; ++i
) repl
.push_back(bc::PopC
{});
3632 for (uint32_t i
= 0; i
< fca
.numArgs(); ++i
) repl
.push_back(bc::PopC
{});
3633 repl
.push_back(bc::PopU
{});
3634 if (topT(env
, fca
.numArgs() + 1 + numExtraInputs
).subtypeOf(TInitCell
)) {
3635 repl
.push_back(bc::PopC
{});
3637 assertx(topT(env
, fca
.numArgs() + 1 + numExtraInputs
).subtypeOf(TUninit
));
3638 repl
.push_back(bc::PopU
{});
3640 repl
.push_back(gen_constant(*v
));
3641 reduce(env
, std::move(repl
));
3645 if (foldableFunc
->attrs
& AttrBuiltin
&&
3646 foldableFunc
->attrs
& AttrIsFoldable
) {
3647 auto ret
= const_fold(env
, fca
.numArgs(), numExtraInputs
, *foldableFunc
,
3649 if (!ret
) return false;
3650 return finish(std::move(*ret
));
3653 CompactVector
<Type
> args(fca
.numArgs());
3654 auto const firstArgPos
= numExtraInputs
+ fca
.numInputs() - 1;
3655 for (auto i
= uint32_t{0}; i
< fca
.numArgs(); ++i
) {
3656 auto const& arg
= topT(env
, firstArgPos
- i
);
3657 auto const isScalar
= is_scalar(arg
);
3659 (env
.index
.func_depends_on_arg(foldableFunc
, i
) ||
3660 !arg
.subtypeOf(BInitCell
))) {
3663 args
[i
] = isScalar
? scalarize(arg
) : arg
;
3666 auto calleeCtx
= CallContext
{
3671 if (env
.collect
.unfoldableFuncs
.count(calleeCtx
)) return false;
3673 auto foldableReturnType
= env
.index
.lookup_foldable_return_type(
3677 if (finish(std::move(foldableReturnType
))) return true;
3679 env
.collect
.unfoldableFuncs
.emplace(std::move(calleeCtx
));
3683 Type
typeFromWH(Type t
) {
3684 if (!t
.couldBe(BObj
)) {
3685 // Exceptions will be thrown if a non-object is awaited.
3689 // Throw away non-obj component.
3692 // If we aren't even sure this is a wait handle, there's nothing we can
3694 if (!is_specialized_wait_handle(t
)) {
3698 return wait_handle_inner(t
);
3701 void pushCallReturnType(ISS
& env
,
3703 const FCallArgs
& fca
,
3705 std::vector
<Type
> inOuts
) {
3706 auto const numRets
= fca
.numRets();
3708 assertx(fca
.asyncEagerTarget() == NoBlockId
);
3709 assertx(IMPLIES(nullsafe
, inOuts
.size() == numRets
- 1));
3711 for (auto i
= uint32_t{0}; i
< numRets
- 1; ++i
) popU(env
);
3712 if (!ty
.couldBe(BVecN
)) {
3713 // Function cannot have an in-out args match, so call will
3716 for (int32_t i
= 0; i
< numRets
; i
++) push(env
, TBottom
);
3717 return unreachable(env
);
3719 // We'll only hit the nullsafe null case, so the outputs are the
3721 for (auto& t
: inOuts
) push(env
, std::move(t
));
3722 push(env
, TInitNull
);
3726 // If we might use the nullsafe operator, we need to union in the
3727 // null case (which means the inout args are unchanged).
3728 if (is_specialized_array_like(ty
)) {
3729 for (int32_t i
= 1; i
< numRets
; i
++) {
3730 auto elem
= array_like_elem(ty
, ival(i
)).first
;
3731 if (nullsafe
) elem
|= inOuts
[i
-1];
3732 push(env
, std::move(elem
));
3737 ? opt(array_like_elem(ty
, ival(0)).first
)
3738 : array_like_elem(ty
, ival(0)).first
3741 for (int32_t i
= 0; i
< numRets
; ++i
) push(env
, TInitCell
);
3745 if (fca
.asyncEagerTarget() != NoBlockId
) {
3746 assertx(!ty
.is(BBottom
));
3747 push(env
, typeFromWH(ty
));
3748 assertx(!topC(env
).subtypeOf(BBottom
));
3749 env
.propagate(fca
.asyncEagerTarget(), &env
.state
);
3752 if (nullsafe
) ty
= opt(std::move(ty
));
3753 if (ty
.is(BBottom
)) {
3754 // The callee function never returns. It might throw, or loop
3757 return unreachable(env
);
3759 return push(env
, std::move(ty
));
3762 const StaticString s_defined
{ "defined" };
3763 const StaticString s_function_exists
{ "function_exists" };
3765 template<typename FCallWithFCA
>
3766 void fcallKnownImpl(
3768 const FCallArgs
& fca
,
3769 const res::Func
& func
,
3772 uint32_t numExtraInputs
,
3773 FCallWithFCA fcallWithFCA
,
3774 Optional
<uint32_t> inOutNum
3776 auto const numArgs
= fca
.numArgs();
3777 auto returnType
= [&] {
3778 CompactVector
<Type
> args(numArgs
);
3779 auto const firstArgPos
= numExtraInputs
+ fca
.numInputs() - 1;
3780 for (auto i
= uint32_t{0}; i
< numArgs
; ++i
) {
3781 args
[i
] = topCV(env
, firstArgPos
- i
);
3784 return fca
.hasUnpack()
3785 ? env
.index
.lookup_return_type(env
.ctx
, &env
.collect
.methods
, func
)
3786 : env
.index
.lookup_return_type(
3787 env
.ctx
, &env
.collect
.methods
, args
, context
, func
3791 // If there's a caller/callee inout mismatch, then the call will
3793 if (fca
.enforceInOut()) {
3794 if (inOutNum
&& (*inOutNum
+ 1 != fca
.numRets())) {
3795 returnType
= TBottom
;
3799 if (fca
.asyncEagerTarget() != NoBlockId
&& typeFromWH(returnType
) == TBottom
) {
3800 // Kill the async eager target if the function never returns.
3801 reduce(env
, fcallWithFCA(std::move(fca
.withoutAsyncEagerTarget())));
3805 if (func
.name()->isame(s_function_exists
.get()) &&
3806 (numArgs
== 1 || numArgs
== 2) &&
3807 !fca
.hasUnpack() && !fca
.hasGenerics()) {
3808 handle_function_exists(env
, topT(env
, numExtraInputs
+ numArgs
- 1));
3811 for (auto i
= uint32_t{0}; i
< numExtraInputs
; ++i
) popC(env
);
3812 if (fca
.hasGenerics()) popC(env
);
3813 if (fca
.hasUnpack()) popC(env
);
3814 std::vector
<Type
> inOuts
;
3815 for (auto i
= uint32_t{0}; i
< numArgs
; ++i
) {
3816 if (nullsafe
&& fca
.isInOut(numArgs
- i
- 1)) {
3817 inOuts
.emplace_back(popCV(env
));
3824 pushCallReturnType(env
, std::move(returnType
),
3825 fca
, nullsafe
, std::move(inOuts
));
3828 void fcallUnknownImpl(ISS
& env
,
3829 const FCallArgs
& fca
,
3830 const Type
& retTy
= TInitCell
) {
3831 if (fca
.hasGenerics()) popC(env
);
3832 if (fca
.hasUnpack()) popC(env
);
3833 auto const numArgs
= fca
.numArgs();
3834 auto const numRets
= fca
.numRets();
3835 for (auto i
= uint32_t{0}; i
< numArgs
; ++i
) popCV(env
);
3838 if (fca
.asyncEagerTarget() != NoBlockId
) {
3839 assertx(numRets
== 1);
3840 assertx(!retTy
.is(BBottom
));
3842 env
.propagate(fca
.asyncEagerTarget(), &env
.state
);
3845 for (auto i
= uint32_t{0}; i
< numRets
- 1; ++i
) popU(env
);
3846 for (auto i
= uint32_t{0}; i
< numRets
; ++i
) push(env
, retTy
);
3849 void in(ISS
& env
, const bc::FCallFuncD
& op
) {
3850 auto const rfunc
= env
.index
.resolve_func(env
.ctx
, op
.str2
);
3852 if (op
.fca
.hasGenerics()) {
3853 auto const tsList
= topC(env
);
3854 if (!tsList
.couldBe(BVec
)) {
3855 return unreachable(env
);
3858 if (!rfunc
.couldHaveReifiedGenerics()) {
3862 bc::FCallFuncD
{ op
.fca
.withoutGenerics(), op
.str2
}
3867 auto const updateBC
= [&] (FCallArgs fca
) {
3868 return bc::FCallFuncD
{ std::move(fca
), op
.str2
};
3871 auto const numInOut
= op
.fca
.enforceInOut()
3872 ? env
.index
.lookup_num_inout_params(env
.ctx
, rfunc
)
3875 if (fcallOptimizeChecks(env
, op
.fca
, rfunc
, updateBC
, numInOut
, false, 0) ||
3876 fcallTryFold(env
, op
.fca
, rfunc
, TBottom
, false, 0)) {
3880 if (auto const func
= rfunc
.exactFunc()) {
3881 if (optimize_builtin(env
, func
, op
.fca
)) return;
3884 fcallKnownImpl(env
, op
.fca
, rfunc
, TBottom
, false, 0, updateBC
, numInOut
);
3889 const StaticString
s_invoke("__invoke");
3891 s_DynamicContextOverrideUnsafe("__SystemLib\\DynamicContextOverrideUnsafe");
3893 bool isBadContext(const FCallArgs
& fca
) {
3894 return fca
.context() &&
3895 fca
.context()->isame(s_DynamicContextOverrideUnsafe
.get());
3898 Context
getCallContext(const ISS
& env
, const FCallArgs
& fca
) {
3899 if (auto const name
= fca
.context()) {
3900 auto const rcls
= env
.index
.resolve_class(env
.ctx
, name
);
3901 if (rcls
&& rcls
->cls()) {
3902 return Context
{ env
.ctx
.unit
, env
.ctx
.func
, rcls
->cls() };
3904 return Context
{ env
.ctx
.unit
, env
.ctx
.func
, nullptr };
3909 void fcallObjMethodNullsafeNoFold(ISS
& env
,
3910 const FCallArgs
& fca
,
3912 assertx(fca
.asyncEagerTarget() == NoBlockId
);
3913 if (extraInput
) popC(env
);
3914 if (fca
.hasGenerics()) popC(env
);
3915 if (fca
.hasUnpack()) popC(env
);
3916 auto const numArgs
= fca
.numArgs();
3917 auto const numRets
= fca
.numRets();
3918 std::vector
<Type
> inOuts
;
3919 for (auto i
= uint32_t{0}; i
< numArgs
; ++i
) {
3920 if (fca
.enforceInOut() && fca
.isInOut(numArgs
- i
- 1)) {
3921 inOuts
.emplace_back(popCV(env
));
3928 for (auto i
= uint32_t{0}; i
< numRets
- 1; ++i
) popU(env
);
3929 assertx(inOuts
.size() == numRets
- 1);
3930 for (auto& t
: inOuts
) push(env
, std::move(t
));
3931 push(env
, TInitNull
);
3934 void fcallObjMethodNullsafe(ISS
& env
, const FCallArgs
& fca
, bool extraInput
) {
3935 // Don't fold if there's inout arguments. We could, in principal,
3936 // fold away the inout case like we do below, but we don't have the
3937 // bytecodes necessary to shuffle the stack.
3938 if (fca
.enforceInOut()) {
3939 for (uint32_t i
= 0; i
< fca
.numArgs(); ++i
) {
3940 if (fca
.isInOut(i
)) {
3941 return fcallObjMethodNullsafeNoFold(env
, fca
, extraInput
);
3947 if (extraInput
) repl
.push_back(bc::PopC
{});
3948 if (fca
.hasGenerics()) repl
.push_back(bc::PopC
{});
3949 if (fca
.hasUnpack()) repl
.push_back(bc::PopC
{});
3951 auto const numArgs
= fca
.numArgs();
3952 for (uint32_t i
= 0; i
< numArgs
; ++i
) {
3953 assertx(topC(env
, repl
.size()).subtypeOf(BInitCell
));
3954 repl
.push_back(bc::PopC
{});
3956 repl
.push_back(bc::PopU
{});
3957 repl
.push_back(bc::PopC
{});
3958 assertx(fca
.numRets() == 1);
3959 repl
.push_back(bc::Null
{});
3961 reduce(env
, std::move(repl
));
3964 template <typename UpdateBC
>
3965 void fcallObjMethodImpl(ISS
& env
, const FCallArgs
& fca
, SString methName
,
3966 bool nullThrows
, bool dynamic
, bool extraInput
,
3967 uint32_t inputPos
, SString clsHint
,
3968 UpdateBC updateBC
) {
3969 auto const input
= topC(env
, inputPos
);
3970 auto const location
= topStkEquiv(env
, inputPos
);
3971 auto const mayCallMethod
= input
.couldBe(BObj
);
3972 auto const mayUseNullsafe
= !nullThrows
&& input
.couldBe(BNull
);
3973 auto const mayThrowNonObj
= !input
.subtypeOf(nullThrows
? BObj
: BOptObj
);
3975 auto const refineLoc
= [&] {
3976 if (location
== NoLocalId
) return;
3977 if (!refineLocation(env
, location
, [&] (Type t
) {
3978 if (nullThrows
) return intersection_of(t
, TObj
);
3979 if (!t
.couldBe(BUninit
)) return intersection_of(t
, TOptObj
);
3980 if (!t
.couldBe(BObj
)) return intersection_of(t
, TNull
);
3987 auto const throws
= [&] {
3988 if (fca
.asyncEagerTarget() != NoBlockId
) {
3989 // Kill the async eager target if the function never returns.
3990 return reduce(env
, updateBC(fca
.withoutAsyncEagerTarget()));
3992 if (extraInput
) popC(env
);
3993 fcallUnknownImpl(env
, fca
, TBottom
);
3997 if (!mayCallMethod
&& !mayUseNullsafe
) {
3998 // This FCallObjMethodD may only throw
4002 if (!mayCallMethod
&& !mayThrowNonObj
) {
4003 // Null input, this may only return null, so do that.
4004 return fcallObjMethodNullsafe(env
, fca
, extraInput
);
4007 if (!mayCallMethod
) {
4008 // May only return null, but can't fold as we may still throw.
4009 assertx(mayUseNullsafe
&& mayThrowNonObj
);
4010 if (fca
.asyncEagerTarget() != NoBlockId
) {
4011 return reduce(env
, updateBC(fca
.withoutAsyncEagerTarget()));
4013 return fcallObjMethodNullsafeNoFold(env
, fca
, extraInput
);
4016 if (isBadContext(fca
)) return throws();
4018 auto const ctx
= getCallContext(env
, fca
);
4019 auto const ctxTy
= input
.couldBe(BObj
)
4020 ? intersection_of(input
, TObj
)
4022 auto const clsTy
= objcls(ctxTy
);
4023 auto const rfunc
= env
.index
.resolve_method(ctx
, clsTy
, methName
);
4025 auto const numInOut
= fca
.enforceInOut()
4026 ? env
.index
.lookup_num_inout_params(env
.ctx
, rfunc
)
4029 auto const canFold
= !mayUseNullsafe
&& !mayThrowNonObj
;
4030 auto const numExtraInputs
= extraInput
? 1 : 0;
4031 if (fcallOptimizeChecks(env
, fca
, rfunc
, updateBC
,
4032 numInOut
, mayUseNullsafe
, numExtraInputs
) ||
4033 (canFold
&& fcallTryFold(env
, fca
, rfunc
, ctxTy
, dynamic
,
4038 if (clsHint
&& clsHint
->empty() && rfunc
.exactFunc()) {
4039 return reduce(env
, updateBC(fca
, rfunc
.exactFunc()->cls
->name
));
4042 fcallKnownImpl(env
, fca
, rfunc
, ctxTy
, mayUseNullsafe
, extraInput
? 1 : 0,
4043 updateBC
, numInOut
);
4047 void fcallFuncUnknown(ISS
& env
, const bc::FCallFunc
& op
) {
4049 fcallUnknownImpl(env
, op
.fca
);
4052 void fcallFuncClsMeth(ISS
& env
, const bc::FCallFunc
& op
) {
4053 assertx(topC(env
).subtypeOf(BClsMeth
));
4055 // TODO: optimize me
4056 fcallFuncUnknown(env
, op
);
4059 void fcallFuncFunc(ISS
& env
, const bc::FCallFunc
& op
) {
4060 assertx(topC(env
).subtypeOf(BFunc
));
4062 // TODO: optimize me
4063 fcallFuncUnknown(env
, op
);
4066 void fcallFuncObj(ISS
& env
, const bc::FCallFunc
& op
) {
4067 assertx(topC(env
).subtypeOf(BOptObj
));
4069 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4071 return bc::FCallFunc
{ std::move(fca
) };
4074 env
, op
.fca
, s_invoke
.get(),
4075 true, false, true, 0, nullptr,
4080 void fcallFuncStr(ISS
& env
, const bc::FCallFunc
& op
) {
4081 assertx(topC(env
).subtypeOf(BStr
));
4082 auto funcName
= getNameFromType(topC(env
));
4083 if (!funcName
) return fcallFuncUnknown(env
, op
);
4085 funcName
= normalizeNS(funcName
);
4086 if (!isNSNormalized(funcName
) || !notClassMethodPair(funcName
)) {
4087 return fcallFuncUnknown(env
, op
);
4090 auto const rfunc
= env
.index
.resolve_func(env
.ctx
, funcName
);
4091 if (!rfunc
.mightCareAboutDynCalls()) {
4092 return reduce(env
, bc::PopC
{}, bc::FCallFuncD
{ op
.fca
, funcName
});
4095 auto const updateBC
= [&] (FCallArgs fca
) {
4096 return bc::FCallFunc
{ std::move(fca
) };
4099 auto const numInOut
= op
.fca
.enforceInOut()
4100 ? env
.index
.lookup_num_inout_params(env
.ctx
, rfunc
)
4103 if (fcallOptimizeChecks(env
, op
.fca
, rfunc
, updateBC
, numInOut
, false, 1)) {
4106 fcallKnownImpl(env
, op
.fca
, rfunc
, TBottom
, false, 1, updateBC
, numInOut
);
4111 void in(ISS
& env
, const bc::FCallFunc
& op
) {
4112 auto const callable
= topC(env
);
4113 if (!callable
.couldBe(BObj
| BArrLike
| BStr
| BFunc
|
4114 BRFunc
| BClsMeth
| BRClsMeth
)) {
4115 if (op
.fca
.asyncEagerTarget() != NoBlockId
) {
4116 return reduce(env
, bc::FCallFunc
{ op
.fca
.withoutAsyncEagerTarget() });
4119 fcallUnknownImpl(env
, op
.fca
, TBottom
);
4120 return unreachable(env
);
4122 if (callable
.subtypeOf(BOptObj
)) return fcallFuncObj(env
, op
);
4123 if (callable
.subtypeOf(BFunc
)) return fcallFuncFunc(env
, op
);
4124 if (callable
.subtypeOf(BClsMeth
)) return fcallFuncClsMeth(env
, op
);
4125 if (callable
.subtypeOf(BStr
)) return fcallFuncStr(env
, op
);
4126 fcallFuncUnknown(env
, op
);
4129 void in(ISS
& env
, const bc::ResolveFunc
& op
) {
4133 void in(ISS
& env
, const bc::ResolveMethCaller
& op
) {
4138 void in(ISS
& env
, const bc::ResolveRFunc
& op
) {
4140 push(env
, union_of(TFunc
, TRFunc
));
4145 Type
ctxCls(ISS
& env
) {
4146 auto const s
= selfCls(env
);
4147 return setctx(s
? *s
: TCls
);
4150 Type
specialClsRefToCls(ISS
& env
, SpecialClsRef ref
) {
4151 if (!env
.ctx
.cls
) return TCls
;
4152 auto const op
= [&]()-> Optional
<Type
> {
4154 case SpecialClsRef::LateBoundCls
: return ctxCls(env
);
4155 case SpecialClsRef::SelfCls
: return selfClsExact(env
);
4156 case SpecialClsRef::ParentCls
: return parentClsExact(env
);
4158 always_assert(false);
4160 return op
? *op
: TCls
;
4163 template<bool reifiedVersion
= false>
4164 void resolveClsMethodSImpl(ISS
& env
, SpecialClsRef ref
, LSString meth_name
) {
4165 auto const clsTy
= specialClsRefToCls(env
, ref
);
4166 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, meth_name
);
4167 if (is_specialized_cls(clsTy
) && dcls_of(clsTy
).isExact() &&
4168 !rfunc
.couldHaveReifiedGenerics()) {
4169 auto const clsName
= dcls_of(clsTy
).cls().name();
4170 return reduce(env
, bc::ResolveClsMethodD
{ clsName
, meth_name
});
4172 if (reifiedVersion
) popC(env
);
4173 if (!reifiedVersion
|| !rfunc
.couldHaveReifiedGenerics()) {
4174 push(env
, TClsMeth
);
4176 push(env
, union_of(TClsMeth
, TRClsMeth
));
4182 void in(ISS
& env
, const bc::ResolveClsMethod
& op
) {
4184 push(env
, TClsMeth
);
4187 void in(ISS
& env
, const bc::ResolveClsMethodD
& op
) {
4188 push(env
, TClsMeth
);
4191 void in(ISS
& env
, const bc::ResolveClsMethodS
& op
) {
4192 resolveClsMethodSImpl
<false>(env
, op
.subop1
, op
.str2
);
4195 void in(ISS
& env
, const bc::ResolveRClsMethod
&) {
4198 push(env
, union_of(TClsMeth
, TRClsMeth
));
4201 void in(ISS
& env
, const bc::ResolveRClsMethodD
&) {
4203 push(env
, union_of(TClsMeth
, TRClsMeth
));
4206 void in(ISS
& env
, const bc::ResolveRClsMethodS
& op
) {
4207 resolveClsMethodSImpl
<true>(env
, op
.subop1
, op
.str2
);
4210 void in(ISS
& env
, const bc::ResolveClass
& op
) {
4212 auto cls
= env
.index
.resolve_class(env
.ctx
, op
.str1
);
4213 if (cls
&& cls
->resolved()) {
4214 push(env
, clsExact(*cls
));
4216 // If the class is not resolved,
4217 // it might not be unique or it might not be a valid classname.
4218 push(env
, union_of(TArrKey
, TCls
, TLazyCls
));
4222 void in(ISS
& env
, const bc::LazyClass
& op
) {
4224 push(env
, lazyclsval(op
.str1
));
4227 void in(ISS
& env
, const bc::FCallObjMethodD
& op
) {
4228 if (op
.fca
.hasGenerics()) {
4229 auto const tsList
= topC(env
);
4230 if (!tsList
.couldBe(BVec
)) {
4231 return unreachable(env
);
4234 auto const input
= topC(env
, op
.fca
.numInputs() + 1);
4235 auto const clsTy
= input
.couldBe(BObj
)
4236 ? objcls(intersection_of(input
, TObj
))
4238 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, op
.str4
);
4239 if (!rfunc
.couldHaveReifiedGenerics()) {
4243 bc::FCallObjMethodD
{
4244 op
.fca
.withoutGenerics(), op
.str2
, op
.subop3
, op
.str4
}
4249 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4250 if (!clsHint
) clsHint
= op
.str2
;
4251 return bc::FCallObjMethodD
{ std::move(fca
), clsHint
, op
.subop3
, op
.str4
};
4254 env
, op
.fca
, op
.str4
,
4255 op
.subop3
== ObjMethodOp::NullThrows
,
4256 false, false, op
.fca
.numInputs() + 1,
4261 void in(ISS
& env
, const bc::FCallObjMethod
& op
) {
4262 auto const methName
= getNameFromType(topC(env
));
4265 fcallUnknownImpl(env
, op
.fca
);
4269 auto const input
= topC(env
, op
.fca
.numInputs() + 2);
4270 auto const clsTy
= input
.couldBe(BObj
)
4271 ? objcls(intersection_of(input
, TObj
))
4273 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, methName
);
4274 if (!rfunc
.mightCareAboutDynCalls()) {
4278 bc::FCallObjMethodD
{ op
.fca
, op
.str2
, op
.subop3
, methName
}
4282 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4283 if (!clsHint
) clsHint
= op
.str2
;
4284 return bc::FCallObjMethod
{ std::move(fca
), clsHint
, op
.subop3
};
4287 env
, op
.fca
, methName
,
4288 op
.subop3
== ObjMethodOp::NullThrows
,
4289 true, true, op
.fca
.numInputs() + 2,
4296 template <typename Op
, class UpdateBC
>
4297 void fcallClsMethodImpl(ISS
& env
, const Op
& op
, Type clsTy
, SString methName
,
4298 bool dynamic
, uint32_t numExtraInputs
, SString clsHint
,
4299 UpdateBC updateBC
) {
4300 if (isBadContext(op
.fca
)) {
4301 if (op
.fca
.asyncEagerTarget() != NoBlockId
) {
4302 return reduce(env
, updateBC(op
.fca
.withoutAsyncEagerTarget()));
4304 for (auto i
= uint32_t{0}; i
< numExtraInputs
; ++i
) popC(env
);
4305 fcallUnknownImpl(env
, op
.fca
, TBottom
);
4310 auto const ctx
= getCallContext(env
, op
.fca
);
4311 auto const rfunc
= env
.index
.resolve_method(ctx
, clsTy
, methName
);
4313 auto const numInOut
= op
.fca
.enforceInOut()
4314 ? env
.index
.lookup_num_inout_params(env
.ctx
, rfunc
)
4317 if (fcallOptimizeChecks(env
, op
.fca
, rfunc
, updateBC
, numInOut
, false,
4319 fcallTryFold(env
, op
.fca
, rfunc
, clsTy
, dynamic
, numExtraInputs
)) {
4323 if (clsHint
&& rfunc
.exactFunc() && clsHint
->empty()) {
4324 return reduce(env
, updateBC(op
.fca
, rfunc
.exactFunc()->cls
->name
));
4327 fcallKnownImpl(env
, op
.fca
, rfunc
, clsTy
, false /* nullsafe */,
4328 numExtraInputs
, updateBC
, numInOut
);
4333 void in(ISS
& env
, const bc::FCallClsMethodD
& op
) {
4334 auto const rcls
= env
.index
.resolve_class(env
.ctx
, op
.str2
);
4335 auto const clsTy
= rcls
? clsExact(*rcls
) : TCls
;
4336 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, op
.str3
);
4338 if (op
.fca
.hasGenerics() && !rfunc
.couldHaveReifiedGenerics()) {
4342 bc::FCallClsMethodD
{
4343 op
.fca
.withoutGenerics(), op
.str2
, op
.str3
}
4347 if (auto const func
= rfunc
.exactFunc()) {
4348 assertx(func
->cls
!= nullptr);
4349 if (func
->cls
->name
->same(op
.str2
) &&
4350 optimize_builtin(env
, func
, op
.fca
)) {
4351 // When we use FCallBuiltin to call a static method, the litstr method
4352 // name will be a fully qualified cls::fn (e.g. "HH\Map::fromItems").
4354 // As a result, we can only do this optimization if the name of the
4355 // builtin function's class matches this op's class name immediate.
4360 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4361 return bc::FCallClsMethodD
{ std::move(fca
), op
.str2
, op
.str3
};
4363 fcallClsMethodImpl(env
, op
, clsTy
, op
.str3
, false, 0, nullptr, updateBC
);
4366 void in(ISS
& env
, const bc::FCallClsMethod
& op
) {
4367 auto const methName
= getNameFromType(topC(env
, 1));
4371 fcallUnknownImpl(env
, op
.fca
);
4375 auto const clsTy
= topC(env
);
4376 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, methName
);
4377 auto const skipLogAsDynamicCall
=
4378 !RuntimeOption::EvalLogKnownMethodsAsDynamicCalls
&&
4379 op
.subop3
== IsLogAsDynamicCallOp::DontLogAsDynamicCall
;
4380 if (is_specialized_cls(clsTy
) && dcls_of(clsTy
).isExact() &&
4381 (!rfunc
.mightCareAboutDynCalls() || skipLogAsDynamicCall
)) {
4382 auto const clsName
= dcls_of(clsTy
).cls().name();
4387 bc::FCallClsMethodD
{ op
.fca
, clsName
, methName
}
4391 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4392 if (!clsHint
) clsHint
= op
.str2
;
4393 return bc::FCallClsMethod
{ std::move(fca
), clsHint
, op
.subop3
};
4395 fcallClsMethodImpl(env
, op
, clsTy
, methName
, true, 2, op
.str2
, updateBC
);
4398 void in(ISS
& env
, const bc::FCallClsMethodM
& op
) {
4399 auto const t
= topC(env
);
4400 if (!t
.couldBe(BObj
| BCls
| BStr
| BLazyCls
)) {
4401 if (op
.fca
.asyncEagerTarget() != NoBlockId
) {
4402 // Kill the async eager target if the function never returns.
4405 bc::FCallClsMethodM
{
4406 op
.fca
.withoutAsyncEagerTarget(),
4414 fcallUnknownImpl(env
, op
.fca
, TBottom
);
4418 auto const clsTy
= [&] {
4419 if (t
.subtypeOf(BCls
)) return t
;
4420 if (t
.subtypeOf(BObj
)) {
4423 if (auto const clsname
= getNameFromType(t
)) {
4424 if (auto const rcls
= env
.index
.resolve_class(env
.ctx
, clsname
)) {
4425 return clsExact(*rcls
);
4430 auto const methName
= op
.str4
;
4431 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, methName
);
4432 auto const maybeDynamicCall
=
4433 RuntimeOption::EvalEmitClassPointers
== 0 || t
.couldBe(TStr
);
4434 auto const skipLogAsDynamicCall
=
4435 !RuntimeOption::EvalLogKnownMethodsAsDynamicCalls
&&
4436 op
.subop3
== IsLogAsDynamicCallOp::DontLogAsDynamicCall
;
4437 if (is_specialized_cls(clsTy
) && dcls_of(clsTy
).isExact() &&
4438 (!rfunc
.mightCareAboutDynCalls() ||
4439 !maybeDynamicCall
||
4440 skipLogAsDynamicCall
4443 auto const clsName
= dcls_of(clsTy
).cls().name();
4447 bc::FCallClsMethodD
{ op
.fca
, clsName
, methName
}
4451 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4452 if (!clsHint
) clsHint
= op
.str2
;
4453 return bc::FCallClsMethodM
{ std::move(fca
), clsHint
, op
.subop3
, methName
};
4455 fcallClsMethodImpl(env
, op
, clsTy
, methName
, maybeDynamicCall
, 1, op
.str2
, updateBC
);
4460 template <typename Op
, class UpdateBC
>
4461 void fcallClsMethodSImpl(ISS
& env
, const Op
& op
, SString methName
, bool dynamic
,
4462 bool extraInput
, UpdateBC updateBC
) {
4463 auto const clsTy
= specialClsRefToCls(env
, op
.subop3
);
4464 if (is_specialized_cls(clsTy
) && dcls_of(clsTy
).isExact() &&
4465 !dynamic
&& op
.subop3
== SpecialClsRef::LateBoundCls
) {
4466 auto const clsName
= dcls_of(clsTy
).cls().name();
4467 reduce(env
, bc::FCallClsMethodD
{ op
.fca
, clsName
, methName
});
4471 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, methName
);
4473 auto const numInOut
= op
.fca
.enforceInOut()
4474 ? env
.index
.lookup_num_inout_params(env
.ctx
, rfunc
)
4477 auto const numExtraInputs
= extraInput
? 1 : 0;
4478 if (fcallOptimizeChecks(env
, op
.fca
, rfunc
, updateBC
, numInOut
, false,
4480 fcallTryFold(env
, op
.fca
, rfunc
, ctxCls(env
), dynamic
,
4485 if (rfunc
.exactFunc() && op
.str2
->empty()) {
4486 return reduce(env
, updateBC(op
.fca
, rfunc
.exactFunc()->cls
->name
));
4489 fcallKnownImpl(env
, op
.fca
, rfunc
, ctxCls(env
), false /* nullsafe */,
4490 extraInput
? 1 : 0, updateBC
, numInOut
);
4495 void in(ISS
& env
, const bc::FCallClsMethodSD
& op
) {
4496 if (op
.fca
.hasGenerics()) {
4497 auto const clsTy
= specialClsRefToCls(env
, op
.subop3
);
4498 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, op
.str4
);
4499 if (!rfunc
.couldHaveReifiedGenerics()) {
4503 bc::FCallClsMethodSD
{
4504 op
.fca
.withoutGenerics(), op
.str2
, op
.subop3
, op
.str4
}
4509 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4510 if (!clsHint
) clsHint
= op
.str2
;
4511 return bc::FCallClsMethodSD
{ std::move(fca
), clsHint
, op
.subop3
, op
.str4
};
4513 fcallClsMethodSImpl(env
, op
, op
.str4
, false, false, updateBC
);
4516 void in(ISS
& env
, const bc::FCallClsMethodS
& op
) {
4517 auto const methName
= getNameFromType(topC(env
));
4520 fcallUnknownImpl(env
, op
.fca
);
4524 auto const clsTy
= specialClsRefToCls(env
, op
.subop3
);
4525 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, methName
);
4526 if (!rfunc
.mightCareAboutDynCalls() && !rfunc
.couldHaveReifiedGenerics()) {
4530 bc::FCallClsMethodSD
{ op
.fca
, op
.str2
, op
.subop3
, methName
}
4534 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4535 if (!clsHint
) clsHint
= op
.str2
;
4536 return bc::FCallClsMethodS
{ std::move(fca
), clsHint
, op
.subop3
};
4538 fcallClsMethodSImpl(env
, op
, methName
, true, true, updateBC
);
4543 void newObjDImpl(ISS
& env
, const StringData
* className
, bool rflavor
) {
4544 auto const rcls
= env
.index
.resolve_class(env
.ctx
, className
);
4546 if (rflavor
) popC(env
);
4550 if (rflavor
&& !rcls
->couldHaveReifiedGenerics()) {
4551 return reduce(env
, bc::PopC
{}, bc::NewObjD
{ className
});
4553 auto const isCtx
= !rcls
->couldBeOverriden() && env
.ctx
.cls
&&
4554 rcls
->same(env
.index
.resolve_class(env
.ctx
.cls
));
4555 if (rflavor
) popC(env
);
4556 push(env
, setctx(objExact(*rcls
), isCtx
));
4561 void in(ISS
& env
, const bc::NewObjD
& op
) { newObjDImpl(env
, op
.str1
, false); }
4562 void in(ISS
& env
, const bc::NewObjRD
& op
) { newObjDImpl(env
, op
.str1
, true); }
4564 void in(ISS
& env
, const bc::NewObjS
& op
) {
4565 auto const cls
= specialClsRefToCls(env
, op
.subop1
);
4566 if (!is_specialized_cls(cls
)) {
4571 auto const& dcls
= dcls_of(cls
);
4572 if (dcls
.isExact() && !dcls
.cls().couldHaveReifiedGenerics() &&
4573 (!dcls
.cls().couldBeOverriden() ||
4574 equivalently_refined(cls
, unctx(cls
)))) {
4575 return reduce(env
, bc::NewObjD
{ dcls
.cls().name() });
4578 push(env
, toobj(cls
));
4581 void in(ISS
& env
, const bc::NewObj
& op
) {
4582 auto const cls
= topC(env
);
4583 if (!cls
.subtypeOf(BCls
) || !is_specialized_cls(cls
)) {
4589 auto const& dcls
= dcls_of(cls
);
4590 if (dcls
.isExact() && !dcls
.cls().mightCareAboutDynConstructs()) {
4594 bc::NewObjD
{ dcls
.cls().name() }
4599 push(env
, toobj(cls
));
4602 void in(ISS
& env
, const bc::NewObjR
& op
) {
4603 auto const generics
= topC(env
);
4604 auto const cls
= topC(env
, 1);
4606 if (generics
.subtypeOf(BInitNull
)) {
4614 if (!cls
.subtypeOf(BCls
) || !is_specialized_cls(cls
)) {
4621 auto const& dcls
= dcls_of(cls
);
4622 if (dcls
.isExact() && !dcls
.cls().couldHaveReifiedGenerics()) {
4632 push(env
, toobj(cls
));
4637 bool objMightHaveConstProps(const Type
& t
) {
4638 assertx(t
.subtypeOf(BObj
));
4639 assertx(is_specialized_obj(t
));
4640 auto const& dobj
= dobj_of(t
);
4641 if (dobj
.isExact()) return dobj
.cls().couldHaveConstProp();
4642 if (dobj
.isSub()) return dobj
.cls().subCouldHaveConstProp();
4643 for (auto const cls
: dobj
.isect()) {
4644 if (!cls
.subCouldHaveConstProp()) return false;
4651 void in(ISS
& env
, const bc::FCallCtor
& op
) {
4652 auto const obj
= topC(env
, op
.fca
.numInputs() + 1);
4653 assertx(op
.fca
.numRets() == 1);
4655 if (!is_specialized_obj(obj
)) {
4656 return fcallUnknownImpl(env
, op
.fca
);
4659 if (op
.fca
.lockWhileUnwinding() && !objMightHaveConstProps(obj
)) {
4661 env
, bc::FCallCtor
{ op
.fca
.withoutLockWhileUnwinding(), op
.str2
}
4665 auto const& dobj
= dobj_of(obj
);
4666 auto const rfunc
= env
.index
.resolve_ctor(env
.ctx
, dobj
.cls(), dobj
.isExact());
4668 return fcallUnknownImpl(env
, op
.fca
);
4671 auto const updateFCA
= [&] (FCallArgs
&& fca
) {
4672 return bc::FCallCtor
{ std::move(fca
), op
.str2
};
4675 auto const numInOut
= op
.fca
.enforceInOut()
4676 ? env
.index
.lookup_num_inout_params(env
.ctx
, *rfunc
)
4679 auto const canFold
= obj
.subtypeOf(BObj
);
4680 if (fcallOptimizeChecks(env
, op
.fca
, *rfunc
, updateFCA
, numInOut
, false, 0) ||
4681 (canFold
&& fcallTryFold(env
, op
.fca
, *rfunc
,
4682 obj
, false /* dynamic */, 0))) {
4686 if (rfunc
->exactFunc() && op
.str2
->empty()) {
4687 // We've found the exact func that will be called, set the hint.
4688 return reduce(env
, bc::FCallCtor
{ op
.fca
, rfunc
->exactFunc()->cls
->name
});
4691 fcallKnownImpl(env
, op
.fca
, *rfunc
, obj
, false /* nullsafe */, 0,
4692 updateFCA
, numInOut
);
4695 void in(ISS
& env
, const bc::LockObj
& op
) {
4696 auto const t
= topC(env
);
4699 return push(env
, t
);
4701 if (!t
.subtypeOf(BObj
)) return bail();
4702 if (!is_specialized_obj(t
) || objMightHaveConstProps(t
)) {
4711 // baseLoc is NoLocalId for non-local iterators.
4712 void iterInitImpl(ISS
& env
, IterArgs ita
, BlockId target
, LocalId baseLoc
) {
4713 auto const local
= baseLoc
!= NoLocalId
;
4714 auto const sourceLoc
= local
? baseLoc
: topStkLocal(env
);
4715 auto const base
= local
? locAsCell(env
, baseLoc
) : topC(env
);
4716 auto ity
= iter_types(base
);
4718 auto const fallthrough
= [&] {
4719 auto const baseCannotBeObject
= !base
.couldBe(BObj
);
4720 setIter(env
, ita
.iterId
, LiveIter
{ ity
, sourceLoc
, NoLocalId
, env
.bid
,
4721 false, baseCannotBeObject
});
4722 // Do this after setting the iterator, in case it clobbers the base local
4724 setLoc(env
, ita
.valId
, std::move(ity
.value
));
4726 setLoc(env
, ita
.keyId
, std::move(ity
.key
));
4727 setIterKey(env
, ita
.iterId
, ita
.keyId
);
4731 assertx(iterIsDead(env
, ita
.iterId
));
4733 if (!ity
.mayThrowOnInit
) {
4734 if (ity
.count
== IterTypes::Count::Empty
&& will_reduce(env
)) {
4738 reduce(env
, bc::PopC
{});
4740 return jmp_setdest(env
, target
);
4745 if (!local
) popC(env
);
4747 switch (ity
.count
) {
4748 case IterTypes::Count::Empty
:
4749 mayReadLocal(env
, ita
.valId
);
4750 if (ita
.hasKey()) mayReadLocal(env
, ita
.keyId
);
4751 jmp_setdest(env
, target
);
4753 case IterTypes::Count::Single
:
4754 case IterTypes::Count::NonEmpty
:
4756 return jmp_nevertaken(env
);
4757 case IterTypes::Count::ZeroOrOne
:
4758 case IterTypes::Count::Any
:
4759 // Take the branch before setting locals if the iter is already
4760 // empty, but after popping. Similar for the other IterInits
4762 env
.propagate(target
, &env
.state
);
4766 always_assert(false);
4769 // baseLoc is NoLocalId for non-local iterators.
4770 void iterNextImpl(ISS
& env
, IterArgs ita
, BlockId target
, LocalId baseLoc
) {
4771 auto const curVal
= peekLocRaw(env
, ita
.valId
);
4772 auto const curKey
= ita
.hasKey() ? peekLocRaw(env
, ita
.keyId
) : TBottom
;
4774 auto noThrow
= false;
4775 auto const noTaken
= match
<bool>(
4776 env
.state
.iters
[ita
.iterId
],
4778 always_assert(false && "IterNext on dead iter");
4781 [&] (const LiveIter
& ti
) {
4782 if (!ti
.types
.mayThrowOnNext
) noThrow
= true;
4783 if (ti
.baseLocal
!= NoLocalId
) hasInvariantIterBase(env
);
4784 switch (ti
.types
.count
) {
4785 case IterTypes::Count::Single
:
4786 case IterTypes::Count::ZeroOrOne
:
4788 case IterTypes::Count::NonEmpty
:
4789 case IterTypes::Count::Any
:
4790 setLoc(env
, ita
.valId
, ti
.types
.value
);
4792 setLoc(env
, ita
.keyId
, ti
.types
.key
);
4793 setIterKey(env
, ita
.iterId
, ita
.keyId
);
4796 case IterTypes::Count::Empty
:
4797 always_assert(false);
4803 if (noTaken
&& noThrow
&& will_reduce(env
)) {
4804 auto const iterId
= safe_cast
<IterId
>(ita
.iterId
);
4805 return baseLoc
== NoLocalId
4806 ? reduce(env
, bc::IterFree
{ iterId
})
4807 : reduce(env
, bc::LIterFree
{ iterId
, baseLoc
});
4810 mayReadLocal(env
, baseLoc
);
4811 mayReadLocal(env
, ita
.valId
);
4812 if (ita
.hasKey()) mayReadLocal(env
, ita
.keyId
);
4814 if (noThrow
) nothrow(env
);
4817 jmp_nevertaken(env
);
4818 freeIter(env
, ita
.iterId
);
4822 env
.propagate(target
, &env
.state
);
4824 freeIter(env
, ita
.iterId
);
4825 setLocRaw(env
, ita
.valId
, curVal
);
4826 if (ita
.hasKey()) setLocRaw(env
, ita
.keyId
, curKey
);
4831 void in(ISS
& env
, const bc::IterInit
& op
) {
4832 iterInitImpl(env
, op
.ita
, op
.target2
, NoLocalId
);
4835 void in(ISS
& env
, const bc::LIterInit
& op
) {
4836 iterInitImpl(env
, op
.ita
, op
.target3
, op
.loc2
);
4839 void in(ISS
& env
, const bc::IterNext
& op
) {
4840 iterNextImpl(env
, op
.ita
, op
.target2
, NoLocalId
);
4843 void in(ISS
& env
, const bc::LIterNext
& op
) {
4844 iterNextImpl(env
, op
.ita
, op
.target3
, op
.loc2
);
4847 void in(ISS
& env
, const bc::IterFree
& op
) {
4848 // IterFree is used for weak iterators too, so we can't assert !iterIsDead.
4849 auto const isNop
= match
<bool>(
4850 env
.state
.iters
[op
.iter1
],
4854 [&] (const LiveIter
& ti
) {
4855 if (ti
.baseLocal
!= NoLocalId
) hasInvariantIterBase(env
);
4860 if (isNop
&& will_reduce(env
)) return reduce(env
);
4863 freeIter(env
, op
.iter1
);
4866 void in(ISS
& env
, const bc::LIterFree
& op
) {
4868 mayReadLocal(env
, op
.loc2
);
4869 freeIter(env
, op
.iter1
);
4873 * Any include/require (or eval) op kills all locals, and private properties.
4875 void inclOpImpl(ISS
& env
) {
4879 killPrivateStatics(env
);
4880 push(env
, TInitCell
);
4883 void in(ISS
& env
, const bc::Incl
&) { inclOpImpl(env
); }
4884 void in(ISS
& env
, const bc::InclOnce
&) { inclOpImpl(env
); }
4885 void in(ISS
& env
, const bc::Req
&) { inclOpImpl(env
); }
4886 void in(ISS
& env
, const bc::ReqOnce
&) { inclOpImpl(env
); }
4887 void in(ISS
& env
, const bc::ReqDoc
&) { inclOpImpl(env
); }
4888 void in(ISS
& env
, const bc::Eval
&) { inclOpImpl(env
); }
4890 void in(ISS
& env
, const bc::This
&) {
4891 if (thisAvailable(env
)) {
4892 return reduce(env
, bc::BareThis
{ BareThisOp::NeverNull
});
4894 auto const ty
= thisTypeNonNull(env
);
4895 push(env
, ty
, StackThisId
);
4896 setThisAvailable(env
);
4897 if (ty
.subtypeOf(BBottom
)) unreachable(env
);
4900 void in(ISS
& env
, const bc::LateBoundCls
& op
) {
4901 if (env
.ctx
.cls
) effect_free(env
);
4902 auto const ty
= selfCls(env
);
4903 push(env
, setctx(ty
? *ty
: TCls
));
4906 void in(ISS
& env
, const bc::CheckThis
&) {
4907 if (thisAvailable(env
)) {
4910 setThisAvailable(env
);
4913 void in(ISS
& env
, const bc::BareThis
& op
) {
4914 if (thisAvailable(env
)) {
4915 if (op
.subop1
!= BareThisOp::NeverNull
) {
4916 return reduce(env
, bc::BareThis
{ BareThisOp::NeverNull
});
4920 auto const ty
= thisType(env
);
4921 switch (op
.subop1
) {
4922 case BareThisOp::Notice
:
4924 case BareThisOp::NoNotice
:
4927 case BareThisOp::NeverNull
:
4928 setThisAvailable(env
);
4929 if (!env
.state
.unreachable
) effect_free(env
);
4930 return push(env
, ty
, StackThisId
);
4933 push(env
, ty
, StackThisId
);
4937 * Amongst other things, we use this to mark units non-persistent.
4939 void in(ISS
& env
, const bc::OODeclExists
& op
) {
4940 auto flag
= popC(env
);
4941 auto name
= popC(env
);
4943 if (!name
.strictSubtypeOf(TStr
)) return TBool
;
4944 auto const v
= tv(name
);
4945 if (!v
) return TBool
;
4946 auto rcls
= env
.index
.resolve_class(env
.ctx
, v
->m_data
.pstr
);
4947 if (!rcls
|| !rcls
->cls()) return TFalse
;
4948 auto const exist
= [&] () -> bool {
4949 switch (op
.subop1
) {
4950 case OODeclExistsOp::Class
:
4951 return !(rcls
->cls()->attrs
& (AttrInterface
| AttrTrait
));
4952 case OODeclExistsOp::Interface
:
4953 return rcls
->cls()->attrs
& AttrInterface
;
4954 case OODeclExistsOp::Trait
:
4955 return rcls
->cls()->attrs
& AttrTrait
;
4960 return exist
? TTrue
: TFalse
;
4965 bool couldBeMocked(const Type
& t
) {
4966 auto const dcls
= [&] () -> const DCls
* {
4967 if (is_specialized_cls(t
)) {
4969 } else if (is_specialized_obj(t
)) {
4974 // In practice this should not occur since this is used mostly on
4975 // the result of looked up type constraints.
4976 if (!dcls
) return true;
4977 if (!dcls
->isIsect()) return dcls
->cls().couldBeMocked();
4978 for (auto const cls
: dcls
->isect()) {
4979 if (!cls
.couldBeMocked()) return false;
4985 using TCVec
= std::vector
<const TypeConstraint
*>;
4987 void in(ISS
& env
, const bc::VerifyParamType
& op
) {
4988 IgnoreUsedParams _
{env
};
4990 if (env
.ctx
.func
->isMemoizeImpl
) {
4991 // a MemoizeImpl's params have already been checked by the wrapper
4995 auto [newTy
, effectFree
] =
4996 env
.index
.verify_param_type(env
.ctx
, op
.loc1
, locAsCell(env
, op
.loc1
));
4998 if (effectFree
) return reduce(env
);
4999 if (newTy
.subtypeOf(BBottom
)) unreachable(env
);
5001 setLoc(env
, op
.loc1
, std::move(newTy
));
5004 void in(ISS
& env
, const bc::VerifyParamTypeTS
& op
) {
5005 IgnoreUsedParams _
{env
};
5007 auto const a
= topC(env
);
5008 if (!a
.couldBe(BDict
)) {
5013 auto const constraint
= env
.ctx
.func
->params
[op
.loc1
].typeConstraint
;
5014 // TODO(T31677864): We are being extremely pessimistic here, relax it
5015 if (!env
.ctx
.func
->isReified
&&
5016 (!env
.ctx
.cls
|| !env
.ctx
.cls
->hasReifiedGenerics
) &&
5017 !env
.index
.could_have_reified_type(env
.ctx
, constraint
)) {
5018 return reduce(env
, bc::PopC
{}, bc::VerifyParamType
{ op
.loc1
});
5021 if (auto const inputTS
= tv(a
)) {
5022 if (!isValidTSType(*inputTS
, false)) {
5027 auto const resolvedTS
=
5028 resolve_type_structure(env
, inputTS
->m_data
.parr
).sarray();
5029 if (resolvedTS
&& resolvedTS
!= inputTS
->m_data
.parr
) {
5030 reduce(env
, bc::PopC
{});
5031 reduce(env
, bc::Dict
{ resolvedTS
});
5032 reduce(env
, bc::VerifyParamTypeTS
{ op
.loc1
});
5035 if (shouldReduceToNonReifiedVerifyType(env
, inputTS
->m_data
.parr
)) {
5036 return reduce(env
, bc::PopC
{}, bc::VerifyParamType
{ op
.loc1
});
5039 if (auto const last
= last_op(env
)) {
5040 if (last
->op
== Op::CombineAndResolveTypeStruct
) {
5041 if (auto const last2
= last_op(env
, 1)) {
5042 if (last2
->op
== Op::Dict
&&
5043 shouldReduceToNonReifiedVerifyType(env
, last2
->Dict
.arr1
)) {
5044 return reduce(env
, bc::PopC
{}, bc::VerifyParamType
{ op
.loc1
});
5049 mayReadLocal(env
, op
.loc1
);
5053 void verifyRetImpl(ISS
& env
, const TCVec
& tcs
,
5054 bool reduce_this
, bool ts_flavor
) {
5055 // If it is the ts flavor, then second thing on the stack, otherwise first
5056 auto stackT
= topC(env
, (int)ts_flavor
);
5057 auto const stackEquiv
= topStkEquiv(env
, (int)ts_flavor
);
5059 // If there is no return type constraint, or if the return type
5060 // constraint is a typevar, or if the top of stack is the same or a
5061 // subtype of the type constraint, then this is a no-op, unless
5062 // reified types could be involved.
5063 if (std::all_of(std::begin(tcs
), std::end(tcs
),
5064 [&](const TypeConstraint
* tc
) {
5065 return env
.index
.satisfies_constraint(env
.ctx
, stackT
, *tc
);
5068 // we wouldn't get here if reified types were definitely not
5069 // involved, so just bail.
5072 push(env
, std::move(stackT
), stackEquiv
);
5078 std::vector
<Type
> constraintTypes
;
5079 auto dont_reduce
= false;
5081 for (auto const& constraint
: tcs
) {
5082 // When the constraint is not soft.
5083 // We can safely assume that either VerifyRetTypeC will
5084 // throw or it will produce a value whose type is compatible with the
5085 // return type constraint.
5086 auto tcT
= remove_uninit(env
.index
.lookup_constraint(env
.ctx
, *constraint
));
5087 constraintTypes
.push_back(tcT
);
5089 // In some circumstances, verifyRetType can modify the type. If it
5090 // does that we can't reduce even when we know it succeeds.
5091 // VerifyRetType will convert a TCls to a TStr implicitly
5092 // (and possibly warn)
5093 if (tcT
.couldBe(BStr
) && stackT
.couldBe(BCls
| BLazyCls
)) {
5098 // If the constraint is soft, then there are no optimizations we can safely
5099 // do here, so just leave the top of stack as is.
5100 if (constraint
->isSoft() ||
5101 (RuntimeOption::EvalEnforceGenericsUB
< 2 &&
5102 constraint
->isUpperBound()))
5104 if (ts_flavor
) popC(env
);
5106 push(env
, std::move(stackT
), stackEquiv
);
5111 // In cases where we have a `this` hint where stackT is an TOptObj known to
5112 // be this, we can replace the check with a non null check. These cases are
5113 // likely from a BareThis that could return Null. Since the runtime will
5114 // split these translations, it will rarely in practice return null.
5117 stackT
.couldBe(BInitNull
) &&
5118 !stackT
.subtypeOf(BInitNull
) &&
5119 std::all_of(std::begin(tcs
), std::end(tcs
),
5120 [&](const TypeConstraint
* constraint
) {
5121 return constraint
->isThis() &&
5122 !constraint
->isNullable() &&
5123 env
.index
.satisfies_constraint(
5124 env
.ctx
, unopt(stackT
), *constraint
);
5129 return reduce(env
, bc::PopC
{}, bc::VerifyRetNonNullC
{});
5131 return reduce(env
, bc::VerifyRetNonNullC
{});
5134 auto retT
= std::move(stackT
);
5135 for (auto& tcT
: constraintTypes
) {
5136 retT
= intersection_of(std::move(tcT
), std::move(retT
));
5137 if (retT
.subtypeOf(BBottom
)) {
5139 if (ts_flavor
) popC(env
); // the type structure
5144 if (ts_flavor
) popC(env
); // the type structure
5146 push(env
, std::move(retT
));
5149 void in(ISS
& env
, const bc::VerifyOutType
& op
) {
5151 auto const& pinfo
= env
.ctx
.func
->params
[op
.loc1
];
5152 tcs
.push_back(&pinfo
.typeConstraint
);
5153 for (auto const& t
: pinfo
.upperBounds
) tcs
.push_back(&t
);
5154 verifyRetImpl(env
, tcs
, false, false);
5157 void in(ISS
& env
, const bc::VerifyRetTypeC
& /*op*/) {
5159 tcs
.push_back(&env
.ctx
.func
->retTypeConstraint
);
5160 for (auto const& t
: env
.ctx
.func
->returnUBs
) tcs
.push_back(&t
);
5161 verifyRetImpl(env
, tcs
, true, false);
5164 void in(ISS
& env
, const bc::VerifyRetTypeTS
& /*op*/) {
5165 auto const a
= topC(env
);
5166 if (!a
.couldBe(BDict
)) {
5171 auto const constraint
= env
.ctx
.func
->retTypeConstraint
;
5172 // TODO(T31677864): We are being extremely pessimistic here, relax it
5173 if (!env
.ctx
.func
->isReified
&&
5174 (!env
.ctx
.cls
|| !env
.ctx
.cls
->hasReifiedGenerics
) &&
5175 !env
.index
.could_have_reified_type(env
.ctx
, constraint
)) {
5176 return reduce(env
, bc::PopC
{}, bc::VerifyRetTypeC
{});
5178 if (auto const inputTS
= tv(a
)) {
5179 if (!isValidTSType(*inputTS
, false)) {
5184 auto const resolvedTS
=
5185 resolve_type_structure(env
, inputTS
->m_data
.parr
).sarray();
5186 if (resolvedTS
&& resolvedTS
!= inputTS
->m_data
.parr
) {
5187 reduce(env
, bc::PopC
{});
5188 reduce(env
, bc::Dict
{ resolvedTS
});
5189 reduce(env
, bc::VerifyRetTypeTS
{});
5192 if (shouldReduceToNonReifiedVerifyType(env
, inputTS
->m_data
.parr
)) {
5193 return reduce(env
, bc::PopC
{}, bc::VerifyRetTypeC
{});
5196 if (auto const last
= last_op(env
)) {
5197 if (last
->op
== Op::CombineAndResolveTypeStruct
) {
5198 if (auto const last2
= last_op(env
, 1)) {
5199 if (last2
->op
== Op::Dict
&&
5200 shouldReduceToNonReifiedVerifyType(env
, last2
->Dict
.arr1
)) {
5201 return reduce(env
, bc::PopC
{}, bc::VerifyRetTypeC
{});
5206 TCVec tcs
{&constraint
};
5207 for (auto const& t
: env
.ctx
.func
->returnUBs
) tcs
.push_back(&t
);
5208 verifyRetImpl(env
, tcs
, true, true);
5211 void in(ISS
& env
, const bc::VerifyRetNonNullC
& /*op*/) {
5212 auto const constraint
= env
.ctx
.func
->retTypeConstraint
;
5213 if (constraint
.isSoft()) {
5217 auto stackT
= topC(env
);
5219 if (!stackT
.couldBe(BInitNull
)) {
5224 if (stackT
.subtypeOf(BNull
)) return unreachable(env
);
5226 auto const equiv
= topStkEquiv(env
);
5228 stackT
= unopt(std::move(stackT
));
5231 push(env
, stackT
, equiv
);
5234 void in(ISS
& env
, const bc::SelfCls
&) {
5235 auto const self
= selfClsExact(env
);
5244 void in(ISS
& env
, const bc::ParentCls
&) {
5245 auto const parent
= parentClsExact(env
);
5254 void in(ISS
& env
, const bc::CreateCl
& op
) {
5255 auto const nargs
= op
.arg1
;
5256 auto const clsPair
= env
.index
.resolve_closure_class(env
.ctx
, op
.arg2
);
5259 * Every closure should have a unique allocation site, but we may see it
5260 * multiple times in a given round of analyzing this function. Each time we
5261 * may have more information about the used variables; the types should only
5262 * possibly grow. If it's already there we need to merge the used vars in
5263 * with what we saw last time.
5266 CompactVector
<Type
> usedVars(nargs
);
5267 for (auto i
= uint32_t{0}; i
< nargs
; ++i
) {
5268 usedVars
[nargs
- i
- 1] = unctx(popCU(env
));
5270 merge_closure_use_vars_into(
5271 env
.collect
.closureUseTypes
,
5279 if (env
.ctx
.cls
&& is_used_trait(*env
.ctx
.cls
)) {
5280 // Be pessimistic if we're within a trait. The closure will get
5281 // rescoped potentially multiple times at runtime.
5284 subObj(env
.index
.builtin_class(s_Closure
.get()))
5287 push(env
, objExact(clsPair
.first
));
5291 void in(ISS
& env
, const bc::CreateCont
& /*op*/) {
5292 // First resume is always next() which pushes null.
5293 push(env
, TInitNull
);
5296 void in(ISS
& env
, const bc::ContEnter
&) { popC(env
); push(env
, TInitCell
); }
5297 void in(ISS
& env
, const bc::ContRaise
&) { popC(env
); push(env
, TInitCell
); }
5299 void in(ISS
& env
, const bc::Yield
&) {
5301 push(env
, TInitCell
);
5304 void in(ISS
& env
, const bc::YieldK
&) {
5307 push(env
, TInitCell
);
5310 void in(ISS
& /*env*/, const bc::ContCheck
&) {}
5311 void in(ISS
& env
, const bc::ContValid
&) { push(env
, TBool
); }
5312 void in(ISS
& env
, const bc::ContKey
&) { push(env
, TInitCell
); }
5313 void in(ISS
& env
, const bc::ContCurrent
&) { push(env
, TInitCell
); }
5314 void in(ISS
& env
, const bc::ContGetReturn
&) { push(env
, TInitCell
); }
5316 void pushTypeFromWH(ISS
& env
, Type t
) {
5317 auto inner
= typeFromWH(t
);
5318 // The next opcode is unreachable if awaiting a non-object or WaitH<Bottom>.
5319 if (inner
.subtypeOf(BBottom
)) unreachable(env
);
5320 push(env
, std::move(inner
));
5323 void in(ISS
& env
, const bc::WHResult
&) {
5324 pushTypeFromWH(env
, popC(env
));
5327 void in(ISS
& env
, const bc::Await
&) {
5328 pushTypeFromWH(env
, popC(env
));
5331 void in(ISS
& env
, const bc::AwaitAll
& op
) {
5332 auto const equiv
= equivLocalRange(env
, op
.locrange
);
5333 if (equiv
!= op
.locrange
.first
) {
5336 bc::AwaitAll
{LocalRange
{equiv
, op
.locrange
.count
}}
5340 for (uint32_t i
= 0; i
< op
.locrange
.count
; ++i
) {
5341 mayReadLocal(env
, op
.locrange
.first
+ i
);
5344 push(env
, TInitNull
);
5347 void in(ISS
& env
, const bc::SetImplicitContextByValue
&) {
5349 push(env
, Type
{BObj
| BInitNull
});
5352 void in(ISS
& env
, const bc::Idx
&) {
5353 auto const def
= popC(env
);
5354 auto const [key
, promotion
] = promote_classlike_to_key(popC(env
));
5355 auto const base
= popC(env
);
5357 assertx(!def
.is(BBottom
));
5359 auto effectFree
= promotion
!= Promotion::YesMightThrow
;
5360 auto result
= TBottom
;
5362 auto const finish
= [&] {
5363 if (result
.is(BBottom
)) {
5364 assertx(!effectFree
);
5371 push(env
, std::move(result
));
5374 if (key
.couldBe(BNull
)) result
|= def
;
5375 if (key
.subtypeOf(BNull
)) return finish();
5377 if (!base
.subtypeOf(BArrLike
| BObj
| BStr
)) result
|= def
;
5379 if (base
.couldBe(BArrLike
)) {
5380 if (!key
.subtypeOf(BOptArrKey
)) effectFree
= false;
5381 if (key
.couldBe(BArrKey
)) {
5382 auto elem
= array_like_elem(
5384 key
.subtypeOf(BArrKey
) ? key
: intersection_of(key
, TArrKey
)
5386 result
|= std::move(elem
.first
);
5387 if (!elem
.second
) result
|= def
;
5390 if (base
.couldBe(BObj
)) {
5391 result
|= TInitCell
;
5394 if (base
.couldBe(BStr
)) {
5397 if (!key
.subtypeOf(BOptArrKey
)) effectFree
= false;
5403 void in(ISS
& env
, const bc::ArrayIdx
&) {
5404 auto def
= popC(env
);
5405 auto const [key
, promotion
] = promote_classlike_to_key(popC(env
));
5406 auto const base
= popC(env
);
5408 assertx(!def
.is(BBottom
));
5410 auto effectFree
= promotion
!= Promotion::YesMightThrow
;
5411 auto result
= TBottom
;
5413 auto const finish
= [&] {
5414 if (result
.is(BBottom
)) {
5415 assertx(!effectFree
);
5422 push(env
, std::move(result
));
5425 if (key
.couldBe(BNull
)) result
|= def
;
5426 if (key
.subtypeOf(BNull
)) return finish();
5428 if (!base
.subtypeOf(BArrLike
)) effectFree
= false;
5429 if (!base
.couldBe(BArrLike
)) return finish();
5431 if (!key
.subtypeOf(BOptArrKey
)) effectFree
= false;
5432 if (!key
.couldBe(BArrKey
)) return finish();
5434 auto elem
= array_like_elem(
5436 key
.subtypeOf(BArrKey
) ? key
: intersection_of(key
, TArrKey
)
5438 result
|= std::move(elem
.first
);
5439 if (!elem
.second
) result
|= std::move(def
);
5444 void implArrayMarkLegacy(ISS
& env
, bool legacy
) {
5445 auto const recursive
= popC(env
);
5446 auto const value
= popC(env
);
5448 if (auto const tv_recursive
= tv(recursive
)) {
5449 if (auto const tv_value
= tv(value
)) {
5450 if (tvIsBool(*tv_recursive
)) {
5451 auto const result
= eval_cell([&]{
5452 return val(*tv_recursive
).num
5453 ? arrprov::markTvRecursively(*tv_value
, legacy
)
5454 : arrprov::markTvShallow(*tv_value
, legacy
);
5466 // TODO(kshaunak): We could add some type info here.
5467 push(env
, TInitCell
);
5471 void in(ISS
& env
, const bc::ArrayMarkLegacy
&) {
5472 implArrayMarkLegacy(env
, true);
5475 void in(ISS
& env
, const bc::ArrayUnmarkLegacy
&) {
5476 implArrayMarkLegacy(env
, false);
5479 void in(ISS
& env
, const bc::CheckProp
&) {
5480 if (env
.ctx
.cls
->attrs
& AttrNoOverride
) {
5481 return reduce(env
, bc::False
{});
5487 void in(ISS
& env
, const bc::InitProp
& op
) {
5488 auto const t
= topC(env
);
5489 switch (op
.subop2
) {
5490 case InitPropOp::Static
:
5491 env
.index
.merge_static_type(
5493 env
.collect
.publicSPropMutations
,
5495 clsExact(env
.index
.resolve_class(env
.ctx
.cls
)),
5502 case InitPropOp::NonStatic
:
5503 mergeThisProp(env
, op
.str1
, t
);
5507 for (auto& prop
: env
.ctx
.func
->cls
->properties
) {
5508 if (prop
.name
!= op
.str1
) continue;
5510 ITRACE(1, "InitProp: {} = {}\n", op
.str1
, show(t
));
5512 if (env
.index
.satisfies_constraint(env
.ctx
, t
, prop
.typeConstraint
) &&
5513 std::all_of(prop
.ubs
.begin(), prop
.ubs
.end(),
5514 [&](TypeConstraint ub
) {
5515 applyFlagsToUB(ub
, prop
.typeConstraint
);
5516 return env
.index
.satisfies_constraint(env
.ctx
, t
, ub
);
5518 prop
.attrs
|= AttrInitialSatisfiesTC
;
5520 badPropInitialValue(env
);
5521 prop
.attrs
= (Attr
)(prop
.attrs
& ~AttrInitialSatisfiesTC
);
5525 auto const v
= tv(t
);
5526 if (v
|| !could_contain_objects(t
)) {
5527 prop
.attrs
= (Attr
)(prop
.attrs
& ~AttrDeepInit
);
5530 env
.index
.update_static_prop_init_val(env
.ctx
.func
->cls
, op
.str1
);
5531 return reduce(env
, bc::PopC
{});
5538 void in(ISS
& env
, const bc::Silence
& op
) {
5540 switch (op
.subop2
) {
5541 case SilenceOp::Start
:
5542 setLoc(env
, op
.loc1
, TInt
);
5544 case SilenceOp::End
:
5545 locRaw(env
, op
.loc1
);
5552 template <typename Op
, typename Rebind
>
5553 bool memoGetImpl(ISS
& env
, const Op
& op
, Rebind
&& rebind
) {
5554 always_assert(env
.ctx
.func
->isMemoizeWrapper
);
5555 always_assert(op
.locrange
.first
+ op
.locrange
.count
5556 <= env
.ctx
.func
->locals
.size());
5558 if (will_reduce(env
)) {
5559 // If we can use an equivalent, earlier range, then use that instead.
5560 auto const equiv
= equivLocalRange(env
, op
.locrange
);
5561 if (equiv
!= op
.locrange
.first
) {
5562 reduce(env
, rebind(LocalRange
{ equiv
, op
.locrange
.count
}));
5567 auto retTy
= memoizeImplRetType(env
);
5569 // MemoGet can raise if we give a non arr-key local, or if we're in a method
5570 // and $this isn't available.
5571 auto allArrKey
= true;
5572 for (uint32_t i
= 0; i
< op
.locrange
.count
; ++i
) {
5573 allArrKey
&= locRaw(env
, op
.locrange
.first
+ i
).subtypeOf(BArrKey
);
5576 (!env
.ctx
.func
->cls
||
5577 (env
.ctx
.func
->attrs
& AttrStatic
) ||
5578 thisAvailable(env
))) {
5579 if (will_reduce(env
)) {
5580 if (retTy
.first
.subtypeOf(BBottom
)) {
5582 jmp_setdest(env
, op
.target1
);
5585 // deal with constprop manually; otherwise we will propagate the
5586 // taken edge and *then* replace the MemoGet with a constant.
5588 if (auto v
= tv(retTy
.first
)) {
5589 reduce(env
, gen_constant(*v
));
5597 if (retTy
.first
== TBottom
) {
5598 jmp_setdest(env
, op
.target1
);
5602 env
.propagate(op
.target1
, &env
.state
);
5603 push(env
, std::move(retTy
.first
));
5609 void in(ISS
& env
, const bc::MemoGet
& op
) {
5612 [&] (const LocalRange
& l
) { return bc::MemoGet
{ op
.target1
, l
}; }
5616 void in(ISS
& env
, const bc::MemoGetEager
& op
) {
5617 always_assert(env
.ctx
.func
->isAsync
&& !env
.ctx
.func
->isGenerator
);
5619 auto const reduced
= memoGetImpl(
5621 [&] (const LocalRange
& l
) {
5622 return bc::MemoGetEager
{ op
.target1
, op
.target2
, l
};
5625 if (reduced
) return;
5627 env
.propagate(op
.target2
, &env
.state
);
5628 auto const t
= popC(env
);
5631 is_specialized_wait_handle(t
) ? wait_handle_inner(t
) : TInitCell
5637 template <typename Op
>
5638 void memoSetImpl(ISS
& env
, const Op
& op
) {
5639 always_assert(env
.ctx
.func
->isMemoizeWrapper
);
5640 always_assert(op
.locrange
.first
+ op
.locrange
.count
5641 <= env
.ctx
.func
->locals
.size());
5643 // If we can use an equivalent, earlier range, then use that instead.
5644 auto const equiv
= equivLocalRange(env
, op
.locrange
);
5645 if (equiv
!= op
.locrange
.first
) {
5648 Op
{ LocalRange
{ equiv
, op
.locrange
.count
} }
5652 // MemoSet can raise if we give a non arr-key local, or if we're in a method
5653 // and $this isn't available.
5654 auto allArrKey
= true;
5655 for (uint32_t i
= 0; i
< op
.locrange
.count
; ++i
) {
5656 allArrKey
&= locRaw(env
, op
.locrange
.first
+ i
).subtypeOf(BArrKey
);
5659 (!env
.ctx
.func
->cls
||
5660 (env
.ctx
.func
->attrs
& AttrStatic
) ||
5661 thisAvailable(env
))) {
5664 push(env
, popC(env
));
5669 void in(ISS
& env
, const bc::MemoSet
& op
) {
5670 memoSetImpl(env
, op
);
5673 void in(ISS
& env
, const bc::MemoSetEager
& op
) {
5674 always_assert(env
.ctx
.func
->isAsync
&& !env
.ctx
.func
->isGenerator
);
5675 memoSetImpl(env
, op
);
5682 //////////////////////////////////////////////////////////////////////
5684 void dispatch(ISS
& env
, const Bytecode
& op
) {
5685 #define O(opcode, ...) case Op::opcode: interp_step::in(env, op.opcode); return;
5686 switch (op
.op
) { OPCODES
}
5691 //////////////////////////////////////////////////////////////////////
5693 void interpStep(ISS
& env
, const Bytecode
& bc
) {
5694 ITRACE(2, " {} ({})\n",
5695 show(env
.ctx
.func
, bc
),
5696 env
.unchangedBcs
+ env
.replacedBcs
.size());
5699 // If there are throw exit edges, make a copy of the state (except
5700 // stacks) in case we need to propagate across throw exits (if
5702 if (!env
.stateBefore
&& env
.blk
.throwExit
!= NoBlockId
) {
5703 env
.stateBefore
.emplace(with_throwable_only(env
.index
, env
.state
));
5708 default_dispatch(env
, bc
);
5710 if (env
.flags
.reduced
) return;
5712 auto const_prop
= [&] {
5713 if (!options
.ConstantProp
|| !env
.flags
.canConstProp
) return false;
5715 auto const numPushed
= bc
.numPush();
5716 TinyVector
<TypedValue
> cells
;
5719 while (i
< numPushed
) {
5720 auto const v
= tv(topT(env
, i
));
5721 if (!v
) return false;
5722 cells
.push_back(*v
);
5726 if (env
.flags
.wasPEI
) {
5727 ITRACE(2, " nothrow (due to constprop)\n");
5728 env
.flags
.wasPEI
= false;
5730 if (!env
.flags
.effectFree
) {
5731 ITRACE(2, " effect_free (due to constprop)\n");
5732 env
.flags
.effectFree
= true;
5735 // If we're doing inline interp, don't actually perform the
5736 // constprop. If we do, we can infer static types that won't
5737 // actually exist at runtime.
5738 if (any(env
.collect
.opts
& CollectionOpts::Inlining
)) {
5739 ITRACE(2, " inlining, skipping actual constprop\n");
5745 auto const numPop
= bc
.numPop();
5746 for (auto j
= 0; j
< numPop
; j
++) {
5747 auto const flavor
= bc
.popFlavor(j
);
5748 if (flavor
== Flavor::C
) {
5749 interpStep(env
, bc::PopC
{});
5750 } else if (flavor
== Flavor::U
) {
5751 interpStep(env
, bc::PopU
{});
5753 assertx(flavor
== Flavor::CU
);
5754 auto const& popped
= topT(env
);
5755 if (popped
.subtypeOf(BUninit
)) {
5756 interpStep(env
, bc::PopU
{});
5758 assertx(popped
.subtypeOf(BInitCell
));
5759 interpStep(env
, bc::PopC
{});
5765 push(env
, from_cell(cells
[i
]));
5766 record(env
, gen_constant(cells
[i
]));
5775 assertx(!env
.flags
.effectFree
|| !env
.flags
.wasPEI
);
5776 if (env
.flags
.wasPEI
) {
5777 ITRACE(2, " PEI.\n");
5778 if (env
.stateBefore
) {
5779 env
.propagate(env
.blk
.throwExit
, &*env
.stateBefore
);
5782 env
.stateBefore
.reset();
5787 void interpOne(ISS
& env
, const Bytecode
& bc
) {
5788 env
.srcLoc
= bc
.srcLoc
;
5789 interpStep(env
, bc
);
5792 BlockId
speculate(Interp
& interp
) {
5793 auto low_water
= interp
.state
.stack
.size();
5795 interp
.collect
.opts
= interp
.collect
.opts
| CollectionOpts::Speculating
;
5797 interp
.collect
.opts
= interp
.collect
.opts
- CollectionOpts::Speculating
;
5800 auto failed
= false;
5801 ISS env
{ interp
, [&] (BlockId
, const State
*) { failed
= true; } };
5803 FTRACE(4, " Speculate B{}\n", interp
.bid
);
5804 for (auto const& bc
: interp
.blk
->hhbcs
) {
5805 assertx(!interp
.state
.unreachable
);
5806 auto const numPop
= bc
.numPop() +
5807 (bc
.op
== Op::CGetL2
? 1 :
5808 bc
.op
== Op::Dup
? -1 : 0);
5809 if (interp
.state
.stack
.size() - numPop
< low_water
) {
5810 low_water
= interp
.state
.stack
.size() - numPop
;
5815 env
.collect
.mInstrState
.clear();
5816 FTRACE(3, " Bailing from speculate because propagate was called\n");
5820 auto const& flags
= env
.flags
;
5821 if (!flags
.effectFree
) {
5822 env
.collect
.mInstrState
.clear();
5823 FTRACE(3, " Bailing from speculate because not effect free\n");
5827 assertx(!flags
.returned
);
5829 if (flags
.jmpDest
!= NoBlockId
&& interp
.state
.stack
.size() == low_water
) {
5830 FTRACE(2, " Speculate found target block {}\n", flags
.jmpDest
);
5831 return flags
.jmpDest
;
5835 if (interp
.state
.stack
.size() != low_water
) {
5837 " Bailing from speculate because the speculated block "
5838 "left items on the stack\n");
5842 if (interp
.blk
->fallthrough
== NoBlockId
) {
5844 " Bailing from speculate because there was no fallthrough");
5848 FTRACE(2, " Speculate found fallthrough block {}\n",
5849 interp
.blk
->fallthrough
);
5851 return interp
.blk
->fallthrough
;
5854 BlockId
speculateHelper(ISS
& env
, BlockId orig
, bool updateTaken
) {
5855 assertx(orig
!= NoBlockId
);
5857 if (!will_reduce(env
)) return orig
;
5859 auto const last
= last_op(env
);
5860 bool endsInControlFlow
= last
&& instrIsNonCallControlFlow(last
->op
);
5864 if (options
.RemoveDeadBlocks
) {
5865 State temp
{env
.state
, State::Compact
{}};
5867 auto const& func
= env
.ctx
.func
;
5868 auto const targetBlk
= func
.blocks()[target
].get();
5869 if (!targetBlk
->multiPred
) break;
5870 auto const ok
= [&] {
5871 switch (targetBlk
->hhbcs
.back().op
) {
5885 env
.index
, env
.ctx
, env
.collect
, target
, targetBlk
, temp
5888 auto const old_size
= temp
.stack
.size();
5889 auto const new_target
= speculate(interp
);
5890 if (new_target
== NoBlockId
) break;
5892 const ssize_t delta
= old_size
- temp
.stack
.size();
5893 assertx(delta
>= 0);
5894 if (delta
&& endsInControlFlow
) break;
5897 target
= new_target
;
5898 temp
.stack
.compact();
5902 if (endsInControlFlow
&& updateTaken
) {
5904 auto needsUpdate
= target
!= orig
;
5909 if (bid
!= orig
) needsUpdate
= true;
5914 auto& bc
= mutate_last_op(env
);
5917 [&] (BlockId
& bid
) {
5918 bid
= bid
== orig
? target
: NoBlockId
;
5925 auto const& popped
= topT(env
);
5926 if (popped
.subtypeOf(BInitCell
)) {
5927 interpStep(env
, bc::PopC
{});
5929 assertx(popped
.subtypeOf(BUninit
));
5930 interpStep(env
, bc::PopU
{});
5939 //////////////////////////////////////////////////////////////////////
5941 RunFlags
run(Interp
& interp
, const State
& in
, PropagateFn propagate
) {
5943 FTRACE(2, "out {}{}\n",
5944 state_string(*interp
.ctx
.func
, interp
.state
, interp
.collect
),
5945 property_state_string(interp
.collect
.props
));
5948 auto env
= ISS
{ interp
, propagate
};
5949 auto ret
= RunFlags
{};
5950 auto finish
= [&] (BlockId fallthrough
) {
5951 ret
.updateInfo
.fallthrough
= fallthrough
;
5952 ret
.updateInfo
.unchangedBcs
= env
.unchangedBcs
;
5953 ret
.updateInfo
.replacedBcs
= std::move(env
.replacedBcs
);
5957 BytecodeVec retryBcs
;
5958 auto retryOffset
= interp
.blk
->hhbcs
.size();
5959 auto size
= retryOffset
;
5960 BlockId retryFallthrough
= interp
.blk
->fallthrough
;
5965 finish_tracked_elems(env
, 0);
5966 if (!env
.reprocess
) break;
5967 FTRACE(2, " Reprocess mutated block {}\n", interp
.bid
);
5968 assertx(env
.unchangedBcs
< retryOffset
|| env
.replacedBcs
.size());
5970 retryOffset
= env
.unchangedBcs
;
5971 retryBcs
= std::move(env
.replacedBcs
);
5972 env
.unchangedBcs
= 0;
5973 env
.state
.copy_from(in
);
5974 env
.reprocess
= false;
5975 env
.replacedBcs
.clear();
5976 size
= retryOffset
+ retryBcs
.size();
5981 auto const& bc
= idx
< retryOffset
?
5982 interp
.blk
->hhbcs
[idx
] : retryBcs
[idx
- retryOffset
];
5986 auto const& flags
= env
.flags
;
5988 if (flags
.wasPEI
) ret
.noThrow
= false;
5990 if (interp
.collect
.effectFree
&& !flags
.effectFree
) {
5991 interp
.collect
.effectFree
= false;
5992 if (any(interp
.collect
.opts
& CollectionOpts::EffectFreeOnly
)) {
5993 env
.collect
.mInstrState
.clear();
5994 FTRACE(2, " Bailing because not effect free\n");
5995 return finish(NoBlockId
);
5999 if (flags
.returned
) {
6000 always_assert(idx
== size
);
6001 if (env
.reprocess
) continue;
6003 always_assert(interp
.blk
->fallthrough
== NoBlockId
);
6004 assertx(!ret
.returned
);
6005 FTRACE(2, " returned {}\n", show(*flags
.returned
));
6006 ret
.retParam
= flags
.retParam
;
6007 ret
.returned
= flags
.returned
;
6008 return finish(NoBlockId
);
6011 if (flags
.jmpDest
!= NoBlockId
) {
6012 always_assert(idx
== size
);
6013 auto const hasFallthrough
= [&] {
6014 if (flags
.jmpDest
!= interp
.blk
->fallthrough
) {
6015 FTRACE(2, " <took branch; no fallthrough>\n");
6016 auto const last
= last_op(env
);
6017 return !last
|| !instrIsNonCallControlFlow(last
->op
);
6019 FTRACE(2, " <branch never taken>\n");
6023 if (hasFallthrough
) retryFallthrough
= flags
.jmpDest
;
6024 if (env
.reprocess
) continue;
6025 finish_tracked_elems(env
, 0);
6026 auto const newDest
= speculateHelper(env
, flags
.jmpDest
, true);
6027 propagate(newDest
, &interp
.state
);
6028 return finish(hasFallthrough
? newDest
: NoBlockId
);
6031 if (interp
.state
.unreachable
) {
6032 if (env
.reprocess
) {
6036 FTRACE(2, " <bytecode fallthrough is unreachable>\n");
6037 finish_tracked_elems(env
, 0);
6038 return finish(NoBlockId
);
6042 FTRACE(2, " <end block>\n");
6043 if (retryFallthrough
!= NoBlockId
) {
6044 retryFallthrough
= speculateHelper(env
, retryFallthrough
, false);
6045 propagate(retryFallthrough
, &interp
.state
);
6047 return finish(retryFallthrough
);
6050 StepFlags
step(Interp
& interp
, const Bytecode
& op
) {
6051 auto noop
= [] (BlockId
, const State
*) {};
6052 ISS env
{ interp
, noop
};
6054 default_dispatch(env
, op
);
6055 if (env
.state
.unreachable
) {
6056 env
.collect
.mInstrState
.clear();
6058 assertx(env
.trackedElems
.empty());
6062 void default_dispatch(ISS
& env
, const Bytecode
& op
) {
6063 if (!env
.trackedElems
.empty()) {
6064 auto const pops
= [&] () -> uint32_t {
6067 case Op::AddNewElemC
:
6068 return numPop(op
) - 1;
6077 finish_tracked_elems(env
, env
.state
.stack
.size() - pops
);
6080 if (instrFlags(op
.op
) & TF
&& env
.flags
.jmpDest
== NoBlockId
) {
6082 } else if (env
.state
.unreachable
) {
6083 env
.collect
.mInstrState
.clear();
6087 Optional
<Type
> thisType(const Index
& index
, Context ctx
) {
6088 return thisTypeFromContext(index
, ctx
);
6091 //////////////////////////////////////////////////////////////////////