Fix HHBBC monotonicity violation
[hiphop-php.git] / hphp / hhbbc / interp.cpp
blob9eb19b3adfe134e55fd907b07e02f746fcd12ad4
1 /*
2 +----------------------------------------------------------------------+
3 | HipHop for PHP |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
16 #include "hphp/hhbbc/interp.h"
18 #include <algorithm>
19 #include <vector>
20 #include <string>
21 #include <iterator>
23 #include <folly/gen/Base.h>
24 #include <folly/gen/String.h>
26 #include "hphp/util/hash-set.h"
27 #include "hphp/util/trace.h"
28 #include "hphp/runtime/base/array-init.h"
29 #include "hphp/runtime/base/array-iterator.h"
30 #include "hphp/runtime/base/collections.h"
31 #include "hphp/runtime/base/static-string-table.h"
32 #include "hphp/runtime/base/tv-arith.h"
33 #include "hphp/runtime/base/tv-comparisons.h"
34 #include "hphp/runtime/base/tv-conversions.h"
35 #include "hphp/runtime/base/type-structure.h"
36 #include "hphp/runtime/base/type-structure-helpers.h"
37 #include "hphp/runtime/base/type-structure-helpers-defs.h"
38 #include "hphp/runtime/vm/runtime.h"
39 #include "hphp/runtime/vm/unit-util.h"
41 #include "hphp/runtime/ext/hh/ext_hh.h"
43 #include "hphp/hhbbc/analyze.h"
44 #include "hphp/hhbbc/bc.h"
45 #include "hphp/hhbbc/cfg.h"
46 #include "hphp/hhbbc/class-util.h"
47 #include "hphp/hhbbc/eval-cell.h"
48 #include "hphp/hhbbc/index.h"
49 #include "hphp/hhbbc/interp-state.h"
50 #include "hphp/hhbbc/optimize.h"
51 #include "hphp/hhbbc/representation.h"
52 #include "hphp/hhbbc/type-builtins.h"
53 #include "hphp/hhbbc/type-ops.h"
54 #include "hphp/hhbbc/type-structure.h"
55 #include "hphp/hhbbc/type-system.h"
56 #include "hphp/hhbbc/unit-util.h"
57 #include "hphp/hhbbc/wide-func.h"
59 #include "hphp/hhbbc/stats.h"
61 #include "hphp/hhbbc/interp-internal.h"
63 namespace HPHP { namespace HHBBC {
65 //////////////////////////////////////////////////////////////////////
67 namespace {
69 const StaticString s_MethCallerHelper("__SystemLib\\MethCallerHelper");
70 const StaticString s_PHP_Incomplete_Class("__PHP_Incomplete_Class");
71 const StaticString s_IMemoizeParam("HH\\IMemoizeParam");
72 const StaticString s_getInstanceKey("getInstanceKey");
73 const StaticString s_Closure("Closure");
74 const StaticString s_this("HH\\this");
76 bool poppable(Op op) {
77 switch (op) {
78 case Op::Dup:
79 case Op::Null:
80 case Op::False:
81 case Op::True:
82 case Op::Int:
83 case Op::Double:
84 case Op::String:
85 case Op::Vec:
86 case Op::Dict:
87 case Op::Keyset:
88 case Op::NewDictArray:
89 case Op::NewCol:
90 case Op::LazyClass:
91 return true;
92 default:
93 return false;
97 void interpStep(ISS& env, const Bytecode& bc);
99 void record(ISS& env, const Bytecode& bc) {
100 if (bc.srcLoc != env.srcLoc) {
101 Bytecode tmp = bc;
102 tmp.srcLoc = env.srcLoc;
103 return record(env, tmp);
106 if (!env.replacedBcs.size() &&
107 env.unchangedBcs < env.blk.hhbcs.size() &&
108 bc == env.blk.hhbcs[env.unchangedBcs]) {
109 env.unchangedBcs++;
110 return;
113 ITRACE(2, " => {}\n", show(env.ctx.func, bc));
114 env.replacedBcs.push_back(bc);
117 // The number of pops as seen by interp.
118 uint32_t numPop(const Bytecode& bc) {
119 if (bc.op == Op::CGetL2) return 1;
120 return bc.numPop();
123 // The number of pushes as seen by interp.
124 uint32_t numPush(const Bytecode& bc) {
125 if (bc.op == Op::CGetL2) return 2;
126 return bc.numPush();
129 void reprocess(ISS& env) {
130 env.reprocess = true;
133 ArrayData** add_elem_array(ISS& env) {
134 auto const idx = env.trackedElems.back().idx;
135 if (idx < env.unchangedBcs) {
136 auto const DEBUG_ONLY& bc = env.blk.hhbcs[idx];
137 assertx(bc.op == Op::Concat);
138 return nullptr;
140 assertx(idx >= env.unchangedBcs);
141 auto& bc = env.replacedBcs[idx - env.unchangedBcs];
142 auto arr = [&] () -> const ArrayData** {
143 switch (bc.op) {
144 case Op::Vec: return &bc.Vec.arr1;
145 case Op::Dict: return &bc.Dict.arr1;
146 case Op::Keyset: return &bc.Keyset.arr1;
147 case Op::Concat: return nullptr;
148 default: not_reached();
150 }();
151 return const_cast<ArrayData**>(arr);
154 bool start_add_elem(ISS& env, Type& ty, Op op) {
155 auto value = tvNonStatic(ty);
156 if (!value || !isArrayLikeType(value->m_type)) return false;
158 if (op == Op::AddElemC) {
159 reduce(env, bc::PopC {}, bc::PopC {}, bc::PopC {});
160 } else {
161 reduce(env, bc::PopC {}, bc::PopC {});
163 env.trackedElems.emplace_back(
164 env.state.stack.size(),
165 env.unchangedBcs + env.replacedBcs.size()
168 auto const arr = value->m_data.parr;
169 env.replacedBcs.push_back(
170 [&] () -> Bytecode {
171 if (arr->isVecType()) return bc::Vec { arr };
172 if (arr->isDictType()) return bc::Dict { arr };
173 if (arr->isKeysetType()) return bc::Keyset { arr };
174 always_assert(false);
177 env.replacedBcs.back().srcLoc = env.srcLoc;
178 ITRACE(2, "(addelem* -> {}\n",
179 show(env.ctx.func, env.replacedBcs.back()));
180 push(env, std::move(ty));
181 effect_free(env);
182 return true;
186 * Alter the saved add_elem array in a way that preserves its provenance tag
187 * or adds a new one if applicable (i.e. the array is a vec or dict)
189 * The `mutate` parameter should be callable with an ArrayData** pointing to the
190 * add_elem array cached in the interp state and should write to it directly.
192 template <typename Fn>
193 bool mutate_add_elem_array(ISS& env, Fn&& mutate) {
194 auto const arr = add_elem_array(env);
195 if (!arr) return false;
196 mutate(arr);
197 return true;
200 void finish_tracked_elem(ISS& env) {
201 auto const arr = add_elem_array(env);
202 env.trackedElems.pop_back();
203 if (arr) ArrayData::GetScalarArray(arr);
206 void finish_tracked_elems(ISS& env, size_t depth) {
207 while (!env.trackedElems.empty() && env.trackedElems.back().depth >= depth) {
208 finish_tracked_elem(env);
212 uint32_t id_from_slot(ISS& env, int slot) {
213 auto const id = (env.state.stack.end() - (slot + 1))->id;
214 assertx(id == StackElem::NoId ||
215 id < env.unchangedBcs + env.replacedBcs.size());
216 return id;
219 const Bytecode* op_from_id(ISS& env, uint32_t id) {
220 if (id == StackElem::NoId) return nullptr;
221 if (id < env.unchangedBcs) return &env.blk.hhbcs[id];
222 auto const off = id - env.unchangedBcs;
223 assertx(off < env.replacedBcs.size());
224 return &env.replacedBcs[off];
227 void ensure_mutable(ISS& env, uint32_t id) {
228 if (id < env.unchangedBcs) {
229 auto const delta = env.unchangedBcs - id;
230 env.replacedBcs.resize(env.replacedBcs.size() + delta);
231 for (auto i = env.replacedBcs.size(); i-- > delta; ) {
232 env.replacedBcs[i] = std::move(env.replacedBcs[i - delta]);
234 for (auto i = 0; i < delta; i++) {
235 env.replacedBcs[i] = env.blk.hhbcs[id + i];
237 env.unchangedBcs = id;
242 * Turn the instruction that wrote the slot'th element from the top of
243 * the stack into a Nop, adjusting the stack appropriately. If its the
244 * previous instruction, just rewind.
246 int kill_by_slot(ISS& env, int slot) {
247 assertx(!env.undo);
248 auto const id = id_from_slot(env, slot);
249 assertx(id != StackElem::NoId);
250 auto const sz = env.state.stack.size();
251 // if its the last bytecode we processed, we can rewind and avoid
252 // the reprocess overhead.
253 if (id == env.unchangedBcs + env.replacedBcs.size() - 1) {
254 rewind(env, 1);
255 return env.state.stack.size() - sz;
257 ensure_mutable(env, id);
258 auto& bc = env.replacedBcs[id - env.unchangedBcs];
259 auto const pop = numPop(bc);
260 auto const push = numPush(bc);
261 ITRACE(2, "kill_by_slot: slot={}, id={}, was {}\n",
262 slot, id, show(env.ctx.func, bc));
263 bc = bc_with_loc(bc.srcLoc, bc::Nop {});
264 env.state.stack.kill(pop, push, id);
265 reprocess(env);
266 return env.state.stack.size() - sz;
270 * Check whether an instruction can be inserted immediately after the
271 * slot'th stack entry was written. This is only possible if slot was
272 * the last thing written by the instruction that wrote it (ie some
273 * bytecodes push more than one value - there's no way to insert a
274 * bytecode that will write *between* those values on the stack).
276 bool can_insert_after_slot(ISS& env, int slot) {
277 auto const it = env.state.stack.end() - (slot + 1);
278 if (it->id == StackElem::NoId) return false;
279 if (auto const next = it.next_elem(1)) {
280 return next->id != it->id;
282 return true;
286 * Insert a sequence of bytecodes after the instruction that wrote the
287 * slot'th element from the top of the stack.
289 * The entire sequence pops numPop, and pushes numPush stack
290 * elements. Only the last bytecode can push anything onto the stack,
291 * and the types it pushes are pointed to by types (if you have more
292 * than one bytecode that pushes, call this more than once).
294 void insert_after_slot(ISS& env, int slot,
295 int numPop, int numPush, const Type* types,
296 const BytecodeVec& bcs) {
297 assertx(can_insert_after_slot(env, slot));
298 assertx(!env.undo);
299 auto const id = id_from_slot(env, slot);
300 assertx(id != StackElem::NoId);
301 ensure_mutable(env, id + 1);
302 env.state.stack.insert_after(numPop, numPush, types, bcs.size(), id);
303 env.replacedBcs.insert(env.replacedBcs.begin() + (id + 1 - env.unchangedBcs),
304 bcs.begin(), bcs.end());
305 using namespace folly::gen;
306 ITRACE(2, "insert_after_slot: slot={}, id={} [{}]\n",
307 slot, id,
308 from(bcs) |
309 map([&] (const Bytecode& bc) { return show(env.ctx.func, bc); }) |
310 unsplit<std::string>(", "));
313 Bytecode& mutate_last_op(ISS& env) {
314 assertx(will_reduce(env));
316 if (!env.replacedBcs.size()) {
317 assertx(env.unchangedBcs);
318 env.replacedBcs.push_back(env.blk.hhbcs[--env.unchangedBcs]);
320 return env.replacedBcs.back();
324 * Can be used to replace one op with another when rewind/reduce isn't
325 * safe (eg to change a SetL to a PopL - its not safe to rewind/reduce
326 * because the SetL changed both the Type and the equiv of its local).
328 void replace_last_op(ISS& env, Bytecode&& bc) {
329 auto& last = mutate_last_op(env);
330 auto const newPush = numPush(bc);
331 auto const oldPush = numPush(last);
332 auto const newPops = numPop(bc);
333 auto const oldPops = numPop(last);
335 assertx(newPush <= oldPush);
336 assertx(newPops <= oldPops);
338 if (newPush != oldPush || newPops != oldPops) {
339 assertx(!env.undo);
340 env.state.stack.rewind(oldPops - newPops, oldPush - newPush);
342 ITRACE(2, "(replace: {}->{}\n",
343 show(env.ctx.func, last), show(env.ctx.func, bc));
344 last = bc_with_loc(last.srcLoc, bc);
349 //////////////////////////////////////////////////////////////////////
351 const Bytecode* op_from_slot(ISS& env, int slot, int prev /* = 0 */) {
352 if (!will_reduce(env)) return nullptr;
353 auto const id = id_from_slot(env, slot);
354 if (id == StackElem::NoId) return nullptr;
355 if (id < prev) return nullptr;
356 return op_from_id(env, id - prev);
359 const Bytecode* last_op(ISS& env, int idx /* = 0 */) {
360 if (!will_reduce(env)) return nullptr;
362 if (env.replacedBcs.size() > idx) {
363 return &env.replacedBcs[env.replacedBcs.size() - idx - 1];
366 idx -= env.replacedBcs.size();
367 if (env.unchangedBcs > idx) {
368 return &env.blk.hhbcs[env.unchangedBcs - idx - 1];
370 return nullptr;
374 * Assuming bc was just interped, rewind to the state immediately
375 * before it was interped.
377 * This is rarely what you want. Its used for constprop, where the
378 * bytecode has been interped, but not yet committed to the bytecode
379 * stream. We want to undo its effects, the spit out pops for its
380 * inputs, and commit a constant-generating bytecode.
382 void rewind(ISS& env, const Bytecode& bc) {
383 assertx(!env.undo);
384 ITRACE(2, "(rewind: {}\n", show(env.ctx.func, bc));
385 env.state.stack.rewind(numPop(bc), numPush(bc));
389 * Used for peephole opts. Will undo the *stack* effects of the last n
390 * committed byte codes, and remove them from the bytecode stream, in
391 * preparation for writing out an optimized replacement sequence.
393 * WARNING: Does not undo other changes to state, such as local types,
394 * local equivalency, and thisType. Take care when rewinding such
395 * things.
397 void rewind(ISS& env, int n) {
398 assertx(n);
399 assertx(!env.undo);
400 while (env.replacedBcs.size()) {
401 rewind(env, env.replacedBcs.back());
402 env.replacedBcs.pop_back();
403 if (!--n) return;
405 while (n--) {
406 rewind(env, env.blk.hhbcs[--env.unchangedBcs]);
410 void impl_vec(ISS& env, bool reduce, BytecodeVec&& bcs) {
411 if (!will_reduce(env)) reduce = false;
413 if (reduce) {
414 using namespace folly::gen;
415 ITRACE(2, "(reduce: {}\n",
416 from(bcs) |
417 map([&] (const Bytecode& bc) { return show(env.ctx.func, bc); }) |
418 unsplit<std::string>(", "));
419 if (bcs.size()) {
420 auto ef = !env.flags.reduced || env.flags.effectFree;
421 Trace::Indent _;
422 for (auto const& bc : bcs) {
423 assertx(
424 env.flags.jmpDest == NoBlockId &&
425 "you can't use impl with branching opcodes before last position"
427 interpStep(env, bc);
428 if (!env.flags.effectFree) ef = false;
429 if (env.state.unreachable || env.flags.jmpDest != NoBlockId) break;
431 env.flags.effectFree = ef;
432 } else if (!env.flags.reduced) {
433 effect_free(env);
435 env.flags.reduced = true;
436 return;
439 env.analyzeDepth++;
440 SCOPE_EXIT { env.analyzeDepth--; };
442 // We should be at the start of a bytecode.
443 assertx(env.flags.wasPEI &&
444 !env.flags.canConstProp &&
445 !env.flags.effectFree);
447 env.flags.wasPEI = false;
448 env.flags.canConstProp = true;
449 env.flags.effectFree = true;
451 for (auto const& bc : bcs) {
452 assertx(env.flags.jmpDest == NoBlockId &&
453 "you can't use impl with branching opcodes before last position");
455 auto const wasPEI = env.flags.wasPEI;
456 auto const canConstProp = env.flags.canConstProp;
457 auto const effectFree = env.flags.effectFree;
459 ITRACE(3, " (impl {}\n", show(env.ctx.func, bc));
460 env.flags.wasPEI = true;
461 env.flags.canConstProp = false;
462 env.flags.effectFree = false;
463 default_dispatch(env, bc);
465 if (env.flags.canConstProp) {
466 [&] {
467 if (env.flags.effectFree && !env.flags.wasPEI) return;
468 auto stk = env.state.stack.end();
469 for (auto i = bc.numPush(); i--; ) {
470 --stk;
471 if (!is_scalar(stk->type)) return;
473 env.flags.effectFree = true;
474 env.flags.wasPEI = false;
475 }();
478 // If any of the opcodes in the impl list said they could throw,
479 // then the whole thing could throw.
480 env.flags.wasPEI = env.flags.wasPEI || wasPEI;
481 env.flags.canConstProp = env.flags.canConstProp && canConstProp;
482 env.flags.effectFree = env.flags.effectFree && effectFree;
483 if (env.state.unreachable || env.flags.jmpDest != NoBlockId) break;
487 LocalId equivLocalRange(ISS& env, const LocalRange& range) {
488 auto bestRange = range.first;
489 auto equivFirst = findLocEquiv(env, range.first);
490 if (equivFirst == NoLocalId) return bestRange;
491 do {
492 if (equivFirst < bestRange) {
493 auto equivRange = [&] {
494 // local equivalency includes differing by Uninit, so we need
495 // to check the types.
496 if (peekLocRaw(env, equivFirst) != peekLocRaw(env, range.first)) {
497 return false;
500 for (uint32_t i = 1; i < range.count; ++i) {
501 if (!locsAreEquiv(env, equivFirst + i, range.first + i) ||
502 peekLocRaw(env, equivFirst + i) !=
503 peekLocRaw(env, range.first + i)) {
504 return false;
508 return true;
509 }();
511 if (equivRange) {
512 bestRange = equivFirst;
515 equivFirst = findLocEquiv(env, equivFirst);
516 assertx(equivFirst != NoLocalId);
517 } while (equivFirst != range.first);
519 return bestRange;
522 SString getNameFromType(const Type& t) {
523 if (!t.subtypeOf(BStr) && !t.subtypeOf(BLazyCls)) return nullptr;
524 if (is_specialized_string(t)) return sval_of(t);
525 if (is_specialized_lazycls(t)) return lazyclsval_of(t);
526 return nullptr;
529 //////////////////////////////////////////////////////////////////////
531 namespace {
534 * Very simple check to see if the top level class is reified or not
535 * If not we can reduce a VerifyTypeTS to a regular VerifyType
537 bool shouldReduceToNonReifiedVerifyType(ISS& env, SArray ts) {
538 if (get_ts_kind(ts) != TypeStructure::Kind::T_unresolved) return false;
539 auto const clsName = get_ts_classname(ts);
540 auto const rcls = env.index.resolve_class(env.ctx, clsName);
541 if (rcls && rcls->resolved()) return !rcls->cls()->hasReifiedGenerics;
542 // Type aliases cannot have reified generics
543 return env.index.lookup_type_alias(clsName) != nullptr;
548 //////////////////////////////////////////////////////////////////////
550 namespace interp_step {
552 void in(ISS& env, const bc::Nop&) { reduce(env); }
554 void in(ISS& env, const bc::PopC&) {
555 if (auto const last = last_op(env)) {
556 if (poppable(last->op)) {
557 rewind(env, 1);
558 return reduce(env);
560 if (last->op == Op::This) {
561 // can't rewind This because it removed null from thisType (so
562 // CheckThis at this point is a no-op) - and note that it must
563 // have *been* nullable, or we'd have turned it into a
564 // `BareThis NeverNull`
565 replace_last_op(env, bc::CheckThis {});
566 return reduce(env);
568 if (last->op == Op::SetL) {
569 // can't rewind a SetL because it changes local state
570 replace_last_op(env, bc::PopL { last->SetL.loc1 });
571 return reduce(env);
573 if (last->op == Op::CGetL2) {
574 auto loc = last->CGetL2.nloc1;
575 rewind(env, 1);
576 return reduce(env, bc::PopC {}, bc::CGetL { loc });
580 effect_free(env);
581 popC(env);
584 void in(ISS& env, const bc::PopU&) {
585 if (auto const last = last_op(env)) {
586 if (last->op == Op::NullUninit) {
587 rewind(env, 1);
588 return reduce(env);
591 effect_free(env); popU(env);
594 void in(ISS& env, const bc::PopU2&) {
595 effect_free(env);
596 auto equiv = topStkEquiv(env);
597 auto val = popC(env);
598 popU(env);
599 push(env, std::move(val), equiv != StackDupId ? equiv : NoLocalId);
602 void in(ISS& env, const bc::EntryNop&) { effect_free(env); }
604 void in(ISS& env, const bc::Dup& /*op*/) {
605 effect_free(env);
606 auto equiv = topStkEquiv(env);
607 auto val = popC(env);
608 push(env, val, equiv);
609 push(env, std::move(val), StackDupId);
612 void in(ISS& env, const bc::AssertRATL& op) {
613 mayReadLocal(env, op.loc1);
614 effect_free(env);
617 void in(ISS& env, const bc::AssertRATStk&) {
618 effect_free(env);
621 void in(ISS& env, const bc::BreakTraceHint&) { effect_free(env); }
623 void in(ISS& env, const bc::CGetCUNop&) {
624 effect_free(env);
625 auto const t = popCU(env);
626 push(env, remove_uninit(t));
629 void in(ISS& env, const bc::UGetCUNop&) {
630 effect_free(env);
631 popCU(env);
632 push(env, TUninit);
635 void in(ISS& env, const bc::Null&) {
636 effect_free(env);
637 push(env, TInitNull);
640 void in(ISS& env, const bc::NullUninit&) {
641 effect_free(env);
642 push(env, TUninit);
645 void in(ISS& env, const bc::True&) {
646 effect_free(env);
647 push(env, TTrue);
650 void in(ISS& env, const bc::False&) {
651 effect_free(env);
652 push(env, TFalse);
655 void in(ISS& env, const bc::Int& op) {
656 effect_free(env);
657 push(env, ival(op.arg1));
660 void in(ISS& env, const bc::Double& op) {
661 effect_free(env);
662 push(env, dval(op.dbl1));
665 void in(ISS& env, const bc::String& op) {
666 effect_free(env);
667 push(env, sval(op.str1));
670 void in(ISS& env, const bc::Vec& op) {
671 assertx(op.arr1->isVecType());
672 effect_free(env);
673 push(env, vec_val(op.arr1));
676 void in(ISS& env, const bc::Dict& op) {
677 assertx(op.arr1->isDictType());
678 effect_free(env);
679 push(env, dict_val(op.arr1));
682 void in(ISS& env, const bc::Keyset& op) {
683 assertx(op.arr1->isKeysetType());
684 effect_free(env);
685 push(env, keyset_val(op.arr1));
688 void in(ISS& env, const bc::NewDictArray& op) {
689 effect_free(env);
690 push(env, op.arg1 == 0 ? dict_empty() : some_dict_empty());
693 void in(ISS& env, const bc::NewStructDict& op) {
694 auto map = MapElems{};
695 for (auto it = op.keys.end(); it != op.keys.begin(); ) {
696 map.emplace_front(
697 make_tv<KindOfPersistentString>(*--it),
698 MapElem::SStrKey(popC(env))
701 push(env, dict_map(std::move(map)));
702 effect_free(env);
703 constprop(env);
706 void in(ISS& env, const bc::NewVec& op) {
707 auto elems = std::vector<Type>{};
708 elems.reserve(op.arg1);
709 for (auto i = uint32_t{0}; i < op.arg1; ++i) {
710 elems.push_back(std::move(topC(env, op.arg1 - i - 1)));
712 discard(env, op.arg1);
713 effect_free(env);
714 constprop(env);
715 push(env, vec(std::move(elems)));
718 void in(ISS& env, const bc::NewKeysetArray& op) {
719 assertx(op.arg1 > 0);
720 auto map = MapElems{};
721 auto ty = TBottom;
722 auto useMap = true;
723 auto bad = false;
724 auto effectful = false;
725 for (auto i = uint32_t{0}; i < op.arg1; ++i) {
726 auto [key, promotion] = promote_classlike_to_key(popC(env));
728 auto const keyValid = key.subtypeOf(BArrKey);
729 if (!keyValid) key = intersection_of(std::move(key), TArrKey);
730 if (key.is(BBottom)) {
731 bad = true;
732 useMap = false;
733 effectful = true;
736 if (useMap) {
737 if (auto const v = tv(key)) {
738 map.emplace_front(*v, MapElem::KeyFromType(key, key));
739 } else {
740 useMap = false;
744 ty |= std::move(key);
745 effectful |= !keyValid || (promotion == Promotion::YesMightThrow);
748 if (!effectful) {
749 effect_free(env);
750 constprop(env);
753 if (useMap) {
754 push(env, keyset_map(std::move(map)));
755 } else if (!bad) {
756 push(env, keyset_n(ty));
757 } else {
758 assertx(effectful);
759 unreachable(env);
760 push(env, TBottom);
764 void in(ISS& env, const bc::AddElemC&) {
765 auto const v = topC(env, 0);
766 auto const [k, promotion] = promote_classlike_to_key(topC(env, 1));
767 auto const promoteMayThrow = (promotion == Promotion::YesMightThrow);
769 auto inTy = (env.state.stack.end() - 3).unspecialize();
770 // Unspecialize modifies the stack location
771 if (env.undo) env.undo->onStackWrite(env.state.stack.size() - 3, inTy);
773 auto outTy = [&] (const Type& key) -> Optional<Type> {
774 if (!key.subtypeOf(BArrKey)) return std::nullopt;
775 if (inTy.subtypeOf(BDict)) {
776 auto const r = array_like_set(std::move(inTy), key, v);
777 if (!r.second) return r.first;
779 return std::nullopt;
780 }(k);
782 if (outTy && !promoteMayThrow && will_reduce(env)) {
783 if (!env.trackedElems.empty() &&
784 env.trackedElems.back().depth + 3 == env.state.stack.size()) {
785 auto const handled = [&] (const Type& key) {
786 if (!key.subtypeOf(BArrKey)) return false;
787 auto ktv = tv(key);
788 if (!ktv) return false;
789 auto vtv = tv(v);
790 if (!vtv) return false;
791 return mutate_add_elem_array(env, [&](ArrayData** arr) {
792 *arr = (*arr)->setMove(*ktv, *vtv);
794 }(k);
795 if (handled) {
796 (env.state.stack.end() - 3)->type = std::move(*outTy);
797 reduce(env, bc::PopC {}, bc::PopC {});
798 ITRACE(2, "(addelem* -> {}\n",
799 show(env.ctx.func,
800 env.replacedBcs[env.trackedElems.back().idx - env.unchangedBcs]));
801 return;
803 } else {
804 if (start_add_elem(env, *outTy, Op::AddElemC)) return;
808 discard(env, 3);
809 finish_tracked_elems(env, env.state.stack.size());
811 if (!outTy) return push(env, TInitCell);
813 if (outTy->subtypeOf(BBottom)) {
814 unreachable(env);
815 } else if (!promoteMayThrow) {
816 effect_free(env);
817 constprop(env);
819 push(env, std::move(*outTy));
822 void in(ISS& env, const bc::AddNewElemC&) {
823 auto v = topC(env);
824 auto inTy = (env.state.stack.end() - 2).unspecialize();
825 // Unspecialize modifies the stack location
826 if (env.undo) env.undo->onStackWrite(env.state.stack.size() - 2, inTy);
828 auto outTy = [&] () -> Optional<Type> {
829 if (inTy.subtypeOf(BVec | BKeyset)) {
830 auto const r = array_like_newelem(std::move(inTy), v);
831 if (!r.second) return r.first;
833 return std::nullopt;
834 }();
836 if (outTy && will_reduce(env)) {
837 if (!env.trackedElems.empty() &&
838 env.trackedElems.back().depth + 2 == env.state.stack.size()) {
839 auto const handled = [&] {
840 auto vtv = tv(v);
841 if (!vtv) return false;
842 return mutate_add_elem_array(env, [&](ArrayData** arr) {
843 *arr = (*arr)->appendMove(*vtv);
845 }();
846 if (handled) {
847 (env.state.stack.end() - 2)->type = std::move(*outTy);
848 reduce(env, bc::PopC {});
849 ITRACE(2, "(addelem* -> {}\n",
850 show(env.ctx.func,
851 env.replacedBcs[env.trackedElems.back().idx - env.unchangedBcs]));
852 return;
854 } else {
855 if (start_add_elem(env, *outTy, Op::AddNewElemC)) {
856 return;
861 discard(env, 2);
862 finish_tracked_elems(env, env.state.stack.size());
864 if (!outTy) return push(env, TInitCell);
866 if (outTy->is(BBottom)) {
867 unreachable(env);
868 } else {
869 constprop(env);
871 push(env, std::move(*outTy));
874 void in(ISS& env, const bc::NewCol& op) {
875 auto const type = static_cast<CollectionType>(op.subop1);
876 auto const name = collections::typeToString(type);
877 push(env, objExact(env.index.builtin_class(name)));
878 effect_free(env);
881 void in(ISS& env, const bc::NewPair& /*op*/) {
882 popC(env); popC(env);
883 auto const name = collections::typeToString(CollectionType::Pair);
884 push(env, objExact(env.index.builtin_class(name)));
885 effect_free(env);
888 void in(ISS& env, const bc::ColFromArray& op) {
889 auto const src = popC(env);
890 auto const type = static_cast<CollectionType>(op.subop1);
891 assertx(type != CollectionType::Pair);
892 if (type == CollectionType::Vector || type == CollectionType::ImmVector) {
893 if (src.subtypeOf(TVec)) effect_free(env);
894 } else {
895 assertx(type == CollectionType::Map ||
896 type == CollectionType::ImmMap ||
897 type == CollectionType::Set ||
898 type == CollectionType::ImmSet);
899 if (src.subtypeOf(TDict)) effect_free(env);
901 auto const name = collections::typeToString(type);
902 push(env, objExact(env.index.builtin_class(name)));
905 void in(ISS& env, const bc::CnsE& op) {
906 auto t = env.index.lookup_constant(env.ctx, op.str1);
907 if (t.strictSubtypeOf(TInitCell)) {
908 // constprop will take care of nothrow *if* its a constant; and if
909 // its not, we might trigger autoload.
910 constprop(env);
912 push(env, std::move(t));
915 namespace {
917 void clsCnsImpl(ISS& env, const Type& cls, const Type& name) {
918 if (!cls.couldBe(BCls) || !name.couldBe(BStr)) {
919 push(env, TBottom);
920 unreachable(env);
921 return;
924 auto lookup = env.index.lookup_class_constant(env.ctx, cls, name);
925 if (lookup.found == TriBool::No) {
926 push(env, TBottom);
927 unreachable(env);
928 return;
931 if (cls.subtypeOf(BCls) &&
932 name.subtypeOf(BStr) &&
933 lookup.found == TriBool::Yes &&
934 !lookup.mightThrow) {
935 constprop(env);
936 effect_free(env);
939 push(env, std::move(lookup.ty));
944 void in(ISS& env, const bc::ClsCns& op) {
945 auto const cls = topC(env);
947 if (cls.subtypeOf(BCls) && is_specialized_cls(cls)) {
948 auto const dcls = dcls_of(cls);
949 if (dcls.type == DCls::Exact) {
950 return reduce(env, bc::PopC {}, bc::ClsCnsD { op.str1, dcls.cls.name() });
954 popC(env);
955 clsCnsImpl(env, cls, sval(op.str1));
958 void in(ISS& env, const bc::ClsCnsL& op) {
959 auto const cls = topC(env);
960 auto const name = locRaw(env, op.loc1);
962 if (name.subtypeOf(BStr) && is_specialized_string(name)) {
963 return reduce(env, bc::ClsCns { sval_of(name) });
966 popC(env);
967 clsCnsImpl(env, cls, name);
970 void in(ISS& env, const bc::ClsCnsD& op) {
971 auto const rcls = env.index.resolve_class(env.ctx, op.str2);
972 if (!rcls || !rcls->resolved()) {
973 push(env, TInitCell);
974 return;
976 clsCnsImpl(env, clsExact(*rcls), sval(op.str1));
979 void in(ISS& env, const bc::File&) { effect_free(env); push(env, TSStr); }
980 void in(ISS& env, const bc::Dir&) { effect_free(env); push(env, TSStr); }
981 void in(ISS& env, const bc::Method&) { effect_free(env); push(env, TSStr); }
983 void in(ISS& env, const bc::FuncCred&) { effect_free(env); push(env, TObj); }
985 void in(ISS& env, const bc::ClassName& op) {
986 auto const ty = topC(env);
987 if (ty.subtypeOf(BCls) && is_specialized_cls(ty)) {
988 auto const dcls = dcls_of(ty);
989 if (dcls.type == DCls::Exact) {
990 return reduce(env,
991 bc::PopC {},
992 bc::String { dcls.cls.name() });
994 effect_free(env);
996 popC(env);
997 push(env, TSStr);
1000 void in(ISS& env, const bc::LazyClassFromClass&) {
1001 auto const ty = topC(env);
1002 if (ty.subtypeOf(BCls) && is_specialized_cls(ty)) {
1003 auto const dcls = dcls_of(ty);
1004 if (dcls.type == DCls::Exact) {
1005 return reduce(env,
1006 bc::PopC {},
1007 bc::LazyClass { dcls.cls.name() });
1009 effect_free(env);
1011 popC(env);
1012 push(env, TLazyCls);
1015 void concatHelper(ISS& env, uint32_t n) {
1016 auto changed = false;
1017 auto side_effects = false;
1018 if (will_reduce(env)) {
1019 auto litstr = [&] (SString next, uint32_t i) -> SString {
1020 auto const t = topC(env, i);
1021 auto const v = tv(t);
1022 if (!v) return nullptr;
1023 if (!isStringType(v->m_type) && !isIntType(v->m_type)) return nullptr;
1024 auto const cell = eval_cell_value(
1025 [&] {
1026 auto const s = makeStaticString(
1027 next ?
1028 StringData::Make(tvAsCVarRef(&*v).toString().get(), next) :
1029 tvAsCVarRef(&*v).toString().get());
1030 return make_tv<KindOfString>(s);
1033 if (!cell) return nullptr;
1034 return cell->m_data.pstr;
1037 auto fold = [&] (uint32_t slot, uint32_t num, SString result) {
1038 auto const cell = make_tv<KindOfPersistentString>(result);
1039 auto const ty = from_cell(cell);
1040 BytecodeVec bcs{num, bc::PopC {}};
1041 if (num > 1) bcs.push_back(gen_constant(cell));
1042 if (slot == 0) {
1043 reduce(env, std::move(bcs));
1044 } else {
1045 insert_after_slot(env, slot, num, num > 1 ? 1 : 0, &ty, bcs);
1046 reprocess(env);
1048 n -= num - 1;
1049 changed = true;
1052 for (auto i = 0; i < n; i++) {
1053 if (!topC(env, i).subtypeOf(BArrKey)) {
1054 side_effects = true;
1055 break;
1059 if (!side_effects) {
1060 for (auto i = 0; i < n; i++) {
1061 auto const tracked = !env.trackedElems.empty() &&
1062 env.trackedElems.back().depth + i + 1 == env.state.stack.size();
1063 if (tracked) finish_tracked_elems(env, env.trackedElems.back().depth);
1064 auto const prev = op_from_slot(env, i);
1065 if (!prev) continue;
1066 if ((prev->op == Op::Concat && tracked) || prev->op == Op::ConcatN) {
1067 auto const extra = kill_by_slot(env, i);
1068 changed = true;
1069 n += extra;
1070 i += extra;
1075 SString result = nullptr;
1076 uint32_t i = 0;
1077 uint32_t nlit = 0;
1078 while (i < n) {
1079 // In order to collapse literals, we need to be able to insert
1080 // pops, and a constant after the sequence that generated the
1081 // literals. We can always insert after the last instruction
1082 // though, and we only need to check the first slot of a
1083 // sequence.
1084 auto const next = !i || result || can_insert_after_slot(env, i) ?
1085 litstr(result, i) : nullptr;
1086 if (next == staticEmptyString()) {
1087 if (n == 1) break;
1088 // don't fold away empty strings if the concat could trigger exceptions
1089 if (i == 0 && !topC(env, 1).subtypeOf(BArrKey)) break;
1090 if (n == 2 && i == 1 && !topC(env, 0).subtypeOf(BArrKey)) break;
1091 assertx(nlit == 0);
1092 fold(i, 1, next);
1093 n--;
1094 continue;
1096 if (!next) {
1097 if (nlit > 1) {
1098 fold(i - nlit, nlit, result);
1099 i -= nlit - 1;
1101 nlit = 0;
1102 } else {
1103 nlit++;
1105 result = next;
1106 i++;
1108 if (nlit > 1) fold(i - nlit, nlit, result);
1111 if (!changed) {
1112 discard(env, n);
1113 if (n == 2 && !side_effects && will_reduce(env)) {
1114 env.trackedElems.emplace_back(
1115 env.state.stack.size(),
1116 env.unchangedBcs + env.replacedBcs.size()
1119 push(env, TStr);
1120 return;
1123 if (n == 1) {
1124 if (!topC(env).subtypeOf(BStr)) {
1125 return reduce(env, bc::CastString {});
1127 return reduce(env);
1130 reduce(env);
1131 // We can't reduce the emitted concats, or we'll end up with
1132 // infinite recursion.
1133 env.flags.wasPEI = true;
1134 env.flags.effectFree = false;
1135 env.flags.canConstProp = false;
1137 auto concat = [&] (uint32_t num) {
1138 discard(env, num);
1139 push(env, TStr);
1140 if (num == 2) {
1141 record(env, bc::Concat {});
1142 } else {
1143 record(env, bc::ConcatN { num });
1147 while (n >= 4) {
1148 concat(4);
1149 n -= 3;
1151 if (n > 1) concat(n);
// Binary string concat: delegated to concatHelper, which folds literal
// operands and coalesces chains of Concat/ConcatN.
void in(ISS& env, const bc::Concat& /*op*/) {
  concatHelper(env, 2);
}

// N-ary concat. A two-operand ConcatN is canonicalized to Concat so the
// rest of the analysis only deals with a single two-operand form.
void in(ISS& env, const bc::ConcatN& op) {
  if (op.arg1 == 2) return reduce(env, bc::Concat {});
  concatHelper(env, op.arg1);
}
// Shared implementation for binary arithmetic/bitwise bytecodes: pop both
// operands and push fun(lhs, rhs). Note the stack order: t1 is the top of
// stack (the rhs), t2 the element under it (the lhs).
template <class Op, class Fun>
void arithImpl(ISS& env, const Op& /*op*/, Fun fun) {
  constprop(env);
  auto const t1 = popC(env);
  auto const t2 = popC(env);
  push(env, fun(t2, t1));
}

// Each arithmetic bytecode dispatches to arithImpl with the matching
// type-arithmetic helper.
void in(ISS& env, const bc::Add& op)    { arithImpl(env, op, typeAdd); }
void in(ISS& env, const bc::Sub& op)    { arithImpl(env, op, typeSub); }
void in(ISS& env, const bc::Mul& op)    { arithImpl(env, op, typeMul); }
void in(ISS& env, const bc::Div& op)    { arithImpl(env, op, typeDiv); }
void in(ISS& env, const bc::Mod& op)    { arithImpl(env, op, typeMod); }
void in(ISS& env, const bc::Pow& op)    { arithImpl(env, op, typePow); }
void in(ISS& env, const bc::BitAnd& op) { arithImpl(env, op, typeBitAnd); }
void in(ISS& env, const bc::BitOr& op)  { arithImpl(env, op, typeBitOr); }
void in(ISS& env, const bc::BitXor& op) { arithImpl(env, op, typeBitXor); }
void in(ISS& env, const bc::AddO& op)   { arithImpl(env, op, typeAddO); }
void in(ISS& env, const bc::SubO& op)   { arithImpl(env, op, typeSubO); }
void in(ISS& env, const bc::MulO& op)   { arithImpl(env, op, typeMulO); }
void in(ISS& env, const bc::Shl& op)    { arithImpl(env, op, typeShl); }
void in(ISS& env, const bc::Shr& op)    { arithImpl(env, op, typeShr); }
void in(ISS& env, const bc::BitNot& /*op*/) {
  auto const t = popC(env);
  auto const v = tv(t);
  // ~ is only defined on ints, strings, and (lazy) classes; anything else
  // must throw, so the result type is Bottom.
  if (!t.couldBe(BInt | BStr | BSStr | BLazyCls | BCls)) {
    return push(env, TBottom);
  }

  // Constant-fold when the operand is a known value and tvBitNot doesn't
  // raise inside eval_cell.
  if (v) {
    constprop(env);
    auto cell = eval_cell([&] {
      auto c = *v;
      tvBitNot(c);
      return c;
    });
    if (cell) return push(env, std::move(*cell));
  }
  push(env, TInitCell);
}
1205 namespace {
// Evaluate Same/NSame on the top two stack elements without popping them.
// Returns the result type plus a flag indicating whether the comparison
// might raise (used by callers to decide on nothrow/constprop).
template<bool NSame>
std::pair<Type,bool> resolveSame(ISS& env) {
  auto const l1 = topStkEquiv(env, 0);
  auto const t1 = topC(env, 0);
  auto const l2 = topStkEquiv(env, 1);
  auto const t2 = topC(env, 1);

  auto warningsEnabled =
    (RuntimeOption::EvalEmitClsMethPointers ||
     RuntimeOption::EvalRaiseClassConversionWarning);

  auto const result = [&] {
    auto const v1 = tv(t1);
    auto const v2 = tv(t2);

    // Both operands are provably the same value (dup, same local, or
    // equivalent locals): identical unless it could be a NaN double,
    // since NaN !== NaN.
    if (l1 == StackDupId ||
        (l1 == l2 && l1 != NoLocalId) ||
        (l1 <= MaxLocalId && l2 <= MaxLocalId && locsAreEquiv(env, l1, l2))) {
      if (!t1.couldBe(BDbl) || !t2.couldBe(BDbl) ||
          (v1 && (v1->m_type != KindOfDouble || !std::isnan(v1->m_data.dbl))) ||
          (v2 && (v2->m_type != KindOfDouble || !std::isnan(v2->m_data.dbl)))) {
        return NSame ? TFalse : TTrue;
      }
    }

    if (v1 && v2) {
      if (auto r = eval_cell_value([&]{ return tvSame(*v2, *v1); })) {
        // we wouldn't get here if tvSame raised a warning
        warningsEnabled = false;
        return r != NSame ? TTrue : TFalse;
      }
    }

    return NSame ? typeNSame(t1, t2) : typeSame(t1, t2);
  }();

  // A definite "not identical" answer can't have raised a warning.
  if (warningsEnabled && result == (NSame ? TFalse : TTrue)) {
    warningsEnabled = false;
  }
  return { result, warningsEnabled && compare_might_raise(t1, t2) };
}
// Shared implementation for Same (Negate=false) and NSame (Negate=true).
// First tries to strength-reduce comparisons against a literal null into
// IsTypeC/IsTypeL null checks; otherwise falls back to resolveSame.
template<bool Negate>
void sameImpl(ISS& env) {
  if (auto const last = last_op(env)) {
    // "<expr> === null"  =>  IsTypeC Null (plus Not for NSame).
    if (last->op == Op::Null) {
      rewind(env, 1);
      reduce(env, bc::IsTypeC { IsTypeOp::Null });
      if (Negate) reduce(env, bc::Not {});
      return;
    }
    // "null === $x" spelled as Null; CGet*  =>  IsTypeL Null on the local.
    if (auto const prev = last_op(env, 1)) {
      if (prev->op == Op::Null &&
          (last->op == Op::CGetL || last->op == Op::CGetL2 ||
           last->op == Op::CGetQuietL)) {
        auto const loc = [&]() {
          if (last->op == Op::CGetL) {
            return last->CGetL.nloc1;
          } else if (last->op == Op::CGetL2) {
            return last->CGetL2.nloc1;
          } else if (last->op == Op::CGetQuietL) {
            return NamedLocal{kInvalidLocalName, last->CGetQuietL.loc1};
          }
          always_assert(false);
        }();
        rewind(env, 2);
        reduce(env, bc::IsTypeL { loc, IsTypeOp::Null });
        if (Negate) reduce(env, bc::Not {});
        return;
      }
    }
  }

  auto pair = resolveSame<Negate>(env);
  discard(env, 2);

  // pair.second is "might raise"; when it can't, the op is nothrow and a
  // constant-foldable candidate.
  if (!pair.second) {
    nothrow(env);
    constprop(env);
  }

  push(env, std::move(pair.first));
}
// Fuse a Same/NSame followed by a conditional jump: refine the types (and
// local equivalences) of the compared values along both branch outcomes.
// Returns true if the fusion was performed.
template<class JmpOp>
bool sameJmpImpl(ISS& env, Op sameOp, const JmpOp& jmp) {
  const StackElem* elems[2];
  env.state.stack.peek(2, elems, 1);

  auto const loc0 = elems[1]->equivLoc;
  auto const loc1 = elems[0]->equivLoc;
  // If loc0 == loc1, either they're both NoLocalId, so there's
  // nothing for us to deduce, or both stack elements are the same
  // value, so the only thing we could deduce is that they are or are
  // not NaN. But we don't track that, so just bail.
  if (loc0 == loc1 || loc0 == StackDupId) return false;

  auto const ty0 = elems[1]->type;
  auto const ty1 = elems[0]->type;
  auto const val0 = tv(ty0);
  auto const val1 = tv(ty1);

  assertx(!val0 || !val1);
  // Nothing to learn if one side has no local and its type already
  // subsumes the other side's.
  if ((loc0 == NoLocalId && !val0 && ty1.subtypeOf(ty0)) ||
      (loc1 == NoLocalId && !val1 && ty0.subtypeOf(ty1))) {
    return false;
  }

  // Same currently lies about the distinction between Func/Cls/Str
  if (ty0.couldBe(BCls) && ty1.couldBe(BStr)) return false;
  if (ty1.couldBe(BCls) && ty0.couldBe(BStr)) return false;
  if (ty0.couldBe(BLazyCls) && ty1.couldBe(BStr)) return false;
  if (ty1.couldBe(BLazyCls) && ty0.couldBe(BStr)) return false;

  auto isect = intersection_of(ty0, ty1);

  // Unfortunately, floating point negative zero and positive zero are
  // different, but are identical as far as Same is concerned. We should
  // avoid refining a value to 0.0 because it compares identically to
  // -0.0; widen any zero-containing double intersection to TDbl instead.
  if (isect.couldBe(dval(0.0)) || isect.couldBe(dval(-0.0))) {
    isect = union_of(isect, TDbl);
  }

  discard(env, 1);

  auto handle_same = [&] {
    // Currently dce uses equivalency to prove that something isn't
    // the last reference - so we can only assert equivalency here if
    // we know that won't be affected. Its irrelevant for uncounted
    // things, and for TObj and TRes, $x === $y iff $x and $y refer to
    // the same thing.
    if (loc0 <= MaxLocalId &&
        (ty0.subtypeOf(BObj | BRes | BPrim) ||
         ty1.subtypeOf(BObj | BRes | BPrim) ||
         (ty0.subtypeOf(BUnc) && ty1.subtypeOf(BUnc)))) {
      if (loc1 == StackDupId) {
        setStkLocal(env, loc0, 0);
      } else if (loc1 <= MaxLocalId && !locsAreEquiv(env, loc0, loc1)) {
        // Merge loc0's whole equivalence chain into loc1's class.
        auto loc = loc0;
        while (true) {
          auto const other = findLocEquiv(env, loc);
          if (other == NoLocalId) break;
          killLocEquiv(env, loc);
          addLocEquiv(env, loc, loc1);
          loc = other;
        }
        addLocEquiv(env, loc, loc1);
      }
    }
    return refineLocation(env, loc1 != NoLocalId ? loc1 : loc0, [&] (Type ty) {
      auto const needsUninit =
        ty.couldBe(BUninit) &&
        !isect.couldBe(BUninit) &&
        isect.couldBe(BInitNull);
      auto ret = ty.subtypeOf(BUnc) ? isect : loosen_staticness(isect);
      if (needsUninit) ret = union_of(std::move(ret), TUninit);
      return ret;
    });
  };

  // On the "not identical" branch we can only learn something when one
  // side is null or a known bool.
  auto handle_differ_side = [&] (LocalId location, const Type& ty) {
    if (!ty.subtypeOf(BInitNull) && !ty.strictSubtypeOf(TBool)) return true;
    return refineLocation(env, location, [&] (Type t) {
      if (ty.subtypeOf(BNull)) {
        t = remove_uninit(std::move(t));
        if (t.couldBe(BInitNull) && !t.subtypeOf(BInitNull)) {
          t = unopt(std::move(t));
        }
        return t;
      } else if (ty.strictSubtypeOf(TBool) && t.subtypeOf(BBool)) {
        return ty == TFalse ? TTrue : TFalse;
      }
      return t;
    });
  };

  auto handle_differ = [&] {
    return
      (loc0 == NoLocalId || handle_differ_side(loc0, ty1)) &&
      (loc1 == NoLocalId || handle_differ_side(loc1, ty0));
  };

  auto const sameIsJmpTarget =
    (sameOp == Op::Same) == (JmpOp::op == Op::JmpNZ);

  auto save = env.state;
  auto const target_reachable = sameIsJmpTarget ?
    handle_same() : handle_differ();
  if (!target_reachable) jmp_nevertaken(env);
  // swap, so we can restore this state if the branch is always taken.
  env.state.swap(save);
  if (!(sameIsJmpTarget ? handle_differ() : handle_same())) {
    jmp_setdest(env, jmp.target1);
    env.state.copy_from(std::move(save));
  } else if (target_reachable) {
    env.propagate(jmp.target1, &save);
  }

  return true;
}
// Flip a conditional jump's sense; used when folding a preceding Not.
bc::JmpNZ invertJmp(const bc::JmpZ& jmp) { return bc::JmpNZ { jmp.target1 }; }
bc::JmpZ invertJmp(const bc::JmpNZ& jmp) { return bc::JmpZ { jmp.target1 }; }

void in(ISS& env, const bc::Same&) { sameImpl<false>(env); }
void in(ISS& env, const bc::NSame&) { sameImpl<true>(env); }
// Shared implementation for boolean-valued comparisons: constant-fold when
// both operands are known (and fun doesn't raise), else push TBool.
// Operand order: t1 is top of stack (rhs), t2 the element under it (lhs).
template<class Fun>
void cmpImpl(ISS& env, Fun fun) {
  auto const t1 = popC(env);
  auto const t2 = popC(env);
  auto const v1 = tv(t1);
  auto const v2 = tv(t2);
  if (v1 && v2) {
    if (auto r = eval_cell_value([&]{ return fun(*v2, *v1); })) {
      constprop(env);
      return push(env, *r ? TTrue : TFalse);
    }
  }
  // TODO_4: evaluate when these can throw, non-constant type stuff.
  push(env, TBool);
}
1433 namespace {
// True if t could be a value that compares like a string (classes and
// lazy classes participate in string-ish comparisons).
bool couldBeStringish(const Type& t) {
  return t.couldBe(BCls | BLazyCls | BStr);
}

// Whether values of these two types could ever compare loosely equal.
bool everEq(const Type& t1, const Type& t2) {
  // for comparison purposes we need to be careful about these coercions
  if (couldBeStringish(t1) && couldBeStringish(t2)) return true;
  return loosen_all(t1).couldBe(loosen_all(t2));
}

// Whether a relational comparison between these two types must throw.
bool cmpWillThrow(const Type& t1, const Type& t2) {
  // for comparison purposes we need to be careful about these coercions
  if (couldBeStringish(t1) && couldBeStringish(t2)) return false;

  auto couldBeIntAndDbl = [](const Type& t1, const Type& t2) {
    return t1.couldBe(BInt) && t2.couldBe(BDbl);
  };
  // relational comparisons allow for int v dbl
  if (couldBeIntAndDbl(t1, t2) || couldBeIntAndDbl(t2, t1)) return false;

  return !loosen_to_datatype(t1).couldBe(loosen_to_datatype(t2));
}
// Shared implementation for Eq/Neq: identical values are loosely equal;
// types that can never be loosely equal give a definite answer; otherwise
// fall back to per-value tvEqual via cmpImpl.
void eqImpl(ISS& env, bool eq) {
  auto rs = resolveSame<false>(env);
  if (rs.first == TTrue) {
    if (!rs.second) constprop(env);
    discard(env, 2);
    return push(env, eq ? TTrue : TFalse);
  }

  if (!everEq(topC(env, 0), topC(env, 1))) {
    discard(env, 2);
    return push(env, eq ? TFalse : TTrue);
  }

  cmpImpl(env, [&] (TypedValue c1, TypedValue c2) {
    return tvEqual(c1, c2) == eq;
  });
}

// If a relational comparison of t1/t2 must throw, consume the operands,
// push Bottom, and mark the state unreachable. Returns true in that case.
bool cmpThrowCheck(ISS& env, const Type& t1, const Type& t2) {
  if (!cmpWillThrow(t1, t2)) return false;
  discard(env, 2);
  push(env, TBottom);
  unreachable(env);
  return true;
}
void in(ISS& env, const bc::Eq&) { eqImpl(env, true); }
void in(ISS& env, const bc::Neq&) { eqImpl(env, false); }

// Relational comparisons: first bail out (unreachable) if the comparison
// must throw, then constant-fold via cmpImpl where possible. Note the
// argument order: topC(env, 0) is the rhs, topC(env, 1) the lhs.
void in(ISS& env, const bc::Lt&) {
  if (cmpThrowCheck(env, topC(env, 0), topC(env, 1))) return;
  cmpImpl(env, static_cast<bool (*)(TypedValue, TypedValue)>(tvLess));
}
void in(ISS& env, const bc::Gt&) {
  if (cmpThrowCheck(env, topC(env, 0), topC(env, 1))) return;
  cmpImpl(env, static_cast<bool (*)(TypedValue, TypedValue)>(tvGreater));
}
void in(ISS& env, const bc::Lte&) {
  if (cmpThrowCheck(env, topC(env, 0), topC(env, 1))) return;
  cmpImpl(env, tvLessOrEqual);
}
void in(ISS& env, const bc::Gte&) {
  if (cmpThrowCheck(env, topC(env, 0), topC(env, 1))) return;
  cmpImpl(env, tvGreaterOrEqual);
}

// Spaceship: result is an int; only constant-folds when both operand
// types are the same and both values are known.
void in(ISS& env, const bc::Cmp&) {
  auto const t1 = topC(env, 0);
  auto const t2 = topC(env, 1);
  if (cmpThrowCheck(env, t1, t2)) return;
  discard(env, 2);
  if (t1 == t2) {
    auto const v1 = tv(t1);
    auto const v2 = tv(t2);
    if (v1 && v2) {
      if (auto r = eval_cell_value([&]{ return ival(tvCompare(*v2, *v1)); })) {
        constprop(env);
        return push(env, std::move(*r));
      }
    }
  }
  // TODO_4: evaluate when these can throw, non-constant type stuff.
  push(env, TInt);
}
1525 void castBoolImpl(ISS& env, const Type& t, bool negate) {
1526 nothrow(env);
1527 constprop(env);
1529 auto const e = emptiness(t);
1530 switch (e) {
1531 case Emptiness::Empty:
1532 case Emptiness::NonEmpty:
1533 return push(env, (e == Emptiness::Empty) == negate ? TTrue : TFalse);
1534 case Emptiness::Maybe:
1535 break;
1538 push(env, TBool);
// Not is CastBool with the result negated.
void in(ISS& env, const bc::Not&) {
  castBoolImpl(env, popC(env), true);
}

void in(ISS& env, const bc::CastBool&) {
  auto const t = topC(env);
  // Already a bool: the cast is a no-op.
  if (t.subtypeOf(BBool)) return reduce(env);
  castBoolImpl(env, popC(env), false);
}

void in(ISS& env, const bc::CastInt&) {
  auto const t = topC(env);
  // Already an int: the cast is a no-op.
  if (t.subtypeOf(BInt)) return reduce(env);
  constprop(env);
  popC(env);
  // Objects can raise a warning about converting to int.
  if (!t.couldBe(BObj)) nothrow(env);
  if (auto const v = tv(t)) {
    auto cell = eval_cell([&] {
      return make_tv<KindOfInt64>(tvToInt(*v));
    });
    if (cell) return push(env, std::move(*cell));
  }
  push(env, TInt);
}
// Handle a casting operation, where "target" is the type being casted to. If
// "fn" is provided, it will be called to cast any constant inputs. If the
// source type is already a subtype of the target, the cast is elided
// entirely (reduced to nothing).
void castImpl(ISS& env, Type target, void(*fn)(TypedValue*)) {
  auto const t = topC(env);
  if (t.subtypeOf(target)) return reduce(env);
  popC(env);

  // Constant-fold: run the runtime cast on the known value; eval_cell
  // yields nothing if the cast raises.
  if (fn) {
    if (auto val = tv(t)) {
      if (auto result = eval_cell([&] { fn(&*val); return *val; })) {
        constprop(env);
        target = *result;
      }
    }
  }
  push(env, std::move(target));
}
// Each cast bytecode pairs its target type with the matching in-place
// runtime cast for constant folding.
void in(ISS& env, const bc::CastDouble&) {
  castImpl(env, TDbl, tvCastToDoubleInPlace);
}

void in(ISS& env, const bc::CastString&) {
  castImpl(env, TStr, tvCastToStringInPlace);
}

void in(ISS& env, const bc::CastDict&) {
  castImpl(env, TDict, tvCastToDictInPlace);
}

void in(ISS& env, const bc::CastVec&) {
  castImpl(env, TVec, tvCastToVecInPlace);
}

void in(ISS& env, const bc::CastKeyset&) {
  castImpl(env, TKeyset, tvCastToKeysetInPlace);
}
void in(ISS& env, const bc::DblAsBits& /*op*/) {
  effect_free(env);
  constprop(env);

  auto const ty = popC(env);
  // Non-double inputs produce 0.
  if (!ty.couldBe(BDbl)) return push(env, ival(0));

  if (auto val = tv(ty)) {
    assertx(isDoubleType(val->m_type));
    // Reinterpret the double's bit pattern as an int64 by retagging the
    // cell (the payload bytes are shared in a TypedValue).
    val->m_type = KindOfInt64;
    push(env, from_cell(*val));
    return;
  }
  push(env, TInt);
}
// print always evaluates to the integer 1.
void in(ISS& env, const bc::Print& /*op*/) {
  popC(env);
  push(env, ival(1));
}
1629 void in(ISS& env, const bc::Clone& /*op*/) {
1630 auto val = popC(env);
1631 if (!val.subtypeOf(BObj)) {
1632 val &= TObj;
1633 if (val.is(BBottom)) unreachable(env);
1635 push(env, std::move(val));
// exit() evaluates to null from the bytecode's perspective.
void in(ISS& env, const bc::Exit&) { popC(env); push(env, TInitNull); }
void in(ISS& env, const bc::Fatal&) { popC(env); }

// Unconditional jumps are represented by block structure in HHBBC, so
// these opcodes must never appear inside a block being interpreted.
void in(ISS& /*env*/, const bc::JmpNS&) {
  always_assert(0 && "blocks should not contain JmpNS instructions");
}

void in(ISS& /*env*/, const bc::Jmp&) {
  always_assert(0 && "blocks should not contain Jmp instructions");
}
1649 void in(ISS& env, const bc::Select& op) {
1650 auto const cond = topC(env);
1651 auto const t = topC(env, 1);
1652 auto const f = topC(env, 2);
1654 effect_free(env);
1655 constprop(env);
1657 switch (emptiness(cond)) {
1658 case Emptiness::Maybe:
1659 discard(env, 3);
1660 push(env, union_of(t, f));
1661 return;
1662 case Emptiness::NonEmpty:
1663 discard(env, 3);
1664 push(env, t);
1665 return;
1666 case Emptiness::Empty:
1667 return reduce(env, bc::PopC {}, bc::PopC {});
1669 not_reached();
1672 namespace {
// Fuse an is-type test (IsTypeC/IsTypeL/IssetL) with a following
// conditional jump, refining the tested location's type along both branch
// outcomes. Returns true if the fusion was performed.
template<class JmpOp>
bool isTypeHelper(ISS& env,
                  IsTypeOp typeOp,
                  LocalId location,
                  Op op,
                  const JmpOp& jmp) {
  // These test ops don't correspond to a single refinable type.
  if (typeOp == IsTypeOp::Scalar || typeOp == IsTypeOp::LegacyArrLike ||
      typeOp == IsTypeOp::Func) {
    return false;
  }

  auto const val = [&] {
    if (op != Op::IsTypeC) return locRaw(env, location);
    const StackElem* elem;
    env.state.stack.peek(1, &elem, 1);
    location = elem->equivLoc;
    return elem->type;
  }();

  if (location == NoLocalId || !val.subtypeOf(BCell)) return false;

  // If the type could be ClsMeth and Arr/Vec, skip location refining.
  // Otherwise, refine location based on the testType.
  auto testTy = type_of_istype(typeOp);

  assertx(val.couldBe(testTy) &&
          (!val.subtypeOf(testTy) || val.subtypeOf(BObj)));

  discard(env, 1);

  if (op == Op::IsTypeC) {
    if (!is_type_might_raise(testTy, val)) nothrow(env);
  } else if (op == Op::IssetL) {
    nothrow(env);
  } else if (!locCouldBeUninit(env, location) &&
             !is_type_might_raise(testTy, val)) {
    nothrow(env);
  }

  // IssetL inverts the sense relative to IsType* (isset is "not null").
  auto const negate = (jmp.op == Op::JmpNZ) == (op != Op::IssetL);
  auto const was_true = [&] (Type t) {
    if (testTy.subtypeOf(BNull)) return intersection_of(t, TNull);
    assertx(!testTy.couldBe(BNull));
    return intersection_of(t, testTy);
  };
  auto const was_false = [&] (Type t) {
    auto tinit = remove_uninit(t);
    if (testTy.subtypeOf(BNull)) {
      return (tinit.couldBe(BInitNull) && !tinit.subtypeOf(BInitNull))
        ? unopt(std::move(tinit)) : tinit;
    }
    if (t.couldBe(BInitNull) && !t.subtypeOf(BInitNull)) {
      assertx(!testTy.couldBe(BNull));
      // Failing the test on an optional type whose non-null part always
      // passes means the value must have been null.
      if (unopt(tinit).subtypeOf(testTy)) return TNull;
    }
    return t;
  };

  auto const pre = [&] (Type t) {
    return negate ? was_true(std::move(t)) : was_false(std::move(t));
  };

  auto const post = [&] (Type t) {
    return negate ? was_false(std::move(t)) : was_true(std::move(t));
  };

  refineLocation(env, location, pre, jmp.target1, post);
  return true;
}
// If the current function is a memoize wrapper, return the inferred return
// type of the function being wrapped along with whether the wrapped
// function is effect free.
std::pair<Type, bool> memoizeImplRetType(ISS& env) {
  always_assert(env.ctx.func->isMemoizeWrapper);

  // Lookup the wrapped function. This should always resolve to a precise
  // function but we don't rely on it.
  auto const memo_impl_func = [&] {
    if (env.ctx.func->cls) {
      auto const clsTy = selfClsExact(env);
      return env.index.resolve_method(
        env.ctx,
        clsTy ? *clsTy : TCls,
        memoize_impl_name(env.ctx.func)
      );
    }
    return env.index.resolve_func(env.ctx, memoize_impl_name(env.ctx.func));
  }();

  // Infer the return type of the wrapped function, taking into account the
  // types of the parameters for context sensitive types.
  auto const numArgs = env.ctx.func->params.size();
  CompactVector<Type> args{numArgs};
  for (auto i = LocalId{0}; i < numArgs; ++i) {
    args[i] = locAsCell(env, i);
  }

  // Determine the context the wrapped function will be called on.
  auto const ctxType = [&]() -> Type {
    if (env.ctx.func->cls) {
      if (env.ctx.func->attrs & AttrStatic) {
        // The class context for static methods is the method's class,
        // if LSB is not specified.
        auto const clsTy =
          env.ctx.func->isMemoizeWrapperLSB ?
          selfCls(env) :
          selfClsExact(env);
        return clsTy ? *clsTy : TCls;
      } else {
        return thisTypeNonNull(env);
      }
    }
    return TBottom;
  }();

  auto retTy = env.index.lookup_return_type(
    env.ctx,
    &env.collect.methods,
    args,
    ctxType,
    memo_impl_func
  );
  auto const effectFree = env.index.is_effect_free(memo_impl_func);
  // Regardless of anything we know the return type will be an InitCell (this is
  // a requirement of memoize functions).
  if (!retTy.subtypeOf(BInitCell)) return { TInitCell, effectFree };
  return { retTy, effectFree };
}
// Fuse an InstanceOfD with a following conditional jump, refining the
// tested location to the class type (or to null on a failing optional
// check). Returns true if the fusion was performed.
template<class JmpOp>
bool instanceOfJmpImpl(ISS& env,
                       const bc::InstanceOfD& inst,
                       const JmpOp& jmp) {

  const StackElem* elem;
  env.state.stack.peek(1, &elem, 1);

  auto const locId = elem->equivLoc;
  if (locId == NoLocalId || interface_supports_non_objects(inst.str1)) {
    return false;
  }
  auto const rcls = env.index.resolve_class(env.ctx, inst.str1);
  if (!rcls) return false;

  auto const val = elem->type;
  auto const instTy = subObj(*rcls);
  assertx(!val.subtypeOf(instTy) && val.couldBe(instTy));

  // If we have an optional type, whose unopt is guaranteed to pass
  // the instanceof check, then failing to pass implies it was null.
  auto const fail_implies_null =
    val.couldBe(BInitNull) &&
    !val.subtypeOf(BInitNull) &&
    unopt(val).subtypeOf(instTy);

  discard(env, 1);
  auto const negate = jmp.op == Op::JmpNZ;
  auto const result = [&] (Type t, bool pass) {
    return pass ? instTy : fail_implies_null ? TNull : t;
  };
  auto const pre = [&] (Type t) { return result(t, negate); };
  auto const post = [&] (Type t) { return result(t, !negate); };
  refineLocation(env, locId, pre, jmp.target1, post);
  return true;
}
// Fuse an IsTypeStructC (with a statically-known, class-like type
// structure) with a following conditional jump, performing the same
// location refinement InstanceOfD would. Returns true if fused.
template<class JmpOp>
bool isTypeStructCJmpImpl(ISS& env,
                          const bc::IsTypeStructC& inst,
                          const JmpOp& jmp) {

  const StackElem* elems[2];
  env.state.stack.peek(2, elems, 1);

  auto const locId = elems[0]->equivLoc;
  if (locId == NoLocalId) return false;

  // The type structure (top of stack) must be a known constant array.
  auto const a = tv(elems[1]->type);
  if (!a) return false;
  // if it wasn't valid, the JmpOp wouldn't be reachable
  assertx(isValidTSType(*a, false));

  auto const is_nullable_ts = is_ts_nullable(a->m_data.parr);
  auto const ts_kind = get_ts_kind(a->m_data.parr);
  // type_of_type_structure does not resolve these types. It is important we
  // do resolve them here, or we may have issues when we reduce the checks to
  // InstanceOfD checks. This logic performs the same exact refinement as
  // instanceOfD will.
  if (is_nullable_ts ||
      (ts_kind != TypeStructure::Kind::T_class &&
       ts_kind != TypeStructure::Kind::T_interface &&
       ts_kind != TypeStructure::Kind::T_xhp &&
       ts_kind != TypeStructure::Kind::T_unresolved)) {
    return false;
  }

  auto const clsName = get_ts_classname(a->m_data.parr);
  auto const rcls = env.index.resolve_class(env.ctx, clsName);
  if (!rcls ||
      !rcls->resolved() ||
      rcls->cls()->attrs & AttrEnum ||
      interface_supports_non_objects(clsName)) {
    return false;
  }

  auto const val = elems[0]->type;
  auto const instTy = subObj(*rcls);
  if (val.subtypeOf(instTy) || !val.couldBe(instTy)) {
    return false;
  }

  // If we have an optional type, whose unopt is guaranteed to pass
  // the instanceof check, then failing to pass implies it was null.
  auto const fail_implies_null =
    val.couldBe(BInitNull) &&
    !val.subtypeOf(BInitNull) &&
    unopt(val).subtypeOf(instTy);

  discard(env, 1);

  auto const negate = jmp.op == Op::JmpNZ;
  auto const result = [&] (Type t, bool pass) {
    return pass ? instTy : fail_implies_null ? TNull : t;
  };
  auto const pre = [&] (Type t) { return result(t, negate); };
  auto const post = [&] (Type t) { return result(t, !negate); };
  refineLocation(env, locId, pre, jmp.target1, post);
  return true;
}
// Shared implementation for JmpZ/JmpNZ: fold statically-known branches,
// fuse with a preceding test instruction where possible, and otherwise
// refine the branched-on location's emptiness along both outcomes.
template<class JmpOp>
void jmpImpl(ISS& env, const JmpOp& op) {
  auto const Negate = std::is_same<JmpOp, bc::JmpNZ>::value;
  auto const location = topStkEquiv(env);
  auto const e = emptiness(topC(env));
  // Branch always taken: just pop the condition and jump.
  if (e == (Negate ? Emptiness::NonEmpty : Emptiness::Empty)) {
    reduce(env, bc::PopC {});
    return jmp_setdest(env, op.target1);
  }

  // Branch never taken, or both edges lead to the same real block: the
  // jump is a no-op beyond popping the condition.
  if (e == (Negate ? Emptiness::Empty : Emptiness::NonEmpty) ||
      (next_real_block(env.ctx.func, env.blk.fallthrough) ==
       next_real_block(env.ctx.func, op.target1))) {
    return reduce(env, bc::PopC{});
  }

  // After a fused helper resolves the jump to one destination, rewrite
  // the jump itself into a PopC while preserving the chosen destination.
  auto fix = [&] {
    if (env.flags.jmpDest == NoBlockId) return;
    auto const jmpDest = env.flags.jmpDest;
    env.flags.jmpDest = NoBlockId;
    rewind(env, op);
    reduce(env, bc::PopC {});
    env.flags.jmpDest = jmpDest;
  };

  if (auto const last = last_op(env)) {
    if (last->op == Op::Not) {
      rewind(env, 1);
      return reduce(env, invertJmp(op));
    }
    if (last->op == Op::Same || last->op == Op::NSame) {
      if (sameJmpImpl(env, last->op, op)) return fix();
    } else if (last->op == Op::IssetL) {
      if (isTypeHelper(env,
                       IsTypeOp::Null,
                       last->IssetL.loc1,
                       last->op,
                       op)) {
        return fix();
      }
    } else if (last->op == Op::IsTypeL) {
      if (isTypeHelper(env,
                       last->IsTypeL.subop2,
                       last->IsTypeL.nloc1.id,
                       last->op,
                       op)) {
        return fix();
      }
    } else if (last->op == Op::IsTypeC) {
      if (isTypeHelper(env,
                       last->IsTypeC.subop1,
                       NoLocalId,
                       last->op,
                       op)) {
        return fix();
      }
    } else if (last->op == Op::InstanceOfD) {
      if (instanceOfJmpImpl(env, last->InstanceOfD, op)) return fix();
    } else if (last->op == Op::IsTypeStructC) {
      if (isTypeStructCJmpImpl(env, last->IsTypeStructC, op)) return fix();
    }
  }

  popC(env);
  effect_free(env);

  if (location == NoLocalId) return env.propagate(op.target1, &env.state);

  refineLocation(env, location,
                 Negate ? assert_nonemptiness : assert_emptiness,
                 op.target1,
                 Negate ? assert_emptiness : assert_nonemptiness);
  return fix();
}
1980 } // namespace
void in(ISS& env, const bc::JmpNZ& op) { jmpImpl(env, op); }
void in(ISS& env, const bc::JmpZ& op) { jmpImpl(env, op); }

// Switch: resolve to a single target when the scrutinee is a known int,
// otherwise propagate state to every reachable target.
void in(ISS& env, const bc::Switch& op) {
  const auto t = topC(env);
  const auto v = tv(t);

  // Unknown target: all edges are possible.
  auto bail = [&] {
    popC(env);
    forEachTakenEdge(op, [&] (BlockId id) {
      env.propagate(id, &env.state);
    });
  };

  // Known target: reduce to a plain pop and an unconditional jump.
  auto go = [&] (BlockId blk) {
    reduce(env, bc::PopC {});
    return jmp_setdest(env, blk);
  };

  if (!t.couldBe(BInt)) {
    if (op.subop1 == SwitchKind::Unbounded) return bail();
    // Bounded switches send non-ints to the default (last) target.
    return go(op.targets.back());
  }

  if (!v) return bail();

  auto num_elems = op.targets.size();
  if (op.subop1 == SwitchKind::Unbounded) {
    if (v->m_data.num < 0 || v->m_data.num >= num_elems) return bail();
    return go(op.targets[v->m_data.num]);
  }

  // Bounded: the last two targets are the out-of-range/default cases.
  assertx(num_elems > 2);
  num_elems -= 2;
  auto const i = v->m_data.num - op.arg2;
  return i >= 0 && i < num_elems ? go(op.targets[i]) : go(op.targets.back());
}
// String switch: non-stringish scrutinees take the default (last) target;
// a known string value is matched against the case literals in order.
void in(ISS& env, const bc::SSwitch& op) {
  const auto t = topC(env);
  const auto v = tv(t);

  if (!couldBeStringish(t)) {
    reduce(env, bc::PopC {});
    return jmp_setdest(env, op.targets.back().second);
  }

  if (v) {
    for (auto& kv : op.targets) {
      // A null key is the default entry and always matches.
      auto match = eval_cell_value([&] {
        if (!kv.first) return true;
        return v->m_data.pstr->equal(kv.first);
      });

      // eval_cell_value failed (could raise): fall through to bail.
      if (!match) break;
      if (*match) {
        reduce(env, bc::PopC {});
        return jmp_setdest(env, kv.second);
      }
    }
  }

  popC(env);
  forEachTakenEdge(op, [&] (BlockId id) {
    env.propagate(id, &env.state);
  });
}
void in(ISS& env, const bc::RetC& /*op*/) {
  auto const locEquiv = topStkLocal(env);
  doRet(env, popC(env), false);
  // Record when the function returns one of its parameters unchanged.
  if (locEquiv != NoLocalId && locEquiv < env.ctx.func->params.size()) {
    env.flags.retParam = locEquiv;
  }
}

// Multi-value return: pop arg1 values (top of stack is the last element)
// and return them as a vec.
void in(ISS& env, const bc::RetM& op) {
  std::vector<Type> ret(op.arg1);
  for (int i = 0; i < op.arg1; i++) {
    ret[op.arg1 - i - 1] = popC(env);
  }
  doRet(env, vec(std::move(ret)), false);
}

// Return of an already-suspended value from an async function: the result
// is the wait handle's inner type when we know it.
void in(ISS& env, const bc::RetCSuspended&) {
  always_assert(env.ctx.func->isAsync && !env.ctx.func->isGenerator);

  auto const t = popC(env);
  doRet(
    env,
    is_specialized_wait_handle(t) ? wait_handle_inner(t) : TInitCell,
    false
  );
}

void in(ISS& env, const bc::Throw& /*op*/) {
  popC(env);
}

void in(ISS& env, const bc::ThrowNonExhaustiveSwitch& /*op*/) {}

void in(ISS& env, const bc::RaiseClassStringConversionWarning& /*op*/) {}

void in(ISS& env, const bc::ChainFaults&) {
  popC(env);
}
// NativeImpl transfers control to the builtin implementation; the return
// type comes from special-cased collection methods, the native signature,
// or falls back to InitCell.
void in(ISS& env, const bc::NativeImpl&) {
  killLocals(env);

  if (is_collection_method_returning_this(env.ctx.cls, env.ctx.func)) {
    auto const resCls = env.index.builtin_class(env.ctx.cls->name);
    return doRet(env, objExact(resCls), true);
  }

  if (env.ctx.func->nativeInfo) {
    return doRet(env, native_function_return_type(env.ctx.func), true);
  }
  doRet(env, TInitCell, true);
}
void in(ISS& env, const bc::CGetL& op) {
  // Reading $this reduces to BareThis with the weakest notice semantics
  // its nullability allows.
  if (locIsThis(env, op.nloc1.id)) {
    auto const& ty = peekLocRaw(env, op.nloc1.id);
    if (!ty.subtypeOf(BInitNull)) {
      auto const subop = ty.couldBe(BUninit) ?
        BareThisOp::Notice : ty.couldBe(BNull) ?
        BareThisOp::NoNotice : BareThisOp::NeverNull;
      return reduce(env, bc::BareThis { subop });
    }
  }
  // PopL $x; CGetL $x  =>  SetL $x (value stays on the stack).
  if (auto const last = last_op(env)) {
    if (last->op == Op::PopL &&
        op.nloc1.id == last->PopL.loc1) {
      reprocess(env);
      rewind(env, 1);
      setLocRaw(env, op.nloc1.id, TCell);
      return reduce(env, bc::SetL { op.nloc1.id });
    }
  }
  // A definitely-initialized local can use the quiet (no-notice) get,
  // preferring the smallest equivalent local id.
  if (!peekLocCouldBeUninit(env, op.nloc1.id)) {
    auto const minLocEquiv = findMinLocEquiv(env, op.nloc1.id, false);
    auto const loc = minLocEquiv != NoLocalId ? minLocEquiv : op.nloc1.id;
    return reduce(env, bc::CGetQuietL { loc });
  }
  mayReadLocal(env, op.nloc1.id);
  push(env, locAsCell(env, op.nloc1.id), op.nloc1.id);
}
void in(ISS& env, const bc::CGetQuietL& op) {
  if (locIsThis(env, op.loc1)) {
    return reduce(env, bc::BareThis { BareThisOp::NoNotice });
  }
  // PopL $x; CGetQuietL $x  =>  SetL $x (value stays on the stack).
  if (auto const last = last_op(env)) {
    if (last->op == Op::PopL &&
        op.loc1 == last->PopL.loc1) {
      reprocess(env);
      rewind(env, 1);
      setLocRaw(env, op.loc1, TCell);
      return reduce(env, bc::SetL { op.loc1 });
    }
  }
  // Canonicalize to the smallest equivalent local id.
  auto const minLocEquiv = findMinLocEquiv(env, op.loc1, true);
  if (minLocEquiv != NoLocalId) {
    return reduce(env, bc::CGetQuietL { minLocEquiv });
  }

  effect_free(env);
  constprop(env);
  mayReadLocal(env, op.loc1);
  push(env, locAsCell(env, op.loc1), op.loc1);
}

// Read a local without an initialization check (may push Uninit).
void in(ISS& env, const bc::CUGetL& op) {
  auto ty = locRaw(env, op.loc1);
  effect_free(env);
  constprop(env);
  push(env, std::move(ty), op.loc1);
}
// PushL reads a local onto the stack and unsets it.
void in(ISS& env, const bc::PushL& op) {
  // Prefer reading through the smallest equivalent local.
  auto const minLocEquiv = findMinLocEquiv(env, op.loc1, false);
  if (minLocEquiv != NoLocalId) {
    return reduce(env, bc::CGetQuietL { minLocEquiv }, bc::UnsetL { op.loc1 });
  }

  if (auto const last = last_op(env)) {
    if (last->op == Op::PopL &&
        last->PopL.loc1 == op.loc1) {
      // rewind is ok, because we're just going to unset the local
      // (and note the unset can't be a no-op because the PopL set it
      // to an InitCell). But its possible that before the PopL, the
      // local *was* unset, so maybe would have killed the no-op. The
      // only way to fix that is to reprocess the block with the new
      // instruction sequence and see what happens.
      reprocess(env);
      rewind(env, 1);
      return reduce(env, bc::UnsetL { op.loc1 });
    }
  }

  // Known value: unset the local and push the constant directly.
  if (auto val = tv(peekLocRaw(env, op.loc1))) {
    return reduce(env, bc::UnsetL { op.loc1 }, gen_constant(*val));
  }

  impl(env, bc::CGetQuietL { op.loc1 }, bc::UnsetL { op.loc1 });
}
// CGetL2 pushes a local *under* the current top of stack.
void in(ISS& env, const bc::CGetL2& op) {
  // If the preceding instruction pushed the top value without popping
  // anything, re-order to CGetL; <that instruction> instead.
  if (auto const last = last_op(env)) {
    if ((poppable(last->op) && !numPop(*last)) ||
        ((last->op == Op::CGetL || last->op == Op::CGetQuietL) &&
         !peekLocCouldBeUninit(env, op.nloc1.id))) {
      auto const other = *last;
      rewind(env, 1);
      return reduce(env, bc::CGetL { op.nloc1 }, other);
    }
  }

  if (!peekLocCouldBeUninit(env, op.nloc1.id)) {
    // Canonicalize to the smallest equivalent local id.
    auto const minLocEquiv = findMinLocEquiv(env, op.nloc1.id, false);
    if (minLocEquiv != NoLocalId) {
      return reduce(env, bc::CGetL2 { { kInvalidLocalName, minLocEquiv } });
    }
    effect_free(env);
  }
  mayReadLocal(env, op.nloc1.id);
  // Pop the current top, push the local, then restore the top (with its
  // stack-local equivalence preserved).
  auto loc = locAsCell(env, op.nloc1.id);
  auto topEquiv = topStkLocal(env);
  auto top = popT(env);
  push(env, std::move(loc), op.nloc1.id);
  push(env, std::move(top), topEquiv);
}

// Globals are untracked: any InitCell may come back.
void in(ISS& env, const bc::CGetG&) { popC(env); push(env, TInitCell); }
/*
 * CGetS: read a static property.  Pops the class and property name,
 * pushes the property's inferred type (or Bottom if the read must throw).
 */
void in(ISS& env, const bc::CGetS& op) {
  auto const tcls = popC(env);
  auto const tname = popC(env);

  auto const throws = [&] {
    unreachable(env);
    return push(env, TBottom);
  };

  if (!tcls.couldBe(BCls)) return throws();

  auto lookup = env.index.lookup_static(
    env.ctx,
    env.collect.props,
    tcls,
    tname
  );

  if (lookup.found == TriBool::No || lookup.ty.subtypeOf(BBottom)) {
    return throws();
  }

  // Reading a readonly property with a mutable op definitely throws.
  if (checkReadonlyOpThrows(ReadonlyOp::Mutable, op.subop1) &&
      lookup.readOnly == TriBool::Yes) {
    return throws();
  }
  auto const mightReadOnlyThrow = checkReadonlyOpMaybeThrows(ReadonlyOp::Mutable, op.subop1) &&
    (lookup.readOnly == TriBool::Yes || lookup.readOnly == TriBool::Maybe);

  // Only effect-free/constprop-able when the property definitely exists, is
  // initialized, class init can't raise, and the inputs are precise.
  if (lookup.found == TriBool::Yes &&
      lookup.lateInit == TriBool::No &&
      !lookup.classInitMightRaise &&
      !mightReadOnlyThrow &&
      tcls.subtypeOf(BCls) &&
      tname.subtypeOf(BStr)) {
    effect_free(env);
    constprop(env);
  }

  push(env, std::move(lookup.ty));
}
/*
 * ClassGetC: convert the top of stack (object/class/string/lazy class)
 * into a class.
 */
void in(ISS& env, const bc::ClassGetC& op) {
  auto const t = topC(env);

  // Already a class: the op is a no-op.
  if (t.subtypeOf(BCls)) return reduce(env, bc::Nop {});
  popC(env);

  if (!t.couldBe(BObj | BCls | BStr | BLazyCls)) {
    unreachable(env);
    push(env, TBottom);
    return;
  }

  if (t.subtypeOf(BObj)) {
    effect_free(env);
    push(env, objcls(t));
    return;
  }

  // Known class name: resolve it; only effect-free if the class is
  // guaranteed to be loaded (rcls->cls() non-null).
  if (auto const clsname = getNameFromType(t)) {
    if (auto const rcls = env.index.resolve_class(env.ctx, clsname)) {
      if (rcls->cls()) effect_free(env);
      push(env, clsExact(*rcls));
      return;
    }
  }

  push(env, TCls);
}
/*
 * ClassGetTS: pops a type structure (dict), pushes the class it denotes
 * plus an optional vec of reified generics.
 */
void in(ISS& env, const bc::ClassGetTS& op) {
  // TODO(T31677864): implement real optimizations
  auto const ts = popC(env);
  if (!ts.couldBe(BDict)) {
    push(env, TBottom);
    push(env, TBottom);
    return;
  }

  push(env, TCls);
  push(env, TOptVec);
}
/*
 * AKExists: array_key_exists on an array-like or object base.  Builds
 * the result type as a union over the possible base kinds.
 */
void in(ISS& env, const bc::AKExists&) {
  auto const base = popC(env);
  auto const [key, promotion] = promote_classlike_to_key(popC(env));

  auto result = TBottom;
  // Class-to-string key promotion may raise; start from that.
  auto effectFree = promotion != Promotion::YesMightThrow;

  // Bases that aren't object/array-like produce false (and raise).
  if (!base.subtypeOf(BObj | BArrLike)) {
    effectFree = false;
    result |= TFalse;
  }

  if (base.couldBe(BObj)) {
    effectFree = false;
    result |= TBool;
  }
  if (base.couldBe(BArrLike)) {
    auto const validKey = key.subtypeOf(BArrKey);
    if (!validKey) effectFree = false;
    if (key.couldBe(BArrKey)) {
      auto const elem =
        array_like_elem(base, validKey ? key : intersection_of(key, TArrKey));
      if (elem.first.is(BBottom)) {
        result |= TFalse;
      } else if (elem.second) {
        // elem.second: the element is definitely present.
        result |= TTrue;
      } else {
        result |= TBool;
      }
    }
  }

  if (result.is(BBottom)) {
    assertx(!effectFree);
    unreachable(env);
  }
  if (effectFree) {
    constprop(env);
    effect_free(env);
  }
  push(env, std::move(result));
}
/*
 * GetMemoKeyL: turn a local into a memoization key.  Uses the parameter's
 * type constraint (when available) to pick a cheaper key scheme, otherwise
 * falls back to the generic serialize_memoize_param behavior.
 */
void in(ISS& env, const bc::GetMemoKeyL& op) {
  auto const& func = env.ctx.func;
  // NOTE(review): 'name' appears unused in this function — confirm it isn't
  // needed (e.g. for a debugging macro) before removing.  Also note the
  // ternaries null-check 'func' while the assert below dereferences it
  // unconditionally.
  auto const name = folly::to<std::string>(
    func && func->cls ? func->cls->name->data() : "",
    func && func->cls ? "::" : "",
    func ? func->name->data() : "");
  always_assert(func->isMemoizeWrapper);

  auto const rclsIMemoizeParam = env.index.builtin_class(s_IMemoizeParam.get());
  auto const tyIMemoizeParam = subObj(rclsIMemoizeParam);

  auto const inTy = locAsCell(env, op.nloc1.id);

  // If the local could be uninit, we might raise a warning (as
  // usual). Converting an object to a memo key might invoke PHP code if it has
  // the IMemoizeParam interface, and if it doesn't, we'll throw.
  if (!locCouldBeUninit(env, op.nloc1.id) &&
      !inTy.couldBe(BObj | BVec | BDict)) {
    effect_free(env);
    constprop(env);
  }

  // If type constraints are being enforced and the local being turned into a
  // memo key is a parameter, then we can possibly using the type constraint to
  // infer a more efficient memo key mode.
  using MK = MemoKeyConstraint;
  Optional<res::Class> resolvedCls;
  auto const mkc = [&] {
    if (op.nloc1.id >= env.ctx.func->params.size()) return MK::None;
    auto tc = env.ctx.func->params[op.nloc1.id].typeConstraint;
    if (tc.type() == AnnotType::Object) {
      auto res = env.index.resolve_type_name(tc.typeName());
      if (res.type != AnnotType::Object) {
        tc.resolveType(res.type, res.nullable || tc.isNullable());
      } else {
        resolvedCls = env.index.resolve_class(env.ctx, tc.typeName());
      }
    }
    return memoKeyConstraintFromTC(tc);
  }();

  // Use the type-constraint to reduce this operation to a more efficient memo
  // mode. Some of the modes can be reduced to simple bytecode operations
  // inline. Even with the type-constraints, we still need to check the inferred
  // type of the local. Something may have possibly clobbered the local between
  // the type-check and this op.
  switch (mkc) {
    case MK::Int:
      // Always an int, so the key is always an identity mapping
      if (inTy.subtypeOf(BInt)) return reduce(env, bc::CGetL { op.nloc1 });
      break;
    case MK::Bool:
      // Always a bool, so the key is the bool cast to an int
      if (inTy.subtypeOf(BBool)) {
        return reduce(env, bc::CGetL { op.nloc1 }, bc::CastInt {});
      }
      break;
    case MK::Str:
      // Always a string, so the key is always an identity mapping
      if (inTy.subtypeOf(BStr)) return reduce(env, bc::CGetL { op.nloc1 });
      break;
    case MK::IntOrStr:
      // Either an int or string, so the key can be an identity mapping
      if (inTy.subtypeOf(BArrKey)) return reduce(env, bc::CGetL { op.nloc1 });
      break;
    case MK::StrOrNull:
      // A nullable string. The key will either be the string or the integer
      // zero.
      if (inTy.subtypeOf(BOptStr)) {
        return reduce(
          env,
          bc::CGetL { op.nloc1 },
          bc::Int { 0 },
          bc::IsTypeL { op.nloc1, IsTypeOp::Null },
          bc::Select {}
        );
      }
      break;
    case MK::IntOrNull:
      // A nullable int. The key will either be the integer, or the static empty
      // string.
      if (inTy.subtypeOf(BOptInt)) {
        return reduce(
          env,
          bc::CGetL { op.nloc1 },
          bc::String { staticEmptyString() },
          bc::IsTypeL { op.nloc1, IsTypeOp::Null },
          bc::Select {}
        );
      }
      break;
    case MK::BoolOrNull:
      // A nullable bool. The key will either be 0, 1, or 2.
      if (inTy.subtypeOf(BOptBool)) {
        return reduce(
          env,
          bc::CGetL { op.nloc1 },
          bc::CastInt {},
          bc::Int { 2 },
          bc::IsTypeL { op.nloc1, IsTypeOp::Null },
          bc::Select {}
        );
      }
      break;
    case MK::Dbl:
      // The double will be converted (losslessly) to an integer.
      if (inTy.subtypeOf(BDbl)) {
        return reduce(env, bc::CGetL { op.nloc1 }, bc::DblAsBits {});
      }
      break;
    case MK::DblOrNull:
      // A nullable double. The key will be an integer, or the static empty
      // string.
      if (inTy.subtypeOf(BOptDbl)) {
        return reduce(
          env,
          bc::CGetL { op.nloc1 },
          bc::DblAsBits {},
          bc::String { staticEmptyString() },
          bc::IsTypeL { op.nloc1, IsTypeOp::Null },
          bc::Select {}
        );
      }
      break;
    case MK::Object:
      // An object. If the object is definitely known to implement IMemoizeParam
      // we can simply call that method, casting the output to ensure its always
      // a string (which is what the generic mode does). If not, it will use the
      // generic mode, which can handle collections or classes which don't
      // implement getInstanceKey.
      if (resolvedCls &&
          resolvedCls->mustBeSubtypeOf(rclsIMemoizeParam) &&
          inTy.subtypeOf(tyIMemoizeParam)) {
        return reduce(
          env,
          bc::CGetL { op.nloc1 },
          bc::NullUninit {},
          bc::FCallObjMethodD {
            FCallArgs(0),
            staticEmptyString(),
            ObjMethodOp::NullThrows,
            s_getInstanceKey.get()
          },
          bc::CastString {}
        );
      }
      break;
    case MK::ObjectOrNull:
      // An object or null. We can use the null safe version of a function call
      // when invoking getInstanceKey and then select from the result of that,
      // or the integer 0. This might seem wasteful, but the JIT does a good job
      // inlining away the call in the null case.
      if (resolvedCls &&
          resolvedCls->mustBeSubtypeOf(rclsIMemoizeParam) &&
          inTy.subtypeOf(opt(tyIMemoizeParam))) {
        return reduce(
          env,
          bc::CGetL { op.nloc1 },
          bc::NullUninit {},
          bc::FCallObjMethodD {
            FCallArgs(0),
            staticEmptyString(),
            ObjMethodOp::NullSafe,
            s_getInstanceKey.get()
          },
          bc::CastString {},
          bc::Int { 0 },
          bc::IsTypeL { op.nloc1, IsTypeOp::Null },
          bc::Select {}
        );
      }
      break;
    case MK::None:
      break;
  }

  // No type constraint, or one that isn't usuable. Use the generic memoization
  // scheme which can handle any type:

  // Known constant input: evaluate the key at analysis time.
  if (auto const val = tv(inTy)) {
    auto const key = eval_cell(
      [&]{ return HHVM_FN(serialize_memoize_param)(*val); }
    );
    if (key) return push(env, *key);
  }

  // Integer keys are always mapped to themselves
  if (inTy.subtypeOf(BInt)) return reduce(env, bc::CGetL { op.nloc1 });
  if (inTy.subtypeOf(BOptInt)) {
    return reduce(
      env,
      bc::CGetL { op.nloc1 },
      bc::String { s_nullMemoKey.get() },
      bc::IsTypeL { op.nloc1, IsTypeOp::Null },
      bc::Select {}
    );
  }
  if (inTy.subtypeOf(BBool)) {
    return reduce(
      env,
      bc::String { s_falseMemoKey.get() },
      bc::String { s_trueMemoKey.get() },
      bc::CGetL { op.nloc1 },
      bc::Select {}
    );
  }

  // A memo key can be an integer if the input might be an integer, and is a
  // string otherwise. Booleans and nulls are always static strings.
  auto keyTy = [&]{
    if (inTy.subtypeOf(BOptBool)) return TSStr;
    if (inTy.couldBe(BInt)) return union_of(TInt, TStr);
    return TStr;
  }();
  push(env, std::move(keyTy));
}
/*
 * IssetL: true iff the local is set and non-null.
 */
void in(ISS& env, const bc::IssetL& op) {
  // isset($this) reduces to a quiet BareThis + null test.
  if (locIsThis(env, op.loc1)) {
    return reduce(env,
                  bc::BareThis { BareThisOp::NoNotice },
                  bc::IsTypeC { IsTypeOp::Null },
                  bc::Not {});
  }
  effect_free(env);
  constprop(env);
  auto const loc = locAsCell(env, op.loc1);
  if (loc.subtypeOf(BNull)) return push(env, TFalse);
  if (!loc.couldBe(BNull)) return push(env, TTrue);
  push(env, TBool);
}
2576 void in(ISS& env, const bc::IsUnsetL& op) {
2577 effect_free(env);
2578 constprop(env);
2579 auto const loc = locAsCell(env, op.loc1);
2580 if (loc.subtypeOf(BUninit)) return push(env, TTrue);
2581 if (!loc.couldBe(BUninit)) return push(env, TFalse);
2582 push(env, TBool);
/*
 * IssetS: isset on a static property.  Pops class and property name,
 * pushes a boolean.
 */
void in(ISS& env, const bc::IssetS& op) {
  auto const tcls = popC(env);
  auto const tname = popC(env);

  if (!tcls.couldBe(BCls)) {
    unreachable(env);
    return push(env, TBottom);
  }

  auto lookup = env.index.lookup_static(
    env.ctx,
    env.collect.props,
    tcls,
    tname
  );

  // Effect-free only when class initialization can't raise and the
  // inputs are precise.
  if (!lookup.classInitMightRaise &&
      tcls.subtypeOf(BCls) &&
      tname.subtypeOf(BStr)) {
    effect_free(env);
    constprop(env);
  }

  if (lookup.ty.subtypeOf(BNull)) return push(env, TFalse);
  // Definitely non-null and not late-init: isset is definitely true.
  if (!lookup.ty.couldBe(BNull) && lookup.lateInit == TriBool::No) {
    return push(env, TTrue);
  }
  push(env, TBool);
}
2615 void in(ISS& env, const bc::IssetG&) { popC(env); push(env, TBool); }
2617 void isTypeImpl(ISS& env, const Type& locOrCell, const Type& test) {
2618 if (locOrCell.subtypeOf(test)) return push(env, TTrue);
2619 if (!locOrCell.couldBe(test)) return push(env, TFalse);
2620 push(env, TBool);
/*
 * is_object check.  Special cases: __PHP_Incomplete_Class is not
 * considered an object, and (optionally) MethCallerHelper may notice.
 */
void isTypeObj(ISS& env, const Type& ty) {
  if (!ty.couldBe(BObj)) return push(env, TFalse);
  if (ty.subtypeOf(BObj)) {
    auto const incompl = objExact(
      env.index.builtin_class(s_PHP_Incomplete_Class.get()));
    if (RO::EvalBuildMayNoticeOnMethCallerHelperIsObject) {
      auto const c =
        objExact(env.index.builtin_class(s_MethCallerHelper.get()));
      // Could be the MethCaller helper: result (and notice) is uncertain.
      if (ty.couldBe(c)) return push(env, TBool);
    }
    if (!ty.couldBe(incompl)) return push(env, TTrue);
    if (ty.subtypeOf(incompl)) return push(env, TFalse);
  }
  push(env, TBool);
}
/*
 * Shared implementation for IsTypeL-style ops: test the type of a local.
 */
template<class Op>
void isTypeLImpl(ISS& env, const Op& op) {
  auto const loc = locAsCell(env, op.nloc1.id);
  // Constprop only if reading the local can't notice (uninit) and the
  // type test itself can't raise.
  if (!locCouldBeUninit(env, op.nloc1.id) &&
      !is_type_might_raise(op.subop2, loc)) {
    constprop(env);
    effect_free(env);
  }

  switch (op.subop2) {
    case IsTypeOp::Scalar: return push(env, TBool);
    case IsTypeOp::LegacyArrLike: return push(env, TBool);
    case IsTypeOp::Obj: return isTypeObj(env, loc);
    case IsTypeOp::Func:
      return loc.couldBe(TFunc) ? push(env, TBool) : push(env, TFalse);
    default: return isTypeImpl(env, loc, type_of_istype(op.subop2));
  }
}
/*
 * Shared implementation for IsTypeC-style ops: test the type of the
 * top-of-stack cell.
 */
template<class Op>
void isTypeCImpl(ISS& env, const Op& op) {
  auto const t1 = popC(env);
  if (!is_type_might_raise(op.subop1, t1)) {
    constprop(env);
    effect_free(env);
  }

  switch (op.subop1) {
    case IsTypeOp::Scalar: return push(env, TBool);
    case IsTypeOp::LegacyArrLike: return push(env, TBool);
    case IsTypeOp::Obj: return isTypeObj(env, t1);
    case IsTypeOp::Func:
      return t1.couldBe(TFunc) ? push(env, TBool) : push(env, TFalse);
    default: return isTypeImpl(env, t1, type_of_istype(op.subop1));
  }
}
2676 void in(ISS& env, const bc::IsTypeC& op) { isTypeCImpl(env, op); }
2677 void in(ISS& env, const bc::IsTypeL& op) { isTypeLImpl(env, op); }
/*
 * InstanceOfD: instanceof against a statically-named class.
 */
void in(ISS& env, const bc::InstanceOfD& op) {
  auto t1 = topC(env);
  // Note: InstanceOfD can do autoload if the type might be a type
  // alias, so it's not nothrow unless we know it's an object type.
  if (auto const rcls = env.index.resolve_class(env.ctx, op.str1)) {
    auto result = [&] (const Type& r) {
      nothrow(env);
      if (r != TBool) constprop(env);
      popC(env);
      push(env, r);
    };
    if (!interface_supports_non_objects(rcls->name())) {
      auto const testTy = subObj(*rcls);
      if (t1.subtypeOf(testTy)) return result(TTrue);
      if (!t1.couldBe(testTy)) return result(TFalse);
      // If removing null makes the check definitely true, the whole op is
      // just a null test.
      if (t1.couldBe(BInitNull) && !t1.subtypeOf(BInitNull)) {
        t1 = unopt(std::move(t1));
        if (t1.subtypeOf(testTy)) {
          return reduce(env, bc::IsTypeC { IsTypeOp::Null }, bc::Not {});
        }
      }
      return result(TBool);
    }
  }
  popC(env);
  push(env, TBool);
}
/*
 * InstanceOf: dynamic instanceof; reduce to InstanceOfD when the class
 * operand is a known string or an exactly-known object type.
 */
void in(ISS& env, const bc::InstanceOf& /*op*/) {
  auto const t1 = topC(env);
  auto const v1 = tv(t1);
  if (v1 && v1->m_type == KindOfPersistentString) {
    return reduce(env, bc::PopC {},
                  bc::InstanceOfD { v1->m_data.pstr });
  }

  if (t1.subtypeOf(BObj) && is_specialized_obj(t1)) {
    auto const dobj = dobj_of(t1);
    switch (dobj.type) {
      case DObj::Sub:
        break;
      case DObj::Exact:
        return reduce(env, bc::PopC {},
                      bc::InstanceOfD { dobj.cls.name() });
    }
  }

  popC(env);
  popC(env);
  push(env, TBool);
}
2731 void in(ISS& env, const bc::IsLateBoundCls& op) {
2732 auto const cls = env.ctx.cls;
2733 if (cls && !(cls->attrs & AttrTrait)) effect_free(env);
2734 popC(env);
2735 return push(env, TBool);
2738 namespace {
// Whether an is/as type-structure check on this primitive type may be
// lowered to a plain IsTypeC without changing observable behavior.
bool isValidTypeOpForIsAs(const IsTypeOp& op) {
  switch (op) {
    case IsTypeOp::Null:
    case IsTypeOp::Bool:
    case IsTypeOp::Int:
    case IsTypeOp::Dbl:
    case IsTypeOp::Str:
    case IsTypeOp::Obj:
      return true;
    case IsTypeOp::Res:
    case IsTypeOp::Vec:
    case IsTypeOp::Dict:
    case IsTypeOp::Keyset:
    case IsTypeOp::ArrLike:
    case IsTypeOp::LegacyArrLike:
    case IsTypeOp::Scalar:
    case IsTypeOp::ClsMeth:
    case IsTypeOp::Func:
    case IsTypeOp::Class:
      return false;
  }
  not_reached();
}
/*
 * Core of IsTypeStructC for a statically-known (already resolved) type
 * structure: evaluate `<operand> is <ts>` as precisely as the type
 * lattice allows, reducing to cheaper bytecodes where possible.
 */
void isTypeStructImpl(ISS& env, SArray inputTS) {
  auto const ts = inputTS;
  auto const t = loosen_likeness(topC(env, 1)); // operand to is/as

  bool may_raise = true;
  auto result = [&] (const Type& out) {
    popC(env); // type structure
    popC(env); // operand to is/as
    constprop(env);
    if (!may_raise) nothrow(env);
    return push(env, out);
  };

  // Compare the operand against the type the structure denotes; `deopt`
  // is an extra type that forces an inconclusive answer when possible.
  auto check = [&] (
    const Optional<Type> type,
    const Optional<Type> deopt = std::nullopt
  ) {
    if (!type || is_type_might_raise(*type, t)) return result(TBool);
    auto test = type.value();
    if (t.subtypeOf(test)) return result(TTrue);
    if (!t.couldBe(test) && (!deopt || !t.couldBe(deopt.value()))) {
      return result(TFalse);
    }
    auto const op = type_to_istypeop(test);
    if (!op || !isValidTypeOpForIsAs(op.value())) return result(TBool);
    return reduce(env, bc::PopC {}, bc::IsTypeC { *op });
  };

  auto const is_nullable_ts = is_ts_nullable(ts);
  auto const is_definitely_null = t.subtypeOf(BNull);
  auto const is_definitely_not_null = !t.couldBe(BNull);

  if (is_nullable_ts && is_definitely_null) return result(TTrue);

  auto const ts_type = type_of_type_structure(env.index, env.ctx, ts);

  if (is_nullable_ts && !is_definitely_not_null && ts_type == std::nullopt) {
    // Ts is nullable and we know that t could be null but we dont know for sure
    // Also we didn't get a type out of the type structure
    return result(TBool);
  }

  if (ts_type && !is_type_might_raise(*ts_type, t)) may_raise = false;
  switch (get_ts_kind(ts)) {
    case TypeStructure::Kind::T_int:
    case TypeStructure::Kind::T_bool:
    case TypeStructure::Kind::T_float:
    case TypeStructure::Kind::T_string:
    case TypeStructure::Kind::T_num:
    case TypeStructure::Kind::T_arraykey:
    case TypeStructure::Kind::T_keyset:
    case TypeStructure::Kind::T_void:
    case TypeStructure::Kind::T_null:
      return check(ts_type);
    case TypeStructure::Kind::T_tuple:
      return check(ts_type, TVec);
    case TypeStructure::Kind::T_shape:
      return check(ts_type, TDict);
    case TypeStructure::Kind::T_dict:
      return check(ts_type);
    case TypeStructure::Kind::T_vec:
      return check(ts_type);
    case TypeStructure::Kind::T_nothing:
    case TypeStructure::Kind::T_noreturn:
      return result(TFalse);
    case TypeStructure::Kind::T_mixed:
    case TypeStructure::Kind::T_dynamic:
      return result(TTrue);
    case TypeStructure::Kind::T_nonnull:
      if (is_definitely_null) return result(TFalse);
      if (is_definitely_not_null) return result(TTrue);
      return reduce(env,
                    bc::PopC {},
                    bc::IsTypeC { IsTypeOp::Null },
                    bc::Not {});
    case TypeStructure::Kind::T_class:
    case TypeStructure::Kind::T_interface:
    case TypeStructure::Kind::T_xhp: {
      auto clsname = get_ts_classname(ts);
      auto const rcls = env.index.resolve_class(env.ctx, clsname);
      if (!rcls || !rcls->resolved() || (ts->exists(s_generic_types) &&
                                         (rcls->cls()->hasReifiedGenerics ||
                                          !isTSAllWildcards(ts)))) {
        // If it is a reified class or has non wildcard generics,
        // we need to bail
        return result(TBool);
      }
      return reduce(env, bc::PopC {}, bc::InstanceOfD { clsname });
    }
    case TypeStructure::Kind::T_unresolved: {
      auto classname = get_ts_classname(ts);
      auto const has_generics = ts->exists(s_generic_types);
      if (!has_generics && classname->isame(s_this.get())) {
        return reduce(env, bc::PopC {}, bc::IsLateBoundCls {});
      }
      auto const rcls = env.index.resolve_class(env.ctx, classname);
      // We can only reduce to instance of if we know for sure that this class
      // can be resolved since instanceof undefined class does not throw
      if (!rcls || !rcls->resolved() || rcls->cls()->attrs & AttrEnum) {
        return result(TBool);
      }
      if (has_generics &&
          (rcls->cls()->hasReifiedGenerics || !isTSAllWildcards(ts))) {
        // If it is a reified class or has non wildcard generics,
        // we need to bail
        return result(TBool);
      }
      return reduce(env, bc::PopC {}, bc::InstanceOfD { rcls->name() });
    }
    case TypeStructure::Kind::T_enum:
    case TypeStructure::Kind::T_resource:
    case TypeStructure::Kind::T_vec_or_dict:
    case TypeStructure::Kind::T_any_array:
      // TODO(T29232862): implement
      return result(TBool);
    case TypeStructure::Kind::T_typeaccess:
    case TypeStructure::Kind::T_darray:
    case TypeStructure::Kind::T_varray:
    case TypeStructure::Kind::T_varray_or_darray:
    case TypeStructure::Kind::T_reifiedtype:
      return result(TBool);
    case TypeStructure::Kind::T_fun:
    case TypeStructure::Kind::T_typevar:
    case TypeStructure::Kind::T_trait:
      // We will error on these at the JIT
      return result(TBool);
  }

  not_reached();
}
2895 const StaticString s_hh_type_structure_no_throw("HH\\type_structure_no_throw");
2897 } // namespace
/*
 * IsTypeStructC: `is` check against a type structure on the stack.
 * Resolves the structure statically when possible, then delegates to
 * isTypeStructImpl.
 */
void in(ISS& env, const bc::IsTypeStructC& op) {
  if (!topC(env).couldBe(BDict)) {
    popC(env);
    popC(env);
    return unreachable(env);
  }
  auto const a = tv(topC(env));
  // Not a statically-known, valid type structure: result is just a bool.
  if (!a || !isValidTSType(*a, false)) {
    popC(env);
    popC(env);
    return push(env, TBool);
  }
  if (op.subop1 == TypeStructResolveOp::Resolve) {
    if (auto const ts = resolve_type_structure(env, a->m_data.parr).sarray()) {
      return reduce(
        env,
        bc::PopC {},
        bc::Dict { ts },
        bc::IsTypeStructC { TypeStructResolveOp::DontResolve }
      );
    }
    if (auto const val = get_ts_this_type_access(a->m_data.parr)) {
      // Convert `$x is this::T` into
      // `$x is type_structure_no_throw(static::class, 'T')`
      // to take advantage of the caching that comes with the type_structure
      return reduce(
        env,
        bc::PopC {},
        bc::NullUninit {},
        bc::NullUninit {},
        bc::LateBoundCls {},
        bc::String {val},
        bc::FCallFuncD {FCallArgs(2), s_hh_type_structure_no_throw.get()},
        bc::IsTypeStructC { TypeStructResolveOp::DontResolve }
      );
    }
  }
  isTypeStructImpl(env, a->m_data.parr);
}
/*
 * ThrowAsTypeStructException: consumes both inputs and never falls
 * through — the remaining state is marked unreachable.
 */
void in(ISS& env, const bc::ThrowAsTypeStructException& op) {
  popC(env);
  popC(env);
  unreachable(env);
}
/*
 * CombineAndResolveTypeStruct: combine arg1 type-structure dicts from
 * the stack into one resolved type structure.
 */
void in(ISS& env, const bc::CombineAndResolveTypeStruct& op) {
  assertx(op.arg1 > 0);
  auto valid = true;
  auto const first = tv(topC(env));
  if (first && isValidTSType(*first, false)) {
    auto const ts = first->m_data.parr;

    // Optimize single input that does not need any combination
    if (op.arg1 == 1) {
      if (auto const r = resolve_type_structure(env, ts).sarray()) {
        return reduce(
          env,
          bc::PopC {},
          bc::Dict { r }
        );
      }
    }

    // Optimize double input that needs a single combination and looks of the
    // form ?T, @T or ~T
    if (op.arg1 == 2 && get_ts_kind(ts) == TypeStructure::Kind::T_reifiedtype) {
      BytecodeVec instrs { bc::PopC {} };
      auto const tv_true = gen_constant(make_tv<KindOfBoolean>(true));
      if (ts->exists(s_like.get())) {
        instrs.push_back(gen_constant(make_tv<KindOfString>(s_like.get())));
        instrs.push_back(tv_true);
        instrs.push_back(bc::AddElemC {});
      }
      if (ts->exists(s_nullable.get())) {
        instrs.push_back(gen_constant(make_tv<KindOfString>(s_nullable.get())));
        instrs.push_back(tv_true);
        instrs.push_back(bc::AddElemC {});
      }
      if (ts->exists(s_soft.get())) {
        instrs.push_back(gen_constant(make_tv<KindOfString>(s_soft.get())));
        instrs.push_back(tv_true);
        instrs.push_back(bc::AddElemC {});
      }
      return reduce(env, std::move(instrs));
    }
  }

  // Generic case: pop all inputs; any non-dict input means this must throw.
  for (int i = 0; i < op.arg1; ++i) {
    auto const t = popC(env);
    valid &= t.couldBe(BDict);
  }
  if (!valid) return unreachable(env);
  nothrow(env);
  push(env, TDict);
}
2995 void in(ISS& env, const bc::RecordReifiedGeneric& op) {
2996 // TODO(T31677864): implement real optimizations
2997 auto const t = popC(env);
2998 if (!t.couldBe(BVec)) return unreachable(env);
2999 if (t.subtypeOf(BVec)) nothrow(env);
3000 push(env, TSVec);
/*
 * CheckReifiedGenericMismatch: validates the reified generics on the
 * stack; on success we can refine the source location's type to the
 * class's declared reified list.
 */
void in(ISS& env, const bc::CheckReifiedGenericMismatch& op) {
  auto const location = topStkEquiv(env, 0);
  popC(env);

  if (location == NoLocalId) return;
  auto const ok = refineLocation(
    env, location,
    [&] (Type) {
      return get_type_of_reified_list(env.ctx.cls->userAttributes);
    }
  );
  if (!ok) unreachable(env);
}
3017 namespace {
/*
 * If the value on the top of the stack is known to be equivalent to the local
 * it's being moved/copied to, return std::nullopt without modifying any
 * state. Otherwise, pop the stack value, perform the set, and return a pair
 * giving the value's type, and any other local it's known to be equivalent to.
 */
template <typename Set>
Optional<std::pair<Type, LocalId>> moveToLocImpl(ISS& env,
                                                 const Set& op) {
  // Peephole: CGetL2 <loc>; ...; Concat; {Set,Pop}L <loc> is a
  // concat-assignment — rewrite to SetOpL ConcatEqual.
  if (auto const prev = last_op(env, 1)) {
    if (prev->op == Op::CGetL2 &&
        prev->CGetL2.nloc1.id == op.loc1 &&
        last_op(env)->op == Op::Concat) {
      rewind(env, 2);
      reduce(env, bc::SetOpL { op.loc1, SetOpOp::ConcatEqual });
      return std::nullopt;
    }
  }

  auto equivLoc = topStkEquiv(env);
  // If the local could be a Ref, don't record equality because the stack
  // element and the local won't actually have the same type.
  if (equivLoc == StackThisId && env.state.thisLoc != NoLocalId) {
    if (env.state.thisLoc == op.loc1 ||
        locsAreEquiv(env, env.state.thisLoc, op.loc1)) {
      return std::nullopt;
    } else {
      equivLoc = env.state.thisLoc;
    }
  }
  if (!is_volatile_local(env.ctx.func, op.loc1)) {
    if (equivLoc <= MaxLocalId) {
      if (equivLoc == op.loc1 ||
          locsAreEquiv(env, equivLoc, op.loc1)) {
        // We allow equivalency to ignore Uninit, so we need to check
        // the types here.
        if (peekLocRaw(env, op.loc1) == topC(env)) {
          return std::nullopt;
        }
      }
    } else if (equivLoc == NoLocalId) {
      equivLoc = op.loc1;
    }
    if (!any(env.collect.opts & CollectionOpts::Speculating)) {
      effect_free(env);
    }
  } else {
    // Volatile locals can't participate in equivalence tracking.
    equivLoc = NoLocalId;
  }
  nothrow(env);
  auto val = popC(env);
  setLoc(env, op.loc1, val);
  if (equivLoc == StackThisId) {
    assertx(env.state.thisLoc == NoLocalId);
    equivLoc = env.state.thisLoc = op.loc1;
  }
  if (equivLoc == StackDupId) {
    setStkLocal(env, op.loc1);
  } else if (equivLoc != op.loc1 && equivLoc != NoLocalId) {
    addLocEquiv(env, op.loc1, equivLoc);
  }
  return { std::make_pair(std::move(val), equivLoc) };
}
void in(ISS& env, const bc::PopL& op) {
  // If the same value is already in the local, do nothing but pop
  // it. Otherwise, the set has been done by moveToLocImpl.
  if (!moveToLocImpl(env, op)) return reduce(env, bc::PopC {});
}
void in(ISS& env, const bc::SetL& op) {
  // If the same value is already in the local, do nothing because SetL keeps
  // the value on the stack. If it isn't, we need to push it back onto the stack
  // because moveToLocImpl popped it.
  if (auto p = moveToLocImpl(env, op)) {
    push(env, std::move(p->first), p->second);
  } else {
    reduce(env);
  }
}
3102 void in(ISS& env, const bc::SetG&) {
3103 auto t1 = popC(env);
3104 popC(env);
3105 push(env, std::move(t1));
/*
 * SetS: write a static property.  Pops value, class, and name; pushes
 * the stored value (adjusted for the property's type constraint).
 */
void in(ISS& env, const bc::SetS& op) {
  auto const val = popC(env);
  auto const tcls = popC(env);
  auto const tname = popC(env);

  auto const throws = [&] {
    unreachable(env);
    return push(env, TBottom);
  };

  if (!tcls.couldBe(BCls)) return throws();

  auto merge = env.index.merge_static_type(
    env.ctx,
    env.collect.publicSPropMutations,
    env.collect.props,
    tcls,
    tname,
    val,
    true,
    false,
    checkReadonlyOpThrows(ReadonlyOp::Readonly, op.subop1)
  );

  if (merge.throws == TriBool::Yes || merge.adjusted.subtypeOf(BBottom)) {
    return throws();
  }

  if (merge.throws == TriBool::No &&
      tcls.subtypeOf(BCls) &&
      tname.subtypeOf(BStr)) {
    nothrow(env);
  }

  push(env, std::move(merge.adjusted));
}
3145 void in(ISS& env, const bc::SetOpL& op) {
3146 auto const t1 = popC(env);
3147 auto const loc = locAsCell(env, op.loc1);
3149 auto resultTy = typeSetOp(op.subop2, loc, t1);
3150 setLoc(env, op.loc1, resultTy);
3151 push(env, std::move(resultTy));
3154 void in(ISS& env, const bc::SetOpG&) {
3155 popC(env); popC(env);
3156 push(env, TInitCell);
/*
 * SetOpS: compound-assign to a static property.  Pops rhs, class, and
 * name; pushes the new property value.
 */
void in(ISS& env, const bc::SetOpS& op) {
  auto const rhs = popC(env);
  auto const tcls = popC(env);
  auto const tname = popC(env);

  auto const throws = [&] {
    unreachable(env);
    return push(env, TBottom);
  };

  if (!tcls.couldBe(BCls)) return throws();

  auto const lookup = env.index.lookup_static(
    env.ctx,
    env.collect.props,
    tcls,
    tname
  );

  if (lookup.found == TriBool::No || lookup.ty.subtypeOf(BBottom)) {
    return throws();
  }

  auto const newTy = typeSetOp(op.subop1, lookup.ty, rhs);
  if (newTy.subtypeOf(BBottom)) return throws();

  auto merge = env.index.merge_static_type(
    env.ctx,
    env.collect.publicSPropMutations,
    env.collect.props,
    tcls,
    tname,
    newTy
  );

  if (merge.throws == TriBool::Yes || merge.adjusted.subtypeOf(BBottom)) {
    return throws();
  }

  // NB: Unlike IncDecS, SetOpS pushes the post-TypeConstraint
  // adjustment value.
  push(env, std::move(merge.adjusted));
}
/*
 * IncDecL: increment/decrement a local; pushes the pre- or post-op
 * value depending on the subop.
 */
void in(ISS& env, const bc::IncDecL& op) {
  auto loc = locAsCell(env, op.nloc1.id);
  auto newT = typeIncDec(op.subop2, loc);

  if (newT.subtypeOf(BBottom)) {
    unreachable(env);
    return push(env, TBottom);
  }

  // Numeric, definitely-initialized locals can't raise here.
  if (!locCouldBeUninit(env, op.nloc1.id) && loc.subtypeOf(BNum)) nothrow(env);

  auto const pre = isPre(op.subop2);
  if (!pre) push(env, std::move(loc));
  setLoc(env, op.nloc1.id, newT);
  if (pre) push(env, std::move(newT));
}
3220 void in(ISS& env, const bc::IncDecG&) { popC(env); push(env, TInitCell); }
/*
 * IncDecS: increment/decrement a static property.  Pops class and name;
 * pushes the pre- or post-op value depending on the subop.
 */
void in(ISS& env, const bc::IncDecS& op) {
  auto const tcls = popC(env);
  auto const tname = popC(env);
  auto const pre = isPre(op.subop1);

  auto const throws = [&] {
    unreachable(env);
    return push(env, TBottom);
  };

  if (!tcls.couldBe(BCls)) return throws();

  auto lookup = env.index.lookup_static(
    env.ctx,
    env.collect.props,
    tcls,
    tname
  );

  if (lookup.found == TriBool::No || lookup.ty.subtypeOf(BBottom)) {
    return throws();
  }

  auto newTy = typeIncDec(op.subop1, lookup.ty);
  if (newTy.subtypeOf(BBottom)) return throws();

  auto const merge = env.index.merge_static_type(
    env.ctx,
    env.collect.publicSPropMutations,
    env.collect.props,
    tcls,
    tname,
    newTy
  );

  if (merge.throws == TriBool::Yes || merge.adjusted.subtypeOf(BBottom)) {
    return throws();
  }

  // nothrow only when the property definitely exists, is initialized,
  // numeric, and nothing about the store can raise.
  if (lookup.found == TriBool::Yes &&
      lookup.lateInit == TriBool::No &&
      !lookup.classInitMightRaise &&
      merge.throws == TriBool::No &&
      tcls.subtypeOf(BCls) &&
      tname.subtypeOf(BStr) &&
      lookup.ty.subtypeOf(BNum)) {
    nothrow(env);
  }

  // NB: IncDecS pushes the value pre-TypeConstraint modification
  push(env, pre ? std::move(newTy) : std::move(lookup.ty));
}
/*
 * UnsetL: set the local back to Uninit.
 */
void in(ISS& env, const bc::UnsetL& op) {
  // Already unset: the op is a no-op.
  if (locRaw(env, op.loc1).subtypeOf(TUninit)) {
    return reduce(env);
  }

  if (auto const last = last_op(env)) {
    // No point in popping into the local if we're just going to
    // immediately unset it.
    if (last->op == Op::PopL &&
        last->PopL.loc1 == op.loc1) {
      reprocess(env);
      rewind(env, 1);
      setLocRaw(env, op.loc1, TCell);
      return reduce(env, bc::PopC {}, bc::UnsetL { op.loc1 });
    }
  }

  if (any(env.collect.opts & CollectionOpts::Speculating)) {
    nothrow(env);
  } else {
    effect_free(env);
  }
  setLocRaw(env, op.loc1, TUninit);
}
// UnsetG: unset a global. Pops the name; nothrow only when the name cannot
// be an object or resource (presumably those can raise during conversion
// to a string key — confirm against the runtime implementation).
3300 void in(ISS& env, const bc::UnsetG& /*op*/) {
3301 auto const t1 = popC(env);
3302 if (!t1.couldBe(BObj | BRes)) nothrow(env);
// Returns true if the runtime argument-repack step for this call site can
// be statically elided (argument count/unpack shape already matches the
// callee's parameter layout).
3305 bool fcallCanSkipRepack(ISS& env, const FCallArgs& fca, const res::Func& func) {
3306 // Can't skip repack if potentially calling a function with too many args.
3307 if (fca.numArgs() > func.minNonVariadicParams()) return false;
3308 // Repack not needed if not unpacking and not having too many arguments.
3309 if (!fca.hasUnpack()) return true;
3310 // Can't skip repack if unpack args are in a wrong position.
3311 if (fca.numArgs() != func.maxNonVariadicParams()) return false;
3313 // Repack not needed if unpack args have the correct type.
3314 auto const unpackArgs = topC(env, fca.hasGenerics() ? 1 : 0);
3315 return unpackArgs.subtypeOf(BVec);
// Returns true if a single caller coeffect rule provably produces the same
// coeffects as the corresponding callee rule at this call site, so the
// runtime coeffect check can be skipped for that rule.
3318 bool coeffectRulesMatch(ISS& env,
3319 const FCallArgs& fca,
3320 const res::Func& func,
3321 uint32_t numExtraInputs,
3322 const CoeffectRule& caller,
3323 const CoeffectRule& callee) {
3324 if (caller.m_type != callee.m_type) return false;
3325 switch (caller.m_type) {
3326 case CoeffectRule::Type::CCThis: {
3327 if (caller.m_name != callee.m_name ||
3328 caller.m_types != callee.m_types) {
3329 return false;
3331 if (!thisAvailable(env)) return false;
// The object receiver for the call must be known to be $this.
3332 auto const loc = topStkEquiv(env, fca.numInputs() + numExtraInputs + 1);
3333 return loc == StackThisId || (loc <= MaxLocalId && locIsThis(env, loc));
3335 case CoeffectRule::Type::CCParam:
3336 if (caller.m_name != callee.m_name) return false;
3337 // fallthrough
3338 case CoeffectRule::Type::FunParam: {
// The caller's local feeding the rule must be equivalent to the stack
// slot holding the callee's corresponding argument.
3339 if (fca.hasUnpack()) return false;
3340 if (fca.numArgs() <= callee.m_index) return false;
3341 auto const l1 = caller.m_index;
3342 auto const l2 = topStkEquiv(env, fca.numInputs() - callee.m_index - 1);
3343 return l1 == l2 ||
3344 (l1 <= MaxLocalId &&
3345 l2 <= MaxLocalId &&
3346 locsAreEquiv(env, l1, l2));
3348 case CoeffectRule::Type::CCReified:
3349 // TODO: optimize these
3350 return false;
3351 case CoeffectRule::Type::ClosureParentScope:
3352 case CoeffectRule::Type::GeneratorThis:
3353 case CoeffectRule::Type::Caller:
3354 case CoeffectRule::Type::Invalid:
3355 return false;
3357 not_reached();
// Returns true if the runtime coeffects check for this call can be elided:
// the caller's static coeffects (plus escapes) can call the callee's
// required coeffects, and every callee coeffect rule is matched by some
// caller rule at this call site.
3360 bool fcallCanSkipCoeffectsCheck(ISS& env,
3361 const FCallArgs& fca,
3362 const res::Func& func,
3363 uint32_t numExtraInputs) {
3364 auto const requiredCoeffectsOpt = func.requiredCoeffects();
3365 if (!requiredCoeffectsOpt) return false;
3366 auto const required = *requiredCoeffectsOpt;
3367 auto const provided =
3368 RuntimeCoeffects::fromValue(env.ctx.func->requiredCoeffects.value() |
3369 env.ctx.func->coeffectEscapes.value());
3370 if (!provided.canCall(required)) return false;
3371 auto const calleeRules = func.coeffectRules();
3372 // If we couldn't tell whether callee has rules or not, punt.
3373 if (!calleeRules) return false;
3374 if (calleeRules->empty()) return true;
3375 if (calleeRules->size() == 1 && (*calleeRules)[0].isCaller()) return true;
// Rule order doesn't matter, so compare as multisets via is_permutation.
3376 auto const callerRules = env.ctx.func->coeffectRules;
3377 return std::is_permutation(callerRules.begin(), callerRules.end(),
3378 calleeRules->begin(), calleeRules->end(),
3379 [&] (const CoeffectRule& a,
3380 const CoeffectRule& b) {
3381 return coeffectRulesMatch(env, fca, func,
3382 numExtraInputs,
3383 a, b);
// Try to statically discharge the various runtime checks encoded in the
// FCallArgs flags (inout-ness, readonly-ness, mutable-return, readonly-this,
// async eager target, arg repack, coeffects). Returns true if it emitted a
// reduce() (either dropping a check bit and re-emitting the call via
// fcallWithFCA, or replacing the call with a throw); the caller must then
// stop processing this instruction.
3387 template<typename FCallWithFCA>
3388 bool fcallOptimizeChecks(
3389 ISS& env,
3390 const FCallArgs& fca,
3391 const res::Func& func,
3392 FCallWithFCA fcallWithFCA,
3393 Optional<uint32_t> inOutNum,
3394 bool maybeNullsafe,
3395 uint32_t numExtraInputs
3397 // Don't optimize away in-out checks if we might use the null safe
3398 // operator. If we do so, we need the in-out bits to shuffle the
3399 // stack properly.
3400 if (!maybeNullsafe && fca.enforceInOut()) {
3401 if (inOutNum == fca.numRets() - 1) {
3402 bool match = true;
3403 for (auto i = 0; i < fca.numArgs(); ++i) {
3404 auto const kind = env.index.lookup_param_prep(env.ctx, func, i);
3405 if (kind.inOut == TriBool::Maybe) {
3406 match = false;
3407 break;
3410 if (yesOrNo(fca.isInOut(i)) != kind.inOut) {
3411 // The function/method may not exist, in which case we should raise a
3412 // different error. Just defer the checks to the runtime.
3413 if (!func.exactFunc()) return false;
3415 // inout mismatch
// Known mismatch: replace the call with code that constructs and throws
// an InvalidArgumentException with the standard mismatch message.
3416 auto const exCls = makeStaticString("InvalidArgumentException");
3417 auto const err = makeStaticString(formatParamInOutMismatch(
3418 func.name()->data(), i, !fca.isInOut(i)));
3420 reduce(
3421 env,
3422 bc::NewObjD { exCls },
3423 bc::Dup {},
3424 bc::NullUninit {},
3425 bc::String { err },
3426 bc::FCallCtor { FCallArgs(1), staticEmptyString() },
3427 bc::PopC {},
3428 bc::LockObj {},
3429 bc::Throw {}
3431 return true;
3435 if (match) {
3436 // Optimize away the runtime inout-ness check.
3437 reduce(env, fcallWithFCA(fca.withoutInOut()));
3438 return true;
// Same idea for readonly argument annotations.
3443 if (fca.enforceReadonly()) {
3444 bool match = true;
3445 for (auto i = 0; i < fca.numArgs(); ++i) {
3446 if (!fca.isReadonly(i)) continue;
3447 auto const kind = env.index.lookup_param_prep(env.ctx, func, i);
3448 if (kind.readonly == TriBool::Maybe) {
3449 match = false;
3450 break;
3453 if (kind.readonly != TriBool::Yes) {
3454 // The function/method may not exist, in which case we should raise a
3455 // different error. Just defer the checks to the runtime.
3456 if (!func.exactFunc()) return false;
3457 match = false;
3458 break;
3462 if (match) {
3463 // Optimize away the runtime readonly-ness check.
3464 reduce(env, fcallWithFCA(fca.withoutReadonly()));
3465 return true;
3469 if (fca.enforceMutableReturn()) {
3470 if (env.index.lookup_return_readonly(env.ctx, func) == TriBool::No) {
3471 reduce(env, fcallWithFCA(fca.withoutEnforceMutableReturn()));
3472 return true;
3476 if (fca.enforceReadonlyThis()) {
3477 if (env.index.lookup_readonly_this(env.ctx, func) == TriBool::Yes) {
3478 reduce(env, fcallWithFCA(fca.withoutEnforceReadonlyThis()));
3479 return true;
3483 // Infer whether the callee supports async eager return.
3484 if (fca.asyncEagerTarget() != NoBlockId) {
3485 auto const status = env.index.supports_async_eager_return(func);
3486 if (status && !*status) {
3487 reduce(env, fcallWithFCA(fca.withoutAsyncEagerTarget()));
3488 return true;
3492 if (!fca.skipRepack() && fcallCanSkipRepack(env, fca, func)) {
3493 reduce(env, fcallWithFCA(fca.withoutRepack()));
3494 return true;
3497 if (!fca.skipCoeffectsCheck() &&
3498 fcallCanSkipCoeffectsCheck(env, fca, func, numExtraInputs)) {
3499 reduce(env, fcallWithFCA(fca.withoutCoeffectsCheck()));
3500 return true;
// Nothing could be discharged; caller continues normal processing.
3503 return false;
// Attempt to constant-fold a call whose callee is exactly known: if all
// arguments are scalar (or the callee doesn't depend on them) and the
// computed return type is a constant, replace the whole call sequence with
// pops plus a literal push. Returns true if the fold was performed.
3506 bool fcallTryFold(
3507 ISS& env,
3508 const FCallArgs& fca,
3509 const res::Func& func,
3510 Type context,
3511 bool maybeDynamic,
3512 uint32_t numExtraInputs
3514 auto const foldableFunc = func.exactFunc();
3515 if (!foldableFunc) return false;
3516 if (!shouldAttemptToFold(env, foldableFunc, fca, context, maybeDynamic)) {
3517 return false;
3520 assertx(!fca.hasUnpack() && !fca.hasGenerics() && fca.numRets() == 1);
3521 assertx(options.ConstantFoldBuiltins);
// On success, replace the call with pops for all inputs and a push of the
// folded constant. Fails (returns false) if ty has no constant value.
3523 auto const finish = [&] (Type ty) {
3524 auto const v = tv(ty);
3525 if (!v) return false;
3526 BytecodeVec repl;
3527 for (uint32_t i = 0; i < numExtraInputs; ++i) repl.push_back(bc::PopC {});
3528 for (uint32_t i = 0; i < fca.numArgs(); ++i) repl.push_back(bc::PopC {});
3529 repl.push_back(bc::PopU {});
// The "this"/context slot may hold either an InitCell or Uninit; pop it
// with the matching opcode.
3530 if (topT(env, fca.numArgs() + 1 + numExtraInputs).subtypeOf(TInitCell)) {
3531 repl.push_back(bc::PopC {});
3532 } else {
3533 assertx(topT(env, fca.numArgs() + 1 + numExtraInputs).subtypeOf(TUninit));
3534 repl.push_back(bc::PopU {});
3536 repl.push_back(gen_constant(*v));
3537 reduce(env, std::move(repl));
3538 return true;
// Foldable builtins are evaluated directly via const_fold.
3541 if (foldableFunc->attrs & AttrBuiltin &&
3542 foldableFunc->attrs & AttrIsFoldable) {
3543 auto ret = const_fold(env, fca.numArgs(), numExtraInputs, *foldableFunc,
3544 false);
3545 if (!ret) return false;
3546 return finish(std::move(*ret));
// Otherwise gather (scalarized) argument types and ask the index for the
// callee's return type in this specific call context.
3549 CompactVector<Type> args(fca.numArgs());
3550 auto const firstArgPos = numExtraInputs + fca.numInputs() - 1;
3551 for (auto i = uint32_t{0}; i < fca.numArgs(); ++i) {
3552 auto const& arg = topT(env, firstArgPos - i);
3553 auto const isScalar = is_scalar(arg);
3554 if (!isScalar &&
3555 (env.index.func_depends_on_arg(foldableFunc, i) ||
3556 !arg.subtypeOf(BInitCell))) {
3557 return false;
3559 args[i] = isScalar ? scalarize(arg) : arg;
3562 auto calleeCtx = CallContext {
3563 foldableFunc,
3564 std::move(args),
3565 std::move(context)
// Remember failed fold attempts so we don't retry them every iteration.
3567 if (env.collect.unfoldableFuncs.count(calleeCtx)) return false;
3569 if (finish(env.index.lookup_foldable_return_type(env.ctx, calleeCtx))) {
3570 return true;
3572 env.collect.unfoldableFuncs.emplace(std::move(calleeCtx));
3573 return false;
// Given the type of an awaited value, compute the type produced by the
// await: Bottom if it can't be an object (awaiting throws), the wait
// handle's inner type when known, otherwise TInitCell.
3576 Type typeFromWH(Type t) {
3577 if (!t.couldBe(BObj)) {
3578 // Exceptions will be thrown if a non-object is awaited.
3579 return TBottom;
3582 // Throw away non-obj component.
3583 t &= TObj;
3585 // If we aren't even sure this is a wait handle, there's nothing we can
3586 // infer here.
3587 if (!is_specialized_wait_handle(t)) {
3588 return TInitCell;
3591 return wait_handle_inner(t);
// Push the result(s) of a call onto the eval stack. Handles the multi-ret
// (inout) case, the nullsafe (?->) null path where inout inputs pass
// through unchanged, and async-eager-return propagation to the eager
// target block.
3594 void pushCallReturnType(ISS& env,
3595 Type ty,
3596 const FCallArgs& fca,
3597 bool nullsafe,
3598 std::vector<Type> inOuts) {
3599 auto const numRets = fca.numRets();
3600 if (numRets != 1) {
// Multi-ret (inout) calls: the callee conceptually returns a vec of
// [ret, inout1, ...]; unpack it onto the stack.
3601 assertx(fca.asyncEagerTarget() == NoBlockId);
3602 assertx(IMPLIES(nullsafe, inOuts.size() == numRets - 1));
3604 for (auto i = uint32_t{0}; i < numRets - 1; ++i) popU(env);
3605 if (!ty.couldBe(BVecN)) {
3606 // Function cannot have an in-out args match, so call will
3607 // always fail.
3608 if (!nullsafe) {
3609 for (int32_t i = 0; i < numRets; i++) push(env, TBottom);
3610 return unreachable(env);
3612 // We'll only hit the nullsafe null case, so the outputs are the
3613 // inout inputs.
3614 for (auto& t : inOuts) push(env, std::move(t));
3615 push(env, TInitNull);
3616 return;
3619 // If we might use the nullsafe operator, we need to union in the
3620 // null case (which means the inout args are unchanged).
3621 if (is_specialized_array_like(ty)) {
3622 for (int32_t i = 1; i < numRets; i++) {
3623 auto elem = array_like_elem(ty, ival(i)).first;
3624 if (nullsafe) elem |= inOuts[i-1];
3625 push(env, std::move(elem));
3627 push(
3628 env,
3629 nullsafe
3630 ? opt(array_like_elem(ty, ival(0)).first)
3631 : array_like_elem(ty, ival(0)).first
3633 } else {
3634 for (int32_t i = 0; i < numRets; ++i) push(env, TInitCell);
3636 return;
// Single-ret case. If there's an async eager target, propagate the
// unpacked wait-handle inner type to that block first.
3638 if (fca.asyncEagerTarget() != NoBlockId) {
3639 assertx(!ty.is(BBottom));
3640 push(env, typeFromWH(ty));
3641 assertx(!topC(env).subtypeOf(BBottom));
3642 env.propagate(fca.asyncEagerTarget(), &env.state);
3643 popC(env);
3645 if (nullsafe) ty = opt(std::move(ty));
3646 if (ty.is(BBottom)) {
3647 // The callee function never returns. It might throw, or loop
3648 // forever.
3649 push(env, TBottom);
3650 return unreachable(env);
3652 return push(env, std::move(ty));
// Names of builtins that get special handling around calls
// (s_function_exists is checked in fcallKnownImpl below; s_defined is
// presumably consumed by code outside this chunk — confirm).
3655 const StaticString s_defined { "defined" };
3656 const StaticString s_function_exists { "function_exists" };
// Model a call to a resolved callee: compute its return type from the
// index (context-sensitively when not unpacking), handle caller/callee
// inout mismatches, async eager targets, the function_exists special case,
// then pop all inputs and push the result(s).
3658 template<typename FCallWithFCA>
3659 void fcallKnownImpl(
3660 ISS& env,
3661 const FCallArgs& fca,
3662 const res::Func& func,
3663 Type context,
3664 bool nullsafe,
3665 uint32_t numExtraInputs,
3666 FCallWithFCA fcallWithFCA,
3667 Optional<uint32_t> inOutNum
3669 auto const numArgs = fca.numArgs();
3670 auto returnType = [&] {
3671 CompactVector<Type> args(numArgs);
3672 auto const firstArgPos = numExtraInputs + fca.numInputs() - 1;
3673 for (auto i = uint32_t{0}; i < numArgs; ++i) {
3674 args[i] = topCV(env, firstArgPos - i);
// With unpack we can't line up stack slots with parameters, so fall back
// to the context-free return type lookup.
3677 return fca.hasUnpack()
3678 ? env.index.lookup_return_type(env.ctx, &env.collect.methods, func)
3679 : env.index.lookup_return_type(
3680 env.ctx, &env.collect.methods, args, context, func
3682 }();
3684 // If there's a caller/callee inout mismatch, then the call will
3685 // always fail.
3686 if (fca.enforceInOut()) {
3687 if (inOutNum && (*inOutNum + 1 != fca.numRets())) {
3688 returnType = TBottom;
3692 if (fca.asyncEagerTarget() != NoBlockId && typeFromWH(returnType) == TBottom) {
3693 // Kill the async eager target if the function never returns.
3694 reduce(env, fcallWithFCA(std::move(fca.withoutAsyncEagerTarget())));
3695 return;
// function_exists("name") can refine our knowledge about the named
// function's existence; let the helper record that.
3698 if (func.name()->isame(s_function_exists.get()) &&
3699 (numArgs == 1 || numArgs == 2) &&
3700 !fca.hasUnpack() && !fca.hasGenerics()) {
3701 handle_function_exists(env, topT(env, numExtraInputs + numArgs - 1));
// Pop all inputs, remembering inout argument types for the nullsafe
// pass-through case.
3704 for (auto i = uint32_t{0}; i < numExtraInputs; ++i) popC(env);
3705 if (fca.hasGenerics()) popC(env);
3706 if (fca.hasUnpack()) popC(env);
3707 std::vector<Type> inOuts;
3708 for (auto i = uint32_t{0}; i < numArgs; ++i) {
3709 if (nullsafe && fca.isInOut(numArgs - i - 1)) {
3710 inOuts.emplace_back(popCV(env));
3711 } else {
3712 popCV(env);
3715 popU(env);
3716 popCU(env);
3717 pushCallReturnType(env, std::move(returnType),
3718 fca, nullsafe, std::move(inOuts));
// Model a call to an unknown callee: pop all inputs, propagate to the
// async eager target (if any), and push retTy (TInitCell by default) for
// every return slot.
3721 void fcallUnknownImpl(ISS& env,
3722 const FCallArgs& fca,
3723 const Type& retTy = TInitCell) {
3724 if (fca.hasGenerics()) popC(env);
3725 if (fca.hasUnpack()) popC(env);
3726 auto const numArgs = fca.numArgs();
3727 auto const numRets = fca.numRets();
3728 for (auto i = uint32_t{0}; i < numArgs; ++i) popCV(env);
3729 popU(env);
3730 popCU(env);
3731 if (fca.asyncEagerTarget() != NoBlockId) {
3732 assertx(numRets == 1);
3733 assertx(!retTy.is(BBottom));
3734 push(env, retTy);
3735 env.propagate(fca.asyncEagerTarget(), &env.state);
3736 popC(env);
3738 for (auto i = uint32_t{0}; i < numRets - 1; ++i) popU(env);
3739 for (auto i = uint32_t{0}; i < numRets; ++i) push(env, retTy);
// FCallFuncD: direct call to a named function. Drops an unused generics
// vec when the callee can't be reified, then runs the shared check
// optimization / fold / known-call pipeline.
3742 void in(ISS& env, const bc::FCallFuncD& op) {
3743 auto const rfunc = env.index.resolve_func(env.ctx, op.str2);
3745 if (op.fca.hasGenerics()) {
3746 auto const tsList = topC(env);
3747 if (!tsList.couldBe(BVec)) {
3748 return unreachable(env);
3751 if (!rfunc.couldHaveReifiedGenerics()) {
3752 return reduce(
3753 env,
3754 bc::PopC {},
3755 bc::FCallFuncD { op.fca.withoutGenerics(), op.str2 }
// Re-emits this same opcode with an updated FCallArgs (used by
// fcallOptimizeChecks when it strips check bits).
3760 auto const updateBC = [&] (FCallArgs fca) {
3761 return bc::FCallFuncD { std::move(fca), op.str2 };
3764 auto const numInOut = op.fca.enforceInOut()
3765 ? env.index.lookup_num_inout_params(env.ctx, rfunc)
3766 : std::nullopt;
3768 if (fcallOptimizeChecks(env, op.fca, rfunc, updateBC, numInOut, false, 0) ||
3769 fcallTryFold(env, op.fca, rfunc, TBottom, false, 0)) {
3770 return;
3773 if (auto const func = rfunc.exactFunc()) {
3774 if (optimize_builtin(env, func, op.fca)) return;
3777 fcallKnownImpl(env, op.fca, rfunc, TBottom, false, 0, updateBC, numInOut);
3780 namespace {
// FCallFunc fallback: callee is completely unknown; pop the callable and
// treat as an unknown call.
3782 void fcallFuncUnknown(ISS& env, const bc::FCallFunc& op) {
3783 popC(env);
3784 fcallUnknownImpl(env, op.fca);
// FCallFunc where the callable is a ClsMeth value.
3787 void fcallFuncClsMeth(ISS& env, const bc::FCallFunc& op) {
3788 assertx(topC(env).subtypeOf(BClsMeth));
3790 // TODO: optimize me
3791 fcallFuncUnknown(env, op);
// FCallFunc where the callable is a Func value.
3794 void fcallFuncFunc(ISS& env, const bc::FCallFunc& op) {
3795 assertx(topC(env).subtypeOf(BFunc));
3797 // TODO: optimize me
3798 fcallFuncUnknown(env, op);
// FCallFunc where the callable is an object (invokable).
3801 void fcallFuncObj(ISS& env, const bc::FCallFunc& op) {
3802 assertx(topC(env).subtypeOf(BObj));
3804 // TODO: optimize me
3805 fcallFuncUnknown(env, op);
// FCallFunc where the callable is a string function name: resolve it; if
// dyn-call semantics don't matter, rewrite to a direct FCallFuncD.
3808 void fcallFuncStr(ISS& env, const bc::FCallFunc& op) {
3809 assertx(topC(env).subtypeOf(BStr));
3810 auto funcName = getNameFromType(topC(env));
3811 if (!funcName) return fcallFuncUnknown(env, op);
3813 funcName = normalizeNS(funcName);
3814 if (!isNSNormalized(funcName) || !notClassMethodPair(funcName)) {
3815 return fcallFuncUnknown(env, op);
3818 auto const rfunc = env.index.resolve_func(env.ctx, funcName);
3819 if (!rfunc.mightCareAboutDynCalls()) {
3820 return reduce(env, bc::PopC {}, bc::FCallFuncD { op.fca, funcName });
3823 auto const updateBC = [&] (FCallArgs fca) {
3824 return bc::FCallFunc { std::move(fca) };
3827 auto const numInOut = op.fca.enforceInOut()
3828 ? env.index.lookup_num_inout_params(env.ctx, rfunc)
3829 : std::nullopt;
3831 if (fcallOptimizeChecks(env, op.fca, rfunc, updateBC, numInOut, false, 0)) {
3832 return;
// The string callable stays on the stack here, hence numExtraInputs == 1.
3834 fcallKnownImpl(env, op.fca, rfunc, TBottom, false, 1, updateBC, numInOut);
3837 } // namespace
// FCallFunc: dispatch on the static type of the callable on top of the
// stack to the specialized helpers above.
3839 void in(ISS& env, const bc::FCallFunc& op) {
3840 auto const callable = topC(env);
3841 if (callable.subtypeOf(BFunc)) return fcallFuncFunc(env, op);
3842 if (callable.subtypeOf(BClsMeth)) return fcallFuncClsMeth(env, op);
3843 if (callable.subtypeOf(BObj)) return fcallFuncObj(env, op);
3844 if (callable.subtypeOf(BStr)) return fcallFuncStr(env, op);
3845 fcallFuncUnknown(env, op);
// ResolveFunc: produces a Func pointer for a named function.
3848 void in(ISS& env, const bc::ResolveFunc& op) {
3849 push(env, TFunc);
// ResolveMethCaller: produces a Func for a meth_caller; no further
// inference yet (see task below).
3852 void in(ISS& env, const bc::ResolveMethCaller& op) {
3853 // TODO (T29639296)
3854 push(env, TFunc);
// ResolveRFunc: consumes the generics and produces either a plain Func or
// a reified RFunc.
3857 void in(ISS& env, const bc::ResolveRFunc& op) {
3858 popC(env);
3859 push(env, union_of(TFunc, TRFunc));
3862 namespace {
// Type of the current context's class (with context sensitivity), or TCls
// when there is no self.
3864 Type ctxCls(ISS& env) {
3865 auto const s = selfCls(env);
3866 return setctx(s ? *s : TCls);
// Map a SpecialClsRef (static:: / self:: / parent::) to the best known
// class type, falling back to TCls when unresolvable.
3869 Type specialClsRefToCls(ISS& env, SpecialClsRef ref) {
3870 if (!env.ctx.cls) return TCls;
3871 auto const op = [&]()-> Optional<Type> {
3872 switch (ref) {
3873 case SpecialClsRef::Static: return ctxCls(env);
3874 case SpecialClsRef::Self: return selfClsExact(env);
3875 case SpecialClsRef::Parent: return parentClsExact(env);
3877 always_assert(false);
3878 }();
3879 return op ? *op : TCls;
// Shared logic for ResolveClsMethodS / ResolveRClsMethodS: rewrite to the
// direct-name form when the class is exactly known and no reified
// generics are involved; otherwise push a (possibly reified) ClsMeth.
3882 template<bool reifiedVersion = false>
3883 void resolveClsMethodSImpl(ISS& env, SpecialClsRef ref, LSString meth_name) {
3884 auto const clsTy = specialClsRefToCls(env, ref);
3885 auto const rfunc = env.index.resolve_method(env.ctx, clsTy, meth_name);
3886 if (is_specialized_cls(clsTy) && dcls_of(clsTy).type == DCls::Exact &&
3887 !rfunc.couldHaveReifiedGenerics()) {
3888 auto const clsName = dcls_of(clsTy).cls.name();
3889 return reduce(env, bc::ResolveClsMethodD { clsName, meth_name });
3891 if (reifiedVersion) popC(env);
3892 if (!reifiedVersion || !rfunc.couldHaveReifiedGenerics()) {
3893 push(env, TClsMeth);
3894 } else {
3895 push(env, union_of(TClsMeth, TRClsMeth));
3899 } // namespace
// ResolveClsMethod: consumes the method name, produces a ClsMeth.
3901 void in(ISS& env, const bc::ResolveClsMethod& op) {
3902 popC(env);
3903 push(env, TClsMeth);
// ResolveClsMethodD: direct class/method names, produces a ClsMeth.
3906 void in(ISS& env, const bc::ResolveClsMethodD& op) {
3907 push(env, TClsMeth);
// ResolveClsMethodS: special class ref flavor; see resolveClsMethodSImpl.
3910 void in(ISS& env, const bc::ResolveClsMethodS& op) {
3911 resolveClsMethodSImpl<false>(env, op.subop1, op.str2);
// ResolveRClsMethod: consumes generics and method name; result may be a
// plain or reified ClsMeth.
3914 void in(ISS& env, const bc::ResolveRClsMethod&) {
3915 popC(env);
3916 popC(env);
3917 push(env, union_of(TClsMeth, TRClsMeth));
// ResolveRClsMethodD: direct-name reified flavor; consumes the generics.
3920 void in(ISS& env, const bc::ResolveRClsMethodD&) {
3921 popC(env);
3922 push(env, union_of(TClsMeth, TRClsMeth));
// ResolveRClsMethodS: reified special-class-ref flavor.
3925 void in(ISS& env, const bc::ResolveRClsMethodS& op) {
3926 resolveClsMethodSImpl<true>(env, op.subop1, op.str2);
// ResolveClass: push the exact class when resolvable; otherwise the result
// may be a classname string, Cls, or LazyCls.
3929 void in(ISS& env, const bc::ResolveClass& op) {
3930 // TODO (T61651936)
3931 auto cls = env.index.resolve_class(env.ctx, op.str1);
3932 if (cls && cls->resolved()) {
3933 push(env, clsExact(*cls));
3934 } else {
3935 // If the class is not resolved,
3936 // it might not be unique or it might not be a valid classname.
3937 push(env, union_of(TArrKey, TCls, TLazyCls));
// LazyClass: effect-free; pushes the lazy-class constant for the name.
3941 void in(ISS& env, const bc::LazyClass& op) {
3942 effect_free(env);
3943 push(env, lazyclsval(op.str1));
3946 namespace {
3948 const StaticString
3949 s_DynamicContextOverrideUnsafe("__SystemLib\\DynamicContextOverrideUnsafe");
// A call with the "unsafe dynamic context override" sentinel always
// throws; callers turn it into an unreachable path.
3951 bool isBadContext(const FCallArgs& fca) {
3952 return fca.context() &&
3953 fca.context()->isame(s_DynamicContextOverrideUnsafe.get());
// Compute the Context to resolve the callee in, honoring an explicit
// context class on the FCallArgs when it resolves.
3956 Context getCallContext(const ISS& env, const FCallArgs& fca) {
3957 if (auto const name = fca.context()) {
3958 auto const rcls = env.index.resolve_class(env.ctx, name);
3959 if (rcls && rcls->cls()) {
3960 return Context { env.ctx.unit, env.ctx.func, rcls->cls() };
3962 return Context { env.ctx.unit, env.ctx.func, nullptr };
3964 return env.ctx;
// Model the nullsafe (?->) null path without reducing to new bytecode:
// pop all inputs, then the outputs are the untouched inout inputs plus
// null.
3967 void fcallObjMethodNullsafeNoFold(ISS& env,
3968 const FCallArgs& fca,
3969 bool extraInput) {
3970 assertx(fca.asyncEagerTarget() == NoBlockId);
3971 if (extraInput) popC(env);
3972 if (fca.hasGenerics()) popC(env);
3973 if (fca.hasUnpack()) popC(env);
3974 auto const numArgs = fca.numArgs();
3975 auto const numRets = fca.numRets();
3976 std::vector<Type> inOuts;
3977 for (auto i = uint32_t{0}; i < numArgs; ++i) {
3978 if (fca.enforceInOut() && fca.isInOut(numArgs - i - 1)) {
3979 inOuts.emplace_back(popCV(env));
3980 } else {
3981 popCV(env);
3984 popU(env);
3985 popCU(env);
3986 for (auto i = uint32_t{0}; i < numRets - 1; ++i) popU(env);
3987 assertx(inOuts.size() == numRets - 1);
3988 for (auto& t : inOuts) push(env, std::move(t));
3989 push(env, TInitNull);
// Nullsafe call on a definitely-null base: fold the whole call to pops
// plus Null when there are no inout args.
3992 void fcallObjMethodNullsafe(ISS& env, const FCallArgs& fca, bool extraInput) {
3993 // Don't fold if there's inout arguments. We could, in principal,
3994 // fold away the inout case like we do below, but we don't have the
3995 // bytecodes necessary to shuffle the stack.
3996 if (fca.enforceInOut()) {
3997 for (uint32_t i = 0; i < fca.numArgs(); ++i) {
3998 if (fca.isInOut(i)) {
3999 return fcallObjMethodNullsafeNoFold(env, fca, extraInput);
4004 BytecodeVec repl;
4005 if (extraInput) repl.push_back(bc::PopC {});
4006 if (fca.hasGenerics()) repl.push_back(bc::PopC {});
4007 if (fca.hasUnpack()) repl.push_back(bc::PopC {});
4009 auto const numArgs = fca.numArgs();
4010 for (uint32_t i = 0; i < numArgs; ++i) {
4011 assertx(topC(env, repl.size()).subtypeOf(BInitCell));
4012 repl.push_back(bc::PopC {});
4014 repl.push_back(bc::PopU {});
4015 repl.push_back(bc::PopC {});
4016 assertx(fca.numRets() == 1);
4017 repl.push_back(bc::Null {});
4019 reduce(env, std::move(repl));
// Shared implementation for FCallObjMethod / FCallObjMethodD: classify the
// base (object / null / non-object), handle the always-throw and
// nullsafe-null-only cases, resolve the method, then run the common
// optimize / fold / known-call pipeline and refine the base's location.
4022 template <typename Op, class UpdateBC>
4023 void fcallObjMethodImpl(ISS& env, const Op& op, SString methName, bool dynamic,
4024 bool extraInput, UpdateBC updateBC) {
4025 auto const nullThrows = op.subop3 == ObjMethodOp::NullThrows;
4026 auto const inputPos = op.fca.numInputs() + (extraInput ? 2 : 1);
4027 auto const input = topC(env, inputPos);
4028 auto const location = topStkEquiv(env, inputPos);
4029 auto const mayCallMethod = input.couldBe(BObj);
4030 auto const mayUseNullsafe = !nullThrows && input.couldBe(BNull);
4031 auto const mayThrowNonObj = !input.subtypeOf(nullThrows ? BObj : BOptObj);
// After the call, narrow the type of the local/stack slot the base came
// from (it survived the call, so it was an object or null as allowed).
4033 auto const refineLoc = [&] {
4034 if (location == NoLocalId) return;
4035 if (!refineLocation(env, location, [&] (Type t) {
4036 if (nullThrows) return intersection_of(t, TObj);
4037 if (!t.couldBe(BUninit)) return intersection_of(t, TOptObj);
4038 if (!t.couldBe(BObj)) return intersection_of(t, TNull);
4039 return t;
4040 })) {
4041 unreachable(env);
// The call always throws: drop the eager target first (so the reduce is
// well-formed), otherwise model an unknown call producing Bottom.
4045 auto const throws = [&] {
4046 if (op.fca.asyncEagerTarget() != NoBlockId) {
4047 // Kill the async eager target if the function never returns.
4048 return reduce(env, updateBC(op.fca.withoutAsyncEagerTarget()));
4050 if (extraInput) popC(env);
4051 fcallUnknownImpl(env, op.fca, TBottom);
4052 unreachable(env);
4055 if (!mayCallMethod && !mayUseNullsafe) {
4056 // This FCallObjMethodD may only throw
4057 return throws();
4060 if (!mayCallMethod && !mayThrowNonObj) {
4061 // Null input, this may only return null, so do that.
4062 return fcallObjMethodNullsafe(env, op.fca, extraInput);
4065 if (!mayCallMethod) {
4066 // May only return null, but can't fold as we may still throw.
4067 assertx(mayUseNullsafe && mayThrowNonObj);
4068 if (op.fca.asyncEagerTarget() != NoBlockId) {
4069 return reduce(env, updateBC(op.fca.withoutAsyncEagerTarget()));
4071 return fcallObjMethodNullsafeNoFold(env, op.fca, extraInput);
4074 if (isBadContext(op.fca)) return throws();
4076 auto const ctx = getCallContext(env, op.fca);
4077 auto const ctxTy = input.couldBe(BObj)
4078 ? intersection_of(input, TObj)
4079 : TObj;
4080 auto const clsTy = objcls(ctxTy);
4081 auto const rfunc = env.index.resolve_method(ctx, clsTy, methName);
4083 auto const numInOut = op.fca.enforceInOut()
4084 ? env.index.lookup_num_inout_params(env.ctx, rfunc)
4085 : std::nullopt;
4087 auto const canFold = !mayUseNullsafe && !mayThrowNonObj;
4088 auto const numExtraInputs = extraInput ? 1 : 0;
4089 if (fcallOptimizeChecks(env, op.fca, rfunc, updateBC,
4090 numInOut, mayUseNullsafe, numExtraInputs) ||
4091 (canFold && fcallTryFold(env, op.fca, rfunc, ctxTy, dynamic,
4092 numExtraInputs))) {
4093 return;
// Exactly-known callee and an empty class hint: fill in the hint.
4096 if (rfunc.exactFunc() && op.str2->empty()) {
4097 return reduce(env, updateBC(op.fca, rfunc.exactFunc()->cls->name));
4100 fcallKnownImpl(env, op.fca, rfunc, ctxTy, mayUseNullsafe, extraInput ? 1 : 0,
4101 updateBC, numInOut);
4102 refineLoc();
4105 } // namespace
// FCallObjMethodD: direct-named object method call. Strips an unused
// generics vec when the callee can't be reified, then defers to
// fcallObjMethodImpl.
4107 void in(ISS& env, const bc::FCallObjMethodD& op) {
4108 if (op.fca.hasGenerics()) {
4109 auto const tsList = topC(env);
4110 if (!tsList.couldBe(BVec)) {
4111 return unreachable(env);
4114 auto const input = topC(env, op.fca.numInputs() + 1);
4115 auto const clsTy = input.couldBe(BObj)
4116 ? objcls(intersection_of(input, TObj))
4117 : TCls;
4118 auto const rfunc = env.index.resolve_method(env.ctx, clsTy, op.str4);
4119 if (!rfunc.couldHaveReifiedGenerics()) {
4120 return reduce(
4121 env,
4122 bc::PopC {},
4123 bc::FCallObjMethodD {
4124 op.fca.withoutGenerics(), op.str2, op.subop3, op.str4 }
// Re-emit with updated FCallArgs and optionally a better class hint.
4129 auto const updateBC = [&] (FCallArgs fca, SString clsHint = nullptr) {
4130 if (!clsHint) clsHint = op.str2;
4131 return bc::FCallObjMethodD { std::move(fca), clsHint, op.subop3, op.str4 };
4133 fcallObjMethodImpl(env, op, op.str4, false, false, updateBC);
// FCallObjMethod: method name comes from the stack. Rewrites to the
// direct-named form when the name is a known static string and dyn-call
// semantics don't matter.
4136 void in(ISS& env, const bc::FCallObjMethod& op) {
4137 auto const methName = getNameFromType(topC(env));
4138 if (!methName) {
4139 popC(env);
4140 fcallUnknownImpl(env, op.fca);
4141 return;
4144 auto const input = topC(env, op.fca.numInputs() + 2);
4145 auto const clsTy = input.couldBe(BObj)
4146 ? objcls(intersection_of(input, TObj))
4147 : TCls;
4148 auto const rfunc = env.index.resolve_method(env.ctx, clsTy, methName);
4149 if (!rfunc.mightCareAboutDynCalls()) {
4150 return reduce(
4151 env,
4152 bc::PopC {},
4153 bc::FCallObjMethodD { op.fca, op.str2, op.subop3, methName }
4157 auto const updateBC = [&] (FCallArgs fca, SString clsHint = nullptr) {
4158 if (!clsHint) clsHint = op.str2;
4159 return bc::FCallObjMethod { std::move(fca), clsHint, op.subop3 };
// dynamic = true, extraInput = true (the name string is still on stack).
4161 fcallObjMethodImpl(env, op, methName, true, true, updateBC);
4164 namespace {
// Shared implementation for the FCallClsMethod* family: bail on the unsafe
// dynamic-context sentinel, resolve the method against clsTy, then run the
// common optimize / fold / known-call pipeline.
4166 template <typename Op, class UpdateBC>
4167 void fcallClsMethodImpl(ISS& env, const Op& op, Type clsTy, SString methName,
4168 bool dynamic, uint32_t numExtraInputs,
4169 UpdateBC updateBC) {
4170 if (isBadContext(op.fca)) {
4171 for (auto i = uint32_t{0}; i < numExtraInputs; ++i) popC(env);
4172 fcallUnknownImpl(env, op.fca);
4173 unreachable(env);
4174 return;
4177 auto const ctx = getCallContext(env, op.fca);
4178 auto const rfunc = env.index.resolve_method(ctx, clsTy, methName);
4180 auto const numInOut = op.fca.enforceInOut()
4181 ? env.index.lookup_num_inout_params(env.ctx, rfunc)
4182 : std::nullopt;
4184 if (fcallOptimizeChecks(env, op.fca, rfunc, updateBC, numInOut, false,
4185 numExtraInputs) ||
4186 fcallTryFold(env, op.fca, rfunc, clsTy, dynamic, numExtraInputs)) {
4187 return;
// Exactly-known callee and an empty class hint: fill in the hint.
4190 if (rfunc.exactFunc() && op.str2->empty()) {
4191 return reduce(env, updateBC(op.fca, rfunc.exactFunc()->cls->name));
4194 fcallKnownImpl(env, op.fca, rfunc, clsTy, false /* nullsafe */,
4195 numExtraInputs, updateBC, numInOut);
4198 } // namespace
// FCallClsMethodD: direct class and method names. Strips an unused
// generics vec, tries the builtin fast-path, then defers to
// fcallClsMethodImpl.
4200 void in(ISS& env, const bc::FCallClsMethodD& op) {
4201 auto const rcls = env.index.resolve_class(env.ctx, op.str3);
4202 auto const clsTy = rcls ? clsExact(*rcls) : TCls;
4203 auto const rfunc = env.index.resolve_method(env.ctx, clsTy, op.str4);
4205 if (op.fca.hasGenerics() && !rfunc.couldHaveReifiedGenerics()) {
4206 return reduce(
4207 env,
4208 bc::PopC {},
4209 bc::FCallClsMethodD {
4210 op.fca.withoutGenerics(), op.str2, op.str3, op.str4 }
4214 if (auto const func = rfunc.exactFunc()) {
4215 assertx(func->cls != nullptr);
4216 if (func->cls->name->same(op.str3) &&
4217 optimize_builtin(env, func, op.fca)) {
4218 // When we use FCallBuiltin to call a static method, the litstr method
4219 // name will be a fully qualified cls::fn (e.g. "HH\Map::fromItems").
4221 // As a result, we can only do this optimization if the name of the
4222 // builtin function's class matches this op's class name immediate.
4223 return;
4227 auto const updateBC = [&] (FCallArgs fca, SString clsHint = nullptr) {
4228 if (!clsHint) clsHint = op.str2;
4229 return bc::FCallClsMethodD { std::move(fca), clsHint, op.str3, op.str4 };
4231 fcallClsMethodImpl(env, op, clsTy, op.str4, false, 0, updateBC);
// FCallClsMethod: class and method name both come from the stack. Rewrites
// to the direct-named form when both are exactly known and dynamic-call
// logging doesn't apply.
4234 void in(ISS& env, const bc::FCallClsMethod& op) {
4235 auto const methName = getNameFromType(topC(env, 1));
4236 if (!methName) {
4237 popC(env);
4238 popC(env);
4239 fcallUnknownImpl(env, op.fca);
4240 return;
4243 auto const clsTy = topC(env);
4244 auto const rfunc = env.index.resolve_method(env.ctx, clsTy, methName);
4245 auto const skipLogAsDynamicCall =
4246 !RuntimeOption::EvalLogKnownMethodsAsDynamicCalls &&
4247 op.subop3 == IsLogAsDynamicCallOp::DontLogAsDynamicCall;
4248 if (is_specialized_cls(clsTy) && dcls_of(clsTy).type == DCls::Exact &&
4249 (!rfunc.mightCareAboutDynCalls() || skipLogAsDynamicCall)) {
4250 auto const clsName = dcls_of(clsTy).cls.name();
4251 return reduce(
4252 env,
4253 bc::PopC {},
4254 bc::PopC {},
4255 bc::FCallClsMethodD { op.fca, op.str2, clsName, methName }
4259 auto const updateBC = [&] (FCallArgs fca, SString clsHint = nullptr) {
4260 if (!clsHint) clsHint = op.str2;
4261 return bc::FCallClsMethod { std::move(fca), clsHint, op.subop3 };
// dynamic = true; two extra inputs (class and method name) still on stack.
4263 fcallClsMethodImpl(env, op, clsTy, methName, true, 2, updateBC);
4266 namespace {
// Shared implementation for FCallClsMethodS / FCallClsMethodSD: resolve
// the special class ref, rewrite static:: to the direct form when exact,
// then run the common optimize / fold / known-call pipeline with ctxCls as
// the calling context type.
4268 template <typename Op, class UpdateBC>
4269 void fcallClsMethodSImpl(ISS& env, const Op& op, SString methName, bool dynamic,
4270 bool extraInput, UpdateBC updateBC) {
4271 auto const clsTy = specialClsRefToCls(env, op.subop3);
4272 if (is_specialized_cls(clsTy) && dcls_of(clsTy).type == DCls::Exact &&
4273 !dynamic && op.subop3 == SpecialClsRef::Static) {
4274 auto const clsName = dcls_of(clsTy).cls.name();
4275 reduce(env, bc::FCallClsMethodD { op.fca, op.str2, clsName, methName });
4276 return;
4279 auto const rfunc = env.index.resolve_method(env.ctx, clsTy, methName);
4281 auto const numInOut = op.fca.enforceInOut()
4282 ? env.index.lookup_num_inout_params(env.ctx, rfunc)
4283 : std::nullopt;
4285 auto const numExtraInputs = extraInput ? 1 : 0;
4286 if (fcallOptimizeChecks(env, op.fca, rfunc, updateBC, numInOut, false,
4287 numExtraInputs) ||
4288 fcallTryFold(env, op.fca, rfunc, ctxCls(env), dynamic,
4289 numExtraInputs)) {
4290 return;
// Exactly-known callee and an empty class hint: fill in the hint.
4293 if (rfunc.exactFunc() && op.str2->empty()) {
4294 return reduce(env, updateBC(op.fca, rfunc.exactFunc()->cls->name));
4297 fcallKnownImpl(env, op.fca, rfunc, ctxCls(env), false /* nullsafe */,
4298 extraInput ? 1 : 0, updateBC, numInOut);
4301 } // namespace
// FCallClsMethodSD: special class ref with a direct method name. Strips an
// unused generics vec, then defers to fcallClsMethodSImpl.
4303 void in(ISS& env, const bc::FCallClsMethodSD& op) {
4304 if (op.fca.hasGenerics()) {
4305 auto const clsTy = specialClsRefToCls(env, op.subop3);
4306 auto const rfunc = env.index.resolve_method(env.ctx, clsTy, op.str4);
4307 if (!rfunc.couldHaveReifiedGenerics()) {
4308 return reduce(
4309 env,
4310 bc::PopC {},
4311 bc::FCallClsMethodSD {
4312 op.fca.withoutGenerics(), op.str2, op.subop3, op.str4 }
4317 auto const updateBC = [&] (FCallArgs fca, SString clsHint = nullptr) {
4318 if (!clsHint) clsHint = op.str2;
4319 return bc::FCallClsMethodSD { std::move(fca), clsHint, op.subop3, op.str4 };
4321 fcallClsMethodSImpl(env, op, op.str4, false, false, updateBC);
// FCallClsMethodS: special class ref with the method name on the stack.
// Rewrites to the direct-named form when the name is known and neither
// dyn-call logging nor reified generics matter.
4324 void in(ISS& env, const bc::FCallClsMethodS& op) {
4325 auto const methName = getNameFromType(topC(env));
4326 if (!methName) {
4327 popC(env);
4328 fcallUnknownImpl(env, op.fca);
4329 return;
4332 auto const clsTy = specialClsRefToCls(env, op.subop3);
4333 auto const rfunc = env.index.resolve_method(env.ctx, clsTy, methName);
4334 if (!rfunc.mightCareAboutDynCalls() && !rfunc.couldHaveReifiedGenerics()) {
4335 return reduce(
4336 env,
4337 bc::PopC {},
4338 bc::FCallClsMethodSD { op.fca, op.str2, op.subop3, methName }
4342 auto const updateBC = [&] (FCallArgs fca, SString clsHint = nullptr) {
4343 if (!clsHint) clsHint = op.str2;
4344 return bc::FCallClsMethodS { std::move(fca), clsHint, op.subop3 };
4346 fcallClsMethodSImpl(env, op, methName, true, true, updateBC);
4349 namespace {
4351 void newObjDImpl(ISS& env, const StringData* className, bool rflavor) {
4352 auto const rcls = env.index.resolve_class(env.ctx, className);
4353 if (!rcls) {
4354 if (rflavor) popC(env);
4355 push(env, TObj);
4356 return;
4358 if (rflavor && !rcls->couldHaveReifiedGenerics()) {
4359 return reduce(env, bc::PopC {}, bc::NewObjD { className });
4361 auto const isCtx = !rcls->couldBeOverriden() && env.ctx.cls &&
4362 rcls->same(env.index.resolve_class(env.ctx.cls));
4363 if (rflavor) popC(env);
4364 push(env, setctx(objExact(*rcls), isCtx));
4367 } // namespace
4369 void in(ISS& env, const bc::NewObjD& op) { newObjDImpl(env, op.str1, false); }
4370 void in(ISS& env, const bc::NewObjRD& op) { newObjDImpl(env, op.str1, true); }
4372 void in(ISS& env, const bc::NewObjS& op) {
4373 auto const cls = specialClsRefToCls(env, op.subop1);
4374 if (!is_specialized_cls(cls)) {
4375 push(env, TObj);
4376 return;
4379 auto const dcls = dcls_of(cls);
4380 auto const exact = dcls.type == DCls::Exact;
4381 if (exact && !dcls.cls.couldHaveReifiedGenerics() &&
4382 (!dcls.cls.couldBeOverriden() || equivalently_refined(cls, unctx(cls)))) {
4383 return reduce(env, bc::NewObjD { dcls.cls.name() });
4386 push(env, toobj(cls));
4389 void in(ISS& env, const bc::NewObj& op) {
4390 auto const cls = topC(env);
4391 if (!cls.subtypeOf(BCls) || !is_specialized_cls(cls)) {
4392 popC(env);
4393 push(env, TObj);
4394 return;
4397 auto const dcls = dcls_of(cls);
4398 auto const exact = dcls.type == DCls::Exact;
4399 if (exact && !dcls.cls.mightCareAboutDynConstructs()) {
4400 return reduce(
4401 env,
4402 bc::PopC {},
4403 bc::NewObjD { dcls.cls.name() }
4407 popC(env);
4408 push(env, toobj(cls));
4411 void in(ISS& env, const bc::NewObjR& op) {
4412 auto const generics = topC(env);
4413 auto const cls = topC(env, 1);
4415 if (generics.subtypeOf(BInitNull)) {
4416 return reduce(
4417 env,
4418 bc::PopC {},
4419 bc::NewObj {}
4423 if (!cls.subtypeOf(BCls) || !is_specialized_cls(cls)) {
4424 popC(env);
4425 popC(env);
4426 push(env, TObj);
4427 return;
4430 auto const dcls = dcls_of(cls);
4431 auto const exact = dcls.type == DCls::Exact;
4432 if (exact && !dcls.cls.couldHaveReifiedGenerics()) {
4433 return reduce(
4434 env,
4435 bc::PopC {},
4436 bc::NewObj {}
4440 popC(env);
4441 popC(env);
4442 push(env, toobj(cls));
4445 namespace {
4447 bool objMightHaveConstProps(const Type& t) {
4448 assertx(t.subtypeOf(BObj));
4449 assertx(is_specialized_obj(t));
4450 auto const dobj = dobj_of(t);
4451 switch (dobj.type) {
4452 case DObj::Exact:
4453 return dobj.cls.couldHaveConstProp();
4454 case DObj::Sub:
4455 return dobj.cls.derivedCouldHaveConstProp();
4457 not_reached();
4462 void in(ISS& env, const bc::FCallCtor& op) {
4463 auto const obj = topC(env, op.fca.numInputs() + 1);
4464 assertx(op.fca.numRets() == 1);
4466 if (!is_specialized_obj(obj)) {
4467 return fcallUnknownImpl(env, op.fca);
4470 if (op.fca.lockWhileUnwinding() && !objMightHaveConstProps(obj)) {
4471 return reduce(
4472 env, bc::FCallCtor { op.fca.withoutLockWhileUnwinding(), op.str2 }
4476 auto const dobj = dobj_of(obj);
4477 auto const exact = dobj.type == DObj::Exact;
4478 auto const rfunc = env.index.resolve_ctor(env.ctx, dobj.cls, exact);
4479 if (!rfunc) {
4480 return fcallUnknownImpl(env, op.fca);
4483 auto const updateFCA = [&] (FCallArgs&& fca) {
4484 return bc::FCallCtor { std::move(fca), op.str2 };
4487 auto const numInOut = op.fca.enforceInOut()
4488 ? env.index.lookup_num_inout_params(env.ctx, *rfunc)
4489 : std::nullopt;
4491 auto const canFold = obj.subtypeOf(BObj);
4492 if (fcallOptimizeChecks(env, op.fca, *rfunc, updateFCA, numInOut, false, 0) ||
4493 (canFold && fcallTryFold(env, op.fca, *rfunc,
4494 obj, false /* dynamic */, 0))) {
4495 return;
4498 if (rfunc->exactFunc() && op.str2->empty()) {
4499 // We've found the exact func that will be called, set the hint.
4500 return reduce(env, bc::FCallCtor { op.fca, rfunc->exactFunc()->cls->name });
4503 fcallKnownImpl(env, op.fca, *rfunc, obj, false /* nullsafe */, 0,
4504 updateFCA, numInOut);
4507 void in(ISS& env, const bc::LockObj& op) {
4508 auto const t = topC(env);
4509 auto bail = [&]() {
4510 discard(env, 1);
4511 return push(env, t);
4513 if (!t.subtypeOf(BObj)) return bail();
4514 if (!is_specialized_obj(t) || objMightHaveConstProps(t)) {
4515 nothrow(env);
4516 return bail();
4518 reduce(env);
4521 namespace {
4523 // baseLoc is NoLocalId for non-local iterators.
4524 void iterInitImpl(ISS& env, IterArgs ita, BlockId target, LocalId baseLoc) {
4525 auto const local = baseLoc != NoLocalId;
4526 auto const sourceLoc = local ? baseLoc : topStkLocal(env);
4527 auto const base = local ? locAsCell(env, baseLoc) : topC(env);
4528 auto ity = iter_types(base);
4530 auto const fallthrough = [&] {
4531 auto const baseCannotBeObject = !base.couldBe(BObj);
4532 setIter(env, ita.iterId, LiveIter { ity, sourceLoc, NoLocalId, env.bid,
4533 false, baseCannotBeObject });
4534 // Do this after setting the iterator, in case it clobbers the base local
4535 // equivalency.
4536 setLoc(env, ita.valId, std::move(ity.value));
4537 if (ita.hasKey()) {
4538 setLoc(env, ita.keyId, std::move(ity.key));
4539 setIterKey(env, ita.iterId, ita.keyId);
4543 assertx(iterIsDead(env, ita.iterId));
4545 if (!ity.mayThrowOnInit) {
4546 if (ity.count == IterTypes::Count::Empty && will_reduce(env)) {
4547 if (local) {
4548 reduce(env);
4549 } else {
4550 reduce(env, bc::PopC{});
4552 return jmp_setdest(env, target);
4554 nothrow(env);
4557 if (!local) popC(env);
4559 switch (ity.count) {
4560 case IterTypes::Count::Empty:
4561 mayReadLocal(env, ita.valId);
4562 if (ita.hasKey()) mayReadLocal(env, ita.keyId);
4563 jmp_setdest(env, target);
4564 return;
4565 case IterTypes::Count::Single:
4566 case IterTypes::Count::NonEmpty:
4567 fallthrough();
4568 return jmp_nevertaken(env);
4569 case IterTypes::Count::ZeroOrOne:
4570 case IterTypes::Count::Any:
4571 // Take the branch before setting locals if the iter is already
4572 // empty, but after popping. Similar for the other IterInits
4573 // below.
4574 env.propagate(target, &env.state);
4575 fallthrough();
4576 return;
4578 always_assert(false);
4581 // baseLoc is NoLocalId for non-local iterators.
4582 void iterNextImpl(ISS& env, IterArgs ita, BlockId target, LocalId baseLoc) {
4583 auto const curVal = peekLocRaw(env, ita.valId);
4584 auto const curKey = ita.hasKey() ? peekLocRaw(env, ita.keyId) : TBottom;
4586 auto noThrow = false;
4587 auto const noTaken = match<bool>(
4588 env.state.iters[ita.iterId],
4589 [&] (DeadIter) {
4590 always_assert(false && "IterNext on dead iter");
4591 return false;
4593 [&] (const LiveIter& ti) {
4594 if (!ti.types.mayThrowOnNext) noThrow = true;
4595 if (ti.baseLocal != NoLocalId) hasInvariantIterBase(env);
4596 switch (ti.types.count) {
4597 case IterTypes::Count::Single:
4598 case IterTypes::Count::ZeroOrOne:
4599 return true;
4600 case IterTypes::Count::NonEmpty:
4601 case IterTypes::Count::Any:
4602 setLoc(env, ita.valId, ti.types.value);
4603 if (ita.hasKey()) {
4604 setLoc(env, ita.keyId, ti.types.key);
4605 setIterKey(env, ita.iterId, ita.keyId);
4607 return false;
4608 case IterTypes::Count::Empty:
4609 always_assert(false);
4611 not_reached();
4615 if (noTaken && noThrow && will_reduce(env)) {
4616 auto const iterId = safe_cast<IterId>(ita.iterId);
4617 return baseLoc == NoLocalId
4618 ? reduce(env, bc::IterFree { iterId })
4619 : reduce(env, bc::LIterFree { iterId, baseLoc });
4622 mayReadLocal(env, baseLoc);
4623 mayReadLocal(env, ita.valId);
4624 if (ita.hasKey()) mayReadLocal(env, ita.keyId);
4626 if (noThrow) nothrow(env);
4628 if (noTaken) {
4629 jmp_nevertaken(env);
4630 freeIter(env, ita.iterId);
4631 return;
4634 env.propagate(target, &env.state);
4636 freeIter(env, ita.iterId);
4637 setLocRaw(env, ita.valId, curVal);
4638 if (ita.hasKey()) setLocRaw(env, ita.keyId, curKey);
4643 void in(ISS& env, const bc::IterInit& op) {
4644 iterInitImpl(env, op.ita, op.target2, NoLocalId);
4647 void in(ISS& env, const bc::LIterInit& op) {
4648 iterInitImpl(env, op.ita, op.target3, op.loc2);
4651 void in(ISS& env, const bc::IterNext& op) {
4652 iterNextImpl(env, op.ita, op.target2, NoLocalId);
4655 void in(ISS& env, const bc::LIterNext& op) {
4656 iterNextImpl(env, op.ita, op.target3, op.loc2);
4659 void in(ISS& env, const bc::IterFree& op) {
4660 // IterFree is used for weak iterators too, so we can't assert !iterIsDead.
4661 auto const isNop = match<bool>(
4662 env.state.iters[op.iter1],
4663 [] (DeadIter) {
4664 return true;
4666 [&] (const LiveIter& ti) {
4667 if (ti.baseLocal != NoLocalId) hasInvariantIterBase(env);
4668 return false;
4672 if (isNop && will_reduce(env)) return reduce(env);
4674 nothrow(env);
4675 freeIter(env, op.iter1);
4678 void in(ISS& env, const bc::LIterFree& op) {
4679 nothrow(env);
4680 mayReadLocal(env, op.loc2);
4681 freeIter(env, op.iter1);
4685 * Any include/require (or eval) op kills all locals, and private properties.
4687 void inclOpImpl(ISS& env) {
4688 popC(env);
4689 killLocals(env);
4690 killThisProps(env);
4691 killPrivateStatics(env);
4692 push(env, TInitCell);
4695 void in(ISS& env, const bc::Incl&) { inclOpImpl(env); }
4696 void in(ISS& env, const bc::InclOnce&) { inclOpImpl(env); }
4697 void in(ISS& env, const bc::Req&) { inclOpImpl(env); }
4698 void in(ISS& env, const bc::ReqOnce&) { inclOpImpl(env); }
4699 void in(ISS& env, const bc::ReqDoc&) { inclOpImpl(env); }
4700 void in(ISS& env, const bc::Eval&) { inclOpImpl(env); }
4702 void in(ISS& env, const bc::This&) {
4703 if (thisAvailable(env)) {
4704 return reduce(env, bc::BareThis { BareThisOp::NeverNull });
4706 auto const ty = thisTypeNonNull(env);
4707 push(env, ty, StackThisId);
4708 setThisAvailable(env);
4709 if (ty.subtypeOf(BBottom)) unreachable(env);
4712 void in(ISS& env, const bc::LateBoundCls& op) {
4713 if (env.ctx.cls) effect_free(env);
4714 auto const ty = selfCls(env);
4715 push(env, setctx(ty ? *ty : TCls));
4718 void in(ISS& env, const bc::CheckThis&) {
4719 if (thisAvailable(env)) {
4720 return reduce(env);
4722 setThisAvailable(env);
4725 void in(ISS& env, const bc::BareThis& op) {
4726 if (thisAvailable(env)) {
4727 if (op.subop1 != BareThisOp::NeverNull) {
4728 return reduce(env, bc::BareThis { BareThisOp::NeverNull });
4732 auto const ty = thisType(env);
4733 switch (op.subop1) {
4734 case BareThisOp::Notice:
4735 break;
4736 case BareThisOp::NoNotice:
4737 effect_free(env);
4738 break;
4739 case BareThisOp::NeverNull:
4740 setThisAvailable(env);
4741 if (!env.state.unreachable) effect_free(env);
4742 return push(env, ty, StackThisId);
4745 push(env, ty, StackThisId);
4749 * Amongst other things, we use this to mark units non-persistent.
4751 void in(ISS& env, const bc::OODeclExists& op) {
4752 auto flag = popC(env);
4753 auto name = popC(env);
4754 push(env, [&] {
4755 if (!name.strictSubtypeOf(TStr)) return TBool;
4756 auto const v = tv(name);
4757 if (!v) return TBool;
4758 auto rcls = env.index.resolve_class(env.ctx, v->m_data.pstr);
4759 if (!rcls || !rcls->cls()) return TFalse;
4760 auto const exist = [&] () -> bool {
4761 switch (op.subop1) {
4762 case OODeclExistsOp::Class:
4763 return !(rcls->cls()->attrs & (AttrInterface | AttrTrait));
4764 case OODeclExistsOp::Interface:
4765 return rcls->cls()->attrs & AttrInterface;
4766 case OODeclExistsOp::Trait:
4767 return rcls->cls()->attrs & AttrTrait;
4769 not_reached();
4770 }();
4771 constprop(env);
4772 return exist ? TTrue : TFalse;
4773 } ());
4776 namespace {
4777 bool couldBeMocked(const Type& t) {
4778 if (is_specialized_cls(t)) {
4779 return dcls_of(t).cls.couldBeMocked();
4780 } else if (is_specialized_obj(t)) {
4781 return dobj_of(t).cls.couldBeMocked();
4783 // In practice this should not occur since this is used mostly on the result
4784 // of looked up type constraints.
4785 return true;
4789 using TCVec = std::vector<const TypeConstraint*>;
4791 void in(ISS& env, const bc::VerifyParamType& op) {
4792 IgnoreUsedParams _{env};
4794 if (env.ctx.func->isMemoizeImpl) {
4795 // a MemoizeImpl's params have already been checked by the wrapper
4796 return reduce(env);
4799 auto const& pinfo = env.ctx.func->params[op.loc1];
4800 // Generally we won't know anything about the params, but
4801 // analyze_func_inline does - and this can help with effect-free analysis
4802 TCVec tcs = {&pinfo.typeConstraint};
4803 for (auto const& t : pinfo.upperBounds) tcs.push_back(&t);
4804 if (std::all_of(std::begin(tcs), std::end(tcs),
4805 [&](const TypeConstraint* tc) {
4806 return env.index.satisfies_constraint(env.ctx,
4807 locAsCell(env, op.loc1),
4808 *tc);
4809 })) {
4810 if (!locAsCell(env, op.loc1).couldBe(BCls)) {
4811 return reduce(env);
4816 * We assume that if this opcode doesn't throw, the parameter was of the
4817 * specified type.
4819 auto tcT = TTop;
4820 for (auto const& constraint : tcs) {
4821 if (constraint->hasConstraint() && !constraint->isTypeVar() &&
4822 !constraint->isTypeConstant()) {
4823 auto t = env.index.lookup_constraint(env.ctx, *constraint);
4824 if (constraint->isThis() && couldBeMocked(t)) {
4825 t = unctx(std::move(t));
4827 FTRACE(2, " {} ({})\n", constraint->fullName(), show(t));
4828 tcT = intersection_of(std::move(tcT), std::move(t));
4829 if (tcT.subtypeOf(BBottom)) unreachable(env);
4832 if (tcT != TTop) setLoc(env, op.loc1, std::move(tcT));
4835 void in(ISS& env, const bc::VerifyParamTypeTS& op) {
4836 auto const a = topC(env);
4837 if (!a.couldBe(BDict)) {
4838 unreachable(env);
4839 popC(env);
4840 return;
4842 auto const constraint = env.ctx.func->params[op.loc1].typeConstraint;
4843 // TODO(T31677864): We are being extremely pessimistic here, relax it
4844 if (!env.ctx.func->isReified &&
4845 (!env.ctx.cls || !env.ctx.cls->hasReifiedGenerics) &&
4846 !env.index.could_have_reified_type(env.ctx, constraint)) {
4847 return reduce(env, bc::PopC {}, bc::VerifyParamType { op.loc1 });
4850 if (auto const inputTS = tv(a)) {
4851 if (!isValidTSType(*inputTS, false)) {
4852 unreachable(env);
4853 popC(env);
4854 return;
4856 auto const resolvedTS =
4857 resolve_type_structure(env, inputTS->m_data.parr).sarray();
4858 if (resolvedTS && resolvedTS != inputTS->m_data.parr) {
4859 reduce(env, bc::PopC {});
4860 reduce(env, bc::Dict { resolvedTS });
4861 reduce(env, bc::VerifyParamTypeTS { op.loc1 });
4862 return;
4864 if (shouldReduceToNonReifiedVerifyType(env, inputTS->m_data.parr)) {
4865 return reduce(env, bc::PopC {}, bc::VerifyParamType { op.loc1 });
4868 if (auto const last = last_op(env)) {
4869 if (last->op == Op::CombineAndResolveTypeStruct) {
4870 if (auto const last2 = last_op(env, 1)) {
4871 if (last2->op == Op::Dict &&
4872 shouldReduceToNonReifiedVerifyType(env, last2->Dict.arr1)) {
4873 return reduce(env, bc::PopC {}, bc::VerifyParamType { op.loc1 });
4878 popC(env);
4881 void verifyRetImpl(ISS& env, const TCVec& tcs,
4882 bool reduce_this, bool ts_flavor) {
4883 // If it is the ts flavor, then second thing on the stack, otherwise first
4884 auto stackT = topC(env, (int)ts_flavor);
4885 auto const stackEquiv = topStkEquiv(env, (int)ts_flavor);
4887 // If there is no return type constraint, or if the return type
4888 // constraint is a typevar, or if the top of stack is the same or a
4889 // subtype of the type constraint, then this is a no-op, unless
4890 // reified types could be involved.
4891 if (std::all_of(std::begin(tcs), std::end(tcs),
4892 [&](const TypeConstraint* tc) {
4893 return env.index.satisfies_constraint(env.ctx, stackT, *tc);
4894 })) {
4895 if (ts_flavor) {
4896 // we wouldn't get here if reified types were definitely not
4897 // involved, so just bail.
4898 popC(env);
4899 popC(env);
4900 push(env, std::move(stackT), stackEquiv);
4901 return;
4903 return reduce(env);
4906 std::vector<Type> constraintTypes;
4907 auto dont_reduce = false;
4909 for (auto const& constraint : tcs) {
4910 // When the constraint is not soft.
4911 // We can safely assume that either VerifyRetTypeC will
4912 // throw or it will produce a value whose type is compatible with the
4913 // return type constraint.
4914 auto tcT = remove_uninit(env.index.lookup_constraint(env.ctx, *constraint));
4915 constraintTypes.push_back(tcT);
4917 // In some circumstances, verifyRetType can modify the type. If it
4918 // does that we can't reduce even when we know it succeeds.
4919 // VerifyRetType will convert a TCls to a TStr implicitly
4920 // (and possibly warn)
4921 if (tcT.couldBe(BStr) && stackT.couldBe(BCls | BLazyCls)) {
4922 stackT |= TSStr;
4923 dont_reduce = true;
4926 // If the constraint is soft, then there are no optimizations we can safely
4927 // do here, so just leave the top of stack as is.
4928 if (constraint->isSoft() ||
4929 (RuntimeOption::EvalEnforceGenericsUB < 2 &&
4930 constraint->isUpperBound()))
4932 if (ts_flavor) popC(env);
4933 popC(env);
4934 push(env, std::move(stackT), stackEquiv);
4935 return;
4939 // In cases where we have a `this` hint where stackT is an TOptObj known to
4940 // be this, we can replace the check with a non null check. These cases are
4941 // likely from a BareThis that could return Null. Since the runtime will
4942 // split these translations, it will rarely in practice return null.
4943 if (reduce_this &&
4944 !dont_reduce &&
4945 stackT.couldBe(BInitNull) &&
4946 !stackT.subtypeOf(BInitNull) &&
4947 std::all_of(std::begin(tcs), std::end(tcs),
4948 [&](const TypeConstraint* constraint) {
4949 return constraint->isThis() &&
4950 !constraint->isNullable() &&
4951 env.index.satisfies_constraint(
4952 env.ctx, unopt(stackT), *constraint);
4956 if (ts_flavor) {
4957 return reduce(env, bc::PopC {}, bc::VerifyRetNonNullC {});
4959 return reduce(env, bc::VerifyRetNonNullC {});
4962 auto retT = std::move(stackT);
4963 for (auto& tcT : constraintTypes) {
4964 retT = intersection_of(std::move(tcT), std::move(retT));
4965 if (retT.subtypeOf(BBottom)) {
4966 unreachable(env);
4967 if (ts_flavor) popC(env); // the type structure
4968 return;
4972 if (ts_flavor) popC(env); // the type structure
4973 popC(env);
4974 push(env, std::move(retT));
4977 void in(ISS& env, const bc::VerifyOutType& op) {
4978 TCVec tcs;
4979 auto const& pinfo = env.ctx.func->params[op.arg1];
4980 tcs.push_back(&pinfo.typeConstraint);
4981 for (auto const& t : pinfo.upperBounds) tcs.push_back(&t);
4982 verifyRetImpl(env, tcs, false, false);
4985 void in(ISS& env, const bc::VerifyRetTypeC& /*op*/) {
4986 TCVec tcs;
4987 tcs.push_back(&env.ctx.func->retTypeConstraint);
4988 for (auto const& t : env.ctx.func->returnUBs) tcs.push_back(&t);
4989 verifyRetImpl(env, tcs, true, false);
4992 void in(ISS& env, const bc::VerifyRetTypeTS& /*op*/) {
4993 auto const a = topC(env);
4994 if (!a.couldBe(BDict)) {
4995 unreachable(env);
4996 popC(env);
4997 return;
4999 auto const constraint = env.ctx.func->retTypeConstraint;
5000 // TODO(T31677864): We are being extremely pessimistic here, relax it
5001 if (!env.ctx.func->isReified &&
5002 (!env.ctx.cls || !env.ctx.cls->hasReifiedGenerics) &&
5003 !env.index.could_have_reified_type(env.ctx, constraint)) {
5004 return reduce(env, bc::PopC {}, bc::VerifyRetTypeC {});
5006 if (auto const inputTS = tv(a)) {
5007 if (!isValidTSType(*inputTS, false)) {
5008 unreachable(env);
5009 popC(env);
5010 return;
5012 auto const resolvedTS =
5013 resolve_type_structure(env, inputTS->m_data.parr).sarray();
5014 if (resolvedTS && resolvedTS != inputTS->m_data.parr) {
5015 reduce(env, bc::PopC {});
5016 reduce(env, bc::Dict { resolvedTS });
5017 reduce(env, bc::VerifyRetTypeTS {});
5018 return;
5020 if (shouldReduceToNonReifiedVerifyType(env, inputTS->m_data.parr)) {
5021 return reduce(env, bc::PopC {}, bc::VerifyRetTypeC {});
5024 if (auto const last = last_op(env)) {
5025 if (last->op == Op::CombineAndResolveTypeStruct) {
5026 if (auto const last2 = last_op(env, 1)) {
5027 if (last2->op == Op::Dict &&
5028 shouldReduceToNonReifiedVerifyType(env, last2->Dict.arr1)) {
5029 return reduce(env, bc::PopC {}, bc::VerifyRetTypeC {});
5034 TCVec tcs {&constraint};
5035 for (auto const& t : env.ctx.func->returnUBs) tcs.push_back(&t);
5036 verifyRetImpl(env, tcs, true, true);
5039 void in(ISS& env, const bc::VerifyRetNonNullC& /*op*/) {
5040 auto const constraint = env.ctx.func->retTypeConstraint;
5041 if (constraint.isSoft()) {
5042 return;
5045 auto stackT = topC(env);
5047 if (!stackT.couldBe(BInitNull)) {
5048 reduce(env);
5049 return;
5052 if (stackT.subtypeOf(BNull)) return unreachable(env);
5054 auto const equiv = topStkEquiv(env);
5056 stackT = unopt(std::move(stackT));
5058 popC(env);
5059 push(env, stackT, equiv);
5062 void in(ISS& env, const bc::Self& op) {
5063 auto const self = selfClsExact(env);
5064 if (self) {
5065 effect_free(env);
5066 push(env, *self);
5067 } else {
5068 push(env, TCls);
5072 void in(ISS& env, const bc::Parent& op) {
5073 auto const parent = parentClsExact(env);
5074 if (parent) {
5075 effect_free(env);
5076 push(env, *parent);
5077 } else {
5078 push(env, TCls);
5082 void in(ISS& env, const bc::CreateCl& op) {
5083 auto const nargs = op.arg1;
5084 auto const clsPair = env.index.resolve_closure_class(env.ctx, op.arg2);
5087 * Every closure should have a unique allocation site, but we may see it
5088 * multiple times in a given round of analyzing this function. Each time we
5089 * may have more information about the used variables; the types should only
5090 * possibly grow. If it's already there we need to merge the used vars in
5091 * with what we saw last time.
5093 if (nargs) {
5094 CompactVector<Type> usedVars(nargs);
5095 for (auto i = uint32_t{0}; i < nargs; ++i) {
5096 usedVars[nargs - i - 1] = unctx(popCU(env));
5098 merge_closure_use_vars_into(
5099 env.collect.closureUseTypes,
5100 clsPair.second,
5101 std::move(usedVars)
5105 // Closure classes can be cloned and rescoped at runtime, so it's not safe to
5106 // assert the exact type of closure objects. The best we can do is assert
5107 // that it's a subclass of Closure.
5108 auto const closure = env.index.builtin_class(s_Closure.get());
5110 return push(env, subObj(closure));
5113 void in(ISS& env, const bc::CreateCont& /*op*/) {
5114 // First resume is always next() which pushes null.
5115 push(env, TInitNull);
5118 void in(ISS& env, const bc::ContEnter&) { popC(env); push(env, TInitCell); }
5119 void in(ISS& env, const bc::ContRaise&) { popC(env); push(env, TInitCell); }
5121 void in(ISS& env, const bc::Yield&) {
5122 popC(env);
5123 push(env, TInitCell);
5126 void in(ISS& env, const bc::YieldK&) {
5127 popC(env);
5128 popC(env);
5129 push(env, TInitCell);
5132 void in(ISS& /*env*/, const bc::ContCheck&) {}
5133 void in(ISS& env, const bc::ContValid&) { push(env, TBool); }
5134 void in(ISS& env, const bc::ContKey&) { push(env, TInitCell); }
5135 void in(ISS& env, const bc::ContCurrent&) { push(env, TInitCell); }
5136 void in(ISS& env, const bc::ContGetReturn&) { push(env, TInitCell); }
5138 void pushTypeFromWH(ISS& env, Type t) {
5139 auto inner = typeFromWH(t);
5140 // The next opcode is unreachable if awaiting a non-object or WaitH<Bottom>.
5141 if (inner.subtypeOf(BBottom)) unreachable(env);
5142 push(env, std::move(inner));
5145 void in(ISS& env, const bc::WHResult&) {
5146 pushTypeFromWH(env, popC(env));
5149 void in(ISS& env, const bc::Await&) {
5150 pushTypeFromWH(env, popC(env));
5153 void in(ISS& env, const bc::AwaitAll& op) {
5154 auto const equiv = equivLocalRange(env, op.locrange);
5155 if (equiv != op.locrange.first) {
5156 return reduce(
5157 env,
5158 bc::AwaitAll {LocalRange {equiv, op.locrange.count}}
5162 for (uint32_t i = 0; i < op.locrange.count; ++i) {
5163 mayReadLocal(env, op.locrange.first + i);
5166 push(env, TInitNull);
5169 void in(ISS& env, const bc::SetImplicitContextByIndex&) {
5170 popC(env);
5171 push(env, TInt);
5174 void in(ISS& env, const bc::Idx&) {
5175 auto const def = popC(env);
5176 auto const [key, promotion] = promote_classlike_to_key(popC(env));
5177 auto const base = popC(env);
5179 assertx(!def.is(BBottom));
5181 auto effectFree = promotion != Promotion::YesMightThrow;
5182 auto result = TBottom;
5184 auto const finish = [&] {
5185 if (result.is(BBottom)) {
5186 assertx(!effectFree);
5187 unreachable(env);
5189 if (effectFree) {
5190 constprop(env);
5191 effect_free(env);
5193 push(env, std::move(result));
5196 if (key.couldBe(BNull)) result |= def;
5197 if (key.subtypeOf(BNull)) return finish();
5199 if (!base.subtypeOf(BArrLike | BObj | BStr)) result |= def;
5201 if (base.couldBe(BArrLike)) {
5202 if (!key.subtypeOf(BOptArrKey)) effectFree = false;
5203 if (key.couldBe(BArrKey)) {
5204 auto elem = array_like_elem(
5205 base,
5206 key.subtypeOf(BArrKey) ? key : intersection_of(key, TArrKey)
5208 result |= std::move(elem.first);
5209 if (!elem.second) result |= def;
5212 if (base.couldBe(BObj)) {
5213 result |= TInitCell;
5214 effectFree = false;
5216 if (base.couldBe(BStr)) {
5217 result |= TSStr;
5218 result |= def;
5219 if (!key.subtypeOf(BOptArrKey)) effectFree = false;
5222 finish();
5225 void in(ISS& env, const bc::ArrayIdx&) {
5226 auto def = popC(env);
5227 auto const [key, promotion] = promote_classlike_to_key(popC(env));
5228 auto const base = popC(env);
5230 assertx(!def.is(BBottom));
5232 auto effectFree = promotion != Promotion::YesMightThrow;
5233 auto result = TBottom;
5235 auto const finish = [&] {
5236 if (result.is(BBottom)) {
5237 assertx(!effectFree);
5238 unreachable(env);
5240 if (effectFree) {
5241 constprop(env);
5242 effect_free(env);
5244 push(env, std::move(result));
5247 if (key.couldBe(BNull)) result |= def;
5248 if (key.subtypeOf(BNull)) return finish();
5250 if (!base.subtypeOf(BArrLike)) effectFree = false;
5251 if (!base.couldBe(BArrLike)) return finish();
5253 if (!key.subtypeOf(BOptArrKey)) effectFree = false;
5254 if (!key.couldBe(BArrKey)) return finish();
5256 auto elem = array_like_elem(
5257 base,
5258 key.subtypeOf(BArrKey) ? key : intersection_of(key, TArrKey)
5260 result |= std::move(elem.first);
5261 if (!elem.second) result |= std::move(def);
5262 finish();
5265 namespace {
5266 void implArrayMarkLegacy(ISS& env, bool legacy) {
5267 auto const recursive = popC(env);
5268 auto const value = popC(env);
5270 if (auto const tv_recursive = tv(recursive)) {
5271 if (auto const tv_value = tv(value)) {
5272 if (tvIsBool(*tv_recursive)) {
5273 auto const result = eval_cell([&]{
5274 return val(*tv_recursive).num
5275 ? arrprov::markTvRecursively(*tv_value, legacy)
5276 : arrprov::markTvShallow(*tv_value, legacy);
5278 if (result) {
5279 push(env, *result);
5280 effect_free(env);
5281 constprop(env);
5282 return;
5288 // TODO(kshaunak): We could add some type info here.
5289 push(env, TInitCell);
5293 void in(ISS& env, const bc::ArrayMarkLegacy&) {
5294 implArrayMarkLegacy(env, true);
5297 void in(ISS& env, const bc::ArrayUnmarkLegacy&) {
5298 implArrayMarkLegacy(env, false);
5301 void in(ISS& env, const bc::CheckProp&) {
5302 if (env.ctx.cls->attrs & AttrNoOverride) {
5303 return reduce(env, bc::False {});
5305 effect_free(env);
5306 push(env, TBool);
5309 void in(ISS& env, const bc::InitProp& op) {
5310 auto const t = topC(env);
5311 switch (op.subop2) {
5312 case InitPropOp::Static:
5313 env.index.merge_static_type(
5314 env.ctx,
5315 env.collect.publicSPropMutations,
5316 env.collect.props,
5317 clsExact(env.index.resolve_class(env.ctx.cls)),
5318 sval(op.str1),
5320 false,
5321 true
5323 break;
5324 case InitPropOp::NonStatic:
5325 mergeThisProp(env, op.str1, t);
5326 break;
5329 for (auto& prop : env.ctx.func->cls->properties) {
5330 if (prop.name != op.str1) continue;
5332 ITRACE(1, "InitProp: {} = {}\n", op.str1, show(t));
5334 if (env.index.satisfies_constraint(env.ctx, t, prop.typeConstraint) &&
5335 std::all_of(prop.ubs.begin(), prop.ubs.end(),
5336 [&](TypeConstraint ub) {
5337 applyFlagsToUB(ub, prop.typeConstraint);
5338 return env.index.satisfies_constraint(env.ctx, t, ub);
5339 })) {
5340 prop.attrs |= AttrInitialSatisfiesTC;
5341 } else {
5342 badPropInitialValue(env);
5343 prop.attrs = (Attr)(prop.attrs & ~AttrInitialSatisfiesTC);
5344 continue;
5347 auto const v = tv(t);
5348 if (v || !could_contain_objects(t)) {
5349 prop.attrs = (Attr)(prop.attrs & ~AttrDeepInit);
5350 if (!v) break;
5351 prop.val = *v;
5352 env.index.update_static_prop_init_val(env.ctx.func->cls, op.str1);
5353 return reduce(env, bc::PopC {});
5357 popC(env);
5360 void in(ISS& env, const bc::Silence& op) {
5361 nothrow(env);
5362 switch (op.subop2) {
5363 case SilenceOp::Start:
5364 setLoc(env, op.loc1, TInt);
5365 break;
5366 case SilenceOp::End:
5367 locRaw(env, op.loc1);
5368 break;
5372 namespace {
5374 template <typename Op, typename Rebind>
5375 bool memoGetImpl(ISS& env, const Op& op, Rebind&& rebind) {
5376 always_assert(env.ctx.func->isMemoizeWrapper);
5377 always_assert(op.locrange.first + op.locrange.count
5378 <= env.ctx.func->locals.size());
5380 if (will_reduce(env)) {
5381 // If we can use an equivalent, earlier range, then use that instead.
5382 auto const equiv = equivLocalRange(env, op.locrange);
5383 if (equiv != op.locrange.first) {
5384 reduce(env, rebind(LocalRange { equiv, op.locrange.count }));
5385 return true;
5389 auto retTy = memoizeImplRetType(env);
5391 // MemoGet can raise if we give a non arr-key local, or if we're in a method
5392 // and $this isn't available.
5393 auto allArrKey = true;
5394 for (uint32_t i = 0; i < op.locrange.count; ++i) {
5395 allArrKey &= locRaw(env, op.locrange.first + i).subtypeOf(BArrKey);
5397 if (allArrKey &&
5398 (!env.ctx.func->cls ||
5399 (env.ctx.func->attrs & AttrStatic) ||
5400 thisAvailable(env))) {
5401 if (will_reduce(env)) {
5402 if (retTy.first.subtypeOf(BBottom)) {
5403 reduce(env);
5404 jmp_setdest(env, op.target1);
5405 return true;
5407 // deal with constprop manually; otherwise we will propagate the
5408 // taken edge and *then* replace the MemoGet with a constant.
5409 if (retTy.second) {
5410 if (auto v = tv(retTy.first)) {
5411 reduce(env, gen_constant(*v));
5412 return true;
5416 effect_free(env);
5419 if (retTy.first == TBottom) {
5420 jmp_setdest(env, op.target1);
5421 return true;
5424 env.propagate(op.target1, &env.state);
5425 push(env, std::move(retTy.first));
5426 return false;
5431 void in(ISS& env, const bc::MemoGet& op) {
5432 memoGetImpl(
5433 env, op,
5434 [&] (const LocalRange& l) { return bc::MemoGet { op.target1, l }; }
5438 void in(ISS& env, const bc::MemoGetEager& op) {
5439 always_assert(env.ctx.func->isAsync && !env.ctx.func->isGenerator);
5441 auto const reduced = memoGetImpl(
5442 env, op,
5443 [&] (const LocalRange& l) {
5444 return bc::MemoGetEager { op.target1, op.target2, l };
5447 if (reduced) return;
5449 env.propagate(op.target2, &env.state);
5450 auto const t = popC(env);
5451 push(
5452 env,
5453 is_specialized_wait_handle(t) ? wait_handle_inner(t) : TInitCell
5457 namespace {
5459 template <typename Op>
5460 void memoSetImpl(ISS& env, const Op& op) {
5461 always_assert(env.ctx.func->isMemoizeWrapper);
5462 always_assert(op.locrange.first + op.locrange.count
5463 <= env.ctx.func->locals.size());
5465 // If we can use an equivalent, earlier range, then use that instead.
5466 auto const equiv = equivLocalRange(env, op.locrange);
5467 if (equiv != op.locrange.first) {
5468 return reduce(
5469 env,
5470 Op { LocalRange { equiv, op.locrange.count } }
5474 // MemoSet can raise if we give a non arr-key local, or if we're in a method
5475 // and $this isn't available.
5476 auto allArrKey = true;
5477 for (uint32_t i = 0; i < op.locrange.count; ++i) {
5478 allArrKey &= locRaw(env, op.locrange.first + i).subtypeOf(BArrKey);
5480 if (allArrKey &&
5481 (!env.ctx.func->cls ||
5482 (env.ctx.func->attrs & AttrStatic) ||
5483 thisAvailable(env))) {
5484 nothrow(env);
5486 push(env, popC(env));
5491 void in(ISS& env, const bc::MemoSet& op) {
5492 memoSetImpl(env, op);
5495 void in(ISS& env, const bc::MemoSetEager& op) {
5496 always_assert(env.ctx.func->isAsync && !env.ctx.func->isGenerator);
5497 memoSetImpl(env, op);
5502 namespace {
5504 //////////////////////////////////////////////////////////////////////
5506 void dispatch(ISS& env, const Bytecode& op) {
5507 #define O(opcode, ...) case Op::opcode: interp_step::in(env, op.opcode); return;
5508 switch (op.op) { OPCODES }
5509 #undef O
5510 not_reached();
5513 //////////////////////////////////////////////////////////////////////
5515 void interpStep(ISS& env, const Bytecode& bc) {
5516 ITRACE(2, " {} ({})\n",
5517 show(env.ctx.func, bc),
5518 env.unchangedBcs + env.replacedBcs.size());
5519 Trace::Indent _;
5521 // If there are throw exit edges, make a copy of the state (except
5522 // stacks) in case we need to propagate across throw exits (if
5523 // it's a PEI).
5524 if (!env.stateBefore && env.blk.throwExit != NoBlockId) {
5525 env.stateBefore.emplace(with_throwable_only(env.index, env.state));
5528 env.flags = {};
5530 default_dispatch(env, bc);
5532 if (env.flags.reduced) return;
5534 auto const_prop = [&] {
5535 if (!options.ConstantProp || !env.flags.canConstProp) return false;
5537 auto const numPushed = bc.numPush();
5538 TinyVector<TypedValue> cells;
5540 auto i = size_t{0};
5541 while (i < numPushed) {
5542 auto const v = tv(topT(env, i));
5543 if (!v) return false;
5544 cells.push_back(*v);
5545 ++i;
5548 if (env.flags.wasPEI) {
5549 ITRACE(2, " nothrow (due to constprop)\n");
5550 env.flags.wasPEI = false;
5552 if (!env.flags.effectFree) {
5553 ITRACE(2, " effect_free (due to constprop)\n");
5554 env.flags.effectFree = true;
5557 // If we're doing inline interp, don't actually perform the
5558 // constprop. If we do, we can infer static types that won't
5559 // actually exist at runtime.
5560 if (any(env.collect.opts & CollectionOpts::Inlining)) {
5561 ITRACE(2, " inlining, skipping actual constprop\n");
5562 return false;
5565 rewind(env, bc);
5567 auto const numPop = bc.numPop();
5568 for (auto j = 0; j < numPop; j++) {
5569 auto const flavor = bc.popFlavor(j);
5570 if (flavor == Flavor::C) {
5571 interpStep(env, bc::PopC {});
5572 } else if (flavor == Flavor::U) {
5573 interpStep(env, bc::PopU {});
5574 } else {
5575 assertx(flavor == Flavor::CU);
5576 auto const& popped = topT(env);
5577 if (popped.subtypeOf(BUninit)) {
5578 interpStep(env, bc::PopU {});
5579 } else {
5580 assertx(popped.subtypeOf(BInitCell));
5581 interpStep(env, bc::PopC {});
5586 while (i--) {
5587 push(env, from_cell(cells[i]));
5588 record(env, gen_constant(cells[i]));
5590 return true;
5593 if (const_prop()) {
5594 return;
5597 assertx(!env.flags.effectFree || !env.flags.wasPEI);
5598 if (env.flags.wasPEI) {
5599 ITRACE(2, " PEI.\n");
5600 if (env.stateBefore) {
5601 env.propagate(env.blk.throwExit, &*env.stateBefore);
5604 env.stateBefore.reset();
5606 record(env, bc);
5609 void interpOne(ISS& env, const Bytecode& bc) {
5610 env.srcLoc = bc.srcLoc;
5611 interpStep(env, bc);
5614 BlockId speculate(Interp& interp) {
5615 auto low_water = interp.state.stack.size();
5617 interp.collect.opts = interp.collect.opts | CollectionOpts::Speculating;
5618 SCOPE_EXIT {
5619 interp.collect.opts = interp.collect.opts - CollectionOpts::Speculating;
5622 auto failed = false;
5623 ISS env { interp, [&] (BlockId, const State*) { failed = true; } };
5625 FTRACE(4, " Speculate B{}\n", interp.bid);
5626 for (auto const& bc : interp.blk->hhbcs) {
5627 assertx(!interp.state.unreachable);
5628 auto const numPop = bc.numPop() +
5629 (bc.op == Op::CGetL2 ? 1 :
5630 bc.op == Op::Dup ? -1 : 0);
5631 if (interp.state.stack.size() - numPop < low_water) {
5632 low_water = interp.state.stack.size() - numPop;
5635 interpOne(env, bc);
5636 if (failed) {
5637 env.collect.mInstrState.clear();
5638 FTRACE(3, " Bailing from speculate because propagate was called\n");
5639 return NoBlockId;
5642 auto const& flags = env.flags;
5643 if (!flags.effectFree) {
5644 env.collect.mInstrState.clear();
5645 FTRACE(3, " Bailing from speculate because not effect free\n");
5646 return NoBlockId;
5649 assertx(!flags.returned);
5651 if (flags.jmpDest != NoBlockId && interp.state.stack.size() == low_water) {
5652 FTRACE(2, " Speculate found target block {}\n", flags.jmpDest);
5653 return flags.jmpDest;
5657 if (interp.state.stack.size() != low_water) {
5658 FTRACE(3,
5659 " Bailing from speculate because the speculated block "
5660 "left items on the stack\n");
5661 return NoBlockId;
5664 if (interp.blk->fallthrough == NoBlockId) {
5665 FTRACE(3,
5666 " Bailing from speculate because there was no fallthrough");
5667 return NoBlockId;
5670 FTRACE(2, " Speculate found fallthrough block {}\n",
5671 interp.blk->fallthrough);
5673 return interp.blk->fallthrough;
5676 BlockId speculateHelper(ISS& env, BlockId orig, bool updateTaken) {
5677 assertx(orig != NoBlockId);
5679 if (!will_reduce(env)) return orig;
5681 auto const last = last_op(env);
5682 bool endsInControlFlow = last && instrIsNonCallControlFlow(last->op);
5683 auto target = orig;
5684 auto pops = 0;
5686 if (options.RemoveDeadBlocks) {
5687 State temp{env.state, State::Compact{}};
5688 while (true) {
5689 auto const& func = env.ctx.func;
5690 auto const targetBlk = func.blocks()[target].get();
5691 if (!targetBlk->multiPred) break;
5692 auto const ok = [&] {
5693 switch (targetBlk->hhbcs.back().op) {
5694 case Op::JmpZ:
5695 case Op::JmpNZ:
5696 case Op::SSwitch:
5697 case Op::Switch:
5698 return true;
5699 default:
5700 return false;
5702 }();
5704 if (!ok) break;
5706 Interp interp {
5707 env.index, env.ctx, env.collect, target, targetBlk, temp
5710 auto const old_size = temp.stack.size();
5711 auto const new_target = speculate(interp);
5712 if (new_target == NoBlockId) break;
5714 const ssize_t delta = old_size - temp.stack.size();
5715 assertx(delta >= 0);
5716 if (delta && endsInControlFlow) break;
5718 pops += delta;
5719 target = new_target;
5720 temp.stack.compact();
5724 if (endsInControlFlow && updateTaken) {
5725 assertx(!pops);
5726 auto needsUpdate = target != orig;
5727 if (!needsUpdate) {
5728 forEachTakenEdge(
5729 *last,
5730 [&] (BlockId bid) {
5731 if (bid != orig) needsUpdate = true;
5735 if (needsUpdate) {
5736 auto& bc = mutate_last_op(env);
5737 forEachTakenEdge(
5739 [&] (BlockId& bid) {
5740 bid = bid == orig ? target : NoBlockId;
5746 while (pops--) {
5747 auto const& popped = topT(env);
5748 if (popped.subtypeOf(BInitCell)) {
5749 interpStep(env, bc::PopC {});
5750 } else {
5751 assertx(popped.subtypeOf(BUninit));
5752 interpStep(env, bc::PopU {});
5756 return target;
5761 //////////////////////////////////////////////////////////////////////
5763 RunFlags run(Interp& interp, const State& in, PropagateFn propagate) {
5764 SCOPE_EXIT {
5765 FTRACE(2, "out {}{}\n",
5766 state_string(*interp.ctx.func, interp.state, interp.collect),
5767 property_state_string(interp.collect.props));
5770 auto env = ISS { interp, propagate };
5771 auto ret = RunFlags {};
5772 auto finish = [&] (BlockId fallthrough) {
5773 ret.updateInfo.fallthrough = fallthrough;
5774 ret.updateInfo.unchangedBcs = env.unchangedBcs;
5775 ret.updateInfo.replacedBcs = std::move(env.replacedBcs);
5776 return ret;
5779 BytecodeVec retryBcs;
5780 auto retryOffset = interp.blk->hhbcs.size();
5781 auto size = retryOffset;
5782 BlockId retryFallthrough = interp.blk->fallthrough;
5783 size_t idx = 0;
5785 while (true) {
5786 if (idx == size) {
5787 finish_tracked_elems(env, 0);
5788 if (!env.reprocess) break;
5789 FTRACE(2, " Reprocess mutated block {}\n", interp.bid);
5790 assertx(env.unchangedBcs < retryOffset || env.replacedBcs.size());
5791 assertx(!env.undo);
5792 retryOffset = env.unchangedBcs;
5793 retryBcs = std::move(env.replacedBcs);
5794 env.unchangedBcs = 0;
5795 env.state.copy_from(in);
5796 env.reprocess = false;
5797 env.replacedBcs.clear();
5798 size = retryOffset + retryBcs.size();
5799 idx = 0;
5800 continue;
5803 auto const& bc = idx < retryOffset ?
5804 interp.blk->hhbcs[idx] : retryBcs[idx - retryOffset];
5805 ++idx;
5807 interpOne(env, bc);
5808 auto const& flags = env.flags;
5810 if (flags.wasPEI) ret.noThrow = false;
5812 if (interp.collect.effectFree && !flags.effectFree) {
5813 interp.collect.effectFree = false;
5814 if (any(interp.collect.opts & CollectionOpts::EffectFreeOnly)) {
5815 env.collect.mInstrState.clear();
5816 FTRACE(2, " Bailing because not effect free\n");
5817 return finish(NoBlockId);
5821 if (flags.returned) {
5822 always_assert(idx == size);
5823 if (env.reprocess) continue;
5825 always_assert(interp.blk->fallthrough == NoBlockId);
5826 assertx(!ret.returned);
5827 FTRACE(2, " returned {}\n", show(*flags.returned));
5828 ret.retParam = flags.retParam;
5829 ret.returned = flags.returned;
5830 return finish(NoBlockId);
5833 if (flags.jmpDest != NoBlockId) {
5834 always_assert(idx == size);
5835 auto const hasFallthrough = [&] {
5836 if (flags.jmpDest != interp.blk->fallthrough) {
5837 FTRACE(2, " <took branch; no fallthrough>\n");
5838 auto const last = last_op(env);
5839 return !last || !instrIsNonCallControlFlow(last->op);
5840 } else {
5841 FTRACE(2, " <branch never taken>\n");
5842 return true;
5844 }();
5845 if (hasFallthrough) retryFallthrough = flags.jmpDest;
5846 if (env.reprocess) continue;
5847 finish_tracked_elems(env, 0);
5848 auto const newDest = speculateHelper(env, flags.jmpDest, true);
5849 propagate(newDest, &interp.state);
5850 return finish(hasFallthrough ? newDest : NoBlockId);
5853 if (interp.state.unreachable) {
5854 if (env.reprocess) {
5855 idx = size;
5856 continue;
5858 FTRACE(2, " <bytecode fallthrough is unreachable>\n");
5859 finish_tracked_elems(env, 0);
5860 return finish(NoBlockId);
5864 FTRACE(2, " <end block>\n");
5865 if (retryFallthrough != NoBlockId) {
5866 retryFallthrough = speculateHelper(env, retryFallthrough, false);
5867 propagate(retryFallthrough, &interp.state);
5869 return finish(retryFallthrough);
5872 StepFlags step(Interp& interp, const Bytecode& op) {
5873 auto noop = [] (BlockId, const State*) {};
5874 ISS env { interp, noop };
5875 env.analyzeDepth++;
5876 default_dispatch(env, op);
5877 if (env.state.unreachable) {
5878 env.collect.mInstrState.clear();
5880 assertx(env.trackedElems.empty());
5881 return env.flags;
5884 void default_dispatch(ISS& env, const Bytecode& op) {
5885 if (!env.trackedElems.empty()) {
5886 auto const pops = [&] () -> uint32_t {
5887 switch (op.op) {
5888 case Op::AddElemC:
5889 case Op::AddNewElemC:
5890 return numPop(op) - 1;
5891 case Op::Concat:
5892 case Op::ConcatN:
5893 return 0;
5894 default:
5895 return numPop(op);
5897 }();
5899 finish_tracked_elems(env, env.state.stack.size() - pops);
5901 dispatch(env, op);
5902 if (instrFlags(op.op) & TF && env.flags.jmpDest == NoBlockId) {
5903 unreachable(env);
5904 } else if (env.state.unreachable) {
5905 env.collect.mInstrState.clear();
5909 Optional<Type> thisType(const Index& index, Context ctx) {
5910 return thisTypeFromContext(index, ctx);
5913 //////////////////////////////////////////////////////////////////////