take &self
[hiphop-php.git] / hphp / hhbbc / interp.cpp
blob3db83fac65cf3dc46496d83a261fe551352530b6
1 /*
2 +----------------------------------------------------------------------+
3 | HipHop for PHP |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
16 #include "hphp/hhbbc/interp.h"
18 #include <algorithm>
19 #include <vector>
20 #include <string>
21 #include <iterator>
23 #include <folly/Optional.h>
24 #include <folly/gen/Base.h>
25 #include <folly/gen/String.h>
27 #include "hphp/util/trace.h"
28 #include "hphp/runtime/base/array-init.h"
29 #include "hphp/runtime/base/collections.h"
30 #include "hphp/runtime/base/static-string-table.h"
31 #include "hphp/runtime/base/tv-arith.h"
32 #include "hphp/runtime/base/tv-comparisons.h"
33 #include "hphp/runtime/base/tv-conversions.h"
34 #include "hphp/runtime/base/type-structure.h"
35 #include "hphp/runtime/base/type-structure-helpers.h"
36 #include "hphp/runtime/base/type-structure-helpers-defs.h"
37 #include "hphp/runtime/vm/runtime.h"
38 #include "hphp/runtime/vm/unit-util.h"
40 #include "hphp/runtime/ext/hh/ext_hh.h"
42 #include "hphp/hhbbc/analyze.h"
43 #include "hphp/hhbbc/bc.h"
44 #include "hphp/hhbbc/cfg.h"
45 #include "hphp/hhbbc/class-util.h"
46 #include "hphp/hhbbc/eval-cell.h"
47 #include "hphp/hhbbc/index.h"
48 #include "hphp/hhbbc/interp-state.h"
49 #include "hphp/hhbbc/optimize.h"
50 #include "hphp/hhbbc/representation.h"
51 #include "hphp/hhbbc/type-builtins.h"
52 #include "hphp/hhbbc/type-ops.h"
53 #include "hphp/hhbbc/type-system.h"
54 #include "hphp/hhbbc/unit-util.h"
56 #include "hphp/hhbbc/interp-internal.h"
58 namespace HPHP { namespace HHBBC {
60 //////////////////////////////////////////////////////////////////////
62 namespace {
// Interned strings for class/method names that the interpreter
// special-cases below.
64 const StaticString s_PHP_Incomplete_Class("__PHP_Incomplete_Class");
65 const StaticString s_IMemoizeParam("HH\\IMemoizeParam");
66 const StaticString s_getInstanceKey("getInstanceKey");
67 const StaticString s_Closure("Closure");
68 const StaticString s_this("HH\\this");
70 bool poppable(Op op) {
71 switch (op) {
72 case Op::Dup:
73 case Op::Null:
74 case Op::False:
75 case Op::True:
76 case Op::Int:
77 case Op::Double:
78 case Op::String:
79 case Op::Array:
80 case Op::Vec:
81 case Op::Dict:
82 case Op::Keyset:
83 case Op::NewArray:
84 case Op::NewDArray:
85 case Op::NewMixedArray:
86 case Op::NewDictArray:
87 case Op::NewLikeArrayL:
88 case Op::NewCol:
89 return true;
90 default:
91 return false;
95 void interpStep(ISS& env, const Bytecode& bc);
// Append `bc` to the output stream for the current block, stamped with
// the current source location.  If nothing has been replaced yet and
// `bc` matches the next original bytecode, just extend the unchanged
// prefix instead of buffering a copy.
97 void record(ISS& env, const Bytecode& bc) {
// Normalize srcLoc first so the equality test below can't fail merely
// because of a stale location on the incoming bytecode.
98 if (bc.srcLoc != env.srcLoc) {
99 Bytecode tmp = bc;
100 tmp.srcLoc = env.srcLoc;
101 return record(env, tmp);
// Fast path: the bytecode is identical to the original stream.
104 if (!env.replacedBcs.size() &&
105 env.unchangedBcs < env.blk.hhbcs.size() &&
106 bc == env.blk.hhbcs[env.unchangedBcs]) {
107 env.unchangedBcs++;
108 return;
111 ITRACE(2, " => {}\n", show(env.ctx.func, bc));
112 env.replacedBcs.push_back(bc);
115 // The number of pops as seen by interp.
116 uint32_t numPop(const Bytecode& bc) {
117 if (bc.op == Op::CGetL2) return 1;
118 return bc.numPop();
121 // The number of pushes as seen by interp.
122 uint32_t numPush(const Bytecode& bc) {
123 if (bc.op == Op::CGetL2) return 2;
124 return bc.numPush();
127 void reprocess(ISS& env) {
128 env.reprocess = true;
// Locate the mutable ArrayData* slot inside the array-literal bytecode
// that started the innermost tracked add-elem sequence.  Returns
// nullptr when there is nothing to mutate (the source op is a Concat,
// or it still lives in the immutable unchanged prefix).
131 ArrayData** add_elem_array(ISS& env) {
132 auto const idx = env.trackedElems.back().idx;
// Ops in the unchanged prefix can't be mutated; by construction the
// only such tracked op is a Concat.
133 if (idx < env.unchangedBcs) {
134 auto const DEBUG_ONLY& bc = env.blk.hhbcs[idx];
135 assertx(bc.op == Op::Concat);
136 return nullptr;
138 assertx(idx >= env.unchangedBcs);
139 auto& bc = env.replacedBcs[idx - env.unchangedBcs];
140 auto arr = [&] () -> const ArrayData** {
141 switch (bc.op) {
142 case Op::Array: return &bc.Array.arr1;
143 case Op::Dict: return &bc.Dict.arr1;
144 case Op::Keyset: return &bc.Keyset.arr1;
145 case Op::Vec: return &bc.Vec.arr1;
146 case Op::Concat: return nullptr;
147 default: not_reached();
149 }();
150 return const_cast<ArrayData**>(arr);
// Begin tracking an AddElemC/AddNewElemC sequence whose base is the
// statically-known array-like `ty`: replace the inputs with pops, emit
// a single literal bytecode for the array, and remember its position
// so later add-elems can mutate it in place.  Returns false if `ty`
// has no concrete array value to fold into.
153 bool start_add_elem(ISS& env, Type& ty, Op op) {
154 auto value = tvNonStatic(ty);
155 if (!value || !isArrayLikeType(value->m_type)) return false;
// AddElemC consumes key+value+base; AddNewElemC only value+base.
157 if (op == Op::AddElemC) {
158 reduce(env, bc::PopC {}, bc::PopC {}, bc::PopC {});
159 } else {
160 reduce(env, bc::PopC {}, bc::PopC {});
// Record stack depth and the index the literal op will occupy.
162 env.trackedElems.emplace_back(
163 env.state.stack.size(),
164 env.unchangedBcs + env.replacedBcs.size()
167 auto const arr = value->m_data.parr;
// Emit the literal matching the array's runtime kind.
168 env.replacedBcs.push_back(
169 [&] () -> Bytecode {
170 if (arr->isKeyset()) {
171 return bc::Keyset { arr };
173 if (arr->isVecArray()) {
174 return bc::Vec { arr };
176 if (arr->isDict()) {
177 return bc::Dict { arr };
179 if (arr->isPHPArray()) {
180 return bc::Array { arr };
183 not_reached();
186 env.replacedBcs.back().srcLoc = env.srcLoc;
187 ITRACE(2, "(addelem* -> {}\n",
188 show(env.ctx.func, env.replacedBcs.back()));
189 push(env, std::move(ty));
190 effect_free(env);
191 return true;
195 * Alter the saved add_elem array in a way that preserves its provenance tag
196 * or adds a new one if applicable (i.e. the array is a vec or dict)
198 * The `mutate` parameter should be callable with an ArrayData** pointing to the
199 * add_elem array cached in the interp state and should write to it directly.
201 template <typename Fn>
// Returns false when there is no cached add_elem array to mutate.
202 bool mutate_add_elem_array(ISS& env, ProvTag tag, Fn&& mutate) {
203 auto const arr = add_elem_array(env);
204 if (!arr) return false;
205 // We need to propagate the provenance info in case we promote *arr from
206 // static to counted (or if its representation changes in some other
207 // way) ...
208 assertx(!RuntimeOption::EvalArrayProvenance || tag);
209 auto const oldTag = RuntimeOption::EvalArrayProvenance ?
210 arrprov::getTag(*arr) :
211 folly::none;
212 mutate(arr);
213 // ... which means we'll have to setTag if
214 // - the array still needs a tag AND
215 // either:
216 // - the array had no tag coming into this op OR
217 // - the set op cleared the provenance bit somehow
218 // (representation changed or we CoWed a static array)
219 if (RuntimeOption::EvalArrayProvenance &&
220 arrprov::arrayWantsTag(*arr) &&
221 (!oldTag || !(*arr)->hasProvenanceData())) {
222 // if oldTag is unset, then this operation is the provenance location
223 arrprov::setTag(*arr, oldTag ? *oldTag : *tag);
225 // make sure that if provenance is enabled and the array wants a tag,
226 // that we definitely assigned one leaving this op
227 assertx(!tag ||
228 !arrprov::arrayWantsTag(*arr) ||
229 (*arr)->hasProvenanceData());
230 return true;
233 void finish_tracked_elem(ISS& env) {
234 auto const arr = add_elem_array(env);
235 env.trackedElems.pop_back();
236 if (arr) ArrayData::GetScalarArray(arr);
239 void finish_tracked_elems(ISS& env, size_t depth) {
240 while (!env.trackedElems.empty() && env.trackedElems.back().depth >= depth) {
241 finish_tracked_elem(env);
245 uint32_t id_from_slot(ISS& env, int slot) {
246 auto const id = (env.state.stack.end() - (slot + 1))->id;
247 assertx(id == StackElem::NoId ||
248 id < env.unchangedBcs + env.replacedBcs.size());
249 return id;
252 const Bytecode* op_from_id(ISS& env, uint32_t id) {
253 if (id == StackElem::NoId) return nullptr;
254 if (id < env.unchangedBcs) return &env.blk.hhbcs[id];
255 auto const off = id - env.unchangedBcs;
256 assertx(off < env.replacedBcs.size());
257 return &env.replacedBcs[off];
// Make the bytecode with the given id mutable: if it still lies in the
// unchanged prefix, copy everything from id onward into replacedBcs.
260 void ensure_mutable(ISS& env, uint32_t id) {
261 if (id < env.unchangedBcs) {
262 auto const delta = env.unchangedBcs - id;
263 env.replacedBcs.resize(env.replacedBcs.size() + delta);
// Shift existing replacements up (iterating backwards so moves don't
// overwrite not-yet-moved entries) ...
264 for (auto i = env.replacedBcs.size(); i-- > delta; ) {
265 env.replacedBcs[i] = std::move(env.replacedBcs[i - delta]);
// ... then copy the formerly-unchanged originals into the gap.
267 for (auto i = 0; i < delta; i++) {
268 env.replacedBcs[i] = env.blk.hhbcs[id + i];
270 env.unchangedBcs = id;
275 * Turn the instruction that wrote the slot'th element from the top of
276 * the stack into a Nop, adjusting the stack appropriately. If its the
277 * previous instruction, just rewind.
// Returns the (negative or zero) change in stack size.
279 int kill_by_slot(ISS& env, int slot) {
280 auto const id = id_from_slot(env, slot);
281 assertx(id != StackElem::NoId);
282 auto const sz = env.state.stack.size();
283 // if its the last bytecode we processed, we can rewind and avoid
284 // the reprocess overhead.
285 if (id == env.unchangedBcs + env.replacedBcs.size() - 1) {
286 rewind(env, 1);
287 return env.state.stack.size() - sz;
289 ensure_mutable(env, id);
290 auto& bc = env.replacedBcs[id - env.unchangedBcs];
// Capture the op's stack effects before overwriting it.
291 auto const pop = numPop(bc);
292 auto const push = numPush(bc);
293 ITRACE(2, "kill_by_slot: slot={}, id={}, was {}\n",
294 slot, id, show(env.ctx.func, bc));
295 bc = bc_with_loc(bc.srcLoc, bc::Nop {});
296 env.state.stack.kill(pop, push, id);
297 reprocess(env);
298 return env.state.stack.size() - sz;
302 * Check whether an instruction can be inserted immediately after the
303 * slot'th stack entry was written. This is only possible if slot was
304 * the last thing written by the instruction that wrote it (ie some
305 * bytecodes push more than one value - there's no way to insert a
306 * bytecode that will write *between* those values on the stack).
308 bool can_insert_after_slot(ISS& env, int slot) {
309 auto const it = env.state.stack.end() - (slot + 1);
310 if (it->id == StackElem::NoId) return false;
// A following element with the same id means the writer pushed more
// values after this one.
311 if (auto const next = it.next_elem(1)) {
312 return next->id != it->id;
314 return true;
318 * Insert a sequence of bytecodes after the instruction that wrote the
319 * slot'th element from the top of the stack.
321 * The entire sequence pops numPop, and pushes numPush stack
322 * elements. Only the last bytecode can push anything onto the stack,
323 * and the types it pushes are pointed to by types (if you have more
324 * than one bytecode that pushes, call this more than once).
326 void insert_after_slot(ISS& env, int slot,
327 int numPop, int numPush, const Type* types,
328 const BytecodeVec& bcs) {
329 assertx(can_insert_after_slot(env, slot));
330 auto const id = id_from_slot(env, slot);
331 assertx(id != StackElem::NoId);
// Everything from id+1 onwards must be mutable so we can splice.
332 ensure_mutable(env, id + 1);
333 env.state.stack.insert_after(numPop, numPush, types, bcs.size(), id);
334 env.replacedBcs.insert(env.replacedBcs.begin() + (id + 1 - env.unchangedBcs),
335 bcs.begin(), bcs.end());
336 using namespace folly::gen;
337 ITRACE(2, "insert_after_slot: slot={}, id={} [{}]\n",
338 slot, id,
339 from(bcs) |
340 map([&] (const Bytecode& bc) { return show(env.ctx.func, bc); }) |
341 unsplit<std::string>(", "));
// Return a mutable reference to the last bytecode we committed,
// pulling it out of the unchanged prefix if necessary.
344 Bytecode& mutate_last_op(ISS& env) {
345 assertx(will_reduce(env));
347 if (!env.replacedBcs.size()) {
348 assertx(env.unchangedBcs);
349 env.replacedBcs.push_back(env.blk.hhbcs[--env.unchangedBcs]);
351 return env.replacedBcs.back();
355 * Can be used to replace one op with another when rewind/reduce isn't
356 * safe (eg to change a SetL to a PopL - its not safe to rewind/reduce
357 * because the SetL changed both the Type and the equiv of its local).
// The replacement must pop/push no more than the original did; any
// surplus stack effect of the original is rewound.
359 void replace_last_op(ISS& env, Bytecode&& bc) {
360 auto& last = mutate_last_op(env);
361 auto const newPush = numPush(bc);
362 auto const oldPush = numPush(last);
363 auto const newPops = numPop(bc);
364 auto const oldPops = numPop(last);
366 assertx(newPush <= oldPush);
367 assertx(newPops <= oldPops);
369 if (newPush != oldPush || newPops != oldPops) {
370 env.state.stack.rewind(oldPops - newPops, oldPush - newPush);
372 ITRACE(2, "(replace: {}->{}\n",
373 show(env.ctx.func, last), show(env.ctx.func, bc));
// Keep the original source location on the replacement.
374 last = bc_with_loc(last.srcLoc, bc);
379 //////////////////////////////////////////////////////////////////////
381 const Bytecode* op_from_slot(ISS& env, int slot, int prev /* = 0 */) {
382 if (!will_reduce(env)) return nullptr;
383 auto const id = id_from_slot(env, slot);
384 if (id == StackElem::NoId) return nullptr;
385 if (id < prev) return nullptr;
386 return op_from_id(env, id - prev);
// The idx'th-from-last committed bytecode (0 == most recent), looking
// first in the replacement buffer, then in the unchanged prefix.
389 const Bytecode* last_op(ISS& env, int idx /* = 0 */) {
390 if (!will_reduce(env)) return nullptr;
392 if (env.replacedBcs.size() > idx) {
393 return &env.replacedBcs[env.replacedBcs.size() - idx - 1];
396 idx -= env.replacedBcs.size();
397 if (env.unchangedBcs > idx) {
398 return &env.blk.hhbcs[env.unchangedBcs - idx - 1];
400 return nullptr;
404 * Assuming bc was just interped, rewind to the state immediately
405 * before it was interped.
407 * This is rarely what you want. Its used for constprop, where the
408 * bytecode has been interped, but not yet committed to the bytecode
409 * stream. We want to undo its effects, the spit out pops for its
410 * inputs, and commit a constant-generating bytecode.
412 void rewind(ISS& env, const Bytecode& bc) {
413 ITRACE(2, "(rewind: {}\n", show(env.ctx.func, bc));
414 env.state.stack.rewind(numPop(bc), numPush(bc));
418 * Used for peephole opts. Will undo the *stack* effects of the last n
419 * committed byte codes, and remove them from the bytecode stream, in
420 * preparation for writing out an optimized replacement sequence.
422 * WARNING: Does not undo other changes to state, such as local types,
423 * local equivalency, and thisType. Take care when rewinding such
424 * things.
426 void rewind(ISS& env, int n) {
427 assertx(n);
// First drain the replacement buffer ...
428 while (env.replacedBcs.size()) {
429 rewind(env, env.replacedBcs.back());
430 env.replacedBcs.pop_back();
431 if (!--n) return;
// ... then shrink the unchanged prefix for whatever remains.
433 while (n--) {
434 rewind(env, env.blk.hhbcs[--env.unchangedBcs]);
// Interpret a sequence of bytecodes as a single unit, either as a
// reduction (the sequence replaces the current op in the output) or as
// an analysis-only simulation, merging the per-op flags (PEI,
// constprop, effect-free) into a summary for the whole sequence.
438 void impl_vec(ISS& env, bool reduce, BytecodeVec&& bcs) {
439 if (!will_reduce(env)) reduce = false;
441 if (reduce) {
442 using namespace folly::gen;
443 ITRACE(2, "(reduce: {}\n",
444 from(bcs) |
445 map([&] (const Bytecode& bc) { return show(env.ctx.func, bc); }) |
446 unsplit<std::string>(", "));
447 if (bcs.size()) {
// The sequence is effect-free only if every step is.
448 auto ef = !env.flags.reduced || env.flags.effectFree;
449 Trace::Indent _;
450 for (auto const& bc : bcs) {
451 assert(
452 env.flags.jmpDest == NoBlockId &&
453 "you can't use impl with branching opcodes before last position"
455 interpStep(env, bc);
456 if (!env.flags.effectFree) ef = false;
457 if (env.state.unreachable || env.flags.jmpDest != NoBlockId) break;
459 env.flags.effectFree = ef;
460 } else if (!env.flags.reduced) {
461 effect_free(env);
463 env.flags.reduced = true;
464 return;
// Analysis-only path: simulate the sequence without committing it.
467 env.analyzeDepth++;
468 SCOPE_EXIT { env.analyzeDepth--; };
470 // We should be at the start of a bytecode.
471 assertx(env.flags.wasPEI &&
472 !env.flags.canConstProp &&
473 !env.flags.effectFree);
475 env.flags.wasPEI = false;
476 env.flags.canConstProp = true;
477 env.flags.effectFree = true;
479 for (auto const& bc : bcs) {
480 assert(env.flags.jmpDest == NoBlockId &&
481 "you can't use impl with branching opcodes before last position");
// Save the accumulated flags; each step starts pessimistic.
483 auto const wasPEI = env.flags.wasPEI;
484 auto const canConstProp = env.flags.canConstProp;
485 auto const effectFree = env.flags.effectFree;
487 ITRACE(3, " (impl {}\n", show(env.ctx.func, bc));
488 env.flags.wasPEI = true;
489 env.flags.canConstProp = false;
490 env.flags.effectFree = false;
491 default_dispatch(env, bc);
493 if (env.flags.canConstProp) {
// A const-propagatable op that only pushes scalars is effect-free.
494 [&] {
495 if (env.flags.effectFree && !env.flags.wasPEI) return;
496 auto stk = env.state.stack.end();
497 for (auto i = bc.numPush(); i--; ) {
498 --stk;
499 if (!is_scalar(stk->type)) return;
501 env.flags.effectFree = true;
502 env.flags.wasPEI = false;
503 }();
506 // If any of the opcodes in the impl list said they could throw,
507 // then the whole thing could throw.
508 env.flags.wasPEI = env.flags.wasPEI || wasPEI;
509 env.flags.canConstProp = env.flags.canConstProp && canConstProp;
510 env.flags.effectFree = env.flags.effectFree && effectFree;
511 if (env.state.unreachable || env.flags.jmpDest != NoBlockId) break;
// Find the lowest-numbered local range equivalent to `range`: walk the
// equivalence chain of range.first and pick the smallest first-local
// whose whole range matches element-wise.
515 LocalId equivLocalRange(ISS& env, const LocalRange& range) {
516 auto bestRange = range.first;
517 auto equivFirst = findLocEquiv(env, range.first);
518 if (equivFirst == NoLocalId) return bestRange;
519 do {
520 if (equivFirst < bestRange) {
521 auto equivRange = [&] {
522 // local equivalency includes differing by Uninit, so we need
523 // to check the types.
524 if (peekLocRaw(env, equivFirst) != peekLocRaw(env, range.first)) {
525 return false;
// Every subsequent local in the range must also be equivalent and
// type-identical.
528 for (uint32_t i = 1; i < range.count; ++i) {
529 if (!locsAreEquiv(env, equivFirst + i, range.first + i) ||
530 peekLocRaw(env, equivFirst + i) !=
531 peekLocRaw(env, range.first + i)) {
532 return false;
536 return true;
537 }();
539 if (equivRange) {
540 bestRange = equivFirst;
// Equivalence chains are circular, so this terminates when we get
// back to the local we started from.
543 equivFirst = findLocEquiv(env, equivFirst);
544 assert(equivFirst != NoLocalId);
545 } while (equivFirst != range.first);
547 return bestRange;
550 SString getNameFromType(const Type& t) {
551 if (!t.subtypeOf(BStr)) return nullptr;
552 if (is_specialized_string(t)) return sval_of(t);
553 return nullptr;
556 //////////////////////////////////////////////////////////////////////
558 namespace {
// Statically resolve every type structure in `tsList`, returning a new
// array, or nullptr if any element cannot be fully resolved.
560 ArrayData*
561 resolveTSListStatically(ISS& env, SArray tsList, const php::Class* declaringCls,
562 bool checkArrays) {
563 auto arr = Array::attach(const_cast<ArrayData*>(tsList));
564 for (auto i = 0; i < arr.size(); i++) {
565 auto elemArr = arr[i].getArrayData();
566 auto elem = resolveTSStatically(env, elemArr, declaringCls, checkArrays);
// All-or-nothing: one unresolvable element aborts the whole list.
567 if (!elem) return nullptr;
568 arr.set(i, Variant(elem));
570 return arr.detach();
573 } // namespace
// Attempt to resolve a type structure at analysis time.  Returns the
// resolved (static) array, or nullptr when resolution must be deferred
// to runtime.  `declaringCls` supplies the context for self/parent/this
// and type-constant lookups; `checkArrays` controls whether generic
// arguments of array-like kinds are resolved too.
575 ArrayData*
576 resolveTSStatically(ISS& env, SArray ts, const php::Class* declaringCls,
577 bool checkArrays) {
// Copy the like/nullable/soft modifiers from `ts` onto a result that
// lacks them (used when substituting a type constant's value).
578 auto const addModifiers = [&](ArrayData* result) {
579 auto a = Array::attach(result);
580 if (is_ts_like(ts) && !is_ts_like(a.get())) {
581 a.set(s_like, make_tv<KindOfBoolean>(true));
583 if (is_ts_nullable(ts) && !is_ts_nullable(a.get())) {
584 a.set(s_nullable, make_tv<KindOfBoolean>(true));
586 if (is_ts_soft(ts) && !is_ts_soft(a.get())) {
587 a.set(s_soft, make_tv<KindOfBoolean>(true));
589 return a.detach();
// Intern the result as a static (scalar) array before returning it.
591 auto const finish = [&](const ArrayData* result) {
592 auto r = const_cast<ArrayData*>(result);
593 ArrayData::GetScalarArray(&r);
594 return r;
596 switch (get_ts_kind(ts)) {
// Primitive kinds carry no unresolved parts.
597 case TypeStructure::Kind::T_int:
598 case TypeStructure::Kind::T_bool:
599 case TypeStructure::Kind::T_float:
600 case TypeStructure::Kind::T_string:
601 case TypeStructure::Kind::T_num:
602 case TypeStructure::Kind::T_arraykey:
603 case TypeStructure::Kind::T_void:
604 case TypeStructure::Kind::T_null:
605 case TypeStructure::Kind::T_nothing:
606 case TypeStructure::Kind::T_noreturn:
607 case TypeStructure::Kind::T_mixed:
608 case TypeStructure::Kind::T_dynamic:
609 case TypeStructure::Kind::T_nonnull:
610 case TypeStructure::Kind::T_resource:
611 return finish(ts);
// Only the wildcard typevar resolves statically.
612 case TypeStructure::Kind::T_typevar:
613 if (ts->exists(s_name.get()) &&
614 get_ts_name(ts)->equal(s_wildcard.get())) {
615 return finish(ts);
617 return nullptr;
618 case TypeStructure::Kind::T_dict:
619 case TypeStructure::Kind::T_vec:
620 case TypeStructure::Kind::T_keyset:
621 case TypeStructure::Kind::T_vec_or_dict:
622 case TypeStructure::Kind::T_arraylike:
623 if (checkArrays) {
624 if (!ts->exists(s_generic_types)) return finish(ts);
625 auto const generics = get_ts_generic_types(ts);
626 auto rgenerics =
627 resolveTSListStatically(env, generics, declaringCls, checkArrays);
628 if (!rgenerics) return nullptr;
629 auto result = const_cast<ArrayData*>(ts);
630 return finish(result->set(s_generic_types.get(), Variant(rgenerics)));
632 return isTSAllWildcards(ts) ? finish(ts) : nullptr;
633 case TypeStructure::Kind::T_class:
634 case TypeStructure::Kind::T_interface:
635 case TypeStructure::Kind::T_xhp:
636 case TypeStructure::Kind::T_enum:
637 // Generics for these must have been resolved already as we'd never set
638 // the TS Kind to be one of these until resolution
639 return finish(ts);
640 case TypeStructure::Kind::T_tuple: {
641 auto const elems = get_ts_elem_types(ts);
642 auto relems =
643 resolveTSListStatically(env, elems, declaringCls, checkArrays);
644 if (!relems) return nullptr;
645 auto result = const_cast<ArrayData*>(ts);
646 return finish(result->set(s_elem_types.get(), Variant(relems)));
648 case TypeStructure::Kind::T_shape:
649 // TODO(T31677864): We can also optimize this but shapes could have
650 // optional fields or they could allow unknown fields, so this one is
651 // slightly more tricky
652 return nullptr;
// Unresolved class names: resolve generics, then look the class up and
// rewrite the kind according to the class's attributes.
653 case TypeStructure::Kind::T_unresolved: {
654 assertx(ts->exists(s_classname));
655 auto result = const_cast<ArrayData*>(ts);
656 if (ts->exists(s_generic_types)) {
657 auto const generics = get_ts_generic_types(ts);
658 auto rgenerics =
659 resolveTSListStatically(env, generics, declaringCls, checkArrays);
660 if (!rgenerics) return nullptr;
661 result = result->set(s_generic_types.get(), Variant(rgenerics));
663 auto const rcls = env.index.resolve_class(env.ctx, get_ts_classname(ts));
664 if (!rcls || !rcls->resolved()) return nullptr;
665 auto const attrs = rcls->cls()->attrs;
666 auto const kind = [&] {
667 if (attrs & AttrEnum) return TypeStructure::Kind::T_enum;
668 if (attrs & AttrTrait) return TypeStructure::Kind::T_trait;
669 if (attrs & AttrInterface) return TypeStructure::Kind::T_interface;
670 return TypeStructure::Kind::T_class;
671 }();
672 return finish(result->set(s_kind.get(),
673 Variant(static_cast<uint8_t>(kind))));
// Type access (C::T1::T2...): walk the access list through type
// constants, resolving each step statically.
675 case TypeStructure::Kind::T_typeaccess: {
676 auto const accList = get_ts_access_list(ts);
677 auto const size = accList->size();
678 auto clsName = get_ts_root_name(ts);
679 auto checkNoOverrideOnFirst = false;
680 if (declaringCls) {
681 if (clsName->isame(s_self.get())) {
682 clsName = declaringCls->name;
683 } else if (clsName->isame(s_parent.get()) && declaringCls->parentName) {
684 clsName = declaringCls->parentName;
// `this` resolves to the declaring class only if the first type
// constant cannot be overridden in a subclass.
685 } else if (clsName->isame(s_this.get())) {
686 clsName = declaringCls->name;
687 checkNoOverrideOnFirst = true;
690 ArrayData* typeCnsVal = nullptr;
691 for (auto i = 0; i < size; i++) {
692 auto const rcls = env.index.resolve_class(env.ctx, clsName);
693 if (!rcls || !rcls->resolved()) return nullptr;
694 auto const cnsName = accList->at(i);
695 if (!tvIsString(&cnsName)) return nullptr;
696 auto const cnst = env.index.lookup_class_const_ptr(env.ctx, *rcls,
697 cnsName.m_data.pstr,
698 true);
699 if (!cnst || !cnst->val || !cnst->isTypeconst ||
700 !tvIsDictOrDArray(&*cnst->val)) {
701 return nullptr;
703 if (checkNoOverrideOnFirst && i == 0 && !cnst->isNoOverride) {
704 return nullptr;
706 typeCnsVal = resolveTSStatically(env, cnst->val->m_data.parr, cnst->cls,
707 checkArrays);
708 if (!typeCnsVal) return nullptr;
709 if (i == size - 1) break;
// Intermediate steps must name a class or interface to keep walking.
710 auto const kind = get_ts_kind(typeCnsVal);
711 if (kind != TypeStructure::Kind::T_class &&
712 kind != TypeStructure::Kind::T_interface) {
713 return nullptr;
715 clsName = get_ts_classname(typeCnsVal);
717 if (!typeCnsVal) return nullptr;
718 return finish(addModifiers(typeCnsVal));
// Function types: resolve return, parameter, and optional variadic
// type structures.
720 case TypeStructure::Kind::T_fun: {
721 auto rreturn = resolveTSStatically(env, get_ts_return_type(ts),
722 declaringCls, checkArrays);
723 if (!rreturn) return nullptr;
724 auto rparams = resolveTSListStatically(env, get_ts_param_types(ts),
725 declaringCls, checkArrays);
726 if (!rparams) return nullptr;
727 auto result = const_cast<ArrayData*>(ts)
728 ->set(s_return_type.get(), Variant(rreturn))
729 ->set(s_param_types.get(), Variant(rparams));
730 auto const variadic = get_ts_variadic_type_opt(ts);
731 if (variadic) {
732 auto rvariadic = resolveTSStatically(env, variadic, declaringCls,
733 checkArrays);
734 if (!rvariadic) return nullptr;
735 result = result->set(s_variadic_type.get(), Variant(rvariadic));
737 return finish(result);
// These kinds are never resolved statically here.
739 case TypeStructure::Kind::T_array:
740 case TypeStructure::Kind::T_darray:
741 case TypeStructure::Kind::T_varray:
742 case TypeStructure::Kind::T_varray_or_darray:
743 case TypeStructure::Kind::T_reifiedtype:
744 case TypeStructure::Kind::T_trait:
745 return nullptr;
747 not_reached();
750 //////////////////////////////////////////////////////////////////////
752 namespace interp_step {
// Nop has no effect at all; reduce it away.
754 void in(ISS& env, const bc::Nop&) { reduce(env); }
// PopC: try to cancel against the op that pushed the value.
756 void in(ISS& env, const bc::PopC&) {
757 if (auto const last = last_op(env)) {
// A pure push followed by PopC: delete both.
758 if (poppable(last->op)) {
759 rewind(env, 1);
760 return reduce(env);
762 if (last->op == Op::This) {
763 // can't rewind This because it removed null from thisType (so
764 // CheckThis at this point is a no-op) - and note that it must
765 // have *been* nullable, or we'd have turned it into a
766 // `BareThis NeverNull`
767 replace_last_op(env, bc::CheckThis {});
768 return reduce(env);
770 if (last->op == Op::SetL) {
771 // can't rewind a SetL because it changes local state
772 replace_last_op(env, bc::PopL { last->SetL.loc1 });
773 return reduce(env);
// CGetL2 pushed two values; drop it and re-emit the surviving CGetL.
775 if (last->op == Op::CGetL2) {
776 auto loc = last->CGetL2.loc1;
777 rewind(env, 1);
778 return reduce(env, bc::PopC {}, bc::CGetL { loc });
782 effect_free(env);
783 popC(env);
// PopU cancels a preceding NullUninit.
786 void in(ISS& env, const bc::PopU&) {
787 if (auto const last = last_op(env)) {
788 if (last->op == Op::NullUninit) {
789 rewind(env, 1);
790 return reduce(env);
793 effect_free(env); popU(env);
// PopU2: remove the uninit *under* the top cell, preserving the top
// value and its local equivalence (but not a stack-dup equivalence).
796 void in(ISS& env, const bc::PopU2&) {
797 effect_free(env);
798 auto equiv = topStkEquiv(env);
799 auto val = popC(env);
800 popU(env);
801 push(env, std::move(val), equiv != StackDupId ? equiv : NoLocalId);
// PopFrame: drop the 3 uninit frame slots buried under arg1 cells,
// re-pushing the cells (preserving local equivalences).
804 void in(ISS& env, const bc::PopFrame& op) {
805 effect_free(env);
807 std::vector<std::pair<Type, LocalId>> vals{op.arg1};
808 for (auto i = op.arg1; i > 0; --i) {
809 vals[i - 1] = {popC(env), topStkEquiv(env)};
811 for (uint32_t i = 0; i < 3; i++) popU(env);
812 for (auto& p : vals) {
813 push(
814 env, std::move(p.first), p.second != StackDupId ? p.second : NoLocalId);
818 void in(ISS& env, const bc::EntryNop&) { effect_free(env); }
// Dup: push the top value twice; the copy is marked stack-dup
// equivalent to the original.
820 void in(ISS& env, const bc::Dup& /*op*/) {
821 effect_free(env);
822 auto equiv = topStkEquiv(env);
823 auto val = popC(env);
824 push(env, val, equiv);
825 push(env, std::move(val), StackDupId);
// Assertions carry no runtime effect; just note the local read.
828 void in(ISS& env, const bc::AssertRATL& op) {
829 mayReadLocal(env, op.loc1);
830 effect_free(env);
833 void in(ISS& env, const bc::AssertRATStk&) {
834 effect_free(env);
837 void in(ISS& env, const bc::BreakTraceHint&) { effect_free(env); }
// CGetCUNop: reading as a cell removes Uninit from the type.
839 void in(ISS& env, const bc::CGetCUNop&) {
840 effect_free(env);
841 auto const t = popCU(env);
842 push(env, remove_uninit(t));
// UGetCUNop: the value is reinterpreted as uninit.
845 void in(ISS& env, const bc::UGetCUNop&) {
846 effect_free(env);
847 popCU(env);
848 push(env, TUninit);
// The handlers below push literal constants; each is effect-free and
// pushes the corresponding exact type/value.
851 void in(ISS& env, const bc::Null&) {
852 effect_free(env);
853 push(env, TInitNull);
856 void in(ISS& env, const bc::NullUninit&) {
857 effect_free(env);
858 push(env, TUninit);
861 void in(ISS& env, const bc::True&) {
862 effect_free(env);
863 push(env, TTrue);
866 void in(ISS& env, const bc::False&) {
867 effect_free(env);
868 push(env, TFalse);
871 void in(ISS& env, const bc::Int& op) {
872 effect_free(env);
873 push(env, ival(op.arg1));
876 void in(ISS& env, const bc::Double& op) {
877 effect_free(env);
878 push(env, dval(op.dbl1));
881 void in(ISS& env, const bc::String& op) {
882 effect_free(env);
883 push(env, sval(op.str1));
886 void in(ISS& env, const bc::Array& op) {
887 assert(op.arr1->isPHPArray());
888 assertx(!RuntimeOption::EvalHackArrDVArrs || op.arr1->isNotDVArray());
889 effect_free(env);
890 push(env, aval(op.arr1));
893 void in(ISS& env, const bc::Vec& op) {
894 assert(op.arr1->isVecArray());
895 effect_free(env);
896 push(env, vec_val(op.arr1));
899 void in(ISS& env, const bc::Dict& op) {
900 assert(op.arr1->isDict());
901 effect_free(env);
902 push(env, dict_val(op.arr1));
905 void in(ISS& env, const bc::Keyset& op) {
906 assert(op.arr1->isKeyset());
907 effect_free(env);
908 push(env, keyset_val(op.arr1));
// Empty-array constructors: a capacity hint of 0 means the array stays
// empty, giving an exact type; otherwise "possibly empty".
911 void in(ISS& env, const bc::NewArray& op) {
912 effect_free(env);
913 push(env, op.arg1 == 0 ? aempty() : some_aempty());
916 void in(ISS& env, const bc::NewDictArray& op) {
917 effect_free(env);
918 push(env, op.arg1 == 0 ? dict_empty(provTagHere(env))
919 : some_dict_empty(provTagHere(env)));
922 void in(ISS& env, const bc::NewMixedArray& op) {
923 effect_free(env);
924 push(env, op.arg1 == 0 ? aempty() : some_aempty());
// NewPackedArray: pop arg1 cells and push a packed array of their
// types.
927 void in(ISS& env, const bc::NewPackedArray& op) {
928 auto elems = std::vector<Type>{};
929 elems.reserve(op.arg1);
930 for (auto i = uint32_t{0}; i < op.arg1; ++i) {
931 elems.push_back(std::move(topC(env, op.arg1 - i - 1)));
933 discard(env, op.arg1);
934 push(env, arr_packed(std::move(elems)));
935 constprop(env);
938 void in(ISS& env, const bc::NewVArray& op) {
939 assertx(!RuntimeOption::EvalHackArrDVArrs);
940 auto elems = std::vector<Type>{};
941 elems.reserve(op.arg1);
942 for (auto i = uint32_t{0}; i < op.arg1; ++i) {
943 elems.push_back(std::move(topC(env, op.arg1 - i - 1)));
945 discard(env, op.arg1);
946 push(env, arr_packed_varray(std::move(elems)));
947 effect_free(env);
948 constprop(env);
951 void in(ISS& env, const bc::NewDArray& op) {
952 assertx(!RuntimeOption::EvalHackArrDVArrs);
953 effect_free(env);
954 push(env, op.arg1 == 0 ? aempty_darray() : some_aempty_darray());
// Records: no element tracking yet, just the generic types.
957 void in(ISS& env, const bc::NewRecord& op) {
958 discard(env, op.keys.size());
959 push(env, TRecord);
962 void in(ISS& env, const bc::NewRecordArray& op) {
963 discard(env, op.keys.size());
964 push(env, TArr);
// Struct-array constructors: pop one value per key (in reverse, so we
// emplace_front to keep key order) and push a map-specialized type.
967 void in(ISS& env, const bc::NewStructArray& op) {
968 auto map = MapElems{};
969 for (auto it = op.keys.end(); it != op.keys.begin(); ) {
970 map.emplace_front(make_tv<KindOfPersistentString>(*--it), popC(env));
972 push(env, arr_map(std::move(map)));
973 effect_free(env);
974 constprop(env);
977 void in(ISS& env, const bc::NewStructDArray& op) {
978 assertx(!RuntimeOption::EvalHackArrDVArrs);
979 auto map = MapElems{};
980 for (auto it = op.keys.end(); it != op.keys.begin(); ) {
981 map.emplace_front(make_tv<KindOfPersistentString>(*--it), popC(env));
983 push(env, arr_map_darray(std::move(map)));
984 effect_free(env);
985 constprop(env);
988 void in(ISS& env, const bc::NewStructDict& op) {
989 auto map = MapElems{};
990 for (auto it = op.keys.end(); it != op.keys.begin(); ) {
991 map.emplace_front(make_tv<KindOfPersistentString>(*--it), popC(env));
993 push(env, dict_map(std::move(map), provTagHere(env)));
994 effect_free(env);
995 constprop(env);
// NewVecArray: pop arg1 cells and push a vec of their types (tagged
// with the current provenance location).
998 void in(ISS& env, const bc::NewVecArray& op) {
999 auto elems = std::vector<Type>{};
1000 elems.reserve(op.arg1);
1001 for (auto i = uint32_t{0}; i < op.arg1; ++i) {
1002 elems.push_back(std::move(topC(env, op.arg1 - i - 1)));
1004 discard(env, op.arg1);
1005 effect_free(env);
1006 constprop(env);
1007 push(env, vec(std::move(elems), provTagHere(env)));
// NewKeysetArray: pop arg1 keys, tracking exact values in a map while
// possible; fall back to keyset_n(ty) when a key isn't a known
// constant, and to Bottom when some key can't be a valid keyset key.
1010 void in(ISS& env, const bc::NewKeysetArray& op) {
1011 assert(op.arg1 > 0);
1012 auto map = MapElems{};
1013 auto ty = TBottom;
1014 auto useMap = true;
1015 auto bad = false;
1016 auto mayThrow = false;
1017 for (auto i = uint32_t{0}; i < op.arg1; ++i) {
1018 auto k = disect_strict_key(popC(env));
1019 mayThrow |= k.mayThrow;
1020 if (k.type == TBottom) {
1021 bad = true;
1022 useMap = false;
1024 if (useMap) {
1025 if (auto const v = k.tv()) {
1026 map.emplace_front(*v, k.type);
1027 } else {
1028 useMap = false;
1031 ty |= std::move(k.type);
1033 if (!mayThrow) effect_free(env);
1034 if (useMap) {
1035 push(env, keyset_map(std::move(map)));
1036 if (!mayThrow) constprop(env);
1037 } else if (!bad) {
1038 push(env, keyset_n(ty));
1039 } else {
// At least one key is definitely invalid: this op always throws.
1040 unreachable(env);
1041 push(env, TBottom);
// NewLikeArrayL: reads the local only for its array kind; result is a
// possibly-empty array.
1045 void in(ISS& env, const bc::NewLikeArrayL& op) {
1046 locAsCell(env, op.loc1);
1047 push(env, some_aempty());
// AddElemC: set key/value on the array three slots down.  When the
// result is statically known, fold the op into a tracked array literal
// (see start_add_elem / mutate_add_elem_array) instead of emitting it.
1050 void in(ISS& env, const bc::AddElemC& /*op*/) {
1051 auto const v = topC(env, 0);
1052 auto const k = topC(env, 1);
1054 auto inTy = (env.state.stack.end() - 3).unspecialize();
1056 auto const tag = provTagHere(env);
// Compute the result type (and throw-mode) of the set, if the base is
// a known array/dict.
1058 auto outTy = [&] (Type ty) ->
1059 folly::Optional<std::pair<Type,ThrowMode>> {
1060 if (ty.subtypeOf(BArr)) {
1061 return array_set(std::move(ty), k, v, tag);
1063 if (ty.subtypeOf(BDict)) {
1064 return dict_set(std::move(ty), k, v, tag);
1066 return folly::none;
1067 }(std::move(inTy));
1069 if (outTy && outTy->second == ThrowMode::None && will_reduce(env)) {
// Continue an in-flight add-elem sequence if the base is the tracked
// array ...
1070 if (!env.trackedElems.empty() &&
1071 env.trackedElems.back().depth + 3 == env.state.stack.size()) {
1072 auto const handled = [&] {
1073 if (!k.subtypeOf(BArrKey)) return false;
1074 auto ktv = tv(k);
1075 if (!ktv) return false;
1076 auto vtv = tv(v);
1077 if (!vtv) return false;
1078 return mutate_add_elem_array(env, tag, [&](ArrayData** arr){
1079 *arr = (*arr)->set(*ktv, *vtv);
1081 }();
1082 if (handled) {
1083 (env.state.stack.end() - 3)->type = std::move(outTy->first);
1084 reduce(env, bc::PopC {}, bc::PopC {});
1085 ITRACE(2, "(addelem* -> {}\n",
1086 show(env.ctx.func,
1087 env.replacedBcs[env.trackedElems.back().idx - env.unchangedBcs]));
1088 return;
1090 } else {
// ... otherwise try to start a new tracked sequence.
1091 if (start_add_elem(env, outTy->first, Op::AddElemC)) {
1092 return;
// Fallback: generic interpretation of the op.
1097 discard(env, 3);
1098 finish_tracked_elems(env, env.state.stack.size());
1100 if (!outTy) {
1101 return push(env, union_of(TArr, TDict));
1104 if (outTy->first.subtypeOf(BBottom)) {
1105 unreachable(env);
1106 } else if (outTy->second == ThrowMode::None) {
1107 effect_free(env);
1108 constprop(env);
1110 push(env, std::move(outTy->first));
1113 void in(ISS& env, const bc::AddNewElemC&) {
1114 auto v = topC(env);
1115 auto inTy = (env.state.stack.end() - 2).unspecialize();
1117 auto const tag = provTagHere(env);
1119 auto outTy = [&] (Type ty) -> folly::Optional<Type> {
1120 if (ty.subtypeOf(BArr)) {
1121 return array_newelem(std::move(ty), std::move(v), tag).first;
1123 if (ty.subtypeOf(BVec)) {
1124 return vec_newelem(std::move(ty), std::move(v), tag).first;
1126 if (ty.subtypeOf(BKeyset)) {
1127 return keyset_newelem(std::move(ty), std::move(v)).first;
1129 return folly::none;
1130 }(std::move(inTy));
1132 if (outTy && will_reduce(env)) {
1133 if (!env.trackedElems.empty() &&
1134 env.trackedElems.back().depth + 2 == env.state.stack.size()) {
1135 auto const handled = [&] {
1136 auto vtv = tv(v);
1137 if (!vtv) return false;
1138 return mutate_add_elem_array(env, tag, [&](ArrayData** arr){
1139 *arr = (*arr)->append(*vtv);
1141 }();
1142 if (handled) {
1143 (env.state.stack.end() - 2)->type = std::move(*outTy);
1144 reduce(env, bc::PopC {});
1145 ITRACE(2, "(addelem* -> {}\n",
1146 show(env.ctx.func,
1147 env.replacedBcs[env.trackedElems.back().idx - env.unchangedBcs]));
1148 return;
1150 } else {
1151 if (start_add_elem(env, *outTy, Op::AddNewElemC)) {
1152 return;
1157 discard(env, 2);
1158 finish_tracked_elems(env, env.state.stack.size());
1160 if (!outTy) {
1161 return push(env, TInitCell);
1164 if (outTy->subtypeOf(BBottom)) {
1165 unreachable(env);
1166 } else {
1167 constprop(env);
1169 push(env, std::move(*outTy));
1172 void in(ISS& env, const bc::NewCol& op) {
1173 auto const type = static_cast<CollectionType>(op.subop1);
1174 auto const name = collections::typeToString(type);
1175 push(env, objExact(env.index.builtin_class(name)));
1176 effect_free(env);
1179 void in(ISS& env, const bc::NewPair& /*op*/) {
1180 popC(env); popC(env);
1181 auto const name = collections::typeToString(CollectionType::Pair);
1182 push(env, objExact(env.index.builtin_class(name)));
1183 effect_free(env);
1186 void in(ISS& env, const bc::ColFromArray& op) {
1187 auto const src = popC(env);
1188 auto const type = static_cast<CollectionType>(op.subop1);
1189 assertx(type != CollectionType::Pair);
1190 if (type == CollectionType::Vector || type == CollectionType::ImmVector) {
1191 if (src.subtypeOf(TVec)) effect_free(env);
1192 } else {
1193 assertx(type == CollectionType::Map ||
1194 type == CollectionType::ImmMap ||
1195 type == CollectionType::Set ||
1196 type == CollectionType::ImmSet);
1197 if (src.subtypeOf(TDict)) effect_free(env);
1199 auto const name = collections::typeToString(type);
1200 push(env, objExact(env.index.builtin_class(name)));
1203 void in(ISS& env, const bc::CnsE& op) {
1204 if (!options.HardConstProp) return push(env, TInitCell);
1205 auto t = env.index.lookup_constant(env.ctx, op.str1);
1206 if (!t) {
1207 // There's no entry for this constant in the index. It must be
1208 // the first iteration, so we'll add a dummy entry to make sure
1209 // there /is/ something next time around.
1210 Cell val;
1211 val.m_type = kReadOnlyConstant;
1212 env.collect.cnsMap.emplace(op.str1, val);
1213 t = TInitCell;
1214 // make sure we're re-analyzed
1215 env.collect.readsUntrackedConstants = true;
1216 } else if (t->strictSubtypeOf(TInitCell)) {
1217 // constprop will take care of nothrow *if* its a constant; and if
1218 // its not, we might trigger autoload.
1219 constprop(env);
1221 push(env, std::move(*t));
1224 void in(ISS& env, const bc::ClsCns& op) {
1225 auto const& t1 = topC(env);
1226 if (is_specialized_cls(t1)) {
1227 auto const dcls = dcls_of(t1);
1228 auto const finish = [&] {
1229 reduce(env, bc::PopC { },
1230 bc::ClsCnsD { op.str1, dcls.cls.name() });
1232 if (dcls.type == DCls::Exact) return finish();
1233 auto const cnst = env.index.lookup_class_const_ptr(env.ctx, dcls.cls,
1234 op.str1, false);
1235 if (cnst && cnst->isNoOverride) return finish();
1237 popC(env);
1238 push(env, TInitCell);
1241 void in(ISS& env, const bc::ClsCnsD& op) {
1242 if (auto const rcls = env.index.resolve_class(env.ctx, op.str2)) {
1243 auto t = env.index.lookup_class_constant(env.ctx, *rcls, op.str1, false);
1244 if (options.HardConstProp) constprop(env);
1245 push(env, std::move(t));
1246 return;
1248 push(env, TInitCell);
1251 void in(ISS& env, const bc::File&) { effect_free(env); push(env, TSStr); }
1252 void in(ISS& env, const bc::Dir&) { effect_free(env); push(env, TSStr); }
1253 void in(ISS& env, const bc::Method&) { effect_free(env); push(env, TSStr); }
1255 void in(ISS& env, const bc::FuncCred&) { effect_free(env); push(env, TObj); }
1257 void in(ISS& env, const bc::ClassName& op) {
1258 auto const ty = topC(env);
1259 if (is_specialized_cls(ty)) {
1260 auto const dcls = dcls_of(ty);
1261 if (dcls.type == DCls::Exact) {
1262 return reduce(env,
1263 bc::PopC {},
1264 bc::String { dcls.cls.name() });
1267 if (ty.subtypeOf(TCls)) nothrow(env);
1268 popC(env);
1269 push(env, TSStr);
1272 void concatHelper(ISS& env, uint32_t n) {
1273 auto changed = false;
1274 auto side_effects = false;
1275 if (will_reduce(env)) {
1276 auto litstr = [&] (SString next, uint32_t i) -> SString {
1277 auto const t = topC(env, i);
1278 auto const v = tv(t);
1279 if (!v) return nullptr;
1280 if (!isStringType(v->m_type) &&
1281 v->m_type != KindOfNull &&
1282 v->m_type != KindOfBoolean &&
1283 v->m_type != KindOfInt64 &&
1284 v->m_type != KindOfDouble) {
1285 return nullptr;
1287 auto const cell = eval_cell_value(
1288 [&] {
1289 auto const s = makeStaticString(
1290 next ?
1291 StringData::Make(tvAsCVarRef(&*v).toString().get(), next) :
1292 tvAsCVarRef(&*v).toString().get());
1293 return make_tv<KindOfString>(s);
1296 if (!cell) return nullptr;
1297 return cell->m_data.pstr;
1300 auto fold = [&] (uint32_t slot, uint32_t num, SString result) {
1301 auto const cell = make_tv<KindOfPersistentString>(result);
1302 auto const ty = from_cell(cell);
1303 BytecodeVec bcs{num, bc::PopC {}};
1304 if (num > 1) bcs.push_back(gen_constant(cell));
1305 if (slot == 0) {
1306 reduce(env, std::move(bcs));
1307 } else {
1308 insert_after_slot(env, slot, num, num > 1 ? 1 : 0, &ty, bcs);
1309 reprocess(env);
1311 n -= num - 1;
1312 changed = true;
1315 for (auto i = 0; i < n; i++) {
1316 if (topC(env, i).couldBe(BObj | BArrLike | BRes)) {
1317 side_effects = true;
1318 break;
1322 if (!side_effects) {
1323 for (auto i = 0; i < n; i++) {
1324 auto const tracked = !env.trackedElems.empty() &&
1325 env.trackedElems.back().depth + i + 1 == env.state.stack.size();
1326 if (tracked) finish_tracked_elems(env, env.trackedElems.back().depth);
1327 auto const prev = op_from_slot(env, i);
1328 if (!prev) continue;
1329 if ((prev->op == Op::Concat && tracked) || prev->op == Op::ConcatN) {
1330 auto const extra = kill_by_slot(env, i);
1331 changed = true;
1332 n += extra;
1333 i += extra;
1338 SString result = nullptr;
1339 uint32_t i = 0;
1340 uint32_t nlit = 0;
1341 while (i < n) {
1342 // In order to collapse literals, we need to be able to insert
1343 // pops, and a constant after the sequence that generated the
1344 // literals. We can always insert after the last instruction
1345 // though, and we only need to check the first slot of a
1346 // sequence.
1347 auto const next = !i || result || can_insert_after_slot(env, i) ?
1348 litstr(result, i) : nullptr;
1349 if (next == staticEmptyString()) {
1350 if (n == 1) break;
1351 assertx(nlit == 0);
1352 fold(i, 1, next);
1353 n--;
1354 continue;
1356 if (!next) {
1357 if (nlit > 1) {
1358 fold(i - nlit, nlit, result);
1359 i -= nlit - 1;
1361 nlit = 0;
1362 } else {
1363 nlit++;
1365 result = next;
1366 i++;
1368 if (nlit > 1) fold(i - nlit, nlit, result);
1371 if (!changed) {
1372 discard(env, n);
1373 if (n == 2 && !side_effects && will_reduce(env)) {
1374 env.trackedElems.emplace_back(
1375 env.state.stack.size(),
1376 env.unchangedBcs + env.replacedBcs.size()
1379 push(env, TStr);
1380 return;
1383 if (n == 1) {
1384 if (!topC(env).subtypeOf(BStr)) {
1385 return reduce(env, bc::CastString {});
1387 return reduce(env);
1390 reduce(env);
1391 // We can't reduce the emitted concats, or we'll end up with
1392 // infinite recursion.
1393 env.flags.wasPEI = true;
1394 env.flags.effectFree = false;
1395 env.flags.canConstProp = false;
1397 auto concat = [&] (uint32_t num) {
1398 discard(env, num);
1399 push(env, TStr);
1400 if (num == 2) {
1401 record(env, bc::Concat {});
1402 } else {
1403 record(env, bc::ConcatN { num });
1407 while (n >= 4) {
1408 concat(4);
1409 n -= 3;
1411 if (n > 1) concat(n);
1414 void in(ISS& env, const bc::Concat& /*op*/) {
1415 concatHelper(env, 2);
1418 void in(ISS& env, const bc::ConcatN& op) {
1419 if (op.arg1 == 2) return reduce(env, bc::Concat {});
1420 concatHelper(env, op.arg1);
1423 template <class Op, class Fun>
1424 void arithImpl(ISS& env, const Op& /*op*/, Fun fun) {
1425 constprop(env);
1426 auto const t1 = popC(env);
1427 auto const t2 = popC(env);
1428 push(env, fun(t2, t1));
1431 void in(ISS& env, const bc::Add& op) { arithImpl(env, op, typeAdd); }
1432 void in(ISS& env, const bc::Sub& op) { arithImpl(env, op, typeSub); }
1433 void in(ISS& env, const bc::Mul& op) { arithImpl(env, op, typeMul); }
1434 void in(ISS& env, const bc::Div& op) { arithImpl(env, op, typeDiv); }
1435 void in(ISS& env, const bc::Mod& op) { arithImpl(env, op, typeMod); }
1436 void in(ISS& env, const bc::Pow& op) { arithImpl(env, op, typePow); }
1437 void in(ISS& env, const bc::BitAnd& op) { arithImpl(env, op, typeBitAnd); }
1438 void in(ISS& env, const bc::BitOr& op) { arithImpl(env, op, typeBitOr); }
1439 void in(ISS& env, const bc::BitXor& op) { arithImpl(env, op, typeBitXor); }
1440 void in(ISS& env, const bc::AddO& op) { arithImpl(env, op, typeAddO); }
1441 void in(ISS& env, const bc::SubO& op) { arithImpl(env, op, typeSubO); }
1442 void in(ISS& env, const bc::MulO& op) { arithImpl(env, op, typeMulO); }
1443 void in(ISS& env, const bc::Shl& op) { arithImpl(env, op, typeShl); }
1444 void in(ISS& env, const bc::Shr& op) { arithImpl(env, op, typeShr); }
1446 void in(ISS& env, const bc::BitNot& /*op*/) {
1447 auto const t = popC(env);
1448 auto const v = tv(t);
1449 if (v) {
1450 constprop(env);
1451 auto cell = eval_cell([&] {
1452 auto c = *v;
1453 cellBitNot(c);
1454 return c;
1456 if (cell) return push(env, std::move(*cell));
1458 push(env, TInitCell);
1461 namespace {
1463 bool couldBeHackArr(Type t) {
1464 return t.couldBe(BVec | BDict | BKeyset);
1467 template<bool NSame>
1468 std::pair<Type,bool> resolveSame(ISS& env) {
1469 auto const l1 = topStkEquiv(env, 0);
1470 auto const t1 = topC(env, 0);
1471 auto const l2 = topStkEquiv(env, 1);
1472 auto const t2 = topC(env, 1);
1474 // EvalHackArrCompatNotices will notice on === and !== between PHP arrays and
1475 // Hack arrays. We can't really do better than this in general because of
1476 // arrays inside these arrays.
1477 auto warningsEnabled =
1478 (RuntimeOption::EvalHackArrCompatNotices ||
1479 RuntimeOption::EvalHackArrCompatDVCmpNotices);
1481 auto const result = [&] {
1482 auto const v1 = tv(t1);
1483 auto const v2 = tv(t2);
1485 if (l1 == StackDupId ||
1486 (l1 == l2 && l1 != NoLocalId) ||
1487 (l1 <= MaxLocalId && l2 <= MaxLocalId && locsAreEquiv(env, l1, l2))) {
1488 if (!t1.couldBe(BDbl) || !t2.couldBe(BDbl) ||
1489 (v1 && (v1->m_type != KindOfDouble || !std::isnan(v1->m_data.dbl))) ||
1490 (v2 && (v2->m_type != KindOfDouble || !std::isnan(v2->m_data.dbl)))) {
1491 return NSame ? TFalse : TTrue;
1495 if (v1 && v2) {
1496 if (auto r = eval_cell_value([&]{ return cellSame(*v2, *v1); })) {
1497 // we wouldn't get here if cellSame raised a warning
1498 warningsEnabled = false;
1499 return r != NSame ? TTrue : TFalse;
1503 return NSame ? typeNSame(t1, t2) : typeSame(t1, t2);
1504 }();
1506 if (warningsEnabled && result == (NSame ? TFalse : TTrue)) {
1507 warningsEnabled = false;
1509 return { result, warningsEnabled && compare_might_raise(t1, t2) };
1512 template<bool Negate>
1513 void sameImpl(ISS& env) {
1514 if (auto const last = last_op(env)) {
1515 if (last->op == Op::Null) {
1516 rewind(env, 1);
1517 reduce(env, bc::IsTypeC { IsTypeOp::Null });
1518 if (Negate) reduce(env, bc::Not {});
1519 return;
1521 if (auto const prev = last_op(env, 1)) {
1522 if (prev->op == Op::Null &&
1523 (last->op == Op::CGetL || last->op == Op::CGetL2)) {
1524 auto const loc = last->op == Op::CGetL ?
1525 last->CGetL.loc1 : last->CGetL2.loc1;
1526 rewind(env, 2);
1527 reduce(env, bc::IsTypeL { loc, IsTypeOp::Null });
1528 if (Negate) reduce(env, bc::Not {});
1529 return;
1534 auto pair = resolveSame<Negate>(env);
1535 discard(env, 2);
1537 if (!pair.second) {
1538 nothrow(env);
1539 constprop(env);
1542 push(env, std::move(pair.first));
1545 template<class JmpOp>
1546 bool sameJmpImpl(ISS& env, Op sameOp, const JmpOp& jmp) {
1547 const StackElem* elems[2];
1548 env.state.stack.peek(2, elems, 1);
1550 auto const loc0 = elems[1]->equivLoc;
1551 auto const loc1 = elems[0]->equivLoc;
1552 // If loc0 == loc1, either they're both NoLocalId, so there's
1553 // nothing for us to deduce, or both stack elements are the same
1554 // value, so the only thing we could deduce is that they are or are
1555 // not NaN. But we don't track that, so just bail.
1556 if (loc0 == loc1 || loc0 == StackDupId) return false;
1558 auto const ty0 = elems[1]->type;
1559 auto const ty1 = elems[0]->type;
1560 auto const val0 = tv(ty0);
1561 auto const val1 = tv(ty1);
1563 assertx(!val0 || !val1);
1564 if ((loc0 == NoLocalId && !val0 && ty1.subtypeOf(ty0)) ||
1565 (loc1 == NoLocalId && !val1 && ty0.subtypeOf(ty1))) {
1566 return false;
1569 // Same currently lies about the distinction between Func/Cls/Str
1570 if (ty0.couldBe(BFunc | BCls) && ty1.couldBe(BStr)) return false;
1571 if (ty1.couldBe(BFunc | BCls) && ty0.couldBe(BStr)) return false;
1573 // We need to loosen away the d/varray bits here because array comparison does
1574 // not take into account the difference.
1575 auto isect = intersection_of(
1576 loosen_provenance(loosen_dvarrayness(ty0)),
1577 loosen_provenance(loosen_dvarrayness(ty1))
1580 // Unfortunately, floating point negative zero and positive zero are
1581 // different, but are identical using as far as Same is concerened. We should
1582 // avoid refining a value to 0.0 because it compares identically to 0.0
1583 if (isect.couldBe(dval(0.0)) || isect.couldBe(dval(-0.0))) {
1584 isect = union_of(isect, TDbl);
1587 discard(env, 1);
1589 auto handle_same = [&] {
1590 // Currently dce uses equivalency to prove that something isn't
1591 // the last reference - so we can only assert equivalency here if
1592 // we know that won't be affected. Its irrelevant for uncounted
1593 // things, and for TObj and TRes, $x === $y iff $x and $y refer to
1594 // the same thing.
1595 if (loc0 <= MaxLocalId &&
1596 (ty0.subtypeOf(BObj | BRes | BPrim) ||
1597 ty1.subtypeOf(BObj | BRes | BPrim) ||
1598 (ty0.subtypeOf(BUnc) && ty1.subtypeOf(BUnc)))) {
1599 if (loc1 == StackDupId) {
1600 setStkLocal(env, loc0, 0);
1601 } else if (loc1 <= MaxLocalId && !locsAreEquiv(env, loc0, loc1)) {
1602 auto loc = loc0;
1603 while (true) {
1604 auto const other = findLocEquiv(env, loc);
1605 if (other == NoLocalId) break;
1606 killLocEquiv(env, loc);
1607 addLocEquiv(env, loc, loc1);
1608 loc = other;
1610 addLocEquiv(env, loc, loc1);
1613 return refineLocation(env, loc1 != NoLocalId ? loc1 : loc0, [&] (Type ty) {
1614 if (!ty.couldBe(BUninit) || !isect.couldBe(BNull)) {
1615 auto ret = intersection_of(std::move(ty), isect);
1616 return ty.subtypeOf(BUnc) ? ret : loosen_staticness(ret);
1619 if (isect.subtypeOf(BNull)) {
1620 return ty.couldBe(BInitNull) ? TNull : TUninit;
1623 return ty;
1627 auto handle_differ_side = [&] (LocalId location, const Type& ty) {
1628 if (!ty.subtypeOf(BInitNull) && !ty.strictSubtypeOf(TBool)) return true;
1629 return refineLocation(env, location, [&] (Type t) {
1630 if (ty.subtypeOf(BNull)) {
1631 t = remove_uninit(std::move(t));
1632 if (is_opt(t)) t = unopt(std::move(t));
1633 return t;
1634 } else if (ty.strictSubtypeOf(TBool) && t.subtypeOf(BBool)) {
1635 return ty == TFalse ? TTrue : TFalse;
1637 return t;
1641 auto handle_differ = [&] {
1642 return
1643 (loc0 == NoLocalId || handle_differ_side(loc0, ty1)) &&
1644 (loc1 == NoLocalId || handle_differ_side(loc1, ty0));
1647 auto const sameIsJmpTarget =
1648 (sameOp == Op::Same) == (JmpOp::op == Op::JmpNZ);
1650 auto save = env.state;
1651 auto const target_reachable = sameIsJmpTarget ?
1652 handle_same() : handle_differ();
1653 if (!target_reachable) jmp_nevertaken(env);
1654 // swap, so we can restore this state if the branch is always taken.
1655 env.state.swap(save);
1656 if (!(sameIsJmpTarget ? handle_differ() : handle_same())) {
1657 jmp_setdest(env, jmp.target1);
1658 env.state.copy_from(std::move(save));
1659 } else if (target_reachable) {
1660 env.propagate(jmp.target1, &save);
1663 return true;
1666 bc::JmpNZ invertJmp(const bc::JmpZ& jmp) { return bc::JmpNZ { jmp.target1 }; }
1667 bc::JmpZ invertJmp(const bc::JmpNZ& jmp) { return bc::JmpZ { jmp.target1 }; }
1671 void in(ISS& env, const bc::Same&) { sameImpl<false>(env); }
1672 void in(ISS& env, const bc::NSame&) { sameImpl<true>(env); }
1674 template<class Fun>
1675 void binOpBoolImpl(ISS& env, Fun fun) {
1676 auto const t1 = popC(env);
1677 auto const t2 = popC(env);
1678 auto const v1 = tv(t1);
1679 auto const v2 = tv(t2);
1680 if (v1 && v2) {
1681 if (auto r = eval_cell_value([&]{ return fun(*v2, *v1); })) {
1682 constprop(env);
1683 return push(env, *r ? TTrue : TFalse);
1686 // TODO_4: evaluate when these can throw, non-constant type stuff.
1687 push(env, TBool);
1690 template<class Fun>
1691 void binOpInt64Impl(ISS& env, Fun fun) {
1692 auto const t1 = popC(env);
1693 auto const t2 = popC(env);
1694 auto const v1 = tv(t1);
1695 auto const v2 = tv(t2);
1696 if (v1 && v2) {
1697 if (auto r = eval_cell_value([&]{ return ival(fun(*v2, *v1)); })) {
1698 constprop(env);
1699 return push(env, std::move(*r));
1702 // TODO_4: evaluate when these can throw, non-constant type stuff.
1703 push(env, TInt);
1706 void in(ISS& env, const bc::Eq&) {
1707 auto rs = resolveSame<false>(env);
1708 if (rs.first == TTrue) {
1709 if (!rs.second) constprop(env);
1710 discard(env, 2);
1711 return push(env, TTrue);
1713 binOpBoolImpl(env, [&] (Cell c1, Cell c2) { return cellEqual(c1, c2); });
1715 void in(ISS& env, const bc::Neq&) {
1716 auto rs = resolveSame<false>(env);
1717 if (rs.first == TTrue) {
1718 if (!rs.second) constprop(env);
1719 discard(env, 2);
1720 return push(env, TFalse);
1722 binOpBoolImpl(env, [&] (Cell c1, Cell c2) { return !cellEqual(c1, c2); });
1724 void in(ISS& env, const bc::Lt&) {
1725 binOpBoolImpl(env, [&] (Cell c1, Cell c2) { return cellLess(c1, c2); });
1727 void in(ISS& env, const bc::Gt&) {
1728 binOpBoolImpl(env, [&] (Cell c1, Cell c2) { return cellGreater(c1, c2); });
1730 void in(ISS& env, const bc::Lte&) { binOpBoolImpl(env, cellLessOrEqual); }
1731 void in(ISS& env, const bc::Gte&) { binOpBoolImpl(env, cellGreaterOrEqual); }
1733 void in(ISS& env, const bc::Cmp&) {
1734 binOpInt64Impl(env, [&] (Cell c1, Cell c2) { return cellCompare(c1, c2); });
1737 void in(ISS& env, const bc::Xor&) {
1738 binOpBoolImpl(env, [&] (Cell c1, Cell c2) {
1739 return cellToBool(c1) ^ cellToBool(c2);
1743 void castBoolImpl(ISS& env, const Type& t, bool negate) {
1744 nothrow(env);
1745 constprop(env);
1747 auto const e = emptiness(t);
1748 switch (e) {
1749 case Emptiness::Empty:
1750 case Emptiness::NonEmpty:
1751 return push(env, (e == Emptiness::Empty) == negate ? TTrue : TFalse);
1752 case Emptiness::Maybe:
1753 break;
1756 push(env, TBool);
1759 void in(ISS& env, const bc::Not&) {
1760 castBoolImpl(env, popC(env), true);
1763 void in(ISS& env, const bc::CastBool&) {
1764 auto const t = topC(env);
1765 if (t.subtypeOf(BBool)) return reduce(env);
1766 castBoolImpl(env, popC(env), false);
1769 void in(ISS& env, const bc::CastInt&) {
1770 auto const t = topC(env);
1771 if (t.subtypeOf(BInt)) return reduce(env);
1772 constprop(env);
1773 popC(env);
1774 // Objects can raise a warning about converting to int.
1775 if (!t.couldBe(BObj)) nothrow(env);
1776 if (auto const v = tv(t)) {
1777 auto cell = eval_cell([&] {
1778 return make_tv<KindOfInt64>(cellToInt(*v));
1780 if (cell) return push(env, std::move(*cell));
1782 push(env, TInt);
1785 // Handle a casting operation, where "target" is the type being casted to. If
1786 // "fn" is provided, it will be called to cast any constant inputs. If "elide"
1787 // is set to true, if the source type is the same as the destination, the cast
1788 // will be optimized away.
1789 void castImpl(ISS& env, Type target, void(*fn)(TypedValue*)) {
1790 auto const t = topC(env);
1791 if (t.subtypeOf(target)) return reduce(env);
1792 popC(env);
1793 if (fn) {
1794 if (auto val = tv(t)) {
1795 if (auto result = eval_cell([&] { fn(&*val); return *val; })) {
1796 constprop(env);
1797 target = *result;
1801 push(env, std::move(target));
1804 void in(ISS& env, const bc::CastDouble&) {
1805 castImpl(env, TDbl, tvCastToDoubleInPlace);
1808 void in(ISS& env, const bc::CastString&) {
1809 castImpl(env, TStr, tvCastToStringInPlace);
1812 void in(ISS& env, const bc::CastArray&) {
1813 castImpl(env, TPArr, tvCastToArrayInPlace);
1816 void in(ISS& env, const bc::CastDict&) {
1817 castImpl(env, TDict, tvCastToDictInPlace);
1820 void in(ISS& env, const bc::CastVec&) {
1821 castImpl(env, TVec, tvCastToVecInPlace);
1824 void in(ISS& env, const bc::CastKeyset&) {
1825 castImpl(env, TKeyset, tvCastToKeysetInPlace);
1828 void in(ISS& env, const bc::CastVArray&) {
1829 assertx(!RuntimeOption::EvalHackArrDVArrs);
1830 castImpl(env, TVArr, tvCastToVArrayInPlace);
1833 void in(ISS& env, const bc::CastDArray&) {
1834 assertx(!RuntimeOption::EvalHackArrDVArrs);
1835 castImpl(env, TDArr, tvCastToDArrayInPlace);
1838 void in(ISS& env, const bc::DblAsBits&) {
1839 nothrow(env);
1840 constprop(env);
1842 auto const ty = popC(env);
1843 if (!ty.couldBe(BDbl)) return push(env, ival(0));
1845 if (auto val = tv(ty)) {
1846 assertx(isDoubleType(val->m_type));
1847 val->m_type = KindOfInt64;
1848 push(env, from_cell(*val));
1849 return;
1852 push(env, TInt);
1855 void in(ISS& env, const bc::Print& /*op*/) {
1856 popC(env);
1857 push(env, ival(1));
1860 void in(ISS& env, const bc::Clone& /*op*/) {
1861 auto val = popC(env);
1862 if (!val.subtypeOf(BObj)) {
1863 val = is_opt(val) ? unopt(std::move(val)) : TObj;
1865 push(env, std::move(val));
1868 void in(ISS& env, const bc::Exit&) { popC(env); push(env, TInitNull); }
1869 void in(ISS& env, const bc::Fatal&) { popC(env); }
1871 void in(ISS& /*env*/, const bc::JmpNS&) {
1872 always_assert(0 && "blocks should not contain JmpNS instructions");
1875 void in(ISS& /*env*/, const bc::Jmp&) {
1876 always_assert(0 && "blocks should not contain Jmp instructions");
1879 void in(ISS& env, const bc::Select& op) {
1880 auto const cond = topC(env);
1881 auto const t = topC(env, 1);
1882 auto const f = topC(env, 2);
1884 nothrow(env);
1885 constprop(env);
1887 switch (emptiness(cond)) {
1888 case Emptiness::Maybe:
1889 discard(env, 3);
1890 push(env, union_of(t, f));
1891 return;
1892 case Emptiness::NonEmpty:
1893 discard(env, 3);
1894 push(env, t);
1895 return;
1896 case Emptiness::Empty:
1897 return reduce(env, bc::PopC {}, bc::PopC {});
1899 not_reached();
1902 namespace {
1904 template<class JmpOp>
1905 bool isTypeHelper(ISS& env,
1906 IsTypeOp typeOp,
1907 LocalId location,
1908 Op op,
1909 const JmpOp& jmp) {
1910 if (typeOp == IsTypeOp::Scalar || typeOp == IsTypeOp::ArrLike) {
1911 return false;
1914 auto const val = [&] {
1915 if (op != Op::IsTypeC) return locRaw(env, location);
1916 const StackElem* elem;
1917 env.state.stack.peek(1, &elem, 1);
1918 location = elem->equivLoc;
1919 return elem->type;
1920 }();
1922 if (location == NoLocalId || !val.subtypeOf(BCell)) return false;
1924 // If the type could be ClsMeth and Arr/Vec, skip location refining.
1925 // Otherwise, refine location based on the testType.
1926 auto testTy = type_of_istype(typeOp);
1927 if (RuntimeOption::EvalIsCompatibleClsMethType && val.couldBe(BClsMeth)) {
1928 assertx(RuntimeOption::EvalEmitClsMethPointers);
1929 if (RuntimeOption::EvalHackArrDVArrs) {
1930 if ((typeOp == IsTypeOp::Vec) || (typeOp == IsTypeOp::VArray)) {
1931 if (val.couldBe(BVec | BVArr)) return false;
1932 testTy = TClsMeth;
1934 } else {
1935 if ((typeOp == IsTypeOp::Arr) || (typeOp == IsTypeOp::VArray)) {
1936 if (val.couldBe(BArr | BVArr)) return false;
1937 testTy = TClsMeth;
1942 assertx(val.couldBe(testTy) &&
1943 (!val.subtypeOf(testTy) || val.subtypeOf(BObj)));
1945 discard(env, 1);
1947 if (op == Op::IsTypeC) {
1948 if (!is_type_might_raise(testTy, val)) nothrow(env);
1949 } else if (op == Op::IssetL) {
1950 nothrow(env);
1951 } else if (!locCouldBeUninit(env, location) &&
1952 !is_type_might_raise(testTy, val)) {
1953 nothrow(env);
1956 auto const negate = (jmp.op == Op::JmpNZ) == (op != Op::IssetL);
1957 auto const was_true = [&] (Type t) {
1958 if (testTy.subtypeOf(BNull)) return intersection_of(t, TNull);
1959 assertx(!testTy.couldBe(BNull));
1960 return intersection_of(t, testTy);
1962 auto const was_false = [&] (Type t) {
1963 auto tinit = remove_uninit(t);
1964 if (testTy.subtypeOf(BNull)) {
1965 return is_opt(tinit) ? unopt(tinit) : tinit;
1967 if (is_opt(tinit)) {
1968 assertx(!testTy.couldBe(BNull));
1969 if (unopt(tinit).subtypeOf(testTy)) return TNull;
1971 return t;
1974 auto const pre = [&] (Type t) {
1975 return negate ? was_true(std::move(t)) : was_false(std::move(t));
1978 auto const post = [&] (Type t) {
1979 return negate ? was_false(std::move(t)) : was_true(std::move(t));
1982 refineLocation(env, location, pre, jmp.target1, post);
1983 return true;
1986 // If the current function is a memoize wrapper, return the inferred return type
1987 // of the function being wrapped along with if the wrapped function is effect
1988 // free.
1989 std::pair<Type, bool> memoizeImplRetType(ISS& env) {
1990 always_assert(env.ctx.func->isMemoizeWrapper);
1992 // Lookup the wrapped function. This should always resolve to a precise
1993 // function but we don't rely on it.
1994 auto const memo_impl_func = [&] {
1995 if (env.ctx.func->cls) {
1996 auto const clsTy = selfClsExact(env);
1997 return env.index.resolve_method(
1998 env.ctx,
1999 clsTy ? *clsTy : TCls,
2000 memoize_impl_name(env.ctx.func)
2003 return env.index.resolve_func(env.ctx, memoize_impl_name(env.ctx.func));
2004 }();
2006 // Infer the return type of the wrapped function, taking into account the
2007 // types of the parameters for context sensitive types.
2008 auto const numArgs = env.ctx.func->params.size();
2009 CompactVector<Type> args{numArgs};
2010 for (auto i = LocalId{0}; i < numArgs; ++i) {
2011 args[i] = locAsCell(env, i);
2014 // Determine the context the wrapped function will be called on.
2015 auto const ctxType = [&]() -> Type {
2016 if (env.ctx.func->cls) {
2017 if (env.ctx.func->attrs & AttrStatic) {
2018 // The class context for static methods is the method's class,
2019 // if LSB is not specified.
2020 auto const clsTy =
2021 env.ctx.func->isMemoizeWrapperLSB ?
2022 selfCls(env) :
2023 selfClsExact(env);
2024 return clsTy ? *clsTy : TCls;
2025 } else {
2026 return thisTypeNonNull(env);
2029 return TBottom;
2030 }();
2032 auto retTy = env.index.lookup_return_type(
2033 env.ctx,
2034 args,
2035 ctxType,
2036 memo_impl_func
2038 auto const effectFree = env.index.is_effect_free(memo_impl_func);
2039 // Regardless of anything we know the return type will be an InitCell (this is
2040 // a requirement of memoize functions).
2041 if (!retTy.subtypeOf(BInitCell)) return { TInitCell, effectFree };
2042 return { retTy, effectFree };
2045 template<class JmpOp>
2046 bool instanceOfJmpImpl(ISS& env,
2047 const bc::InstanceOfD& inst,
2048 const JmpOp& jmp) {
2050 const StackElem* elem;
2051 env.state.stack.peek(1, &elem, 1);
2053 auto const locId = elem->equivLoc;
2054 if (locId == NoLocalId || interface_supports_non_objects(inst.str1)) {
2055 return false;
2057 auto const rcls = env.index.resolve_class(env.ctx, inst.str1);
2058 if (!rcls) return false;
2060 auto const val = elem->type;
2061 auto const instTy = subObj(*rcls);
2062 assertx(!val.subtypeOf(instTy) && val.couldBe(instTy));
2064 if (rcls->couldBeInterface()) return false;
2066 // If we have an optional type, whose unopt is guaranteed to pass
2067 // the instanceof check, then failing to pass implies it was null.
2068 auto const fail_implies_null = is_opt(val) && unopt(val).subtypeOf(instTy);
2070 discard(env, 1);
2071 auto const negate = jmp.op == Op::JmpNZ;
2072 auto const result = [&] (Type t, bool pass) {
2073 return pass ? instTy : fail_implies_null ? TNull : t;
2075 auto const pre = [&] (Type t) { return result(t, negate); };
2076 auto const post = [&] (Type t) { return result(t, !negate); };
2077 refineLocation(env, locId, pre, jmp.target1, post);
2078 return true;
/*
 * Refine types based on an IsTypeStructC immediately followed by a
 * conditional jump.  When the type structure is a statically-known
 * class/interface check, this performs exactly the same refinement an
 * InstanceOfD would.  Returns true if the jump was consumed.
 */
template<class JmpOp>
bool isTypeStructCJmpImpl(ISS& env,
                          const bc::IsTypeStructC& inst,
                          const JmpOp& jmp) {
  // Peek at IsTypeStructC's two inputs without popping: elems[0] is the
  // tested value, elems[1] is the type-structure array.
  const StackElem* elems[2];
  env.state.stack.peek(2, elems, 1);

  // Only useful when the tested value is known to alias a local.
  auto const locId = elems[0]->equivLoc;
  if (locId == NoLocalId) return false;

  // The type structure must be a statically-known constant.
  auto const a = tv(elems[1]->type);
  if (!a) return false;
  // if it wasn't valid, the JmpOp wouldn't be reachable
  assertx(isValidTSType(*a, false));

  auto const is_nullable_ts = is_ts_nullable(a->m_data.parr);
  auto const ts_kind = get_ts_kind(a->m_data.parr);
  // type_of_type_structure does not resolve these types. It is important we
  // do resolve them here, or we may have issues when we reduce the checks to
  // InstanceOfD checks. This logic performs the same exact refinement as
  // instanceOfD will.
  if (is_nullable_ts ||
      (ts_kind != TypeStructure::Kind::T_class &&
       ts_kind != TypeStructure::Kind::T_interface &&
       ts_kind != TypeStructure::Kind::T_xhp &&
       ts_kind != TypeStructure::Kind::T_unresolved)) {
    return false;
  }

  auto const clsName = get_ts_classname(a->m_data.parr);
  auto const rcls = env.index.resolve_class(env.ctx, clsName);
  // Bail on unresolved classes, enums, and interfaces implementable by
  // non-objects, where instanceof-style refinement would be unsound.
  if (!rcls ||
      !rcls->resolved() ||
      rcls->cls()->attrs & AttrEnum ||
      interface_supports_non_objects(clsName)) {
    return false;
  }

  auto const val = elems[0]->type;
  auto const instTy = subObj(*rcls);
  // Statically always-true or always-false checks leave nothing to refine.
  if (val.subtypeOf(instTy) || !val.couldBe(instTy)) {
    return false;
  }

  // If we have an optional type, whose unopt is guaranteed to pass
  // the instanceof check, then failing to pass implies it was null.
  auto const fail_implies_null = is_opt(val) && unopt(val).subtypeOf(instTy);

  discard(env, 1);

  auto const negate = jmp.op == Op::JmpNZ;
  auto const result = [&] (Type t, bool pass) {
    return pass ? instTy : fail_implies_null ? TNull : t;
  };
  auto const pre  = [&] (Type t) { return result(t, negate); };
  auto const post = [&] (Type t) { return result(t, !negate); };
  refineLocation(env, locId, pre, jmp.target1, post);
  return true;
}
/*
 * Shared implementation of JmpZ/JmpNZ.  Tries, in order:
 *  - folding the branch when the operand's emptiness is statically known,
 *  - dropping the branch when both successors are the same real block,
 *  - fusing with the previous instruction (Not / Same / IssetL / IsTypeL /
 *    IsTypeC / InstanceOfD / IsTypeStructC) to refine types on each edge,
 *  - otherwise refining the tested location by emptiness.
 */
template<class JmpOp>
void jmpImpl(ISS& env, const JmpOp& op) {
  auto const Negate = std::is_same<JmpOp, bc::JmpNZ>::value;
  auto const location = topStkEquiv(env);
  auto const e = emptiness(topC(env));
  // Branch always taken: replace with PopC and redirect control flow.
  if (e == (Negate ? Emptiness::NonEmpty : Emptiness::Empty)) {
    reduce(env, bc::PopC {});
    return jmp_setdest(env, op.target1);
  }

  // Branch never taken, or both edges reach the same real block: the jump
  // is a no-op beyond popping its operand.
  if (e == (Negate ? Emptiness::Empty : Emptiness::NonEmpty) ||
      (next_real_block(*env.ctx.func, env.blk.fallthrough) ==
       next_real_block(*env.ctx.func, op.target1))) {
    return reduce(env, bc::PopC{});
  }

  // After one of the *JmpImpl helpers consumes the jump, rewind the jump
  // itself and emit the PopC it subsumed, preserving the chosen jmpDest.
  auto fix = [&] {
    if (env.flags.jmpDest == NoBlockId) return;
    auto const jmpDest = env.flags.jmpDest;
    env.flags.jmpDest = NoBlockId;
    rewind(env, op);
    reduce(env, bc::PopC {});
    env.flags.jmpDest = jmpDest;
  };

  if (auto const last = last_op(env)) {
    // Not; Jmp  ==>  inverted Jmp.
    if (last->op == Op::Not) {
      rewind(env, 1);
      return reduce(env, invertJmp(op));
    }
    if (last->op == Op::Same || last->op == Op::NSame) {
      if (sameJmpImpl(env, last->op, op)) return fix();
    } else if (last->op == Op::IssetL) {
      // IssetL behaves as a negated null check on the local.
      // NOTE(review): reads loc1 through the IsTypeL view even though the op
      // is IssetL — appears to rely on matching immediate layout; confirm.
      if (isTypeHelper(env,
                       IsTypeOp::Null,
                       last->IsTypeL.loc1,
                       last->op,
                       op)) {
        return fix();
      }
    } else if (last->op == Op::IsTypeL) {
      if (isTypeHelper(env,
                       last->IsTypeL.subop2,
                       last->IsTypeL.loc1,
                       last->op,
                       op)) {
        return fix();
      }
    } else if (last->op == Op::IsTypeC) {
      if (isTypeHelper(env,
                       last->IsTypeC.subop1,
                       NoLocalId,
                       last->op,
                       op)) {
        return fix();
      }
    } else if (last->op == Op::InstanceOfD) {
      if (instanceOfJmpImpl(env, last->InstanceOfD, op)) return fix();
    } else if (last->op == Op::IsTypeStructC) {
      if (isTypeStructCJmpImpl(env, last->IsTypeStructC, op)) return fix();
    }
  }

  popC(env);
  nothrow(env);

  if (location == NoLocalId) return env.propagate(op.target1, &env.state);

  // Refine the tested location along both the taken and fallthrough edges.
  refineLocation(env, location,
                 Negate ? assert_nonemptiness : assert_emptiness,
                 op.target1,
                 Negate ? assert_emptiness : assert_nonemptiness);
  return fix();
}
2217 } // namespace
2219 void in(ISS& env, const bc::JmpNZ& op) { jmpImpl(env, op); }
2220 void in(ISS& env, const bc::JmpZ& op) { jmpImpl(env, op); }
/*
 * Integer switch.  When the operand is a known constant we resolve the
 * target statically and reduce to PopC + an unconditional jump; otherwise
 * we propagate state to every possible target.
 */
void in(ISS& env, const bc::Switch& op) {
  auto v = tv(topC(env));

  if (v) {
    // Replace the switch with PopC and redirect flow to blk.
    auto go = [&] (BlockId blk) {
      reduce(env, bc::PopC {});
      return jmp_setdest(env, blk);
    };

    auto num_elems = op.targets.size();
    if (op.subop1 == SwitchKind::Unbounded) {
      // Unbounded switches only fold for ints in [0, num_elems).
      if (v->m_type == KindOfInt64 &&
          v->m_data.num >= 0 && v->m_data.num < num_elems) {
        return go(op.targets[v->m_data.num]);
      }
    } else {
      // Bounded: the last two targets are the out-of-range/default cases.
      assertx(num_elems > 2);
      num_elems -= 2;
      for (auto i = size_t{}; ; i++) {
        if (i == num_elems) {
          return go(op.targets.back());
        }
        // Compare with PHP equality semantics; give up folding if the
        // comparison can't be evaluated at analysis time.
        auto match = eval_cell_value([&] {
          return cellEqual(*v, static_cast<int64_t>(op.arg2 + i));
        });
        if (!match) break;
        if (*match) {
          return go(op.targets[i]);
        }
      }
    }
  }

  popC(env);
  forEachTakenEdge(op, [&] (BlockId id) {
    env.propagate(id, &env.state);
  });
}
/*
 * String switch.  With a known constant operand, try each case in order (a
 * null key marks the default entry, which always matches) and reduce to a
 * direct jump on a definite match; otherwise propagate to all targets.
 */
void in(ISS& env, const bc::SSwitch& op) {
  auto v = tv(topC(env));

  if (v) {
    for (auto& kv : op.targets) {
      auto match = eval_cell_value([&] {
        return !kv.first || cellEqual(*v, kv.first);
      });
      // Comparison not statically evaluable: stop trying to fold.
      if (!match) break;
      if (*match) {
        reduce(env, bc::PopC {});
        return jmp_setdest(env, kv.second);
      }
    }
  }

  popC(env);
  forEachTakenEdge(op, [&] (BlockId id) {
    env.propagate(id, &env.state);
  });
}
2282 void in(ISS& env, const bc::RetC& /*op*/) {
2283 auto const locEquiv = topStkLocal(env);
2284 doRet(env, popC(env), false);
2285 if (locEquiv != NoLocalId && locEquiv < env.ctx.func->params.size()) {
2286 env.flags.retParam = locEquiv;
2289 void in(ISS& env, const bc::RetM& op) {
2290 std::vector<Type> ret(op.arg1);
2291 for (int i = 0; i < op.arg1; i++) {
2292 ret[op.arg1 - i - 1] = popC(env);
2294 doRet(env, vec(std::move(ret), provTagHere(env)), false);
2297 void in(ISS& env, const bc::RetCSuspended&) {
2298 always_assert(env.ctx.func->isAsync && !env.ctx.func->isGenerator);
2300 auto const t = popC(env);
2301 doRet(
2302 env,
2303 is_specialized_wait_handle(t) ? wait_handle_inner(t) : TInitCell,
2304 false
// Throw consumes its exception operand from the stack.
void in(ISS& env, const bc::Throw& /*op*/) {
  popC(env);
}
// ChainFaults' net stack effect here is a single cell consumed.
void in(ISS& env, const bc::ChainFaults&) {
  popC(env);
}
/*
 * Body of a builtin (native) function.  We can't see through the native
 * implementation, so all locals are invalidated and VV may be used.  The
 * return type comes from the native signature when one is available.
 */
void in(ISS& env, const bc::NativeImpl&) {
  killLocals(env);
  mayUseVV(env);

  // Collection methods that return $this have a precisely-known result type.
  if (is_collection_method_returning_this(env.ctx.cls, env.ctx.func)) {
    auto const resCls = env.index.builtin_class(env.ctx.cls->name);
    return doRet(env, objExact(resCls), true);
  }

  if (env.ctx.func->nativeInfo) {
    return doRet(env, native_function_return_type(env.ctx.func), true);
  }
  doRet(env, TInitCell, true);
}
/*
 * Push a copy of a local.  Strengthenings applied, in order:
 *  - reads of $this become BareThis with the cheapest safe notice mode,
 *  - a PopL of the same local immediately before becomes SetL (the value
 *    never really left the stack; requires reprocessing the block),
 *  - reads are redirected to the smallest equivalent local,
 *  - known-initialized reads are nothrow and const-foldable.
 */
void in(ISS& env, const bc::CGetL& op) {
  if (locIsThis(env, op.loc1)) {
    auto const& ty = peekLocRaw(env, op.loc1);
    if (!ty.subtypeOf(BInitNull)) {
      // Pick the weakest BareThis variant the local's type allows.
      auto const subop = ty.couldBe(BUninit) ?
        BareThisOp::Notice : ty.couldBe(BNull) ?
        BareThisOp::NoNotice : BareThisOp::NeverNull;
      return reduce(env, bc::BareThis { subop });
    }
  }
  if (auto const last = last_op(env)) {
    // PopL l; CGetL l  ==>  SetL l.  The local's pre-PopL state is lost, so
    // the block must be re-analyzed with the new instruction sequence.
    if (!is_pseudomain(env.ctx.func) && last->op == Op::PopL &&
        op.loc1 == last->PopL.loc1) {
      reprocess(env);
      rewind(env, 1);
      setLocRaw(env, op.loc1, TCell);
      return reduce(env, bc::SetL { op.loc1 });
    }
  }
  if (!peekLocCouldBeUninit(env, op.loc1)) {
    auto const minLocEquiv = findMinLocEquiv(env, op.loc1, false);
    if (minLocEquiv != NoLocalId) {
      return reduce(env, bc::CGetL { minLocEquiv });
    }
    nothrow(env);
    constprop(env);
  }
  mayReadLocal(env, op.loc1);
  push(env, locAsCell(env, op.loc1), op.loc1);
}
/*
 * Like CGetL but never raises an undefined-variable notice, so it is always
 * nothrow.  $this reads become BareThis, and other reads are redirected to
 * the smallest equivalent local.
 */
void in(ISS& env, const bc::CGetQuietL& op) {
  if (locIsThis(env, op.loc1)) {
    return reduce(env, bc::BareThis { BareThisOp::NoNotice });
  }
  auto const minLocEquiv = findMinLocEquiv(env, op.loc1, true);
  if (minLocEquiv != NoLocalId) {
    return reduce(env, bc::CGetQuietL { minLocEquiv });
  }

  nothrow(env);
  constprop(env);
  mayReadLocal(env, op.loc1);
  push(env, locAsCell(env, op.loc1), op.loc1);
}
2378 void in(ISS& env, const bc::CUGetL& op) {
2379 auto ty = locRaw(env, op.loc1);
2380 if (ty.subtypeOf(BUninit)) {
2381 return reduce(env, bc::NullUninit {});
2383 nothrow(env);
2384 if (!ty.couldBe(BUninit)) constprop(env);
2385 if (!ty.subtypeOf(BCell)) ty = TCell;
2386 push(env, std::move(ty), op.loc1);
/*
 * PushL moves a local onto the stack, unsetting the local.  Known-constant
 * values become UnsetL + the constant; otherwise prefer the smallest
 * equivalent local, or fuse with an immediately-preceding PopL of the same
 * local.
 */
void in(ISS& env, const bc::PushL& op) {
  if (auto val = tv(peekLocRaw(env, op.loc1))) {
    return reduce(env, bc::UnsetL { op.loc1 }, gen_constant(*val));
  }

  auto const minLocEquiv = findMinLocEquiv(env, op.loc1, false);
  if (minLocEquiv != NoLocalId) {
    return reduce(env, bc::CGetL { minLocEquiv }, bc::UnsetL { op.loc1 });
  }

  if (auto const last = last_op(env)) {
    if (last->op == Op::PopL &&
        last->PopL.loc1 == op.loc1) {
      // rewind is ok, because we're just going to unset the local
      // (and note the unset can't be a no-op because the PopL set it
      // to an InitCell). But its possible that before the PopL, the
      // local *was* unset, so maybe would have killed the no-op. The
      // only way to fix that is to reprocess the block with the new
      // instruction sequence and see what happens.
      reprocess(env);
      rewind(env, 1);
      return reduce(env, bc::UnsetL { op.loc1 });
    }
  }

  impl(env, bc::CGetL { op.loc1 }, bc::UnsetL { op.loc1 });
}
/*
 * CGetL2 pushes a local *under* the current top of stack.  If the previous
 * instruction simply produced that top value, commute into CGetL + <other>
 * instead.  Otherwise simulate by popping the top, pushing the local, and
 * re-pushing the saved top.
 */
void in(ISS& env, const bc::CGetL2& op) {
  if (auto const last = last_op(env)) {
    // Safe to commute when the previous op popped nothing (and is poppable),
    // or was a CGetL of a known-initialized local.
    if ((poppable(last->op) && !numPop(*last)) ||
        (last->op == Op::CGetL && !peekLocCouldBeUninit(env, op.loc1))) {
      auto const other = *last;
      rewind(env, 1);
      return reduce(env, bc::CGetL { op.loc1 }, other);
    }
  }

  if (!peekLocCouldBeUninit(env, op.loc1)) {
    auto const minLocEquiv = findMinLocEquiv(env, op.loc1, false);
    if (minLocEquiv != NoLocalId) {
      return reduce(env, bc::CGetL2 { minLocEquiv });
    }
    effect_free(env);
  }
  mayReadLocal(env, op.loc1);
  auto loc = locAsCell(env, op.loc1);
  auto topEquiv = topStkLocal(env);
  auto top = popT(env);
  push(env, std::move(loc), op.loc1);
  push(env, std::move(top), topEquiv);
}
2442 void in(ISS& env, const bc::CGetG&) { popC(env); push(env, TInitCell); }
/*
 * Static property read.  For a statically-known self:: private property we
 * use the locally-tracked type; otherwise fall back to the index's view of
 * public statics, and finally to TInitCell.
 */
void in(ISS& env, const bc::CGetS& op) {
  auto const tcls  = popC(env);
  auto const tname = popC(env);
  auto const vname = tv(tname);
  auto const self  = selfCls(env);

  if (vname && vname->m_type == KindOfPersistentString &&
      self && tcls.subtypeOf(*self)) {
    if (auto ty = selfPropAsCell(env, vname->m_data.pstr)) {
      // Only nothrow when we know it's a private declared property (and thus
      // accessible here), class initialization won't throw, and its not a
      // LateInit prop (which will throw if not initialized).
      if (!classInitMightRaise(env, tcls) &&
          !isMaybeLateInitSelfProp(env, vname->m_data.pstr)) {
        nothrow(env);
      }
      // We can only constprop here if we know for sure this is exactly the
      // correct class. The reason for this is that you could have a LSB
      // class attempting to access a private static in a derived class with
      // the same name as a private static in this class, which is supposed to
      // fatal at runtime (for an example see test/quick/static_sprop2.php).
      auto const selfExact = selfClsExact(env);
      if (selfExact && tcls.subtypeOf(*selfExact)) constprop(env);

      if (ty->subtypeOf(BBottom)) unreachable(env);
      return push(env, std::move(*ty));
    }
  }

  auto indexTy = env.index.lookup_public_static(env.ctx, tcls, tname);
  if (indexTy.subtypeOf(BInitCell)) {
    /*
     * Constant propagation here can change when we invoke autoload, so it's
     * considered HardConstProp. It's safe not to check anything about private
     * or protected static properties, because you can't override a public
     * static property with a private or protected one---if the index gave us
     * back a constant type, it's because it found a public static and it must
     * be the property this would have read dynamically.
     */
    if (options.HardConstProp &&
        !classInitMightRaise(env, tcls) &&
        !env.index.lookup_public_static_maybe_late_init(tcls, tname)) {
      constprop(env);
    }
    if (indexTy.subtypeOf(BBottom)) unreachable(env);
    return push(env, std::move(indexTy));
  }

  push(env, TInitCell);
}
/*
 * Convert the value on the stack to a class: a no-op for classes, objcls
 * for objects, and an exact resolved class for known class names; anything
 * else yields TCls.
 */
void in(ISS& env, const bc::ClassGetC& op) {
  auto const t = topC(env);

  if (t.subtypeOf(BCls)) return reduce(env, bc::Nop {});
  popC(env);

  if (t.subtypeOf(BObj)) {
    effect_free(env);
    push(env, objcls(t));
    return;
  }

  if (auto const clsname = getNameFromType(t)) {
    if (auto const rcls = env.index.resolve_class(env.ctx, clsname)) {
      auto const cls = rcls->cls();
      // Effect-free only when no class loading can occur: the class is
      // persistent, or it is the class we're currently analyzing.
      if (cls &&
          ((cls->attrs & AttrPersistent) ||
           cls == env.ctx.cls)) {
        effect_free(env);
      }
      push(env, clsExact(*rcls));
      return;
    }
  }

  push(env, TCls);
}
/*
 * Extract a class and its (possibly absent) reified generics from a type
 * structure.
 * TODO(T31677864): implement real optimizations
 */
void in(ISS& env, const bc::ClassGetTS& op) {
  auto const ts = popC(env);
  // Type structures are dicts (darrays before HackArrDVArrs); any other
  // input cannot produce a value.
  auto const requiredTSType = RuntimeOption::EvalHackArrDVArrs ? BDict : BDArr;
  if (!ts.couldBe(requiredTSType)) {
    push(env, TBottom);
    return;
  }

  auto const& genericsType =
    RuntimeOption::EvalHackArrDVArrs ? TVec : TVArr;

  push(env, TCls);
  push(env, opt(genericsType));
}
/*
 * array_key_exists.  Computes as precise a boolean as possible from the
 * base/key types, while annotating constprop/effect-free/unreachable flags
 * according to whether the lookup can raise.
 */
void in(ISS& env, const bc::AKExists& /*op*/) {
  auto const base = popC(env);
  auto const key  = popC(env);

  // Bases other than array-like or object will raise a warning and return
  // false.
  if (!base.couldBeAny(TArr, TVec, TDict, TKeyset, TObj)) {
    return push(env, TFalse);
  }

  // Push the returned type and annotate effects appropriately, taking into
  // account if the base might be null. Allowing for a possibly null base lets
  // us capture more cases.
  auto const finish = [&] (const Type& t, bool mayThrow) {
    if (base.couldBe(BInitNull)) return push(env, union_of(t, TFalse));
    if (!mayThrow) {
      constprop(env);
      effect_free(env);
    }
    if (base.subtypeOf(BBottom)) unreachable(env);
    return push(env, t);
  };

  // Helper for Hack arrays. "validKey" is the set of key types which can return
  // a value from AKExists. "silentKey" is the set of key types which will
  // silently return false (anything else throws). The Hack array elem functions
  // will treat values of "silentKey" as throwing, so we must identify those
  // cases and deal with them.
  auto const hackArr = [&] (std::pair<Type, ThrowMode> elem,
                            const Type& validKey,
                            const Type& silentKey) {
    switch (elem.second) {
      case ThrowMode::None:
        assertx(key.subtypeOf(validKey));
        return finish(TTrue, false);
      case ThrowMode::MaybeMissingElement:
        assertx(key.subtypeOf(validKey));
        return finish(TBool, false);
      case ThrowMode::MissingElement:
        assertx(key.subtypeOf(validKey));
        return finish(TFalse, false);
      case ThrowMode::MaybeBadKey:
        assertx(key.couldBe(validKey));
        return finish(
          elem.first.subtypeOf(BBottom) ? TFalse : TBool,
          !key.subtypeOf(BOptArrKey)
        );
      case ThrowMode::BadOperation:
        assertx(!key.couldBe(validKey));
        return finish(key.couldBe(silentKey) ? TFalse : TBottom, true);
    }
  };

  // Vecs will throw for any key other than Int, Str, or Null, and will silently
  // return false for the latter two.
  if (base.subtypeOrNull(BVec)) {
    if (key.subtypeOrNull(BStr)) return finish(TFalse, false);
    return hackArr(vec_elem(base, key, TBottom), TInt, TOptStr);
  }

  // Dicts and keysets will throw for any key other than Int, Str, or Null,
  // and will silently return false for Null.
  if (base.subtypeOfAny(TOptDict, TOptKeyset)) {
    if (key.subtypeOf(BInitNull)) return finish(TFalse, false);
    auto const elem = base.subtypeOrNull(BDict)
      ? dict_elem(base, key, TBottom)
      : keyset_elem(base, key, TBottom);
    return hackArr(elem, TArrKey, TInitNull);
  }

  if (base.subtypeOrNull(BArr)) {
    // Unlike Idx, AKExists will transform a null key on arrays into the static
    // empty string, so we don't need to do any fixups here.
    auto const elem = array_elem(base, key, TBottom);
    switch (elem.second) {
      case ThrowMode::None:                return finish(TTrue, false);
      case ThrowMode::MaybeMissingElement: return finish(TBool, false);
      case ThrowMode::MissingElement:      return finish(TFalse, false);
      case ThrowMode::MaybeBadKey:
        return finish(elem.first.subtypeOf(BBottom) ? TFalse : TBool, true);
      case ThrowMode::BadOperation:        always_assert(false);
    }
  }

  // Objects or other unions of possible bases
  push(env, TBool);
}
/*
 * Compute the memoization-cache key for a local inside a memoize wrapper.
 * When the parameter's type constraint pins down a cheap key scheme, reduce
 * to inline bytecode; otherwise fall back to the generic
 * serialize_memoize_param scheme.
 */
void in(ISS& env, const bc::GetMemoKeyL& op) {
  always_assert(env.ctx.func->isMemoizeWrapper);

  auto const rclsIMemoizeParam = env.index.builtin_class(s_IMemoizeParam.get());
  auto const tyIMemoizeParam = subObj(rclsIMemoizeParam);

  auto const inTy = locAsCell(env, op.loc1);

  // If the local could be uninit, we might raise a warning (as
  // usual). Converting an object to a memo key might invoke PHP code if it has
  // the IMemoizeParam interface, and if it doesn't, we'll throw.
  if (!locCouldBeUninit(env, op.loc1) &&
      !inTy.couldBeAny(TObj, TArr, TVec, TDict)) {
    nothrow(env); constprop(env);
  }

  // If type constraints are being enforced and the local being turned into a
  // memo key is a parameter, then we can possibly use the type constraint to
  // infer a more efficient memo key mode.
  using MK = MemoKeyConstraint;
  folly::Optional<res::Class> resolvedCls;
  auto const mkc = [&] {
    if (op.loc1 >= env.ctx.func->params.size()) return MK::None;
    auto tc = env.ctx.func->params[op.loc1].typeConstraint;
    if (tc.type() == AnnotType::Object) {
      auto res = env.index.resolve_type_name(tc.typeName());
      if (res.type != AnnotType::Object) {
        tc.resolveType(res.type, res.nullable || tc.isNullable());
      } else {
        resolvedCls = env.index.resolve_class(env.ctx, tc.typeName());
      }
    }
    return memoKeyConstraintFromTC(tc);
  }();

  // Use the type-constraint to reduce this operation to a more efficient memo
  // mode. Some of the modes can be reduced to simple bytecode operations
  // inline. Even with the type-constraints, we still need to check the inferred
  // type of the local. Something may have possibly clobbered the local between
  // the type-check and this op.
  switch (mkc) {
    case MK::Int:
      // Always an int, so the key is always an identity mapping
      if (inTy.subtypeOf(BInt)) return reduce(env, bc::CGetL { op.loc1 });
      break;
    case MK::Bool:
      // Always a bool, so the key is the bool cast to an int
      if (inTy.subtypeOf(BBool)) {
        return reduce(env, bc::CGetL { op.loc1 }, bc::CastInt {});
      }
      break;
    case MK::Str:
      // Always a string, so the key is always an identity mapping
      if (inTy.subtypeOf(BStr)) return reduce(env, bc::CGetL { op.loc1 });
      break;
    case MK::IntOrStr:
      // Either an int or string, so the key can be an identity mapping
      if (inTy.subtypeOf(BArrKey)) return reduce(env, bc::CGetL { op.loc1 });
      break;
    case MK::StrOrNull:
      // A nullable string. The key will either be the string or the integer
      // zero.
      if (inTy.subtypeOrNull(BStr)) {
        return reduce(
          env,
          bc::CGetL { op.loc1 },
          bc::Int { 0 },
          bc::IsTypeL { op.loc1, IsTypeOp::Null },
          bc::Select {}
        );
      }
      break;
    case MK::IntOrNull:
      // A nullable int. The key will either be the integer, or the static empty
      // string.
      if (inTy.subtypeOrNull(BInt)) {
        return reduce(
          env,
          bc::CGetL { op.loc1 },
          bc::String { staticEmptyString() },
          bc::IsTypeL { op.loc1, IsTypeOp::Null },
          bc::Select {}
        );
      }
      break;
    case MK::BoolOrNull:
      // A nullable bool. The key will either be 0, 1, or 2.
      if (inTy.subtypeOrNull(BBool)) {
        return reduce(
          env,
          bc::CGetL { op.loc1 },
          bc::CastInt {},
          bc::Int { 2 },
          bc::IsTypeL { op.loc1, IsTypeOp::Null },
          bc::Select {}
        );
      }
      break;
    case MK::Dbl:
      // The double will be converted (losslessly) to an integer.
      if (inTy.subtypeOf(BDbl)) {
        return reduce(env, bc::CGetL { op.loc1 }, bc::DblAsBits {});
      }
      break;
    case MK::DblOrNull:
      // A nullable double. The key will be an integer, or the static empty
      // string.
      if (inTy.subtypeOrNull(BDbl)) {
        return reduce(
          env,
          bc::CGetL { op.loc1 },
          bc::DblAsBits {},
          bc::String { staticEmptyString() },
          bc::IsTypeL { op.loc1, IsTypeOp::Null },
          bc::Select {}
        );
      }
      break;
    case MK::Object:
      // An object. If the object is definitely known to implement IMemoizeParam
      // we can simply call that method, casting the output to ensure its always
      // a string (which is what the generic mode does). If not, it will use the
      // generic mode, which can handle collections or classes which don't
      // implement getInstanceKey.
      if (resolvedCls &&
          resolvedCls->mustBeSubtypeOf(rclsIMemoizeParam) &&
          inTy.subtypeOf(tyIMemoizeParam)) {
        return reduce(
          env,
          bc::CGetL { op.loc1 },
          bc::NullUninit {},
          bc::NullUninit {},
          bc::FCallObjMethodD {
            FCallArgs(0),
            staticEmptyString(),
            ObjMethodOp::NullThrows,
            s_getInstanceKey.get()
          },
          bc::CastString {}
        );
      }
      break;
    case MK::ObjectOrNull:
      // An object or null. We can use the null safe version of a function call
      // when invoking getInstanceKey and then select from the result of that,
      // or the integer 0. This might seem wasteful, but the JIT does a good job
      // inlining away the call in the null case.
      if (resolvedCls &&
          resolvedCls->mustBeSubtypeOf(rclsIMemoizeParam) &&
          inTy.subtypeOf(opt(tyIMemoizeParam))) {
        return reduce(
          env,
          bc::CGetL { op.loc1 },
          bc::NullUninit {},
          bc::NullUninit {},
          bc::FCallObjMethodD {
            FCallArgs(0),
            staticEmptyString(),
            ObjMethodOp::NullSafe,
            s_getInstanceKey.get()
          },
          bc::CastString {},
          bc::Int { 0 },
          bc::IsTypeL { op.loc1, IsTypeOp::Null },
          bc::Select {}
        );
      }
      break;
    case MK::None:
      break;
  }

  // No type constraint, or one that isn't usable. Use the generic memoization
  // scheme which can handle any type:

  // Known constant input: evaluate the key at analysis time.
  if (auto const val = tv(inTy)) {
    auto const key = eval_cell(
      [&]{ return HHVM_FN(serialize_memoize_param)(*val); }
    );
    if (key) return push(env, *key);
  }

  // Integer keys are always mapped to themselves
  if (inTy.subtypeOf(BInt)) return reduce(env, bc::CGetL { op.loc1 });
  if (inTy.subtypeOrNull(BInt)) {
    return reduce(
      env,
      bc::CGetL { op.loc1 },
      bc::String { s_nullMemoKey.get() },
      bc::IsTypeL { op.loc1, IsTypeOp::Null },
      bc::Select {}
    );
  }
  if (inTy.subtypeOf(BBool)) {
    return reduce(
      env,
      bc::String { s_falseMemoKey.get() },
      bc::String { s_trueMemoKey.get() },
      bc::CGetL { op.loc1 },
      bc::Select {}
    );
  }

  // A memo key can be an integer if the input might be an integer, and is a
  // string otherwise. Booleans and nulls are always static strings.
  auto keyTy = [&]{
    if (inTy.subtypeOrNull(BBool)) return TSStr;
    if (inTy.couldBe(BInt)) return union_of(TInt, TStr);
    return TStr;
  }();
  push(env, std::move(keyTy));
}
/*
 * IssetL: true iff the local is non-null.  Reads of $this reduce to
 * BareThis + null test so the BareThis machinery applies.
 */
void in(ISS& env, const bc::IssetL& op) {
  if (locIsThis(env, op.loc1)) {
    return reduce(env,
                  bc::BareThis { BareThisOp::NoNotice },
                  bc::IsTypeC { IsTypeOp::Null },
                  bc::Not {});
  }
  nothrow(env);
  constprop(env);
  auto const loc = locAsCell(env, op.loc1);
  if (loc.subtypeOf(BNull)) return push(env, TFalse);
  if (!loc.couldBe(BNull)) return push(env, TTrue);
  push(env, TBool);
}
2856 void in(ISS& env, const bc::EmptyL& op) {
2857 nothrow(env);
2858 constprop(env);
2859 castBoolImpl(env, locAsCell(env, op.loc1), true);
2862 void in(ISS& env, const bc::EmptyS& op) {
2863 popC(env);
2864 popC(env);
2865 push(env, TBool);
/*
 * IssetS: isset() on a static property.  Uses the tracked type of self::
 * private props when statically known, otherwise the index's view of
 * public statics.  LateInit props force a TBool/TFalse answer since an
 * uninitialized read would throw rather than return null.
 */
void in(ISS& env, const bc::IssetS& op) {
  auto const tcls  = popC(env);
  auto const tname = popC(env);
  auto const vname = tv(tname);
  auto const self  = selfCls(env);

  if (self && tcls.subtypeOf(*self) &&
      vname && vname->m_type == KindOfPersistentString) {
    if (auto const t = selfPropAsCell(env, vname->m_data.pstr)) {
      if (isMaybeLateInitSelfProp(env, vname->m_data.pstr)) {
        if (!classInitMightRaise(env, tcls)) constprop(env);
        return push(env, t->subtypeOf(BBottom) ? TFalse : TBool);
      }
      if (t->subtypeOf(BNull)) {
        if (!classInitMightRaise(env, tcls)) constprop(env);
        return push(env, TFalse);
      }
      if (!t->couldBe(BNull)) {
        if (!classInitMightRaise(env, tcls)) constprop(env);
        return push(env, TTrue);
      }
    }
  }

  auto const indexTy = env.index.lookup_public_static(env.ctx, tcls, tname);
  if (indexTy.subtypeOf(BInitCell)) {
    // See the comments in CGetS about constprop for public statics.
    if (options.HardConstProp && !classInitMightRaise(env, tcls)) {
      constprop(env);
    }
    if (env.index.lookup_public_static_maybe_late_init(tcls, tname)) {
      return push(env, indexTy.subtypeOf(BBottom) ? TFalse : TBool);
    }
    if (indexTy.subtypeOf(BNull)) { return push(env, TFalse); }
    if (!indexTy.couldBe(BNull)) { return push(env, TTrue); }
  }

  push(env, TBool);
}
2908 void in(ISS& env, const bc::EmptyG&) { popC(env); push(env, TBool); }
2909 void in(ISS& env, const bc::IssetG&) { popC(env); push(env, TBool); }
2911 void isTypeImpl(ISS& env, const Type& locOrCell, const Type& test) {
2912 if (locOrCell.subtypeOf(test)) return push(env, TTrue);
2913 if (!locOrCell.couldBe(test)) return push(env, TFalse);
2914 push(env, TBool);
2917 void isTypeObj(ISS& env, const Type& ty) {
2918 if (!ty.couldBe(BObj)) return push(env, TFalse);
2919 if (ty.subtypeOf(BObj)) {
2920 auto const incompl = objExact(
2921 env.index.builtin_class(s_PHP_Incomplete_Class.get()));
2922 if (!ty.couldBe(incompl)) return push(env, TTrue);
2923 if (ty.subtypeOf(incompl)) return push(env, TFalse);
2925 push(env, TBool);
2928 void isTypeArrLike(ISS& env, const Type& ty) {
2929 if (ty.subtypeOf(BArr | BVec | BDict | BKeyset | BClsMeth)) {
2930 return push(env, TTrue);
2932 if (!ty.couldBe(BArr | BVec | BDict | BKeyset | BClsMeth)) {
2933 return push(env, TFalse);
2935 push(env, TBool);
2938 namespace {
/*
 * Handle is-type checks on types that may contain a ClsMeth pointer when
 * ClsMeth is "compatible" with arrays (EvalIsCompatibleClsMethType): a
 * ClsMeth then also answers true to is_vec/is_varray (HackArrDVArrs mode)
 * or is_array/is_varray (legacy mode).  Returns true if it pushed a result,
 * false if the caller should handle the op normally.
 */
bool isCompactTypeClsMeth(ISS& env, IsTypeOp op, const Type& t) {
  assertx(RuntimeOption::EvalEmitClsMethPointers);
  if (t.couldBe(BClsMeth)) {
    if (RuntimeOption::EvalHackArrDVArrs) {
      if (op == IsTypeOp::Vec || op == IsTypeOp::VArray) {
        // Entirely ClsMeth-or-matching-array: definitely true.  Could still
        // be the matching array type: unknown.  Otherwise only the ClsMeth
        // component can satisfy the check.
        if (t.subtypeOf(
              op == IsTypeOp::Vec ? BClsMeth | BVec : BClsMeth | BVArr)) {
          push(env, TTrue);
        } else if (t.couldBe(op == IsTypeOp::Vec ? BVec : BVArr)) {
          push(env, TBool);
        } else {
          isTypeImpl(env, t, TClsMeth);
        }
        return true;
      }
    } else {
      if (op == IsTypeOp::Arr || op == IsTypeOp::VArray) {
        if (t.subtypeOf(
              op == IsTypeOp::VArray ? BClsMeth | BVArr : BClsMeth | BArr)) {
          push(env, TTrue);
        } else if (t.couldBe(op == IsTypeOp::VArray ? BVArr : BArr)) {
          push(env, TBool);
        } else {
          isTypeImpl(env, t, TClsMeth);
        }
        return true;
      }
    }
  }
  return false;
}
/*
 * Shared implementation for is-type checks on a local (IsTypeL).  Flags the
 * op nothrow/constprop when the local is initialized and the check can't
 * raise, then dispatches on the specific IsTypeOp.
 */
template<class Op>
void isTypeLImpl(ISS& env, const Op& op) {
  auto const loc = locAsCell(env, op.loc1);
  if (!locCouldBeUninit(env, op.loc1) && !is_type_might_raise(op.subop2, loc)) {
    constprop(env);
    nothrow(env);
  }

  // ClsMeth-compatibility may fully answer the check.
  if (RuntimeOption::EvalIsCompatibleClsMethType &&
      isCompactTypeClsMeth(env, op.subop2, loc)) {
    return;
  }

  switch (op.subop2) {
    case IsTypeOp::Scalar: return push(env, TBool);
    case IsTypeOp::Obj: return isTypeObj(env, loc);
    case IsTypeOp::ArrLike: return isTypeArrLike(env, loc);
    default: return isTypeImpl(env, loc, type_of_istype(op.subop2));
  }
}
/*
 * Shared implementation for is-type checks on the top of the stack
 * (IsTypeC).  Mirrors isTypeLImpl but pops its operand.
 */
template<class Op>
void isTypeCImpl(ISS& env, const Op& op) {
  auto const t1 = popC(env);
  if (!is_type_might_raise(op.subop1, t1)) {
    constprop(env);
    nothrow(env);
  }

  // ClsMeth-compatibility may fully answer the check.
  if (RuntimeOption::EvalIsCompatibleClsMethType &&
      isCompactTypeClsMeth(env, op.subop1, t1)) {
    return;
  }

  switch (op.subop1) {
    case IsTypeOp::Scalar: return push(env, TBool);
    case IsTypeOp::Obj: return isTypeObj(env, t1);
    case IsTypeOp::ArrLike: return isTypeArrLike(env, t1);
    default: return isTypeImpl(env, t1, type_of_istype(op.subop1));
  }
}
3014 void in(ISS& env, const bc::IsTypeC& op) { isTypeCImpl(env, op); }
3015 void in(ISS& env, const bc::IsTypeL& op) { isTypeLImpl(env, op); }
/*
 * instanceof against a statically-named class.  Folds to a constant when
 * the operand's relationship to the class is known, and rewrites a
 * nullable-but-otherwise-passing operand into a plain null check.
 */
void in(ISS& env, const bc::InstanceOfD& op) {
  auto t1 = topC(env);
  // Note: InstanceOfD can do autoload if the type might be a type
  // alias, so it's not nothrow unless we know it's an object type.
  if (auto const rcls = env.index.resolve_class(env.ctx, op.str1)) {
    auto result = [&] (const Type& r) {
      nothrow(env);
      if (r != TBool) constprop(env);
      popC(env);
      push(env, r);
    };
    if (!interface_supports_non_objects(rcls->name())) {
      auto testTy = subObj(*rcls);
      if (t1.subtypeOf(testTy)) return result(TTrue);
      if (!t1.couldBe(testTy)) return result(TFalse);
      if (is_opt(t1)) {
        t1 = unopt(std::move(t1));
        // The null component is the only way to fail: become !is_null.
        if (t1.subtypeOf(testTy)) {
          return reduce(env, bc::IsTypeC { IsTypeOp::Null }, bc::Not {});
        }
      }
      return result(TBool);
    }
  }
  popC(env);
  push(env, TBool);
}
/*
 * Dynamic instanceof.  A constant string or exactly-known object class on
 * top of the stack reduces to the cheaper InstanceOfD form.
 */
void in(ISS& env, const bc::InstanceOf& /*op*/) {
  auto const t1 = topC(env);
  auto const v1 = tv(t1);
  // Constant class-name operand: become InstanceOfD.
  if (v1 && v1->m_type == KindOfPersistentString) {
    return reduce(env, bc::PopC {},
                       bc::InstanceOfD { v1->m_data.pstr });
  }

  // An exactly-known object type names its class just as well as a constant.
  if (t1.subtypeOf(BObj) && is_specialized_obj(t1)) {
    auto const dobj = dobj_of(t1);
    switch (dobj.type) {
    case DObj::Sub:
      break;
    case DObj::Exact:
      return reduce(env, bc::PopC {},
                         bc::InstanceOfD { dobj.cls.name() });
    }
  }

  popC(env);
  popC(env);
  push(env, TBool);
}
3069 namespace {
3071 bool isValidTypeOpForIsAs(const IsTypeOp& op) {
3072 switch (op) {
3073 case IsTypeOp::Null:
3074 case IsTypeOp::Bool:
3075 case IsTypeOp::Int:
3076 case IsTypeOp::Dbl:
3077 case IsTypeOp::Str:
3078 case IsTypeOp::Obj:
3079 return true;
3080 case IsTypeOp::Res:
3081 case IsTypeOp::Arr:
3082 case IsTypeOp::Vec:
3083 case IsTypeOp::Dict:
3084 case IsTypeOp::Keyset:
3085 case IsTypeOp::VArray:
3086 case IsTypeOp::DArray:
3087 case IsTypeOp::ArrLike:
3088 case IsTypeOp::Scalar:
3089 case IsTypeOp::ClsMeth:
3090 case IsTypeOp::Func:
3091 return false;
3093 not_reached();
3096 template<bool asExpression>
3097 void isAsTypeStructImpl(ISS& env, SArray inputTS) {
3098 auto const resolvedTS = resolveTSStatically(env, inputTS, env.ctx.cls, true);
3099 auto const ts = resolvedTS ? resolvedTS : inputTS;
3100 auto const t = topC(env, 1); // operand to is/as
3102 bool may_raise = true;
3103 auto result = [&] (
3104 const Type& out,
3105 const folly::Optional<Type>& test = folly::none
3107 if (asExpression && out.subtypeOf(BTrue)) {
3108 return reduce(env, bc::PopC {});
3110 auto const location = topStkEquiv(env, 1);
3111 popC(env); // type structure
3112 popC(env); // operand to is/as
3113 if (!asExpression) {
3114 constprop(env);
3115 if (!may_raise) nothrow(env);
3116 return push(env, out);
3118 if (out.subtypeOf(BFalse)) {
3119 push(env, t);
3120 return unreachable(env);
3123 assertx(out == TBool);
3124 if (!test) return push(env, t);
3125 auto const newT = intersection_of(*test, t);
3126 if (newT == TBottom || !refineLocation(env, location, [&] (Type t) {
3127 auto ret = intersection_of(*test, t);
3128 if (test->couldBe(BInitNull) && t.couldBe(BUninit)) {
3129 ret |= TUninit;
3131 return ret;
3132 })) {
3133 unreachable(env);
3135 return push(env, newT);
3138 auto check = [&] (
3139 const folly::Optional<Type> type,
3140 const folly::Optional<Type> deopt = folly::none
3142 if (!type || is_type_might_raise(*type, t)) return result(TBool);
3143 auto test = type.value();
3144 if (t.couldBe(BClsMeth)) {
3145 if (RuntimeOption::EvalHackArrDVArrs) {
3146 if (test == TVec) {
3147 if (t.subtypeOf(BClsMeth | BVec)) return result(TTrue);
3148 else if (t.couldBe(BVec)) return result(TBool);
3149 else test = TClsMeth;
3150 } else if (test == TVArr) {
3151 if (t.subtypeOf(BClsMeth | BVArr)) return result(TTrue);
3152 else if (t.couldBe(BVArr)) return result(TBool);
3153 else test = TClsMeth;
3155 } else {
3156 if (test == TVArr) {
3157 if (t.subtypeOf(BClsMeth | BVArr)) return result(TTrue);
3158 else if (t.couldBe(BVArr)) return result(TBool);
3159 else test = TClsMeth;
3160 } else if (test == TArr) {
3161 if (t.subtypeOf(BClsMeth | BArr)) return result(TTrue);
3162 else if (t.couldBe(BArr)) return result(TBool);
3163 else test = TClsMeth;
3167 if (t.subtypeOf(test)) return result(TTrue);
3168 if (!t.couldBe(test) && (!deopt || !t.couldBe(deopt.value()))) {
3169 return result(TFalse);
3171 auto const op = type_to_istypeop(test);
3172 if (asExpression || !op || !isValidTypeOpForIsAs(op.value())) {
3173 return result(TBool, test);
3175 return reduce(env, bc::PopC {}, bc::IsTypeC { *op });
3178 auto const is_nullable_ts = is_ts_nullable(ts);
3179 auto const is_definitely_null = t.subtypeOf(BNull);
3180 auto const is_definitely_not_null = !t.couldBe(BNull);
3182 if (is_nullable_ts && is_definitely_null) return result(TTrue);
3184 auto const ts_type = type_of_type_structure(ts);
3186 if (is_nullable_ts && !is_definitely_not_null && ts_type == folly::none) {
    // Ts is nullable and we know that t could be null, but we don't know for
3188 // Also we didn't get a type out of the type structure
3189 return result(TBool);
3192 if (!asExpression) {
3193 if (ts_type && !is_type_might_raise(*ts_type, t)) may_raise = false;
3195 switch (get_ts_kind(ts)) {
3196 case TypeStructure::Kind::T_int:
3197 case TypeStructure::Kind::T_bool:
3198 case TypeStructure::Kind::T_float:
3199 case TypeStructure::Kind::T_string:
3200 case TypeStructure::Kind::T_num:
3201 case TypeStructure::Kind::T_arraykey:
3202 case TypeStructure::Kind::T_keyset:
3203 case TypeStructure::Kind::T_void:
3204 case TypeStructure::Kind::T_null:
3205 return check(ts_type);
3206 case TypeStructure::Kind::T_tuple:
3207 return RuntimeOption::EvalHackArrCompatTypeHintNotices
3208 ? check(ts_type, TDArr)
3209 : check(ts_type);
3210 case TypeStructure::Kind::T_shape:
3211 return RuntimeOption::EvalHackArrCompatTypeHintNotices
3212 ? check(ts_type, TVArr)
3213 : check(ts_type);
3214 case TypeStructure::Kind::T_dict:
3215 return check(ts_type, TDArr);
3216 case TypeStructure::Kind::T_vec:
3217 return check(ts_type, TVArr);
3218 case TypeStructure::Kind::T_nothing:
3219 case TypeStructure::Kind::T_noreturn:
3220 return result(TFalse);
3221 case TypeStructure::Kind::T_mixed:
3222 case TypeStructure::Kind::T_dynamic:
3223 return result(TTrue);
3224 case TypeStructure::Kind::T_nonnull:
3225 if (is_definitely_null) return result(TFalse);
3226 if (is_definitely_not_null) return result(TTrue);
3227 if (!asExpression) {
3228 return reduce(env,
3229 bc::PopC {},
3230 bc::IsTypeC { IsTypeOp::Null },
3231 bc::Not {});
3233 return result(TBool);
3234 case TypeStructure::Kind::T_class:
3235 case TypeStructure::Kind::T_interface:
3236 case TypeStructure::Kind::T_xhp: {
3237 if (asExpression) return result(TBool);
3238 auto clsname = get_ts_classname(ts);
3239 auto const rcls = env.index.resolve_class(env.ctx, clsname);
3240 if (!rcls || !rcls->resolved() || (ts->exists(s_generic_types) &&
3241 (rcls->cls()->hasReifiedGenerics ||
3242 !isTSAllWildcards(ts)))) {
3243 // If it is a reified class or has non wildcard generics,
3244 // we need to bail
3245 return result(TBool);
3247 return reduce(env, bc::PopC {}, bc::InstanceOfD { clsname });
3249 case TypeStructure::Kind::T_unresolved: {
3250 if (asExpression) return result(TBool);
3251 auto classname = get_ts_classname(ts);
3252 auto const has_generics = ts->exists(s_generic_types);
3253 if (!has_generics && classname->isame(s_this.get())) {
3254 return reduce(env, bc::PopC {}, bc::IsLateBoundCls {});
3256 auto const rcls = env.index.resolve_class(env.ctx, classname);
3257 // We can only reduce to instance of if we know for sure that this class
3258 // can be resolved since instanceof undefined class does not throw
3259 if (!rcls || !rcls->resolved() || rcls->cls()->attrs & AttrEnum) {
3260 return result(TBool);
3262 if (has_generics &&
3263 (rcls->cls()->hasReifiedGenerics || !isTSAllWildcards(ts))) {
3264 // If it is a reified class or has non wildcard generics,
3265 // we need to bail
3266 return result(TBool);
3268 return reduce(env, bc::PopC {}, bc::InstanceOfD { rcls->name() });
3270 case TypeStructure::Kind::T_enum:
3271 case TypeStructure::Kind::T_resource:
3272 case TypeStructure::Kind::T_vec_or_dict:
3273 case TypeStructure::Kind::T_arraylike:
3274 // TODO(T29232862): implement
3275 return result(TBool);
3276 case TypeStructure::Kind::T_typeaccess:
3277 case TypeStructure::Kind::T_array:
3278 case TypeStructure::Kind::T_darray:
3279 case TypeStructure::Kind::T_varray:
3280 case TypeStructure::Kind::T_varray_or_darray:
3281 case TypeStructure::Kind::T_reifiedtype:
3282 return result(TBool);
3283 case TypeStructure::Kind::T_fun:
3284 case TypeStructure::Kind::T_typevar:
3285 case TypeStructure::Kind::T_trait:
3286 // We will error on these at the JIT
3287 return result(TBool);
3290 not_reached();
3293 bool canReduceToDontResolveList(SArray tsList, bool checkArrays);
3295 bool canReduceToDontResolve(SArray ts, bool checkArrays) {
3296 auto const checkGenerics = [&](SArray arr) {
3297 if (!ts->exists(s_generic_types)) return true;
3298 return canReduceToDontResolveList(get_ts_generic_types(ts), true);
3300 switch (get_ts_kind(ts)) {
3301 case TypeStructure::Kind::T_int:
3302 case TypeStructure::Kind::T_bool:
3303 case TypeStructure::Kind::T_float:
3304 case TypeStructure::Kind::T_string:
3305 case TypeStructure::Kind::T_num:
3306 case TypeStructure::Kind::T_arraykey:
3307 case TypeStructure::Kind::T_void:
3308 case TypeStructure::Kind::T_null:
3309 case TypeStructure::Kind::T_nothing:
3310 case TypeStructure::Kind::T_noreturn:
3311 case TypeStructure::Kind::T_mixed:
3312 case TypeStructure::Kind::T_dynamic:
3313 case TypeStructure::Kind::T_nonnull:
3314 case TypeStructure::Kind::T_resource:
3315 return true;
3316 // Following have generic parameters that may need to be resolved
3317 case TypeStructure::Kind::T_dict:
3318 case TypeStructure::Kind::T_vec:
3319 case TypeStructure::Kind::T_keyset:
3320 case TypeStructure::Kind::T_vec_or_dict:
3321 case TypeStructure::Kind::T_arraylike:
3322 return !checkArrays || checkGenerics(ts);
3323 case TypeStructure::Kind::T_class:
3324 case TypeStructure::Kind::T_interface:
3325 case TypeStructure::Kind::T_xhp:
3326 case TypeStructure::Kind::T_enum:
3327 return isTSAllWildcards(ts) || checkGenerics(ts);
3328 case TypeStructure::Kind::T_tuple:
3329 return canReduceToDontResolveList(get_ts_elem_types(ts), checkArrays);
3330 case TypeStructure::Kind::T_shape: {
3331 auto result = true;
3332 IterateV(
3333 get_ts_fields(ts),
3334 [&](TypedValue v) {
3335 assertx(isArrayLikeType(v.m_type));
3336 auto const arr = v.m_data.parr;
3337 if (arr->exists(s_is_cls_cns)) {
3338 result = false;
3339 return true; // short circuit
3341 result &= canReduceToDontResolve(get_ts_value(arr), checkArrays);
3342 // when result is false, we can short circuit
3343 return !result;
3346 return result;
3348 case TypeStructure::Kind::T_fun: {
3349 auto const variadicType = get_ts_variadic_type_opt(ts);
3350 return canReduceToDontResolve(get_ts_return_type(ts), checkArrays)
3351 && canReduceToDontResolveList(get_ts_param_types(ts), checkArrays)
3352 && (!variadicType || canReduceToDontResolve(variadicType, checkArrays));
3354 // Following needs to be resolved
3355 case TypeStructure::Kind::T_unresolved:
3356 case TypeStructure::Kind::T_typeaccess:
3357 // Following cannot be used in is/as expressions, we need to error on them
3358 // Currently erroring happens as a part of the resolving phase,
3359 // so keep resolving them
3360 case TypeStructure::Kind::T_array:
3361 case TypeStructure::Kind::T_darray:
3362 case TypeStructure::Kind::T_varray:
3363 case TypeStructure::Kind::T_varray_or_darray:
3364 case TypeStructure::Kind::T_reifiedtype:
3365 case TypeStructure::Kind::T_typevar:
3366 case TypeStructure::Kind::T_trait:
3367 return false;
3369 not_reached();
3372 bool canReduceToDontResolveList(SArray tsList, bool checkArrays) {
3373 auto result = true;
3374 IterateV(
3375 tsList,
3376 [&](TypedValue v) {
3377 assertx(isArrayLikeType(v.m_type));
3378 result &= canReduceToDontResolve(v.m_data.parr, checkArrays);
3379 // when result is false, we can short circuit
3380 return !result;
3383 return result;
3386 } // namespace
3388 void in(ISS& env, const bc::IsLateBoundCls& op) {
3389 auto const cls = env.ctx.cls;
3390 if (cls && !(cls->attrs & AttrTrait)) effect_free(env);
3391 popC(env);
3392 return push(env, TBool);
3395 void in(ISS& env, const bc::IsTypeStructC& op) {
3396 auto const requiredTSType = RuntimeOption::EvalHackArrDVArrs ? BDict : BDArr;
3397 if (!topC(env).couldBe(requiredTSType)) {
3398 popC(env);
3399 popC(env);
3400 return unreachable(env);
3402 auto const a = tv(topC(env));
3403 if (!a || !isValidTSType(*a, false)) {
3404 popC(env);
3405 popC(env);
3406 return push(env, TBool);
3408 if (op.subop1 == TypeStructResolveOp::Resolve &&
3409 canReduceToDontResolve(a->m_data.parr, false)) {
3410 return reduce(env, bc::IsTypeStructC { TypeStructResolveOp::DontResolve });
3412 isAsTypeStructImpl<false>(env, a->m_data.parr);
3415 void in(ISS& env, const bc::ThrowAsTypeStructException& op) {
3416 popC(env);
3417 popC(env);
3418 unreachable(env);
3419 return;
// Combine/resolve a stack of op.arg1 type structures. Where the single input
// is statically resolvable the instruction is reduced away; otherwise we fall
// back to pushing a generic dict/darray type.
void in(ISS& env, const bc::CombineAndResolveTypeStruct& op) {
  assertx(op.arg1 > 0);
  auto valid = true;
  auto const requiredTSType = RuntimeOption::EvalHackArrDVArrs ? BDict : BDArr;
  auto const first = tv(topC(env));
  if (first && isValidTSType(*first, false)) {
    auto const ts = first->m_data.parr;
    // Optimize single input that does not need any combination
    if (op.arg1 == 1) {
      // Already fully reducible: the resolve is a no-op.
      if (canReduceToDontResolve(ts, true)) return reduce(env);
      // Otherwise try to resolve at compile time and replace the input with
      // the resolved literal.
      if (auto const resolved = resolveTSStatically(env, ts, env.ctx.cls,
                                                    true)) {
        return RuntimeOption::EvalHackArrDVArrs
          ? reduce(env, bc::PopC {}, bc::Dict { resolved })
          : reduce(env, bc::PopC {}, bc::Array { resolved });
    // Optimize double input that needs a single combination and is of the
    // form ?T, @T or ~T
    if (op.arg1 == 2 && get_ts_kind(ts) == TypeStructure::Kind::T_reifiedtype) {
      // Rebuild the wrapper flags (like/nullable/soft) directly on top of
      // the inner type structure via AddElemC.
      BytecodeVec instrs { bc::PopC {} };
      auto const tv_true = gen_constant(make_tv<KindOfBoolean>(true));
      if (ts->exists(s_like.get())) {
        instrs.push_back(gen_constant(make_tv<KindOfString>(s_like.get())));
        instrs.push_back(tv_true);
        instrs.push_back(bc::AddElemC {});
      if (ts->exists(s_nullable.get())) {
        instrs.push_back(gen_constant(make_tv<KindOfString>(s_nullable.get())));
        instrs.push_back(tv_true);
        instrs.push_back(bc::AddElemC {});
      if (ts->exists(s_soft.get())) {
        instrs.push_back(gen_constant(make_tv<KindOfString>(s_soft.get())));
        instrs.push_back(tv_true);
        instrs.push_back(bc::AddElemC {});
      return reduce(env, std::move(instrs));
  // Generic case: every input must at least possibly be a type structure.
  for (int i = 0; i < op.arg1; ++i) {
    auto const t = popC(env);
    valid &= t.couldBe(requiredTSType);
  if (!valid) return unreachable(env);
  nothrow(env);
  push(env, Type{requiredTSType});
3472 void in(ISS& env, const bc::RecordReifiedGeneric& op) {
3473 // TODO(T31677864): implement real optimizations
3474 auto const t = popC(env);
3475 auto const required = RuntimeOption::EvalHackArrDVArrs ? BVec : BVArr;
3476 if (!t.couldBe(required)) return unreachable(env);
3477 if (t.subtypeOf(required)) nothrow(env);
3478 push(env, RuntimeOption::EvalHackArrDVArrs ? TSVec : TSVArr);
void in(ISS& env, const bc::CheckReifiedGenericMismatch& op) {
  // TODO(T31677864): implement real optimizations
  // Consumes the generics list from the stack and pushes nothing.
  popC(env);
3486 namespace {
 * If the value on the top of the stack is known to be equivalent to the local
 * it's being moved/copied to, return folly::none without modifying any
 * state. Otherwise, pop the stack value, perform the set, and return a pair
 * giving the value's type, and any other local it's known to be equivalent to.
template <typename Set>
folly::Optional<std::pair<Type, LocalId>> moveToLocImpl(ISS& env,
                                                        const Set& op) {
  // Peephole: CGetL2 <loc> ... Concat ; Set <loc> ==> SetOpL ConcatEqual.
  if (auto const prev = last_op(env, 1)) {
    if (prev->op == Op::CGetL2 &&
        prev->CGetL2.loc1 == op.loc1 &&
        last_op(env)->op == Op::Concat) {
      rewind(env, 2);
      reduce(env, bc::SetOpL { op.loc1, SetOpOp::ConcatEqual });
      return folly::none;
  auto equivLoc = topStkEquiv(env);
  // If the local could be a Ref, don't record equality because the stack
  // element and the local won't actually have the same type.
  if (equivLoc == StackThisId && env.state.thisLoc != NoLocalId) {
    if (env.state.thisLoc == op.loc1 ||
        locsAreEquiv(env, env.state.thisLoc, op.loc1)) {
      return folly::none;
    } else {
      equivLoc = env.state.thisLoc;
  if (!is_volatile_local(env.ctx.func, op.loc1)) {
    if (equivLoc <= MaxLocalId) {
      if (equivLoc == op.loc1 ||
          locsAreEquiv(env, equivLoc, op.loc1)) {
        // We allow equivalency to ignore Uninit, so we need to check
        // the types here.
        if (peekLocRaw(env, op.loc1) == topC(env)) {
          return folly::none;
    } else if (equivLoc == NoLocalId) {
      equivLoc = op.loc1;
    if (!any(env.collect.opts & CollectionOpts::Speculating)) {
      effect_free(env);
  } else {
    // Volatile locals are excluded from equivalence tracking.
    equivLoc = NoLocalId;
  nothrow(env);
  auto val = popC(env);
  setLoc(env, op.loc1, val);
  if (equivLoc == StackThisId) {
    assertx(env.state.thisLoc == NoLocalId);
    equivLoc = env.state.thisLoc = op.loc1;
  if (equivLoc == StackDupId) {
    setStkLocal(env, op.loc1);
  } else if (equivLoc != op.loc1 && equivLoc != NoLocalId) {
    addLocEquiv(env, op.loc1, equivLoc);
  return { std::make_pair(std::move(val), equivLoc) };
3554 void in(ISS& env, const bc::PopL& op) {
3555 // If the same value is already in the local, do nothing but pop
3556 // it. Otherwise, the set has been done by moveToLocImpl.
3557 if (!moveToLocImpl(env, op)) return reduce(env, bc::PopC {});
3560 void in(ISS& env, const bc::SetL& op) {
3561 // If the same value is already in the local, do nothing because SetL keeps
3562 // the value on the stack. If it isn't, we need to push it back onto the stack
3563 // because moveToLocImpl popped it.
3564 if (auto p = moveToLocImpl(env, op)) {
3565 push(env, std::move(p->first), p->second);
3566 } else {
3567 reduce(env);
3571 void in(ISS& env, const bc::SetG&) {
3572 auto t1 = popC(env);
3573 popC(env);
3574 push(env, std::move(t1));
3577 void in(ISS& env, const bc::SetS& op) {
3578 auto const t1 = popC(env);
3579 auto const tcls = popC(env);
3580 auto const tname = popC(env);
3581 auto const vname = tv(tname);
3582 auto const self = selfCls(env);
3584 if (vname && vname->m_type == KindOfPersistentString &&
3585 canSkipMergeOnConstProp(env, tcls, vname->m_data.pstr)) {
3586 unreachable(env);
3587 push(env, TBottom);
3588 return;
3591 if (!self || tcls.couldBe(*self)) {
3592 if (vname && vname->m_type == KindOfPersistentString) {
3593 mergeSelfProp(env, vname->m_data.pstr, t1);
3594 } else {
3595 mergeEachSelfPropRaw(env, [&] (Type) { return t1; });
3599 env.collect.publicSPropMutations.merge(env.index, env.ctx, tcls, tname, t1);
3601 push(env, std::move(t1));
void in(ISS& env, const bc::SetOpL& op) {
  auto const t1 = popC(env);
  auto const v1 = tv(t1);
  auto const loc = locAsCell(env, op.loc1);
  auto const locVal = tv(loc);
  // When both the local and the rhs are known constants, evaluate the
  // set-op directly on copies of the values.
  if (v1 && locVal) {
    // Can't constprop at this eval_cell, because of the effects on
    // locals.
    auto resultTy = eval_cell([&] {
      Cell c = *locVal;
      Cell rhs = *v1;
      setopBody(&c, op.subop2, &rhs);
      return c;
    if (!resultTy) resultTy = TInitCell;
    // We may have inferred a TSStr or TSArr with a value here, but
    // at runtime it will not be static. For now just throw that
    // away. TODO(#3696042): should be able to loosen_staticness here.
    if (resultTy->subtypeOf(BStr)) resultTy = TStr;
    else if (resultTy->subtypeOf(BArr)) resultTy = TArr;
    else if (resultTy->subtypeOf(BVec)) resultTy = TVec;
    else if (resultTy->subtypeOf(BDict)) resultTy = TDict;
    else if (resultTy->subtypeOf(BKeyset)) resultTy = TKeyset;
    setLoc(env, op.loc1, *resultTy);
    push(env, *resultTy);
    return;
  // Otherwise fall back to the generic set-op type computation.
  auto resultTy = typeSetOp(op.subop2, loc, t1);
  setLoc(env, op.loc1, resultTy);
  push(env, std::move(resultTy));
3639 void in(ISS& env, const bc::SetOpG&) {
3640 popC(env); popC(env);
3641 push(env, TInitCell);
3644 void in(ISS& env, const bc::SetOpS& op) {
3645 popC(env);
3646 auto const tcls = popC(env);
3647 auto const tname = popC(env);
3648 auto const vname = tv(tname);
3649 auto const self = selfCls(env);
3651 if (vname && vname->m_type == KindOfPersistentString &&
3652 canSkipMergeOnConstProp(env, tcls, vname->m_data.pstr)) {
3653 unreachable(env);
3654 push(env, TBottom);
3655 return;
3658 if (!self || tcls.couldBe(*self)) {
3659 if (vname && vname->m_type == KindOfPersistentString) {
3660 mergeSelfProp(env, vname->m_data.pstr, TInitCell);
3661 } else {
3662 killSelfProps(env);
3666 env.collect.publicSPropMutations.merge(
3667 env.index, env.ctx, tcls, tname, TInitCell
3670 push(env, TInitCell);
3673 void in(ISS& env, const bc::IncDecL& op) {
3674 auto loc = locAsCell(env, op.loc1);
3675 auto newT = typeIncDec(op.subop2, loc);
3676 auto const pre = isPre(op.subop2);
3678 // If it's a non-numeric string, this may cause it to exceed the max length.
3679 if (!locCouldBeUninit(env, op.loc1) &&
3680 !loc.couldBe(BStr)) {
3681 nothrow(env);
3684 if (!pre) push(env, std::move(loc));
3685 setLoc(env, op.loc1, newT);
3686 if (pre) push(env, std::move(newT));
3689 void in(ISS& env, const bc::IncDecG&) { popC(env); push(env, TInitCell); }
3691 void in(ISS& env, const bc::IncDecS& op) {
3692 auto const tcls = popC(env);
3693 auto const tname = popC(env);
3694 auto const vname = tv(tname);
3695 auto const self = selfCls(env);
3697 if (vname && vname->m_type == KindOfPersistentString &&
3698 canSkipMergeOnConstProp(env, tcls, vname->m_data.pstr)) {
3699 unreachable(env);
3700 push(env, TBottom);
3701 return;
3704 if (!self || tcls.couldBe(*self)) {
3705 if (vname && vname->m_type == KindOfPersistentString) {
3706 mergeSelfProp(env, vname->m_data.pstr, TInitCell);
3707 } else {
3708 killSelfProps(env);
3712 env.collect.publicSPropMutations.merge(
3713 env.index, env.ctx, tcls, tname, TInitCell
3716 push(env, TInitCell);
3719 void in(ISS& env, const bc::UnsetL& op) {
3720 if (locRaw(env, op.loc1).subtypeOf(TUninit)) {
3721 return reduce(env);
3723 if (any(env.collect.opts & CollectionOpts::Speculating)) {
3724 nothrow(env);
3725 } else {
3726 effect_free(env);
3728 setLocRaw(env, op.loc1, TUninit);
3731 void in(ISS& env, const bc::UnsetG& /*op*/) {
3732 auto const t1 = popC(env);
3733 if (!t1.couldBe(BObj | BRes)) nothrow(env);
// Try to optimize away the runtime checks a call performs (inout-ness
// enforcement and async-eager-return support). Returns true when the call
// instruction was reduced and no further processing should happen.
template<class FCallWithFCA>
bool fcallOptimizeChecks(
  ISS& env,
  const FCallArgs& fca,
  const res::Func& func,
  FCallWithFCA fcallWithFCA
  auto const numOut = env.index.lookup_num_inout_params(env.ctx, func);
  if (fca.enforceInOut() && numOut == fca.numRets - 1) {
    bool match = true;
    for (auto i = 0; i < fca.numArgs; ++i) {
      auto const kind = env.index.lookup_param_prep(env.ctx, func, i);
      if (kind == PrepKind::Unknown) {
        match = false;
        break;
      if (kind != (fca.isInOut(i) ? PrepKind::InOut : PrepKind::Val)) {
        // The function/method may not exist, in which case we should raise a
        // different error. Just defer the checks to the runtime.
        if (!func.exactFunc()) return false;
        // inout mismatch
        auto const exCls = makeStaticString("InvalidArgumentException");
        auto const err = makeStaticString(formatParamInOutMismatch(
          func.name()->data(), i, !fca.isInOut(i)));
        // Replace the call with code that constructs, locks and throws the
        // exception directly.
        reduce(
          env,
          bc::NewObjD { exCls },
          bc::Dup {},
          bc::NullUninit {},
          bc::NullUninit {},
          bc::String { err },
          bc::FCallCtor { FCallArgs(1), staticEmptyString() },
          bc::PopC {},
          bc::LockObj {},
          bc::Throw {}
        return true;
    if (match) {
      // Optimize away the runtime inout-ness check.
      reduce(env, fcallWithFCA(FCallArgs(
        fca.flags, fca.numArgs, fca.numRets, nullptr, fca.asyncEagerTarget,
        fca.lockWhileUnwinding)));
      return true;
  // Infer whether the callee supports async eager return.
  if (fca.asyncEagerTarget != NoBlockId &&
      !fca.supportsAsyncEagerReturn()) {
    auto const status = env.index.supports_async_eager_return(func);
    if (status) {
      auto newFCA = fca;
      if (*status) {
        // Callee supports async eager return.
        newFCA.flags = static_cast<FCallArgs::Flags>(
          newFCA.flags | FCallArgs::SupportsAsyncEagerReturn);
      } else {
        // Callee doesn't support async eager return.
        newFCA.asyncEagerTarget = NoBlockId;
      reduce(env, fcallWithFCA(std::move(newFCA)));
      return true;
  return false;
// Attempt to constant-fold a call to a statically-known callee. Returns true
// when the call was replaced with its constant result.
bool fcallTryFold(
  ISS& env,
  const FCallArgs& fca,
  const res::Func& func,
  Type context,
  bool maybeDynamic,
  uint32_t numExtraInputs
  auto const foldableFunc = func.exactFunc();
  if (!foldableFunc) return false;
  if (!canFold(env, foldableFunc, fca, context, maybeDynamic)) {
    return false;
  assertx(!fca.hasUnpack() && !fca.hasGenerics() && fca.numRets == 1);
  assertx(options.ConstantFoldBuiltins);
  auto tried_lookup = false;
  auto ty = [&] () {
    // Foldable builtins can be evaluated directly.
    if (foldableFunc->attrs & AttrBuiltin &&
        foldableFunc->attrs & AttrIsFoldable) {
      auto ret = const_fold(env, fca.numArgs, numExtraInputs, *foldableFunc,
                            false);
      return ret ? *ret : TBottom;
    // Otherwise collect the (scalarized) argument types and ask the index
    // for the foldable return type; bail with TBottom on any non-scalar
    // argument the callee depends on.
    CompactVector<Type> args(fca.numArgs);
    auto const firstArgPos = numExtraInputs + fca.numInputs() - 1;
    for (auto i = uint32_t{0}; i < fca.numArgs; ++i) {
      auto const& arg = topT(env, firstArgPos - i);
      auto const isScalar = is_scalar(arg);
      if (!isScalar &&
          (env.index.func_depends_on_arg(foldableFunc, i) ||
           !arg.subtypeOf(BInitCell))) {
        return TBottom;
      args[i] = isScalar ? scalarize(arg) : arg;
    tried_lookup = true;
    return env.index.lookup_foldable_return_type(
      env.ctx, foldableFunc, context, std::move(args));
  }();
  if (auto v = tv(ty)) {
    // Fold succeeded: pop all inputs and push the constant result.
    BytecodeVec repl;
    for (uint32_t i = 0; i < numExtraInputs; ++i) repl.push_back(bc::PopC {});
    for (uint32_t i = 0; i < fca.numArgs; ++i) repl.push_back(bc::PopC {});
    repl.push_back(bc::PopU {});
    repl.push_back(bc::PopU {});
    if (topT(env, fca.numArgs + 2 + numExtraInputs).subtypeOf(TInitCell)) {
      repl.push_back(bc::PopC {});
    } else {
      assertx(topT(env, fca.numArgs + 2 + numExtraInputs).subtypeOf(TUninit));
      repl.push_back(bc::PopU {});
    repl.push_back(gen_constant(*v));
    reduce(env, std::move(repl));
    return true;
  if (tried_lookup) {
    // Record the failed lookup-based fold for this callee in this block.
    env.collect.unfoldableFuncs.emplace(foldableFunc, env.bid);
  return false;
3876 Type typeFromWH(Type t) {
3877 if (!t.couldBe(BObj)) {
3878 // Exceptions will be thrown if a non-object is awaited.
3879 return TBottom;
3882 // Throw away non-obj component.
3883 t &= TObj;
3885 // If we aren't even sure this is a wait handle, there's nothing we can
3886 // infer here.
3887 if (!is_specialized_wait_handle(t)) {
3888 return TInitCell;
3891 return wait_handle_inner(t);
// Push the return value(s) of a call whose return type is `ty`, handling
// multiple returns (inout) and async-eager-return propagation.
void pushCallReturnType(ISS& env, Type&& ty, const FCallArgs& fca) {
  if (ty == TBottom) {
    // The callee function never returns. It might throw, or loop forever.
    unreachable(env);
  if (fca.numRets != 1) {
    // Multiple returns: push element types individually when the return is
    // a specialized vec, otherwise push unknown init cells.
    assertx(fca.asyncEagerTarget == NoBlockId);
    for (auto i = uint32_t{0}; i < fca.numRets - 1; ++i) popU(env);
    if (is_specialized_vec(ty)) {
      for (int32_t i = 1; i < fca.numRets; i++) {
        push(env, vec_elem(ty, ival(i)).first);
      push(env, vec_elem(ty, ival(0)).first);
    } else {
      for (int32_t i = 0; i < fca.numRets; i++) push(env, TInitCell);
    return;
  if (fca.asyncEagerTarget != NoBlockId) {
    // Propagate the unwrapped wait-handle inner type to the async-eager
    // successor block before pushing the normal return type.
    push(env, typeFromWH(ty));
    assertx(topC(env) != TBottom);
    env.propagate(fca.asyncEagerTarget, &env.state);
    popC(env);
  return push(env, std::move(ty));
// Builtins given special constant-propagation treatment in fcallKnownImpl.
const StaticString s_defined { "defined" };
const StaticString s_function_exists { "function_exists" };
// Common implementation for calls with a statically-known callee: computes
// the return type via the index, applies special handling for a few
// builtins, then pops all inputs and pushes the result.
template<class FCallWithFCA>
void fcallKnownImpl(
  ISS& env,
  const FCallArgs& fca,
  const res::Func& func,
  Type context,
  bool nullsafe,
  uint32_t numExtraInputs,
  FCallWithFCA fcallWithFCA
  auto returnType = [&] {
    CompactVector<Type> args(fca.numArgs);
    auto const firstArgPos = numExtraInputs + fca.numInputs() - 1;
    for (auto i = uint32_t{0}; i < fca.numArgs; ++i) {
      args[i] = topCV(env, firstArgPos - i);
    auto ty = fca.hasUnpack()
      ? env.index.lookup_return_type(env.ctx, func)
      : env.index.lookup_return_type(env.ctx, args, context, func);
    if (nullsafe) {
      // A nullsafe call may produce null (when the base was null).
      ty = union_of(std::move(ty), TInitNull);
    return ty;
  }();
  if (fca.asyncEagerTarget != NoBlockId && typeFromWH(returnType) == TBottom) {
    // Kill the async eager target if the function never returns.
    auto newFCA = fca;
    newFCA.asyncEagerTarget = NoBlockId;
    newFCA.flags = static_cast<FCallArgs::Flags>(
      newFCA.flags & ~FCallArgs::SupportsAsyncEagerReturn);
    reduce(env, fcallWithFCA(std::move(newFCA)));
    return;
  if (func.name()->isame(s_function_exists.get()) &&
      (fca.numArgs == 1 || fca.numArgs == 2) &&
      !fca.hasUnpack() && !fca.hasGenerics()) {
    handle_function_exists(env, topT(env, numExtraInputs + fca.numArgs - 1));
  if (func.name()->isame(s_defined.get()) &&
      fca.numArgs == 1 && !fca.hasUnpack() && !fca.hasGenerics() &&
      options.HardConstProp) {
    // If someone calls defined('foo') they probably want foo to be
    // defined normally; ie not a persistent constant.
    if (auto const v = tv(topCV(env, numExtraInputs))) {
      if (isStringType(v->m_type) &&
          !env.index.lookup_constant(env.ctx, v->m_data.pstr)) {
        env.collect.cnsMap[v->m_data.pstr].m_type = kDynamicConstant;
  // Pop every input (extra inputs, generics, unpack, args, uninits, callee)
  // and push the computed return type(s).
  for (auto i = uint32_t{0}; i < numExtraInputs; ++i) popC(env);
  if (fca.hasGenerics()) popC(env);
  if (fca.hasUnpack()) popC(env);
  for (auto i = uint32_t{0}; i < fca.numArgs; ++i) popCV(env);
  popU(env);
  popU(env);
  popCU(env);
  pushCallReturnType(env, std::move(returnType), fca);
// Call with an unknown callee: pop all the call's inputs and assume nothing
// about the result(s).
void fcallUnknownImpl(ISS& env, const FCallArgs& fca) {
  if (fca.hasGenerics()) popC(env);
  if (fca.hasUnpack()) popC(env);
  for (auto i = uint32_t{0}; i < fca.numArgs; ++i) popCV(env);
  popU(env);
  popU(env);
  popCU(env);
  if (fca.asyncEagerTarget != NoBlockId) {
    // The eagerly-returned value could be anything; propagate that to the
    // async-eager successor block.
    assertx(fca.numRets == 1);
    push(env, TInitCell);
    env.propagate(fca.asyncEagerTarget, &env.state);
    popC(env);
  for (auto i = uint32_t{0}; i < fca.numRets - 1; ++i) popU(env);
  for (auto i = uint32_t{0}; i < fca.numRets; ++i) push(env, TInitCell);
void in(ISS& env, const bc::FCallFuncD& op) {
  auto const rfunc = env.index.resolve_func(env.ctx, op.str2);
  if (op.fca.hasGenerics()) {
    auto const tsList = topC(env);
    // The generics list must be a vec (varray before HackArrDVArrs).
    if (!tsList.couldBe(RuntimeOption::EvalHackArrDVArrs ? BVec : BVArr)) {
      return unreachable(env);
    if (!rfunc.couldHaveReifiedGenerics()) {
      // The callee can't use reified generics; drop the generics list.
      return reduce(
        env,
        bc::PopC {},
        bc::FCallFuncD { op.fca.withoutGenerics(), op.str2 }
  auto const updateBC = [&] (FCallArgs fca) {
    return bc::FCallFuncD { std::move(fca), op.str2 };
  if (fcallOptimizeChecks(env, op.fca, rfunc, updateBC) ||
      fcallTryFold(env, op.fca, rfunc, TBottom, false, 0)) {
    return;
  if (auto const func = rfunc.exactFunc()) {
    // Exactly-known builtins may get a specialized calling sequence.
    if (can_emit_builtin(env, func, op.fca)) {
      return finish_builtin(env, func, op.fca);
  fcallKnownImpl(env, op.fca, rfunc, TBottom, false, 0, updateBC);
4042 namespace {
// Callee value of unknown type: pop it and treat the call generically.
void fcallFuncUnknown(ISS& env, const bc::FCallFunc& op) {
  popC(env);
  fcallUnknownImpl(env, op.fca);
// Callee is known to be a ClsMeth value.
void fcallFuncClsMeth(ISS& env, const bc::FCallFunc& op) {
  assertx(topC(env).subtypeOf(BClsMeth));
  // TODO: optimize me
  fcallFuncUnknown(env, op);
// Callee is known to be a func value.
void fcallFuncFunc(ISS& env, const bc::FCallFunc& op) {
  assertx(topC(env).subtypeOf(BFunc));
  // TODO: optimize me
  fcallFuncUnknown(env, op);
// Callee is known to be an object (invokable).
void fcallFuncObj(ISS& env, const bc::FCallFunc& op) {
  assertx(topC(env).subtypeOf(BObj));
  // TODO: optimize me
  fcallFuncUnknown(env, op);
4070 void fcallFuncStr(ISS& env, const bc::FCallFunc& op) {
4071 assertx(topC(env).subtypeOf(BStr));
4072 auto funcName = getNameFromType(topC(env));
4073 if (!funcName) return fcallFuncUnknown(env, op);
4075 funcName = normalizeNS(funcName);
4076 if (!isNSNormalized(funcName) || !notClassMethodPair(funcName)) {
4077 return fcallFuncUnknown(env, op);
4080 auto const rfunc = env.index.resolve_func(env.ctx, funcName);
4081 if (!rfunc.mightCareAboutDynCalls()) {
4082 return reduce(env, bc::PopC {}, bc::FCallFuncD { op.fca, funcName });
4085 auto const updateBC = [&] (FCallArgs fca) {
4086 return bc::FCallFunc { std::move(fca) };
4089 if (fcallOptimizeChecks(env, op.fca, rfunc, updateBC)) return;
4090 fcallKnownImpl(env, op.fca, rfunc, TBottom, false, 1, updateBC);
4093 } // namespace
4095 void in(ISS& env, const bc::FCallFunc& op) {
4096 auto const callable = topC(env);
4097 if (callable.subtypeOf(BFunc)) return fcallFuncFunc(env, op);
4098 if (callable.subtypeOf(BClsMeth)) return fcallFuncClsMeth(env, op);
4099 if (callable.subtypeOf(BObj)) return fcallFuncObj(env, op);
4100 if (callable.subtypeOf(BStr)) return fcallFuncStr(env, op);
4101 fcallFuncUnknown(env, op);
void in(ISS& env, const bc::ResolveFunc& op) {
  // TODO (T29639296)
  // Pushes an opaque func value; consumes nothing from the stack here.
  push(env, TFunc);
4109 void in(ISS& env, const bc::ResolveObjMethod& op) {
4110 // TODO (T29639296)
4111 popC(env);
4112 popC(env);
4113 if (RuntimeOption::EvalHackArrDVArrs) {
4114 push(env, TVec);
4115 } else {
4116 push(env, TVArr);
4120 void in(ISS& env, const bc::ResolveClsMethod& op) {
4121 popC(env);
4122 popC(env);
4123 push(env, TClsMeth);
4126 namespace {
// Nullsafe method call whose base is known to be null: pop everything the
// call would consume and push null as the result.
void fcallObjMethodNullsafe(ISS& env, const FCallArgs& fca, bool extraInput) {
  BytecodeVec repl;
  if (extraInput) repl.push_back(bc::PopC {});
  if (fca.hasGenerics()) repl.push_back(bc::PopC {});
  if (fca.hasUnpack()) repl.push_back(bc::PopC {});
  for (uint32_t i = 0; i < fca.numArgs; ++i) {
    assertx(topC(env, repl.size()).subtypeOf(BInitCell));
    repl.push_back(bc::PopC {});
  repl.push_back(bc::PopU {});
  repl.push_back(bc::PopU {});
  repl.push_back(bc::PopC {});
  for (uint32_t i = 0; i < fca.numRets - 1; ++i) {
    repl.push_back(bc::PopU {});
  repl.push_back(bc::Null {});
  reduce(env, std::move(repl));
// Shared implementation for FCallObjMethod[D]: classifies the base (object /
// null / non-object), handles nullsafe calls, refines the base's location,
// and dispatches to the resolved method.
template <typename Op, class UpdateBC>
void fcallObjMethodImpl(ISS& env, const Op& op, SString methName, bool dynamic,
                        bool extraInput, UpdateBC updateBC) {
  auto const nullThrows = op.subop3 == ObjMethodOp::NullThrows;
  auto const inputPos = op.fca.numInputs() + (extraInput ? 3 : 2);
  auto const input = topC(env, inputPos);
  auto const location = topStkEquiv(env, inputPos);
  auto const mayCallMethod = input.couldBe(BObj);
  auto const mayUseNullsafe = !nullThrows && input.couldBe(BNull);
  auto const mayThrowNonObj = !input.subtypeOf(nullThrows ? BObj : BOptObj);
  // Narrow the base's location: after the call it must have been an object
  // (or null, if only the nullsafe path was possible).
  auto const refineLoc = [&] {
    if (location == NoLocalId) return;
    if (!refineLocation(env, location, [&] (Type t) {
      if (nullThrows) return intersection_of(t, TObj);
      if (!t.couldBe(BUninit)) return intersection_of(t, TOptObj);
      if (!t.couldBe(BObj)) return intersection_of(t, TNull);
      return t;
    })) {
      unreachable(env);
  auto const unknown = [&] {
    if (extraInput) popC(env);
    fcallUnknownImpl(env, op.fca);
    refineLoc();
  if (!mayCallMethod && !mayUseNullsafe) {
    // This FCallObjMethodD may only throw, make sure it's not optimized away.
    unknown();
    unreachable(env);
    return;
  if (!mayCallMethod && !mayThrowNonObj) {
    // Null input, this may only return null, so do that.
    return fcallObjMethodNullsafe(env, op.fca, extraInput);
  if (!mayCallMethod) {
    // May only return null, but can't fold as we may still throw.
    assertx(mayUseNullsafe && mayThrowNonObj);
    return unknown();
  auto const ctxTy = intersection_of(input, TObj);
  auto const clsTy = objcls(ctxTy);
  auto const rfunc = env.index.resolve_method(env.ctx, clsTy, methName);
  auto const canFold = !mayUseNullsafe && !mayThrowNonObj;
  if (fcallOptimizeChecks(env, op.fca, rfunc, updateBC) ||
      (canFold && fcallTryFold(env, op.fca, rfunc, ctxTy, dynamic,
                               extraInput ? 1 : 0))) {
    return;
  if (rfunc.exactFunc() && rfunc.cantBeMagicCall() && op.str2->empty()) {
    // Fill in an empty class hint from the resolved method's class.
    return reduce(env, updateBC(op.fca, rfunc.exactFunc()->cls->name));
  fcallKnownImpl(env, op.fca, rfunc, ctxTy, mayUseNullsafe, extraInput ? 1 : 0,
                 updateBC);
  refineLoc();
4215 } // namespace
// FCallObjMethodD: call a statically-named method on an object receiver.
// First strips a generics argument that cannot matter (method cannot have
// reified generics), then defers to fcallObjMethodImpl.
4217 void in(ISS& env, const bc::FCallObjMethodD& op) {
4218 if (op.fca.hasGenerics()) {
4219 auto const tsList = topC(env);
// Generics must be a vec (or varray pre-HackArrDVArrs); anything else
// means this op can only throw.
4220 if (!tsList.couldBe(RuntimeOption::EvalHackArrDVArrs ? BVec : BVArr)) {
4221 return unreachable(env);
4224 auto const input = topC(env, op.fca.numInputs() + 2);
4225 auto const clsTy = objcls(intersection_of(input, TObj));
4226 auto const rfunc = env.index.resolve_method(env.ctx, clsTy, op.str4);
// If the resolved method can't have reified generics, the generics list
// on the stack is dead weight — pop it and retry without it.
4227 if (!rfunc.couldHaveReifiedGenerics()) {
4228 return reduce(
4229 env,
4230 bc::PopC {},
4231 bc::FCallObjMethodD {
4232 op.fca.withoutGenerics(), op.str2, op.subop3, op.str4 }
// Rebuild this bytecode with updated FCallArgs and (optionally) a class
// hint discovered during resolution.
4237 auto const updateBC = [&] (FCallArgs fca, SString clsHint = nullptr) {
4238 if (!clsHint) clsHint = op.str2;
4239 return bc::FCallObjMethodD { std::move(fca), clsHint, op.subop3, op.str4 };
4241 fcallObjMethodImpl(env, op, op.str4, false, false, updateBC);
// FCallObjMethod: dynamic method call — the method name is a value on top
// of the stack.  If the name is a known static string we try to strengthen
// this into the D (statically-named) flavor.
4244 void in(ISS& env, const bc::FCallObjMethod& op) {
4245 auto const methName = getNameFromType(topC(env));
4246 if (!methName) {
// Unknown method name: consume it and treat the call as opaque.
4247 popC(env);
4248 fcallUnknownImpl(env, op.fca);
4249 return;
4252 auto const input = topC(env, op.fca.numInputs() + 3);
4253 auto const clsTy = objcls(intersection_of(input, TObj));
4254 auto const rfunc = env.index.resolve_method(env.ctx, clsTy, methName);
// Only safe to convert to FCallObjMethodD when dynamic-call semantics
// (notices/logging) can't be observed for this method.
4255 if (!rfunc.mightCareAboutDynCalls()) {
4256 return reduce(
4257 env,
4258 bc::PopC {},
4259 bc::FCallObjMethodD { op.fca, op.str2, op.subop3, methName }
4263 auto const updateBC = [&] (FCallArgs fca, SString clsHint = nullptr) {
4264 if (!clsHint) clsHint = op.str2;
4265 return bc::FCallObjMethod { std::move(fca), clsHint, op.subop3 };
4267 fcallObjMethodImpl(env, op, methName, true, true, updateBC);
4270 namespace {
// Shared interpreter logic for class-method FCall ops with a known class
// type: resolve the method on clsTy, then fold, strengthen the class hint,
// or fall back to generic known-call handling.
4272 template <typename Op, class UpdateBC>
4273 void fcallClsMethodImpl(ISS& env, const Op& op, Type clsTy, SString methName,
4274 bool dynamic, uint32_t numExtraInputs,
4275 UpdateBC updateBC) {
4276 auto const rfunc = env.index.resolve_method(env.ctx, clsTy, methName);
4278 if (fcallOptimizeChecks(env, op.fca, rfunc, updateBC) ||
4279 fcallTryFold(env, op.fca, rfunc, clsTy, dynamic, numExtraInputs)) {
4280 return;
// Exactly-resolved with no class hint yet: re-emit with the resolved
// class name as the hint.
4283 if (rfunc.exactFunc() && op.str2->empty()) {
4284 return reduce(env, updateBC(op.fca, rfunc.exactFunc()->cls->name));
4287 fcallKnownImpl(env, op.fca, rfunc, clsTy, false /* nullsafe */,
4288 numExtraInputs, updateBC);
4291 } // namespace
// FCallClsMethodD: statically-named class and method.  Strips useless
// generics, tries the FCallBuiltin optimization for builtins, then defers
// to fcallClsMethodImpl.
4293 void in(ISS& env, const bc::FCallClsMethodD& op) {
4294 auto const rcls = env.index.resolve_class(env.ctx, op.str3);
4295 auto const clsTy = rcls ? clsExact(*rcls) : TCls;
4296 auto const rfunc = env.index.resolve_method(env.ctx, clsTy, op.str4);
// Drop the generics list if the method can't have reified generics.
4298 if (op.fca.hasGenerics() && !rfunc.couldHaveReifiedGenerics()) {
4299 return reduce(
4300 env,
4301 bc::PopC {},
4302 bc::FCallClsMethodD {
4303 op.fca.withoutGenerics(), op.str2, op.str3, op.str4 }
4307 if (auto const func = rfunc.exactFunc()) {
4308 assertx(func->cls != nullptr);
4309 if (func->cls->name->same(op.str3) && can_emit_builtin(env, func, op.fca)) {
4310 // When we use FCallBuiltin to call a static method, the litstr method
4311 // name will be a fully qualified cls::fn (e.g. "HH\Map::fromItems").
4313 // As a result, we can only do this optimization if the name of the
4314 // builtin function's class matches this op's class name immediate.
4315 return finish_builtin(env, func, op.fca);
4319 auto const updateBC = [&] (FCallArgs fca, SString clsHint = nullptr) {
4320 if (!clsHint) clsHint = op.str2;
4321 return bc::FCallClsMethodD { std::move(fca), clsHint, op.str3, op.str4 };
4323 fcallClsMethodImpl(env, op, clsTy, op.str4, false, 0, updateBC);
// FCallClsMethod: fully dynamic — class on top of stack, method name in
// the cell below it.  Tries to strengthen into FCallClsMethodD when both
// are statically known and dynamic-call logging can't be observed.
4326 void in(ISS& env, const bc::FCallClsMethod& op) {
4327 auto const methName = getNameFromType(topC(env, 1));
4328 if (!methName) {
// Unknown method name: consume class and name, treat call as opaque.
4329 popC(env);
4330 popC(env);
4331 fcallUnknownImpl(env, op.fca);
4332 return;
4335 auto const clsTy = topC(env);
4336 auto const rfunc = env.index.resolve_method(env.ctx, clsTy, methName);
// The DontLogAsDynamicCall immediate lets us skip the dynamic-call notice
// concern unless EvalLogKnownMethodsAsDynamicCalls forces logging anyway.
4337 auto const skipLogAsDynamicCall =
4338 !RuntimeOption::EvalLogKnownMethodsAsDynamicCalls &&
4339 op.subop3 == IsLogAsDynamicCallOp::DontLogAsDynamicCall;
4340 if (is_specialized_cls(clsTy) && dcls_of(clsTy).type == DCls::Exact &&
4341 (!rfunc.mightCareAboutDynCalls() || skipLogAsDynamicCall)) {
4342 auto const clsName = dcls_of(clsTy).cls.name();
4343 return reduce(
4344 env,
4345 bc::PopC {},
4346 bc::PopC {},
4347 bc::FCallClsMethodD { op.fca, op.str2, clsName, methName }
4351 auto const updateBC = [&] (FCallArgs fca, SString clsHint = nullptr) {
4352 if (!clsHint) clsHint = op.str2;
4353 return bc::FCallClsMethod { std::move(fca), clsHint, op.subop3 };
// Two extra inputs: the class and the method-name cells.
4355 fcallClsMethodImpl(env, op, clsTy, methName, true, 2, updateBC);
4358 namespace {
// Type of the current late-bound class context (i.e. what `static::`
// refers to): the self class marked as context, or TCls if unknown.
4360 Type ctxCls(ISS& env) {
4361 auto const s = selfCls(env);
4362 return setctx(s ? *s : TCls);
// Map a SpecialClsRef immediate (Static/Self/Parent) to the best class
// type we can compute for it; TCls when outside a class or unresolvable.
4365 Type specialClsRefToCls(ISS& env, SpecialClsRef ref) {
4366 if (!env.ctx.cls) return TCls;
4367 auto const op = [&]()-> folly::Optional<Type> {
4368 switch (ref) {
4369 case SpecialClsRef::Static: return ctxCls(env);
4370 case SpecialClsRef::Self: return selfClsExact(env);
4371 case SpecialClsRef::Parent: return parentClsExact(env);
4373 always_assert(false);
4374 }();
4375 return op ? *op : TCls;
// Shared interpreter logic for FCallClsMethodS / FCallClsMethodSD: calls
// through a SpecialClsRef.  When the referenced class is exactly known (and
// the call is static, non-dynamic) this strengthens to FCallClsMethodD.
4378 template <typename Op, class UpdateBC>
4379 void fcallClsMethodSImpl(ISS& env, const Op& op, SString methName, bool dynamic,
4380 bool extraInput, UpdateBC updateBC) {
4381 auto const clsTy = specialClsRefToCls(env, op.subop3);
4382 if (is_specialized_cls(clsTy) && dcls_of(clsTy).type == DCls::Exact &&
4383 !dynamic && op.subop3 == SpecialClsRef::Static) {
4384 auto const clsName = dcls_of(clsTy).cls.name();
4385 reduce(env, bc::FCallClsMethodD { op.fca, op.str2, clsName, methName });
4386 return;
4389 auto const rfunc = env.index.resolve_method(env.ctx, clsTy, methName);
// Note: folding/known-call handling uses ctxCls(env) (the late-bound
// context) rather than clsTy as the calling context type.
4391 if (fcallOptimizeChecks(env, op.fca, rfunc, updateBC) ||
4392 fcallTryFold(env, op.fca, rfunc, ctxCls(env), dynamic,
4393 extraInput ? 1 : 0)) {
4394 return;
4397 if (rfunc.exactFunc() && op.str2->empty()) {
4398 return reduce(env, updateBC(op.fca, rfunc.exactFunc()->cls->name));
4401 fcallKnownImpl(env, op.fca, rfunc, ctxCls(env), false /* nullsafe */,
4402 extraInput ? 1 : 0, updateBC);
4405 } // namespace
// FCallClsMethodSD: statically-named method through a SpecialClsRef.
// Strips a useless generics argument, then defers to fcallClsMethodSImpl.
4407 void in(ISS& env, const bc::FCallClsMethodSD& op) {
4408 if (op.fca.hasGenerics()) {
4409 auto const clsTy = specialClsRefToCls(env, op.subop3);
4410 auto const rfunc = env.index.resolve_method(env.ctx, clsTy, op.str4);
4411 if (!rfunc.couldHaveReifiedGenerics()) {
4412 return reduce(
4413 env,
4414 bc::PopC {},
4415 bc::FCallClsMethodSD {
4416 op.fca.withoutGenerics(), op.str2, op.subop3, op.str4 }
4421 auto const updateBC = [&] (FCallArgs fca, SString clsHint = nullptr) {
4422 if (!clsHint) clsHint = op.str2;
4423 return bc::FCallClsMethodSD { std::move(fca), clsHint, op.subop3, op.str4 };
4425 fcallClsMethodSImpl(env, op, op.str4, false, false, updateBC);
// FCallClsMethodS: dynamic method name (on the stack) through a
// SpecialClsRef.  Strengthens to the SD flavor when the name is known and
// dynamic-call / reified-generics concerns can't apply.
4428 void in(ISS& env, const bc::FCallClsMethodS& op) {
4429 auto const methName = getNameFromType(topC(env));
4430 if (!methName) {
4431 popC(env);
4432 fcallUnknownImpl(env, op.fca);
4433 return;
4436 auto const clsTy = specialClsRefToCls(env, op.subop3);
4437 auto const rfunc = env.index.resolve_method(env.ctx, clsTy, methName);
4438 if (!rfunc.mightCareAboutDynCalls() && !rfunc.couldHaveReifiedGenerics()) {
4439 return reduce(
4440 env,
4441 bc::PopC {},
4442 bc::FCallClsMethodSD { op.fca, op.str2, op.subop3, methName }
4446 auto const updateBC = [&] (FCallArgs fca, SString clsHint = nullptr) {
4447 if (!clsHint) clsHint = op.str2;
4448 return bc::FCallClsMethodS { std::move(fca), clsHint, op.subop3 };
4450 fcallClsMethodSImpl(env, op, methName, true, true, updateBC);
4453 namespace {
// Shared implementation of NewObjD / NewObjRD.  `rflavor` marks the
// reified flavor (NewObjRD), which carries a generics cell on the stack.
4455 void newObjDImpl(ISS& env, const StringData* className, bool rflavor) {
4456 auto const rcls = env.index.resolve_class(env.ctx, className);
4457 if (!rcls) {
// Unresolvable class: all we know is that an object is produced.
4458 if (rflavor) popC(env);
4459 push(env, TObj);
4460 return;
// Reified flavor but the class can't have reified generics: the generics
// cell is dead — pop it and use the plain NewObjD.
4462 if (rflavor && !rcls->couldHaveReifiedGenerics()) {
4463 return reduce(env, bc::PopC {}, bc::NewObjD { className });
// The result is the context class exactly when the class can't be
// overridden and matches the enclosing class.
4465 auto const isCtx = !rcls->couldBeOverriden() && env.ctx.cls &&
4466 rcls->same(env.index.resolve_class(env.ctx.cls));
4467 if (rflavor) popC(env);
4468 push(env, setctx(objExact(*rcls), isCtx));
4471 } // namespace
// Thin wrappers: plain and reified-generics flavors of named-class new.
4473 void in(ISS& env, const bc::NewObjD& op) { newObjDImpl(env, op.str1, false); }
4474 void in(ISS& env, const bc::NewObjRD& op) { newObjDImpl(env, op.str1, true); }
// NewObjS: instantiate via a SpecialClsRef (static/self/parent).  Reduces
// to NewObjD when the class is exactly known and the reduction can't lose
// information (no reified generics; context-sensitivity doesn't matter).
4476 void in(ISS& env, const bc::NewObjS& op) {
4477 auto const cls = specialClsRefToCls(env, op.subop1);
4478 if (!is_specialized_cls(cls)) {
4479 push(env, TObj);
4480 return;
4483 auto const dcls = dcls_of(cls);
4484 auto const exact = dcls.type == DCls::Exact;
4485 if (exact && !dcls.cls.couldHaveReifiedGenerics() &&
4486 (!dcls.cls.couldBeOverriden() || equivalently_refined(cls, unctx(cls)))) {
4487 return reduce(env, bc::NewObjD { dcls.cls.name() });
4490 push(env, toobj(cls));
// NewObj: instantiate from a class value on the stack.  Reduces to NewObjD
// when the class is exactly known and dynamic-construct checks can't fire.
4493 void in(ISS& env, const bc::NewObj& op) {
4494 auto const cls = topC(env);
4495 if (!is_specialized_cls(cls)) {
4496 popC(env);
4497 push(env, TObj);
4498 return;
4501 auto const dcls = dcls_of(cls);
4502 auto const exact = dcls.type == DCls::Exact;
4503 if (exact && !dcls.cls.mightCareAboutDynConstructs()) {
4504 return reduce(
4505 env,
4506 bc::PopC {},
4507 bc::NewObjD { dcls.cls.name() }
4511 popC(env);
4512 push(env, toobj(cls));
// NewObjR: instantiate with reified generics (generics on top, class
// beneath).  Drops the generics cell whenever it's provably irrelevant,
// reducing to the plain NewObj.
4515 void in(ISS& env, const bc::NewObjR& op) {
4516 auto const generics = topC(env);
4517 auto const cls = topC(env, 1);
// Null generics means "no reified generics were supplied".
4519 if (generics.subtypeOf(BInitNull)) {
4520 return reduce(
4521 env,
4522 bc::PopC {},
4523 bc::NewObj {}
4527 if (!is_specialized_cls(cls)) {
4528 popC(env);
4529 popC(env);
4530 push(env, TObj);
4531 return;
4534 auto const dcls = dcls_of(cls);
4535 auto const exact = dcls.type == DCls::Exact;
4536 if (exact && !dcls.cls.couldHaveReifiedGenerics()) {
4537 return reduce(
4538 env,
4539 bc::PopC {},
4540 bc::NewObj {}
4544 popC(env);
4545 popC(env);
4546 push(env, toobj(cls));
4549 namespace {
// Whether an object of specialized type `t` might have const properties
// (which affects whether LockObj / lockWhileUnwinding can be elided).
// For a subtype we must also consider derived classes.
4551 bool objMightHaveConstProps(const Type& t) {
4552 assertx(t.subtypeOf(BObj));
4553 assertx(is_specialized_obj(t));
4554 auto const dobj = dobj_of(t);
4555 switch (dobj.type) {
4556 case DObj::Exact:
4557 return dobj.cls.couldHaveConstProp();
4558 case DObj::Sub:
4559 return dobj.cls.derivedCouldHaveConstProp();
4561 not_reached();
// FCallCtor: invoke the constructor of the object below the call inputs.
// Drops lockWhileUnwinding when const props are impossible, resolves the
// ctor, and then folds / hints / falls back like the other FCall ops.
4566 void in(ISS& env, const bc::FCallCtor& op) {
4567 auto const obj = topC(env, op.fca.numInputs() + 2);
4568 assertx(op.fca.numRets == 1);
4570 if (!is_specialized_obj(obj)) {
4571 return fcallUnknownImpl(env, op.fca);
// lockWhileUnwinding only matters for objects that can have const props.
4574 if (op.fca.lockWhileUnwinding && !objMightHaveConstProps(obj)) {
4575 auto newFca = folly::copy(op.fca);
4576 newFca.lockWhileUnwinding = false;
4577 return reduce(env, bc::FCallCtor { std::move(newFca), op.str2 });
4580 auto const dobj = dobj_of(obj);
4581 auto const exact = dobj.type == DObj::Exact;
4582 auto const rfunc = env.index.resolve_ctor(env.ctx, dobj.cls, exact);
4583 if (!rfunc) {
4584 return fcallUnknownImpl(env, op.fca);
4587 auto const updateFCA = [&] (FCallArgs&& fca) {
4588 return bc::FCallCtor { std::move(fca), op.str2 };
// Only fold when the receiver is definitely an object.
4591 auto const canFold = obj.subtypeOf(BObj);
4592 if (fcallOptimizeChecks(env, op.fca, *rfunc, updateFCA) ||
4593 (canFold && fcallTryFold(env, op.fca, *rfunc,
4594 obj, false /* dynamic */, 0))) {
4595 return;
4598 if (rfunc->exactFunc() && op.str2->empty()) {
4599 // We've found the exact func that will be called, set the hint.
4600 return reduce(env, bc::FCallCtor { op.fca, rfunc->exactFunc()->cls->name });
4603 fcallKnownImpl(env, op.fca, *rfunc, obj, false /* nullsafe */, 0,
4604 updateFCA);
// LockObj: leaves the value on the stack unchanged.  Removable (reduce to
// nothing) when the input is definitely an object that can't have const
// properties; otherwise at most mark it nothrow.
4607 void in(ISS& env, const bc::LockObj& op) {
4608 auto const t = topC(env);
4609 auto bail = [&]() {
4610 discard(env, 1);
4611 return push(env, t);
4613 if (!t.subtypeOf(BObj)) return bail();
4614 if (!is_specialized_obj(t) || objMightHaveConstProps(t)) {
4615 nothrow(env);
4616 return bail();
4618 reduce(env);
4621 namespace {
// Shared implementation for IterInit / LIterInit.  Computes the iteration
// types of the base, initializes the live-iterator state and the value
// local, and wires the taken / fallthrough edges depending on whether the
// base could be empty.  `needsPop` is true for the stack-base (IterInit)
// flavor, which consumes the base from the stack.
4623 void iterInitImpl(ISS& env, IterId iter, LocalId valueLoc,
4624 BlockId target, const Type& base, LocalId baseLoc,
4625 bool needsPop) {
4626 auto ity = iter_types(base);
4628 auto const fallthrough = [&] {
4629 auto const baseCannotBeObject = !base.couldBe(BObj);
4630 setIter(env, iter, LiveIter { ity, baseLoc, NoLocalId, env.bid,
4631 false, baseCannotBeObject });
4632 // Do this after setting the iterator, in case it clobbers the base local
4633 // equivalency.
4634 setLoc(env, valueLoc, std::move(ity.value));
4637 assert(iterIsDead(env, iter));
4639 if (!ity.mayThrowOnInit) {
// Definitely-empty base: the init reduces to just consuming the base
// (if any) and always jumping to the taken target.
4640 if (ity.count == IterTypes::Count::Empty && will_reduce(env)) {
4641 if (needsPop) {
4642 reduce(env, bc::PopC{});
4643 } else {
4644 reduce(env);
4646 return jmp_setdest(env, target);
4648 nothrow(env);
4651 if (needsPop) {
4652 popC(env);
4655 switch (ity.count) {
4656 case IterTypes::Count::Empty:
4657 mayReadLocal(env, valueLoc);
4658 jmp_setdest(env, target);
4659 return;
4660 case IterTypes::Count::Single:
4661 case IterTypes::Count::NonEmpty:
// Definitely non-empty: the taken branch is dead.
4662 fallthrough();
4663 return jmp_nevertaken(env);
4664 case IterTypes::Count::ZeroOrOne:
4665 case IterTypes::Count::Any:
4666 // Take the branch before setting locals if the iter is already
4667 // empty, but after popping. Similar for the other IterInits
4668 // below.
4669 env.propagate(target, &env.state);
4670 fallthrough();
4671 return;
4673 always_assert(false);
// Key/value variant of iterInitImpl (IterInitK / LIterInitK): identical
// control flow, but also sets the key local and records the iterator's
// key-local equivalence.
4676 void iterInitKImpl(ISS& env, IterId iter, LocalId valueLoc, LocalId keyLoc,
4677 BlockId target, const Type& base, LocalId baseLoc,
4678 bool needsPop) {
4679 auto ity = iter_types(base);
4681 auto const fallthrough = [&]{
4682 auto const baseCannotBeObject = !base.couldBe(BObj);
4683 setIter(env, iter, LiveIter { ity, baseLoc, NoLocalId, env.bid,
4684 false, baseCannotBeObject });
4685 // Do this after setting the iterator, in case it clobbers the base local
4686 // equivalency.
4687 setLoc(env, valueLoc, std::move(ity.value));
4688 setLoc(env, keyLoc, std::move(ity.key));
4689 setIterKey(env, iter, keyLoc);
4692 assert(iterIsDead(env, iter));
4694 if (!ity.mayThrowOnInit) {
4695 if (ity.count == IterTypes::Count::Empty && will_reduce(env)) {
4696 if (needsPop) {
4697 reduce(env, bc::PopC{});
4698 } else {
4699 reduce(env);
4701 return jmp_setdest(env, target);
4703 nothrow(env);
4706 if (needsPop) {
4707 popC(env);
4710 switch (ity.count) {
4711 case IterTypes::Count::Empty:
4712 mayReadLocal(env, valueLoc);
4713 mayReadLocal(env, keyLoc);
4714 return jmp_setdest(env, target);
4715 case IterTypes::Count::Single:
4716 case IterTypes::Count::NonEmpty:
4717 fallthrough();
4718 return jmp_nevertaken(env);
4719 case IterTypes::Count::ZeroOrOne:
4720 case IterTypes::Count::Any:
// Possibly-empty: propagate state to the taken edge before the locals
// are clobbered by fallthrough().
4721 env.propagate(target, &env.state);
4722 fallthrough();
4723 return;
4726 always_assert(false);
// Shared implementation for IterNext / LIterNext.  Consults the tracked
// iterator state to decide whether the back-edge can be taken, reduces a
// provably-last iteration to a plain IterFree/LIterFree, and otherwise
// propagates state along the taken edge.
4729 void iterNextImpl(ISS& env,
4730 IterId iter, LocalId valueLoc, BlockId target,
4731 LocalId baseLoc) {
// Remember the pre-op value-local type so it can be restored on the
// fallthrough (loop-exit) path below.
4732 auto const curLoc = peekLocRaw(env, valueLoc);
4733 auto noThrow = false;
// noTaken: true when the iterator definitely has no further elements,
// so the back-edge to `target` can never be taken.
4734 auto const noTaken = match<bool>(
4735 env.state.iters[iter],
4736 [&] (DeadIter) {
4737 always_assert(false && "IterNext on dead iter");
4738 return false;
4740 [&] (const LiveIter& ti) {
4741 if (!ti.types.mayThrowOnNext) noThrow = true;
4742 if (ti.baseLocal != NoLocalId) hasInvariantIterBase(env);
4743 switch (ti.types.count) {
4744 case IterTypes::Count::Single:
4745 case IterTypes::Count::ZeroOrOne:
4746 return true;
4747 case IterTypes::Count::NonEmpty:
4748 case IterTypes::Count::Any:
4749 setLoc(env, valueLoc, ti.types.value);
4750 return false;
4751 case IterTypes::Count::Empty:
4752 always_assert(false);
4754 not_reached();
// Never-taken and can't throw: the whole op is just freeing the iter.
4758 if (noTaken && noThrow && will_reduce(env)) {
4759 if (baseLoc != NoLocalId) {
4760 return reduce(env, bc::LIterFree { iter, baseLoc });
4762 return reduce(env, bc::IterFree { iter });
4765 mayReadLocal(env, valueLoc);
4766 mayReadLocal(env, baseLoc);
4768 if (noThrow) nothrow(env);
4770 if (noTaken) {
4771 jmp_nevertaken(env);
4772 freeIter(env, iter);
4773 return;
// Back-edge may be taken: propagate the in-loop state first, then model
// the fallthrough (iterator freed, value local restored).
4776 env.propagate(target, &env.state);
4778 freeIter(env, iter);
4779 setLocRaw(env, valueLoc, curLoc);
// Key/value variant of iterNextImpl (IterNextK / LIterNextK): same logic,
// additionally tracking and restoring the key local.
4782 void iterNextKImpl(ISS& env, IterId iter, LocalId valueLoc,
4783 LocalId keyLoc, BlockId target, LocalId baseLoc) {
// Snapshot both locals for restoration on the fallthrough path.
4784 auto const curValue = peekLocRaw(env, valueLoc);
4785 auto const curKey = peekLocRaw(env, keyLoc);
4786 auto noThrow = false;
4787 auto const noTaken = match<bool>(
4788 env.state.iters[iter],
4789 [&] (DeadIter) {
4790 always_assert(false && "IterNextK on dead iter");
4791 return false;
4793 [&] (const LiveIter& ti) {
4794 if (!ti.types.mayThrowOnNext) noThrow = true;
4795 if (ti.baseLocal != NoLocalId) hasInvariantIterBase(env);
4796 switch (ti.types.count) {
4797 case IterTypes::Count::Single:
4798 case IterTypes::Count::ZeroOrOne:
4799 return true;
4800 case IterTypes::Count::NonEmpty:
4801 case IterTypes::Count::Any:
4802 setLoc(env, valueLoc, ti.types.value);
4803 setLoc(env, keyLoc, ti.types.key);
4804 setIterKey(env, iter, keyLoc);
4805 return false;
4806 case IterTypes::Count::Empty:
4807 always_assert(false);
4809 not_reached();
// Never-taken and can't throw: the op reduces to freeing the iterator.
4813 if (noTaken && noThrow && will_reduce(env)) {
4814 if (baseLoc != NoLocalId) {
4815 return reduce(env, bc::LIterFree { iter, baseLoc });
4817 return reduce(env, bc::IterFree { iter });
4820 mayReadLocal(env, valueLoc);
4821 mayReadLocal(env, keyLoc);
4822 mayReadLocal(env, baseLoc);
4824 if (noThrow) nothrow(env);
4826 if (noTaken) {
4827 jmp_nevertaken(env);
4828 freeIter(env, iter);
4829 return;
4832 env.propagate(target, &env.state);
4834 freeIter(env, iter);
4835 setLocRaw(env, valueLoc, curValue);
4836 setLocRaw(env, keyLoc, curKey);
// IterInit: value-only iteration over a base on the stack (needsPop).
4841 void in(ISS& env, const bc::IterInit& op) {
4842 auto base = topC(env);
4843 iterInitImpl(
4844 env,
4845 op.iter1,
4846 op.loc3,
4847 op.target2,
4848 std::move(base),
4849 topStkLocal(env),
4850 true
// LIterInit: value-only iteration over a local base (no stack pop).
4854 void in(ISS& env, const bc::LIterInit& op) {
4855 iterInitImpl(
4856 env,
4857 op.iter1,
4858 op.loc4,
4859 op.target3,
4860 locAsCell(env, op.loc2),
4861 op.loc2,
4862 false
// IterInitK: key/value iteration over a base on the stack (needsPop).
4866 void in(ISS& env, const bc::IterInitK& op) {
4867 auto base = topC(env);
4868 iterInitKImpl(
4869 env,
4870 op.iter1,
4871 op.loc3,
4872 op.loc4,
4873 op.target2,
4874 std::move(base),
4875 topStkLocal(env),
4876 true
// LIterInitK: key/value iteration over a local base (no stack pop).
4880 void in(ISS& env, const bc::LIterInitK& op) {
4881 iterInitKImpl(
4882 env,
4883 op.iter1,
4884 op.loc4,
4885 op.loc5,
4886 op.target3,
4887 locAsCell(env, op.loc2),
4888 op.loc2,
4889 false
// Thin wrappers over iterNextImpl / iterNextKImpl for the four next-op
// flavors; the L* flavors pass their base local, the others NoLocalId.
4893 void in(ISS& env, const bc::IterNext& op) {
4894 iterNextImpl(env, op.iter1, op.loc3, op.target2, NoLocalId);
4897 void in(ISS& env, const bc::LIterNext& op) {
4898 iterNextImpl(env, op.iter1, op.loc4, op.target3, op.loc2);
4901 void in(ISS& env, const bc::IterNextK& op) {
4902 iterNextKImpl(env, op.iter1, op.loc3, op.loc4, op.target2, NoLocalId);
4905 void in(ISS& env, const bc::LIterNextK& op) {
4906 iterNextKImpl(env, op.iter1, op.loc4, op.loc5, op.target3, op.loc2);
// IterFree: frees an iterator; a no-op (reducible) if the iter is already
// dead at this point.
4909 void in(ISS& env, const bc::IterFree& op) {
4910 // IterFree is used for weak iterators too, so we can't assert !iterIsDead.
4911 auto const isNop = match<bool>(
4912 env.state.iters[op.iter1],
4913 [] (DeadIter) {
4914 return true;
4916 [&] (const LiveIter& ti) {
4917 if (ti.baseLocal != NoLocalId) hasInvariantIterBase(env);
4918 return false;
4922 if (isNop && will_reduce(env)) return reduce(env);
4924 nothrow(env);
4925 freeIter(env, op.iter1);
// LIterFree: free a local-base iterator; also records the base-local read.
4928 void in(ISS& env, const bc::LIterFree& op) {
4929 nothrow(env);
4930 mayReadLocal(env, op.loc2);
4931 freeIter(env, op.iter1);
// IterBreak: free every iterator in the op's iterator table, then jump
// unconditionally to target1.
4934 void in(ISS& env, const bc::IterBreak& op) {
4935 nothrow(env);
4937 for (auto const& it : op.iterTab) {
4938 if (it.kind == KindOfIter || it.kind == KindOfLIter) {
4939 match<void>(
4940 env.state.iters[it.id],
4941 [] (DeadIter) {},
4942 [&] (const LiveIter& ti) {
4943 if (ti.baseLocal != NoLocalId) hasInvariantIterBase(env);
4947 if (it.kind == KindOfLIter) mayReadLocal(env, it.local);
4948 freeIter(env, it.id);
4951 env.propagate(op.target1, &env.state);
4955 * Any include/require (or eval) op kills all locals, and private properties.
// Common handling for include/require/eval ops: they can run arbitrary
// code, so all knowledge of locals and private properties is discarded.
4957 void inclOpImpl(ISS& env) {
4958 popC(env);
4959 killLocals(env);
4960 killThisProps(env);
4961 killSelfProps(env);
4962 mayUseVV(env);
4963 push(env, TInitCell);
// All include/require/eval flavors share the same (pessimistic) handling.
4966 void in(ISS& env, const bc::Incl&) { inclOpImpl(env); }
4967 void in(ISS& env, const bc::InclOnce&) { inclOpImpl(env); }
4968 void in(ISS& env, const bc::Req&) { inclOpImpl(env); }
4969 void in(ISS& env, const bc::ReqOnce&) { inclOpImpl(env); }
4970 void in(ISS& env, const bc::ReqDoc&) { inclOpImpl(env); }
4971 void in(ISS& env, const bc::Eval&) { inclOpImpl(env); }
// Definition ops with no effect on the abstract interpreter state.
4973 void in(ISS& /*env*/, const bc::DefCls&) {}
4974 void in(ISS& /*env*/, const bc::DefRecord&) {}
4975 void in(ISS& /*env*/, const bc::DefClsNop&) {}
// AliasCls: consumes one cell and pushes the success boolean.
4976 void in(ISS& env, const bc::AliasCls&) {
4977 popC(env);
4978 push(env, TBool);
// DefCns: defines a global constant.  Under HardConstProp we track the
// defined value in collect.cnsMap so later CnsE lookups can fold; multiple
// definitions in one function demote the entry to "dynamic".
4981 void in(ISS& env, const bc::DefCns& op) {
4982 auto const t = popC(env);
4983 if (options.HardConstProp) {
4984 auto const v = tv(t);
// Record kUninit if the value isn't statically known/allowed.
4985 auto const val = v && tvAsCVarRef(&*v).isAllowedAsConstantValue() ?
4986 *v : make_tv<KindOfUninit>();
4987 auto const res = env.collect.cnsMap.emplace(op.str1, val);
4988 if (!res.second) {
4989 if (res.first->second.m_type == kReadOnlyConstant) {
4990 // we only saw a read of this constant
4991 res.first->second = val;
4992 } else {
4993 // more than one definition in this function
4994 res.first->second.m_type = kDynamicConstant;
4998 push(env, TBool);
// DefTypeAlias: no effect on abstract interpreter state.
5001 void in(ISS& /*env*/, const bc::DefTypeAlias&) {}
// This: pushes $this.  If availability is already known, reduce to the
// cheaper BareThis{NeverNull}; otherwise push the non-null this type and
// record that $this is now known available.
5003 void in(ISS& env, const bc::This&) {
5004 if (thisAvailable(env)) {
5005 return reduce(env, bc::BareThis { BareThisOp::NeverNull });
5007 auto const ty = thisTypeNonNull(env);
5008 push(env, ty, StackThisId);
5009 setThisAvailable(env);
5010 if (ty.subtypeOf(BBottom)) unreachable(env);
// LateBoundCls: pushes the late-bound (static::) class; effect-free when
// we're inside a class.
5013 void in(ISS& env, const bc::LateBoundCls& op) {
5014 if (env.ctx.cls) effect_free(env);
5015 auto const ty = selfCls(env);
5016 push(env, setctx(ty ? *ty : TCls));
// CheckThis: no-op once $this is known available; otherwise the check
// establishes availability from here on.
5019 void in(ISS& env, const bc::CheckThis&) {
5020 if (thisAvailable(env)) {
5021 return reduce(env);
5023 setThisAvailable(env);
// BareThis: pushes $this (possibly null, depending on the subop).  When
// availability is known we canonicalize to the NeverNull flavor.
5026 void in(ISS& env, const bc::BareThis& op) {
5027 if (thisAvailable(env)) {
5028 if (op.subop1 != BareThisOp::NeverNull) {
5029 return reduce(env, bc::BareThis { BareThisOp::NeverNull });
5033 auto const ty = thisType(env);
5034 switch (op.subop1) {
5035 case BareThisOp::Notice:
// May raise a notice on null $this — cannot be marked effect-free.
5036 break;
5037 case BareThisOp::NoNotice:
5038 effect_free(env);
5039 break;
5040 case BareThisOp::NeverNull:
5041 setThisAvailable(env);
5042 if (!env.state.unreachable) effect_free(env);
5043 return push(env, ty, StackThisId);
5046 push(env, ty, StackThisId);
// InitThisLoc: stores $this into a local; tracked via state.thisLoc unless
// the local is volatile.
5049 void in(ISS& env, const bc::InitThisLoc& op) {
5050 if (!is_volatile_local(env.ctx.func, op.loc1)) {
5051 setLocRaw(env, op.loc1, TCell);
5052 env.state.thisLoc = op.loc1;
5057 * Amongst other things, we use this to mark units non-persistent.
// OODeclExists (class_exists / interface_exists / trait_exists): tries to
// constant-propagate the answer when the class is statically known and
// autoload can't change the outcome; otherwise the queried unit must be
// marked non-persistent.
5059 void in(ISS& env, const bc::OODeclExists& op) {
5060 auto flag = popC(env);
5061 auto name = popC(env);
5062 push(env, [&] {
5063 if (!name.strictSubtypeOf(TStr)) return TBool;
5064 auto const v = tv(name);
5065 if (!v) return TBool;
5066 auto rcls = env.index.resolve_class(env.ctx, v->m_data.pstr);
5067 if (!rcls || !rcls->cls()) return TBool;
// Whether the resolved declaration matches the kind being queried.
5068 auto const mayExist = [&] () -> bool {
5069 switch (op.subop1) {
5070 case OODeclExistsOp::Class:
5071 return !(rcls->cls()->attrs & (AttrInterface | AttrTrait));
5072 case OODeclExistsOp::Interface:
5073 return rcls->cls()->attrs & AttrInterface;
5074 case OODeclExistsOp::Trait:
5075 return rcls->cls()->attrs & AttrTrait;
5077 not_reached();
5078 }();
5079 auto unit = rcls->cls()->unit;
5080 auto canConstProp = [&] {
5081 // Its generally not safe to constprop this, because of
5082 // autoload. We're safe if its part of systemlib, or a
5083 // superclass of the current context.
5084 if (is_systemlib_part(*unit)) return true;
5085 if (!env.ctx.cls) return false;
5086 auto thisClass = env.index.resolve_class(env.ctx.cls);
5087 return thisClass.mustBeSubtypeOf(*rcls);
5089 if (canConstProp()) {
5090 constprop(env);
5091 return mayExist ? TTrue : TFalse;
// Not inlining: the result depends on whether the unit stays loaded,
// so the unit can no longer be treated as persistent.
5093 if (!any(env.collect.opts & CollectionOpts::Inlining)) {
5094 unit->persistent.store(false, std::memory_order_relaxed);
5096 // At this point, if it mayExist, we still don't know that it
5097 // *does* exist, but if not we know that it either doesn't
5098 // exist, or it doesn't have the right type.
5099 return mayExist ? TBool : TFalse;
5100 } ());
5103 namespace {
// Whether a class/object type could be satisfied by a mock class, in which
// case context-sensitivity must be dropped (see VerifyParamType below).
5104 bool couldBeMocked(const Type& t) {
5105 if (is_specialized_cls(t)) {
5106 return dcls_of(t).cls.couldBeMocked();
5107 } else if (is_specialized_obj(t)) {
5108 return dobj_of(t).cls.couldBeMocked();
5110 // In practice this should not occur since this is used mostly on the result
5111 // of looked up type constraints.
5112 return true;
// VerifyParamType: checks a parameter against its type constraint.
// Reduces away when the check provably passes; otherwise, on the
// no-throw path, refines the local to the constraint's type.
5116 void in(ISS& env, const bc::VerifyParamType& op) {
5117 IgnoreUsedParams _{env};
5119 if (env.ctx.func->isMemoizeImpl) {
5120 // a MemoizeImpl's params have already been checked by the wrapper
5121 return reduce(env);
5124 // Generally we won't know anything about the params, but
5125 // analyze_func_inline does - and this can help with effect-free analysis
5126 auto const constraint = env.ctx.func->params[op.loc1].typeConstraint;
5127 if (env.index.satisfies_constraint(env.ctx,
5128 locAsCell(env, op.loc1),
5129 constraint)) {
// Func/Cls values may undergo implicit conversion during verification,
// so don't reduce the op away if the local could contain one.
5130 if (!locAsCell(env, op.loc1).couldBe(BFunc | BCls)) {
5131 return reduce(env);
5136 * We assume that if this opcode doesn't throw, the parameter was of the
5137 * specified type (although it may have been a Ref if the parameter was
5138 * by reference).
5140 * The env.setLoc here handles dealing with a parameter that was
5141 * already known to be a reference.
5143 * NB: VerifyParamType of a reference parameter can kill any references
5144 * if it re-enters.
// `this` hints are only strictly enforced at ThisTypeHintLevel == 3.
5146 if (RuntimeOption::EvalThisTypeHintLevel != 3 && constraint.isThis()) {
5147 return;
5149 if (constraint.hasConstraint() && !constraint.isTypeVar() &&
5150 !constraint.isTypeConstant()) {
5151 auto t =
5152 loosen_dvarrayness(env.index.lookup_constraint(env.ctx, constraint));
// A mockable `this` constraint can't keep its context-sensitivity.
5153 if (constraint.isThis() && couldBeMocked(t)) {
5154 t = unctx(std::move(t));
5156 if (t.subtypeOf(BBottom)) unreachable(env);
5157 FTRACE(2, " {} ({})\n", constraint.fullName(), show(t));
5158 setLoc(env, op.loc1, std::move(t));
// VerifyParamTypeTS: parameter verification against a reified type
// structure on the stack.  Reduces to the plain VerifyParamType when
// reified types can't be involved, and statically resolves the type
// structure when possible.
5162 void in(ISS& env, const bc::VerifyParamTypeTS& op) {
5163 auto const a = topC(env);
// A type structure must be a dict (darray pre-HackArrDVArrs).
5164 auto const requiredTSType = RuntimeOption::EvalHackArrDVArrs ? BDict : BDArr;
5165 if (!a.couldBe(requiredTSType)) {
5166 unreachable(env);
5167 popC(env);
5168 return;
5170 auto const constraint = env.ctx.func->params[op.loc1].typeConstraint;
5171 // TODO(T31677864): We are being extremely pessimistic here, relax it
5172 if (!env.ctx.func->isReified &&
5173 (!env.ctx.cls || !env.ctx.cls->hasReifiedGenerics) &&
5174 !env.index.could_have_reified_type(env.ctx, constraint)) {
5175 return reduce(env, bc::PopC {}, bc::VerifyParamType { op.loc1 });
5178 if (auto const inputTS = tv(a)) {
5179 if (!isValidTSType(*inputTS, false)) {
5180 unreachable(env);
5181 popC(env);
5182 return;
// Replace the type structure with its statically-resolved form when
// resolution changes it.
5184 auto const resolvedTS =
5185 resolveTSStatically(env, inputTS->m_data.parr, env.ctx.cls, true);
5186 if (resolvedTS && resolvedTS != inputTS->m_data.parr) {
5187 reduce(env, bc::PopC {});
5188 RuntimeOption::EvalHackArrDVArrs ? reduce(env, bc::Dict { resolvedTS })
5189 : reduce(env, bc::Array { resolvedTS });
5190 reduce(env, bc::VerifyParamTypeTS { op.loc1 });
5191 return;
5194 popC(env);
// Shared implementation for the return-type verification ops
// (VerifyRetTypeC / VerifyRetTypeTS / VerifyOutType).  `ts_flavor` means a
// type structure sits on top of the stack above the verified value;
// `reduce_this` permits the `this`-hint -> VerifyRetNonNullC reduction.
5197 void verifyRetImpl(ISS& env, const TypeConstraint& constraint,
5198 bool reduce_this, bool ts_flavor) {
5199 // If it is the ts flavor, then second thing on the stack, otherwise first
5200 auto stackT = topC(env, (int)ts_flavor);
5201 auto const stackEquiv = topStkEquiv(env, (int)ts_flavor);
5203 // If there is no return type constraint, or if the return type
5204 // constraint is a typevar, or if the top of stack is the same or a
5205 // subtype of the type constraint, then this is a no-op, unless
5206 // reified types could be involved.
5207 if (env.index.satisfies_constraint(env.ctx, stackT, constraint)) {
5208 if (ts_flavor) {
5209 // we wouldn't get here if reified types were definitely not
5210 // involved, so just bail.
5211 popC(env);
5212 popC(env);
5213 push(env, std::move(stackT), stackEquiv);
5214 return;
5216 return reduce(env);
5219 // For CheckReturnTypeHints >= 3 AND the constraint is not soft.
5220 // We can safely assume that either VerifyRetTypeC will
5221 // throw or it will produce a value whose type is compatible with the
5222 // return type constraint.
5223 auto tcT = remove_uninit(
5224 loosen_dvarrayness(env.index.lookup_constraint(env.ctx, constraint)));
5226 // If tcT could be an interface or trait, we upcast it to TObj/TOptObj.
5227 // Why? Because we want uphold the invariant that we only refine return
5228 // types and never widen them, and if we allow tcT to be an interface then
5229 // it's possible for violations of this invariant to arise. For an example,
5230 // see "hphp/test/slow/hhbbc/return-type-opt-bug.php".
5231 // Note: It's safe to use TObj/TOptObj because lookup_constraint() only
5232 // returns classes or interfaces or traits (it never returns something that
5233 // could be an enum or type alias) and it never returns anything that could
5234 // be a "magic" interface that supports non-objects. (For traits the return
5235 // typehint will always throw at run time, so it's safe to use TObj/TOptObj.)
5236 if (is_specialized_obj(tcT) && dobj_of(tcT).cls.couldBeInterfaceOrTrait()) {
5237 tcT = is_opt(tcT) ? TOptObj : TObj;
5240 // In some circumstances, verifyRetType can modify the type. If it
5241 // does that we can't reduce even when we know it succeeds.
5242 auto dont_reduce = false;
5243 // VerifyRetType will convert a TFunc to a TStr implicitly
5244 // (and possibly warn)
5245 if (tcT.couldBe(BStr) && stackT.couldBe(BFunc | BCls)) {
5246 stackT |= TStr;
5247 dont_reduce = true;
5250 // VerifyRetType will convert TClsMeth to TVec/TVArr/TArr implicitly
5251 if (stackT.couldBe(BClsMeth)) {
5252 if (tcT.couldBe(BVec)) {
5253 stackT |= TVec;
5254 dont_reduce = true;
5256 if (tcT.couldBe(BVArr)) {
5257 stackT |= TVArr;
5258 dont_reduce = true;
5260 if (tcT.couldBe(TArr)) {
5261 stackT |= TArr;
5262 dont_reduce = true;
5266 // If CheckReturnTypeHints < 3 OR if the constraint is soft,
5267 // then there are no optimizations we can safely do here, so
5268 // just leave the top of stack as is.
5269 if (RuntimeOption::EvalCheckReturnTypeHints < 3 || constraint.isSoft() ||
5270 (RuntimeOption::EvalThisTypeHintLevel != 3 && constraint.isThis())) {
5271 if (ts_flavor) popC(env);
5272 popC(env);
5273 push(env, std::move(stackT), stackEquiv);
5274 return;
5277 // In cases where we have a `this` hint where stackT is an TOptObj known to
5278 // be this, we can replace the check with a non null check. These cases are
5279 // likely from a BareThis that could return Null. Since the runtime will
5280 // split these translations, it will rarely in practice return null.
5281 if (reduce_this &&
5282 !dont_reduce &&
5283 constraint.isThis() &&
5284 !constraint.isNullable() &&
5285 is_opt(stackT) &&
5286 env.index.satisfies_constraint(env.ctx, unopt(stackT), constraint)) {
5287 if (ts_flavor) {
5288 return reduce(env, bc::PopC {}, bc::VerifyRetNonNullC {});
5290 return reduce(env, bc::VerifyRetNonNullC {});
// The surviving value's type is the intersection of the stack type and
// the constraint type; bottom means the check always throws.
5293 auto retT = intersection_of(std::move(tcT), std::move(stackT));
5294 if (retT.subtypeOf(BBottom)) {
5295 unreachable(env);
5296 if (ts_flavor) popC(env); // the type structure
5297 return;
5300 if (ts_flavor) popC(env); // the type structure
5301 popC(env);
5302 push(env, std::move(retT));
// VerifyOutType: verify an inout/out parameter against its declared type
// constraint on the way out. Delegates to verifyRetImpl with
// reduce_this = false (no `this` reduction applies to out params) and
// ts_flavor = false (no reified type structure on the stack).
5305 void in(ISS& env, const bc::VerifyOutType& op) {
5306 verifyRetImpl(env, env.ctx.func->params[op.arg1].typeConstraint,
5307 false, false);
// VerifyRetTypeC: verify the cell on top of the stack against the function's
// return type constraint. reduce_this = true allows the `this`-hint
// strengthening in verifyRetImpl; ts_flavor = false (no type structure).
5310 void in(ISS& env, const bc::VerifyRetTypeC& /*op*/) {
5311 verifyRetImpl(env, env.ctx.func->retTypeConstraint, true, false);
// VerifyRetTypeTS: like VerifyRetTypeC, but an additional reified type
// structure sits on top of the stack (a Dict under HackArrDVArrs, otherwise
// a DArr). Tries to drop or statically resolve the type structure before
// falling back to the generic verification path.
5314 void in(ISS& env, const bc::VerifyRetTypeTS& /*op*/) {
5315 auto const a = topC(env);
5316 auto const requiredTSType = RuntimeOption::EvalHackArrDVArrs ? BDict : BDArr;
// If the top of stack can't even be the required type-structure array kind,
// this instruction always throws.
5317 if (!a.couldBe(requiredTSType)) {
5318 unreachable(env);
5319 popC(env);
5320 return;
5322 auto const constraint = env.ctx.func->retTypeConstraint;
5323 // TODO(T31677864): We are being extremely pessimistic here, relax it
// When neither the function, its class, nor the constraint can involve
// reified generics, the type structure is unnecessary: drop it and use the
// plain VerifyRetTypeC check instead.
5324 if (!env.ctx.func->isReified &&
5325 (!env.ctx.cls || !env.ctx.cls->hasReifiedGenerics) &&
5326 !env.index.could_have_reified_type(env.ctx, constraint)) {
5327 return reduce(env, bc::PopC {}, bc::VerifyRetTypeC {});
// If the type structure is a known constant, validate it and try to resolve
// it statically; on success replace it with the resolved literal and retry.
5329 if (auto const inputTS = tv(a)) {
5330 if (!isValidTSType(*inputTS, false)) {
5331 unreachable(env);
5332 popC(env);
5333 return;
5335 auto const resolvedTS =
5336 resolveTSStatically(env, inputTS->m_data.parr, env.ctx.cls, true);
5337 if (resolvedTS && resolvedTS != inputTS->m_data.parr) {
5338 reduce(env, bc::PopC {});
5339 RuntimeOption::EvalHackArrDVArrs ? reduce(env, bc::Dict { resolvedTS })
5340 : reduce(env, bc::Array { resolvedTS });
5341 reduce(env, bc::VerifyRetTypeTS {});
5342 return;
// Generic path: ts_flavor = true tells verifyRetImpl about the extra stack
// slot holding the type structure.
5345 verifyRetImpl(env, constraint, true, true);
// VerifyRetNonNullC: a narrowed form of return verification emitted when the
// only thing left to check is non-nullness (see the `this`-hint reduction in
// verifyRetImpl). Removes nullability from the top-of-stack type.
5348 void in(ISS& env, const bc::VerifyRetNonNullC& /*op*/) {
5349 auto const constraint = env.ctx.func->retTypeConstraint;
// Same enforcement gating as verifyRetImpl: with weak return-type hints or a
// soft/this constraint that isn't enforced, this is a no-op for analysis.
5350 if (RuntimeOption::EvalCheckReturnTypeHints < 3 || constraint.isSoft()
5351 || (RuntimeOption::EvalThisTypeHintLevel != 3 && constraint.isThis())) {
5352 return;
5355 auto stackT = topC(env);
// Null is impossible: the check can never fire, so drop the instruction.
5357 if (!stackT.couldBe(BInitNull)) {
5358 reduce(env);
5359 return;
// Definitely null: the check always throws.
5362 if (stackT.subtypeOf(BNull)) return unreachable(env);
5364 auto const equiv = topStkEquiv(env);
// Strip the null option from the stack type; equivalence is preserved.
5366 if (is_opt(stackT)) stackT = unopt(std::move(stackT));
5368 popC(env);
5369 push(env, stackT, equiv);
// Self: push the current class. If the exact self class is known statically,
// the op is effect-free and we push the precise class type; otherwise TCls.
5372 void in(ISS& env, const bc::Self& op) {
5373 auto const self = selfClsExact(env);
5374 if (self) {
5375 effect_free(env);
5376 push(env, *self);
5377 } else {
5378 push(env, TCls);
// Parent: push the parent class; mirrors the Self handler. Exact parent known
// => effect-free with a precise class type, else TCls.
5382 void in(ISS& env, const bc::Parent& op) {
5383 auto const parent = parentClsExact(env);
5384 if (parent) {
5385 effect_free(env);
5386 push(env, *parent);
5387 } else {
5388 push(env, TCls);
// CreateCl: create a closure object, capturing op.arg1 use-variables from the
// stack. Records the captured types so later analysis of the closure body can
// use them, and pushes a subtype-of-Closure object.
5392 void in(ISS& env, const bc::CreateCl& op) {
5393 auto const nargs = op.arg1;
5394 auto const clsPair = env.index.resolve_closure_class(env.ctx, op.arg2);
5397 * Every closure should have a unique allocation site, but we may see it
5398 * multiple times in a given round of analyzing this function. Each time we
5399 * may have more information about the used variables; the types should only
5400 * possibly grow. If it's already there we need to merge the used vars in
5401 * with what we saw last time.
5403 if (nargs) {
5404 CompactVector<Type> usedVars(nargs);
// Use vars are popped in reverse order; unctx() drops context-dependence
// since the closure may run under a different context.
5405 for (auto i = uint32_t{0}; i < nargs; ++i) {
5406 usedVars[nargs - i - 1] = unctx(popCU(env));
5408 merge_closure_use_vars_into(
5409 env.collect.closureUseTypes,
5410 clsPair.second,
5411 std::move(usedVars)
5415 // Closure classes can be cloned and rescoped at runtime, so it's not safe to
5416 // assert the exact type of closure objects. The best we can do is assert
5417 // that it's a subclass of Closure.
5418 auto const closure = env.index.builtin_class(s_Closure.get());
5420 return push(env, subObj(closure));
// CreateCont: create a generator. The value observed at this point by the
// resumed body is always null, per the comment below.
5423 void in(ISS& env, const bc::CreateCont& /*op*/) {
5424 // First resume is always next() which pushes null.
5425 push(env, TInitNull);
5428 void in(ISS& env, const bc::ContEnter&) { popC(env); push(env, TInitCell); }
5429 void in(ISS& env, const bc::ContRaise&) { popC(env); push(env, TInitCell); }
// Yield: pop the yielded value; the value sent back on resume is unknown, so
// push TInitCell.
5431 void in(ISS& env, const bc::Yield&) {
5432 popC(env);
5433 push(env, TInitCell);
// YieldK: like Yield but with an explicit key — pops both key and value, then
// pushes the unknown resume value.
5436 void in(ISS& env, const bc::YieldK&) {
5437 popC(env);
5438 popC(env);
5439 push(env, TInitCell);
// ContAssignDelegate: consumes the delegate (`yield from` target) from the
// stack; pushes nothing.
5442 void in(ISS& env, const bc::ContAssignDelegate&) {
5443 popC(env);
// ContEnterDelegate: consumes the value passed through to the delegate
// generator; pushes nothing.
5446 void in(ISS& env, const bc::ContEnterDelegate&) {
5447 popC(env);
// YieldFromDelegate: the value produced by the delegate is unknown, so push
// TInitCell; control may transfer to the taken target, so propagate state.
5450 void in(ISS& env, const bc::YieldFromDelegate& op) {
5451 push(env, TInitCell);
5452 env.propagate(op.target2, &env.state);
// ContUnsetDelegate / ContCheck: no stack effects and nothing to learn for
// analysis; intentionally empty handlers.
5455 void in(ISS& /*env*/, const bc::ContUnsetDelegate&) {}
5457 void in(ISS& /*env*/, const bc::ContCheck&) {}
5458 void in(ISS& env, const bc::ContValid&) { push(env, TBool); }
5459 void in(ISS& env, const bc::ContKey&) { push(env, TInitCell); }
5460 void in(ISS& env, const bc::ContCurrent&) { push(env, TInitCell); }
5461 void in(ISS& env, const bc::ContGetReturn&) { push(env, TInitCell); }
// Helper shared by Await/WHResult: given the type of an awaited value, push
// the type produced by unwrapping the wait handle.
5463 void pushTypeFromWH(ISS& env, Type t) {
5464 auto inner = typeFromWH(t);
5465 // The next opcode is unreachable if awaiting a non-object or WaitH<Bottom>.
5466 if (inner.subtypeOf(BBottom)) unreachable(env);
5467 push(env, std::move(inner));
// WHResult: pop a wait handle and push its result type.
5470 void in(ISS& env, const bc::WHResult&) {
5471 pushTypeFromWH(env, popC(env));
// Await: pop the awaited value and push the type obtained by unwrapping its
// wait handle.
5474 void in(ISS& env, const bc::Await&) {
5475 pushTypeFromWH(env, popC(env));
// AwaitAll: await all wait handles stored in a range of locals. If an
// equivalent, earlier local range exists, rewrite to use it (canonicalizes
// ranges so more blocks compare equal). Pushes null.
5478 void in(ISS& env, const bc::AwaitAll& op) {
5479 auto const equiv = equivLocalRange(env, op.locrange);
5480 if (equiv != op.locrange.first) {
5481 return reduce(
5482 env,
5483 bc::AwaitAll {LocalRange {equiv, op.locrange.count}}
// Record that every local in the range is read by this instruction.
5487 for (uint32_t i = 0; i < op.locrange.count; ++i) {
5488 mayReadLocal(env, op.locrange.first + i);
5491 push(env, TInitNull);
5494 namespace {
// Shared implementation for Idx and ArrayIdx. Pops default value, key, and
// base; pushes the best static approximation of idx(base, key, default).
// arraysOnly = true gives ArrayIdx semantics (non-array-like bases raise),
// false gives Idx semantics (strange bases silently yield the default).
5496 void idxImpl(ISS& env, bool arraysOnly) {
5497 auto const def = popC(env);
5498 auto const key = popC(env);
5499 auto const base = popC(env);
5501 if (key.subtypeOf(BInitNull)) {
5502 // A null key, regardless of whether we're ArrayIdx or Idx will always
5503 // silently return the default value, regardless of the base type.
5504 constprop(env);
5505 effect_free(env);
5506 return push(env, def);
5509 // Push the returned type and annotate effects appropriately, taking into
5510 // account if the base might be null. Allowing for a possibly null base lets
5511 // us capture more cases.
5512 auto const finish = [&] (const Type& t, bool canThrow) {
5513 // A null base will raise if we're ArrayIdx. For Idx, it will silently
5514 // return the default value.
5515 auto const baseMaybeNull = base.couldBe(BInitNull);
5516 if (!canThrow && (!arraysOnly || !baseMaybeNull)) {
5517 constprop(env);
5518 effect_free(env);
5520 if (!arraysOnly && baseMaybeNull) return push(env, union_of(t, def));
5521 if (t.subtypeOf(BBottom)) unreachable(env);
5522 return push(env, t);
5525 if (arraysOnly) {
5526 // If ArrayIdx, we'll raise an error for anything other than array-like and
5527 // null. This op is only terminal if null isn't possible.
5528 if (!base.couldBe(BArr | BVec | BDict | BKeyset | BClsMeth)) {
5529 return finish(key.couldBe(BInitNull) ? def : TBottom, true);
5531 } else if (
5532 !base.couldBe(BArr | BVec | BDict | BKeyset | BStr | BObj | BClsMeth)) {
5533 // Otherwise, any strange bases for Idx will just return the default value
5534 // without raising.
5535 return finish(def, false);
5538 // Helper for Hack arrays. "validKey" is the set key types which can return a
5539 // value from Idx. "silentKey" is the set of key types which will silently
5540 // return null (anything else throws). The Hack array elem functions will
5541 // treat values of "silentKey" as throwing, so we must identify those cases
5542 // and deal with them.
5543 auto const hackArr = [&] (std::pair<Type, ThrowMode> elem,
5544 const Type& validKey,
5545 const Type& silentKey) {
5546 switch (elem.second) {
5547 case ThrowMode::None:
5548 case ThrowMode::MaybeMissingElement:
5549 case ThrowMode::MissingElement:
5550 assertx(key.subtypeOf(validKey));
5551 return finish(elem.first, false);
5552 case ThrowMode::MaybeBadKey:
5553 assertx(key.couldBe(validKey));
// Keys in silentKey fall back to the default instead of throwing here.
5554 if (key.couldBe(silentKey)) elem.first |= def;
5555 return finish(elem.first, !key.subtypeOf(BOptArrKey));
5556 case ThrowMode::BadOperation:
5557 assertx(!key.couldBe(validKey));
5558 return finish(key.couldBe(silentKey) ? def : TBottom, true);
5562 if (base.subtypeOrNull(BVec)) {
5563 // Vecs will throw for any key other than Int, Str, or Null, and will
5564 // silently return the default value for the latter two.
5565 if (key.subtypeOrNull(BStr)) return finish(def, false);
5566 return hackArr(vec_elem(base, key, def), TInt, TOptStr);
5569 if (base.subtypeOfAny(TOptDict, TOptKeyset)) {
5570 // Dicts and keysets will throw for any key other than Int, Str, or Null,
5571 // and will silently return the default value for Null.
5572 auto const elem = base.subtypeOrNull(BDict)
5573 ? dict_elem(base, key, def)
5574 : keyset_elem(base, key, def);
5575 return hackArr(elem, TArrKey, TInitNull);
5578 if (base.subtypeOrNull(BArr)) {
5579 // A possibly null key is more complicated for arrays. array_elem() will
5580 // transform a null key into an empty string (matching the semantics of
5581 // array access), but that's not what Idx does. So, attempt to remove
5582 // nullish from the key first. If we can't, it just means we'll get a more
5583 // conservative value.
5584 auto maybeNull = false;
5585 auto const fixedKey = [&]{
5586 if (key.couldBe(TInitNull)) {
5587 maybeNull = true;
5588 if (is_nullish(key)) return unnullish(key);
5590 return key;
5591 }();
5593 auto elem = array_elem(base, fixedKey, def);
5594 // If the key was null, Idx will return the default value, so add to the
5595 // return type.
5596 if (maybeNull) elem.first |= def;
5598 switch (elem.second) {
5599 case ThrowMode::None:
5600 case ThrowMode::MaybeMissingElement:
5601 case ThrowMode::MissingElement:
5602 return finish(elem.first, false);
5603 case ThrowMode::MaybeBadKey:
5604 return finish(elem.first, true);
5605 case ThrowMode::BadOperation:
5606 always_assert(false);
5610 if (!arraysOnly && base.subtypeOrNull(BStr)) {
5611 // Idx on a string always produces a string or the default value (without
5612 // ever raising).
5613 return finish(union_of(TStr, def), false);
5616 // Objects or other unions of possible bases
5617 push(env, TInitCell);
5622 void in(ISS& env, const bc::Idx&) { idxImpl(env, false); }
5623 void in(ISS& env, const bc::ArrayIdx&) { idxImpl(env, true); }
// CheckProp: when the class is AttrNoOverride this resolves statically to
// False; otherwise the result is an unknown, nothrow boolean.
5625 void in(ISS& env, const bc::CheckProp&) {
5626 if (env.ctx.cls->attrs & AttrNoOverride) {
5627 return reduce(env, bc::False {});
5629 nothrow(env);
5630 push(env, TBool);
// InitProp: initialize a static or non-static property from the value on top
// of the stack (used by 86pinit/86sinit-style initializers). Merges the value
// type into the property state, records whether the initial value satisfies
// the property's type constraint, and — when the value is a known constant
// (or can't contain objects) — bakes it into the property metadata and
// reduces to a plain PopC.
5633 void in(ISS& env, const bc::InitProp& op) {
5634 auto const t = topC(env);
5635 switch (op.subop2) {
5636 case InitPropOp::Static:
5637 mergeSelfProp(env, op.str1, t);
// Static props are publicly visible mutations; record for whole-program
// public static property analysis.
5638 env.collect.publicSPropMutations.merge(
5639 env.index, env.ctx, *env.ctx.cls, sval(op.str1), t, true
5641 break;
5642 case InitPropOp::NonStatic:
5643 mergeThisProp(env, op.str1, t);
5644 break;
5647 for (auto& prop : env.ctx.func->cls->properties) {
5648 if (prop.name != op.str1) continue;
5650 ITRACE(1, "InitProp: {} = {}\n", op.str1, show(t));
5652 if (env.index.satisfies_constraint(env.ctx, t, prop.typeConstraint)) {
5653 prop.attrs |= AttrInitialSatisfiesTC;
5654 } else {
5655 badPropInitialValue(env);
5656 prop.attrs = (Attr)(prop.attrs & ~AttrInitialSatisfiesTC);
5659 auto const v = tv(t);
// Constant value or provably object-free: no deep-init needed; if constant,
// store it as the prop's static value and drop the bytecode to a PopC.
5660 if (v || !could_contain_objects(t)) {
5661 prop.attrs = (Attr)(prop.attrs & ~AttrDeepInit);
5662 if (!v) break;
5663 prop.val = *v;
5664 env.index.update_static_prop_init_val(env.ctx.func->cls, op.str1);
5665 return reduce(env, bc::PopC {});
5669 popC(env);
// Silence (error-suppression @ operator): Start stores the saved error
// reporting level into the local as an Int; End restores it at runtime but
// has no effect on the analyzed state. Never throws.
5672 void in(ISS& env, const bc::Silence& op) {
5673 nothrow(env);
5674 switch (op.subop2) {
5675 case SilenceOp::Start:
5676 setLoc(env, op.loc1, TInt);
5677 break;
5678 case SilenceOp::End:
5679 break;
5683 namespace {
// Shared implementation for MemoGet and MemoGetEager. `rebind` rebuilds the
// op with a different LocalRange so the range can be canonicalized. Returns
// true if the instruction was fully handled (reduced or proven to always
// take the branch), false if the caller still has work to do.
5685 template <typename Op, typename Rebind>
5686 bool memoGetImpl(ISS& env, const Op& op, Rebind&& rebind) {
5687 always_assert(env.ctx.func->isMemoizeWrapper);
5688 always_assert(op.locrange.first + op.locrange.count
5689 <= env.ctx.func->locals.size());
5691 if (will_reduce(env)) {
5692 // If we can use an equivalent, earlier range, then use that instead.
5693 auto const equiv = equivLocalRange(env, op.locrange);
5694 if (equiv != op.locrange.first) {
5695 reduce(env, rebind(LocalRange { equiv, op.locrange.count }));
5696 return true;
5700 auto retTy = memoizeImplRetType(env);
5702 // MemoGet can raise if we give a non arr-key local, or if we're in a method
5703 // and $this isn't available.
5704 auto allArrKey = true;
5705 for (uint32_t i = 0; i < op.locrange.count; ++i) {
5706 allArrKey &= locRaw(env, op.locrange.first + i).subtypeOf(BArrKey);
5708 if (allArrKey &&
5709 (!env.ctx.func->cls ||
5710 (env.ctx.func->attrs & AttrStatic) ||
5711 thisAvailable(env))) {
5712 if (will_reduce(env)) {
// Implementation can never return: the cache can never hold a value, so
// the "found" fallthrough is dead and we always jump to the taken edge.
5713 if (retTy.first.subtypeOf(BBottom)) {
5714 reduce(env);
5715 jmp_setdest(env, op.target1);
5716 return true;
5718 // deal with constprop manually; otherwise we will propagate the
5719 // taken edge and *then* replace the MemoGet with a constant.
5720 if (retTy.second) {
5721 if (auto v = tv(retTy.first)) {
5722 reduce(env, gen_constant(*v));
5723 return true;
5727 nothrow(env);
5730 if (retTy.first == TBottom) {
5731 jmp_setdest(env, op.target1);
5732 return true;
// Cache miss is possible: propagate state along the taken edge and push the
// memoized value type for the fallthrough.
5735 env.propagate(op.target1, &env.state);
5736 push(env, std::move(retTy.first));
5737 return false;
// MemoGet: thin wrapper over memoGetImpl; the rebind lambda reconstructs the
// op with the canonicalized local range.
5742 void in(ISS& env, const bc::MemoGet& op) {
5743 memoGetImpl(
5744 env, op,
5745 [&] (const LocalRange& l) { return bc::MemoGet { op.target1, l }; }
// MemoGetEager: MemoGet variant for async functions with a second (eager)
// target. If memoGetImpl didn't fully handle it, propagate along the eager
// edge and unwrap the wait-handle type for the fallthrough.
5749 void in(ISS& env, const bc::MemoGetEager& op) {
5750 always_assert(env.ctx.func->isAsync && !env.ctx.func->isGenerator);
5752 auto const reduced = memoGetImpl(
5753 env, op,
5754 [&] (const LocalRange& l) {
5755 return bc::MemoGetEager { op.target1, op.target2, l };
5758 if (reduced) return;
5760 env.propagate(op.target2, &env.state);
5761 auto const t = popC(env);
5762 push(
5763 env,
// Eager fallthrough sees the awaited result when the wait-handle type is
// specialized; otherwise we know nothing.
5764 is_specialized_wait_handle(t) ? wait_handle_inner(t) : TInitCell
5768 namespace {
// Shared implementation for MemoSet and MemoSetEager: canonicalize the key
// local range, mark nothrow when the keys are arr-keys and $this (if needed)
// is available, and pass the stored value through the stack unchanged.
5770 template <typename Op>
5771 void memoSetImpl(ISS& env, const Op& op) {
5772 always_assert(env.ctx.func->isMemoizeWrapper);
5773 always_assert(op.locrange.first + op.locrange.count
5774 <= env.ctx.func->locals.size());
5776 // If we can use an equivalent, earlier range, then use that instead.
5777 auto const equiv = equivLocalRange(env, op.locrange);
5778 if (equiv != op.locrange.first) {
5779 return reduce(
5780 env,
5781 Op { LocalRange { equiv, op.locrange.count } }
5785 // MemoSet can raise if we give a non arr-key local, or if we're in a method
5786 // and $this isn't available.
5787 auto allArrKey = true;
5788 for (uint32_t i = 0; i < op.locrange.count; ++i) {
5789 allArrKey &= locRaw(env, op.locrange.first + i).subtypeOf(BArrKey);
5791 if (allArrKey &&
5792 (!env.ctx.func->cls ||
5793 (env.ctx.func->attrs & AttrStatic) ||
5794 thisAvailable(env))) {
5795 nothrow(env);
// MemoSet leaves the value it stored on the stack.
5797 push(env, popC(env));
// MemoSet: store the value into the memo cache; see memoSetImpl.
5802 void in(ISS& env, const bc::MemoSet& op) {
5803 memoSetImpl(env, op);
// MemoSetEager: MemoSet variant only valid in non-generator async functions;
// otherwise identical to MemoSet.
5806 void in(ISS& env, const bc::MemoSetEager& op) {
5807 always_assert(env.ctx.func->isAsync && !env.ctx.func->isGenerator);
5808 memoSetImpl(env, op);
5813 namespace {
5815 //////////////////////////////////////////////////////////////////////
// Dispatch a bytecode to its interp_step::in() handler, generated over all
// opcodes via the OPCODES x-macro.
5817 void dispatch(ISS& env, const Bytecode& op) {
5818 #define O(opcode, ...) case Op::opcode: interp_step::in(env, op.opcode); return;
5819 switch (op.op) { OPCODES }
5820 #undef O
5821 not_reached();
5824 //////////////////////////////////////////////////////////////////////
// Interpret a single bytecode: dispatch it, then post-process the resulting
// flags — attempt constant propagation of the pushed results, and propagate
// state across the throw exit when the instruction can raise.
5826 void interpStep(ISS& env, const Bytecode& bc) {
5827 ITRACE(2, " {} ({})\n",
5828 show(env.ctx.func, bc),
5829 env.unchangedBcs + env.replacedBcs.size());
5830 Trace::Indent _;
5832 // If there are throw exit edges, make a copy of the state (except
5833 // stacks) in case we need to propagate across throw exits (if
5834 // it's a PEI).
5835 if (!env.stateBefore && env.blk.throwExit != NoBlockId) {
5836 env.stateBefore.emplace(with_throwable_only(env.index, env.state));
5839 env.flags = {};
5841 default_dispatch(env, bc);
// The handler replaced itself via reduce(); nothing more to do.
5843 if (env.flags.reduced) return;
// Constant propagation: if every pushed output is a known constant, rewind
// the instruction and replace it with PopC's + literal pushes.
5845 auto const_prop = [&] {
5846 if (!options.ConstantProp || !env.flags.canConstProp) return false;
5848 auto const numPushed = bc.numPush();
5849 TinyVector<Cell> cells;
5851 auto i = size_t{0};
5852 while (i < numPushed) {
5853 auto const v = tv(topT(env, i));
5854 if (!v) return false;
5855 cells.push_back(*v);
5856 ++i;
// A fully const-propped instruction can no longer raise or have effects.
5859 if (env.flags.wasPEI) {
5860 ITRACE(2, " nothrow (due to constprop)\n");
5861 env.flags.wasPEI = false;
5863 if (!env.flags.effectFree) {
5864 ITRACE(2, " effect_free (due to constprop)\n");
5865 env.flags.effectFree = true;
5868 rewind(env, bc);
// Re-pop the instruction's inputs explicitly (only C-flavored pops are
// expected here), then push the constants back with generating bytecodes.
5870 auto const numPop = bc.numPop();
5871 for (auto j = 0; j < numPop; j++) {
5872 switch (bc.popFlavor(j)) {
5873 case Flavor::CVU:
5874 // Note that we only support C's for CVU so far (this only
5875 // comes up with FCallBuiltin)---we'll fail the verifier if
5876 // something changes to send V's or U's through here.
5877 interpStep(env, bc::PopC {});
5878 break;
5879 case Flavor::CU:
5880 // We only support C's for CU right now.
5881 interpStep(env, bc::PopC {});
5882 break;
5883 case Flavor::C:
5884 interpStep(env, bc::PopC {});
5885 break;
5886 case Flavor::V: not_reached();
5887 case Flavor::U: not_reached();
5888 case Flavor::CV: not_reached();
5892 while (i--) {
5893 push(env, from_cell(cells[i]));
5894 record(env, gen_constant(cells[i]));
5896 return true;
5899 if (const_prop()) {
5900 return;
5903 assertx(!env.flags.effectFree || !env.flags.wasPEI);
// Possibly-throwing instruction: merge the pre-instruction state into the
// block's throw-exit successor.
5904 if (env.flags.wasPEI) {
5905 ITRACE(2, " PEI.\n");
5906 if (env.stateBefore) {
5907 env.propagate(env.blk.throwExit, &*env.stateBefore);
5910 env.stateBefore.clear();
5912 record(env, bc);
// Interpret one bytecode, first recording its source location on the
// interpreter state (used when emitting replacement bytecodes).
5915 void interpOne(ISS& env, const Bytecode& bc) {
5916 env.srcLoc = bc.srcLoc;
5917 interpStep(env, bc);
// Speculatively interpret a block to find where control flow goes without
// committing any changes. Returns the successor block if the whole block is
// effect-free and consumes (but never grows past) the incoming stack;
// NoBlockId if speculation has to bail.
5920 BlockId speculate(Interp& interp) {
5921 auto low_water = interp.state.stack.size();
// Mark the collector as speculating for the duration of this call.
5923 interp.collect.opts = interp.collect.opts | CollectionOpts::Speculating;
5924 SCOPE_EXIT {
5925 interp.collect.opts = interp.collect.opts - CollectionOpts::Speculating;
// Any propagate() call during speculation means real state would escape, so
// the propagate hook simply records failure.
5928 auto failed = false;
5929 ISS env { interp, [&] (BlockId, const State*) { failed = true; } };
5931 FTRACE(4, " Speculate B{}\n", interp.bid);
5932 for (auto const& bc : interp.blk->hhbcs) {
5933 assertx(!interp.state.unreachable);
// Track the lowest stack depth reached; CGetL2 peeks one deeper than its
// pop count and Dup one shallower.
5934 auto const numPop = bc.numPop() +
5935 (bc.op == Op::CGetL2 ? 1 :
5936 bc.op == Op::Dup ? -1 : 0);
5937 if (interp.state.stack.size() - numPop < low_water) {
5938 low_water = interp.state.stack.size() - numPop;
5941 interpOne(env, bc);
5942 if (failed) {
5943 env.collect.mInstrState.clear();
5944 FTRACE(3, " Bailing from speculate because propagate was called\n");
5945 return NoBlockId;
5948 auto const& flags = env.flags;
5949 if (!flags.effectFree) {
5950 env.collect.mInstrState.clear();
5951 FTRACE(3, " Bailing from speculate because not effect free\n");
5952 return NoBlockId;
5955 assertx(!flags.returned);
// A jump with the stack back at its low-water mark pins the successor.
5957 if (flags.jmpDest != NoBlockId && interp.state.stack.size() == low_water) {
5958 FTRACE(2, " Speculate found target block {}\n", flags.jmpDest);
5959 return flags.jmpDest;
5963 if (interp.state.stack.size() != low_water) {
5964 FTRACE(3,
5965 " Bailing from speculate because the speculated block "
5966 "left items on the stack\n");
5967 return NoBlockId;
5970 if (interp.blk->fallthrough == NoBlockId) {
5971 FTRACE(3,
5972 " Bailing from speculate because there was no fallthrough");
5973 return NoBlockId;
5976 FTRACE(2, " Speculate found fallthrough block {}\n",
5977 interp.blk->fallthrough);
5979 return interp.blk->fallthrough;
// Starting from successor `orig`, repeatedly speculate through chains of
// dead/trivial blocks to find the ultimate target. If the current block ends
// in control flow and updateTaken is set, rewrite its taken edges to point
// at the new target. Returns the final successor block id.
5982 BlockId speculateHelper(ISS& env, BlockId orig, bool updateTaken) {
5983 assertx(orig != NoBlockId);
5985 if (!will_reduce(env)) return orig;
5987 auto const last = last_op(env);
5988 bool endsInControlFlow = last && instrIsNonCallControlFlow(last->op);
5989 auto target = orig;
5990 auto pops = 0;
5992 if (options.RemoveDeadBlocks) {
// Work on a compacted copy of the state so speculation can't pollute it.
5993 State temp{env.state, State::Compact{}};
5994 while (true) {
5995 auto const func = env.ctx.func;
5996 auto const targetBlk = func->blocks[target].get();
5997 if (!targetBlk->multiPred) break;
// Only chase blocks ending in simple conditional/switch control flow.
5998 auto const ok = [&] {
5999 switch (targetBlk->hhbcs.back().op) {
6000 case Op::JmpZ:
6001 case Op::JmpNZ:
6002 case Op::SSwitch:
6003 case Op::Switch:
6004 return true;
6005 default:
6006 return false;
6008 }();
6010 if (!ok) break;
6012 Interp interp {
6013 env.index, env.ctx, env.collect, target, targetBlk, temp
6016 auto const old_size = temp.stack.size();
6017 auto const new_target = speculate(interp);
6018 if (new_target == NoBlockId) break;
// Track how many stack cells the speculated block consumed; a block
// ending in control flow can't absorb extra pops, so stop there.
6020 const ssize_t delta = old_size - temp.stack.size();
6021 assertx(delta >= 0);
6022 if (delta && endsInControlFlow) break;
6024 pops += delta;
6025 target = new_target;
6026 temp.stack.compact();
6030 if (endsInControlFlow && updateTaken) {
6031 assertx(!pops);
6032 auto needsUpdate = target != orig;
6033 if (!needsUpdate) {
6034 forEachTakenEdge(
6035 *last,
6036 [&] (BlockId bid) {
6037 if (bid != orig) needsUpdate = true;
6041 if (needsUpdate) {
// Retarget every taken edge: edges that matched orig go to the new
// target; any others are severed.
6042 auto& bc = mutate_last_op(env);
6043 forEachTakenEdge(
6045 [&] (BlockId& bid) {
6046 bid = bid == orig ? target : NoBlockId;
// Materialize the pops the skipped blocks would have performed.
6052 while (pops--) {
6053 interpStep(env, bc::PopC {});
6056 return target;
6061 //////////////////////////////////////////////////////////////////////
// Interpret an entire block starting from state `in`, propagating resulting
// states to successors via `propagate`. Handles self-mutation: when an
// instruction rewrites the block (env.reprocess), interpretation restarts
// from the original input state over the unchanged prefix + replacement
// bytecodes. Returns flags describing how the block ended plus the update
// info needed to commit the rewritten bytecode.
6063 RunFlags run(Interp& interp, const State& in, PropagateFn propagate) {
6064 SCOPE_EXIT {
6065 FTRACE(2, "out {}{}\n",
6066 state_string(*interp.ctx.func, interp.state, interp.collect),
6067 property_state_string(interp.collect.props));
6070 auto env = ISS { interp, propagate };
6071 auto ret = RunFlags {};
6072 auto finish = [&] (BlockId fallthrough) {
6073 ret.updateInfo.fallthrough = fallthrough;
6074 ret.updateInfo.unchangedBcs = env.unchangedBcs;
6075 ret.updateInfo.replacedBcs = std::move(env.replacedBcs);
6076 return ret;
// retryBcs holds the replacement tail when the block mutates itself;
// indices below retryOffset read from the original block, at or above it
// from retryBcs.
6079 BytecodeVec retryBcs;
6080 auto retryOffset = interp.blk->hhbcs.size();
6081 auto size = retryOffset;
6082 BlockId retryFallthrough = interp.blk->fallthrough;
6083 size_t idx = 0;
6085 while (true) {
6086 if (idx == size) {
6087 finish_tracked_elems(env, 0);
6088 if (!env.reprocess) break;
// The block rewrote itself: restart from the input state over the
// unchanged prefix plus the replacement bytecodes.
6089 FTRACE(2, " Reprocess mutated block {}\n", interp.bid);
6090 assertx(env.unchangedBcs < retryOffset || env.replacedBcs.size());
6091 retryOffset = env.unchangedBcs;
6092 retryBcs = std::move(env.replacedBcs);
6093 env.unchangedBcs = 0;
6094 env.state.copy_from(in);
6095 env.reprocess = false;
6096 env.replacedBcs.clear();
6097 size = retryOffset + retryBcs.size();
6098 idx = 0;
6099 continue;
6102 auto const& bc = idx < retryOffset ?
6103 interp.blk->hhbcs[idx] : retryBcs[idx - retryOffset];
6104 ++idx;
6106 interpOne(env, bc);
6107 auto const& flags = env.flags;
// In effect-free-only collection mode, bail out entirely at the first
// instruction that isn't effect-free.
6108 if (interp.collect.effectFree && !flags.effectFree) {
6109 interp.collect.effectFree = false;
6110 if (any(interp.collect.opts & CollectionOpts::EffectFreeOnly)) {
6111 env.collect.mInstrState.clear();
6112 FTRACE(2, " Bailing because not effect free\n");
6113 return finish(NoBlockId);
6117 if (flags.returned) {
6118 always_assert(idx == size);
6119 if (env.reprocess) continue;
6121 always_assert(interp.blk->fallthrough == NoBlockId);
6122 assertx(!ret.returned);
6123 FTRACE(2, " returned {}\n", show(*flags.returned));
6124 ret.retParam = flags.retParam;
6125 ret.returned = flags.returned;
6126 return finish(NoBlockId);
6129 if (flags.jmpDest != NoBlockId) {
6130 always_assert(idx == size);
// A taken branch kills the fallthrough only when the final op is real
// control flow; a never-taken branch keeps it.
6131 auto const hasFallthrough = [&] {
6132 if (flags.jmpDest != interp.blk->fallthrough) {
6133 FTRACE(2, " <took branch; no fallthrough>\n");
6134 auto const last = last_op(env);
6135 return !last || !instrIsNonCallControlFlow(last->op);
6136 } else {
6137 FTRACE(2, " <branch never taken>\n");
6138 return true;
6140 }();
6141 if (hasFallthrough) retryFallthrough = flags.jmpDest;
6142 if (env.reprocess) continue;
6143 finish_tracked_elems(env, 0);
6144 auto const newDest = speculateHelper(env, flags.jmpDest, true);
6145 propagate(newDest, &interp.state);
6146 return finish(hasFallthrough ? newDest : NoBlockId);
6149 if (interp.state.unreachable) {
6150 if (env.reprocess) {
6151 idx = size;
6152 continue;
6154 FTRACE(2, " <bytecode fallthrough is unreachable>\n");
6155 finish_tracked_elems(env, 0);
6156 return finish(NoBlockId);
6160 FTRACE(2, " <end block>\n");
6161 if (retryFallthrough != NoBlockId) {
6162 retryFallthrough = speculateHelper(env, retryFallthrough, false);
6163 propagate(retryFallthrough, &interp.state);
6165 return finish(retryFallthrough);
// Interpret a single bytecode in isolation (no successor propagation) and
// return the resulting step flags. Used when callers only need the effects
// of one instruction.
6168 StepFlags step(Interp& interp, const Bytecode& op) {
6169 auto noop = [] (BlockId, const State*) {};
6170 ISS env { interp, noop };
// analyzeDepth > 0 suppresses block-rewriting (reduce) behavior.
6171 env.analyzeDepth++;
6172 dispatch(env, op);
6173 if (env.state.unreachable) {
6174 env.collect.mInstrState.clear();
6176 assertx(env.trackedElems.empty());
6177 return env.flags;
// Dispatch wrapper used by interpStep: first flushes tracked (in-progress)
// array elements that the op would consume, then dispatches, then accounts
// for unconditionally-terminal ops.
6180 void default_dispatch(ISS& env, const Bytecode& op) {
6181 if (!env.trackedElems.empty()) {
// AddElemC/AddNewElemC keep their base tracked (one fewer pop counts
// against tracking); Concat/ConcatN don't disturb tracked elems at all.
6182 auto const pops = [&] () -> uint32_t {
6183 switch (op.op) {
6184 case Op::AddElemC:
6185 case Op::AddNewElemC:
6186 return numPop(op) - 1;
6187 case Op::Concat:
6188 case Op::ConcatN:
6189 return 0;
6190 default:
6191 return numPop(op);
6193 }();
6195 finish_tracked_elems(env, env.state.stack.size() - pops);
6197 dispatch(env, op);
// Ops flagged TF (terminal) with no explicit jump leave no live fallthrough.
6198 if (instrFlags(op.op) & TF && env.flags.jmpDest == NoBlockId) {
6199 unreachable(env);
6200 } else if (env.state.unreachable) {
6201 env.collect.mInstrState.clear();
// Public helper: best-known type of $this for the given context, or none if
// unavailable; forwards to thisTypeFromContext.
6205 folly::Optional<Type> thisType(const Index& index, Context ctx) {
6206 return thisTypeFromContext(index, ctx);
6209 //////////////////////////////////////////////////////////////////////