Improve refineLocation a little
[hiphop-php.git] / hphp / hhbbc / interp.cpp
1 /*
2 +----------------------------------------------------------------------+
3 | HipHop for PHP |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
15 */
16 #include "hphp/hhbbc/interp.h"
18 #include <algorithm>
19 #include <vector>
20 #include <string>
21 #include <iterator>
23 #include <folly/Optional.h>
25 #include "hphp/util/trace.h"
26 #include "hphp/runtime/base/array-init.h"
27 #include "hphp/runtime/base/collections.h"
28 #include "hphp/runtime/base/static-string-table.h"
29 #include "hphp/runtime/base/tv-arith.h"
30 #include "hphp/runtime/base/tv-comparisons.h"
31 #include "hphp/runtime/base/tv-conversions.h"
32 #include "hphp/runtime/base/type-structure.h"
33 #include "hphp/runtime/base/type-structure-helpers-defs.h"
34 #include "hphp/runtime/vm/runtime.h"
35 #include "hphp/runtime/vm/unit-util.h"
37 #include "hphp/runtime/ext/hh/ext_hh.h"
39 #include "hphp/hhbbc/analyze.h"
40 #include "hphp/hhbbc/bc.h"
41 #include "hphp/hhbbc/cfg.h"
42 #include "hphp/hhbbc/class-util.h"
43 #include "hphp/hhbbc/eval-cell.h"
44 #include "hphp/hhbbc/index.h"
45 #include "hphp/hhbbc/interp-state.h"
46 #include "hphp/hhbbc/optimize.h"
47 #include "hphp/hhbbc/representation.h"
48 #include "hphp/hhbbc/type-builtins.h"
49 #include "hphp/hhbbc/type-ops.h"
50 #include "hphp/hhbbc/type-system.h"
51 #include "hphp/hhbbc/unit-util.h"
53 #include "hphp/hhbbc/interp-internal.h"
55 namespace HPHP { namespace HHBBC {
57 //////////////////////////////////////////////////////////////////////
59 namespace {
61 const StaticString s_Throwable("Throwable");
62 const StaticString s_empty("");
63 const StaticString s_construct("__construct");
64 const StaticString s_86ctor("86ctor");
65 const StaticString s_PHP_Incomplete_Class("__PHP_Incomplete_Class");
66 const StaticString s_IMemoizeParam("HH\\IMemoizeParam");
67 const StaticString s_getInstanceKey("getInstanceKey");
68 const StaticString s_Closure("Closure");
69 const StaticString s_byRefWarn("Only variables should be passed by reference");
70 const StaticString s_byRefError("Only variables can be passed by reference");
71 const StaticString s_trigger_error("trigger_error");
74 //////////////////////////////////////////////////////////////////////
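/*
 * Interpret a sequence of bytecodes as a single unit, combining their
 * flags: wasPEI, canConstProp and effectFree are accumulated across the
 * whole sequence. When `reduce` is true (and StrengthReduce is enabled),
 * any strength-reduced or constant-propagated replacement for the
 * sequence is collected into env.flags.strengthReduced so the caller can
 * rewrite the original bytecode.
 */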
76 void impl_vec(ISS& env, bool reduce, std::vector<Bytecode>&& bcs) {
77 std::vector<Bytecode> currentReduction;
78 if (!options.StrengthReduce) reduce = false;
80 env.flags.wasPEI = false;
81 env.flags.canConstProp = true;
82 env.flags.effectFree = true;
84 for (auto it = begin(bcs); it != end(bcs); ++it) {
85 assert(env.flags.jmpDest == NoBlockId &&
86 "you can't use impl with branching opcodes before last position");
88 auto const wasPEI = env.flags.wasPEI;
89 auto const canConstProp = env.flags.canConstProp;
90 auto const effectFree = env.flags.effectFree;
92 FTRACE(3, " (impl {}\n", show(env.ctx.func, *it));
93 env.flags.wasPEI = true;
94 env.flags.canConstProp = false;
95 env.flags.effectFree = false;
96 env.flags.strengthReduced = folly::none;
97 default_dispatch(env, *it);
99 if (env.flags.strengthReduced) {
100 if (instrFlags(env.flags.strengthReduced->back().op) & TF) {
101 unreachable(env);
103 if (reduce) {
104 std::move(begin(*env.flags.strengthReduced),
105 end(*env.flags.strengthReduced),
106 std::back_inserter(currentReduction));
108 } else {
109 if (instrFlags(it->op) & TF) {
110 unreachable(env);
112 auto applyConstProp = [&] {
113 if (env.flags.effectFree && !env.flags.wasPEI) return;
114 auto stk = env.state.stack.end();
115 for (auto i = it->numPush(); i--; ) {
116 --stk;
117 if (!is_scalar(stk->type)) return;
119 env.flags.effectFree = true;
120 env.flags.wasPEI = false;
122 if (reduce) {
123 auto added = false;
124 if (env.flags.canConstProp) {
125 if (env.collect.propagate_constants) {
126 if (env.collect.propagate_constants(*it, env.state,
127 currentReduction)) {
128 added = true;
129 env.flags.canConstProp = false;
130 env.flags.wasPEI = false;
131 env.flags.effectFree = true;
133 } else {
134 applyConstProp();
137 if (!added) currentReduction.push_back(std::move(*it));
138 } else if (env.flags.canConstProp) {
139 applyConstProp();
143 // If any of the opcodes in the impl list said they could throw,
144 // then the whole thing could throw.
145 env.flags.wasPEI = env.flags.wasPEI || wasPEI;
146 env.flags.canConstProp = env.flags.canConstProp && canConstProp;
147 env.flags.effectFree = env.flags.effectFree && effectFree;
148 if (env.state.unreachable) break;
151 if (reduce) {
152 env.flags.strengthReduced = std::move(currentReduction);
153 } else {
154 env.flags.strengthReduced = folly::none;
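/*
 * Walk the equivalence chain of range.first looking for an equivalent
 * LocalRange that starts at a lower local id. Local equivalence can
 * differ by Uninit, so the raw types of every local in the candidate
 * range are compared as well. Returns the best starting local found, or
 * range.first if there is no better one.
 */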
158 LocalId equivLocalRange(ISS& env, const LocalRange& range) {
159 auto bestRange = range.first;
160 auto equivFirst = findLocEquiv(env, range.first);
161 if (equivFirst == NoLocalId) return bestRange;
162 do {
163 if (equivFirst < bestRange) {
164 auto equivRange = [&] {
165 // local equivalency includes differing by Uninit, so we need
166 // to check the types.
167 if (peekLocRaw(env, equivFirst) != peekLocRaw(env, range.first)) {
168 return false;
171 for (uint32_t i = 1; i <= range.restCount; ++i) {
172 if (!locsAreEquiv(env, equivFirst + i, range.first + i) ||
173 peekLocRaw(env, equivFirst + i) !=
174 peekLocRaw(env, range.first + i)) {
175 return false;
179 return true;
180 }();
182 if (equivRange) {
183 bestRange = equivFirst;
186 equivFirst = findLocEquiv(env, equivFirst);
187 assert(equivFirst != NoLocalId);
188 } while (equivFirst != range.first);
190 return bestRange;
193 namespace interp_step {
195 void in(ISS& env, const bc::Nop&) { effect_free(env); }
196 void in(ISS& env, const bc::DiscardClsRef& op) {
197 nothrow(env);
198 takeClsRefSlot(env, op.slot);
200 void in(ISS& env, const bc::PopC&) {
201 nothrow(env);
202 if (!could_run_destructor(popC(env))) effect_free(env);
204 void in(ISS& env, const bc::PopU&) { effect_free(env); popU(env); }
205 void in(ISS& env, const bc::PopV&) { nothrow(env); popV(env); }
206 void in(ISS& env, const bc::PopR&) {
207 auto t = topT(env, 0);
208 if (t.subtypeOf(TCell)) {
209 return reduce(env, bc::UnboxRNop {}, bc::PopC {});
211 nothrow(env);
212 popR(env);
215 void in(ISS& env, const bc::EntryNop&) { effect_free(env); }
217 void in(ISS& env, const bc::Dup& /*op*/) {
218 effect_free(env);
219 auto equiv = topStkEquiv(env);
220 auto val = popC(env);
221 push(env, val, equiv);
222 push(env, std::move(val), StackDupId);
225 void in(ISS& env, const bc::AssertRATL& op) {
226 mayReadLocal(env, op.loc1);
227 effect_free(env);
230 void in(ISS& env, const bc::AssertRATStk&) {
231 effect_free(env);
234 void in(ISS& env, const bc::BreakTraceHint&) { effect_free(env); }
236 void in(ISS& env, const bc::Box&) {
237 effect_free(env);
238 popC(env);
239 push(env, TRef);
242 void in(ISS& env, const bc::BoxR&) {
243 effect_free(env);
244 if (topR(env).subtypeOf(TRef)) {
245 return reduce(env, bc::BoxRNop {});
247 popR(env);
248 push(env, TRef);
251 void in(ISS& env, const bc::Unbox&) {
252 effect_free(env);
253 popV(env);
254 push(env, TInitCell);
257 void in(ISS& env, const bc::UnboxR&) {
258 auto const t = topR(env);
259 if (t.subtypeOf(TInitCell)) return reduce(env, bc::UnboxRNop {});
260 nothrow(env);
261 popT(env);
262 push(env, TInitCell);
265 void in(ISS& env, const bc::RGetCNop&) { effect_free(env); }
267 void in(ISS& env, const bc::CGetCUNop&) {
268 effect_free(env);
269 auto const t = popCU(env);
270 push(env, remove_uninit(t));
273 void in(ISS& env, const bc::UGetCUNop&) {
274 effect_free(env);
275 popCU(env);
276 push(env, TUninit);
279 void in(ISS& env, const bc::UnboxRNop&) {
280 effect_free(env);
281 constprop(env);
282 auto t = popR(env);
283 if (!t.subtypeOf(TInitCell)) t = TInitCell;
284 push(env, std::move(t));
287 void in(ISS& env, const bc::BoxRNop&) {
288 effect_free(env);
289 auto t = popR(env);
290 if (!t.subtypeOf(TRef)) t = TRef;
291 push(env, std::move(t));
294 void in(ISS& env, const bc::Null&) {
295 effect_free(env);
296 push(env, TInitNull);
299 void in(ISS& env, const bc::NullUninit&) {
300 effect_free(env);
301 push(env, TUninit);
304 void in(ISS& env, const bc::True&) {
305 effect_free(env);
306 push(env, TTrue);
309 void in(ISS& env, const bc::False&) {
310 effect_free(env);
311 push(env, TFalse);
314 void in(ISS& env, const bc::Int& op) {
315 effect_free(env);
316 push(env, ival(op.arg1));
319 void in(ISS& env, const bc::Double& op) {
320 effect_free(env);
321 push(env, dval(op.dbl1));
324 void in(ISS& env, const bc::String& op) {
325 effect_free(env);
326 push(env, sval(op.str1));
329 void in(ISS& env, const bc::Array& op) {
330 assert(op.arr1->isPHPArray());
331 assertx(!RuntimeOption::EvalHackArrDVArrs || op.arr1->isNotDVArray());
332 effect_free(env);
333 push(env, aval(op.arr1));
336 void in(ISS& env, const bc::Vec& op) {
337 assert(op.arr1->isVecArray());
338 effect_free(env);
339 push(env, vec_val(op.arr1));
342 void in(ISS& env, const bc::Dict& op) {
343 assert(op.arr1->isDict());
344 effect_free(env);
345 push(env, dict_val(op.arr1));
348 void in(ISS& env, const bc::Keyset& op) {
349 assert(op.arr1->isKeyset());
350 effect_free(env);
351 push(env, keyset_val(op.arr1));
354 void in(ISS& env, const bc::NewArray& op) {
355 push(env, op.arg1 == 0 ?
356 effect_free(env), aempty() : some_aempty());
359 void in(ISS& env, const bc::NewDictArray& op) {
360 push(env, op.arg1 == 0 ?
361 effect_free(env), dict_empty() : some_dict_empty());
364 void in(ISS& env, const bc::NewMixedArray& op) {
365 push(env, op.arg1 == 0 ?
366 effect_free(env), aempty() : some_aempty());
369 void in(ISS& env, const bc::NewPackedArray& op) {
370 auto elems = std::vector<Type>{};
371 elems.reserve(op.arg1);
372 for (auto i = uint32_t{0}; i < op.arg1; ++i) {
373 elems.push_back(std::move(topC(env, op.arg1 - i - 1)));
375 discard(env, op.arg1);
376 push(env, arr_packed(std::move(elems)));
377 constprop(env);
380 void in(ISS& env, const bc::NewVArray& op) {
381 assertx(!RuntimeOption::EvalHackArrDVArrs);
382 auto elems = std::vector<Type>{};
383 elems.reserve(op.arg1);
384 for (auto i = uint32_t{0}; i < op.arg1; ++i) {
385 elems.push_back(std::move(topC(env, op.arg1 - i - 1)));
387 discard(env, op.arg1);
388 push(env, arr_packed_varray(std::move(elems)));
389 constprop(env);
392 void in(ISS& env, const bc::NewDArray& op) {
393 assertx(!RuntimeOption::EvalHackArrDVArrs);
394 push(env, op.arg1 == 0 ?
395 effect_free(env), aempty_darray() : some_aempty_darray());
398 void in(ISS& env, const bc::NewStructArray& op) {
399 auto map = MapElems{};
400 for (auto it = op.keys.end(); it != op.keys.begin(); ) {
401 map.emplace_front(make_tv<KindOfPersistentString>(*--it), popC(env));
403 push(env, arr_map(std::move(map)));
404 constprop(env);
407 void in(ISS& env, const bc::NewStructDArray& op) {
408 assertx(!RuntimeOption::EvalHackArrDVArrs);
409 auto map = MapElems{};
410 for (auto it = op.keys.end(); it != op.keys.begin(); ) {
411 map.emplace_front(make_tv<KindOfPersistentString>(*--it), popC(env));
413 push(env, arr_map_darray(std::move(map)));
414 constprop(env);
417 void in(ISS& env, const bc::NewStructDict& op) {
418 auto map = MapElems{};
419 for (auto it = op.keys.end(); it != op.keys.begin(); ) {
420 map.emplace_front(make_tv<KindOfPersistentString>(*--it), popC(env));
422 push(env, dict_map(std::move(map)));
423 constprop(env);
426 void in(ISS& env, const bc::NewVecArray& op) {
427 auto elems = std::vector<Type>{};
428 elems.reserve(op.arg1);
429 for (auto i = uint32_t{0}; i < op.arg1; ++i) {
430 elems.push_back(std::move(topC(env, op.arg1 - i - 1)));
432 discard(env, op.arg1);
433 constprop(env);
434 push(env, vec(std::move(elems)));
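// Build the keyset type element by element. If every key is a statically
// known constant we can produce a precise keyset_map and const-prop;
// otherwise we fall back to keyset_n of the union of the key types. A key
// that can never be valid makes the instruction unreachable.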
437 void in(ISS& env, const bc::NewKeysetArray& op) {
438 assert(op.arg1 > 0);
439 auto map = MapElems{};
440 auto ty = TBottom;
441 auto useMap = true;
442 auto bad = false;
443 for (auto i = uint32_t{0}; i < op.arg1; ++i) {
444 auto k = disect_strict_key(popC(env));
445 if (k.type == TBottom) {
446 bad = true;
447 useMap = false;
449 if (useMap) {
450 if (auto const v = k.tv()) {
451 map.emplace_front(*v, k.type);
452 } else {
453 useMap = false;
456 ty |= std::move(k.type);
458 if (useMap) {
459 push(env, keyset_map(std::move(map)));
460 constprop(env);
461 } else if (!bad) {
462 push(env, keyset_n(ty));
463 } else {
464 unreachable(env);
465 push(env, TBottom);
469 void in(ISS& env, const bc::NewLikeArrayL& op) {
470 locAsCell(env, op.loc1);
471 push(env, some_aempty());
474 void in(ISS& env, const bc::AddElemC& /*op*/) {
475 auto const v = popC(env);
476 auto const k = popC(env);
478 auto const outTy = [&] (Type ty) ->
479 folly::Optional<std::pair<Type,ThrowMode>> {
480 if (ty.subtypeOf(TArr)) {
481 return array_set(std::move(ty), k, v);
483 if (ty.subtypeOf(TDict)) {
484 return dict_set(std::move(ty), k, v);
486 return folly::none;
487 }(popC(env));
489 if (!outTy) {
490 return push(env, union_of(TArr, TDict));
493 if (outTy->first.subtypeOf(TBottom)) {
494 unreachable(env);
495 } else if (outTy->second == ThrowMode::None) {
496 nothrow(env);
497 if (any(env.collect.opts & CollectionOpts::TrackConstantArrays)) {
498 constprop(env);
501 push(env, std::move(outTy->first));
504 void in(ISS& env, const bc::AddElemV& /*op*/) {
505 popV(env); popC(env);
506 auto const ty = popC(env);
507 auto const outTy =
508 ty.subtypeOf(TArr) ? TArr
509 : ty.subtypeOf(TDict) ? TDict
510 : union_of(TArr, TDict);
511 push(env, outTy);
514 void in(ISS& env, const bc::AddNewElemC&) {
515 auto v = popC(env);
517 auto const outTy = [&] (Type ty) -> folly::Optional<Type> {
518 if (ty.subtypeOf(TArr)) {
519 return array_newelem(std::move(ty), std::move(v)).first;
521 if (ty.subtypeOf(TVec)) {
522 return vec_newelem(std::move(ty), std::move(v)).first;
524 if (ty.subtypeOf(TKeyset)) {
525 return keyset_newelem(std::move(ty), std::move(v)).first;
527 return folly::none;
528 }(popC(env));
530 if (!outTy) {
531 return push(env, TInitCell);
534 if (outTy->subtypeOf(TBottom)) {
535 unreachable(env);
536 } else {
537 if (any(env.collect.opts & CollectionOpts::TrackConstantArrays)) {
538 constprop(env);
541 push(env, std::move(*outTy));
544 void in(ISS& env, const bc::AddNewElemV&) {
545 popV(env);
546 popC(env);
547 push(env, TArr);
550 void in(ISS& env, const bc::NewCol& op) {
551 auto const type = static_cast<CollectionType>(op.subop1);
552 auto const name = collections::typeToString(type);
553 push(env, objExact(env.index.builtin_class(name)));
556 void in(ISS& env, const bc::NewPair& /*op*/) {
557 popC(env); popC(env);
558 auto const name = collections::typeToString(CollectionType::Pair);
559 push(env, objExact(env.index.builtin_class(name)));
562 void in(ISS& env, const bc::ColFromArray& op) {
563 popC(env);
564 auto const type = static_cast<CollectionType>(op.subop1);
565 auto const name = collections::typeToString(type);
566 push(env, objExact(env.index.builtin_class(name)));
569 void doCns(ISS& env, SString str, SString fallback) {
570 if (!options.HardConstProp) return push(env, TInitCell);
572 auto t = env.index.lookup_constant(env.ctx, str, fallback);
573 if (!t) {
574 // There's no entry for this constant in the index. It must be
575 // the first iteration, so we'll add a dummy entry to make sure
576 // there /is/ something next time around.
577 Cell val;
578 val.m_type = kReadOnlyConstant;
579 env.collect.cnsMap.emplace(str, val);
580 t = TInitCell;
581 // make sure we're re-analyzed
582 env.collect.readsUntrackedConstants = true;
583 } else if (t->strictSubtypeOf(TInitCell)) {
584 // constprop will take care of nothrow *if* it's a constant; and if
585 // it's not, we might trigger autoload.
586 constprop(env);
588 push(env, std::move(*t));
591 void in(ISS& env, const bc::Cns& op) { doCns(env, op.str1, nullptr); }
592 void in(ISS& env, const bc::CnsE& op) { doCns(env, op.str1, nullptr); }
593 void in(ISS& env, const bc::CnsU& op) { doCns(env, op.str1, op.str2); }
595 void in(ISS& env, const bc::ClsCns& op) {
596 auto const& t1 = peekClsRefSlot(env, op.slot);
597 if (is_specialized_cls(t1)) {
598 auto const dcls = dcls_of(t1);
599 if (dcls.type == DCls::Exact) {
600 return reduce(env, bc::DiscardClsRef { op.slot },
601 bc::ClsCnsD { op.str1, dcls.cls.name() });
604 takeClsRefSlot(env, op.slot);
605 push(env, TInitCell);
608 void in(ISS& env, const bc::ClsCnsD& op) {
609 if (auto const rcls = env.index.resolve_class(env.ctx, op.str2)) {
610 auto t = env.index.lookup_class_constant(env.ctx, *rcls, op.str1);
611 if (options.HardConstProp) constprop(env);
612 push(env, std::move(t));
613 return;
615 push(env, TInitCell);
618 void in(ISS& env, const bc::File&) { effect_free(env); push(env, TSStr); }
619 void in(ISS& env, const bc::Dir&) { effect_free(env); push(env, TSStr); }
620 void in(ISS& env, const bc::Method&) { effect_free(env); push(env, TSStr); }
622 void in(ISS& env, const bc::ClsRefName& op) {
623 nothrow(env);
624 takeClsRefSlot(env, op.slot);
625 push(env, TSStr);
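// Fold a run of statically known operands of Concat/ConcatN into a single
// static string. If all n operands are known constants the instruction
// reduces to n PopCs plus a String constant; if only the top i operands
// are known, they are folded and a smaller ConcatN is left behind. For
// example, a Concat of two known strings becomes PopC; PopC; String "<folded>".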
628 void concatHelper(ISS& env, uint32_t n) {
629 uint32_t i = 0;
630 StringData* result = nullptr;
631 while (i < n) {
632 auto const t = topC(env, i);
633 auto const v = tv(t);
634 if (!v) break;
635 if (!isStringType(v->m_type) &&
636 v->m_type != KindOfNull &&
637 v->m_type != KindOfBoolean &&
638 v->m_type != KindOfInt64 &&
639 v->m_type != KindOfDouble) {
640 break;
642 auto const cell = eval_cell_value([&] {
643 auto const s = makeStaticString(
644 result ?
645 StringData::Make(tvAsCVarRef(&*v).toString().get(), result) :
646 tvAsCVarRef(&*v).toString().get());
647 return make_tv<KindOfString>(s);
649 if (!cell) break;
650 result = cell->m_data.pstr;
651 i++;
653 if (result && i >= 2) {
654 std::vector<Bytecode> bcs(i, bc::PopC {});
655 bcs.push_back(gen_constant(make_tv<KindOfString>(result)));
656 if (i < n) {
657 bcs.push_back(bc::ConcatN { n - i + 1 });
659 return reduce(env, std::move(bcs));
661 discard(env, n);
662 push(env, TStr);
665 void in(ISS& env, const bc::Concat& /*op*/) {
666 concatHelper(env, 2);
669 void in(ISS& env, const bc::ConcatN& op) {
670 if (op.arg1 == 2) return reduce(env, bc::Concat {});
671 concatHelper(env, op.arg1);
674 template <class Op, class Fun>
675 void arithImpl(ISS& env, const Op& /*op*/, Fun fun) {
676 constprop(env);
677 auto const t1 = popC(env);
678 auto const t2 = popC(env);
679 push(env, fun(t2, t1));
682 void in(ISS& env, const bc::Add& op) { arithImpl(env, op, typeAdd); }
683 void in(ISS& env, const bc::Sub& op) { arithImpl(env, op, typeSub); }
684 void in(ISS& env, const bc::Mul& op) { arithImpl(env, op, typeMul); }
685 void in(ISS& env, const bc::Div& op) { arithImpl(env, op, typeDiv); }
686 void in(ISS& env, const bc::Mod& op) { arithImpl(env, op, typeMod); }
687 void in(ISS& env, const bc::Pow& op) { arithImpl(env, op, typePow); }
688 void in(ISS& env, const bc::BitAnd& op) { arithImpl(env, op, typeBitAnd); }
689 void in(ISS& env, const bc::BitOr& op) { arithImpl(env, op, typeBitOr); }
690 void in(ISS& env, const bc::BitXor& op) { arithImpl(env, op, typeBitXor); }
691 void in(ISS& env, const bc::AddO& op) { arithImpl(env, op, typeAddO); }
692 void in(ISS& env, const bc::SubO& op) { arithImpl(env, op, typeSubO); }
693 void in(ISS& env, const bc::MulO& op) { arithImpl(env, op, typeMulO); }
694 void in(ISS& env, const bc::Shl& op) { arithImpl(env, op, typeShl); }
695 void in(ISS& env, const bc::Shr& op) { arithImpl(env, op, typeShr); }
697 void in(ISS& env, const bc::BitNot& /*op*/) {
698 auto const t = popC(env);
699 auto const v = tv(t);
700 if (v) {
701 constprop(env);
702 auto cell = eval_cell([&] {
703 auto c = *v;
704 cellBitNot(c);
705 return c;
707 if (cell) return push(env, std::move(*cell));
709 push(env, TInitCell);
712 namespace {
714 bool couldBeHackArr(Type t) {
715 return t.couldBe(TVec) || t.couldBe(TDict) || t.couldBe(TKeyset);
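/*
 * Compute the result type of a Same/NSame comparison of the two values on
 * top of the stack. Returns that type together with a bool indicating
 * whether the comparison might raise a Hack array compatibility notice.
 * If both values are tracked to the same local (or to provably equivalent
 * locals, or one is a Dup of the other), the comparison folds to
 * true/false, modulo the NaN corner case for doubles.
 */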
718 template<bool NSame>
719 std::pair<Type,bool> resolveSame(ISS& env) {
720 auto const l1 = topStkEquiv(env, 0);
721 auto const t1 = topC(env, 0);
722 auto const l2 = topStkEquiv(env, 1);
723 auto const t2 = topC(env, 1);
725 auto const mightWarn = [&] {
726 // EvalHackArrCompatNotices will notice on === and !== between PHP arrays
727 // and Hack arrays.
728 if (RuntimeOption::EvalHackArrCompatNotices) {
729 if (t1.couldBe(TArr) && couldBeHackArr(t2)) return true;
730 if (couldBeHackArr(t1) && t2.couldBe(TArr)) return true;
732 if (RuntimeOption::EvalHackArrCompatDVCmpNotices) {
733 if (!t1.couldBe(TArr) || !t2.couldBe(TArr)) return false;
734 if (t1.subtypeOf(TPArr) && t2.subtypeOf(TPArr)) return false;
735 if (t1.subtypeOf(TVArr) && t2.subtypeOf(TVArr)) return false;
736 if (t1.subtypeOf(TDArr) && t2.subtypeOf(TDArr)) return false;
737 return true;
739 return false;
742 auto const result = [&] {
743 auto const v1 = tv(t1);
744 auto const v2 = tv(t2);
746 if (l1 == StackDupId ||
747 (l1 <= MaxLocalId && l2 <= MaxLocalId &&
748 (l1 == l2 || locsAreEquiv(env, l1, l2)))) {
749 if (!t1.couldBe(TDbl) || !t2.couldBe(TDbl) ||
750 (v1 && (v1->m_type != KindOfDouble || !std::isnan(v1->m_data.dbl))) ||
751 (v2 && (v2->m_type != KindOfDouble || !std::isnan(v2->m_data.dbl)))) {
752 return NSame ? TFalse : TTrue;
756 if (v1 && v2) {
757 if (auto r = eval_cell_value([&]{ return cellSame(*v2, *v1); })) {
758 return r != NSame ? TTrue : TFalse;
762 return NSame ? typeNSame(t1, t2) : typeSame(t1, t2);
765 return { result(), mightWarn() };
768 template<bool Negate>
769 void sameImpl(ISS& env) {
770 auto pair = resolveSame<Negate>(env);
771 discard(env, 2);
773 if (!pair.second) {
774 nothrow(env);
775 constprop(env);
778 push(env, std::move(pair.first));
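/*
 * Refine types when a Same/NSame is immediately consumed by a JmpZ/JmpNZ.
 * When at least one operand is tracked to a local, each branch gets a
 * refined type for that location: the "same" branch intersects the two
 * operand types (and may record a local equivalence), while the "differ"
 * branch can strip null or flip a known bool. E.g. for a `$x === null`
 * test feeding a JmpNZ, the taken branch refines $x to null and the
 * fall-through branch removes null from $x's type.
 */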
781 template<class Same, class JmpOp>
782 void sameJmpImpl(ISS& env, const Same& same, const JmpOp& jmp) {
783 auto bail = [&] { impl(env, same, jmp); };
785 constexpr auto NSame = Same::op == Op::NSame;
787 if (resolveSame<NSame>(env).first != TBool) return bail();
789 auto const loc0 = topStkEquiv(env, 0);
790 auto const loc1 = topStkEquiv(env, 1);
791 if (loc0 == NoLocalId && loc1 == NoLocalId) return bail();
793 auto const ty0 = topC(env, 0);
794 auto const ty1 = topC(env, 1);
795 auto const val0 = tv(ty0);
796 auto const val1 = tv(ty1);
798 if ((val0 && val1) ||
799 (loc0 == NoLocalId && !val0 && ty1.subtypeOf(ty0)) ||
800 (loc1 == NoLocalId && !val1 && ty0.subtypeOf(ty1))) {
801 return bail();
804 // We need to loosen away the d/varray bits here because array comparison does
805 // not take into account the difference.
806 auto isect = intersection_of(
807 loosen_dvarrayness(ty0),
808 loosen_dvarrayness(ty1)
810 discard(env, 2);
812 auto handle_same = [&] {
813 // Currently dce uses equivalency to prove that something isn't
814 // the last reference - so we can only assert equivalency here if
815 // we know that won't be affected. It's irrelevant for uncounted
816 // things, and for TObj and TRes, $x === $y iff $x and $y refer to
817 // the same thing.
818 if (loc0 <= MaxLocalId && loc1 <= MaxLocalId &&
819 (ty0.subtypeOfAny(TOptObj, TOptRes) ||
820 ty1.subtypeOfAny(TOptObj, TOptRes) ||
821 (ty0.subtypeOf(TUnc) && ty1.subtypeOf(TUnc)))) {
822 if (loc1 == StackDupId) {
823 setStkLocal(env, loc0);
824 } else {
825 assertx(loc0 != loc1 && !locsAreEquiv(env, loc0, loc1));
826 auto loc = loc0;
827 while (true) {
828 auto const other = findLocEquiv(env, loc);
829 if (other == NoLocalId) break;
830 killLocEquiv(env, loc);
831 addLocEquiv(env, loc, loc1);
832 loc = other;
834 addLocEquiv(env, loc, loc1);
837 return refineLocation(env, loc1 != NoLocalId ? loc1 : loc0, [&] (Type ty) {
838 if (!ty.couldBe(TUninit) || !isect.couldBe(TNull)) {
839 auto ret = intersection_of(std::move(ty), isect);
840 return ty.subtypeOf(TUnc) ? ret : loosen_staticness(ret);
843 if (isect.subtypeOf(TNull)) {
844 return ty.couldBe(TInitNull) ? TNull : TUninit;
847 return ty;
851 auto handle_differ_side = [&] (LocalId location, const Type& ty) {
852 if (!ty.subtypeOf(TInitNull) && !ty.strictSubtypeOf(TBool)) return true;
853 return refineLocation(env, location, [&] (Type t) {
854 if (ty.subtypeOf(TNull)) {
855 t = remove_uninit(std::move(t));
856 if (is_opt(t)) t = unopt(std::move(t));
857 return t;
858 } else if (ty.strictSubtypeOf(TBool) && t.subtypeOf(TBool)) {
859 return ty == TFalse ? TTrue : TFalse;
861 return t;
865 auto handle_differ = [&] {
866 return
867 (loc0 == NoLocalId || handle_differ_side(loc0, ty1)) &&
868 (loc1 == NoLocalId || handle_differ_side(loc1, ty0));
871 auto const sameIsJmpTarget =
872 (Same::op == Op::Same) == (JmpOp::op == Op::JmpNZ);
874 auto save = env.state;
875 if (sameIsJmpTarget ? handle_same() : handle_differ()) {
876 env.propagate(jmp.target, &env.state);
877 } else {
878 jmp_nevertaken(env);
880 env.state = std::move(save);
881 if (!(sameIsJmpTarget ? handle_differ() : handle_same())) {
882 jmp_setdest(env, jmp.target);
886 bc::JmpNZ invertJmp(const bc::JmpZ& jmp) { return bc::JmpNZ { jmp.target }; }
887 bc::JmpZ invertJmp(const bc::JmpNZ& jmp) { return bc::JmpZ { jmp.target }; }
891 template<class Same, class JmpOp>
892 void group(ISS& env, const Same& same, const JmpOp& jmp) {
893 sameJmpImpl(env, same, jmp);
896 template<class Same, class JmpOp>
897 void group(ISS& env, const Same& same, const bc::Not&, const JmpOp& jmp) {
898 sameJmpImpl(env, same, invertJmp(jmp));
901 void in(ISS& env, const bc::Same&) { sameImpl<false>(env); }
902 void in(ISS& env, const bc::NSame&) { sameImpl<true>(env); }
904 template<class Fun>
905 void binOpBoolImpl(ISS& env, Fun fun) {
906 auto const t1 = popC(env);
907 auto const t2 = popC(env);
908 auto const v1 = tv(t1);
909 auto const v2 = tv(t2);
910 if (v1 && v2) {
911 if (auto r = eval_cell_value([&]{ return fun(*v2, *v1); })) {
912 constprop(env);
913 return push(env, *r ? TTrue : TFalse);
916 // TODO_4: evaluate when these can throw, non-constant type stuff.
917 push(env, TBool);
920 template<class Fun>
921 void binOpInt64Impl(ISS& env, Fun fun) {
922 auto const t1 = popC(env);
923 auto const t2 = popC(env);
924 auto const v1 = tv(t1);
925 auto const v2 = tv(t2);
926 if (v1 && v2) {
927 if (auto r = eval_cell_value([&]{ return ival(fun(*v2, *v1)); })) {
928 constprop(env);
929 return push(env, std::move(*r));
932 // TODO_4: evaluate when these can throw, non-constant type stuff.
933 push(env, TInt);
936 void in(ISS& env, const bc::Eq&) {
937 auto rs = resolveSame<false>(env);
938 if (rs.first == TTrue) {
939 if (!rs.second) constprop(env);
940 discard(env, 2);
941 return push(env, TTrue);
943 binOpBoolImpl(env, [&] (Cell c1, Cell c2) { return cellEqual(c1, c2); });
945 void in(ISS& env, const bc::Neq&) {
946 auto rs = resolveSame<false>(env);
947 if (rs.first == TTrue) {
948 if (!rs.second) constprop(env);
949 discard(env, 2);
950 return push(env, TFalse);
952 binOpBoolImpl(env, [&] (Cell c1, Cell c2) { return !cellEqual(c1, c2); });
954 void in(ISS& env, const bc::Lt&) {
955 binOpBoolImpl(env, [&] (Cell c1, Cell c2) { return cellLess(c1, c2); });
957 void in(ISS& env, const bc::Gt&) {
958 binOpBoolImpl(env, [&] (Cell c1, Cell c2) { return cellGreater(c1, c2); });
960 void in(ISS& env, const bc::Lte&) { binOpBoolImpl(env, cellLessOrEqual); }
961 void in(ISS& env, const bc::Gte&) { binOpBoolImpl(env, cellGreaterOrEqual); }
963 void in(ISS& env, const bc::Cmp&) {
964 binOpInt64Impl(env, [&] (Cell c1, Cell c2) { return cellCompare(c1, c2); });
967 void in(ISS& env, const bc::Xor&) {
968 binOpBoolImpl(env, [&] (Cell c1, Cell c2) {
969 return cellToBool(c1) ^ cellToBool(c2);
973 void castBoolImpl(ISS& env, const Type& t, bool negate) {
974 nothrow(env);
975 constprop(env);
977 auto const e = emptiness(t);
978 switch (e) {
979 case Emptiness::Empty:
980 case Emptiness::NonEmpty:
981 return push(env, (e == Emptiness::Empty) == negate ? TTrue : TFalse);
982 case Emptiness::Maybe:
983 break;
986 push(env, TBool);
989 void in(ISS& env, const bc::Not&) {
990 castBoolImpl(env, popC(env), true);
993 void in(ISS& env, const bc::CastBool&) {
994 auto const t = topC(env);
995 if (t.subtypeOf(TBool)) return reduce(env, bc::Nop {});
996 castBoolImpl(env, popC(env), false);
999 void in(ISS& env, const bc::CastInt&) {
1000 constprop(env);
1001 auto const t = topC(env);
1002 if (t.subtypeOf(TInt)) return reduce(env, bc::Nop {});
1003 popC(env);
1004 // Objects can raise a warning about converting to int.
1005 if (!t.couldBe(TObj)) nothrow(env);
1006 if (auto const v = tv(t)) {
1007 auto cell = eval_cell([&] {
1008 return make_tv<KindOfInt64>(cellToInt(*v));
1010 if (cell) return push(env, std::move(*cell));
1012 push(env, TInt);
1015 // Handle a casting operation, where "target" is the type being cast to. If
1016 // "fn" is provided, it will be called to cast any constant inputs. If the
1017 // source type is already a subtype of the destination, the cast is elided
1018 // (reduced to a Nop).
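// E.g. CastDouble on a value already known to be TDbl reduces to Nop, and
// CastDouble applied to a known constant is evaluated at analysis time (via
// tvCastToDoubleInPlace), so the pushed type is that constant double.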
1019 void castImpl(ISS& env, Type target, void(*fn)(TypedValue*)) {
1020 auto const t = topC(env);
1021 if (t.subtypeOf(target)) return reduce(env, bc::Nop {});
1022 popC(env);
1023 if (fn) {
1024 if (auto val = tv(t)) {
1025 if (auto result = eval_cell([&] { fn(&*val); return *val; })) {
1026 constprop(env);
1027 target = *result;
1031 push(env, std::move(target));
1034 void in(ISS& env, const bc::CastDouble&) {
1035 castImpl(env, TDbl, tvCastToDoubleInPlace);
1038 void in(ISS& env, const bc::CastString&) {
1039 castImpl(env, TStr, tvCastToStringInPlace);
1042 void in(ISS& env, const bc::CastArray&) {
1043 castImpl(env, TPArr, tvCastToArrayInPlace);
1046 void in(ISS& env, const bc::CastObject&) { castImpl(env, TObj, nullptr); }
1048 void in(ISS& env, const bc::CastDict&) {
1049 castImpl(env, TDict, tvCastToDictInPlace);
1052 void in(ISS& env, const bc::CastVec&) {
1053 castImpl(env, TVec, tvCastToVecInPlace);
1056 void in(ISS& env, const bc::CastKeyset&) {
1057 castImpl(env, TKeyset, tvCastToKeysetInPlace);
1060 void in(ISS& env, const bc::CastVArray&) {
1061 assertx(!RuntimeOption::EvalHackArrDVArrs);
1062 castImpl(env, TVArr, tvCastToVArrayInPlace);
1065 void in(ISS& env, const bc::CastDArray&) {
1066 assertx(!RuntimeOption::EvalHackArrDVArrs);
1067 castImpl(env, TDArr, tvCastToDArrayInPlace);
1070 void in(ISS& env, const bc::Print& /*op*/) {
1071 popC(env);
1072 push(env, ival(1));
1075 void in(ISS& env, const bc::Clone& /*op*/) {
1076 auto val = popC(env);
1077 if (!val.subtypeOf(TObj)) {
1078 val = is_opt(val) ? unopt(std::move(val)) : TObj;
1080 push(env, std::move(val));
1083 void in(ISS& env, const bc::Exit&) { popC(env); push(env, TInitNull); }
1084 void in(ISS& env, const bc::Fatal&) { popC(env); }
1086 void in(ISS& /*env*/, const bc::JmpNS&) {
1087 always_assert(0 && "blocks should not contain JmpNS instructions");
1090 void in(ISS& /*env*/, const bc::Jmp&) {
1091 always_assert(0 && "blocks should not contain Jmp instructions");
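// JmpZ/JmpNZ implementation. If the emptiness of the popped value is
// statically known, the jump becomes unconditional (always or never taken);
// a jump to the next real block is likewise dropped. If the tested value is
// tracked to a local, both successors get a refined type for it via
// assert_emptiness / assert_nonemptiness.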
1094 template<bool Negate, class JmpOp>
1095 void jmpImpl(ISS& env, const JmpOp& op) {
1096 nothrow(env);
1097 auto const location = topStkEquiv(env);
1098 auto const e = emptiness(popC(env));
1099 if (e == (Negate ? Emptiness::NonEmpty : Emptiness::Empty)) {
1100 jmp_setdest(env, op.target);
1101 env.propagate(op.target, &env.state);
1102 return;
1105 if (e == (Negate ? Emptiness::Empty : Emptiness::NonEmpty)) {
1106 jmp_nevertaken(env);
1107 return;
1110 if (next_real_block(*env.ctx.func, env.blk.fallthrough) ==
1111 next_real_block(*env.ctx.func, op.target)) {
1112 jmp_nevertaken(env);
1113 return;
1116 if (location == NoLocalId) return env.propagate(op.target, &env.state);
1118 auto val = peekLocation(env, location);
1119 assertx(!val.couldBe(TRef)); // we shouldn't have an equivLoc if it was
1121 refineLocation(env, location,
1122 Negate ? assert_nonemptiness : assert_emptiness,
1123 op.target,
1124 Negate ? assert_emptiness : assert_nonemptiness);
1127 void in(ISS& env, const bc::JmpNZ& op) { jmpImpl<true>(env, op); }
1128 void in(ISS& env, const bc::JmpZ& op) { jmpImpl<false>(env, op); }
1130 namespace {
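// Shared logic for IsTypeL/IsTypeC followed by a JmpZ/JmpNZ: when the tested
// value lives in (or is equivalent to) a local, refine that local's type on
// both branches. E.g. `IsTypeL $x Null; JmpNZ` narrows $x to null on the
// taken branch and drops null from $x on the fall-through.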
1132 template<class IsType, class JmpOp>
1133 void isTypeHelper(ISS& env,
1134 IsTypeOp typeOp, LocalId location,
1135 const IsType& istype, const JmpOp& jmp) {
1137 if (typeOp == IsTypeOp::Scalar || typeOp == IsTypeOp::ArrLike) {
1138 return impl(env, istype, jmp);
1141 auto const val = istype.op == Op::IsTypeC ?
1142 topT(env) : locRaw(env, location);
1143 auto const testTy = type_of_istype(typeOp);
1144 if (!val.subtypeOf(TCell) || val.subtypeOf(testTy) || !val.couldBe(testTy)) {
1145 return impl(env, istype, jmp);
1148 if (istype.op == Op::IsTypeC) {
1149 if (!is_type_might_raise(testTy, val)) nothrow(env);
1150 popT(env);
1151 } else if (!locCouldBeUninit(env, location) &&
1152 !is_type_might_raise(testTy, val)) {
1153 nothrow(env);
1156 auto const negate = jmp.op == Op::JmpNZ;
1157 auto const was_true = [&] (Type t) {
1158 if (testTy.subtypeOf(TNull)) return intersection_of(t, TNull);
1159 assertx(!testTy.couldBe(TNull));
1160 return intersection_of(t, testTy);
1162 auto const was_false = [&] (Type t) {
1163 if (testTy.subtypeOf(TNull)) {
1164 t = remove_uninit(std::move(t));
1165 return is_opt(t) ? unopt(t) : t;
1167 if (is_opt(t)) {
1168 if (unopt(t).subtypeOf(testTy)) return TInitNull;
1170 return t;
1173 auto const pre = [&] (Type t) {
1174 return negate ? was_true(std::move(t)) : was_false(std::move(t));
1177 auto const post = [&] (Type t) {
1178 return negate ? was_false(std::move(t)) : was_true(std::move(t));
1181 refineLocation(env, location, pre, jmp.target, post);
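// Rebind local l as a bound static local (a Ref) seeded with `init`.
// Volatile locals are left alone (returns folly::none). If the collected
// local-static types have already pinned this static to a known constant
// value (and we're not in a memoize wrapper or closure body), install that
// type and return the value so the caller can fold the initialization
// check; otherwise return folly::none.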
1184 folly::Optional<Cell> staticLocHelper(ISS& env, LocalId l, Type init) {
1185 if (is_volatile_local(env.ctx.func, l)) return folly::none;
1186 unbindLocalStatic(env, l);
1187 setLocRaw(env, l, TRef);
1188 bindLocalStatic(env, l, std::move(init));
1189 if (!env.ctx.func->isMemoizeWrapper &&
1190 !env.ctx.func->isClosureBody &&
1191 env.collect.localStaticTypes.size() > l) {
1192 auto t = env.collect.localStaticTypes[l];
1193 if (auto v = tv(t)) {
1194 useLocalStatic(env, l);
1195 setLocRaw(env, l, t);
1196 return v;
1199 useLocalStatic(env, l);
1200 return folly::none;
1203 // If the current function is a memoize wrapper, return the inferred return type
1204 // of the function being wrapped.
1205 Type memoizeImplRetType(ISS& env) {
1206 always_assert(env.ctx.func->isMemoizeWrapper);
1208 // Look up the wrapped function. This should always resolve to a precise
1209 // function but we don't rely on it.
1210 auto const memo_impl_func = [&]{
1211 if (env.ctx.func->cls) {
1212 auto const clsTy = selfClsExact(env);
1213 return env.index.resolve_method(
1214 env.ctx,
1215 clsTy ? *clsTy : TCls,
1216 memoize_impl_name(env.ctx.func)
1219 return env.index.resolve_func(env.ctx, memoize_impl_name(env.ctx.func));
1220 }();
1222 // Infer the return type of the wrapped function, taking into account the
1223 // types of the parameters for context sensitive types.
1224 auto const numArgs = env.ctx.func->params.size();
1225 std::vector<Type> args{numArgs};
1226 for (auto i = LocalId{0}; i < numArgs; ++i) {
1227 args[i] = locAsCell(env, i);
1230 // Determine the context the wrapped function will be called on.
1231 auto const ctxType = [&]() -> Type {
1232 if (env.ctx.func->cls) {
1233 if (env.ctx.func->attrs & AttrStatic) {
1234 // The class context for static methods will be the method's class.
1235 auto const clsTy = selfClsExact(env);
1236 return clsTy ? *clsTy : TCls;
1237 } else {
1238 auto const s = thisType(env);
1239 return s ? *s : TObj;
1242 return TBottom;
1243 }();
1245 auto retTy = env.index.lookup_return_type(
1246 CallContext { env.ctx, args, ctxType },
1247 memo_impl_func
1249 // Regardless of anything, we know the return type will be an InitCell (this is
1250 // a requirement of memoize functions).
1251 if (!retTy.subtypeOf(TInitCell)) return TInitCell;
1252 return retTy;
1255 /*
1256 * Propagate a more specific type to the taken/fall-through branches of a jmp
1257 * operation when the jmp is done because of a type test. Given a type `valTy`,
1258 * being tested against the type `testTy`, propagate `failTy` to the branch
1259 * representing test failure, and `testTy` to the branch representing test
1260 * success.
1261 */
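// E.g. in the MemoGet; IsUninit; JmpNZ group below, valTy is the type
// MemoGet produced, testTy is TUninit and failTy is the wrapped function's
// inferred return type: the taken branch sees TUninit on top of the stack
// and the fall-through sees failTy.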
1262 template<class JmpOp>
1263 void typeTestPropagate(ISS& env, Type valTy, Type testTy,
1264 Type failTy, const JmpOp& jmp) {
1265 nothrow(env);
1266 auto const takenOnSuccess = jmp.op == Op::JmpNZ;
1268 if (valTy.subtypeOf(testTy) || failTy.subtypeOf(TBottom)) {
1269 push(env, std::move(valTy));
1270 if (takenOnSuccess) {
1271 jmp_setdest(env, jmp.target);
1272 env.propagate(jmp.target, &env.state);
1273 } else {
1274 jmp_nevertaken(env);
1276 return;
1278 if (!valTy.couldBe(testTy)) {
1279 push(env, failTy);
1280 if (takenOnSuccess) {
1281 jmp_nevertaken(env);
1282 } else {
1283 jmp_setdest(env, jmp.target);
1284 env.propagate(jmp.target, &env.state);
1286 return;
1289 push(env, std::move(takenOnSuccess ? testTy : failTy));
1290 env.propagate(jmp.target, &env.state);
1291 discard(env, 1);
1292 push(env, std::move(takenOnSuccess ? failTy : testTy));
1295 // After a StaticLocCheck, we know the local is bound on the true path,
1296 // and not changed on the false path.
1297 template<class JmpOp>
1298 void staticLocCheckJmpImpl(ISS& env,
1299 const bc::StaticLocCheck& slc,
1300 const JmpOp& jmp) {
1301 auto const takenOnInit = jmp.op == Op::JmpNZ;
1302 auto save = env.state;
1304 if (auto const v = staticLocHelper(env, slc.loc1, TBottom)) {
1305 return impl(env, slc, jmp);
1308 if (env.collect.localStaticTypes.size() > slc.loc1 &&
1309 env.collect.localStaticTypes[slc.loc1].subtypeOf(TBottom)) {
1310 if (takenOnInit) {
1311 env.state = std::move(save);
1312 jmp_nevertaken(env);
1313 } else {
1314 env.propagate(jmp.target, &save);
1315 jmp_setdest(env, jmp.target);
1317 return;
1320 if (takenOnInit) {
1321 env.propagate(jmp.target, &env.state);
1322 env.state = std::move(save);
1323 } else {
1324 env.propagate(jmp.target, &save);
1330 template<class JmpOp>
1331 void group(ISS& env, const bc::StaticLocCheck& slc, const JmpOp& jmp) {
1332 staticLocCheckJmpImpl(env, slc, jmp);
1335 template<class JmpOp>
1336 void group(ISS& env, const bc::StaticLocCheck& slc,
1337 const bc::Not&, const JmpOp& jmp) {
1338 staticLocCheckJmpImpl(env, slc, invertJmp(jmp));
1341 template<class JmpOp>
1342 void group(ISS& env, const bc::IsTypeL& istype, const JmpOp& jmp) {
1343 isTypeHelper(env, istype.subop2, istype.loc1, istype, jmp);
1346 template<class JmpOp>
1347 void group(ISS& env, const bc::IsTypeL& istype,
1348 const bc::Not&, const JmpOp& jmp) {
1349 isTypeHelper(env, istype.subop2, istype.loc1, istype, invertJmp(jmp));
1352 // If we duplicate a value, and then test its type and Jmp based on that result,
1353 // we can narrow the type of the top of the stack. Only do this for null checks
1354 // right now (because it's useful in memoize wrappers).
1355 template<class JmpOp>
1356 void group(ISS& env, const bc::IsTypeC& istype, const JmpOp& jmp) {
1357 auto const location = topStkEquiv(env);
1358 if (location == NoLocalId) return impl(env, istype, jmp);
1359 isTypeHelper(env, istype.subop1, location, istype, jmp);
1362 template<class JmpOp>
1363 void group(ISS& env, const bc::IsTypeC& istype,
1364 const bc::Not& negate, const JmpOp& jmp) {
1365 auto const location = topStkEquiv(env);
1366 if (location == NoLocalId) return impl(env, istype, negate, jmp);
1367 isTypeHelper(env, istype.subop1, location, istype, invertJmp(jmp));
1370 // If we do an IsUninit check and then Jmp based on the check, one branch will
1371 // be the original type minus the Uninit, and the other will be
1372 // Uninit. (IsUninit does not pop the value).
1373 template<class JmpOp>
1374 void group(ISS& env, const bc::IsUninit&, const JmpOp& jmp) {
1375 auto const valTy = popCU(env);
1376 typeTestPropagate(env, valTy, TUninit, remove_uninit(valTy), jmp);
1379 template<class JmpOp>
1380 void group(ISS& env, const bc::IsUninit&, const bc::Not&, const JmpOp& jmp) {
1381 auto const valTy = popCU(env);
1382 typeTestPropagate(env, valTy, TUninit, remove_uninit(valTy), invertJmp(jmp));
1385 // A MemoGet, followed by an IsUninit, followed by a Jmp, can have the type of
1386 // the stack inferred very well. The IsUninit success path will be Uninit and
1387 // the failure path will be the inferred return type of the wrapped
1388 // function. This has to be done as a group and not via individual interp()
1389 // calls because of limitations in HHBBC's type-system. The type that MemoGet
1390 // pushes is the inferred return type of the wrapper function with Uninit added
1391 // in. Unfortunately HHBBC's type-system cannot exactly represent this
1392 // combination, so it gets forced to Cell. By analyzing this triplet as a group,
1393 // we can avoid this loss of type precision.
1394 template <class JmpOp>
1395 void group(ISS& env, const bc::MemoGet& get, const bc::IsUninit& /*isuninit*/,
1396 const JmpOp& jmp) {
1397 impl(env, get);
1398 typeTestPropagate(env, popCU(env), TUninit, memoizeImplRetType(env), jmp);
1401 namespace {
1403 template<class JmpOp>
1404 void instanceOfJmpImpl(ISS& env,
1405 const bc::InstanceOfD& inst,
1406 const JmpOp& jmp) {
1407 auto bail = [&] { impl(env, inst, jmp); };
1409 auto const locId = topStkEquiv(env);
1410 if (locId == NoLocalId || interface_supports_non_objects(inst.str1)) {
1411 return bail();
1413 auto const val = peekLocation(env, locId, 1);
1414 assertx(!val.couldBe(TRef)); // we shouldn't have an equivLoc if it was
1415 auto const rcls = env.index.resolve_class(env.ctx, inst.str1);
1416 if (!rcls) return bail();
1418 auto const instTy = subObj(*rcls);
1419 if (val.subtypeOf(instTy) || !val.couldBe(instTy)) {
1420 return bail();
1423 // If we have an optional type, whose unopt is guaranteed to pass
1424 // the instanceof check, then failing to pass implies it was null.
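// E.g. testing a value known to be ?Foo against `instanceof Foo`: the
// passing branch refines it to Foo and the failing branch refines it to
// null.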
1425 auto const fail_implies_null = is_opt(val) && unopt(val).subtypeOf(instTy);
1427 popC(env);
1428 auto const negate = jmp.op == Op::JmpNZ;
1429 auto const result = [&] (Type t, bool pass) {
1430 return pass ? instTy :
1431 fail_implies_null ? (t.couldBe(TUninit) ? TNull : TInitNull) : t;
1433 auto const pre = [&] (Type t) { return result(t, negate); };
1434 auto const post = [&] (Type t) { return result(t, !negate); };
1435 refineLocation(env, locId, pre, jmp.target, post);
1440 template<class JmpOp>
1441 void group(ISS& env,
1442 const bc::InstanceOfD& inst,
1443 const JmpOp& jmp) {
1444 instanceOfJmpImpl(env, inst, jmp);
1447 template<class JmpOp>
1448 void group(ISS& env,
1449 const bc::InstanceOfD& inst,
1450 const bc::Not&,
1451 const JmpOp& jmp) {
1452 instanceOfJmpImpl(env, inst, invertJmp(jmp));
1455 namespace {
1457 template<class JmpOp>
1458 void isTypeStructJmpImpl(ISS& env,
1459 const bc::IsTypeStruct& inst,
1460 const JmpOp& jmp) {
1461 auto bail = [&] { impl(env, inst, jmp); };
1463 auto const locId = topStkEquiv(env);
1464 if (locId == NoLocalId) return bail();
1466 auto ts_type = type_of_type_structure(inst.arr1);
1467 if (!ts_type) return bail();
1469 // TODO(T26859386): refine if ($x is nonnull) case
1471 popC(env);
1472 auto const negate = jmp.op == Op::JmpNZ;
1473 auto const result = [&] (Type t, bool pass) {
1474 if (!pass) {
1475 if ((ts_type.value()).subtypeOf(TNull) && is_opt(t)) {
1476 return unopt(std::move(t));
1478 return t;
1480 return intersection_of(std::move(t), std::move(ts_type.value()));
1482 auto const pre = [&] (Type t) { return result(std::move(t), negate); };
1483 auto const post = [&] (Type t) { return result(std::move(t), !negate); };
1484 refineLocation(env, locId, pre, jmp.target, post);
1489 template<class JmpOp>
1490 void group(ISS& env,
1491 const bc::IsTypeStruct& inst,
1492 const JmpOp& jmp) {
1493 isTypeStructJmpImpl(env, inst, jmp);
1496 template<class JmpOp>
1497 void group(ISS& env,
1498 const bc::IsTypeStruct& inst,
1499 const bc::Not&,
1500 const JmpOp& jmp) {
1501 isTypeStructJmpImpl(env, inst, invertJmp(jmp));
1504 void in(ISS& env, const bc::Switch& op) {
1505 auto v = tv(popC(env));
1507 if (v) {
1508 auto go = [&] (BlockId blk) {
1509 effect_free(env);
1510 env.propagate(blk, &env.state);
1511 jmp_setdest(env, blk);
1513 auto num_elems = op.targets.size();
1514 if (op.subop1 == SwitchKind::Bounded) {
1515 if (v->m_type == KindOfInt64 &&
1516 v->m_data.num >= 0 && v->m_data.num < num_elems) {
1517 return go(op.targets[v->m_data.num]);
1519 } else {
1520 assertx(num_elems > 2);
1521 num_elems -= 2;
1522 for (auto i = size_t{}; ; i++) {
1523 if (i == num_elems) {
1524 return go(op.targets.back());
1526 auto match = eval_cell_value([&] {
1527 return cellEqual(*v, static_cast<int64_t>(op.arg2 + i));
1529 if (!match) break;
1530 if (*match) {
1531 return go(op.targets[i]);
1537 forEachTakenEdge(op, [&] (BlockId id) {
1538 env.propagate(id, &env.state);
1542 void in(ISS& env, const bc::SSwitch& op) {
1543 auto v = tv(popC(env));
1545 if (v) {
1546 for (auto& kv : op.targets) {
1547 auto match = eval_cell_value([&] {
1548 return !kv.first || cellEqual(*v, kv.first);
1550 if (!match) break;
1551 if (*match) {
1552 effect_free(env);
1553 env.propagate(kv.second, &env.state);
1554 jmp_setdest(env, kv.second);
1555 return;
1560 forEachTakenEdge(op, [&] (BlockId id) {
1561 env.propagate(id, &env.state);
1565 void in(ISS& env, const bc::RetC& /*op*/) {
1566 doRet(env, popC(env), false);
1568 void in(ISS& env, const bc::RetV& /*op*/) {
1569 doRet(env, popV(env), false);
1571 void in(ISS& env, const bc::RetM& op) {
1572 std::vector<Type> ret(op.arg1);
1573 for (int i = 0; i < op.arg1; i++) {
1574 ret[op.arg1 - i - 1] = popC(env);
1576 doRet(env, vec(std::move(ret)), false);
1579 void in(ISS& env, const bc::Unwind&) {
1580 nothrow(env); // Don't propagate to throw edges
1581 for (auto exit : env.blk.unwindExits) {
1582 auto const stackLess = without_stacks(env.state);
1583 env.propagate(exit, &stackLess);
1587 void in(ISS& env, const bc::Throw& /*op*/) {
1588 popC(env);
1591 void in(ISS& env, const bc::Catch&) {
1592 nothrow(env);
1593 return push(env, subObj(env.index.builtin_class(s_Throwable.get())));
1596 void in(ISS& env, const bc::ChainFaults&) {
1597 popC(env);
1600 void in(ISS& env, const bc::NativeImpl&) {
1601 killLocals(env);
1602 mayUseVV(env);
1604 if (is_collection_method_returning_this(env.ctx.cls, env.ctx.func)) {
1605 assert(env.ctx.func->attrs & AttrParamCoerceModeNull);
1606 assert(!(env.ctx.func->attrs & AttrReference));
1607 auto const resCls = env.index.builtin_class(env.ctx.cls->name);
1608 // Can still return null if parameter coercion fails
1609 return doRet(env, union_of(objExact(resCls), TInitNull), true);
1612 if (env.ctx.func->nativeInfo) {
1613 return doRet(env, native_function_return_type(env.ctx.func), true);
1615 doRet(env, TInitGen, true);
1618 void in(ISS& env, const bc::CGetL& op) {
1619 if (op.loc1 == env.state.thisLocToKill) {
1620 return reduce(env, bc::BareThis { BareThisOp::Notice });
1622 if (!locCouldBeUninit(env, op.loc1)) {
1623 nothrow(env);
1624 constprop(env);
1626 push(env, locAsCell(env, op.loc1), op.loc1);
1629 void in(ISS& env, const bc::CGetQuietL& op) {
1630 if (op.loc1 == env.state.thisLocToKill) {
1631 return reduce(env, bc::BareThis { BareThisOp::NoNotice });
1633 nothrow(env);
1634 constprop(env);
1635 push(env, locAsCell(env, op.loc1), op.loc1);
1638 void in(ISS& env, const bc::CUGetL& op) {
1639 auto ty = locRaw(env, op.loc1);
1640 if (ty.subtypeOf(TUninit)) {
1641 return reduce(env, bc::NullUninit {});
1643 nothrow(env);
1644 if (!ty.couldBe(TUninit)) constprop(env);
1645 if (!ty.subtypeOf(TCell)) ty = TCell;
1646 push(env, std::move(ty), op.loc1);
1649 void in(ISS& env, const bc::PushL& op) {
1650 if (auto val = tv(locRaw(env, op.loc1))) {
1651 return reduce(env, gen_constant(*val), bc::UnsetL { op.loc1 });
1653 impl(env, bc::CGetL { op.loc1 }, bc::UnsetL { op.loc1 });
1656 void in(ISS& env, const bc::CGetL2& op) {
1657 // Can't constprop yet because of no INS_1 support in bc.h
1658 if (!locCouldBeUninit(env, op.loc1)) effect_free(env);
1659 auto loc = locAsCell(env, op.loc1);
1660 auto topEquiv = topStkLocal(env);
1661 auto top = popT(env);
1662 push(env, std::move(loc), op.loc1);
1663 push(env, std::move(top), topEquiv);
1666 namespace {
1668 template <typename Op> void common_cgetn(ISS& env) {
1669 auto const t1 = topC(env);
1670 auto const v1 = tv(t1);
1671 if (v1 && v1->m_type == KindOfPersistentString) {
1672 auto const loc = findLocal(env, v1->m_data.pstr);
1673 if (loc != NoLocalId) {
1674 return reduce(env, bc::PopC {}, Op { loc });
1677 readUnknownLocals(env);
1678 mayUseVV(env);
1679 popC(env); // conversion to string can throw
1680 push(env, TInitCell);
1685 void in(ISS& env, const bc::CGetN&) { common_cgetn<bc::CGetL>(env); }
1686 void in(ISS& env, const bc::CGetQuietN&) { common_cgetn<bc::CGetQuietL>(env); }
1688 void in(ISS& env, const bc::CGetG&) { popC(env); push(env, TInitCell); }
1689 void in(ISS& env, const bc::CGetQuietG&) { popC(env); push(env, TInitCell); }
1691 void in(ISS& env, const bc::CGetS& op) {
1692 auto const tcls = takeClsRefSlot(env, op.slot);
1693 auto const tname = popC(env);
1694 auto const vname = tv(tname);
1695 auto const self = selfCls(env);
1697 if (vname && vname->m_type == KindOfPersistentString &&
1698 self && tcls.subtypeOf(*self)) {
1699 if (auto ty = selfPropAsCell(env, vname->m_data.pstr)) {
1700 // Only nothrow when we know it's a private declared property
1701 // (and thus accessible here).
1702 nothrow(env);
1704 // We can only constprop here if we know for sure this is exactly the
1705 // correct class. The reason for this is that you could have an LSB class
1706 // attempting to access a private static in a derived class with the same
1707 // name as a private static in this class, which is supposed to fatal at
1708 // runtime (for an example see test/quick/static_sprop2.php).
1709 auto const selfExact = selfClsExact(env);
1710 if (selfExact && tcls.subtypeOf(*selfExact)) {
1711 constprop(env);
1714 return push(env, std::move(*ty));
1718 auto indexTy = env.index.lookup_public_static(tcls, tname);
1719 if (indexTy.subtypeOf(TInitCell)) {
1720 /*
1721 * Constant propagation here can change when we invoke autoload, so it's
1722 * considered HardConstProp. It's safe not to check anything about private
1723 * or protected static properties, because you can't override a public
1724 * static property with a private or protected one---if the index gave us
1725 * back a constant type, it's because it found a public static and it must
1726 * be the property this would have read dynamically.
1727 */
1728 if (options.HardConstProp) constprop(env);
1729 return push(env, std::move(indexTy));
1732 push(env, TInitCell);
1735 void in(ISS& env, const bc::VGetL& op) {
1736 nothrow(env);
1737 setLocRaw(env, op.loc1, TRef);
1738 push(env, TRef);
1741 void in(ISS& env, const bc::VGetN&) {
1742 auto const t1 = topC(env);
1743 auto const v1 = tv(t1);
1744 if (v1 && v1->m_type == KindOfPersistentString) {
1745 auto const loc = findLocal(env, v1->m_data.pstr);
1746 if (loc != NoLocalId) {
1747 return reduce(env, bc::PopC {},
1748 bc::VGetL { loc });
1751 modifyLocalStatic(env, NoLocalId, TRef);
1752 popC(env);
1753 boxUnknownLocal(env);
1754 mayUseVV(env);
1755 push(env, TRef);
1758 void in(ISS& env, const bc::VGetG&) { popC(env); push(env, TRef); }
1760 void in(ISS& env, const bc::VGetS& op) {
1761 auto const tcls = takeClsRefSlot(env, op.slot);
1762 auto const tname = popC(env);
1763 auto const vname = tv(tname);
1764 auto const self = selfCls(env);
1766 if (!self || tcls.couldBe(*self)) {
1767 if (vname && vname->m_type == KindOfPersistentString) {
1768 boxSelfProp(env, vname->m_data.pstr);
1769 } else {
1770 killSelfProps(env);
1774 if (auto c = env.collect.publicStatics) {
1775 c->merge(env.ctx, tcls, tname, TRef);
1778 push(env, TRef);
1781 void clsRefGetImpl(ISS& env, Type t1, ClsRefSlotId slot) {
1782 auto cls = [&]{
1783 if (t1.subtypeOf(TObj)) {
1784 nothrow(env);
1785 return objcls(t1);
1787 auto const v1 = tv(t1);
1788 if (v1 && v1->m_type == KindOfPersistentString) {
1789 if (auto const rcls = env.index.resolve_class(env.ctx, v1->m_data.pstr)) {
1790 return clsExact(*rcls);
1793 return TCls;
1794 }();
1795 putClsRefSlot(env, slot, std::move(cls));
1798 void in(ISS& env, const bc::ClsRefGetL& op) {
1799 if (op.loc1 == env.state.thisLocToKill) {
1800 return reduce(env,
1801 bc::BareThis { BareThisOp::Notice },
1802 bc::ClsRefGetC { op.slot });
1804 clsRefGetImpl(env, locAsCell(env, op.loc1), op.slot);
1806 void in(ISS& env, const bc::ClsRefGetC& op) {
1807 clsRefGetImpl(env, popC(env), op.slot);
1810 void in(ISS& env, const bc::AKExists& /*op*/) {
1811 auto const t1 = popC(env);
1812 auto const t2 = popC(env);
1814 auto const mayThrow = [&]{
1815 if (!t1.subtypeOfAny(TObj, TArr, TVec, TDict, TKeyset)) return true;
1816 if (t2.subtypeOfAny(TStr, TNull)) {
1817 return t1.subtypeOfAny(TObj, TArr) &&
1818 RuntimeOption::EvalHackArrCompatNotices;
1820 if (t2.subtypeOf(TInt)) return false;
1821 return true;
1822 }();
1824 if (!mayThrow) nothrow(env);
1825 push(env, TBool);
1828 void in(ISS& env, const bc::GetMemoKeyL& op) {
1829 always_assert(env.ctx.func->isMemoizeWrapper);
1831 auto const tyIMemoizeParam =
1832 subObj(env.index.builtin_class(s_IMemoizeParam.get()));
1834 auto const inTy = locAsCell(env, op.loc1);
1836 // If the local could be uninit, we might raise a warning (as
1837 // usual). Converting an object to a memo key might invoke PHP code if it has
1838 // the IMemoizeParam interface, and if it doesn't, we'll throw.
1839 if (!locCouldBeUninit(env, op.loc1) && !inTy.couldBe(TObj)) {
1840 nothrow(env); constprop(env);
1843 // If type constraints are being enforced and the local being turned into a
1844 // memo key is a parameter, then we can possibly use the type constraint to
1845 // perform a more efficient memoization scheme. Note that this all needs to
1846 // stay in sync with the interpreter and JIT.
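// E.g. a parameter constrained to int memoizes on the int itself (just a
// CGetL), a bool parameter is cast to an int key, and a nullable bool maps
// to an int key where null becomes 2 (see the cases below).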
1847 using MK = MemoKeyConstraint;
1848 auto const mkc = [&] {
1849 if (!RuntimeOption::EvalHardTypeHints) return MK::None;
1850 if (op.loc1 >= env.ctx.func->params.size()) return MK::None;
1851 auto tc = env.ctx.func->params[op.loc1].typeConstraint;
1852 if (tc.type() == AnnotType::Object) {
1853 auto res = env.index.resolve_type_name(tc.typeName());
1854 if (res.type != AnnotType::Object) {
1855 tc.resolveType(res.type, res.nullable || tc.isNullable());
1858 return memoKeyConstraintFromTC(tc);
1859 }();
1861 switch (mkc) {
1862 case MK::Null:
1863 // Always null, so the key can always just be 0
1864 always_assert(inTy.subtypeOf(TNull));
1865 return push(env, ival(0));
1866 case MK::Int:
1867 // Always an int, so the key is always an identity mapping
1868 always_assert(inTy.subtypeOf(TInt));
1869 return reduce(env, bc::CGetL { op.loc1 });
1870 case MK::Bool:
1871 // Always a bool, so the key is the bool cast to an int
1872 always_assert(inTy.subtypeOf(TBool));
1873 return reduce(env, bc::CGetL { op.loc1 }, bc::CastInt {});
1874 case MK::Str:
1875 // Always a string, so the key is always an identity mapping
1876 always_assert(inTy.subtypeOf(TStr));
1877 return reduce(env, bc::CGetL { op.loc1 });
1878 case MK::IntOrStr:
1879 // Either an int or string, so the key can be an identity mapping
1880 return reduce(env, bc::CGetL { op.loc1 });
1881 case MK::StrOrNull:
1882 case MK::IntOrNull:
1883 // A nullable string or int. For strings the key will always be 0 or the
1884 // string. For ints the key will be the int or a static string. We can't
1885 // reduce either without introducing control flow.
1886 return push(env, union_of(TInt, TStr));
1887 case MK::BoolOrNull:
1888 // A nullable bool. The key will always be an int (null will be 2), but we
1889 // can't reduce that without introducing control flow.
1890 return push(env, TInt);
1891 case MK::None:
1892 break;
1895 // No type constraint, or one that isn't usable. Use the generic memoization
1896 // scheme which can handle any type:
1898 // Integer keys are always mapped to themselves
1899 if (inTy.subtypeOf(TInt)) return reduce(env, bc::CGetL { op.loc1 });
1901 if (inTy.subtypeOf(tyIMemoizeParam)) {
1902 return reduce(
1903 env,
1904 bc::CGetL { op.loc1 },
1905 bc::FPushObjMethodD {
1907 s_getInstanceKey.get(),
1908 ObjMethodOp::NullThrows,
1909 false
1911 bc::FCall { 0 },
1912 bc::UnboxR {}
1916 // A memo key can be an integer if the input might be an integer, and is a
1917 // string otherwise. Booleans are always static strings.
1918 auto keyTy = [&]{
1919 if (auto const val = tv(inTy)) {
1920 auto const key = eval_cell(
1921 [&]{ return HHVM_FN(serialize_memoize_param)(*val); }
1923 if (key) return *key;
1925 if (inTy.subtypeOf(TBool)) return TSStr;
1926 if (inTy.couldBe(TInt)) return union_of(TInt, TStr);
1927 return TStr;
1928 }();
1929 push(env, std::move(keyTy));
1932 void in(ISS& env, const bc::IssetL& op) {
1933 if (op.loc1 == env.state.thisLocToKill) {
1934 return reduce(env,
1935 bc::BareThis { BareThisOp::NoNotice },
1936 bc::IsTypeC { IsTypeOp::Null },
1937 bc::Not {});
1939 nothrow(env);
1940 constprop(env);
1941 auto const loc = locAsCell(env, op.loc1);
1942 if (loc.subtypeOf(TNull)) return push(env, TFalse);
1943 if (!loc.couldBe(TNull)) return push(env, TTrue);
1944 push(env, TBool);
1947 void in(ISS& env, const bc::EmptyL& op) {
1948 nothrow(env);
1949 constprop(env);
1950 castBoolImpl(env, locAsCell(env, op.loc1), true);
1953 void in(ISS& env, const bc::EmptyS& op) {
1954 takeClsRefSlot(env, op.slot);
1955 popC(env);
1956 push(env, TBool);
1959 void in(ISS& env, const bc::IssetS& op) {
1960 auto const tcls = takeClsRefSlot(env, op.slot);
1961 auto const tname = popC(env);
1962 auto const vname = tv(tname);
1963 auto const self = selfCls(env);
1965 if (self && tcls.subtypeOf(*self) &&
1966 vname && vname->m_type == KindOfPersistentString) {
1967 if (auto const t = selfPropAsCell(env, vname->m_data.pstr)) {
1968 if (t->subtypeOf(TNull)) { constprop(env); return push(env, TFalse); }
1969 if (!t->couldBe(TNull)) { constprop(env); return push(env, TTrue); }
1973 auto const indexTy = env.index.lookup_public_static(tcls, tname);
1974 if (indexTy.subtypeOf(TInitCell)) {
1975 // See the comments in CGetS about constprop for public statics.
1976 if (options.HardConstProp) constprop(env);
1977 if (indexTy.subtypeOf(TNull)) { return push(env, TFalse); }
1978 if (!indexTy.couldBe(TNull)) { return push(env, TTrue); }
1981 push(env, TBool);
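// Shared analysis for IssetN/EmptyN. If the name on the stack is a known
// static string naming a local, reduce to the corresponding local-based op
// (IssetL/EmptyL); otherwise we have to assume any local may be read (and a
// VarEnv may be used) and just push TBool.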
1984 template<class ReduceOp>
1985 void issetEmptyNImpl(ISS& env) {
1986 auto const t1 = topC(env);
1987 auto const v1 = tv(t1);
1988 if (v1 && v1->m_type == KindOfPersistentString) {
1989 auto const loc = findLocal(env, v1->m_data.pstr);
1990 if (loc != NoLocalId) {
1991 return reduce(env, bc::PopC {}, ReduceOp { loc });
1993     // Can't push true in the case where findLocal fails, unless we know
1994     // whether this function can have a VarEnv.
1996 readUnknownLocals(env);
1997 mayUseVV(env);
1998 popC(env);
1999 push(env, TBool);
2002 void in(ISS& env, const bc::IssetN&) { issetEmptyNImpl<bc::IssetL>(env); }
2003 void in(ISS& env, const bc::EmptyN&) { issetEmptyNImpl<bc::EmptyL>(env); }
2004 void in(ISS& env, const bc::EmptyG&) { popC(env); push(env, TBool); }
2005 void in(ISS& env, const bc::IssetG&) { popC(env); push(env, TBool); }
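// Generic is_xxx analysis: fold to TTrue/TFalse when the subtype/couldBe
// relationship between the value and the tested type is statically decided,
// otherwise push TBool. Constant propagation is only allowed when the check
// cannot raise a notice (is_type_might_raise).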
2007 void isTypeImpl(ISS& env, const Type& locOrCell, const Type& test) {
2008 if (!is_type_might_raise(test, locOrCell)) constprop(env);
2009 if (locOrCell.subtypeOf(test)) return push(env, TTrue);
2010 if (!locOrCell.couldBe(test)) return push(env, TFalse);
2011 push(env, TBool);
2014 void isTypeObj(ISS& env, const Type& ty) {
2015 if (!ty.couldBe(TObj)) return push(env, TFalse);
2016 if (ty.subtypeOf(TObj)) {
2017 auto const incompl = objExact(
2018 env.index.builtin_class(s_PHP_Incomplete_Class.get()));
2019 if (!ty.couldBe(incompl)) return push(env, TTrue);
2020 if (ty.subtypeOf(incompl)) return push(env, TFalse);
2022 push(env, TBool);
2025 void isTypeArrLike(ISS& env, const Type& ty) {
2026 if (ty.subtypeOfAny(TArr, TVec, TDict, TKeyset)) return push(env, TTrue);
2027 if (!ty.couldBeAny(TArr, TVec, TDict, TKeyset)) return push(env, TFalse);
2028 push(env, TBool);
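// Shared bodies for IsTypeL/IsTypeC. The local variant is nothrow and
// const-propagatable only when the local cannot be uninit; Scalar checks
// always yield TBool, while Obj and ArrLike use the specialized helpers
// above.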
2031 template<class Op>
2032 void isTypeLImpl(ISS& env, const Op& op) {
2033 if (!locCouldBeUninit(env, op.loc1)) { nothrow(env); constprop(env); }
2034 auto const loc = locAsCell(env, op.loc1);
2035 switch (op.subop2) {
2036 case IsTypeOp::Scalar: return push(env, TBool);
2037 case IsTypeOp::Obj: return isTypeObj(env, loc);
2038 case IsTypeOp::ArrLike: return isTypeArrLike(env, loc);
2039 default: return isTypeImpl(env, loc, type_of_istype(op.subop2));
2043 template<class Op>
2044 void isTypeCImpl(ISS& env, const Op& op) {
2045 nothrow(env);
2046 auto const t1 = popC(env);
2047 switch (op.subop1) {
2048 case IsTypeOp::Scalar: return push(env, TBool);
2049 case IsTypeOp::Obj: return isTypeObj(env, t1);
2050 case IsTypeOp::ArrLike: return isTypeArrLike(env, t1);
2051 default: return isTypeImpl(env, t1, type_of_istype(op.subop1));
2055 void in(ISS& env, const bc::IsTypeC& op) { isTypeCImpl(env, op); }
2056 void in(ISS& env, const bc::IsTypeL& op) { isTypeLImpl(env, op); }
2058 void in(ISS& env, const bc::IsUninit& /*op*/) {
2059 nothrow(env);
2060 push(env, popCU(env));
2061 isTypeImpl(env, topT(env), TUninit);
2064 void in(ISS& env, const bc::MaybeMemoType& /*op*/) {
2065 always_assert(env.ctx.func->isMemoizeWrapper);
2066 nothrow(env);
2067 constprop(env);
2068 auto const memoTy = memoizeImplRetType(env);
2069 auto const ty = popC(env);
2070 push(env, ty.couldBe(memoTy) ? TTrue : TFalse);
2073 void in(ISS& env, const bc::IsMemoType& /*op*/) {
2074 always_assert(env.ctx.func->isMemoizeWrapper);
2075 nothrow(env);
2076 constprop(env);
2077 auto const memoTy = memoizeImplRetType(env);
2078 auto const ty = popC(env);
2079 push(env, memoTy.subtypeOf(ty) ? TTrue : TFalse);
2082 void in(ISS& env, const bc::InstanceOfD& op) {
2083 auto t1 = topC(env);
2084 // Note: InstanceOfD can do autoload if the type might be a type
2085 // alias, so it's not nothrow unless we know it's an object type.
2086 if (auto const rcls = env.index.resolve_class(env.ctx, op.str1)) {
2087 auto result = [&] (const Type& r) {
2088 nothrow(env);
2089 if (r != TBool) constprop(env);
2090 popC(env);
2091 push(env, r);
2093 if (!interface_supports_non_objects(rcls->name())) {
2094 auto testTy = subObj(*rcls);
2095 if (t1.subtypeOf(testTy)) return result(TTrue);
2096 if (!t1.couldBe(testTy)) return result(TFalse);
2097 if (is_opt(t1)) {
2098 t1 = unopt(std::move(t1));
2099 if (t1.subtypeOf(testTy)) {
2100 return reduce(env, bc::IsTypeC { IsTypeOp::Null }, bc::Not {});
2103 return result(TBool);
2106 popC(env);
2107 push(env, TBool);
2110 void in(ISS& env, const bc::InstanceOf& /*op*/) {
2111 auto const t1 = topC(env);
2112 auto const v1 = tv(t1);
2113 if (v1 && v1->m_type == KindOfPersistentString) {
2114 return reduce(env, bc::PopC {},
2115 bc::InstanceOfD { v1->m_data.pstr });
2118 if (t1.subtypeOf(TObj) && is_specialized_obj(t1)) {
2119 auto const dobj = dobj_of(t1);
2120 switch (dobj.type) {
2121 case DObj::Sub:
2122 break;
2123 case DObj::Exact:
2124 return reduce(env, bc::PopC {},
2125 bc::InstanceOfD { dobj.cls.name() });
2129 popC(env);
2130 popC(env);
2131 push(env, TBool);
2134 namespace {
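// Whitelist of IsTypeOps the is/as type-structure analysis below may reduce
// to: array-like, resource, Scalar, and Uninit checks are deliberately
// excluded.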
2136 bool isValidTypeOpForIsAs(const IsTypeOp& op) {
2137 switch (op) {
2138 case IsTypeOp::Null:
2139 case IsTypeOp::Bool:
2140 case IsTypeOp::Int:
2141 case IsTypeOp::Dbl:
2142 case IsTypeOp::Str:
2143 case IsTypeOp::Obj:
2144 return true;
2145 case IsTypeOp::Res:
2146 case IsTypeOp::Arr:
2147 case IsTypeOp::Vec:
2148 case IsTypeOp::Dict:
2149 case IsTypeOp::Keyset:
2150 case IsTypeOp::VArray:
2151 case IsTypeOp::DArray:
2152 case IsTypeOp::ArrLike:
2153 case IsTypeOp::Scalar:
2154 case IsTypeOp::Uninit:
2155 return false;
2157 not_reached();
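// Shared analysis for IsTypeStruct ("$x is T") and AsTypeStruct ("$x as T").
// For the "is" form we try to fold the result to TTrue/TFalse or reduce to a
// cheaper IsTypeC/InstanceOfD check; for the "as" form (asExpression) a check
// that must fail makes the rest of the block unreachable, while a check that
// may succeed lets us refine the type at the value's location.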
2160 template<bool asExpression>
2161 void isAsTypeStructImpl(ISS& env, SArray ts) {
2162 auto const t = topC(env);
2164 auto result = [&] (
2165 const Type& out,
2166 const folly::Optional<Type>& test = folly::none
2168 auto const location = topStkEquiv(env);
2169 popC(env);
2170 if (!asExpression) return push(env, out);
2171 if (out.subtypeOf(TTrue)) {
2172 constprop(env);
2173 push(env, t);
2174 return reduce(env, bc::Nop {});
2176 if (out.subtypeOf(TFalse)) {
2177 push(env, t);
2178 return unreachable(env);
2181 assertx(out == TBool);
2182 if (!test) return push(env, t);
2183 auto const newT = intersection_of(*test, t);
2184 if (newT == TBottom || !refineLocation(env, location, [&] (Type t) {
2185 auto ret = intersection_of(*test, t);
2186 if (test->couldBe(TInitNull) && t.couldBe(TUninit)) {
2187 ret |= TUninit;
2189 return ret;
2190 })) {
2191 unreachable(env);
2193 return push(env, newT);
2196 auto check = [&] (
2197 const folly::Optional<Type> type,
2198 const folly::Optional<Type> deopt = folly::none
2200 if (!type) return result(TBool);
2201 auto const test = type.value();
2202 if (t.subtypeOf(test)) return result(TTrue);
2203 if (!t.couldBe(test) && (!deopt || !t.couldBe(deopt.value()))) {
2204 return result(TFalse);
2206 auto const op = type_to_istypeop(test);
2207 if (asExpression || !op || !isValidTypeOpForIsAs(op.value())) {
2208 return result(TBool, test);
2210 return reduce(env, bc::IsTypeC { *op });
2213 auto const is_nullable_ts = is_ts_nullable(ts);
2214 auto const is_definitely_null = t.subtypeOf(TNull);
2215 auto const is_definitely_not_null = !t.couldBe(TNull);
2217 if (is_nullable_ts && is_definitely_null) return result(TTrue);
2219 auto const ts_type = type_of_type_structure(ts);
2221 if (is_nullable_ts && !is_definitely_not_null && ts_type == folly::none) {
2222     // The type structure is nullable and t could be null, but we don't know
2223     // for sure. We also didn't get a type out of the type structure.
2224 return result(TBool);
2227 if (!asExpression) constprop(env);
2228 switch (get_ts_kind(ts)) {
2229 case TypeStructure::Kind::T_int:
2230 case TypeStructure::Kind::T_bool:
2231 case TypeStructure::Kind::T_float:
2232 case TypeStructure::Kind::T_string:
2233 case TypeStructure::Kind::T_num:
2234 case TypeStructure::Kind::T_arraykey:
2235 case TypeStructure::Kind::T_keyset:
2236 case TypeStructure::Kind::T_void:
2237 case TypeStructure::Kind::T_tuple:
2238 return RuntimeOption::EvalHackArrCompatIsArrayNotices
2239 ? check(ts_type, TDArr)
2240 : check(ts_type);
2241 case TypeStructure::Kind::T_shape:
2242 return RuntimeOption::EvalHackArrCompatIsArrayNotices
2243 ? check(ts_type, TVArr)
2244 : check(ts_type);
2245 case TypeStructure::Kind::T_dict:
2246 return check(ts_type, TDArr);
2247 case TypeStructure::Kind::T_vec:
2248 return check(ts_type, TVArr);
2249 case TypeStructure::Kind::T_noreturn:
2250 return result(TFalse);
2251 case TypeStructure::Kind::T_mixed:
2252 return result(TTrue);
2253 case TypeStructure::Kind::T_nonnull:
2254 if (is_definitely_null) return result(TFalse);
2255 if (is_definitely_not_null) return result(TTrue);
2256 if (!asExpression) {
2257 return reduce(env, bc::IsTypeC { IsTypeOp::Null }, bc::Not {});
2259 return result(TBool);
2260 case TypeStructure::Kind::T_class:
2261 case TypeStructure::Kind::T_interface: {
2262 if (asExpression) return result(TBool);
2263 return reduce(env, bc::InstanceOfD { get_ts_classname(ts) });
2265 case TypeStructure::Kind::T_unresolved: {
2266 if (asExpression) return result(TBool);
2267 auto const rcls = env.index.resolve_class(env.ctx, get_ts_classname(ts));
2268       // We can only reduce to InstanceOfD if we know for sure that this class
2269       // can be resolved, since instanceof on an undefined class does not throw.
2270 if (!rcls || !rcls->resolved() || rcls->cls()->attrs & AttrEnum) {
2271 return result(TBool);
2273 return reduce(env, bc::InstanceOfD { rcls->name() });
2275 case TypeStructure::Kind::T_enum:
2276 case TypeStructure::Kind::T_resource:
2277 case TypeStructure::Kind::T_vec_or_dict:
2278 // TODO(T29232862): implement
2279 return result(TBool);
2280 case TypeStructure::Kind::T_typeaccess:
2281 case TypeStructure::Kind::T_array:
2282 case TypeStructure::Kind::T_xhp:
2283 return result(TBool);
2284 case TypeStructure::Kind::T_fun:
2285 case TypeStructure::Kind::T_typevar:
2286 case TypeStructure::Kind::T_trait:
2287 // We will error on these at the JIT
2288 return result(TBool);
2291 not_reached();
2296 void in(ISS& env, const bc::IsTypeStruct& op) {
2297 assertx(op.arr1->isDictOrDArray());
2298 isAsTypeStructImpl<false>(env, op.arr1);
2301 void in(ISS& env, const bc::AsTypeStruct& op) {
2302 assertx(op.arr1->isDictOrDArray());
2303 isAsTypeStructImpl<true>(env, op.arr1);
2306 namespace {
2309  * If the value on the top of the stack is known to be equivalent to the local
2310  * it's being moved/copied to, return folly::none without modifying any
2311  * state. Otherwise, pop the stack value, perform the set, and return a pair
2312  * giving the value's type and any other local it's known to be equivalent to.
2314 template <typename Op>
2315 folly::Optional<std::pair<Type, LocalId>> moveToLocImpl(ISS& env,
2316 const Op& op) {
2317 nothrow(env);
2318 auto equivLoc = topStkLocal(env);
2319 // If the local could be a Ref, don't record equality because the stack
2320 // element and the local won't actually have the same type.
2321 if (!locCouldBeRef(env, op.loc1)) {
2322 assertx(!is_volatile_local(env.ctx.func, op.loc1));
2323 if (equivLoc != NoLocalId) {
2324 if (equivLoc == op.loc1 ||
2325 locsAreEquiv(env, equivLoc, op.loc1)) {
2326 // We allow equivalency to ignore Uninit, so we need to check
2327 // the types here.
2328 if (peekLocRaw(env, op.loc1) == topC(env)) {
2329 return folly::none;
2332 } else {
2333 equivLoc = op.loc1;
2335 } else {
2336 equivLoc = NoLocalId;
2338 auto val = popC(env);
2339 setLoc(env, op.loc1, val);
2340 if (equivLoc != op.loc1 && equivLoc != NoLocalId) {
2341 addLocEquiv(env, op.loc1, equivLoc);
2343 return { std::make_pair(std::move(val), equivLoc) };
2348 void in(ISS& env, const bc::PopL& op) {
2349 // If the same value is already in the local, do nothing but pop
2350 // it. Otherwise, the set has been done by moveToLocImpl.
2351 if (!moveToLocImpl(env, op)) return reduce(env, bc::PopC {});
2354 void in(ISS& env, const bc::SetL& op) {
2355 // If the same value is already in the local, do nothing because SetL keeps
2356 // the value on the stack. If it isn't, we need to push it back onto the stack
2357 // because moveToLocImpl popped it.
2358 if (auto p = moveToLocImpl(env, op)) {
2359 push(env, std::move(p->first), p->second);
2360 } else {
2361 reduce(env, bc::Nop {});
2365 void in(ISS& env, const bc::SetN&) {
2366   // This isn't trivial to strength-reduce without a "flip two top
2367   // elements of stack" opcode.
2368 auto t1 = popC(env);
2369 auto const t2 = popC(env);
2370 auto const v2 = tv(t2);
2371 // TODO(#3653110): could nothrow if t2 can't be an Obj or Res
2373 auto const knownLoc = v2 && v2->m_type == KindOfPersistentString
2374 ? findLocal(env, v2->m_data.pstr)
2375 : NoLocalId;
2376 if (knownLoc != NoLocalId) {
2377 setLoc(env, knownLoc, t1);
2378 } else {
2379 // We could be changing the value of any local, but we won't
2380 // change whether or not they are boxed or initialized.
2381 loseNonRefLocalTypes(env);
2383 mayUseVV(env);
2384 push(env, std::move(t1));
2387 void in(ISS& env, const bc::SetG&) {
2388 auto t1 = popC(env);
2389 popC(env);
2390 push(env, std::move(t1));
2393 void in(ISS& env, const bc::SetS& op) {
2394 auto const t1 = popC(env);
2395 auto const tcls = takeClsRefSlot(env, op.slot);
2396 auto const tname = popC(env);
2397 auto const vname = tv(tname);
2398 auto const self = selfCls(env);
2400 if (!self || tcls.couldBe(*self)) {
2401 if (vname && vname->m_type == KindOfPersistentString) {
2402 nothrow(env);
2403 mergeSelfProp(env, vname->m_data.pstr, t1);
2404 } else {
2405 mergeEachSelfPropRaw(env, [&] (Type) { return t1; });
2409 if (auto c = env.collect.publicStatics) {
2410 c->merge(env.ctx, tcls, tname, t1);
2413 push(env, std::move(t1));
2416 void in(ISS& env, const bc::SetOpL& op) {
2417 auto const t1 = popC(env);
2418 auto const v1 = tv(t1);
2419 auto const loc = locAsCell(env, op.loc1);
2420 auto const locVal = tv(loc);
2421 if (v1 && locVal) {
2422 // Can't constprop at this eval_cell, because of the effects on
2423 // locals.
2424 auto resultTy = eval_cell([&] {
2425 Cell c = *locVal;
2426 Cell rhs = *v1;
2427 setopBody(&c, op.subop2, &rhs);
2428 return c;
2430 if (!resultTy) resultTy = TInitCell;
2432 // We may have inferred a TSStr or TSArr with a value here, but
2433 // at runtime it will not be static. For now just throw that
2434 // away. TODO(#3696042): should be able to loosen_staticness here.
2435 if (resultTy->subtypeOf(TStr)) resultTy = TStr;
2436 else if (resultTy->subtypeOf(TArr)) resultTy = TArr;
2437 else if (resultTy->subtypeOf(TVec)) resultTy = TVec;
2438 else if (resultTy->subtypeOf(TDict)) resultTy = TDict;
2439 else if (resultTy->subtypeOf(TKeyset)) resultTy = TKeyset;
2441 setLoc(env, op.loc1, *resultTy);
2442 push(env, *resultTy);
2443 return;
2446 auto resultTy = typeSetOp(op.subop2, loc, t1);
2447 setLoc(env, op.loc1, resultTy);
2448 push(env, std::move(resultTy));
2451 void in(ISS& env, const bc::SetOpN&) {
2452 popC(env);
2453 popC(env);
2454 loseNonRefLocalTypes(env);
2455 mayUseVV(env);
2456 push(env, TInitCell);
2459 void in(ISS& env, const bc::SetOpG&) {
2460 popC(env); popC(env);
2461 push(env, TInitCell);
2464 void in(ISS& env, const bc::SetOpS& op) {
2465 popC(env);
2466 auto const tcls = takeClsRefSlot(env, op.slot);
2467 auto const tname = popC(env);
2468 auto const vname = tv(tname);
2469 auto const self = selfCls(env);
2471 if (!self || tcls.couldBe(*self)) {
2472 if (vname && vname->m_type == KindOfPersistentString) {
2473 mergeSelfProp(env, vname->m_data.pstr, TInitCell);
2474 } else {
2475 loseNonRefSelfPropTypes(env);
2479 if (auto c = env.collect.publicStatics) {
2480 c->merge(env.ctx, tcls, tname, TInitCell);
2483 push(env, TInitCell);
2486 void in(ISS& env, const bc::IncDecL& op) {
2487 auto loc = locAsCell(env, op.loc1);
2488 auto newT = typeIncDec(op.subop2, loc);
2489 auto const pre = isPre(op.subop2);
2491 // If it's a non-numeric string, this may cause it to exceed the max length.
2492 if (!locCouldBeUninit(env, op.loc1) &&
2493 !loc.couldBe(TStr)) {
2494 nothrow(env);
2497 if (!pre) push(env, std::move(loc));
2498 setLoc(env, op.loc1, newT);
2499 if (pre) push(env, std::move(newT));
2502 void in(ISS& env, const bc::IncDecN& op) {
2503 auto const t1 = topC(env);
2504 auto const v1 = tv(t1);
2505 auto const knownLoc = v1 && v1->m_type == KindOfPersistentString
2506 ? findLocal(env, v1->m_data.pstr)
2507 : NoLocalId;
2508 if (knownLoc != NoLocalId) {
2509 return reduce(env, bc::PopC {},
2510 bc::IncDecL { knownLoc, op.subop1 });
2512 popC(env);
2513 loseNonRefLocalTypes(env);
2514 mayUseVV(env);
2515 push(env, TInitCell);
2518 void in(ISS& env, const bc::IncDecG&) { popC(env); push(env, TInitCell); }
2520 void in(ISS& env, const bc::IncDecS& op) {
2521 auto const tcls = takeClsRefSlot(env, op.slot);
2522 auto const tname = popC(env);
2523 auto const vname = tv(tname);
2524 auto const self = selfCls(env);
2526 if (!self || tcls.couldBe(*self)) {
2527 if (vname && vname->m_type == KindOfPersistentString) {
2528 mergeSelfProp(env, vname->m_data.pstr, TInitCell);
2529 } else {
2530 loseNonRefSelfPropTypes(env);
2534 if (auto c = env.collect.publicStatics) {
2535 c->merge(env.ctx, tcls, tname, TInitCell);
2538 push(env, TInitCell);
2541 void in(ISS& env, const bc::BindL& op) {
2542   // If op.loc1 was bound to a local static, it's going to be
2543   // unbound from it. If the thing it's being bound /to/ is a local
2544 // static, we've already marked it as modified via the VGetL, so
2545 // there's nothing more to track.
2546 // Unbind it before any updates.
2547 modifyLocalStatic(env, op.loc1, TUninit);
2548 nothrow(env);
2549 auto t1 = popV(env);
2550 setLocRaw(env, op.loc1, t1);
2551 push(env, std::move(t1));
2554 void in(ISS& env, const bc::BindN&) {
2555 // TODO(#3653110): could nothrow if t2 can't be an Obj or Res
2556 auto t1 = popV(env);
2557 auto const t2 = popC(env);
2558 auto const v2 = tv(t2);
2559 auto const knownLoc = v2 && v2->m_type == KindOfPersistentString
2560 ? findLocal(env, v2->m_data.pstr)
2561 : NoLocalId;
2562 unbindLocalStatic(env, knownLoc);
2563 if (knownLoc != NoLocalId) {
2564 setLocRaw(env, knownLoc, t1);
2565 } else {
2566 boxUnknownLocal(env);
2568 mayUseVV(env);
2569 push(env, std::move(t1));
2572 void in(ISS& env, const bc::BindG&) {
2573 auto t1 = popV(env);
2574 popC(env);
2575 push(env, std::move(t1));
2578 void in(ISS& env, const bc::BindS& op) {
2579 popV(env);
2580 auto const tcls = takeClsRefSlot(env, op.slot);
2581 auto const tname = popC(env);
2582 auto const vname = tv(tname);
2583 auto const self = selfCls(env);
2585 if (!self || tcls.couldBe(*self)) {
2586 if (vname && vname->m_type == KindOfPersistentString) {
2587 boxSelfProp(env, vname->m_data.pstr);
2588 } else {
2589 killSelfProps(env);
2593 if (auto c = env.collect.publicStatics) {
2594 c->merge(env.ctx, tcls, tname, TRef);
2597 push(env, TRef);
2600 void in(ISS& env, const bc::UnsetL& op) {
2601 nothrow(env);
2602 setLocRaw(env, op.loc1, TUninit);
2605 void in(ISS& env, const bc::UnsetN& /*op*/) {
2606 auto const t1 = topC(env);
2607 auto const v1 = tv(t1);
2608 if (v1 && v1->m_type == KindOfPersistentString) {
2609 auto const loc = findLocal(env, v1->m_data.pstr);
2610 if (loc != NoLocalId) {
2611 return reduce(env, bc::PopC {},
2612 bc::UnsetL { loc });
2615 popC(env);
2616 if (!t1.couldBe(TObj) && !t1.couldBe(TRes)) nothrow(env);
2617 unsetUnknownLocal(env);
2618 mayUseVV(env);
2621 void in(ISS& env, const bc::UnsetG& /*op*/) {
2622 auto const t1 = popC(env);
2623 if (!t1.couldBe(TObj) && !t1.couldBe(TRes)) nothrow(env);
2626 void in(ISS& env, const bc::FPushFuncD& op) {
2627 auto const rfunc = env.index.resolve_func(env.ctx, op.str2);
2628 if (auto const func = rfunc.exactFunc()) {
2629 if (can_emit_builtin(func, op.arg1, op.has_unpack)) {
2630 fpiPush(
2631 env,
2632 ActRec { FPIKind::Builtin, TBottom, folly::none, rfunc },
2633 op.arg1,
2634 false
2636 return reduce(env, bc::Nop {});
2639 if (fpiPush(env, ActRec { FPIKind::Func, TBottom, folly::none, rfunc },
2640 op.arg1, false)) {
2641 return reduce(env, bc::Nop {});
2645 void in(ISS& env, const bc::FPushFunc& op) {
2646 auto const t1 = topC(env);
2647 auto const v1 = tv(t1);
2648 folly::Optional<res::Func> rfunc;
2649 // FPushFuncD and FPushFuncU require that the names of inout functions be
2650 // mangled, so skip those for now.
2651 if (v1 && v1->m_type == KindOfPersistentString && op.argv.size() == 0) {
2652 auto const name = normalizeNS(v1->m_data.pstr);
2653 // FPushFuncD doesn't support class-method pair strings yet.
2654 if (isNSNormalized(name) && notClassMethodPair(name)) {
2655 rfunc = env.index.resolve_func(env.ctx, name);
2656 // If the function might distinguish being called dynamically from not,
2657 // don't turn a dynamic call into a static one.
2658 if (!rfunc->mightCareAboutDynCalls()) {
2659 return reduce(env, bc::PopC {},
2660 bc::FPushFuncD { op.arg1, name, op.has_unpack });
2664 popC(env);
2665 if (t1.subtypeOf(TObj)) {
2666 return fpiPushNoFold(env, ActRec { FPIKind::ObjInvoke, t1 });
2668 if (t1.subtypeOf(TArr)) {
2669 return fpiPushNoFold(env, ActRec { FPIKind::CallableArr, TTop });
2671 if (t1.subtypeOf(TStr)) {
2672 fpiPush(
2673 env,
2674 ActRec { FPIKind::Func, TTop, folly::none, rfunc },
2675 op.arg1,
2676 true);
2677 return;
2679 fpiPushNoFold(env, ActRec { FPIKind::Unknown, TTop });
2682 void in(ISS& env, const bc::FPushFuncU& op) {
2683 auto const rfuncPair =
2684 env.index.resolve_func_fallback(env.ctx, op.str2, op.str3);
2685 if (options.ElideAutoloadInvokes && !rfuncPair.second) {
2686 return reduce(
2687 env,
2688 bc::FPushFuncD { op.arg1, rfuncPair.first.name(), op.has_unpack }
2691 fpiPushNoFold(
2692 env,
2693 ActRec {
2694 FPIKind::Func,
2695 TBottom,
2696 folly::none,
2697 rfuncPair.first,
2698 rfuncPair.second
2703 const StaticString s_nullFunc { "__SystemLib\\__86null" };
2705 void in(ISS& env, const bc::FPushObjMethodD& op) {
2706 auto const nullThrows = op.subop3 == ObjMethodOp::NullThrows;
2707 auto const input = topC(env);
2708 auto const mayCallMethod = input.couldBe(TObj);
2709 auto const mayCallNullsafe = !nullThrows && input.couldBe(TNull);
2710 auto const mayThrowNonObj = !input.subtypeOf(nullThrows ? TObj : TOptObj);
2712 if (!mayCallMethod && !mayCallNullsafe) {
2713     // This FPush may only throw; make sure it's not optimized away.
2714 fpiPushNoFold(env, ActRec { FPIKind::ObjMeth, TBottom });
2715 popC(env);
2716 return unreachable(env);
2719 if (!mayCallMethod && !mayThrowNonObj) {
2720 // Null input, this may only call the nullsafe helper, so do that.
2721 return reduce(
2722 env,
2723 bc::PopC {},
2724 bc::FPushFuncD { op.arg1, s_nullFunc.get(), op.has_unpack }
2728 auto const ar = [&] {
2729 assertx(mayCallMethod);
2730 auto const kind = mayCallNullsafe ? FPIKind::ObjMethNS : FPIKind::ObjMeth;
2731 auto const ctxTy = intersection_of(input, TObj);
2732 auto const clsTy = objcls(ctxTy);
2733 auto const rcls = is_specialized_cls(clsTy)
2734 ? folly::Optional<res::Class>(dcls_of(clsTy).cls)
2735 : folly::none;
2736 auto const func = env.index.resolve_method(env.ctx, clsTy, op.str2);
2737 return ActRec { kind, ctxTy, rcls, func };
2740 if (!mayCallMethod) {
2741 // Calls nullsafe helper, but can't fold as we may still throw.
2742 assertx(mayCallNullsafe && mayThrowNonObj);
2743 auto const func = env.index.resolve_func(env.ctx, s_nullFunc.get());
2744 assertx(func.exactFunc());
2745 fpiPushNoFold(env, ActRec { FPIKind::Func, TBottom, folly::none, func });
2746 } else if (mayCallNullsafe || mayThrowNonObj) {
2747 // Can't optimize away as FCall may push null instead of the folded value
2748 // or FCall may throw.
2749 fpiPushNoFold(env, ar());
2750 } else if (fpiPush(env, ar(), op.arg1, false)) {
2751 return reduce(env, bc::PopC {});
2754 auto const location = topStkEquiv(env);
2755 if (location != NoLocalId) {
2756 if (!refineLocation(env, location, [&] (Type t) {
2757 if (nullThrows) return intersection_of(t, TObj);
2758 if (!t.couldBe(TUninit)) return intersection_of(t, TOptObj);
2759 if (!t.couldBe(TObj)) return intersection_of(t, TNull);
2760 return t;
2761 })) {
2762 unreachable(env);
2766 popC(env);
2769 void in(ISS& env, const bc::FPushObjMethod& op) {
2770 auto const t1 = topC(env);
2771 auto const v1 = tv(t1);
2772 auto const clsTy = objcls(t1);
2773 folly::Optional<res::Func> rfunc;
2774 if (v1 && v1->m_type == KindOfPersistentString && op.argv.size() == 0) {
2775 rfunc = env.index.resolve_method(env.ctx, clsTy, v1->m_data.pstr);
2776 if (!rfunc->mightCareAboutDynCalls()) {
2777 return reduce(
2778 env,
2779 bc::PopC {},
2780 bc::FPushObjMethodD {
2781 op.arg1, v1->m_data.pstr, op.subop2, op.has_unpack
2786 popC(env);
2787 fpiPush(
2788 env,
2789 ActRec {
2790 FPIKind::ObjMeth,
2791 popC(env),
2792 is_specialized_cls(clsTy)
2793 ? folly::Optional<res::Class>(dcls_of(clsTy).cls)
2794 : folly::none,
2795 rfunc
2797 op.arg1,
2798 true
2802 void in(ISS& env, const bc::FPushClsMethodD& op) {
2803 auto const rcls = env.index.resolve_class(env.ctx, op.str3);
2804 auto clsType = rcls ? clsExact(*rcls) : TCls;
2805 auto const rfun = env.index.resolve_method(
2806 env.ctx,
2807 clsType,
2808 op.str2
2810 if (fpiPush(env, ActRec { FPIKind::ClsMeth, clsType, rcls, rfun }, op.arg1,
2811 false)) {
2812 return reduce(env, bc::Nop {});
2816 namespace {
2818 Type ctxCls(ISS& env) {
2819 auto const s = selfCls(env);
2820 return setctx(s ? *s : TCls);
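// Map a SpecialClsRef (static::, self::, parent::) to the best class type we
// know for the current context, falling back to TCls when nothing is known.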
2823 Type specialClsRefToCls(ISS& env, SpecialClsRef ref) {
2824 if (!env.ctx.cls) return TCls;
2825 auto const op = [&]()-> folly::Optional<Type> {
2826 switch (ref) {
2827 case SpecialClsRef::Static: return ctxCls(env);
2828 case SpecialClsRef::Self: return selfClsExact(env);
2829 case SpecialClsRef::Parent: return parentClsExact(env);
2831 always_assert(false);
2832 }();
2833 return op ? *op : TCls;
2838 void in(ISS& env, const bc::FPushClsMethod& op) {
2839 auto const t1 = peekClsRefSlot(env, op.slot);
2840 auto const t2 = topC(env);
2841 auto const v2 = tv(t2);
2843 folly::Optional<res::Class> rcls;
2844 auto exactCls = false;
2845 if (is_specialized_cls(t1)) {
2846 auto dcls = dcls_of(t1);
2847 rcls = dcls.cls;
2848 exactCls = dcls.type == DCls::Exact;
2850 folly::Optional<res::Func> rfunc;
2851 if (v2 && v2->m_type == KindOfPersistentString && op.argv.size() == 0) {
2852 rfunc = env.index.resolve_method(env.ctx, t1, v2->m_data.pstr);
2853 if (exactCls && rcls && !rfunc->mightCareAboutDynCalls()) {
2854 return reduce(
2855 env,
2856 bc::DiscardClsRef { op.slot },
2857 bc::PopC {},
2858 bc::FPushClsMethodD {
2859 op.arg1, v2->m_data.pstr, rcls->name(), op.has_unpack
2864 if (fpiPush(env, ActRec { FPIKind::ClsMeth, t1, rcls, rfunc }, op.arg1,
2865 true)) {
2866 return reduce(env,
2867 bc::DiscardClsRef { op.slot },
2868 bc::PopC {});
2870 takeClsRefSlot(env, op.slot);
2871 popC(env);
2874 void in(ISS& env, const bc::FPushClsMethodS& op) {
2875 auto const name = topC(env);
2876 auto const namev = tv(name);
2877 auto const cls = specialClsRefToCls(env, op.subop2);
2878 folly::Optional<res::Func> rfunc;
2879 if (namev && namev->m_type == KindOfPersistentString && op.argv.size() == 0) {
2880 rfunc = env.index.resolve_method(env.ctx, cls, namev->m_data.pstr);
2881 if (!rfunc->mightCareAboutDynCalls()) {
2882 return reduce(
2883 env,
2884 bc::PopC {},
2885 bc::FPushClsMethodSD {
2886 op.arg1, op.subop2, namev->m_data.pstr, op.has_unpack
2891 auto const rcls = is_specialized_cls(cls)
2892 ? folly::Optional<res::Class>{dcls_of(cls).cls}
2893 : folly::none;
2894 if (fpiPush(env, ActRec {
2895 FPIKind::ClsMeth,
2896 ctxCls(env),
2897 rcls,
2898 rfunc
2899 }, op.arg1, true)) {
2900 return reduce(env, bc::PopC {});
2902 popC(env);
2905 void in(ISS& env, const bc::FPushClsMethodSD& op) {
2906 auto const cls = specialClsRefToCls(env, op.subop2);
2908 folly::Optional<res::Class> rcls;
2909 auto exactCls = false;
2910 if (is_specialized_cls(cls)) {
2911 auto dcls = dcls_of(cls);
2912 rcls = dcls.cls;
2913 exactCls = dcls.type == DCls::Exact;
2916 if (op.subop2 == SpecialClsRef::Static && rcls && exactCls) {
2917 return reduce(
2918 env,
2919 bc::FPushClsMethodD {
2920 op.arg1, op.str3, rcls->name(), op.has_unpack
2925 auto const rfun = env.index.resolve_method(env.ctx, cls, op.str3);
2926 if (fpiPush(env, ActRec {
2927 FPIKind::ClsMeth,
2928 ctxCls(env),
2929 rcls,
2930 rfun
2931 }, op.arg1, false)) {
2932 return reduce(env, bc::Nop {});
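// Common helper for FPushCtorD/FPushCtorI: resolve the class and its
// constructor, push an ActRec for the constructor call, and push the
// (possibly exact) object type being constructed. The calling context is
// only threaded through when we're constructing the enclosing class itself
// and it can't be overridden.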
2936 void ctorHelper(ISS& env, SString name, int32_t nargs) {
2937 auto const rcls = env.index.resolve_class(env.ctx, name);
2938 auto const rfunc = rcls ?
2939 env.index.resolve_ctor(env.ctx, *rcls, true) : folly::none;
2940 auto ctxType = false;
2941 if (rcls && env.ctx.cls && rcls->same(env.index.resolve_class(env.ctx.cls)) &&
2942 !rcls->couldBeOverriden()) {
2943 ctxType = true;
2945 fpiPush(
2946 env,
2947 ActRec {
2948 FPIKind::Ctor,
2949 setctx(rcls ? clsExact(*rcls) : TCls, ctxType),
2950 rcls,
2951 rfunc
2953 nargs,
2954 false
2956 push(env, setctx(rcls ? objExact(*rcls) : TObj, ctxType));
2959 void in(ISS& env, const bc::FPushCtorD& op) {
2960 ctorHelper(env, op.str2, op.arg1);
2963 void in(ISS& env, const bc::FPushCtorI& op) {
2964 auto const name = env.ctx.unit->classes[op.arg2]->name;
2965 ctorHelper(env, name, op.arg1);
2968 void in(ISS& env, const bc::FPushCtorS& op) {
2969 auto const cls = specialClsRefToCls(env, op.subop2);
2970 if (is_specialized_cls(cls)) {
2971 auto const dcls = dcls_of(cls);
2972 if (dcls.type == DCls::Exact
2973 && (!dcls.cls.couldBeOverriden()
2974 || equivalently_refined(cls, unctx(cls)))) {
2975 return reduce(
2976 env,
2977 bc::FPushCtorD { op.arg1, dcls.cls.name(), op.has_unpack }
2980 auto const rfunc = env.index.resolve_ctor(env.ctx, dcls.cls, false);
2981 push(env, toobj(cls));
2982 // PHP doesn't forward the context to constructors.
2983 fpiPush(env, ActRec { FPIKind::Ctor, unctx(cls), dcls.cls, rfunc },
2984 op.arg1,
2985 false);
2986 return;
2988 push(env, TObj);
2989 fpiPush(env, ActRec { FPIKind::Ctor, TCls }, op.arg1, false);
2992 void in(ISS& env, const bc::FPushCtor& op) {
2993 auto const& t1 = peekClsRefSlot(env, op.slot);
2994 if (is_specialized_cls(t1)) {
2995 auto const dcls = dcls_of(t1);
2996 auto const rfunc = env.index.resolve_ctor(env.ctx, dcls.cls, false);
2997 if (dcls.type == DCls::Exact && rfunc && !rfunc->mightCareAboutDynCalls()) {
2998 return reduce(env, bc::DiscardClsRef { op.slot },
2999 bc::FPushCtorD { op.arg1, dcls.cls.name(), op.has_unpack });
3002 auto const& t2 = takeClsRefSlot(env, op.slot);
3003 push(env, toobj(t2));
3004 fpiPushNoFold(env, ActRec { FPIKind::Ctor, t2, dcls.cls, rfunc });
3005 return;
3007 takeClsRefSlot(env, op.slot);
3008 push(env, TObj);
3009 fpiPushNoFold(env, ActRec { FPIKind::Ctor, TCls });
3012 void in(ISS& env, const bc::FPushCufIter&) {
3013 nothrow(env);
3014 fpiPushNoFold(env, ActRec { FPIKind::Unknown, TTop });
3017 void in(ISS& /*env*/, const bc::FHandleRefMismatch& /*op*/) {}
3019 void in(ISS& env, const bc::FPassL& op) {
3020 auto const kind = prepKind(env, op.arg1);
3021 auto hint = !fpassCanThrow(env, kind, op.subop3) ? FPassHint::Any : op.subop3;
3022 switch (kind) {
3023 case PrepKind::Unknown:
3024 if (!locCouldBeUninit(env, op.loc2) && op.subop3 == FPassHint::Any) {
3025 nothrow(env);
3027       // This might box the local; we can't tell. Note: if the local
3028 // is already TRef, we could try to leave it alone, but not for
3029 // now.
3030 setLocRaw(env, op.loc2, TGen);
3031 return push(env, TInitGen);
3032 case PrepKind::Val:
3033 return reduce_fpass_arg(env, bc::CGetL { op.loc2 }, op.arg1, false, hint);
3034 case PrepKind::Ref:
3035 return reduce_fpass_arg(env, bc::VGetL { op.loc2 }, op.arg1, true, hint);
3039 void in(ISS& env, const bc::FPassN& op) {
3040 auto const kind = prepKind(env, op.arg1);
3041 auto hint = !fpassCanThrow(env, kind, op.subop2) ? FPassHint::Any : op.subop2;
3042 switch (kind) {
3043 case PrepKind::Unknown:
3044 // This could change the type of any local.
3045 popC(env);
3046 killLocals(env);
3047 mayUseVV(env);
3048 return push(env, TInitGen);
3049 case PrepKind::Val: return reduce_fpass_arg(env,
3050 bc::CGetN {},
3051 op.arg1,
3052 false,
3053 hint);
3054 case PrepKind::Ref: return reduce_fpass_arg(env,
3055 bc::VGetN {},
3056 op.arg1,
3057 true,
3058 hint);
3062 void in(ISS& env, const bc::FPassG& op) {
3063 auto const kind = prepKind(env, op.arg1);
3064 auto hint = !fpassCanThrow(env, kind, op.subop2) ? FPassHint::Any : op.subop2;
3065 switch (kind) {
3066 case PrepKind::Unknown: popC(env); return push(env, TInitGen);
3067 case PrepKind::Val: return reduce_fpass_arg(env,
3068 bc::CGetG {},
3069 op.arg1,
3070 false,
3071 hint);
3072 case PrepKind::Ref: return reduce_fpass_arg(env,
3073 bc::VGetG {},
3074 op.arg1,
3075 true,
3076 hint);
3080 void in(ISS& env, const bc::FPassS& op) {
3081 auto const kind = prepKind(env, op.arg1);
3082 auto hint = !fpassCanThrow(env, kind, op.subop3) ? FPassHint::Any : op.subop3;
3083 switch (kind) {
3084 case PrepKind::Unknown:
3086 auto tcls = takeClsRefSlot(env, op.slot);
3087 auto const self = selfCls(env);
3088 auto const tname = popC(env);
3089 auto const vname = tv(tname);
3090 if (!self || tcls.couldBe(*self)) {
3091 if (vname && vname->m_type == KindOfPersistentString) {
3092           // May or may not be boxing it, depending on the ref-ness.
3093 mergeSelfProp(env, vname->m_data.pstr, TInitGen);
3094 } else {
3095 killSelfProps(env);
3098 if (auto c = env.collect.publicStatics) {
3099 c->merge(env.ctx, tcls, tname, TInitGen);
3102 return push(env, TInitGen);
3103 case PrepKind::Val:
3104 return reduce_fpass_arg(env, bc::CGetS { op.slot }, op.arg1, false, hint);
3105 case PrepKind::Ref:
3106 return reduce_fpass_arg(env, bc::VGetS { op.slot }, op.arg1, true, hint);
3110 void in(ISS& env, const bc::FPassV& op) {
3111 auto const kind = prepKind(env, op.arg1);
3112 auto hint = op.subop2;
3113 if (!fpassCanThrow(env, kind, op.subop2)) {
3114 hint = FPassHint::Any;
3115 nothrow(env);
3117 switch (kind) {
3118 case PrepKind::Unknown:
3119 popV(env);
3120 return push(env, TInitGen);
3121 case PrepKind::Val:
3122 return reduce_fpass_arg(env, bc::Unbox {}, op.arg1, false, hint);
3123 case PrepKind::Ref:
3124 return reduce_fpass_arg(env, bc::Nop {}, op.arg1, true, hint);
3128 void in(ISS& env, const bc::FPassR& op) {
3129 auto const kind = prepKind(env, op.arg1);
3130 auto hint = op.subop2;
3131 if (!fpassCanThrow(env, kind, op.subop2)) {
3132 hint = FPassHint::Any;
3133 nothrow(env);
3135 if (shouldKillFPass(env, op.subop2, op.arg1)) {
3136 switch (kind) {
3137 case PrepKind::Unknown:
3138 not_reached();
3139 case PrepKind::Val:
3140 return killFPass(env, kind, hint, op.arg1, bc::UnboxR {});
3141 case PrepKind::Ref:
3142 return killFPass(env, kind, hint, op.arg1, bc::BoxR {});
3146 auto const t1 = topT(env);
3147 if (t1.subtypeOf(TCell)) {
3148 return reduce_fpass_arg(env, bc::UnboxRNop {}, op.arg1, false, hint);
3151 // If it's known to be a ref, this behaves like FPassV, except we need to do
3152 // it slightly differently to keep stack flavors correct.
3153 if (t1.subtypeOf(TRef)) {
3154 switch (kind) {
3155 case PrepKind::Unknown:
3156 popV(env);
3157 return push(env, TInitGen);
3158 case PrepKind::Val:
3159 return reduce_fpass_arg(env, bc::UnboxR {}, op.arg1, false, hint);
3160 case PrepKind::Ref:
3161 return reduce_fpass_arg(env, bc::BoxRNop {}, op.arg1, true, hint);
3163 not_reached();
3166 // Here we don't know if it is going to be a cell or a ref.
3167 switch (kind) {
3168 case PrepKind::Unknown: popR(env); return push(env, TInitGen);
3169 case PrepKind::Val: popR(env); return push(env, TInitCell);
3170 case PrepKind::Ref: popR(env); return push(env, TRef);
3174 void in(ISS& env, const bc::FPassVNop& op) {
3175 if (shouldKillFPass(env, op.subop2, op.arg1)) {
3176 return killFPass(env, prepKind(env, op.arg1), op.subop2, op.arg1,
3177 bc::Nop {});
3179 if (op.subop2 == FPassHint::Ref) {
3180 return reduce(env, bc::FPassVNop { op.arg1, FPassHint::Any });
3182 push(env, popV(env));
3183 if (op.subop2 != FPassHint::Cell) nothrow(env);
3186 void in(ISS& env, const bc::FPassC& op) {
3187 if (shouldKillFPass(env, op.subop2, op.arg1)) {
3188 return killFPass(env, prepKind(env, op.arg1), op.subop2, op.arg1,
3189 bc::Nop {});
3191 if (op.subop2 == FPassHint::Cell && prepKind(env, op.arg1) == PrepKind::Val) {
3192 return reduce(env, bc::FPassC { op.arg1, FPassHint::Any });
3194 if (op.subop2 != FPassHint::Ref) effect_free(env);
3197 constexpr int32_t kNoUnpack = -1;
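// Push the return type of a call. For multi-return calls, unpack gives the
// number of stack slots an FCallM-style opcode produces: a statically known
// vec return type is spread element by element, anything else becomes
// TInitCell per slot. A TBottom return type means the callee never returns,
// so the rest of the block is unreachable.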
3199 void pushCallReturnType(ISS& env, Type&& ty, int32_t unpack = kNoUnpack) {
3200 if (ty == TBottom) {
3201 // The callee function never returns. It might throw, or loop forever.
3202 unreachable(env);
3204 if (unpack != kNoUnpack) {
3205 for (auto i = uint32_t{0}; i < unpack - 1; ++i) popU(env);
3206 if (is_specialized_vec(ty)) {
3207 for (int32_t i = 1; i < unpack; i++) {
3208 push(env, vec_elem(ty, ival(i)).first);
3210 push(env, vec_elem(ty, ival(0)).first);
3211 } else {
3212 for (int32_t i = 0; i < unpack; i++) push(env, TInitCell);
3214 return;
3216 return push(env, std::move(ty));
3219 const StaticString s_defined { "defined" };
3220 const StaticString s_function_exists { "function_exists" };
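// Handle a call whose target function is known. Foldable builtins may be
// constant-folded away entirely; otherwise we pop the arguments, account for
// special function effects (including the defined() and function_exists()
// builtins), and push the return type the index reports for the callee,
// unioned with the fallback function's return type when one exists.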
3222 void fcallKnownImpl(ISS& env, uint32_t numArgs, int32_t unpack = kNoUnpack) {
3223 auto const ar = fpiTop(env);
3224 always_assert(ar.func.hasValue());
3226 if (options.ConstantFoldBuiltins && ar.foldable) {
3227 if (unpack == kNoUnpack) {
3228 auto ty = [&] () {
3229 auto const func = ar.func->exactFunc();
3230 assertx(func);
3231 if (func->attrs & AttrBuiltin && func->attrs & AttrIsFoldable) {
3232 auto ret = const_fold(env, numArgs, *ar.func);
3233 return ret ? *ret : TBottom;
3235 std::vector<Type> args(numArgs);
3236 for (auto i = uint32_t{0}; i < numArgs; ++i) {
3237 args[numArgs - i - 1] = scalarize(topT(env, i));
3240 return env.index.lookup_foldable_return_type(
3241 env.ctx, func, std::move(args));
3242 }();
3243 if (auto v = tv(ty)) {
3244 std::vector<Bytecode> repl { numArgs, bc::PopC {} };
3245 repl.push_back(gen_constant(*v));
3246 repl.push_back(bc::RGetCNop {});
3247 fpiPop(env);
3248 return reduce(env, std::move(repl));
3251 fpiNotFoldable(env);
3252 fpiPop(env);
3253 discard(env, numArgs);
3254 if (unpack != kNoUnpack) {
3255 while (unpack--) push(env, TBottom);
3256 return;
3258 return push(env, TBottom);
3261 fpiPop(env);
3262 specialFunctionEffects(env, ar);
3264 if (ar.func->name()->isame(s_function_exists.get())) {
3265 handle_function_exists(env, numArgs, false);
3268 std::vector<Type> args(numArgs);
3269 for (auto i = uint32_t{0}; i < numArgs; ++i) {
3270 args[numArgs - i - 1] = popF(env);
3273 if (options.HardConstProp &&
3274 numArgs == 1 &&
3275 ar.func->name()->isame(s_defined.get())) {
3276 // If someone calls defined('foo') they probably want foo to be
3277     // defined normally; i.e. not a persistent constant.
3278 if (auto const v = tv(args[0])) {
3279 if (isStringType(v->m_type) &&
3280 !env.index.lookup_constant(env.ctx, v->m_data.pstr)) {
3281 env.collect.cnsMap[v->m_data.pstr].m_type = kDynamicConstant;
3286 auto ty = env.index.lookup_return_type(
3287 CallContext { env.ctx, args, ar.context },
3288 *ar.func
3290 if (ar.kind == FPIKind::ObjMethNS) {
3291 ty = union_of(std::move(ty), TInitNull);
3293 if (!ar.fallbackFunc) {
3294 pushCallReturnType(env, std::move(ty), unpack);
3295 return;
3297 auto ty2 = env.index.lookup_return_type(
3298 CallContext { env.ctx, args, ar.context },
3299 *ar.fallbackFunc
3301 pushCallReturnType(env, union_of(std::move(ty), std::move(ty2)), unpack);
3304 void in(ISS& env, const bc::FCall& op) {
3305 auto const ar = fpiTop(env);
3306 if (ar.func && !ar.fallbackFunc) {
3307 switch (ar.kind) {
3308 case FPIKind::Unknown:
3309 case FPIKind::CallableArr:
3310 case FPIKind::ObjInvoke:
3311 not_reached();
3312 case FPIKind::Func:
3313 return reduce(
3314 env,
3315 bc::FCallD { op.arg1, s_empty.get(), ar.func->name() }
3317 break;
3318 case FPIKind::Builtin:
3319 return finish_builtin(env, ar.func->exactFunc(), op.arg1, false);
3320 case FPIKind::Ctor:
3322 * Need to be wary of old-style ctors. We could get into the situation
3323 * where we're constructing class D extends B, and B has an old-style
3324 * ctor but D::B also exists. (So in this case we'll skip the
3325 * fcallKnownImpl stuff.)
3327 if (!ar.func->name()->isame(s_construct.get())) {
3328 break;
3330 // fallthrough
3331 case FPIKind::ObjMeth:
3332 case FPIKind::ClsMeth:
3333 if (ar.cls.hasValue() && ar.func->cantBeMagicCall()) {
3334 return reduce(
3335 env,
3336 bc::FCallD { op.arg1, ar.cls->name(), ar.func->name() }
3339 // fallthrough
3340 case FPIKind::ObjMethNS:
3341 // If we didn't return a reduce above, we still can compute a
3342 // partially-known FCall effect with our res::Func.
3343 return fcallKnownImpl(env, op.arg1);
3347 for (auto i = uint32_t{0}; i < op.arg1; ++i) popF(env);
3348 fpiPop(env);
3349 specialFunctionEffects(env, ar);
3350 push(env, TInitGen);
3353 void in(ISS& env, const bc::FCallD& op) {
3354 auto const ar = fpiTop(env);
3355 if ((ar.func && ar.func->name() != op.str3) ||
3356 (ar.cls && ar.cls->name() != op.str2)) {
3357 // We've found a more precise type for the call, so update it
3358 return reduce(
3359 env,
3360 bc::FCallD {
3361 op.arg1, ar.cls ? ar.cls->name() : s_empty.get(), ar.func->name()
3365 if (ar.kind == FPIKind::Builtin) {
3366 return finish_builtin(env, ar.func->exactFunc(), op.arg1, false);
3368 if (ar.func) return fcallKnownImpl(env, op.arg1);
3369 for (auto i = uint32_t{0}; i < op.arg1; ++i) popF(env);
3370 fpiPop(env);
3371 specialFunctionEffects(env, ar);
3372 push(env, TInitGen);
3375 void in(ISS& env, const bc::FCallAwait& op) {
3376 auto const ar = fpiTop(env);
3377 if (ar.foldable) {
3378 discard(env, op.arg1);
3379 fpiNotFoldable(env);
3380 fpiPop(env);
3381 return push(env, TBottom);
3383 if ((ar.func && ar.func->name() != op.str3) ||
3384 (ar.cls && ar.cls->name() != op.str2)) {
3385 // We've found a more precise type for the call, so update it
3386 return reduce(
3387 env,
3388 bc::FCallAwait {
3389 op.arg1, ar.cls ? ar.cls->name() : s_empty.get(), ar.func->name()
3393 impl(env,
3394 bc::FCallD { op.arg1, op.str2, op.str3 },
3395 bc::UnboxRNop {},
3396 bc::Await {});
3399 void fcallUnpackImpl(ISS& env, int arg, int32_t unpack = kNoUnpack) {
3400 auto const ar = fpiTop(env);
3401 if (ar.kind == FPIKind::Builtin) {
3402 always_assert(unpack == kNoUnpack);
3403 return finish_builtin(env, ar.func->exactFunc(), arg, true);
3405 if (ar.foldable) {
3406 discard(env, arg);
3407 fpiNotFoldable(env);
3408 fpiPop(env);
3409 return push(env, TBottom);
3411 for (auto i = uint32_t{0}; i < arg; ++i) { popF(env); }
3412 fpiPop(env);
3413 specialFunctionEffects(env, ar);
3414 if (ar.func) {
3415 auto ty = env.index.lookup_return_type(env.ctx, *ar.func);
3416 if (!ar.fallbackFunc) {
3417 pushCallReturnType(env, std::move(ty), unpack);
3418 return;
3420 auto ty2 = env.index.lookup_return_type(env.ctx, *ar.fallbackFunc);
3421 pushCallReturnType(env, union_of(std::move(ty), std::move(ty2)), unpack);
3422 return;
3424 if (unpack != kNoUnpack) {
3425 for (int i = 0; i < unpack - 1; i++) popU(env);
3426 while (unpack--) push(env, TInitCell);
3427 return;
3429 return push(env, TInitGen);
3432 void in(ISS& env, const bc::FCallUnpack& op) {
3433 fcallUnpackImpl(env, op.arg1);
3436 void in(ISS& env, const bc::FCallM& op) {
3437 auto const ar = fpiTop(env);
3438 if (ar.func && !ar.fallbackFunc) {
3439 switch (ar.kind) {
3440 case FPIKind::Unknown:
3441 case FPIKind::CallableArr:
3442 case FPIKind::ObjInvoke:
3443 case FPIKind::Builtin:
3444 case FPIKind::Ctor:
3445 not_reached();
3446 case FPIKind::Func:
3447 return reduce(
3448 env,
3449 bc::FCallDM { op.arg1, op.arg2, s_empty.get(), ar.func->name() }
3451 break;
3452 case FPIKind::ObjMeth:
3453 case FPIKind::ClsMeth:
3454 if (ar.cls.hasValue() && ar.func->cantBeMagicCall()) {
3455 return reduce(
3456 env,
3457 bc::FCallDM { op.arg1, op.arg2, ar.cls->name(), ar.func->name() }
3460 // fallthrough
3461 case FPIKind::ObjMethNS:
3462 // If we didn't return a reduce above, we still can compute a
3463 // partially-known FCall effect with our res::Func.
3464 return fcallKnownImpl(env, op.arg1, op.arg2);
3468 for (auto i = uint32_t{0}; i < op.arg1; ++i) popF(env);
3469 fpiPop(env);
3470 specialFunctionEffects(env, ar);
3471 for (auto i = uint32_t{0}; i < op.arg2 - 1; ++i) popU(env);
3472 for (auto i = uint32_t{0}; i < op.arg2; ++i) push(env, TInitCell);
3475 void in(ISS& env, const bc::FCallDM& op) {
3476 auto const ar = fpiTop(env);
3477 if ((ar.func && ar.func->name() != op.str4) ||
3478 (ar.cls && ar.cls->name() != op.str3)) {
3479 // We've found a more precise type for the call, so update it
3480 return reduce(
3481 env,
3482 bc::FCallDM {
3483 op.arg1,
3484 op.arg2,
3485 ar.cls ? ar.cls->name() : s_empty.get(),
3486 ar.func->name()
3490 always_assert(ar.kind != FPIKind::Builtin);
3491 if (ar.func) return fcallKnownImpl(env, op.arg1, op.arg2);
3493 for (auto i = uint32_t{0}; i < op.arg1; ++i) popF(env);
3494 fpiPop(env);
3495 specialFunctionEffects(env, ar);
3496 for (auto i = uint32_t{0}; i < op.arg2 - 1; ++i) popU(env);
3497 for (auto i = uint32_t{0}; i < op.arg2; ++i) push(env, TInitCell);
3500 void in(ISS& env, const bc::FCallUnpackM& op) {
3501 fcallUnpackImpl(env, op.arg1, op.arg2);
3504 void in(ISS& env, const bc::DecodeCufIter& op) {
3505 popC(env); // func
3506   env.propagate(op.target, &env.state); // before iter is modified
3509 namespace {
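// Model IterInit: when the base is statically known to be empty, free the
// iterator and make only the taken branch reachable; when it's known
// non-empty, record a LiveIter, set the value local, and fall through;
// otherwise propagate state along both edges.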
3511 void iterInitImpl(ISS& env, IterId iter, LocalId valueLoc,
3512 BlockId target, const Type& base, LocalId baseLoc) {
3513 assert(iterIsDead(env, iter));
3515 auto ity = iter_types(base);
3516 if (!ity.mayThrowOnInit) nothrow(env);
3518 auto const taken = [&]{
3519 // Take the branch before setting locals if the iter is already
3520 // empty, but after popping. Similar for the other IterInits
3521 // below.
3522 freeIter(env, iter);
3523 env.propagate(target, &env.state);
3526 auto const fallthrough = [&]{
3527 setIter(env, iter, LiveIter { ity, baseLoc, NoLocalId, env.blk.id });
3528 // Do this after setting the iterator, in case it clobbers the base local
3529 // equivalency.
3530 setLoc(env, valueLoc, std::move(ity.value));
3533 switch (ity.count) {
3534 case IterTypes::Count::Empty:
3535 taken();
3536 mayReadLocal(env, valueLoc);
3537 jmp_setdest(env, target);
3538 break;
3539 case IterTypes::Count::Single:
3540 case IterTypes::Count::NonEmpty:
3541 fallthrough();
3542 jmp_nevertaken(env);
3543 break;
3544 case IterTypes::Count::ZeroOrOne:
3545 case IterTypes::Count::Any:
3546 taken();
3547 fallthrough();
3548 break;
3552 void iterInitKImpl(ISS& env, IterId iter, LocalId valueLoc, LocalId keyLoc,
3553 BlockId target, const Type& base, LocalId baseLoc) {
3554 assert(iterIsDead(env, iter));
3556 auto ity = iter_types(base);
3557 if (!ity.mayThrowOnInit) nothrow(env);
3559 auto const taken = [&]{
3560 freeIter(env, iter);
3561 env.propagate(target, &env.state);
3564 auto const fallthrough = [&]{
3565 setIter(env, iter, LiveIter { ity, baseLoc, NoLocalId, env.blk.id });
3566 // Do this after setting the iterator, in case it clobbers the base local
3567 // equivalency.
3568 setLoc(env, valueLoc, std::move(ity.value));
3569 setLoc(env, keyLoc, std::move(ity.key));
3570 if (!locCouldBeRef(env, keyLoc)) setIterKey(env, iter, keyLoc);
3573 switch (ity.count) {
3574 case IterTypes::Count::Empty:
3575 taken();
3576 mayReadLocal(env, valueLoc);
3577 mayReadLocal(env, keyLoc);
3578 jmp_setdest(env, target);
3579 break;
3580 case IterTypes::Count::Single:
3581 case IterTypes::Count::NonEmpty:
3582 fallthrough();
3583 jmp_nevertaken(env);
3584 break;
3585 case IterTypes::Count::ZeroOrOne:
3586 case IterTypes::Count::Any:
3587 taken();
3588 fallthrough();
3589 break;
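// Model IterNext: if the iterator can yield at most one element, the branch
// back to the loop body is never taken and the iterator is freed. Otherwise
// we propagate the updated value local to the loop body, then model the
// fallthrough (loop exit) path with the iterator freed and the local's
// previous type restored.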
3593 void iterNextImpl(ISS& env, IterId iter, LocalId valueLoc, BlockId target) {
3594 auto const curLoc = locRaw(env, valueLoc);
3596 auto const noTaken = match<bool>(
3597 env.state.iters[iter],
3598 [&] (DeadIter) {
3599 always_assert(false && "IterNext on dead iter");
3600 return false;
3602 [&] (const LiveIter& ti) {
3603 if (!ti.types.mayThrowOnNext) nothrow(env);
3604 if (ti.baseLocal != NoLocalId) hasInvariantIterBase(env);
3605 switch (ti.types.count) {
3606 case IterTypes::Count::Single:
3607 case IterTypes::Count::ZeroOrOne:
3608 return true;
3609 case IterTypes::Count::NonEmpty:
3610 case IterTypes::Count::Any:
3611 setLoc(env, valueLoc, ti.types.value);
3612 return false;
3613 case IterTypes::Count::Empty:
3614 always_assert(false);
3616 not_reached();
3619 if (noTaken) {
3620 jmp_nevertaken(env);
3621 freeIter(env, iter);
3622 return;
3625 env.propagate(target, &env.state);
3627 freeIter(env, iter);
3628 setLocRaw(env, valueLoc, curLoc);
3631 void iterNextKImpl(ISS& env, IterId iter, LocalId valueLoc,
3632 LocalId keyLoc, BlockId target) {
3633 auto const curValue = locRaw(env, valueLoc);
3634 auto const curKey = locRaw(env, keyLoc);
3636 auto const noTaken = match<bool>(
3637 env.state.iters[iter],
3638 [&] (DeadIter) {
3639 always_assert(false && "IterNextK on dead iter");
3640 return false;
3642 [&] (const LiveIter& ti) {
3643 if (!ti.types.mayThrowOnNext) nothrow(env);
3644 if (ti.baseLocal != NoLocalId) hasInvariantIterBase(env);
3645 switch (ti.types.count) {
3646 case IterTypes::Count::Single:
3647 case IterTypes::Count::ZeroOrOne:
3648 return true;
3649 case IterTypes::Count::NonEmpty:
3650 case IterTypes::Count::Any:
3651 setLoc(env, valueLoc, ti.types.value);
3652 setLoc(env, keyLoc, ti.types.key);
3653 if (!locCouldBeRef(env, keyLoc)) setIterKey(env, iter, keyLoc);
3654 return false;
3655 case IterTypes::Count::Empty:
3656 always_assert(false);
3658 not_reached();
3661 if (noTaken) {
3662 jmp_nevertaken(env);
3663 freeIter(env, iter);
3664 return;
3667 env.propagate(target, &env.state);
3669 freeIter(env, iter);
3670 setLocRaw(env, valueLoc, curValue);
3671 setLocRaw(env, keyLoc, curKey);
3676 void in(ISS& env, const bc::IterInit& op) {
3677 auto const baseLoc = topStkLocal(env);
3678 auto base = popC(env);
3679 iterInitImpl(env, op.iter1, op.loc3, op.target, std::move(base), baseLoc);
3682 void in(ISS& env, const bc::LIterInit& op) {
3683 iterInitImpl(
3684 env,
3685 op.iter1,
3686 op.loc4,
3687 op.target,
3688 locAsCell(env, op.loc2),
3689 op.loc2
3693 void in(ISS& env, const bc::IterInitK& op) {
3694 auto const baseLoc = topStkLocal(env);
3695 auto base = popC(env);
3696 iterInitKImpl(
3697 env,
3698 op.iter1,
3699 op.loc3,
3700 op.loc4,
3701 op.target,
3702 std::move(base),
3703 baseLoc
3707 void in(ISS& env, const bc::LIterInitK& op) {
3708 iterInitKImpl(
3709 env,
3710 op.iter1,
3711 op.loc4,
3712 op.loc5,
3713 op.target,
3714 locAsCell(env, op.loc2),
3715 op.loc2
3719 void in(ISS& env, const bc::MIterInit& op) {
3720 popV(env);
3721 env.propagate(op.target, &env.state);
3722 unbindLocalStatic(env, op.loc3);
3723 setLocRaw(env, op.loc3, TRef);
3726 void in(ISS& env, const bc::MIterInitK& op) {
3727 popV(env);
3728 env.propagate(op.target, &env.state);
3729 unbindLocalStatic(env, op.loc3);
3730 setLocRaw(env, op.loc3, TRef);
3731 setLoc(env, op.loc4, TInitCell);
3734 void in(ISS& env, const bc::WIterInit& op) {
3735 popC(env);
3736 env.propagate(op.target, &env.state);
3737 // WIter* instructions may leave the value locals as either refs
3738   // or cells, depending on whether the rhs of the assignment was a
3739 // ref.
3740 setLocRaw(env, op.loc3, TInitGen);
3743 void in(ISS& env, const bc::WIterInitK& op) {
3744 popC(env);
3745 env.propagate(op.target, &env.state);
3746 setLocRaw(env, op.loc3, TInitGen);
3747 setLoc(env, op.loc4, TInitCell);
3750 void in(ISS& env, const bc::IterNext& op) {
3751 iterNextImpl(env, op.iter1, op.loc3, op.target);
3754 void in(ISS& env, const bc::LIterNext& op) {
3755 mayReadLocal(env, op.loc2);
3756 iterNextImpl(env, op.iter1, op.loc4, op.target);
3759 void in(ISS& env, const bc::IterNextK& op) {
3760 iterNextKImpl(env, op.iter1, op.loc3, op.loc4, op.target);
3763 void in(ISS& env, const bc::LIterNextK& op) {
3764 mayReadLocal(env, op.loc2);
3765 iterNextKImpl(env, op.iter1, op.loc4, op.loc5, op.target);
3768 void in(ISS& env, const bc::MIterNext& op) {
3769 env.propagate(op.target, &env.state);
3770 unbindLocalStatic(env, op.loc3);
3771 setLocRaw(env, op.loc3, TRef);
3774 void in(ISS& env, const bc::MIterNextK& op) {
3775 env.propagate(op.target, &env.state);
3776 unbindLocalStatic(env, op.loc3);
3777 setLocRaw(env, op.loc3, TRef);
3778 setLoc(env, op.loc4, TInitCell);
3781 void in(ISS& env, const bc::WIterNext& op) {
3782 env.propagate(op.target, &env.state);
3783 setLocRaw(env, op.loc3, TInitGen);
3786 void in(ISS& env, const bc::WIterNextK& op) {
3787 env.propagate(op.target, &env.state);
3788 setLocRaw(env, op.loc3, TInitGen);
3789 setLoc(env, op.loc4, TInitCell);
3792 void in(ISS& env, const bc::IterFree& op) {
3793 // IterFree is used for weak iterators too, so we can't assert !iterIsDead.
3794 nothrow(env);
3796 match<void>(
3797 env.state.iters[op.iter1],
3798 [] (DeadIter) {},
3799 [&] (const LiveIter& ti) {
3800 if (ti.baseLocal != NoLocalId) hasInvariantIterBase(env);
3804 freeIter(env, op.iter1);
3806 void in(ISS& env, const bc::LIterFree& op) {
3807 nothrow(env);
3808 mayReadLocal(env, op.loc2);
3809 freeIter(env, op.iter1);
3811 void in(ISS& env, const bc::MIterFree& op) {
3812 nothrow(env);
3813 freeIter(env, op.iter1);
3815 void in(ISS& env, const bc::CIterFree& op) {
3816 nothrow(env);
3817 freeIter(env, op.iter1);
3820 void in(ISS& env, const bc::IterBreak& op) {
3821 nothrow(env);
3823 for (auto const& it : op.iterTab) {
3824 if (it.kind == KindOfIter || it.kind == KindOfLIter) {
3825 match<void>(
3826 env.state.iters[it.id],
3827 [] (DeadIter) {},
3828 [&] (const LiveIter& ti) {
3829 if (ti.baseLocal != NoLocalId) hasInvariantIterBase(env);
3833 if (it.kind == KindOfLIter) mayReadLocal(env, it.local);
3834 freeIter(env, it.id);
3837 env.propagate(op.target, &env.state);
3841  * Any include/require (or eval) op kills all locals and private properties.
3843 * We don't need to do anything for collect.publicStatics because we'll analyze
3844 * the included pseudo-main separately and see any effects it may have on
3845 * public statics.
3847 void inclOpImpl(ISS& env) {
3848 popC(env);
3849 killLocals(env);
3850 killThisProps(env);
3851 killSelfProps(env);
3852 mayUseVV(env);
3853 push(env, TInitCell);
3856 void in(ISS& env, const bc::Incl&) { inclOpImpl(env); }
3857 void in(ISS& env, const bc::InclOnce&) { inclOpImpl(env); }
3858 void in(ISS& env, const bc::Req&) { inclOpImpl(env); }
3859 void in(ISS& env, const bc::ReqOnce&) { inclOpImpl(env); }
3860 void in(ISS& env, const bc::ReqDoc&) { inclOpImpl(env); }
3861 void in(ISS& env, const bc::Eval&) { inclOpImpl(env); }
3863 void in(ISS& /*env*/, const bc::DefFunc&) {}
3864 void in(ISS& /*env*/, const bc::DefCls&) {}
3865 void in(ISS& /*env*/, const bc::DefClsNop&) {}
3866 void in(ISS& env, const bc::AliasCls&) {
3867 popC(env);
3868 push(env, TBool);
3871 void in(ISS& env, const bc::DefCns& op) {
3872 auto const t = popC(env);
3873 if (options.HardConstProp) {
3874 auto const v = tv(t);
3875 auto const val = v && tvAsCVarRef(&*v).isAllowedAsConstantValue() ?
3876 *v : make_tv<KindOfUninit>();
3877 auto const res = env.collect.cnsMap.emplace(op.str1, val);
3878 if (!res.second) {
3879 if (res.first->second.m_type == kReadOnlyConstant) {
3880 // we only saw a read of this constant
3881 res.first->second = val;
3882 } else {
3883 // more than one definition in this function
3884 res.first->second.m_type = kDynamicConstant;
3888 push(env, TBool);
3891 void in(ISS& /*env*/, const bc::DefTypeAlias&) {}
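// $this: if we already know $this is available, canonicalize to
// BareThis NeverNull; otherwise push the best known type for $this (TObj if
// unknown) and record that $this is available from here on.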
3893 void in(ISS& env, const bc::This&) {
3894 if (thisAvailable(env)) {
3895 return reduce(env, bc::BareThis { BareThisOp::NeverNull });
3897 auto const ty = thisType(env);
3898 push(env, ty ? *ty : TObj);
3899 setThisAvailable(env);
3902 void in(ISS& env, const bc::LateBoundCls& op) {
3903 auto const ty = selfCls(env);
3904 putClsRefSlot(env, op.slot, setctx(ty ? *ty : TCls));
3907 void in(ISS& env, const bc::CheckThis&) {
3908 if (thisAvailable(env)) {
3909 reduce(env, bc::Nop {});
3911 setThisAvailable(env);
3914 void in(ISS& env, const bc::BareThis& op) {
3915 if (thisAvailable(env)) {
3916 if (op.subop1 != BareThisOp::NeverNull) {
3917 return reduce(env, bc::BareThis { BareThisOp::NeverNull });
3921 auto const ty = thisType(env);
3922 switch (op.subop1) {
3923 case BareThisOp::Notice:
3924 break;
3925 case BareThisOp::NoNotice:
3926 nothrow(env);
3927 break;
3928 case BareThisOp::NeverNull:
3929 nothrow(env);
3930 setThisAvailable(env);
3931 return push(env, ty ? *ty : TObj);
3934 push(env, ty ? opt(*ty) : TOptObj);
3937 void in(ISS& env, const bc::InitThisLoc& op) {
3938 setLocRaw(env, op.loc1, TCell);
3939 env.state.thisLocToKill = op.loc1;
3942 void in(ISS& env, const bc::StaticLocDef& op) {
3943 if (staticLocHelper(env, op.loc1, topC(env))) {
3944 return reduce(env, bc::SetL { op.loc1 }, bc::PopC {});
3946 popC(env);
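// StaticLocCheck: when collected analysis gives a known constant value for
// this static local (and we're not in a memoize wrapper or closure body),
// reduce to materializing that constant and pushing True; otherwise the local
// may hold anything bindable (TGen) and the result is an unknown TBool.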
3949 void in(ISS& env, const bc::StaticLocCheck& op) {
3950 auto const l = op.loc1;
3951 if (!env.ctx.func->isMemoizeWrapper &&
3952 !env.ctx.func->isClosureBody &&
3953 env.collect.localStaticTypes.size() > l) {
3954 auto t = env.collect.localStaticTypes[l];
3955 if (auto v = tv(t)) {
3956 useLocalStatic(env, l);
3957 setLocRaw(env, l, t);
3958 return reduce(env,
3959 gen_constant(*v),
3960 bc::SetL { op.loc1 }, bc::PopC {},
3961 bc::True {});
3964 setLocRaw(env, l, TGen);
3965 maybeBindLocalStatic(env, l);
3966 push(env, TBool);
3969 void in(ISS& env, const bc::StaticLocInit& op) {
3970 if (staticLocHelper(env, op.loc1, topC(env))) {
3971 return reduce(env, bc::SetL { op.loc1 }, bc::PopC {});
3973 popC(env);
3976 /*
3977 * Amongst other things, we use this to mark units non-persistent.
3978 */
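// OODeclExists backs class_exists()/interface_exists()/trait_exists(); the
// popped `flag` is the autoload argument, which is why const-propping is only
// safe for systemlib classes or superclasses of the current context.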
3979 void in(ISS& env, const bc::OODeclExists& op) {
3980 auto flag = popC(env);
3981 auto name = popC(env);
3982 push(env, [&] {
3983 if (!name.strictSubtypeOf(TStr)) return TBool;
3984 auto const v = tv(name);
3985 if (!v) return TBool;
3986 auto rcls = env.index.resolve_class(env.ctx, v->m_data.pstr);
3987 if (!rcls || !rcls->cls()) return TBool;
3988 auto const mayExist = [&] () -> bool {
3989 switch (op.subop1) {
3990 case OODeclExistsOp::Class:
3991 return !(rcls->cls()->attrs & (AttrInterface | AttrTrait));
3992 case OODeclExistsOp::Interface:
3993 return rcls->cls()->attrs & AttrInterface;
3994 case OODeclExistsOp::Trait:
3995 return rcls->cls()->attrs & AttrTrait;
3997 not_reached();
3998 }();
3999 auto unit = rcls->cls()->unit;
4000 auto canConstProp = [&] {
4001 // It's generally not safe to constprop this, because of
4002 // autoload. We're safe if it's part of systemlib, or a
4003 // superclass of the current context.
4004 if (is_systemlib_part(*unit)) return true;
4005 if (!env.ctx.cls) return false;
4006 auto thisClass = env.index.resolve_class(env.ctx.cls);
4007 return thisClass.subtypeOf(*rcls);
4009 if (canConstProp()) {
4010 constprop(env);
4011 return mayExist ? TTrue : TFalse;
4013 if (!any(env.collect.opts & CollectionOpts::Inlining)) {
4014 unit->persistent.store(false, std::memory_order_relaxed);
4016 // At this point, if mayExist is true we still don't know that it
4017 // *does* exist; if mayExist is false, we know it either doesn't
4018 // exist or doesn't have the right type.
4019 return mayExist ? TBool : TFalse;
4020 } ());
4023 namespace {
4024 bool couldBeMocked(const Type& t) {
4025 if (is_specialized_cls(t)) {
4026 return dcls_of(t).cls.couldBeMocked();
4027 } else if (is_specialized_obj(t)) {
4028 return dobj_of(t).cls.couldBeMocked();
4030 // In practice this should not occur, since it is used mostly on the result
4031 // of looked-up type constraints.
4032 return true;
4036 void in(ISS& env, const bc::VerifyParamType& op) {
4037 if (env.ctx.func->isMemoizeImpl &&
4038 !locCouldBeRef(env, op.loc1) &&
4039 RuntimeOption::EvalHardTypeHints) {
4040 // a MemoizeImpl's params have already been checked by the wrapper
4041 return reduce(env, bc::Nop {});
4044 // Generally we won't know anything about the params, but
4045 // analyze_func_inline does, and this can help with effect-free analysis.
4046 auto const constraint = env.ctx.func->params[op.loc1].typeConstraint;
4047 if (env.index.satisfies_constraint(env.ctx,
4048 locAsCell(env, op.loc1),
4049 constraint)) {
4050 reduce(env, bc::Nop {});
4051 return;
4054 if (!RuntimeOption::EvalHardTypeHints) return;
4056 /*
4057 * In HardTypeHints mode, we assume that if this opcode doesn't
4058 * throw, the parameter was of the specified type (although it may
4059 * have been a Ref if the parameter was by reference).
4060 *
4061 * The env.setLoc here handles a parameter that was already known
4062 * to be a reference.
4063 *
4064 * NB: VerifyParamType of a reference parameter can kill any
4065 * references if it re-enters, even if Option::HardTypeHints is
4066 * on.
4067 */
4068 if (RuntimeOption::EvalThisTypeHintLevel != 3 && constraint.isThis()) {
4069 return;
4071 if (constraint.hasConstraint() && !constraint.isTypeVar() &&
4072 !constraint.isTypeConstant()) {
4073 auto t =
4074 loosen_dvarrayness(env.index.lookup_constraint(env.ctx, constraint));
4075 if (constraint.isThis() && couldBeMocked(t)) {
4076 t = unctx(std::move(t));
4078 if (t.subtypeOf(TBottom)) unreachable(env);
4079 FTRACE(2, " {} ({})\n", constraint.fullName(), show(t));
4080 setLoc(env, op.loc1, std::move(t));
4084 void verifyRetImpl(ISS& env, TypeConstraint& constraint, bool reduce_this) {
4085 auto stackT = topC(env);
4087 // If there is no return type constraint, or if the return type
4088 // constraint is a typevar, or if the top of stack is the same
4089 // or a subtype of the type constraint, then this is a no-op.
4090 if (env.index.satisfies_constraint(env.ctx, stackT, constraint)) {
4091 reduce(env, bc::Nop {});
4092 return;
4095 // If CheckReturnTypeHints < 3 OR if the constraint is soft,
4096 // then there are no optimizations we can safely do here, so
4097 // just leave the top of stack as is.
4098 if (RuntimeOption::EvalCheckReturnTypeHints < 3 || constraint.isSoft()
4099 || (RuntimeOption::EvalThisTypeHintLevel != 3 && constraint.isThis())) {
4100 return;
4103 // In cases where we have a `this` hint and stackT is a TOptObj known to
4104 // be this, we can replace the check with a non-null check. These cases
4105 // likely come from a BareThis that could return null. Since the runtime will
4106 // split these translations, it will rarely return null in practice.
4107 if (constraint.isThis() && !constraint.isNullable() && is_opt(stackT) &&
4108 env.index.satisfies_constraint(env.ctx, unopt(stackT), constraint)) {
4109 if (reduce_this) {
4110 reduce(env, bc::VerifyRetNonNullC {});
4111 return;
4115 // If we reach here, then CheckReturnTypeHints >= 3 AND the constraint
4116 // is not soft. We can safely assume that either VerifyRetTypeC will
4117 // throw or it will produce a value whose type is compatible with the
4118 // return type constraint.
4119 auto tcT =
4120 remove_uninit(
4121 loosen_dvarrayness(env.index.lookup_constraint(env.ctx, constraint))
4124 // If tcT could be an interface or trait, we upcast it to TObj/TOptObj.
4125 // Why? Because we want to uphold the invariant that we only refine return
4126 // types and never widen them, and if we allow tcT to be an interface then
4127 // it's possible for violations of this invariant to arise. For an example,
4128 // see "hphp/test/slow/hhbbc/return-type-opt-bug.php".
4129 // Note: It's safe to use TObj/TOptObj because lookup_constraint() only
4130 // returns classes or interfaces or traits (it never returns something that
4131 // could be an enum or type alias) and it never returns anything that could
4132 // be a "magic" interface that supports non-objects. (For traits the return
4133 // typehint will always throw at run time, so it's safe to use TObj/TOptObj.)
4134 if (is_specialized_obj(tcT) && dobj_of(tcT).cls.couldBeInterfaceOrTrait()) {
4135 tcT = is_opt(tcT) ? TOptObj : TObj;
4138 auto retT = intersection_of(std::move(tcT), std::move(stackT));
4139 if (retT.subtypeOf(TBottom)) {
4140 unreachable(env);
4141 return;
4144 popC(env);
4145 push(env, std::move(retT));
4148 void in(ISS& /*env*/, const bc::VerifyRetTypeV& /*op*/) {}
4149 void in(ISS& env, const bc::VerifyOutType& op) {
4150 verifyRetImpl(env, env.ctx.func->params[op.arg1].typeConstraint, false);
4153 void in(ISS& env, const bc::VerifyRetTypeC& /*op*/) {
4154 verifyRetImpl(env, env.ctx.func->retTypeConstraint, true);
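// VerifyRetNonNullC (see the `this`-hint reduction in verifyRetImpl above):
// when hard return-type enforcement applies, all we need to do here is strip
// the nullability from the value on top of the stack.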
4157 void in(ISS& env, const bc::VerifyRetNonNullC& /*op*/) {
4158 auto const constraint = env.ctx.func->retTypeConstraint;
4159 if (RuntimeOption::EvalCheckReturnTypeHints < 3 || constraint.isSoft()
4160 || (RuntimeOption::EvalThisTypeHintLevel != 3 && constraint.isThis())) {
4161 return;
4164 auto stackT = topC(env);
4166 if (!is_opt(stackT)) {
4167 reduce(env, bc::Nop {});
4168 return;
4171 popC(env);
4172 push(env, unopt(std::move(stackT)));
4175 void in(ISS& env, const bc::Self& op) {
4176 auto self = selfClsExact(env);
4177 putClsRefSlot(env, op.slot, self ? *self : TCls);
4180 void in(ISS& env, const bc::Parent& op) {
4181 auto parent = parentClsExact(env);
4182 putClsRefSlot(env, op.slot, parent ? *parent : TCls);
4185 void in(ISS& env, const bc::CreateCl& op) {
4186 auto const nargs = op.arg1;
4187 auto const clsPair = env.index.resolve_closure_class(env.ctx, op.arg2);
4189 /*
4190 * Every closure should have a unique allocation site, but we may see it
4191 * multiple times in a given round of analyzing this function. Each time we
4192 * may have more information about the used variables; the types can only
4193 * grow. If it's already there, we need to merge the used vars in with what
4194 * we saw last time.
4195 */
4196 if (nargs) {
4197 std::vector<Type> usedVars(nargs);
4198 for (auto i = uint32_t{0}; i < nargs; ++i) {
4199 usedVars[nargs - i - 1] = unctx(popT(env));
4201 merge_closure_use_vars_into(
4202 env.collect.closureUseTypes,
4203 clsPair.second,
4204 usedVars
4208 // Closure classes can be cloned and rescoped at runtime, so it's not safe to
4209 // assert the exact type of closure objects. The best we can do is assert
4210 // that it's a subclass of Closure.
4211 auto const closure = env.index.builtin_class(s_Closure.get());
4213 return push(env, subObj(closure));
4216 void in(ISS& env, const bc::CreateCont& /*op*/) {
4217 // The first resume is always next(), which pushes null.
4218 push(env, TInitNull);
4221 void in(ISS& env, const bc::ContEnter&) { popC(env); push(env, TInitCell); }
4222 void in(ISS& env, const bc::ContRaise&) { popC(env); push(env, TInitCell); }
4224 void in(ISS& env, const bc::Yield&) {
4225 popC(env);
4226 push(env, TInitCell);
4229 void in(ISS& env, const bc::YieldK&) {
4230 popC(env);
4231 popC(env);
4232 push(env, TInitCell);
4235 void in(ISS& env, const bc::ContAssignDelegate&) {
4236 popC(env);
4239 void in(ISS& env, const bc::ContEnterDelegate&) {
4240 popC(env);
4243 void in(ISS& env, const bc::YieldFromDelegate&) {
4244 push(env, TInitCell);
4247 void in(ISS& /*env*/, const bc::ContUnsetDelegate&) {}
4249 void in(ISS& /*env*/, const bc::ContCheck&) {}
4250 void in(ISS& env, const bc::ContValid&) { push(env, TBool); }
4251 void in(ISS& env, const bc::ContStarted&) { push(env, TBool); }
4252 void in(ISS& env, const bc::ContKey&) { push(env, TInitCell); }
4253 void in(ISS& env, const bc::ContCurrent&) { push(env, TInitCell); }
4254 void in(ISS& env, const bc::ContGetReturn&) { push(env, TInitCell); }
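// Shared by Await and WHResult: given the type of the awaited value, push the
// type the wait handle will produce, or mark the fallthrough unreachable if
// it can only throw.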
4256 void pushTypeFromWH(ISS& env, Type t) {
4257 if (!t.couldBe(TObj)) {
4258 // These opcodes require an object descending from WaitHandle.
4259 // Exceptions will be thrown for any non-object.
4260 push(env, TBottom);
4261 unreachable(env);
4262 return;
4265 // Throw away non-obj component.
4266 t &= TObj;
4268 // If we aren't even sure this is a wait handle, there's nothing we can
4269 // infer here.
4270 if (!is_specialized_wait_handle(t)) {
4271 return push(env, TInitCell);
4274 auto inner = wait_handle_inner(t);
4275 if (inner.subtypeOf(TBottom)) {
4276 // If it's a WaitH<Bottom>, we know it's going to throw an exception, and
4277 // the fallthrough code is not reachable.
4278 push(env, TBottom);
4279 unreachable(env);
4280 return;
4283 push(env, std::move(inner));
4286 void in(ISS& env, const bc::WHResult&) {
4287 pushTypeFromWH(env, popC(env));
4290 void in(ISS& env, const bc::Await&) {
4291 pushTypeFromWH(env, popC(env));
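// AwaitAll reads a contiguous range of locals (each expected to hold a wait
// handle) and pushes null; if an equivalent range starting at a different
// local is known, reduce to that canonical form first.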
4294 void in(ISS& env, const bc::AwaitAll& op) {
4295 auto const equiv = equivLocalRange(env, op.locrange);
4296 if (equiv != op.locrange.first) {
4297 return reduce(
4298 env,
4299 bc::AwaitAll {LocalRange {equiv, op.locrange.restCount}}
4303 for (uint32_t i = 0; i < op.locrange.restCount + 1; ++i) {
4304 mayReadLocal(env, op.locrange.first + i);
4307 push(env, TInitNull);
4310 void in(ISS& /*env*/, const bc::IncStat&) {}
4312 void in(ISS& env, const bc::Idx&) {
4313 popC(env); popC(env); popC(env);
4314 push(env, TInitCell);
4317 void in(ISS& env, const bc::ArrayIdx&) {
4318 popC(env); popC(env); popC(env);
4319 push(env, TInitCell);
4322 void in(ISS& env, const bc::CheckProp&) {
4323 if (env.ctx.cls->attrs & AttrNoOverride) {
4324 return reduce(env, bc::False {});
4326 nothrow(env);
4327 push(env, TBool);
4330 void in(ISS& env, const bc::InitProp& op) {
4331 auto const t = topC(env);
4332 switch (op.subop2) {
4333 case InitPropOp::Static:
4334 mergeSelfProp(env, op.str1, t);
4335 if (auto c = env.collect.publicStatics) {
4336 c->merge(env.ctx, *env.ctx.cls, sval(op.str1), t);
4338 break;
4339 case InitPropOp::NonStatic:
4340 mergeThisProp(env, op.str1, t);
4341 break;
4343 auto const v = tv(t);
4344 if (v || !could_contain_objects(t)) {
4345 for (auto& prop : env.ctx.func->cls->properties) {
4346 if (prop.name == op.str1) {
4347 ITRACE(1, "InitProp: {} = {}\n", op.str1, show(t));
4348 prop.attrs = (Attr)(prop.attrs & ~AttrDeepInit);
4349 if (!v) break;
4350 prop.val = *v;
4351 if (op.subop2 == InitPropOp::Static &&
4352 !env.collect.publicStatics &&
4353 !env.index.frozen()) {
4354 env.index.fixup_public_static(env.ctx.func->cls, prop.name, t);
4356 return reduce(env, bc::PopC {});
4360 popC(env);
4363 void in(ISS& env, const bc::Silence& op) {
4364 nothrow(env);
4365 switch (op.subop2) {
4366 case SilenceOp::Start:
4367 setLoc(env, op.loc1, TInt);
4368 break;
4369 case SilenceOp::End:
4370 break;
4375 //////////////////////////////////////////////////////////////////////
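// Dispatch a single bytecode to the interp_step::in() overload for its opcode;
// the OPCODES x-macro expands to one case per bytecode.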
4377 void dispatch(ISS& env, const Bytecode& op) {
4378 #define O(opcode, ...) case Op::opcode: interp_step::in(env, op.opcode); return;
4379 switch (op.op) { OPCODES }
4380 #undef O
4381 not_reached();
4384 //////////////////////////////////////////////////////////////////////
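// Trace and interpret a fused group of bytecodes as a single unit, advancing
// the iterator past every instruction the group consumes.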
4386 template<class Iterator, class... Args>
4387 void group(ISS& env, Iterator& it, Args&&... args) {
4388 FTRACE(2, " {}\n", [&]() -> std::string {
4389 auto ret = std::string{};
4390 for (auto i = size_t{0}; i < sizeof...(Args); ++i) {
4391 ret += " " + show(env.ctx.func, it[i]);
4392 if (i != sizeof...(Args) - 1) ret += ';';
4394 return ret;
4395 }());
4396 it += sizeof...(Args);
4397 return interp_step::group(env, std::forward<Args>(args)...);
4400 template<class Iterator>
4401 void interpStep(ISS& env, Iterator& it, Iterator stop) {
4402 /*
4403 * During the analysis phase, we analyze some common bytecode
4404 * patterns involving conditional jumps as groups to be able to
4405 * add additional information to the type environment depending on
4406 * whether the branch is taken or not.
4407 */
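// For example, a sequence like `IsTypeL <loc> Null; JmpZ <target>` is handled
// as one group, so the local's type can be refined differently on the taken
// and fallthrough edges.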
4408 auto const o1 = it->op;
4409 auto const o2 = it + 1 != stop ? it[1].op : Op::Nop;
4410 auto const o3 = it + 1 != stop &&
4411 it + 2 != stop ? it[2].op : Op::Nop;
4413 #define X(y) \
4414 case Op::y: \
4415 switch (o2) { \
4416 case Op::Not: \
4417 switch (o3) { \
4418 case Op::JmpZ: \
4419 return group(env, it, it[0].y, it[1].Not, it[2].JmpZ); \
4420 case Op::JmpNZ: \
4421 return group(env, it, it[0].y, it[1].Not, it[2].JmpNZ); \
4422 default: break; \
4424 break; \
4425 case Op::JmpZ: \
4426 return group(env, it, it[0].y, it[1].JmpZ); \
4427 case Op::JmpNZ: \
4428 return group(env, it, it[0].y, it[1].JmpNZ); \
4429 default: break; \
4431 break;
4433 switch (o1) {
4434 X(InstanceOfD)
4435 X(IsTypeStruct)
4436 X(IsTypeL)
4437 X(IsTypeC)
4438 X(IsUninit)
4439 X(StaticLocCheck)
4440 X(Same)
4441 X(NSame)
4442 case Op::MemoGet:
4443 switch (o2) {
4444 case Op::IsUninit:
4445 switch (o3) {
4446 case Op::JmpZ:
4447 return group(env, it, it[0].MemoGet, it[1].IsUninit, it[2].JmpZ);
4448 case Op::JmpNZ:
4449 return group(env, it, it[0].MemoGet, it[1].IsUninit, it[2].JmpNZ);
4450 default: break;
4452 break;
4453 default: break;
4455 break;
4456 default: break;
4458 #undef X
4460 FTRACE(2, " {}\n", show(env.ctx.func, *it));
4461 dispatch(env, *it++);
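// Interpret one bytecode (or one fused group) starting at `iter`, returning
// the resulting StepFlags and propagating state across throw exits when the
// instruction might raise.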
4464 template<class Iterator>
4465 StepFlags interpOps(Interp& interp,
4466 Iterator& iter, Iterator stop,
4467 PropagateFn propagate) {
4468 auto flags = StepFlags{};
4469 ISS env { interp, flags, propagate };
4471 // If there are throw exit edges, make a copy of the state (except
4472 // stacks) in case we need to propagate across throw exits (if
4473 // it's a PEI).
4474 auto const stateBefore = interp.blk->throwExits.empty()
4475 ? State{}
4476 : without_stacks(interp.state);
4478 auto const numPushed = iter->numPush();
4479 interpStep(env, iter, stop);
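// If every value the instruction pushed is now a known constant (or scalar),
// rewrite the pushed stack types to those exact values; this lets us clear
// wasPEI and mark the step effect-free under ConstantProp below.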
4481 auto fix_const_outputs = [&] {
4482 auto elems = &interp.state.stack.back();
4483 constexpr auto numCells = 4;
4484 Cell cells[numCells];
4486 auto i = size_t{0};
4487 while (i < numPushed) {
4488 if (i < numCells) {
4489 auto const v = tv(elems->type);
4490 if (!v) return false;
4491 cells[i] = *v;
4492 } else if (!is_scalar(elems->type)) {
4493 return false;
4495 ++i;
4496 --elems;
4498 while (++elems, i--) {
4499 elems->type = from_cell(i < numCells ?
4500 cells[i] : *tv(elems->type));
4502 return true;
4505 if (options.ConstantProp && flags.canConstProp && fix_const_outputs()) {
4506 if (flags.wasPEI) {
4507 FTRACE(2, " nothrow (due to constprop)\n");
4508 flags.wasPEI = false;
4510 if (!flags.effectFree) {
4511 FTRACE(2, " effect_free (due to constprop)\n");
4512 flags.effectFree = true;
4516 assertx(!flags.effectFree || !flags.wasPEI);
4517 if (flags.wasPEI) {
4518 FTRACE(2, " PEI.\n");
4519 for (auto exit : interp.blk->throwExits) {
4520 propagate(exit, &stateBefore);
4523 return flags;
4526 //////////////////////////////////////////////////////////////////////
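// Interpret an entire block, propagating the resulting states to fallthrough,
// branch, and throw successors; returns RunFlags summarizing the block
// (return value, local-static usage).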
4528 RunFlags run(Interp& interp, PropagateFn propagate) {
4529 SCOPE_EXIT {
4530 FTRACE(2, "out {}{}\n",
4531 state_string(*interp.ctx.func, interp.state, interp.collect),
4532 property_state_string(interp.collect.props));
4535 auto ret = RunFlags {};
4536 auto const stop = end(interp.blk->hhbcs);
4537 auto iter = begin(interp.blk->hhbcs);
4538 while (iter != stop) {
4539 auto const flags = interpOps(interp, iter, stop, propagate);
4540 if (interp.collect.effectFree && !flags.effectFree) {
4541 interp.collect.effectFree = false;
4542 if (any(interp.collect.opts & CollectionOpts::EffectFreeOnly)) {
4543 FTRACE(2, " Bailing because not effect free\n");
4544 return ret;
4548 if (flags.usedLocalStatics) {
4549 if (!ret.usedLocalStatics) {
4550 ret.usedLocalStatics = std::move(flags.usedLocalStatics);
4551 } else {
4552 for (auto& elm : *flags.usedLocalStatics) {
4553 ret.usedLocalStatics->insert(std::move(elm));
4558 if (interp.state.unreachable) {
4559 FTRACE(2, " <bytecode fallthrough is unreachable>\n");
4560 return ret;
4563 if (flags.jmpDest != NoBlockId &&
4564 flags.jmpDest != interp.blk->fallthrough) {
4565 FTRACE(2, " <took branch; no fallthrough>\n");
4566 return ret;
4569 if (flags.returned) {
4570 FTRACE(2, " returned {}\n", show(*flags.returned));
4571 always_assert(iter == stop);
4572 always_assert(interp.blk->fallthrough == NoBlockId);
4573 ret.returned = flags.returned;
4574 return ret;
4578 FTRACE(2, " <end block>\n");
4579 if (interp.blk->fallthrough != NoBlockId) {
4580 propagate(interp.blk->fallthrough, &interp.state);
4582 return ret;
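// Interpret a single bytecode with a no-op propagate function, for callers
// that only need the resulting StepFlags.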
4585 StepFlags step(Interp& interp, const Bytecode& op) {
4586 auto flags = StepFlags{};
4587 auto noop = [] (BlockId, const State*) {};
4588 ISS env { interp, flags, noop };
4589 dispatch(env, op);
4590 return flags;
4593 void default_dispatch(ISS& env, const Bytecode& op) {
4594 dispatch(env, op);
4597 folly::Optional<Type> thisType(const Interp& interp) {
4598 return thisTypeHelper(interp.index, interp.ctx);
4601 //////////////////////////////////////////////////////////////////////