Kill InitThisLoc
[hiphop-php.git] / hphp / hhbbc / interp.cpp
blob3e56ad732f25b29e27e29168b000cf9649485cf8
1 /*
2 +----------------------------------------------------------------------+
3 | HipHop for PHP |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
16 #include "hphp/hhbbc/interp.h"
18 #include <algorithm>
19 #include <vector>
20 #include <string>
21 #include <iterator>
23 #include <folly/Optional.h>
24 #include <folly/gen/Base.h>
25 #include <folly/gen/String.h>
27 #include "hphp/util/hash-set.h"
28 #include "hphp/util/trace.h"
29 #include "hphp/runtime/base/array-init.h"
30 #include "hphp/runtime/base/array-iterator.h"
31 #include "hphp/runtime/base/collections.h"
32 #include "hphp/runtime/base/static-string-table.h"
33 #include "hphp/runtime/base/tv-arith.h"
34 #include "hphp/runtime/base/tv-comparisons.h"
35 #include "hphp/runtime/base/tv-conversions.h"
36 #include "hphp/runtime/base/type-structure.h"
37 #include "hphp/runtime/base/type-structure-helpers.h"
38 #include "hphp/runtime/base/type-structure-helpers-defs.h"
39 #include "hphp/runtime/vm/runtime.h"
40 #include "hphp/runtime/vm/unit-util.h"
42 #include "hphp/runtime/ext/hh/ext_hh.h"
44 #include "hphp/hhbbc/analyze.h"
45 #include "hphp/hhbbc/bc.h"
46 #include "hphp/hhbbc/cfg.h"
47 #include "hphp/hhbbc/class-util.h"
48 #include "hphp/hhbbc/eval-cell.h"
49 #include "hphp/hhbbc/index.h"
50 #include "hphp/hhbbc/interp-state.h"
51 #include "hphp/hhbbc/optimize.h"
52 #include "hphp/hhbbc/representation.h"
53 #include "hphp/hhbbc/type-builtins.h"
54 #include "hphp/hhbbc/type-ops.h"
55 #include "hphp/hhbbc/type-system.h"
56 #include "hphp/hhbbc/unit-util.h"
57 #include "hphp/hhbbc/wide-func.h"
59 #include "hphp/hhbbc/interp-internal.h"
61 namespace HPHP { namespace HHBBC {
63 //////////////////////////////////////////////////////////////////////
65 namespace {
67 const StaticString s_PHP_Incomplete_Class("__PHP_Incomplete_Class");
68 const StaticString s_IMemoizeParam("HH\\IMemoizeParam");
69 const StaticString s_getInstanceKey("getInstanceKey");
70 const StaticString s_Closure("Closure");
71 const StaticString s_this("HH\\this");
/*
 * Returns true if `op` does nothing except push a value onto the
 * stack (no side effects, cannot throw), so a pop of its result can
 * simply elide the instruction altogether.
 */
bool poppable(Op op) {
  switch (op) {
  case Op::Dup:
  case Op::Null:
  case Op::False:
  case Op::True:
  case Op::Int:
  case Op::Double:
  case Op::String:
  case Op::Array:
  case Op::Vec:
  case Op::Dict:
  case Op::Keyset:
  case Op::NewDArray:
  case Op::NewDictArray:
  case Op::NewCol:
    return true;
  default:
    return false;
  }
}
95 void interpStep(ISS& env, const Bytecode& bc);
/*
 * Append `bc` to the optimized output stream for the current block.
 *
 * The bytecode is first stamped with the current source location.  If
 * nothing has been replaced yet and `bc` is identical to the next
 * original bytecode, we just bump the "unchanged" counter instead of
 * copying — this keeps env.replacedBcs empty for blocks that don't
 * actually change.
 */
void record(ISS& env, const Bytecode& bc) {
  if (bc.srcLoc != env.srcLoc) {
    // Re-enter with the srcLoc fixed up so the comparison below sees
    // a fully-normalized bytecode.
    Bytecode tmp = bc;
    tmp.srcLoc = env.srcLoc;
    return record(env, tmp);
  }
  if (!env.replacedBcs.size() &&
      env.unchangedBcs < env.blk.hhbcs.size() &&
      bc == env.blk.hhbcs[env.unchangedBcs]) {
    env.unchangedBcs++;
    return;
  }
  ITRACE(2, " => {}\n", show(env.ctx.func, bc));
  env.replacedBcs.push_back(bc);
}
115 // The number of pops as seen by interp.
116 uint32_t numPop(const Bytecode& bc) {
117 if (bc.op == Op::CGetL2) return 1;
118 return bc.numPop();
121 // The number of pushes as seen by interp.
122 uint32_t numPush(const Bytecode& bc) {
123 if (bc.op == Op::CGetL2) return 2;
124 return bc.numPush();
// Request that the current block be re-analyzed after this pass: a
// transformation changed something earlier optimizations may depend on.
void reprocess(ISS& env) {
  env.reprocess = true;
}
/*
 * Return a mutable pointer to the array literal payload of the
 * bytecode currently tracked by the innermost add_elem sequence, or
 * nullptr if that bytecode is a Concat (no array to mutate).
 *
 * If the tracked index points into the unchanged prefix, it must be a
 * Concat (we only start tracking array literals inside replacedBcs).
 */
ArrayData** add_elem_array(ISS& env) {
  auto const idx = env.trackedElems.back().idx;
  if (idx < env.unchangedBcs) {
    auto const DEBUG_ONLY& bc = env.blk.hhbcs[idx];
    assertx(bc.op == Op::Concat);
    return nullptr;
  }
  assertx(idx >= env.unchangedBcs);
  auto& bc = env.replacedBcs[idx - env.unchangedBcs];
  // Pick the literal-array immediate matching the opcode kind.
  auto arr = [&] () -> const ArrayData** {
    switch (bc.op) {
      case Op::Array: return &bc.Array.arr1;
      case Op::Dict: return &bc.Dict.arr1;
      case Op::Keyset: return &bc.Keyset.arr1;
      case Op::Vec: return &bc.Vec.arr1;
      case Op::Concat: return nullptr;
      default: not_reached();
    }
  }();
  // The immediates are stored const; we own the replaced bytecode, so
  // casting away const to update the literal in place is safe here.
  return const_cast<ArrayData**>(arr);
}
/*
 * Begin tracking an AddElemC/AddNewElemC sequence whose base is the
 * statically-known array-like in `ty`.
 *
 * Replaces the consumed inputs with PopCs, emits a literal-array
 * bytecode holding the current value, and records the stack depth and
 * bytecode index in env.trackedElems so later add_elem ops can mutate
 * the literal in place.  Returns false if `ty` has no known
 * non-static array value.
 */
bool start_add_elem(ISS& env, Type& ty, Op op) {
  auto value = tvNonStatic(ty);
  if (!value || !isArrayLikeType(value->m_type)) return false;

  if (op == Op::AddElemC) {
    // AddElemC consumes base + key + value.
    reduce(env, bc::PopC {}, bc::PopC {}, bc::PopC {});
  } else {
    // AddNewElemC consumes base + value.
    reduce(env, bc::PopC {}, bc::PopC {});
  }
  env.trackedElems.emplace_back(
    env.state.stack.size(),
    env.unchangedBcs + env.replacedBcs.size()
  );

  auto const arr = value->m_data.parr;
  // Emit the literal bytecode matching the array's kind.
  env.replacedBcs.push_back(
    [&] () -> Bytecode {
      if (arr->isKeysetType()) {
        return bc::Keyset { arr };
      }
      if (arr->isVecType()) {
        return bc::Vec { arr };
      }
      if (arr->isDictType()) {
        return bc::Dict { arr };
      }
      if (arr->isPHPArrayType()) {
        return bc::Array { arr };
      }
      not_reached();
    }()
  );
  env.replacedBcs.back().srcLoc = env.srcLoc;
  ITRACE(2, "(addelem* -> {}\n",
         show(env.ctx.func, env.replacedBcs.back()));
  push(env, std::move(ty));
  effect_free(env);
  return true;
}
/*
 * Alter the saved add_elem array in a way that preserves its provenance tag
 * or adds a new one if applicable (i.e. the array is a vec or dict)
 *
 * The `mutate` parameter should be callable with an ArrayData** pointing to the
 * add_elem array cached in the interp state and should write to it directly.
 *
 * Returns false (and does nothing) when there is no cached array to
 * mutate; true otherwise.
 */
template <typename Fn>
bool mutate_add_elem_array(ISS& env, ProvTag loc, Fn&& mutate) {
  auto const arr = add_elem_array(env);
  if (!arr) return false;

  // Callers must supply a valid tag whenever provenance is enabled.
  assertx(!RuntimeOption::EvalArrayProvenance || loc.valid());

  if (!RuntimeOption::EvalArrayProvenance) {
    mutate(arr);
    return true;
  }

  // We need to propagate the provenance info in case we promote *arr from
  // static to counted (or if its representation changes in some other way)...
  auto const tag = ProvTag::FromSArr(*arr);

  mutate(arr);

  // ...which means we'll have to setTag if
  //  - the array still needs a tag AND
  // either:
  //  - the array had no tag coming into this op OR
  //  - the set op cleared the provenance bit somehow
  //    (representation changed or we CoWed a static array)
  if (arrprov::arrayWantsTag(*arr)) {
    if (tag == ProvTag::NoTag) {
      arrprov::setTag(*arr, loc.get());
    } else if (!arrprov::getTag(*arr).valid()) {
      arrprov::setTag(*arr, tag.get());
    }
  }

  // Make sure that, if provenance is enabled and the array wants a tag, we
  // definitely assigned one leaving this op.
  assertx(!loc.valid() ||
          !arrprov::arrayWantsTag(*arr) ||
          arrprov::getTag(*arr).valid());
  return true;
}
241 void finish_tracked_elem(ISS& env) {
242 auto const arr = add_elem_array(env);
243 env.trackedElems.pop_back();
244 if (arr) ArrayData::GetScalarArray(arr);
247 void finish_tracked_elems(ISS& env, size_t depth) {
248 while (!env.trackedElems.empty() && env.trackedElems.back().depth >= depth) {
249 finish_tracked_elem(env);
253 uint32_t id_from_slot(ISS& env, int slot) {
254 auto const id = (env.state.stack.end() - (slot + 1))->id;
255 assertx(id == StackElem::NoId ||
256 id < env.unchangedBcs + env.replacedBcs.size());
257 return id;
260 const Bytecode* op_from_id(ISS& env, uint32_t id) {
261 if (id == StackElem::NoId) return nullptr;
262 if (id < env.unchangedBcs) return &env.blk.hhbcs[id];
263 auto const off = id - env.unchangedBcs;
264 assertx(off < env.replacedBcs.size());
265 return &env.replacedBcs[off];
268 void ensure_mutable(ISS& env, uint32_t id) {
269 if (id < env.unchangedBcs) {
270 auto const delta = env.unchangedBcs - id;
271 env.replacedBcs.resize(env.replacedBcs.size() + delta);
272 for (auto i = env.replacedBcs.size(); i-- > delta; ) {
273 env.replacedBcs[i] = std::move(env.replacedBcs[i - delta]);
275 for (auto i = 0; i < delta; i++) {
276 env.replacedBcs[i] = env.blk.hhbcs[id + i];
278 env.unchangedBcs = id;
/*
 * Turn the instruction that wrote the slot'th element from the top of
 * the stack into a Nop, adjusting the stack appropriately. If its the
 * previous instruction, just rewind.
 *
 * Returns the (negative or zero) change in stack size.
 */
int kill_by_slot(ISS& env, int slot) {
  auto const id = id_from_slot(env, slot);
  assertx(id != StackElem::NoId);
  auto const sz = env.state.stack.size();
  // if its the last bytecode we processed, we can rewind and avoid
  // the reprocess overhead.
  if (id == env.unchangedBcs + env.replacedBcs.size() - 1) {
    rewind(env, 1);
    return env.state.stack.size() - sz;
  }
  ensure_mutable(env, id);
  auto& bc = env.replacedBcs[id - env.unchangedBcs];
  auto const pop = numPop(bc);
  auto const push = numPush(bc);
  ITRACE(2, "kill_by_slot: slot={}, id={}, was {}\n",
         slot, id, show(env.ctx.func, bc));
  bc = bc_with_loc(bc.srcLoc, bc::Nop {});
  // Remove the killed instruction's stack effects, then re-run the
  // block so downstream state is recomputed consistently.
  env.state.stack.kill(pop, push, id);
  reprocess(env);
  return env.state.stack.size() - sz;
}
310 * Check whether an instruction can be inserted immediately after the
311 * slot'th stack entry was written. This is only possible if slot was
312 * the last thing written by the instruction that wrote it (ie some
313 * bytecodes push more than one value - there's no way to insert a
314 * bytecode that will write *between* those values on the stack).
316 bool can_insert_after_slot(ISS& env, int slot) {
317 auto const it = env.state.stack.end() - (slot + 1);
318 if (it->id == StackElem::NoId) return false;
319 if (auto const next = it.next_elem(1)) {
320 return next->id != it->id;
322 return true;
/*
 * Insert a sequence of bytecodes after the instruction that wrote the
 * slot'th element from the top of the stack.
 *
 * The entire sequence pops numPop, and pushes numPush stack
 * elements. Only the last bytecode can push anything onto the stack,
 * and the types it pushes are pointed to by types (if you have more
 * than one bytecode that pushes, call this more than once).
 */
void insert_after_slot(ISS& env, int slot,
                       int numPop, int numPush, const Type* types,
                       const BytecodeVec& bcs) {
  assertx(can_insert_after_slot(env, slot));
  auto const id = id_from_slot(env, slot);
  assertx(id != StackElem::NoId);
  // Everything from id+1 onward must be mutable so we can splice.
  ensure_mutable(env, id + 1);
  env.state.stack.insert_after(numPop, numPush, types, bcs.size(), id);
  env.replacedBcs.insert(env.replacedBcs.begin() + (id + 1 - env.unchangedBcs),
                         bcs.begin(), bcs.end());
  using namespace folly::gen;
  ITRACE(2, "insert_after_slot: slot={}, id={} [{}]\n",
         slot, id,
         from(bcs) |
         map([&] (const Bytecode& bc) { return show(env.ctx.func, bc); }) |
         unsplit<std::string>(", "));
}
352 Bytecode& mutate_last_op(ISS& env) {
353 assertx(will_reduce(env));
355 if (!env.replacedBcs.size()) {
356 assertx(env.unchangedBcs);
357 env.replacedBcs.push_back(env.blk.hhbcs[--env.unchangedBcs]);
359 return env.replacedBcs.back();
/*
 * Can be used to replace one op with another when rewind/reduce isn't
 * safe (eg to change a SetL to a PopL - its not safe to rewind/reduce
 * because the SetL changed both the Type and the equiv of its local).
 *
 * The replacement may pop/push fewer values than the original (never
 * more); the stack is rewound by the difference.
 */
void replace_last_op(ISS& env, Bytecode&& bc) {
  auto& last = mutate_last_op(env);
  auto const newPush = numPush(bc);
  auto const oldPush = numPush(last);
  auto const newPops = numPop(bc);
  auto const oldPops = numPop(last);

  assertx(newPush <= oldPush);
  assertx(newPops <= oldPops);

  if (newPush != oldPush || newPops != oldPops) {
    env.state.stack.rewind(oldPops - newPops, oldPush - newPush);
  }
  ITRACE(2, "(replace: {}->{}\n",
         show(env.ctx.func, last), show(env.ctx.func, bc));
  // Keep the original source location on the replacement.
  last = bc_with_loc(last.srcLoc, bc);
}
387 //////////////////////////////////////////////////////////////////////
389 const Bytecode* op_from_slot(ISS& env, int slot, int prev /* = 0 */) {
390 if (!will_reduce(env)) return nullptr;
391 auto const id = id_from_slot(env, slot);
392 if (id == StackElem::NoId) return nullptr;
393 if (id < prev) return nullptr;
394 return op_from_id(env, id - prev);
397 const Bytecode* last_op(ISS& env, int idx /* = 0 */) {
398 if (!will_reduce(env)) return nullptr;
400 if (env.replacedBcs.size() > idx) {
401 return &env.replacedBcs[env.replacedBcs.size() - idx - 1];
404 idx -= env.replacedBcs.size();
405 if (env.unchangedBcs > idx) {
406 return &env.blk.hhbcs[env.unchangedBcs - idx - 1];
408 return nullptr;
412 * Assuming bc was just interped, rewind to the state immediately
413 * before it was interped.
415 * This is rarely what you want. Its used for constprop, where the
416 * bytecode has been interped, but not yet committed to the bytecode
417 * stream. We want to undo its effects, the spit out pops for its
418 * inputs, and commit a constant-generating bytecode.
420 void rewind(ISS& env, const Bytecode& bc) {
421 ITRACE(2, "(rewind: {}\n", show(env.ctx.func, bc));
422 env.state.stack.rewind(numPop(bc), numPush(bc));
/*
 * Used for peephole opts. Will undo the *stack* effects of the last n
 * committed byte codes, and remove them from the bytecode stream, in
 * preparation for writing out an optimized replacement sequence.
 *
 * WARNING: Does not undo other changes to state, such as local types,
 * local equivalency, and thisType. Take care when rewinding such
 * things.
 */
void rewind(ISS& env, int n) {
  assertx(n);
  // First unwind the replacement stream (most recent first)...
  while (env.replacedBcs.size()) {
    rewind(env, env.replacedBcs.back());
    env.replacedBcs.pop_back();
    if (!--n) return;
  }
  // ...then eat back into the unchanged prefix for whatever remains.
  while (n--) {
    rewind(env, env.blk.hhbcs[--env.unchangedBcs]);
  }
}
/*
 * Interpret a sequence of bytecodes as the implementation of the
 * current instruction.
 *
 * In reduce mode the sequence replaces the instruction in the output
 * stream (each element is interpreted and recorded).  Otherwise the
 * sequence is only used to compute the resulting state/flags, merging
 * wasPEI / canConstProp / effectFree across the elements.
 */
void impl_vec(ISS& env, bool reduce, BytecodeVec&& bcs) {
  if (!will_reduce(env)) reduce = false;

  if (reduce) {
    using namespace folly::gen;
    ITRACE(2, "(reduce: {}\n",
           from(bcs) |
           map([&] (const Bytecode& bc) { return show(env.ctx.func, bc); }) |
           unsplit<std::string>(", "));
    if (bcs.size()) {
      auto ef = !env.flags.reduced || env.flags.effectFree;
      Trace::Indent _;
      for (auto const& bc : bcs) {
        assert(
          env.flags.jmpDest == NoBlockId &&
          "you can't use impl with branching opcodes before last position"
        );
        interpStep(env, bc);
        // The whole reduction is only effect-free if every step is.
        if (!env.flags.effectFree) ef = false;
        if (env.state.unreachable || env.flags.jmpDest != NoBlockId) break;
      }
      env.flags.effectFree = ef;
    } else if (!env.flags.reduced) {
      // Reducing to nothing at all is trivially effect-free.
      effect_free(env);
    }
    env.flags.reduced = true;
    return;
  }

  env.analyzeDepth++;
  SCOPE_EXIT { env.analyzeDepth--; };

  // We should be at the start of a bytecode.
  assertx(env.flags.wasPEI &&
          !env.flags.canConstProp &&
          !env.flags.effectFree);

  env.flags.wasPEI = false;
  env.flags.canConstProp = true;
  env.flags.effectFree = true;

  for (auto const& bc : bcs) {
    assert(env.flags.jmpDest == NoBlockId &&
           "you can't use impl with branching opcodes before last position");

    // Save the flags accumulated so far; each step starts pessimistic.
    auto const wasPEI = env.flags.wasPEI;
    auto const canConstProp = env.flags.canConstProp;
    auto const effectFree = env.flags.effectFree;

    ITRACE(3, " (impl {}\n", show(env.ctx.func, bc));
    env.flags.wasPEI = true;
    env.flags.canConstProp = false;
    env.flags.effectFree = false;
    default_dispatch(env, bc);

    if (env.flags.canConstProp) {
      // A const-proppable step whose outputs are all scalars can be
      // upgraded to effect-free / non-throwing.
      [&] {
        if (env.flags.effectFree && !env.flags.wasPEI) return;
        auto stk = env.state.stack.end();
        for (auto i = bc.numPush(); i--; ) {
          --stk;
          if (!is_scalar(stk->type)) return;
        }
        env.flags.effectFree = true;
        env.flags.wasPEI = false;
      }();
    }

    // If any of the opcodes in the impl list said they could throw,
    // then the whole thing could throw.
    env.flags.wasPEI = env.flags.wasPEI || wasPEI;
    env.flags.canConstProp = env.flags.canConstProp && canConstProp;
    env.flags.effectFree = env.flags.effectFree && effectFree;
    if (env.state.unreachable || env.flags.jmpDest != NoBlockId) break;
  }
}
/*
 * Find the lowest-numbered local range equivalent to `range`, by
 * walking the equivalence chain of range.first and checking that the
 * whole range matches element-wise.  Returns range.first if nothing
 * better is found.
 */
LocalId equivLocalRange(ISS& env, const LocalRange& range) {
  auto bestRange = range.first;
  auto equivFirst = findLocEquiv(env, range.first);
  if (equivFirst == NoLocalId) return bestRange;
  do {
    if (equivFirst < bestRange) {
      auto equivRange = [&] {
        // local equivalency includes differing by Uninit, so we need
        // to check the types.
        if (peekLocRaw(env, equivFirst) != peekLocRaw(env, range.first)) {
          return false;
        }

        for (uint32_t i = 1; i < range.count; ++i) {
          if (!locsAreEquiv(env, equivFirst + i, range.first + i) ||
              peekLocRaw(env, equivFirst + i) !=
              peekLocRaw(env, range.first + i)) {
            return false;
          }
        }
        return true;
      }();

      if (equivRange) {
        bestRange = equivFirst;
      }
    }
    equivFirst = findLocEquiv(env, equivFirst);
    // The equivalence chain is circular, so we always get back to
    // range.first eventually.
    assert(equivFirst != NoLocalId);
  } while (equivFirst != range.first);

  return bestRange;
}
558 SString getNameFromType(const Type& t) {
559 if (!t.subtypeOf(BStr)) return nullptr;
560 if (is_specialized_string(t)) return sval_of(t);
561 return nullptr;
564 //////////////////////////////////////////////////////////////////////
566 namespace {
568 ArrayData*
569 resolveTSStaticallyImpl(ISS& env, hphp_fast_set<SArray>& seenTs, SArray ts,
570 const php::Class* declaringCls);
/*
 * Statically resolve every element of a list of type structures.
 * Returns nullptr if any element fails to resolve.
 *
 * NOTE: tsList is a static array, so the const_cast/attach is safe —
 * Array::set below will CoW rather than mutate the shared input.
 */
ArrayData*
resolveTSListStatically(ISS& env, hphp_fast_set<SArray>& seenTs,
                        SArray tsList, const php::Class* declaringCls) {
  auto arr = Array::attach(const_cast<ArrayData*>(tsList));
  for (auto i = 0; i < arr.size(); i++) {
    auto elemArr = arr[i].getArrayData();
    auto elem = resolveTSStaticallyImpl(env, seenTs, elemArr, declaringCls);
    if (!elem) return nullptr;
    arr.set(i, Variant(elem));
  }
  return arr.detach();
}
/*
 * Attempt to fully resolve the type structure `ts` at compile time.
 * Returns a static (scalar) array on success, nullptr when any part
 * can't be resolved statically.  `seenTs` guards against recursive
 * type structures; `declaringCls` provides the meaning of
 * self/parent/this in type accesses.
 */
ArrayData*
resolveTSStaticallyImpl(ISS& env, hphp_fast_set<SArray>& seenTs, SArray ts,
                        const php::Class* declaringCls) {
  // Cycle guard: bail on recursive structures.
  if (seenTs.contains(ts)) return nullptr;
  seenTs.emplace(ts);
  SCOPE_EXIT { seenTs.erase(ts); };

  // Copy like/nullable/soft modifiers from `ts` onto a resolved result.
  auto const addModifiers = [&](ArrayData* result) {
    auto a = Array::attach(result);
    if (is_ts_like(ts) && !is_ts_like(a.get())) {
      a.set(s_like, make_tv<KindOfBoolean>(true));
    }
    if (is_ts_nullable(ts) && !is_ts_nullable(a.get())) {
      a.set(s_nullable, make_tv<KindOfBoolean>(true));
    }
    if (is_ts_soft(ts) && !is_ts_soft(a.get())) {
      a.set(s_soft, make_tv<KindOfBoolean>(true));
    }
    return a.detach();
  };
  // Intern the result as a scalar array before returning it.
  auto const finish = [&](const ArrayData* result) {
    auto r = const_cast<ArrayData*>(result);
    ArrayData::GetScalarArray(&r);
    return r;
  };
  switch (get_ts_kind(ts)) {
    // Primitive kinds are already fully resolved.
    case TypeStructure::Kind::T_int:
    case TypeStructure::Kind::T_bool:
    case TypeStructure::Kind::T_float:
    case TypeStructure::Kind::T_string:
    case TypeStructure::Kind::T_num:
    case TypeStructure::Kind::T_arraykey:
    case TypeStructure::Kind::T_void:
    case TypeStructure::Kind::T_null:
    case TypeStructure::Kind::T_nothing:
    case TypeStructure::Kind::T_noreturn:
    case TypeStructure::Kind::T_mixed:
    case TypeStructure::Kind::T_dynamic:
    case TypeStructure::Kind::T_nonnull:
    case TypeStructure::Kind::T_resource:
      return finish(ts);
    case TypeStructure::Kind::T_typevar:
      // Only the wildcard typevar is statically resolvable.
      if (ts->exists(s_name.get()) &&
          get_ts_name(ts)->equal(s_wildcard.get())) {
        return finish(ts);
      }
      return nullptr;
    case TypeStructure::Kind::T_dict:
    case TypeStructure::Kind::T_vec:
    case TypeStructure::Kind::T_keyset:
    case TypeStructure::Kind::T_vec_or_dict:
    case TypeStructure::Kind::T_arraylike: {
      // Resolve generic arguments, if any.
      if (!ts->exists(s_generic_types)) return finish(ts);
      auto const generics = get_ts_generic_types(ts);
      auto rgenerics =
        resolveTSListStatically(env, seenTs, generics, declaringCls);
      if (!rgenerics) return nullptr;
      auto result = const_cast<ArrayData*>(ts);
      return finish(result->set(s_generic_types.get(), Variant(rgenerics)));
    }
    case TypeStructure::Kind::T_class:
    case TypeStructure::Kind::T_interface:
    case TypeStructure::Kind::T_xhp:
    case TypeStructure::Kind::T_enum:
      // Generics for these must have been resolved already as we'd never set
      // the TS Kind to be one of these until resolution
      return finish(ts);
    case TypeStructure::Kind::T_tuple: {
      auto const elems = get_ts_elem_types(ts);
      auto relems = resolveTSListStatically(env, seenTs, elems, declaringCls);
      if (!relems) return nullptr;
      auto result = const_cast<ArrayData*>(ts);
      return finish(result->set(s_elem_types.get(), Variant(relems)));
    }
    case TypeStructure::Kind::T_shape:
      // TODO(T31677864): We can also optimize this but shapes could have
      // optional fields or they could allow unknown fields, so this one is
      // slightly more tricky
      return nullptr;
    case TypeStructure::Kind::T_unresolved: {
      assertx(ts->exists(s_classname));
      auto result = const_cast<ArrayData*>(ts);
      if (ts->exists(s_generic_types)) {
        auto const generics = get_ts_generic_types(ts);
        auto rgenerics =
          resolveTSListStatically(env, seenTs, generics, declaringCls);
        if (!rgenerics) return nullptr;
        result = result->set(s_generic_types.get(), Variant(rgenerics));
      }
      // Only resolvable when the class itself resolves; translate its
      // attributes into the concrete TS kind.
      auto const rcls = env.index.resolve_class(env.ctx, get_ts_classname(ts));
      if (!rcls || !rcls->resolved()) return nullptr;
      auto const attrs = rcls->cls()->attrs;
      auto const kind = [&] {
        if (attrs & AttrEnum) return TypeStructure::Kind::T_enum;
        if (attrs & AttrTrait) return TypeStructure::Kind::T_trait;
        if (attrs & AttrInterface) return TypeStructure::Kind::T_interface;
        return TypeStructure::Kind::T_class;
      }();
      return finish(result->set(s_kind.get(),
                                Variant(static_cast<uint8_t>(kind))));
    }
    case TypeStructure::Kind::T_typeaccess: {
      // Walk the access list (Cls::TC1::TC2...), resolving each type
      // constant in turn.
      auto const accList = get_ts_access_list(ts);
      auto const size = accList->size();
      auto clsName = get_ts_root_name(ts);
      auto checkNoOverrideOnFirst = false;
      if (declaringCls) {
        if (clsName->isame(s_self.get())) {
          clsName = declaringCls->name;
        } else if (clsName->isame(s_parent.get()) && declaringCls->parentName) {
          clsName = declaringCls->parentName;
        } else if (clsName->isame(s_this.get())) {
          // `this` resolves to the declaring class only if the type
          // constant can't be overridden in a subclass.
          clsName = declaringCls->name;
          checkNoOverrideOnFirst = true;
        }
      }
      ArrayData* typeCnsVal = nullptr;
      for (auto i = 0; i < size; i++) {
        auto const rcls = env.index.resolve_class(env.ctx, clsName);
        if (!rcls || !rcls->resolved()) return nullptr;
        auto const cnsName = accList->at(i);
        if (!tvIsString(&cnsName)) return nullptr;
        auto const cnst = env.index.lookup_class_const_ptr(env.ctx, *rcls,
                                                           cnsName.m_data.pstr,
                                                           true);
        if (!cnst || !cnst->val || !cnst->isTypeconst ||
            !tvIsHAMSafeDArray(&*cnst->val)) {
          return nullptr;
        }
        if (checkNoOverrideOnFirst && i == 0 && !cnst->isNoOverride) {
          return nullptr;
        }
        typeCnsVal = resolveTSStaticallyImpl(env, seenTs,
                                             cnst->val->m_data.parr, cnst->cls);
        if (!typeCnsVal) return nullptr;
        if (i == size - 1) break;
        // Intermediate steps must resolve to a class/interface whose
        // name seeds the next lookup.
        auto const kind = get_ts_kind(typeCnsVal);
        if (kind != TypeStructure::Kind::T_class &&
            kind != TypeStructure::Kind::T_interface) {
          return nullptr;
        }
        clsName = get_ts_classname(typeCnsVal);
      }
      if (!typeCnsVal) return nullptr;
      return finish(addModifiers(typeCnsVal));
    }
    case TypeStructure::Kind::T_fun: {
      // Resolve return type, parameter types, and optional variadic.
      auto rreturn = resolveTSStaticallyImpl(env, seenTs,
                                             get_ts_return_type(ts),
                                             declaringCls);
      if (!rreturn) return nullptr;
      auto rparams = resolveTSListStatically(env, seenTs,
                                             get_ts_param_types(ts),
                                             declaringCls);
      if (!rparams) return nullptr;
      auto result = const_cast<ArrayData*>(ts)
        ->set(s_return_type.get(), Variant(rreturn))
        ->set(s_param_types.get(), Variant(rparams));
      auto const variadic = get_ts_variadic_type_opt(ts);
      if (variadic) {
        auto rvariadic =
          resolveTSStaticallyImpl(env, seenTs, variadic, declaringCls);
        if (!rvariadic) return nullptr;
        result = result->set(s_variadic_type.get(), Variant(rvariadic));
      }
      return finish(result);
    }
    case TypeStructure::Kind::T_array:
    case TypeStructure::Kind::T_darray:
    case TypeStructure::Kind::T_varray:
    case TypeStructure::Kind::T_varray_or_darray:
    case TypeStructure::Kind::T_reifiedtype:
    case TypeStructure::Kind::T_trait:
      return nullptr;
  }
  not_reached();
}
763 } // namespace
765 ArrayData*
766 resolveTSStatically(ISS& env, SArray ts, const php::Class* declaringCls) {
767 hphp_fast_set<SArray> seenTs;
768 return resolveTSStaticallyImpl(env, seenTs, ts, declaringCls);
771 //////////////////////////////////////////////////////////////////////
773 namespace interp_step {
// Nop does nothing; reduce it to an empty sequence.
void in(ISS& env, const bc::Nop&) { reduce(env); }
/*
 * PopC: try to cancel against the instruction that pushed the value;
 * otherwise it's just an effect-free pop.
 */
void in(ISS& env, const bc::PopC&) {
  if (auto const last = last_op(env)) {
    if (poppable(last->op)) {
      // Producer had no side effects: drop both instructions.
      rewind(env, 1);
      return reduce(env);
    }
    if (last->op == Op::This) {
      // can't rewind This because it removed null from thisType (so
      // CheckThis at this point is a no-op) - and note that it must
      // have *been* nullable, or we'd have turned it into a
      // `BareThis NeverNull`
      replace_last_op(env, bc::CheckThis {});
      return reduce(env);
    }
    if (last->op == Op::SetL) {
      // can't rewind a SetL because it changes local state
      replace_last_op(env, bc::PopL { last->SetL.loc1 });
      return reduce(env);
    }
    if (last->op == Op::CGetL2) {
      // CGetL2 tunneled the local under the top value; popping that
      // value is the same as PopC then a plain CGetL.
      auto loc = last->CGetL2.nloc1;
      rewind(env, 1);
      return reduce(env, bc::PopC {}, bc::CGetL { loc });
    }
  }

  effect_free(env);
  popC(env);
}
807 void in(ISS& env, const bc::PopU&) {
808 if (auto const last = last_op(env)) {
809 if (last->op == Op::NullUninit) {
810 rewind(env, 1);
811 return reduce(env);
814 effect_free(env); popU(env);
817 void in(ISS& env, const bc::PopU2&) {
818 effect_free(env);
819 auto equiv = topStkEquiv(env);
820 auto val = popC(env);
821 popU(env);
822 push(env, std::move(val), equiv != StackDupId ? equiv : NoLocalId);
/*
 * PopFrame: remove the three uninit "ActRec" slots that sit under the
 * top op.arg1 values, preserving those values (and their equivalences)
 * on top of the stack.
 */
void in(ISS& env, const bc::PopFrame& op) {
  effect_free(env);

  // Save the top arg1 values, bottom-most first.  Note: braced-init
  // guarantees popC runs before topStkEquiv for each element.
  std::vector<std::pair<Type, LocalId>> vals{op.arg1};
  for (auto i = op.arg1; i > 0; --i) {
    vals[i - 1] = {popC(env), topStkEquiv(env)};
  }
  // Drop the three uninit frame slots.
  for (uint32_t i = 0; i < 3; i++) popU(env);
  // Restore the saved values in their original order.
  for (auto& p : vals) {
    push(
      env, std::move(p.first), p.second != StackDupId ? p.second : NoLocalId);
  }
}
839 void in(ISS& env, const bc::EntryNop&) { effect_free(env); }
841 void in(ISS& env, const bc::Dup& /*op*/) {
842 effect_free(env);
843 auto equiv = topStkEquiv(env);
844 auto val = popC(env);
845 push(env, val, equiv);
846 push(env, std::move(val), StackDupId);
849 void in(ISS& env, const bc::AssertRATL& op) {
850 mayReadLocal(env, op.loc1);
851 effect_free(env);
854 void in(ISS& env, const bc::AssertRATStk&) {
855 effect_free(env);
858 void in(ISS& env, const bc::BreakTraceHint&) { effect_free(env); }
860 void in(ISS& env, const bc::CGetCUNop&) {
861 effect_free(env);
862 auto const t = popCU(env);
863 push(env, remove_uninit(t));
866 void in(ISS& env, const bc::UGetCUNop&) {
867 effect_free(env);
868 popCU(env);
869 push(env, TUninit);
872 void in(ISS& env, const bc::Null&) {
873 effect_free(env);
874 push(env, TInitNull);
877 void in(ISS& env, const bc::NullUninit&) {
878 effect_free(env);
879 push(env, TUninit);
882 void in(ISS& env, const bc::True&) {
883 effect_free(env);
884 push(env, TTrue);
887 void in(ISS& env, const bc::False&) {
888 effect_free(env);
889 push(env, TFalse);
892 void in(ISS& env, const bc::Int& op) {
893 effect_free(env);
894 push(env, ival(op.arg1));
897 void in(ISS& env, const bc::Double& op) {
898 effect_free(env);
899 push(env, dval(op.dbl1));
902 void in(ISS& env, const bc::String& op) {
903 effect_free(env);
904 push(env, sval(op.str1));
907 void in(ISS& env, const bc::Array& op) {
908 assert(op.arr1->isPHPArrayType());
909 assertx(!RuntimeOption::EvalHackArrDVArrs || op.arr1->isNotDVArray());
910 effect_free(env);
911 push(env, aval(op.arr1));
914 void in(ISS& env, const bc::Vec& op) {
915 assert(op.arr1->isVecType());
916 effect_free(env);
917 push(env, vec_val(op.arr1));
920 void in(ISS& env, const bc::Dict& op) {
921 assert(op.arr1->isDictType());
922 effect_free(env);
923 push(env, dict_val(op.arr1));
926 void in(ISS& env, const bc::Keyset& op) {
927 assert(op.arr1->isKeysetType());
928 effect_free(env);
929 push(env, keyset_val(op.arr1));
932 void in(ISS& env, const bc::NewDictArray& op) {
933 effect_free(env);
934 push(env, op.arg1 == 0 ? dict_empty() : some_dict_empty());
937 void in(ISS& env, const bc::NewVArray& op) {
938 assertx(!RuntimeOption::EvalHackArrDVArrs);
939 auto elems = std::vector<Type>{};
940 elems.reserve(op.arg1);
941 for (auto i = uint32_t{0}; i < op.arg1; ++i) {
942 elems.push_back(std::move(topC(env, op.arg1 - i - 1)));
944 discard(env, op.arg1);
945 push(env, arr_packed_varray(std::move(elems), provTagHere(env)));
946 effect_free(env);
947 constprop(env);
950 void in(ISS& env, const bc::NewDArray& op) {
951 assertx(!RuntimeOption::EvalHackArrDVArrs);
952 effect_free(env);
953 auto const tag = provTagHere(env);
954 push(env, op.arg1 == 0 ? aempty_darray(tag) : some_aempty_darray(tag));
957 void in(ISS& env, const bc::NewRecord& op) {
958 discard(env, op.keys.size());
959 auto const rrec = env.index.resolve_record(op.str1);
960 push(env, rrec ? exactRecord(*rrec) : TRecord);
963 void in(ISS& env, const bc::NewStructDArray& op) {
964 assertx(!RuntimeOption::EvalHackArrDVArrs);
965 auto map = MapElems{};
966 for (auto it = op.keys.end(); it != op.keys.begin(); ) {
967 map.emplace_front(make_tv<KindOfPersistentString>(*--it), popC(env));
969 push(env, arr_map_darray(std::move(map), provTagHere(env)));
970 effect_free(env);
971 constprop(env);
974 void in(ISS& env, const bc::NewStructDict& op) {
975 auto map = MapElems{};
976 for (auto it = op.keys.end(); it != op.keys.begin(); ) {
977 map.emplace_front(make_tv<KindOfPersistentString>(*--it), popC(env));
979 push(env, dict_map(std::move(map)));
980 effect_free(env);
981 constprop(env);
984 void in(ISS& env, const bc::NewVec& op) {
985 auto elems = std::vector<Type>{};
986 elems.reserve(op.arg1);
987 for (auto i = uint32_t{0}; i < op.arg1; ++i) {
988 elems.push_back(std::move(topC(env, op.arg1 - i - 1)));
990 discard(env, op.arg1);
991 effect_free(env);
992 constprop(env);
993 push(env, vec(std::move(elems)));
/*
 * NewKeysetArray: build a keyset from the top arg1 values.  Tracks
 * three outcomes: a fully-known map of keys (useMap), a keyset of a
 * union key type, or a guaranteed failure (bad) when some key can't
 * be a valid keyset key at all.
 */
void in(ISS& env, const bc::NewKeysetArray& op) {
  assert(op.arg1 > 0);
  auto map = MapElems{};
  auto ty = TBottom;
  auto useMap = true;
  auto bad = false;
  auto mayThrow = false;
  for (auto i = uint32_t{0}; i < op.arg1; ++i) {
    auto k = disect_strict_key(popC(env));
    mayThrow |= k.mayThrow;
    if (k.type == TBottom) {
      // This key always throws; the whole construction is unreachable.
      bad = true;
      useMap = false;
    }
    if (useMap) {
      if (auto const v = k.tv()) {
        map.emplace_front(*v, k.type);
      } else {
        // Key not statically known: fall back to the union type.
        useMap = false;
      }
    }
    ty |= std::move(k.type);
  }
  if (!mayThrow) effect_free(env);
  if (useMap) {
    push(env, keyset_map(std::move(map)));
    if (!mayThrow) constprop(env);
  } else if (!bad) {
    push(env, keyset_n(ty));
  } else {
    unreachable(env);
    push(env, TBottom);
  }
}
/*
 * AddElemC: set base[key] = value where base is 3 deep on the stack.
 *
 * When the result is statically known and can't throw, we either fold
 * the set directly into an in-flight tracked array literal, or start
 * tracking one (start_add_elem).  Otherwise fall back to computing
 * the resulting type.
 */
void in(ISS& env, const bc::AddElemC& /*op*/) {
  auto const v = topC(env, 0);
  auto const k = topC(env, 1);

  auto inTy = (env.state.stack.end() - 3).unspecialize();

  auto const tag = provTagHere(env);

  // Compute the array type after the set, if the base is known to be
  // an array or dict; none otherwise.
  auto outTy = [&] (Type ty) ->
    folly::Optional<std::pair<Type,ThrowMode>> {
    if (ty.subtypeOf(BArr)) {
      return array_set(std::move(ty), k, v, tag);
    }
    if (ty.subtypeOf(BDict)) {
      return dict_set(std::move(ty), k, v);
    }
    return folly::none;
  }(std::move(inTy));

  if (outTy && outTy->second == ThrowMode::None && will_reduce(env)) {
    if (!env.trackedElems.empty() &&
        env.trackedElems.back().depth + 3 == env.state.stack.size()) {
      // The base is the array literal we're already tracking: try to
      // fold the (statically-known) key/value into it in place.
      auto const handled = [&] {
        if (!k.subtypeOf(BArrKey)) return false;
        auto ktv = tv(k);
        if (!ktv) return false;
        auto vtv = tv(v);
        if (!vtv) return false;
        return mutate_add_elem_array(env, tag, [&](ArrayData** arr) {
          *arr = (*arr)->set(*ktv, *vtv);
        });
      }();
      if (handled) {
        (env.state.stack.end() - 3)->type = std::move(outTy->first);
        reduce(env, bc::PopC {}, bc::PopC {});
        ITRACE(2, "(addelem* -> {}\n",
               show(env.ctx.func,
                    env.replacedBcs[env.trackedElems.back().idx - env.unchangedBcs]));
        return;
      }
    } else {
      // Not tracking yet: see if this op can begin a tracked sequence.
      if (start_add_elem(env, outTy->first, Op::AddElemC)) {
        return;
      }
    }
  }

  discard(env, 3);
  finish_tracked_elems(env, env.state.stack.size());

  if (!outTy) {
    return push(env, union_of(TArr, TDict));
  }

  if (outTy->first.subtypeOf(BBottom)) {
    unreachable(env);
  } else if (outTy->second == ThrowMode::None) {
    effect_free(env);
    constprop(env);
  }
  push(env, std::move(outTy->first));
}
// AddNewElemC: stack is [..., array, value]; appends value to the array and
// pushes the result. Mirrors AddElemC, including the tracked-elems in-place
// constant folding path.
1094 void in(ISS& env, const bc::AddNewElemC&) {
1095 auto v = topC(env);
1096 auto inTy = (env.state.stack.end() - 2).unspecialize();
1098 auto const tag = provTagHere(env);
// Post-append type for the supported base kinds; folly::none otherwise.
1100 auto outTy = [&] (Type ty) -> folly::Optional<Type> {
1101 if (ty.subtypeOf(BArr)) {
1102 return array_newelem(std::move(ty), std::move(v), tag).first;
1104 if (ty.subtypeOf(BVec)) {
1105 return vec_newelem(std::move(ty), std::move(v)).first;
1107 if (ty.subtypeOf(BKeyset)) {
1108 return keyset_newelem(std::move(ty), std::move(v)).first;
1110 return folly::none;
1111 }(std::move(inTy));
1113 if (outTy && will_reduce(env)) {
// Base 2 slots down is a tracked add-elem sequence's array: try to
// append the constant value directly into the emitted array literal.
1114 if (!env.trackedElems.empty() &&
1115 env.trackedElems.back().depth + 2 == env.state.stack.size()) {
1116 auto const handled = [&] {
1117 auto vtv = tv(v);
1118 if (!vtv) return false;
1119 return mutate_add_elem_array(env, tag, [&](ArrayData** arr) {
1120 *arr = (*arr)->append(*vtv);
1122 }();
1123 if (handled) {
1124 (env.state.stack.end() - 2)->type = std::move(*outTy);
1125 reduce(env, bc::PopC {});
1126 ITRACE(2, "(addelem* -> {}\n",
1127 show(env.ctx.func,
1128 env.replacedBcs[env.trackedElems.back().idx - env.unchangedBcs]));
1129 return;
1131 } else {
1132 if (start_add_elem(env, *outTy, Op::AddNewElemC)) {
1133 return;
// Fallthrough: generic modeling when folding isn't possible.
1138 discard(env, 2);
1139 finish_tracked_elems(env, env.state.stack.size());
1141 if (!outTy) {
1142 return push(env, TInitCell);
1145 if (outTy->subtypeOf(BBottom)) {
1146 unreachable(env);
1147 } else {
1148 constprop(env);
1150 push(env, std::move(*outTy));
1153 void in(ISS& env, const bc::NewCol& op) {
1154 auto const type = static_cast<CollectionType>(op.subop1);
1155 auto const name = collections::typeToString(type);
1156 push(env, objExact(env.index.builtin_class(name)));
1157 effect_free(env);
1160 void in(ISS& env, const bc::NewPair& /*op*/) {
1161 popC(env); popC(env);
1162 auto const name = collections::typeToString(CollectionType::Pair);
1163 push(env, objExact(env.index.builtin_class(name)));
1164 effect_free(env);
// ColFromArray: converts the popped array-like into a collection of the kind
// in subop1 (never Pair). Effect-free only when the source type already
// matches the layout the collection requires (vec for Vector-likes, dict for
// the map/set kinds).
1167 void in(ISS& env, const bc::ColFromArray& op) {
1168 auto const src = popC(env);
1169 auto const type = static_cast<CollectionType>(op.subop1);
1170 assertx(type != CollectionType::Pair);
1171 if (type == CollectionType::Vector || type == CollectionType::ImmVector) {
1172 if (src.subtypeOf(TVec)) effect_free(env);
1173 } else {
1174 assertx(type == CollectionType::Map ||
1175 type == CollectionType::ImmMap ||
1176 type == CollectionType::Set ||
1177 type == CollectionType::ImmSet);
1178 if (src.subtypeOf(TDict)) effect_free(env);
1180 auto const name = collections::typeToString(type);
1181 push(env, objExact(env.index.builtin_class(name)));
// CnsE: look up a global constant by name and push its inferred type.
1184 void in(ISS& env, const bc::CnsE& op) {
1185 auto t = env.index.lookup_constant(env.ctx, op.str1);
// Only constprop when the type is strictly narrower than InitCell, i.e. we
// actually learned something about the constant.
1186 if (t.strictSubtypeOf(TInitCell)) {
1187 // constprop will take care of nothrow *if* its a constant; and if
1188 // its not, we might trigger autoload.
1189 constprop(env);
1191 push(env, std::move(t));
// ClsCns: class constant access where the class is on the stack. When the
// class is statically known (exact, or the constant is not overridable) the
// op reduces to the direct ClsCnsD form; otherwise we only know TInitCell.
1194 void in(ISS& env, const bc::ClsCns& op) {
1195 auto const& t1 = topC(env);
1196 if (is_specialized_cls(t1)) {
1197 auto const dcls = dcls_of(t1);
1198 auto const finish = [&] {
1199 reduce(env, bc::PopC { },
1200 bc::ClsCnsD { op.str1, dcls.cls.name() });
1202 if (dcls.type == DCls::Exact) return finish();
// Subclass context: still reducible if no subclass can override the
// constant.
1203 auto const cnst = env.index.lookup_class_const_ptr(env.ctx, dcls.cls,
1204 op.str1, false);
1205 if (cnst && cnst->isNoOverride) return finish();
1207 popC(env);
1208 push(env, TInitCell);
// ClsCnsD: class constant access with both class and constant named
// directly. If the class resolves, push the precise constant type and allow
// const-propagation; otherwise we know nothing beyond TInitCell.
1211 void in(ISS& env, const bc::ClsCnsD& op) {
1212 if (auto const rcls = env.index.resolve_class(env.ctx, op.str2)) {
1213 auto t = env.index.lookup_class_constant(env.ctx, *rcls, op.str1, false);
1214 constprop(env);
1215 push(env, std::move(t));
1216 return;
1218 push(env, TInitCell);
// Trivial effect-free pushes: File/Dir/Method push static strings describing
// the current unit/function; FuncCred pushes a credential object.
1221 void in(ISS& env, const bc::File&) { effect_free(env); push(env, TSStr); }
1222 void in(ISS& env, const bc::Dir&) { effect_free(env); push(env, TSStr); }
1223 void in(ISS& env, const bc::Method&) { effect_free(env); push(env, TSStr); }
1225 void in(ISS& env, const bc::FuncCred&) { effect_free(env); push(env, TObj); }
// ClassName: pops a class and pushes its name. Reduces to a String constant
// when the class is exactly known; nothrow when the input is definitely a
// class.
1227 void in(ISS& env, const bc::ClassName& op) {
1228 auto const ty = topC(env);
1229 if (is_specialized_cls(ty)) {
1230 auto const dcls = dcls_of(ty);
1231 if (dcls.type == DCls::Exact) {
1232 return reduce(env,
1233 bc::PopC {},
1234 bc::String { dcls.cls.name() });
1237 if (ty.subtypeOf(TCls)) nothrow(env);
1238 popC(env);
1239 push(env, TSStr);
// Shared implementation for Concat/ConcatN over the top n stack cells.
// While reductions are allowed it: (1) merges adjacent Concat/ConcatN
// sequences (kill_by_slot), (2) folds runs of literal operands into a single
// static string constant, then (3) emits the minimal chain of
// Concat/ConcatN ops for whatever remains.
1242 void concatHelper(ISS& env, uint32_t n) {
1243 auto changed = false;
1244 auto side_effects = false;
1245 if (will_reduce(env)) {
// litstr: if slot i holds a scalar constant, return the static string of
// (that value concatenated with `next`); nullptr if not foldable.
1246 auto litstr = [&] (SString next, uint32_t i) -> SString {
1247 auto const t = topC(env, i);
1248 auto const v = tv(t);
1249 if (!v) return nullptr;
1250 if (!isStringType(v->m_type) &&
1251 v->m_type != KindOfNull &&
1252 v->m_type != KindOfBoolean &&
1253 v->m_type != KindOfInt64 &&
1254 v->m_type != KindOfDouble) {
1255 return nullptr;
1257 auto const cell = eval_cell_value(
1258 [&] {
1259 auto const s = makeStaticString(
1260 next ?
1261 StringData::Make(tvAsCVarRef(&*v).toString().get(), next) :
1262 tvAsCVarRef(&*v).toString().get());
1263 return make_tv<KindOfString>(s);
1266 if (!cell) return nullptr;
1267 return cell->m_data.pstr;
// fold: replace `num` operands starting at stack slot `slot` with pops
// plus a single constant push of `result`; adjusts n accordingly.
1270 auto fold = [&] (uint32_t slot, uint32_t num, SString result) {
1271 auto const cell = make_tv<KindOfPersistentString>(result);
1272 auto const ty = from_cell(cell);
1273 BytecodeVec bcs{num, bc::PopC {}};
1274 if (num > 1) bcs.push_back(gen_constant(cell));
1275 if (slot == 0) {
1276 reduce(env, std::move(bcs));
1277 } else {
1278 insert_after_slot(env, slot, num, num > 1 ? 1 : 0, &ty, bcs);
1279 reprocess(env);
1281 n -= num - 1;
1282 changed = true;
// Objects/arrays/resources can invoke __toString or notice: folding or
// reordering across them would change observable behavior.
1285 for (auto i = 0; i < n; i++) {
1286 if (topC(env, i).couldBe(BObj | BArrLike | BRes)) {
1287 side_effects = true;
1288 break;
// Merge a previous Concat/ConcatN whose result feeds this one.
1292 if (!side_effects) {
1293 for (auto i = 0; i < n; i++) {
1294 auto const tracked = !env.trackedElems.empty() &&
1295 env.trackedElems.back().depth + i + 1 == env.state.stack.size();
1296 if (tracked) finish_tracked_elems(env, env.trackedElems.back().depth);
1297 auto const prev = op_from_slot(env, i);
1298 if (!prev) continue;
1299 if ((prev->op == Op::Concat && tracked) || prev->op == Op::ConcatN) {
1300 auto const extra = kill_by_slot(env, i);
1301 changed = true;
1302 n += extra;
1303 i += extra;
// Scan the operands bottom-up, folding maximal literal runs.
1308 SString result = nullptr;
1309 uint32_t i = 0;
1310 uint32_t nlit = 0;
1311 while (i < n) {
1312 // In order to collapse literals, we need to be able to insert
1313 // pops, and a constant after the sequence that generated the
1314 // literals. We can always insert after the last instruction
1315 // though, and we only need to check the first slot of a
1316 // sequence.
1317 auto const next = !i || result || can_insert_after_slot(env, i) ?
1318 litstr(result, i) : nullptr;
// An empty-string operand contributes nothing: drop it outright.
1319 if (next == staticEmptyString()) {
1320 if (n == 1) break;
1321 assertx(nlit == 0);
1322 fold(i, 1, next);
1323 n--;
1324 continue;
1326 if (!next) {
1327 if (nlit > 1) {
1328 fold(i - nlit, nlit, result);
1329 i -= nlit - 1;
1331 nlit = 0;
1332 } else {
1333 nlit++;
1335 result = next;
1336 i++;
1338 if (nlit > 1) fold(i - nlit, nlit, result);
1341 if (!changed) {
1342 discard(env, n);
// Remember a two-operand concat so a later Concat can merge with it.
1343 if (n == 2 && !side_effects && will_reduce(env)) {
1344 env.trackedElems.emplace_back(
1345 env.state.stack.size(),
1346 env.unchangedBcs + env.replacedBcs.size()
1349 push(env, TStr);
1350 return;
// Everything folded down to one operand: at most a string cast remains.
1353 if (n == 1) {
1354 if (!topC(env).subtypeOf(BStr)) {
1355 return reduce(env, bc::CastString {});
1357 return reduce(env);
1360 reduce(env);
1361 // We can't reduce the emitted concats, or we'll end up with
1362 // infinite recursion.
1363 env.flags.wasPEI = true;
1364 env.flags.effectFree = false;
1365 env.flags.canConstProp = false;
// Emit the residual concat chain: ConcatN handles at most 4 operands, so
// peel off groups of 4 (each leaving one result on the stack).
1367 auto concat = [&] (uint32_t num) {
1368 discard(env, num);
1369 push(env, TStr);
1370 if (num == 2) {
1371 record(env, bc::Concat {});
1372 } else {
1373 record(env, bc::ConcatN { num });
1377 while (n >= 4) {
1378 concat(4);
1379 n -= 3;
1381 if (n > 1) concat(n);
// Concat/ConcatN both funnel into concatHelper; a 2-ary ConcatN is
// canonicalized to plain Concat first.
1384 void in(ISS& env, const bc::Concat& /*op*/) {
1385 concatHelper(env, 2);
1388 void in(ISS& env, const bc::ConcatN& op) {
1389 if (op.arg1 == 2) return reduce(env, bc::Concat {});
1390 concatHelper(env, op.arg1);
1393 template <class Op, class Fun>
1394 void arithImpl(ISS& env, const Op& /*op*/, Fun fun) {
1395 constprop(env);
1396 auto const t1 = popC(env);
1397 auto const t2 = popC(env);
1398 push(env, fun(t2, t1));
// One-line dispatchers mapping each arithmetic/bitwise bytecode to its type
// function via arithImpl.
1401 void in(ISS& env, const bc::Add& op) { arithImpl(env, op, typeAdd); }
1402 void in(ISS& env, const bc::Sub& op) { arithImpl(env, op, typeSub); }
1403 void in(ISS& env, const bc::Mul& op) { arithImpl(env, op, typeMul); }
1404 void in(ISS& env, const bc::Div& op) { arithImpl(env, op, typeDiv); }
1405 void in(ISS& env, const bc::Mod& op) { arithImpl(env, op, typeMod); }
1406 void in(ISS& env, const bc::Pow& op) { arithImpl(env, op, typePow); }
1407 void in(ISS& env, const bc::BitAnd& op) { arithImpl(env, op, typeBitAnd); }
1408 void in(ISS& env, const bc::BitOr& op) { arithImpl(env, op, typeBitOr); }
1409 void in(ISS& env, const bc::BitXor& op) { arithImpl(env, op, typeBitXor); }
1410 void in(ISS& env, const bc::AddO& op) { arithImpl(env, op, typeAddO); }
1411 void in(ISS& env, const bc::SubO& op) { arithImpl(env, op, typeSubO); }
1412 void in(ISS& env, const bc::MulO& op) { arithImpl(env, op, typeMulO); }
1413 void in(ISS& env, const bc::Shl& op) { arithImpl(env, op, typeShl); }
1414 void in(ISS& env, const bc::Shr& op) { arithImpl(env, op, typeShr); }
// BitNot: fold ~x at analysis time when the operand is a known constant and
// the evaluation doesn't raise; otherwise the result is unconstrained.
1416 void in(ISS& env, const bc::BitNot& /*op*/) {
1417 auto const t = popC(env);
1418 auto const v = tv(t);
1419 if (v) {
1420 constprop(env);
1421 auto cell = eval_cell([&] {
1422 auto c = *v;
1423 tvBitNot(c);
1424 return c;
1426 if (cell) return push(env, std::move(*cell));
1428 push(env, TInitCell);
1431 namespace {
// Compute the result type of Same (NSame=false) or NSame (NSame=true) on
// the top two stack cells. Returns {result type, may-raise}: the second
// component is whether the comparison might emit a Hack-array compat notice.
1433 template<bool NSame>
1434 std::pair<Type,bool> resolveSame(ISS& env) {
1435 auto const l1 = topStkEquiv(env, 0);
1436 auto const t1 = topC(env, 0);
1437 auto const l2 = topStkEquiv(env, 1);
1438 auto const t2 = topC(env, 1);
1440 // EvalHackArrCompatNotices will notice on === and !== between PHP arrays and
1441 // Hack arrays. We can't really do better than this in general because of
1442 // arrays inside these arrays.
1443 auto warningsEnabled =
1444 (RuntimeOption::EvalHackArrCompatNotices ||
1445 RuntimeOption::EvalEmitClsMethPointers);
1446 
1447 auto const result = [&] {
1448 auto const v1 = tv(t1);
1449 auto const v2 = tv(t2);
// Both operands are provably the same value (dup, same local, or
// equivalent locals): identical unless both could be NaN doubles.
1451 if (l1 == StackDupId ||
1452 (l1 == l2 && l1 != NoLocalId) ||
1453 (l1 <= MaxLocalId && l2 <= MaxLocalId && locsAreEquiv(env, l1, l2))) {
1454 if (!t1.couldBe(BDbl) || !t2.couldBe(BDbl) ||
1455 (v1 && (v1->m_type != KindOfDouble || !std::isnan(v1->m_data.dbl))) ||
1456 (v2 && (v2->m_type != KindOfDouble || !std::isnan(v2->m_data.dbl)))) {
1457 return NSame ? TFalse : TTrue;
// Both constants: evaluate the comparison now if it can't raise.
1461 if (v1 && v2) {
1462 if (auto r = eval_cell_value([&]{ return tvSame(*v2, *v1); })) {
1463 // we wouldn't get here if cellSame raised a warning
1464 warningsEnabled = false;
1465 return r != NSame ? TTrue : TFalse;
1469 return NSame ? typeNSame(t1, t2) : typeSame(t1, t2);
1470 }();
// A definitely-not-identical result can't have raised the notice.
1472 if (warningsEnabled && result == (NSame ? TFalse : TTrue)) {
1473 warningsEnabled = false;
1475 return { result, warningsEnabled && compare_might_raise(t1, t2) };
// Shared implementation of Same/NSame. First tries to rewrite
// `Null; <expr>; Same` patterns into the cheaper IsTypeC/IsTypeL null
// checks; otherwise falls back to resolveSame.
1478 template<bool Negate>
1479 void sameImpl(ISS& env) {
1480 if (auto const last = last_op(env)) {
// `<expr>; Null; Same` == `is_null(<expr>)`.
1481 if (last->op == Op::Null) {
1482 rewind(env, 1);
1483 reduce(env, bc::IsTypeC { IsTypeOp::Null });
1484 if (Negate) reduce(env, bc::Not {});
1485 return;
1487 if (auto const prev = last_op(env, 1)) {
// `Null; CGetL $x; Same` == `is_null($x)` via IsTypeL on the local.
1488 if (prev->op == Op::Null &&
1489 (last->op == Op::CGetL || last->op == Op::CGetL2 ||
1490 last->op == Op::CGetQuietL)) {
1491 auto const loc = [&]() {
1492 if (last->op == Op::CGetL) {
1493 return last->CGetL.nloc1;
1494 } else if (last->op == Op::CGetL2) {
1495 return last->CGetL2.nloc1;
1496 } else if (last->op == Op::CGetQuietL) {
1497 return NamedLocal{kInvalidLocalName, last->CGetQuietL.loc1};
1499 always_assert(false);
1500 }();
1501 rewind(env, 2);
1502 reduce(env, bc::IsTypeL { loc, IsTypeOp::Null });
1503 if (Negate) reduce(env, bc::Not {});
1504 return;
// Generic path: compute the result type and whether it can raise.
1509 auto pair = resolveSame<Negate>(env);
1510 discard(env, 2);
1512 if (!pair.second) {
1513 nothrow(env);
1514 constprop(env);
1517 push(env, std::move(pair.first));
// Refine local types across a Same/NSame immediately followed by a
// conditional jump: on the "identical" edge the two operands can be
// intersected (and marked equivalent), on the "differ" edge null/bool
// information can be negated. Returns false when nothing can be refined.
1520 template<class JmpOp>
1521 bool sameJmpImpl(ISS& env, Op sameOp, const JmpOp& jmp) {
1522 const StackElem* elems[2];
1523 env.state.stack.peek(2, elems, 1);
1525 auto const loc0 = elems[1]->equivLoc;
1526 auto const loc1 = elems[0]->equivLoc;
1527 // If loc0 == loc1, either they're both NoLocalId, so there's
1528 // nothing for us to deduce, or both stack elements are the same
1529 // value, so the only thing we could deduce is that they are or are
1530 // not NaN. But we don't track that, so just bail.
1531 if (loc0 == loc1 || loc0 == StackDupId) return false;
1533 auto const ty0 = elems[1]->type;
1534 auto const ty1 = elems[0]->type;
1535 auto const val0 = tv(ty0);
1536 auto const val1 = tv(ty1);
1538 assertx(!val0 || !val1);
// If neither side has a location or constant that would let us learn
// anything new, bail before doing any work.
1539 if ((loc0 == NoLocalId && !val0 && ty1.subtypeOf(ty0)) ||
1540 (loc1 == NoLocalId && !val1 && ty0.subtypeOf(ty1))) {
1541 return false;
1544 // Same currently lies about the distinction between Func/Cls/Str
1545 if (ty0.couldBe(BCls) && ty1.couldBe(BStr)) return false;
1546 if (ty1.couldBe(BCls) && ty0.couldBe(BStr)) return false;
1548 // We need to loosen provenance here because it doesn't affect same / equal.
1549 auto isect = intersection_of(loosen_provenance(ty0), loosen_provenance(ty1));
1551 // Unfortunately, floating point negative zero and positive zero are
1552 // different, but are identical using as far as Same is concerened. We should
1553 // avoid refining a value to 0.0 because it compares identically to 0.0
1554 if (isect.couldBe(dval(0.0)) || isect.couldBe(dval(-0.0))) {
1555 isect = union_of(isect, TDbl);
1558 discard(env, 1);
// Refinement applied on the edge where the values are identical.
1560 auto handle_same = [&] {
1561 // Currently dce uses equivalency to prove that something isn't
1562 // the last reference - so we can only assert equivalency here if
1563 // we know that won't be affected. Its irrelevant for uncounted
1564 // things, and for TObj and TRes, $x === $y iff $x and $y refer to
1565 // the same thing.
1566 if (loc0 <= MaxLocalId &&
1567 (ty0.subtypeOf(BObj | BRes | BPrim) ||
1568 ty1.subtypeOf(BObj | BRes | BPrim) ||
1569 (ty0.subtypeOf(BUnc) && ty1.subtypeOf(BUnc)))) {
1570 if (loc1 == StackDupId) {
1571 setStkLocal(env, loc0, 0);
1572 } else if (loc1 <= MaxLocalId && !locsAreEquiv(env, loc0, loc1)) {
// Walk loc0's existing equivalence chain and re-link every member
// onto loc1's chain.
1573 auto loc = loc0;
1574 while (true) {
1575 auto const other = findLocEquiv(env, loc);
1576 if (other == NoLocalId) break;
1577 killLocEquiv(env, loc);
1578 addLocEquiv(env, loc, loc1);
1579 loc = other;
1581 addLocEquiv(env, loc, loc1);
1584 return refineLocation(env, loc1 != NoLocalId ? loc1 : loc0, [&] (Type ty) {
// Intersection may drop Uninit that the location could legitimately
// hold; re-add it if the identical value could be (init) null.
1585 auto const needsUninit =
1586 ty.couldBe(BUninit) &&
1587 !isect.couldBe(BUninit) &&
1588 isect.couldBe(BInitNull);
1589 auto ret = ty.subtypeOf(BUnc) ? isect : loosen_staticness(isect);
1590 if (needsUninit) ret = union_of(std::move(ret), TUninit);
1591 return ret;
// Refinement on the not-identical edge: only null and two-valued bool
// types give us usable negative information.
1596 auto handle_differ_side = [&] (LocalId location, const Type& ty) {
1597 if (!ty.subtypeOf(BInitNull) && !ty.strictSubtypeOf(TBool)) return true;
1598 return refineLocation(env, location, [&] (Type t) {
1599 if (ty.subtypeOf(BNull)) {
1600 t = remove_uninit(std::move(t));
1601 if (is_opt(t)) t = unopt(std::move(t));
1602 return t;
1603 } else if (ty.strictSubtypeOf(TBool) && t.subtypeOf(BBool)) {
1604 return ty == TFalse ? TTrue : TFalse;
1606 return t;
1610 auto handle_differ = [&] {
1611 return
1612 (loc0 == NoLocalId || handle_differ_side(loc0, ty1)) &&
1613 (loc1 == NoLocalId || handle_differ_side(loc1, ty0));
1616 auto const sameIsJmpTarget =
1617 (sameOp == Op::Same) == (JmpOp::op == Op::JmpNZ);
1619 auto save = env.state;
1620 auto const target_reachable = sameIsJmpTarget ?
1621 handle_same() : handle_differ();
1622 if (!target_reachable) jmp_nevertaken(env);
1623 // swap, so we can restore this state if the branch is always taken.
1624 env.state.swap(save);
1625 if (!(sameIsJmpTarget ? handle_differ() : handle_same())) {
1626 jmp_setdest(env, jmp.target1);
1627 env.state.copy_from(std::move(save));
1628 } else if (target_reachable) {
1629 env.propagate(jmp.target1, &save);
1632 return true;
// invertJmp flips a conditional jump's sense while preserving its target;
// Same/NSame dispatch to the shared sameImpl with the appropriate polarity.
1635 bc::JmpNZ invertJmp(const bc::JmpZ& jmp) { return bc::JmpNZ { jmp.target1 }; }
1636 bc::JmpZ invertJmp(const bc::JmpNZ& jmp) { return bc::JmpZ { jmp.target1 }; }
1640 void in(ISS& env, const bc::Same&) { sameImpl<false>(env); }
1641 void in(ISS& env, const bc::NSame&) { sameImpl<true>(env); }
1643 template<class Fun>
1644 void binOpBoolImpl(ISS& env, Fun fun) {
1645 auto const t1 = popC(env);
1646 auto const t2 = popC(env);
1647 auto const v1 = tv(t1);
1648 auto const v2 = tv(t2);
1649 if (v1 && v2) {
1650 if (auto r = eval_cell_value([&]{ return fun(*v2, *v1); })) {
1651 constprop(env);
1652 return push(env, *r ? TTrue : TFalse);
1655 // TODO_4: evaluate when these can throw, non-constant type stuff.
1656 push(env, TBool);
// Shared implementation for int-valued binary comparisons (Cmp): fold to a
// constant int when both operands are known and the evaluation doesn't
// raise; otherwise just TInt.
1659 template<class Fun>
1660 void binOpInt64Impl(ISS& env, Fun fun) {
1661 auto const t1 = popC(env);
1662 auto const t2 = popC(env);
1663 auto const v1 = tv(t1);
1664 auto const v2 = tv(t2);
1665 if (v1 && v2) {
// Operand order: t2 was below t1 on the stack, so it is the left operand.
1666 if (auto r = eval_cell_value([&]{ return ival(fun(*v2, *v1)); })) {
1667 constprop(env);
1668 return push(env, std::move(*r));
1671 // TODO_4: evaluate when these can throw, non-constant type stuff.
1672 push(env, TInt);
// Eq/Neq: if Same-analysis proves the operands identical the loose
// comparison is decided too (=== implies ==); otherwise fall back to the
// generic bool comparison.
1675 void in(ISS& env, const bc::Eq&) {
1676 auto rs = resolveSame<false>(env);
1677 if (rs.first == TTrue) {
1678 if (!rs.second) constprop(env);
1679 discard(env, 2);
1680 return push(env, TTrue);
1682 binOpBoolImpl(env, [&] (TypedValue c1, TypedValue c2) { return tvEqual(c1, c2); });
1684 void in(ISS& env, const bc::Neq&) {
1685 auto rs = resolveSame<false>(env);
1686 if (rs.first == TTrue) {
1687 if (!rs.second) constprop(env);
1688 discard(env, 2);
1689 return push(env, TFalse);
1691 binOpBoolImpl(env, [&] (TypedValue c1, TypedValue c2) { return !tvEqual(c1, c2); });
// Relational/ordering/xor ops: all delegate to the generic binary helpers
// with the matching runtime comparison function.
1693 void in(ISS& env, const bc::Lt&) {
1694 binOpBoolImpl(env, [&] (TypedValue c1, TypedValue c2) { return tvLess(c1, c2); });
1696 void in(ISS& env, const bc::Gt&) {
1697 binOpBoolImpl(env, [&] (TypedValue c1, TypedValue c2) { return tvGreater(c1, c2); });
1699 void in(ISS& env, const bc::Lte&) { binOpBoolImpl(env, tvLessOrEqual); }
1700 void in(ISS& env, const bc::Gte&) { binOpBoolImpl(env, tvGreaterOrEqual); }
1702 void in(ISS& env, const bc::Cmp&) {
1703 binOpInt64Impl(env, [&] (TypedValue c1, TypedValue c2) { return tvCompare(c1, c2); });
1706 void in(ISS& env, const bc::Xor&) {
1707 binOpBoolImpl(env, [&] (TypedValue c1, TypedValue c2) {
1708 return tvToBool(c1) ^ tvToBool(c2);
1712 void castBoolImpl(ISS& env, const Type& t, bool negate) {
1713 nothrow(env);
1714 constprop(env);
1716 auto const e = emptiness(t);
1717 switch (e) {
1718 case Emptiness::Empty:
1719 case Emptiness::NonEmpty:
1720 return push(env, (e == Emptiness::Empty) == negate ? TTrue : TFalse);
1721 case Emptiness::Maybe:
1722 break;
1725 push(env, TBool);
// Not is a negated bool cast; CastBool is elided entirely when the input is
// already a bool.
1728 void in(ISS& env, const bc::Not&) {
1729 castBoolImpl(env, popC(env), true);
1732 void in(ISS& env, const bc::CastBool&) {
1733 auto const t = topC(env);
1734 if (t.subtypeOf(BBool)) return reduce(env);
1735 castBoolImpl(env, popC(env), false);
// CastInt: elided when already an int; folded to a constant when the input
// is a known cell; nothrow unless the input could be an object.
1738 void in(ISS& env, const bc::CastInt&) {
1739 auto const t = topC(env);
1740 if (t.subtypeOf(BInt)) return reduce(env);
1741 constprop(env);
1742 popC(env);
1743 // Objects can raise a warning about converting to int.
1744 if (!t.couldBe(BObj)) nothrow(env);
1745 if (auto const v = tv(t)) {
1746 auto cell = eval_cell([&] {
1747 return make_tv<KindOfInt64>(tvToInt(*v));
1749 if (cell) return push(env, std::move(*cell));
1751 push(env, TInt);
1754 // Handle a casting operation, where "target" is the type being casted to. If
1755 // "fn" is provided, it will be called to cast any constant inputs. If the
1756 // source type is already a subtype of the destination, the cast is elided
1757 // entirely (reduced to a no-op).
1758 void castImpl(ISS& env, Type target, void(*fn)(TypedValue*)) {
1759 auto const t = topC(env);
1760 if (t.subtypeOf(target)) return reduce(env);
1761 popC(env);
// Casts on provenance-carrying outputs inside a SkipFrame function need a
// runtime tag, so constant folding would lose the provenance.
1763 auto const needsRuntimeProvenance =
1764 RO::EvalArrayProvenance &&
1765 env.ctx.func->attrs & AttrProvenanceSkipFrame &&
1766 target.subtypeOf(kProvBits);
1768 if (fn && !needsRuntimeProvenance) {
1769 if (auto val = tv(t)) {
1770 // Legacy dvarrays may raise a notice on cast. In order to simplify the
1771 // rollout of these notices, we don't const-fold casts on these arrays.
1772 auto const may_raise_notice = [&]{
1773 if (!tvIsArrayLike(*val)) return false;
1774 auto const ad = val->m_data.parr;
1775 if (!ad->isLegacyArray()) return false;
1776 return (ad->isDArray() && target == TDict) ||
1777 (ad->isVArray() && target == TVec);
1778 }();
1779 if (!may_raise_notice) {
// Fold the cast: the pushed type becomes the exact converted value.
1780 if (auto result = eval_cell([&] { fn(&*val); return *val; })) {
1781 constprop(env);
1782 target = *result;
1787 push(env, std::move(target));
// The individual cast bytecodes all delegate to castImpl with the matching
// target type and in-place runtime conversion. The d/varray casts install a
// provenance tag override for the duration of the (possible) constant fold.
1790 void in(ISS& env, const bc::CastDouble&) {
1791 castImpl(env, TDbl, tvCastToDoubleInPlace);
1794 void in(ISS& env, const bc::CastString&) {
1795 castImpl(env, TStr, tvCastToStringInPlace);
1798 void in(ISS& env, const bc::CastDict&) {
1799 castImpl(env, TDict, tvCastToDictInPlace);
1802 void in(ISS& env, const bc::CastVec&) {
1803 castImpl(env, TVec, tvCastToVecInPlace);
1806 void in(ISS& env, const bc::CastKeyset&) {
1807 castImpl(env, TKeyset, tvCastToKeysetInPlace);
1810 void in(ISS& env, const bc::CastVArray&) {
1811 assertx(!RuntimeOption::EvalHackArrDVArrs);
1812 arrprov::TagOverride tag_override{provTagHere(env).get()};
1813 castImpl(env, TVArr, tvCastToVArrayInPlace);
1816 void in(ISS& env, const bc::CastDArray&) {
1817 assertx(!RuntimeOption::EvalHackArrDVArrs);
1818 arrprov::TagOverride tag_override{provTagHere(env).get()};
1819 castImpl(env, TDArr, tvCastToDArrayInPlace);
// DblAsBits: reinterpret a double's bit pattern as an int64. Non-doubles
// produce 0; a known double folds to its exact bit pattern.
1822 void in(ISS& env, const bc::DblAsBits&) {
1823 nothrow(env);
1824 constprop(env);
1826 auto const ty = popC(env);
1827 if (!ty.couldBe(BDbl)) return push(env, ival(0));
1829 if (auto val = tv(ty)) {
1830 assertx(isDoubleType(val->m_type));
// Retag the payload without touching the bits — this is the
// reinterpretation itself.
1831 val->m_type = KindOfInt64;
1832 push(env, from_cell(*val));
1833 return;
1836 push(env, TInt);
// Print: writes the popped value to output and always pushes int(1).
1839 void in(ISS& env, const bc::Print& /*op*/) {
1840 popC(env);
1841 push(env, ival(1));
1844 void in(ISS& env, const bc::Clone& /*op*/) {
1845 auto val = popC(env);
1846 if (!val.subtypeOf(BObj)) {
1847 val = is_opt(val) ? unopt(std::move(val)) : TObj;
1849 push(env, std::move(val));
// Exit pushes null (its "return value"); Fatal consumes the message and
// never falls through.
1852 void in(ISS& env, const bc::Exit&) { popC(env); push(env, TInitNull); }
1853 void in(ISS& env, const bc::Fatal&) { popC(env); }
// Unconditional jumps are block terminators and are handled by the CFG
// layer, so they must never appear mid-block during interpretation.
1855 void in(ISS& /*env*/, const bc::JmpNS&) {
1856 always_assert(0 && "blocks should not contain JmpNS instructions");
1859 void in(ISS& /*env*/, const bc::Jmp&) {
1860 always_assert(0 && "blocks should not contain Jmp instructions");
// Select: stack is [..., false-value, true-value, condition]. When the
// condition's truthiness is known, the op reduces (Empty) or pushes the
// selected branch type; otherwise the union of both branches.
1863 void in(ISS& env, const bc::Select& op) {
1864 auto const cond = topC(env);
1865 auto const t = topC(env, 1);
1866 auto const f = topC(env, 2);
1868 nothrow(env);
1869 constprop(env);
1871 switch (emptiness(cond)) {
1872 case Emptiness::Maybe:
1873 discard(env, 3);
1874 push(env, union_of(t, f));
1875 return;
1876 case Emptiness::NonEmpty:
1877 discard(env, 3);
1878 push(env, t);
1879 return;
1880 case Emptiness::Empty:
// Condition definitely falsey: drop condition and true-value, leaving
// the false-value on the stack.
1881 return reduce(env, bc::PopC {}, bc::PopC {});
1883 not_reached();
1886 namespace {
// Refine a location's type across an IsTypeC/IsTypeL/IssetL followed by a
// conditional jump: intersect with the tested type on the "passed" edge and
// remove it on the "failed" edge. Returns false if no refinement applies.
1888 template<class JmpOp>
1889 bool isTypeHelper(ISS& env,
1890 IsTypeOp typeOp,
1891 LocalId location,
1892 Op op,
1893 const JmpOp& jmp) {
1894 if (typeOp == IsTypeOp::Scalar) {
1895 return false;
// For IsTypeC the tested value is the stack top; recover the local it is
// equivalent to (if any) so the refinement can be applied to the local.
1898 auto const val = [&] {
1899 if (op != Op::IsTypeC) return locRaw(env, location);
1900 const StackElem* elem;
1901 env.state.stack.peek(1, &elem, 1);
1902 location = elem->equivLoc;
1903 return elem->type;
1904 }();
1906 if (location == NoLocalId || !val.subtypeOf(BCell)) return false;
1908 // If the type could be ClsMeth and Arr/Vec, skip location refining.
1909 // Otherwise, refine location based on the testType.
1910 auto testTy = type_of_istype(typeOp);
1912 assertx(val.couldBe(testTy) &&
1913 (!val.subtypeOf(testTy) || val.subtypeOf(BObj)));
1915 discard(env, 1);
1917 if (op == Op::IsTypeC) {
1918 if (!is_type_might_raise(testTy, val)) nothrow(env);
1919 } else if (op == Op::IssetL) {
1920 nothrow(env);
1921 } else if (!locCouldBeUninit(env, location) &&
1922 !is_type_might_raise(testTy, val)) {
1923 nothrow(env);
// negate == true means the jump is taken when the test passed.
1926 auto const negate = (jmp.op == Op::JmpNZ) == (op != Op::IssetL);
1927 auto const was_true = [&] (Type t) {
1928 if (testTy.subtypeOf(BNull)) return intersection_of(t, TNull);
1929 assertx(!testTy.couldBe(BNull));
1930 return intersection_of(t, testTy);
1932 auto const was_false = [&] (Type t) {
1933 auto tinit = remove_uninit(t);
1934 if (testTy.subtypeOf(BNull)) {
1935 return is_opt(tinit) ? unopt(tinit) : tinit;
1937 if (is_opt(tinit)) {
1938 assertx(!testTy.couldBe(BNull));
// ?T failing an is-T test must have been the null.
1939 if (unopt(tinit).subtypeOf(testTy)) return TNull;
1941 return t;
// pre applies on the fallthrough edge, post on the jump-target edge.
1944 auto const pre = [&] (Type t) {
1945 return negate ? was_true(std::move(t)) : was_false(std::move(t));
1948 auto const post = [&] (Type t) {
1949 return negate ? was_false(std::move(t)) : was_true(std::move(t));
1952 refineLocation(env, location, pre, jmp.target1, post);
1953 return true;
1956 // If the current function is a memoize wrapper, return the inferred return type
1957 // of the function being wrapped along with if the wrapped function is effect
1958 // free.
1959 std::pair<Type, bool> memoizeImplRetType(ISS& env) {
1960 always_assert(env.ctx.func->isMemoizeWrapper);
1962 // Lookup the wrapped function. This should always resolve to a precise
1963 // function but we don't rely on it.
1964 auto const memo_impl_func = [&] {
1965 if (env.ctx.func->cls) {
1966 auto const clsTy = selfClsExact(env);
1967 return env.index.resolve_method(
1968 env.ctx,
1969 clsTy ? *clsTy : TCls,
1970 memoize_impl_name(env.ctx.func)
1973 return env.index.resolve_func(env.ctx, memoize_impl_name(env.ctx.func));
1974 }();
1976 // Infer the return type of the wrapped function, taking into account the
1977 // types of the parameters for context sensitive types.
1978 auto const numArgs = env.ctx.func->params.size();
1979 CompactVector<Type> args{numArgs};
1980 for (auto i = LocalId{0}; i < numArgs; ++i) {
// The wrapper's parameter locals are forwarded verbatim to the impl.
1981 args[i] = locAsCell(env, i);
1984 // Determine the context the wrapped function will be called on.
1985 auto const ctxType = [&]() -> Type {
1986 if (env.ctx.func->cls) {
1987 if (env.ctx.func->attrs & AttrStatic) {
1988 // The class context for static methods is the method's class,
1989 // if LSB is not specified.
1990 auto const clsTy =
1991 env.ctx.func->isMemoizeWrapperLSB ?
1992 selfCls(env) :
1993 selfClsExact(env);
1994 return clsTy ? *clsTy : TCls;
1995 } else {
1996 return thisTypeNonNull(env);
1999 return TBottom;
2000 }();
2002 auto retTy = env.index.lookup_return_type(
2003 env.ctx,
2004 args,
2005 ctxType,
2006 memo_impl_func
2008 auto const effectFree = env.index.is_effect_free(memo_impl_func);
2009 // Regardless of anything we know the return type will be an InitCell (this is
2010 // a requirement of memoize functions).
2011 if (!retTy.subtypeOf(BInitCell)) return { TInitCell, effectFree };
2012 return { retTy, effectFree };
// Refine the tested local across an InstanceOfD followed by a conditional
// jump: the passing edge narrows to the checked class; the failing edge can
// narrow an optional type to null. Returns false if no refinement applies.
2015 template<class JmpOp>
2016 bool instanceOfJmpImpl(ISS& env,
2017 const bc::InstanceOfD& inst,
2018 const JmpOp& jmp) {
2020 const StackElem* elem;
2021 env.state.stack.peek(1, &elem, 1);
2023 auto const locId = elem->equivLoc;
// Interfaces implemented by non-objects (e.g. Stringish) make the failing
// edge uninformative, so bail on those.
2024 if (locId == NoLocalId || interface_supports_non_objects(inst.str1)) {
2025 return false;
2027 auto const rcls = env.index.resolve_class(env.ctx, inst.str1);
2028 if (!rcls) return false;
2030 auto const val = elem->type;
2031 auto const instTy = subObj(*rcls);
2032 assertx(!val.subtypeOf(instTy) && val.couldBe(instTy));
2034 // If we have an optional type, whose unopt is guaranteed to pass
2035 // the instanceof check, then failing to pass implies it was null.
2036 auto const fail_implies_null = is_opt(val) && unopt(val).subtypeOf(instTy);
2038 discard(env, 1);
2039 auto const negate = jmp.op == Op::JmpNZ;
2040 auto const result = [&] (Type t, bool pass) {
2041 return pass ? instTy : fail_implies_null ? TNull : t;
// pre is the fallthrough edge, post the jump-target edge.
2043 auto const pre = [&] (Type t) { return result(t, negate); };
2044 auto const post = [&] (Type t) { return result(t, !negate); };
2045 refineLocation(env, locId, pre, jmp.target1, post);
2046 return true;
// Like instanceOfJmpImpl, but for IsTypeStructC with a constant type
// structure naming a class/interface: refine the tested local across the
// following conditional jump. Returns false if no refinement applies.
2049 template<class JmpOp>
2050 bool isTypeStructCJmpImpl(ISS& env,
2051 const bc::IsTypeStructC& inst,
2052 const JmpOp& jmp) {
2054 const StackElem* elems[2];
2055 env.state.stack.peek(2, elems, 1);
2057 auto const locId = elems[0]->equivLoc;
2058 if (locId == NoLocalId) return false;
// The type structure itself (one below the tested value) must be a
// statically known array.
2060 auto const a = tv(elems[1]->type);
2061 if (!a) return false;
2062 // if it wasn't valid, the JmpOp wouldn't be reachable
2063 assertx(isValidTSType(*a, false));
2065 auto const is_nullable_ts = is_ts_nullable(a->m_data.parr);
2066 auto const ts_kind = get_ts_kind(a->m_data.parr);
2067 // type_of_type_structure does not resolve these types. It is important we
2068 // do resolve them here, or we may have issues when we reduce the checks to
2069 // InstanceOfD checks. This logic performs the same exact refinement as
2070 // instanceOfD will.
2071 if (is_nullable_ts ||
2072 (ts_kind != TypeStructure::Kind::T_class &&
2073 ts_kind != TypeStructure::Kind::T_interface &&
2074 ts_kind != TypeStructure::Kind::T_xhp &&
2075 ts_kind != TypeStructure::Kind::T_unresolved)) {
2076 return false;
2079 auto const clsName = get_ts_classname(a->m_data.parr);
2080 auto const rcls = env.index.resolve_class(env.ctx, clsName);
// Enums and non-object-supporting interfaces behave differently from a
// plain instanceof, so no refinement there.
2081 if (!rcls ||
2082 !rcls->resolved() ||
2083 rcls->cls()->attrs & AttrEnum ||
2084 interface_supports_non_objects(clsName)) {
2085 return false;
2088 auto const val = elems[0]->type;
2089 auto const instTy = subObj(*rcls);
2090 if (val.subtypeOf(instTy) || !val.couldBe(instTy)) {
2091 return false;
2094 // If we have an optional type, whose unopt is guaranteed to pass
2095 // the instanceof check, then failing to pass implies it was null.
2096 auto const fail_implies_null = is_opt(val) && unopt(val).subtypeOf(instTy);
2098 discard(env, 1);
2100 auto const negate = jmp.op == Op::JmpNZ;
2101 auto const result = [&] (Type t, bool pass) {
2102 return pass ? instTy : fail_implies_null ? TNull : t;
2104 auto const pre = [&] (Type t) { return result(t, negate); };
2105 auto const post = [&] (Type t) { return result(t, !negate); };
2106 refineLocation(env, locId, pre, jmp.target1, post);
2107 return true;
// Shared implementation for JmpZ/JmpNZ. Folds jumps whose condition's
// truthiness is statically known, pattern-matches the preceding instruction
// (Not / Same / IsType* / InstanceOfD / IsTypeStructC) for type refinement,
// and otherwise refines via emptiness on both edges.
2110 template<class JmpOp>
2111 void jmpImpl(ISS& env, const JmpOp& op) {
2112 auto const Negate = std::is_same<JmpOp, bc::JmpNZ>::value;
2113 auto const location = topStkEquiv(env);
2114 auto const e = emptiness(topC(env));
// Branch always taken: keep only the pop and redirect.
2115 if (e == (Negate ? Emptiness::NonEmpty : Emptiness::Empty)) {
2116 reduce(env, bc::PopC {});
2117 return jmp_setdest(env, op.target1);
// Branch never taken, or both edges land in the same real block.
2120 if (e == (Negate ? Emptiness::Empty : Emptiness::NonEmpty) ||
2121 (next_real_block(env.ctx.func, env.blk.fallthrough) ==
2122 next_real_block(env.ctx.func, op.target1))) {
2123 return reduce(env, bc::PopC{});
// fix: after a helper consumed the jump, re-emit the PopC it swallowed
// while preserving the helper's computed jump destination.
2126 auto fix = [&] {
2127 if (env.flags.jmpDest == NoBlockId) return;
2128 auto const jmpDest = env.flags.jmpDest;
2129 env.flags.jmpDest = NoBlockId;
2130 rewind(env, op);
2131 reduce(env, bc::PopC {});
2132 env.flags.jmpDest = jmpDest;
2135 if (auto const last = last_op(env)) {
// `Not; JmpZ` == `JmpNZ` (and vice versa).
2136 if (last->op == Op::Not) {
2137 rewind(env, 1);
2138 return reduce(env, invertJmp(op));
2140 if (last->op == Op::Same || last->op == Op::NSame) {
2141 if (sameJmpImpl(env, last->op, op)) return fix();
2142 } else if (last->op == Op::IssetL) {
2143 if (isTypeHelper(env,
2144 IsTypeOp::Null,
2145 last->IssetL.loc1,
2146 last->op,
2147 op)) {
2148 return fix();
2150 } else if (last->op == Op::IsTypeL) {
2151 if (isTypeHelper(env,
2152 last->IsTypeL.subop2,
2153 last->IsTypeL.nloc1.id,
2154 last->op,
2155 op)) {
2156 return fix();
2158 } else if (last->op == Op::IsTypeC) {
2159 if (isTypeHelper(env,
2160 last->IsTypeC.subop1,
2161 NoLocalId,
2162 last->op,
2163 op)) {
2164 return fix();
2166 } else if (last->op == Op::InstanceOfD) {
2167 if (instanceOfJmpImpl(env, last->InstanceOfD, op)) return fix();
2168 } else if (last->op == Op::IsTypeStructC) {
2169 if (isTypeStructCJmpImpl(env, last->IsTypeStructC, op)) return fix();
// Generic path: condition consumed; refine by truthiness on each edge.
2173 popC(env);
2174 nothrow(env);
2176 if (location == NoLocalId) return env.propagate(op.target1, &env.state);
2178 refineLocation(env, location,
2179 Negate ? assert_nonemptiness : assert_emptiness,
2180 op.target1,
2181 Negate ? assert_emptiness : assert_nonemptiness);
2182 return fix();
2185 } // namespace
2187 void in(ISS& env, const bc::JmpNZ& op) { jmpImpl(env, op); }
2188 void in(ISS& env, const bc::JmpZ& op) { jmpImpl(env, op); }
// Switch: if the scrutinee is a known constant, fold the dispatch to a
// single successor; otherwise propagate state to every taken edge.
2190 void in(ISS& env, const bc::Switch& op) {
2191 auto v = tv(topC(env));
2193 if (v) {
// Folding helper: drop the operand and pin the jump destination.
2194 auto go = [&] (BlockId blk) {
2195 reduce(env, bc::PopC {});
2196 return jmp_setdest(env, blk);
2198 auto num_elems = op.targets.size();
2199 if (op.subop1 == SwitchKind::Unbounded) {
// Unbounded switches index directly by integer value.
2200 if (v->m_type == KindOfInt64 &&
2201 v->m_data.num >= 0 && v->m_data.num < num_elems) {
2202 return go(op.targets[v->m_data.num]);
2204 } else {
// Bounded switches carry two extra trailing targets (the last one is
// the default); compare the value against arg2 + i for each case.
2205 assertx(num_elems > 2);
2206 num_elems -= 2;
2207 for (auto i = size_t{}; ; i++) {
2208 if (i == num_elems) {
2209 return go(op.targets.back());
2211 auto match = eval_cell_value([&] {
2212 return tvEqual(*v, static_cast<int64_t>(op.arg2 + i));
// If the comparison itself can't be evaluated statically, give up on
// folding; an inconclusive match falls through to the generic case.
2214 if (!match) break;
2215 if (*match) {
2216 return go(op.targets[i]);
// Generic case: value unknown — pop it and flow into every successor.
2222 popC(env);
2223 forEachTakenEdge(op, [&] (BlockId id) {
2224 env.propagate(id, &env.state);
// SSwitch (string switch): fold to a single target when the scrutinee is
// a known constant and a case comparison conclusively matches; a null
// case string (kv.first == nullptr) is the default and always matches.
2228 void in(ISS& env, const bc::SSwitch& op) {
2229 auto v = tv(topC(env));
2231 if (v) {
2232 for (auto& kv : op.targets) {
2233 auto match = eval_cell_value([&] {
2234 return !kv.first || tvEqual(*v, kv.first);
// Comparison not statically evaluable: abandon folding entirely.
2236 if (!match) break;
2237 if (*match) {
2238 reduce(env, bc::PopC {});
2239 return jmp_setdest(env, kv.second);
// Generic case: pop the operand and propagate to all taken edges.
2244 popC(env);
2245 forEachTakenEdge(op, [&] (BlockId id) {
2246 env.propagate(id, &env.state);
// RetC: return the popped cell.  If the returned value is known to be a
// copy of a parameter local, record that in retParam so callers can use
// the parameter's type to sharpen the return type.
2250 void in(ISS& env, const bc::RetC& /*op*/) {
2251 auto const locEquiv = topStkLocal(env);
2252 doRet(env, popC(env), false);
2253 if (locEquiv != NoLocalId && locEquiv < env.ctx.func->params.size()) {
2254 env.flags.retParam = locEquiv;
// RetM: multi-value return.  Pop arg1 cells (top of stack is the last
// element) and return them packaged as a vec type.
2257 void in(ISS& env, const bc::RetM& op) {
2258 std::vector<Type> ret(op.arg1);
2259 for (int i = 0; i < op.arg1; i++) {
2260 ret[op.arg1 - i - 1] = popC(env);
2262 doRet(env, vec(std::move(ret)), false);
// RetCSuspended: return from a suspended async function (never a
// generator).  When the popped value is a specialized wait handle we can
// return its inner type; otherwise fall back to TInitCell.
2265 void in(ISS& env, const bc::RetCSuspended&) {
2266 always_assert(env.ctx.func->isAsync && !env.ctx.func->isGenerator);
2268 auto const t = popC(env);
2269 doRet(
2270 env,
2271 is_specialized_wait_handle(t) ? wait_handle_inner(t) : TInitCell,
2272 false
// Throw: consumes the exception object; control never falls through.
2276 void in(ISS& env, const bc::Throw& /*op*/) {
2277 popC(env);
// ThrowNonExhaustiveSwitch: no effect on the tracked abstract state.
2280 void in(ISS& env, const bc::ThrowNonExhaustiveSwitch& /*op*/) {}
// RaiseClassStringConversionWarning: no effect on the tracked state.
2282 void in(ISS& env, const bc::RaiseClassStringConversionWarning& /*op*/) {}
// ChainFaults: net stack effect here is a single popped cell.
2284 void in(ISS& env, const bc::ChainFaults&) {
2285 popC(env);
// NativeImpl: body of a builtin.  The native code may touch any local,
// so all local types are invalidated.  The return type comes from, in
// order: the collection-method-returns-$this special case, the native
// function's declared return type, or TInitCell.
2288 void in(ISS& env, const bc::NativeImpl&) {
2289 killLocals(env);
2291 if (is_collection_method_returning_this(env.ctx.cls, env.ctx.func)) {
2292 auto const resCls = env.index.builtin_class(env.ctx.cls->name);
2293 return doRet(env, objExact(resCls), true);
2296 if (env.ctx.func->nativeInfo) {
2297 return doRet(env, native_function_return_type(env.ctx.func), true);
2299 doRet(env, TInitCell, true);
// CGetL: push a local's value.  Peepholes, in order:
//   - the local is $this       => BareThis (with the weakest safe subop),
//   - previous op was PopL of the same local => fold the store/load pair
//     back into a SetL (requires reprocessing the block),
//   - the local can't be uninit => CGetQuietL of the cheapest equivalent
//     local.
2302 void in(ISS& env, const bc::CGetL& op) {
2303 if (locIsThis(env, op.nloc1.id)) {
2304 auto const& ty = peekLocRaw(env, op.nloc1.id);
2305 if (!ty.subtypeOf(BInitNull)) {
// Pick the BareThis flavor that preserves the original warning
// behavior given what we know about nullness/uninitness.
2306 auto const subop = ty.couldBe(BUninit) ?
2307 BareThisOp::Notice : ty.couldBe(BNull) ?
2308 BareThisOp::NoNotice : BareThisOp::NeverNull;
2309 return reduce(env, bc::BareThis { subop });
2312 if (auto const last = last_op(env)) {
2313 if (last->op == Op::PopL &&
2314 op.nloc1.id == last->PopL.loc1) {
2315 reprocess(env);
2316 rewind(env, 1);
// Conservatively widen the local before re-running as SetL.
2317 setLocRaw(env, op.nloc1.id, TCell);
2318 return reduce(env, bc::SetL { op.nloc1.id });
2321 if (!peekLocCouldBeUninit(env, op.nloc1.id)) {
2322 auto const minLocEquiv = findMinLocEquiv(env, op.nloc1.id, false);
2323 auto const loc = minLocEquiv != NoLocalId ? minLocEquiv : op.nloc1.id;
2324 return reduce(env, bc::CGetQuietL { loc });
// Generic case: the value is pushed with a stack/local equivalence.
2326 mayReadLocal(env, op.nloc1.id);
2327 push(env, locAsCell(env, op.nloc1.id), op.nloc1.id);
// CGetQuietL: like CGetL but never raises on uninit.  Same peepholes as
// CGetL ($this => BareThis, PopL fusion, cheaper equivalent local), and
// the generic case is nothrow/constprop-able.
2330 void in(ISS& env, const bc::CGetQuietL& op) {
2331 if (locIsThis(env, op.loc1)) {
2332 return reduce(env, bc::BareThis { BareThisOp::NoNotice });
2334 if (auto const last = last_op(env)) {
2335 if (last->op == Op::PopL &&
2336 op.loc1 == last->PopL.loc1) {
2337 reprocess(env);
2338 rewind(env, 1);
2339 setLocRaw(env, op.loc1, TCell);
2340 return reduce(env, bc::SetL { op.loc1 });
2343 auto const minLocEquiv = findMinLocEquiv(env, op.loc1, true);
2344 if (minLocEquiv != NoLocalId) {
2345 return reduce(env, bc::CGetQuietL { minLocEquiv });
2348 nothrow(env);
2349 constprop(env);
2350 mayReadLocal(env, op.loc1);
2351 push(env, locAsCell(env, op.loc1), op.loc1);
// CUGetL: push a local that may be uninit, without raising.  A local
// known to be uninit reduces to NullUninit; otherwise push the raw type
// (clamped to TCell in case the raw type carries non-cell information).
2354 void in(ISS& env, const bc::CUGetL& op) {
2355 auto ty = locRaw(env, op.loc1);
2356 if (ty.subtypeOf(BUninit)) {
2357 return reduce(env, bc::NullUninit {});
2359 nothrow(env);
2360 if (!ty.couldBe(BUninit)) constprop(env);
2361 if (!ty.subtypeOf(BCell)) ty = TCell;
2362 push(env, std::move(ty), op.loc1);
// PushL: move a local's value onto the stack, unsetting the local.
// Peepholes: known-constant local => UnsetL + the constant; a cheaper
// equivalent local => read that one, unset this one; PopL of the same
// local immediately before => the pair cancels down to UnsetL.
2365 void in(ISS& env, const bc::PushL& op) {
2366 if (auto val = tv(peekLocRaw(env, op.loc1))) {
2367 return reduce(env, bc::UnsetL { op.loc1 }, gen_constant(*val));
2370 auto const minLocEquiv = findMinLocEquiv(env, op.loc1, false);
2371 if (minLocEquiv != NoLocalId) {
2372 return reduce(env, bc::CGetQuietL { minLocEquiv }, bc::UnsetL { op.loc1 });
2375 if (auto const last = last_op(env)) {
2376 if (last->op == Op::PopL &&
2377 last->PopL.loc1 == op.loc1) {
2378 // rewind is ok, because we're just going to unset the local
2379 // (and note the unset can't be a no-op because the PopL set it
2380 // to an InitCell). But its possible that before the PopL, the
2381 // local *was* unset, so maybe would have killed the no-op. The
2382 // only way to fix that is to reprocess the block with the new
2383 // instruction sequence and see what happens.
2384 reprocess(env);
2385 rewind(env, 1);
2386 return reduce(env, bc::UnsetL { op.loc1 });
// Generic case: model PushL as read-quietly-then-unset.
2390 impl(env, bc::CGetQuietL { op.loc1 }, bc::UnsetL { op.loc1 });
// CGetL2: push a local's value *under* the current top of stack.  When
// the previous instruction is side-effect free (or a compatible CGetL)
// the two can be reordered into a plain CGetL followed by that op;
// otherwise fall back to popping the top, pushing the local, re-pushing.
2393 void in(ISS& env, const bc::CGetL2& op) {
2394 if (auto const last = last_op(env)) {
2395 if ((poppable(last->op) && !numPop(*last)) ||
2396 ((last->op == Op::CGetL || last->op == Op::CGetQuietL) &&
2397 !peekLocCouldBeUninit(env, op.nloc1.id))) {
2398 auto const other = *last;
2399 rewind(env, 1);
2400 return reduce(env, bc::CGetL { op.nloc1 }, other);
2404 if (!peekLocCouldBeUninit(env, op.nloc1.id)) {
2405 auto const minLocEquiv = findMinLocEquiv(env, op.nloc1.id, false);
2406 if (minLocEquiv != NoLocalId) {
// Re-target to the cheapest equivalent local (name becomes invalid).
2407 return reduce(env, bc::CGetL2 { { kInvalidLocalName, minLocEquiv } });
2409 effect_free(env);
2411 mayReadLocal(env, op.nloc1.id);
2412 auto loc = locAsCell(env, op.nloc1.id);
2413 auto topEquiv = topStkLocal(env);
2414 auto top = popT(env);
2415 push(env, std::move(loc), op.nloc1.id);
2416 push(env, std::move(top), topEquiv);
2419 void in(ISS& env, const bc::CGetG&) { popC(env); push(env, TInitCell); }
// CGetS: read a static property.  Two precise paths: a private property
// on the current class (via selfPropAsCell), or a public static known to
// the index; otherwise the result is any initialized cell.
2421 void in(ISS& env, const bc::CGetS& op) {
2422 auto const tcls = popC(env);
2423 auto const tname = popC(env);
2424 auto const vname = tv(tname);
2425 auto const self = selfCls(env);
2427 if (vname && vname->m_type == KindOfPersistentString &&
2428 self && tcls.subtypeOf(*self)) {
2429 if (auto ty = selfPropAsCell(env, vname->m_data.pstr)) {
2430 // Only nothrow when we know it's a private declared property (and thus
2431 // accessible here), class initialization won't throw, and its not a
2432 // LateInit prop (which will throw if not initialized).
2433 if (!classInitMightRaise(env, tcls) &&
2434 !isMaybeLateInitSelfProp(env, vname->m_data.pstr)) {
2435 nothrow(env);
2437 // We can only constprop here if we know for sure this is exactly the
2438 // correct class. The reason for this is that you could have a LSB
2439 // class attempting to access a private static in a derived class with
2440 // the same name as a private static in this class, which is supposed to
2441 // fatal at runtime (for an example see test/quick/static_sprop2.php).
2442 auto const selfExact = selfClsExact(env);
2443 if (selfExact && tcls.subtypeOf(*selfExact)) constprop(env);
2446 if (ty->subtypeOf(BBottom)) unreachable(env);
2447 return push(env, std::move(*ty));
2451 auto indexTy = env.index.lookup_public_static(env.ctx, tcls, tname);
2452 if (indexTy.subtypeOf(BInitCell)) {
2454 * Constant propagation here can change when we invoke autoload.
2455 * It's safe not to check anything about private or protected static
2456 * properties, because you can't override a public static property with
2457 * a private or protected one---if the index gave us back a constant type,
2458 * it's because it found a public static and it must be the property this
2459 * would have read dynamically.
2461 if (!classInitMightRaise(env, tcls) &&
2462 !env.index.lookup_public_static_maybe_late_init(tcls, tname)) {
2463 constprop(env);
2465 if (indexTy.subtypeOf(BBottom)) unreachable(env);
2466 return push(env, std::move(indexTy));
// Nothing known: any initialized cell.
2469 push(env, TInitCell);
// ClassGetC: convert the top of stack to a class.  Already a class =>
// no-op; an object => its class; a known class-name string that resolves
// => the exact class (effect-free when the class is definitely loaded);
// otherwise any class.
2472 void in(ISS& env, const bc::ClassGetC& op) {
2473 auto const t = topC(env);
2475 if (t.subtypeOf(BCls)) return reduce(env, bc::Nop {});
2476 popC(env);
2478 if (t.subtypeOf(BObj)) {
2479 effect_free(env);
2480 push(env, objcls(t));
2481 return;
2484 if (auto const clsname = getNameFromType(t)) {
2485 if (auto const rcls = env.index.resolve_class(env.ctx, clsname)) {
2486 if (rcls->cls()) effect_free(env);
2487 push(env, clsExact(*rcls));
2488 return;
2492 push(env, TCls);
// ClassGetTS: resolve a class plus its generics from a type structure.
// The type-structure operand must be the required TS array kind (dict or
// darray depending on HackArrDVArrs); pushes the class and an optional
// generics array.
2495 void in(ISS& env, const bc::ClassGetTS& op) {
2496 // TODO(T31677864): implement real optimizations
2497 auto const ts = popC(env);
2498 auto const requiredTSType = RuntimeOption::EvalHackArrDVArrs ? BDict : BDArr;
2499 if (!ts.couldBe(requiredTSType)) {
// Wrong input kind: this always throws, so the result is unreachable.
2500 push(env, TBottom);
2501 return;
2504 auto const& genericsType =
2505 RuntimeOption::EvalHackArrDVArrs ? TVec : TVArr;
2507 push(env, TCls);
2508 push(env, opt(genericsType));
// AKExists: array_key_exists().  Dispatches on the base type (vec, dict/
// keyset, php array, object/other) and uses the elem lookup helpers'
// ThrowMode to decide between TTrue/TFalse/TBool and whether the op can
// throw or be const-propagated.
2511 void in(ISS& env, const bc::AKExists& /*op*/) {
2512 auto const base = popC(env);
2513 auto const key = popC(env);
2515 // Bases other than array-like or object will raise a warning and return
2516 // false.
2517 if (!base.couldBeAny(TArr, TVec, TDict, TKeyset, TObj)) {
2518 return push(env, TFalse);
2521 // Push the returned type and annotate effects appropriately, taking into
2522 // account if the base might be null. Allowing for a possibly null base lets
2523 // us capture more cases.
2524 auto const finish = [&] (const Type& t, bool mayThrow) {
2525 if (base.couldBe(BInitNull)) return push(env, union_of(t, TFalse));
2526 if (!mayThrow) {
2527 constprop(env);
2528 effect_free(env);
2530 if (base.subtypeOf(BBottom)) unreachable(env);
2531 return push(env, t);
2534 // Helper for Hack arrays. "validKey" is the set of key types which can return
2535 // a value from AKExists. "silentKey" is the set of key types which will
2536 // silently return false (anything else throws). The Hack array elem functions
2537 // will treat values of "silentKey" as throwing, so we must identify those
2538 // cases and deal with them.
2539 auto const hackArr = [&] (std::pair<Type, ThrowMode> elem,
2540 const Type& validKey,
2541 const Type& silentKey) {
2542 switch (elem.second) {
2543 case ThrowMode::None:
2544 assertx(key.subtypeOf(validKey));
2545 return finish(TTrue, false);
2546 case ThrowMode::MaybeMissingElement:
2547 assertx(key.subtypeOf(validKey));
2548 return finish(TBool, false);
2549 case ThrowMode::MissingElement:
2550 assertx(key.subtypeOf(validKey));
2551 return finish(TFalse, false);
2552 case ThrowMode::MaybeBadKey:
2553 assertx(key.couldBe(validKey));
2554 return finish(
2555 elem.first.subtypeOf(BBottom) ? TFalse : TBool,
2556 !key.subtypeOf(BArrKeyCompat)
2558 case ThrowMode::BadOperation:
2559 assertx(!key.couldBe(validKey));
2560 return finish(key.couldBe(silentKey) ? TFalse : TBottom, true);
2564 // Vecs will throw for any key other than Int or Str, and will silently
2565 // return false for Str.
2566 if (base.subtypeOrNull(BVec)) {
2567 if (key.subtypeOf(BStr)) return finish(TFalse, false);
2568 return hackArr(vec_elem(base, key, TBottom), TInt, TStr);
2571 // Dicts and keysets will throw for any key other than Int or Str.
2572 if (base.subtypeOfAny(TOptDict, TOptKeyset)) {
2573 auto const elem = base.subtypeOrNull(BDict)
2574 ? dict_elem(base, key, TBottom)
2575 : keyset_elem(base, key, TBottom);
2576 return hackArr(elem, TArrKeyCompat, TBottom);
2579 if (base.subtypeOrNull(BArr)) {
2580 // Unlike Idx, AKExists will transform a null key on arrays into the static
2581 // empty string, so we don't need to do any fixups here.
2582 auto const elem = array_elem(base, key, TBottom);
2583 switch (elem.second) {
2584 case ThrowMode::None: return finish(TTrue, false);
2585 case ThrowMode::MaybeMissingElement: return finish(TBool, false);
2586 case ThrowMode::MissingElement: return finish(TFalse, false);
2587 case ThrowMode::MaybeBadKey:
2588 return finish(elem.first.subtypeOf(BBottom) ? TFalse : TBool, true);
2589 case ThrowMode::BadOperation: always_assert(false);
2593 // Objects or other unions of possible bases
2594 push(env, TBool);
// Fully-qualified names of the two ImplicitContext methods that are
// permitted to use GetMemoKeyL without being memoize wrappers (checked
// by the assertion in the GetMemoKeyL handler).
2597 const StaticString
2598 s_implicit_context_set("HH\\ImplicitContext::set"),
2599 s_implicit_context_genSet("HH\\ImplicitContext::genSet");
// GetMemoKeyL: compute a memoization key from a local.  Only legal in
// memoize wrappers (or the two whitelisted ImplicitContext methods).
// The parameter's memo-key constraint, when available, lets us reduce
// this to cheap bytecode sequences (identity, CastInt, DblAsBits,
// IsTypeL+Select, or a direct getInstanceKey call); otherwise we model
// the generic serialization-based scheme.
2601 void in(ISS& env, const bc::GetMemoKeyL& op) {
2602 auto const& func = env.ctx.func;
2603 auto const name = folly::to<std::string>(
2604 func && func->cls ? func->cls->name->data() : "",
2605 func && func->cls ? "::" : "",
2606 func ? func->name->data() : "");
2607 always_assert(func->isMemoizeWrapper ||
2608 name == s_implicit_context_set.get()->toCppString() ||
2609 name == s_implicit_context_genSet.get()->toCppString());
2611 auto const rclsIMemoizeParam = env.index.builtin_class(s_IMemoizeParam.get());
2612 auto const tyIMemoizeParam = subObj(rclsIMemoizeParam);
2614 auto const inTy = locAsCell(env, op.nloc1.id);
2616 // If the local could be uninit, we might raise a warning (as
2617 // usual). Converting an object to a memo key might invoke PHP code if it has
2618 // the IMemoizeParam interface, and if it doesn't, we'll throw.
2619 if (!locCouldBeUninit(env, op.nloc1.id) &&
2620 !inTy.couldBeAny(TObj, TArr, TVec, TDict)) {
2621 nothrow(env); constprop(env);
2624 // If type constraints are being enforced and the local being turned into a
2625 // memo key is a parameter, then we can possibly using the type constraint to
2626 // infer a more efficient memo key mode.
2627 using MK = MemoKeyConstraint;
2628 folly::Optional<res::Class> resolvedCls;
// Derive the memo-key constraint from the parameter's type constraint
// (resolving type aliases; object constraints remember the resolved
// class for the MK::Object cases below).
2629 auto const mkc = [&] {
2630 if (op.nloc1.id >= env.ctx.func->params.size()) return MK::None;
2631 auto tc = env.ctx.func->params[op.nloc1.id].typeConstraint;
2632 if (tc.type() == AnnotType::Object) {
2633 auto res = env.index.resolve_type_name(tc.typeName());
2634 if (res.type != AnnotType::Object) {
2635 tc.resolveType(res.type, res.nullable || tc.isNullable());
2636 } else {
2637 resolvedCls = env.index.resolve_class(env.ctx, tc.typeName());
2640 return memoKeyConstraintFromTC(tc);
2641 }();
2643 // Use the type-constraint to reduce this operation to a more efficient memo
2644 // mode. Some of the modes can be reduced to simple bytecode operations
2645 // inline. Even with the type-constraints, we still need to check the inferred
2646 // type of the local. Something may have possibly clobbered the local between
2647 // the type-check and this op.
2648 switch (mkc) {
2649 case MK::Int:
2650 // Always an int, so the key is always an identity mapping
2651 if (inTy.subtypeOf(BInt)) return reduce(env, bc::CGetL { op.nloc1 });
2652 break;
2653 case MK::Bool:
2654 // Always a bool, so the key is the bool cast to an int
2655 if (inTy.subtypeOf(BBool)) {
2656 return reduce(env, bc::CGetL { op.nloc1 }, bc::CastInt {});
2658 break;
2659 case MK::Str:
2660 // Always a string, so the key is always an identity mapping
2661 if (inTy.subtypeOf(BStr)) return reduce(env, bc::CGetL { op.nloc1 });
2662 break;
2663 case MK::IntOrStr:
2664 // Either an int or string, so the key can be an identity mapping
2665 if (inTy.subtypeOf(BArrKey)) return reduce(env, bc::CGetL { op.nloc1 });
2666 break;
2667 case MK::StrOrNull:
2668 // A nullable string. The key will either be the string or the integer
2669 // zero.
2670 if (inTy.subtypeOrNull(BStr)) {
2671 return reduce(
2672 env,
2673 bc::CGetL { op.nloc1 },
2674 bc::Int { 0 },
2675 bc::IsTypeL { op.nloc1, IsTypeOp::Null },
2676 bc::Select {}
2679 break;
2680 case MK::IntOrNull:
2681 // A nullable int. The key will either be the integer, or the static empty
2682 // string.
2683 if (inTy.subtypeOrNull(BInt)) {
2684 return reduce(
2685 env,
2686 bc::CGetL { op.nloc1 },
2687 bc::String { staticEmptyString() },
2688 bc::IsTypeL { op.nloc1, IsTypeOp::Null },
2689 bc::Select {}
2692 break;
2693 case MK::BoolOrNull:
2694 // A nullable bool. The key will either be 0, 1, or 2.
2695 if (inTy.subtypeOrNull(BBool)) {
2696 return reduce(
2697 env,
2698 bc::CGetL { op.nloc1 },
2699 bc::CastInt {},
2700 bc::Int { 2 },
2701 bc::IsTypeL { op.nloc1, IsTypeOp::Null },
2702 bc::Select {}
2705 break;
2706 case MK::Dbl:
2707 // The double will be converted (losslessly) to an integer.
2708 if (inTy.subtypeOf(BDbl)) {
2709 return reduce(env, bc::CGetL { op.nloc1 }, bc::DblAsBits {});
2711 break;
2712 case MK::DblOrNull:
2713 // A nullable double. The key will be an integer, or the static empty
2714 // string.
2715 if (inTy.subtypeOrNull(BDbl)) {
2716 return reduce(
2717 env,
2718 bc::CGetL { op.nloc1 },
2719 bc::DblAsBits {},
2720 bc::String { staticEmptyString() },
2721 bc::IsTypeL { op.nloc1, IsTypeOp::Null },
2722 bc::Select {}
2725 break;
2726 case MK::Object:
2727 // An object. If the object is definitely known to implement IMemoizeParam
2728 // we can simply call that method, casting the output to ensure its always
2729 // a string (which is what the generic mode does). If not, it will use the
2730 // generic mode, which can handle collections or classes which don't
2731 // implement getInstanceKey.
2732 if (resolvedCls &&
2733 resolvedCls->mustBeSubtypeOf(rclsIMemoizeParam) &&
2734 inTy.subtypeOf(tyIMemoizeParam)) {
2735 return reduce(
2736 env,
2737 bc::CGetL { op.nloc1 },
2738 bc::NullUninit {},
2739 bc::NullUninit {},
2740 bc::FCallObjMethodD {
2741 FCallArgs(0),
2742 staticEmptyString(),
2743 ObjMethodOp::NullThrows,
2744 s_getInstanceKey.get()
2746 bc::CastString {}
2749 break;
2750 case MK::ObjectOrNull:
2751 // An object or null. We can use the null safe version of a function call
2752 // when invoking getInstanceKey and then select from the result of that,
2753 // or the integer 0. This might seem wasteful, but the JIT does a good job
2754 // inlining away the call in the null case.
2755 if (resolvedCls &&
2756 resolvedCls->mustBeSubtypeOf(rclsIMemoizeParam) &&
2757 inTy.subtypeOf(opt(tyIMemoizeParam))) {
2758 return reduce(
2759 env,
2760 bc::CGetL { op.nloc1 },
2761 bc::NullUninit {},
2762 bc::NullUninit {},
2763 bc::FCallObjMethodD {
2764 FCallArgs(0),
2765 staticEmptyString(),
2766 ObjMethodOp::NullSafe,
2767 s_getInstanceKey.get()
2769 bc::CastString {},
2770 bc::Int { 0 },
2771 bc::IsTypeL { op.nloc1, IsTypeOp::Null },
2772 bc::Select {}
2775 break;
2776 case MK::None:
2777 break;
2780 // No type constraint, or one that isn't usuable. Use the generic memoization
2781 // scheme which can handle any type:
2783 if (auto const val = tv(inTy)) {
2784 auto const key = eval_cell(
2785 [&]{ return HHVM_FN(serialize_memoize_param)(*val); }
2787 if (key) return push(env, *key);
2790 // Integer keys are always mapped to themselves
2791 if (inTy.subtypeOf(BInt)) return reduce(env, bc::CGetL { op.nloc1 });
2792 if (inTy.subtypeOrNull(BInt)) {
2793 return reduce(
2794 env,
2795 bc::CGetL { op.nloc1 },
2796 bc::String { s_nullMemoKey.get() },
2797 bc::IsTypeL { op.nloc1, IsTypeOp::Null },
2798 bc::Select {}
2801 if (inTy.subtypeOf(BBool)) {
2802 return reduce(
2803 env,
2804 bc::String { s_falseMemoKey.get() },
2805 bc::String { s_trueMemoKey.get() },
2806 bc::CGetL { op.nloc1 },
2807 bc::Select {}
2811 // A memo key can be an integer if the input might be an integer, and is a
2812 // string otherwise. Booleans and nulls are always static strings.
2813 auto keyTy = [&]{
2814 if (inTy.subtypeOrNull(BBool)) return TSStr;
2815 if (inTy.couldBe(BInt)) return union_of(TInt, TStr);
2816 return TStr;
2817 }();
2818 push(env, std::move(keyTy));
// IssetL: isset() on a local.  $this reduces to BareThis + null check;
// otherwise the answer is folded from the local's nullness when known.
2821 void in(ISS& env, const bc::IssetL& op) {
2822 if (locIsThis(env, op.loc1)) {
2823 return reduce(env,
2824 bc::BareThis { BareThisOp::NoNotice },
2825 bc::IsTypeC { IsTypeOp::Null },
2826 bc::Not {});
2828 nothrow(env);
2829 constprop(env);
2830 auto const loc = locAsCell(env, op.loc1);
2831 if (loc.subtypeOf(BNull)) return push(env, TFalse);
2832 if (!loc.couldBe(BNull)) return push(env, TTrue);
2833 push(env, TBool);
// IsUnsetL: true iff the local is uninit; folded when uninit-ness is
// statically known.
2836 void in(ISS& env, const bc::IsUnsetL& op) {
2837 nothrow(env);
2838 constprop(env);
2839 auto const loc = locAsCell(env, op.loc1);
2840 if (loc.subtypeOf(BUninit)) return push(env, TTrue);
2841 if (!loc.couldBe(BUninit)) return push(env, TFalse);
2842 push(env, TBool);
// IssetS: isset() on a static property.  Mirrors CGetS's two precise
// paths (private self prop, public static via the index) but folds to a
// boolean; LateInit props can only yield TBool since reads may throw.
2845 void in(ISS& env, const bc::IssetS& op) {
2846 auto const tcls = popC(env);
2847 auto const tname = popC(env);
2848 auto const vname = tv(tname);
2849 auto const self = selfCls(env);
2851 if (self && tcls.subtypeOf(*self) &&
2852 vname && vname->m_type == KindOfPersistentString) {
2853 if (auto const t = selfPropAsCell(env, vname->m_data.pstr)) {
2854 if (isMaybeLateInitSelfProp(env, vname->m_data.pstr)) {
2855 if (!classInitMightRaise(env, tcls)) constprop(env);
2856 return push(env, t->subtypeOf(BBottom) ? TFalse : TBool);
2858 if (t->subtypeOf(BNull)) {
2859 if (!classInitMightRaise(env, tcls)) constprop(env);
2860 return push(env, TFalse);
2862 if (!t->couldBe(BNull)) {
2863 if (!classInitMightRaise(env, tcls)) constprop(env);
2864 return push(env, TTrue);
2869 auto const indexTy = env.index.lookup_public_static(env.ctx, tcls, tname);
2870 if (indexTy.subtypeOf(BInitCell)) {
2871 // See the comments in CGetS about constprop for public statics.
2872 if (!classInitMightRaise(env, tcls)) {
2873 constprop(env);
2875 if (env.index.lookup_public_static_maybe_late_init(tcls, tname)) {
2876 return push(env, indexTy.subtypeOf(BBottom) ? TFalse : TBool);
2878 if (indexTy.subtypeOf(BNull)) { return push(env, TFalse); }
2879 if (!indexTy.couldBe(BNull)) { return push(env, TTrue); }
2882 push(env, TBool);
2885 void in(ISS& env, const bc::IssetG&) { popC(env); push(env, TBool); }
// Shared is-type result: fold to TTrue/TFalse when the subtype/couldBe
// relationship is decisive, else TBool.
2887 void isTypeImpl(ISS& env, const Type& locOrCell, const Type& test) {
2888 if (locOrCell.subtypeOf(test)) return push(env, TTrue);
2889 if (!locOrCell.couldBe(test)) return push(env, TFalse);
2890 push(env, TBool);
// is_object() check: like isTypeImpl but __PHP_Incomplete_Class
// instances are not considered objects, so they flip the answer.
2893 void isTypeObj(ISS& env, const Type& ty) {
2894 if (!ty.couldBe(BObj)) return push(env, TFalse);
2895 if (ty.subtypeOf(BObj)) {
2896 auto const incompl = objExact(
2897 env.index.builtin_class(s_PHP_Incomplete_Class.get()));
2898 if (!ty.couldBe(incompl)) return push(env, TTrue);
2899 if (ty.subtypeOf(incompl)) return push(env, TFalse);
2901 push(env, TBool);
// Transfer function for is-type checks on a local (IsTypeL).  Nothrow/
// constprop only when the local can't be uninit and the check itself
// can't raise for this type.
2904 template<class Op>
2905 void isTypeLImpl(ISS& env, const Op& op) {
2906 auto const loc = locAsCell(env, op.nloc1.id);
2907 if (!locCouldBeUninit(env, op.nloc1.id) &&
2908 !is_type_might_raise(op.subop2, loc)) {
2909 constprop(env);
2910 nothrow(env);
2913 switch (op.subop2) {
2914 case IsTypeOp::Scalar: return push(env, TBool);
2915 case IsTypeOp::Obj: return isTypeObj(env, loc);
2916 default: return isTypeImpl(env, loc, type_of_istype(op.subop2));
// Transfer function for is-type checks on the top of stack (IsTypeC).
2920 template<class Op>
2921 void isTypeCImpl(ISS& env, const Op& op) {
2922 auto const t1 = popC(env);
2923 if (!is_type_might_raise(op.subop1, t1)) {
2924 constprop(env);
2925 nothrow(env);
2928 switch (op.subop1) {
2929 case IsTypeOp::Scalar: return push(env, TBool);
2930 case IsTypeOp::Obj: return isTypeObj(env, t1);
2931 default: return isTypeImpl(env, t1, type_of_istype(op.subop1));
2935 void in(ISS& env, const bc::IsTypeC& op) { isTypeCImpl(env, op); }
2936 void in(ISS& env, const bc::IsTypeL& op) { isTypeLImpl(env, op); }
// InstanceOfD: instanceof against a statically-named class.  Folds to
// TTrue/TFalse when decisive; an optional type whose non-null part
// always passes reduces to a null check; otherwise TBool.
2938 void in(ISS& env, const bc::InstanceOfD& op) {
2939 auto t1 = topC(env);
2940 // Note: InstanceOfD can do autoload if the type might be a type
2941 // alias, so it's not nothrow unless we know it's an object type.
2942 if (auto const rcls = env.index.resolve_class(env.ctx, op.str1)) {
2943 auto result = [&] (const Type& r) {
2944 nothrow(env);
2945 if (r != TBool) constprop(env);
2946 popC(env);
2947 push(env, r);
// Interfaces supporting non-objects can't be reasoned about here.
2949 if (!interface_supports_non_objects(rcls->name())) {
2950 auto testTy = subObj(*rcls);
2951 if (t1.subtypeOf(testTy)) return result(TTrue);
2952 if (!t1.couldBe(testTy)) return result(TFalse);
2953 if (is_opt(t1)) {
2954 t1 = unopt(std::move(t1));
2955 if (t1.subtypeOf(testTy)) {
// ?T where T always passes: equivalent to !is_null.
2956 return reduce(env, bc::IsTypeC { IsTypeOp::Null }, bc::Not {});
2959 return result(TBool);
2962 popC(env);
2963 push(env, TBool);
// InstanceOf: instanceof with a dynamic class operand.  Reduces to
// InstanceOfD when the class is a known string or an exactly-known
// object type; otherwise pops both operands and yields TBool.
2966 void in(ISS& env, const bc::InstanceOf& /*op*/) {
2967 auto const t1 = topC(env);
2968 auto const v1 = tv(t1);
2969 if (v1 && v1->m_type == KindOfPersistentString) {
2970 return reduce(env, bc::PopC {},
2971 bc::InstanceOfD { v1->m_data.pstr });
2974 if (t1.subtypeOf(BObj) && is_specialized_obj(t1)) {
2975 auto const dobj = dobj_of(t1);
2976 switch (dobj.type) {
2977 case DObj::Sub:
2978 break;
2979 case DObj::Exact:
2980 return reduce(env, bc::PopC {},
2981 bc::InstanceOfD { dobj.cls.name() });
2985 popC(env);
2986 popC(env);
2987 push(env, TBool);
2990 namespace {
// Whether an IsTypeOp may be used when lowering an is/as type-structure
// check to a plain IsTypeC.  Only the primitive scalar/object checks are
// permitted; array-like, resource, class-like, etc. are excluded.
2992 bool isValidTypeOpForIsAs(const IsTypeOp& op) {
2993 switch (op) {
2994 case IsTypeOp::Null:
2995 case IsTypeOp::Bool:
2996 case IsTypeOp::Int:
2997 case IsTypeOp::Dbl:
2998 case IsTypeOp::Str:
2999 case IsTypeOp::Obj:
3000 return true;
3001 case IsTypeOp::Res:
3002 case IsTypeOp::Vec:
3003 case IsTypeOp::Dict:
3004 case IsTypeOp::Keyset:
3005 case IsTypeOp::VArray:
3006 case IsTypeOp::DArray:
3007 case IsTypeOp::ArrLike:
3008 case IsTypeOp::Scalar:
3009 case IsTypeOp::ClsMeth:
3010 case IsTypeOp::Func:
3011 case IsTypeOp::PHPArr:
3012 case IsTypeOp::Class:
3013 return false;
3015 not_reached();
// Core of IsTypeStructC: evaluate "value is <type structure>" given a
// statically-known TS array.  Tries static TS resolution first, then
// dispatches on the TS kind, either folding to TTrue/TFalse, reducing to
// cheaper bytecode (IsTypeC / InstanceOfD / IsLateBoundCls), or falling
// back to TBool.
3018 void isTypeStructImpl(ISS& env, SArray inputTS) {
3019 auto const resolvedTS = resolveTSStatically(env, inputTS, env.ctx.cls);
3020 auto const ts = resolvedTS ? resolvedTS : inputTS;
3021 auto const t = loosen_likeness(topC(env, 1)); // operand to is/as
3023 bool may_raise = true;
// Pop both operands and push the final answer.
3024 auto result = [&] (const Type& out) {
3025 popC(env); // type structure
3026 popC(env); // operand to is/as
3027 constprop(env);
3028 if (!may_raise) nothrow(env);
3029 return push(env, out);
// Check the operand against a candidate type (with an optional "deopt"
// type that prevents folding to false), lowering to IsTypeC when the
// corresponding IsTypeOp is permitted for is/as.
3032 auto check = [&] (
3033 const folly::Optional<Type> type,
3034 const folly::Optional<Type> deopt = folly::none
3036 if (!type || is_type_might_raise(*type, t)) return result(TBool);
3037 auto test = type.value();
3038 if (t.subtypeOf(test)) return result(TTrue);
3039 if (!t.couldBe(test) && (!deopt || !t.couldBe(deopt.value()))) {
3040 return result(TFalse);
3042 auto const op = type_to_istypeop(test);
3043 if (!op || !isValidTypeOpForIsAs(op.value())) return result(TBool);
3044 return reduce(env, bc::PopC {}, bc::IsTypeC { *op });
3047 auto const is_nullable_ts = is_ts_nullable(ts);
3048 auto const is_definitely_null = t.subtypeOf(BNull);
3049 auto const is_definitely_not_null = !t.couldBe(BNull);
3051 if (is_nullable_ts && is_definitely_null) return result(TTrue);
3053 auto const ts_type = type_of_type_structure(env.index, env.ctx, ts);
3055 if (is_nullable_ts && !is_definitely_not_null && ts_type == folly::none) {
3056 // Ts is nullable and we know that t could be null but we dont know for sure
3057 // Also we didn't get a type out of the type structure
3058 return result(TBool);
3061 if (ts_type && !is_type_might_raise(*ts_type, t)) may_raise = false;
3062 switch (get_ts_kind(ts)) {
3063 case TypeStructure::Kind::T_int:
3064 case TypeStructure::Kind::T_bool:
3065 case TypeStructure::Kind::T_float:
3066 case TypeStructure::Kind::T_string:
3067 case TypeStructure::Kind::T_num:
3068 case TypeStructure::Kind::T_arraykey:
3069 case TypeStructure::Kind::T_keyset:
3070 case TypeStructure::Kind::T_void:
3071 case TypeStructure::Kind::T_null:
3072 return check(ts_type);
// Tuples/shapes: a generic vec/dict may still pass, so don't fold to
// false if the operand could be one.
3073 case TypeStructure::Kind::T_tuple:
3074 return check(ts_type, TVec);
3075 case TypeStructure::Kind::T_shape:
3076 return check(ts_type, TDict);
3077 case TypeStructure::Kind::T_dict:
3078 return check(ts_type, TDArr);
3079 case TypeStructure::Kind::T_vec:
3080 return check(ts_type, TVArr);
3081 case TypeStructure::Kind::T_nothing:
3082 case TypeStructure::Kind::T_noreturn:
3083 return result(TFalse);
3084 case TypeStructure::Kind::T_mixed:
3085 case TypeStructure::Kind::T_dynamic:
3086 return result(TTrue);
3087 case TypeStructure::Kind::T_nonnull:
3088 if (is_definitely_null) return result(TFalse);
3089 if (is_definitely_not_null) return result(TTrue);
3090 return reduce(env,
3091 bc::PopC {},
3092 bc::IsTypeC { IsTypeOp::Null },
3093 bc::Not {});
3094 case TypeStructure::Kind::T_class:
3095 case TypeStructure::Kind::T_interface:
3096 case TypeStructure::Kind::T_xhp: {
3097 auto clsname = get_ts_classname(ts);
3098 auto const rcls = env.index.resolve_class(env.ctx, clsname);
3099 if (!rcls || !rcls->resolved() || (ts->exists(s_generic_types) &&
3100 (rcls->cls()->hasReifiedGenerics ||
3101 !isTSAllWildcards(ts)))) {
3102 // If it is a reified class or has non wildcard generics,
3103 // we need to bail
3104 return result(TBool);
3106 return reduce(env, bc::PopC {}, bc::InstanceOfD { clsname });
3108 case TypeStructure::Kind::T_unresolved: {
3109 auto classname = get_ts_classname(ts);
3110 auto const has_generics = ts->exists(s_generic_types);
3111 if (!has_generics && classname->isame(s_this.get())) {
3112 return reduce(env, bc::PopC {}, bc::IsLateBoundCls {});
3114 auto const rcls = env.index.resolve_class(env.ctx, classname);
3115 // We can only reduce to instance of if we know for sure that this class
3116 // can be resolved since instanceof undefined class does not throw
3117 if (!rcls || !rcls->resolved() || rcls->cls()->attrs & AttrEnum) {
3118 return result(TBool);
3120 if (has_generics &&
3121 (rcls->cls()->hasReifiedGenerics || !isTSAllWildcards(ts))) {
3122 // If it is a reified class or has non wildcard generics,
3123 // we need to bail
3124 return result(TBool);
3126 return reduce(env, bc::PopC {}, bc::InstanceOfD { rcls->name() });
3128 case TypeStructure::Kind::T_enum:
3129 case TypeStructure::Kind::T_resource:
3130 case TypeStructure::Kind::T_vec_or_dict:
3131 case TypeStructure::Kind::T_arraylike:
3132 // TODO(T29232862): implement
3133 return result(TBool);
3134 case TypeStructure::Kind::T_typeaccess:
3135 case TypeStructure::Kind::T_array:
3136 case TypeStructure::Kind::T_darray:
3137 case TypeStructure::Kind::T_varray:
3138 case TypeStructure::Kind::T_varray_or_darray:
3139 case TypeStructure::Kind::T_reifiedtype:
3140 return result(TBool);
3141 case TypeStructure::Kind::T_fun:
3142 case TypeStructure::Kind::T_typevar:
3143 case TypeStructure::Kind::T_trait:
3144 // We will error on these at the JIT
3145 return result(TBool);
3148 not_reached();
// Forward declaration: mutually recursive with canReduceToDontResolve below
// (type structures contain lists which contain type structures).
bool canReduceToDontResolveList(SArray tsList, bool checkArrays);
3153 bool canReduceToDontResolve(SArray ts, bool checkArrays) {
3154 auto const checkGenerics = [&](SArray arr) {
3155 if (!ts->exists(s_generic_types)) return true;
3156 return canReduceToDontResolveList(get_ts_generic_types(ts), true);
3158 switch (get_ts_kind(ts)) {
3159 case TypeStructure::Kind::T_int:
3160 case TypeStructure::Kind::T_bool:
3161 case TypeStructure::Kind::T_float:
3162 case TypeStructure::Kind::T_string:
3163 case TypeStructure::Kind::T_num:
3164 case TypeStructure::Kind::T_arraykey:
3165 case TypeStructure::Kind::T_void:
3166 case TypeStructure::Kind::T_null:
3167 case TypeStructure::Kind::T_nothing:
3168 case TypeStructure::Kind::T_noreturn:
3169 case TypeStructure::Kind::T_mixed:
3170 case TypeStructure::Kind::T_dynamic:
3171 case TypeStructure::Kind::T_nonnull:
3172 case TypeStructure::Kind::T_resource:
3173 return true;
3174 // Following have generic parameters that may need to be resolved
3175 case TypeStructure::Kind::T_dict:
3176 case TypeStructure::Kind::T_vec:
3177 case TypeStructure::Kind::T_keyset:
3178 case TypeStructure::Kind::T_vec_or_dict:
3179 case TypeStructure::Kind::T_arraylike:
3180 return !checkArrays || checkGenerics(ts);
3181 case TypeStructure::Kind::T_class:
3182 case TypeStructure::Kind::T_interface:
3183 case TypeStructure::Kind::T_xhp:
3184 case TypeStructure::Kind::T_enum:
3185 return isTSAllWildcards(ts) || checkGenerics(ts);
3186 case TypeStructure::Kind::T_tuple:
3187 return canReduceToDontResolveList(get_ts_elem_types(ts), checkArrays);
3188 case TypeStructure::Kind::T_fun: {
3189 auto const variadicType = get_ts_variadic_type_opt(ts);
3190 return canReduceToDontResolve(get_ts_return_type(ts), checkArrays)
3191 && canReduceToDontResolveList(get_ts_param_types(ts), checkArrays)
3192 && (!variadicType || canReduceToDontResolve(variadicType, checkArrays));
3194 case TypeStructure::Kind::T_shape:
3195 // We cannot skip resolution on shapes since shapes contain "value" field
3196 // which resolution removes.
3197 return false;
3198 // Following needs to be resolved
3199 case TypeStructure::Kind::T_unresolved:
3200 case TypeStructure::Kind::T_typeaccess:
3201 // Following cannot be used in is/as expressions, we need to error on them
3202 // Currently erroring happens as a part of the resolving phase,
3203 // so keep resolving them
3204 case TypeStructure::Kind::T_array:
3205 case TypeStructure::Kind::T_darray:
3206 case TypeStructure::Kind::T_varray:
3207 case TypeStructure::Kind::T_varray_or_darray:
3208 case TypeStructure::Kind::T_reifiedtype:
3209 case TypeStructure::Kind::T_typevar:
3210 case TypeStructure::Kind::T_trait:
3211 return false;
3213 not_reached();
3216 bool canReduceToDontResolveList(SArray tsList, bool checkArrays) {
3217 auto result = true;
3218 IterateV(
3219 tsList,
3220 [&](TypedValue v) {
3221 assertx(isArrayLikeType(v.m_type));
3222 result &= canReduceToDontResolve(v.m_data.parr, checkArrays);
3223 // when result is false, we can short circuit
3224 return !result;
3227 return result;
3230 } // namespace
3232 void in(ISS& env, const bc::IsLateBoundCls& op) {
3233 auto const cls = env.ctx.cls;
3234 if (cls && !(cls->attrs & AttrTrait)) effect_free(env);
3235 popC(env);
3236 return push(env, TBool);
// Abstract interp for IsTypeStructC: [ts, val] -> bool.
void in(ISS& env, const bc::IsTypeStructC& op) {
  // The type structure must be a darray (dict under HackArrDVArrs);
  // anything else can't be a valid TS, so this path is unreachable.
  auto const requiredTSType = RuntimeOption::EvalHackArrDVArrs ? BDict : BDArr;
  if (!topC(env).couldBe(requiredTSType)) {
    popC(env);
    popC(env);
    return unreachable(env);
  // Without a statically known, valid type structure all we know is that
  // the result is some bool.
  auto const a = tv(topC(env));
  if (!a || !isValidTSType(*a, false)) {
    popC(env);
    popC(env);
    return push(env, TBool);
  // If resolution would be a no-op, reduce to the cheaper DontResolve form.
  if (op.subop1 == TypeStructResolveOp::Resolve &&
      canReduceToDontResolve(a->m_data.parr, false)) {
    return reduce(env, bc::IsTypeStructC { TypeStructResolveOp::DontResolve });
  isTypeStructImpl(env, a->m_data.parr);
3259 void in(ISS& env, const bc::ThrowAsTypeStructException& op) {
3260 popC(env);
3261 popC(env);
3262 unreachable(env);
3263 return;
// Abstract interp for CombineAndResolveTypeStruct: pops arg1 type
// structures and pushes the combined, resolved result.
void in(ISS& env, const bc::CombineAndResolveTypeStruct& op) {
  assertx(op.arg1 > 0);
  auto valid = true;
  auto const requiredTSType = RuntimeOption::EvalHackArrDVArrs ? BDict : BDArr;
  auto const first = tv(topC(env));
  if (first && isValidTSType(*first, false)) {
    auto const ts = first->m_data.parr;
    // Optimize single input that does not need any combination
    if (op.arg1 == 1) {
      // Resolution is a no-op: drop the instruction entirely.
      if (canReduceToDontResolve(ts, true)) return reduce(env);
      // Statically resolvable: replace with the resolved constant array.
      if (auto const resolved = resolveTSStatically(env, ts, env.ctx.cls)) {
        return RuntimeOption::EvalHackArrDVArrs
          ? reduce(env, bc::PopC {}, bc::Dict { resolved })
          : reduce(env, bc::PopC {}, bc::Array { resolved });
    // Optimize double input that needs a single combination and looks of the
    // form ?T, @T or ~T
    if (op.arg1 == 2 && get_ts_kind(ts) == TypeStructure::Kind::T_reifiedtype) {
      // Re-apply whichever wrapper flags (like/nullable/soft) are present
      // as constant AddElemC ops on the inner type structure.
      BytecodeVec instrs { bc::PopC {} };
      auto const tv_true = gen_constant(make_tv<KindOfBoolean>(true));
      if (ts->exists(s_like.get())) {
        instrs.push_back(gen_constant(make_tv<KindOfString>(s_like.get())));
        instrs.push_back(tv_true);
        instrs.push_back(bc::AddElemC {});
      if (ts->exists(s_nullable.get())) {
        instrs.push_back(gen_constant(make_tv<KindOfString>(s_nullable.get())));
        instrs.push_back(tv_true);
        instrs.push_back(bc::AddElemC {});
      if (ts->exists(s_soft.get())) {
        instrs.push_back(gen_constant(make_tv<KindOfString>(s_soft.get())));
        instrs.push_back(tv_true);
        instrs.push_back(bc::AddElemC {});
      return reduce(env, std::move(instrs));
  // Generic case: every input must at least possibly be a TS array.
  for (int i = 0; i < op.arg1; ++i) {
    auto const t = popC(env);
    valid &= t.couldBe(requiredTSType);
  if (!valid) return unreachable(env);
  nothrow(env);
  push(env, Type{requiredTSType});
3315 void in(ISS& env, const bc::RecordReifiedGeneric& op) {
3316 // TODO(T31677864): implement real optimizations
3317 auto const t = popC(env);
3318 auto const required = RuntimeOption::EvalHackArrDVArrs ? BVec : BVArr;
3319 if (!t.couldBe(required)) return unreachable(env);
3320 if (t.subtypeOf(required)) nothrow(env);
3321 push(env, RuntimeOption::EvalHackArrDVArrs ? TSVec : TSVArr);
// Abstract interp for CheckReifiedGenericMismatch: consumes the generics;
// any mismatch is detected and reported at runtime.
void in(ISS& env, const bc::CheckReifiedGenericMismatch& op) {
  // TODO(T31677864): implement real optimizations
  popC(env);
3329 namespace {
3332 * If the value on the top of the stack is known to be equivalent to the local
3333 * its being moved/copied to, return folly::none without modifying any
3334 * state. Otherwise, pop the stack value, perform the set, and return a pair
3335 * giving the value's type, and any other local its known to be equivalent to.
template <typename Set>
folly::Optional<std::pair<Type, LocalId>> moveToLocImpl(ISS& env,
                                                        const Set& op) {
  // Peephole: CGetL2 <loc>; Concat; Set<loc> is just a ConcatEqual on loc.
  if (auto const prev = last_op(env, 1)) {
    if (prev->op == Op::CGetL2 &&
        prev->CGetL2.nloc1.id == op.loc1 &&
        last_op(env)->op == Op::Concat) {
      rewind(env, 2);
      reduce(env, bc::SetOpL { op.loc1, SetOpOp::ConcatEqual });
      return folly::none;
  auto equivLoc = topStkEquiv(env);
  // If the stack value is $this, rewrite the equivalence in terms of the
  // local (if any) already known to hold $this, so the local-equivalence
  // logic below applies uniformly.
  if (equivLoc == StackThisId && env.state.thisLoc != NoLocalId) {
    if (env.state.thisLoc == op.loc1 ||
        locsAreEquiv(env, env.state.thisLoc, op.loc1)) {
      return folly::none;
    } else {
      equivLoc = env.state.thisLoc;
  if (!is_volatile_local(env.ctx.func, op.loc1)) {
    if (equivLoc <= MaxLocalId) {
      if (equivLoc == op.loc1 ||
          locsAreEquiv(env, equivLoc, op.loc1)) {
        // We allow equivalency to ignore Uninit, so we need to check
        // the types here.
        if (peekLocRaw(env, op.loc1) == topC(env)) {
          return folly::none;
    } else if (equivLoc == NoLocalId) {
      equivLoc = op.loc1;
    if (!any(env.collect.opts & CollectionOpts::Speculating)) {
      effect_free(env);
  } else {
    // Volatile locals can't participate in equivalence tracking.
    equivLoc = NoLocalId;
  nothrow(env);
  auto val = popC(env);
  setLoc(env, op.loc1, val);
  if (equivLoc == StackThisId) {
    // $this wasn't tracked in any local before; it now lives in op.loc1.
    assertx(env.state.thisLoc == NoLocalId);
    equivLoc = env.state.thisLoc = op.loc1;
  if (equivLoc == StackDupId) {
    setStkLocal(env, op.loc1);
  } else if (equivLoc != op.loc1 && equivLoc != NoLocalId) {
    addLocEquiv(env, op.loc1, equivLoc);
  return { std::make_pair(std::move(val), equivLoc) };
3397 void in(ISS& env, const bc::PopL& op) {
3398 // If the same value is already in the local, do nothing but pop
3399 // it. Otherwise, the set has been done by moveToLocImpl.
3400 if (!moveToLocImpl(env, op)) return reduce(env, bc::PopC {});
3403 void in(ISS& env, const bc::SetL& op) {
3404 // If the same value is already in the local, do nothing because SetL keeps
3405 // the value on the stack. If it isn't, we need to push it back onto the stack
3406 // because moveToLocImpl popped it.
3407 if (auto p = moveToLocImpl(env, op)) {
3408 push(env, std::move(p->first), p->second);
3409 } else {
3410 reduce(env);
3414 void in(ISS& env, const bc::SetG&) {
3415 auto t1 = popC(env);
3416 popC(env);
3417 push(env, std::move(t1));
// Abstract interp for SetS: [name, cls, val] -> val (val on top).
void in(ISS& env, const bc::SetS& op) {
  auto const t1 = loosen_likeness(popC(env));
  auto const tcls = popC(env);
  auto const tname = popC(env);
  auto const vname = tv(tname);
  auto const self = selfCls(env);
  // If the merge can be skipped for a known const prop, this store is
  // treated as unreachable.
  if (vname && vname->m_type == KindOfPersistentString &&
      canSkipMergeOnConstProp(env, tcls, vname->m_data.pstr)) {
    unreachable(env);
    push(env, TBottom);
    return;
  // Record the mutation on this class's own (private) static props.
  if (!self || tcls.couldBe(*self)) {
    if (vname && vname->m_type == KindOfPersistentString) {
      mergeSelfProp(env, vname->m_data.pstr, t1);
    } else {
      // Unknown prop name: conservatively merge into every self prop.
      mergeEachSelfPropRaw(env, [&] (Type) { return t1; });
  // Record the mutation for whole-program public static prop analysis.
  env.collect.publicSPropMutations.merge(env.index, env.ctx, tcls, tname, t1);
  push(env, std::move(t1));
// Abstract interp for SetOpL ($loc op= rhs).
void in(ISS& env, const bc::SetOpL& op) {
  auto const t1 = popC(env);
  auto const v1 = tv(t1);
  auto const loc = locAsCell(env, op.loc1);
  auto const locVal = tv(loc);
  // If both sides are statically known, evaluate the set-op now.
  if (v1 && locVal) {
    // Can't constprop at this eval_cell, because of the effects on
    // locals.
    auto resultTy = eval_cell([&] {
      TypedValue c = *locVal;
      TypedValue rhs = *v1;
      setopBody(&c, op.subop2, &rhs);
      return c;
    if (!resultTy) resultTy = TInitCell;
    // We may have inferred a TSStr or TSArr with a value here, but
    // at runtime it will not be static. For now just throw that
    // away. TODO(#3696042): should be able to loosen_staticness here.
    if (resultTy->subtypeOf(BStr)) resultTy = TStr;
    else if (resultTy->subtypeOf(BArr)) resultTy = TArr;
    else if (resultTy->subtypeOf(BVec)) resultTy = TVec;
    else if (resultTy->subtypeOf(BDict)) resultTy = TDict;
    else if (resultTy->subtypeOf(BKeyset)) resultTy = TKeyset;
    setLoc(env, op.loc1, *resultTy);
    push(env, *resultTy);
    return;
  // Otherwise fall back to the type-level transfer function.
  auto resultTy = typeSetOp(op.subop2, loc, t1);
  setLoc(env, op.loc1, resultTy);
  push(env, std::move(resultTy));
3482 void in(ISS& env, const bc::SetOpG&) {
3483 popC(env); popC(env);
3484 push(env, TInitCell);
// Abstract interp for SetOpS (static prop op= rhs).
void in(ISS& env, const bc::SetOpS& op) {
  popC(env);
  auto const tcls = popC(env);
  auto const tname = popC(env);
  auto const vname = tv(tname);
  auto const self = selfCls(env);
  // If the merge can be skipped for a known const prop, this store is
  // treated as unreachable.
  if (vname && vname->m_type == KindOfPersistentString &&
      canSkipMergeOnConstProp(env, tcls, vname->m_data.pstr)) {
    unreachable(env);
    push(env, TBottom);
    return;
  // The set-op's result type isn't tracked, so merge TInitCell into the
  // named prop — or kill all self props when the name is unknown.
  if (!self || tcls.couldBe(*self)) {
    if (vname && vname->m_type == KindOfPersistentString) {
      mergeSelfProp(env, vname->m_data.pstr, TInitCell);
    } else {
      killSelfProps(env);
  env.collect.publicSPropMutations.merge(
    env.index, env.ctx, tcls, tname, TInitCell
  push(env, TInitCell);
// Abstract interp for IncDecL (pre/post ++/-- on a local).
void in(ISS& env, const bc::IncDecL& op) {
  auto loc = locAsCell(env, op.nloc1.id);
  auto newT = typeIncDec(op.subop2, loc);
  auto const pre = isPre(op.subop2);
  // If it's a non-numeric string, this may cause it to exceed the max length.
  if (!locCouldBeUninit(env, op.nloc1.id) &&
      !loc.couldBe(BStr)) {
    nothrow(env);
  // Post ops push the old value; pre ops push the new one.
  if (!pre) push(env, std::move(loc));
  setLoc(env, op.nloc1.id, newT);
  if (pre) push(env, std::move(newT));
3532 void in(ISS& env, const bc::IncDecG&) { popC(env); push(env, TInitCell); }
// Abstract interp for IncDecS (++/-- on a static prop).
void in(ISS& env, const bc::IncDecS& op) {
  auto const tcls = popC(env);
  auto const tname = popC(env);
  auto const vname = tv(tname);
  auto const self = selfCls(env);
  // If the merge can be skipped for a known const prop, this store is
  // treated as unreachable.
  if (vname && vname->m_type == KindOfPersistentString &&
      canSkipMergeOnConstProp(env, tcls, vname->m_data.pstr)) {
    unreachable(env);
    push(env, TBottom);
    return;
  // The result type isn't tracked: merge TInitCell into the named prop,
  // or kill all self props when the name is unknown.
  if (!self || tcls.couldBe(*self)) {
    if (vname && vname->m_type == KindOfPersistentString) {
      mergeSelfProp(env, vname->m_data.pstr, TInitCell);
    } else {
      killSelfProps(env);
  env.collect.publicSPropMutations.merge(
    env.index, env.ctx, tcls, tname, TInitCell
  push(env, TInitCell);
3562 void in(ISS& env, const bc::UnsetL& op) {
3563 if (locRaw(env, op.loc1).subtypeOf(TUninit)) {
3564 return reduce(env);
3566 if (any(env.collect.opts & CollectionOpts::Speculating)) {
3567 nothrow(env);
3568 } else {
3569 effect_free(env);
3571 setLocRaw(env, op.loc1, TUninit);
3574 void in(ISS& env, const bc::UnsetG& /*op*/) {
3575 auto const t1 = popC(env);
3576 if (!t1.couldBe(BObj | BRes)) nothrow(env);
3579 bool fcallCanSkipRepack(ISS& env, const FCallArgs& fca, const res::Func& func) {
3580 // Can't skip repack if potentially calling a function with too many args.
3581 if (fca.numArgs() > func.minNonVariadicParams()) return false;
3582 // Repack not needed if not unpacking and not having too many arguments.
3583 if (!fca.hasUnpack()) return true;
3584 // Can't skip repack if unpack args are in a wrong position.
3585 if (fca.numArgs() != func.maxNonVariadicParams()) return false;
3587 // Repack not needed if unpack args have the correct type.
3588 auto const unpackArgs = topC(env, fca.hasGenerics() ? 1 : 0);
3589 return unpackArgs.subtypeOf(RuntimeOption::EvalHackArrDVArrs ? BVec : BVArr);
// Tries to optimize away runtime checks encoded in FCallArgs (inout
// enforcement, async eager target, arg repacking). Returns true when the
// instruction was reduced and the caller should stop processing it.
template<class FCallWithFCA>
bool fcallOptimizeChecks(
  ISS& env,
  const FCallArgs& fca,
  const res::Func& func,
  FCallWithFCA fcallWithFCA
  auto const numOut = env.index.lookup_num_inout_params(env.ctx, func);
  if (fca.enforceInOut() && numOut == fca.numRets() - 1) {
    // Verify each argument's inout-ness against the callee's parameter
    // preparation kinds.
    bool match = true;
    for (auto i = 0; i < fca.numArgs(); ++i) {
      auto const kind = env.index.lookup_param_prep(env.ctx, func, i);
      if (kind == PrepKind::Unknown) {
        match = false;
        break;
      if (kind != (fca.isInOut(i) ? PrepKind::InOut : PrepKind::Val)) {
        // The function/method may not exist, in which case we should raise a
        // different error. Just defer the checks to the runtime.
        if (!func.exactFunc()) return false;
        // inout mismatch
        auto const exCls = makeStaticString("InvalidArgumentException");
        auto const err = makeStaticString(formatParamInOutMismatch(
          func.name()->data(), i, !fca.isInOut(i)));
        // Replace the call with construct-and-throw of the exception the
        // runtime would have raised.
        reduce(
          env,
          bc::NewObjD { exCls },
          bc::Dup {},
          bc::NullUninit {},
          bc::NullUninit {},
          bc::String { err },
          bc::FCallCtor { FCallArgs(1), staticEmptyString() },
          bc::PopC {},
          bc::LockObj {},
          bc::Throw {}
        return true;
    if (match) {
      // Optimize away the runtime inout-ness check.
      reduce(env, fcallWithFCA(fca.withoutInOut()));
      return true;
  // Infer whether the callee supports async eager return.
  if (fca.asyncEagerTarget() != NoBlockId) {
    auto const status = env.index.supports_async_eager_return(func);
    if (status && !*status) {
      reduce(env, fcallWithFCA(fca.withoutAsyncEagerTarget()));
      return true;
  // Drop the repack check when we can prove it is unnecessary.
  if (!fca.skipRepack() && fcallCanSkipRepack(env, fca, func)) {
    reduce(env, fcallWithFCA(fca.withoutRepack()));
    return true;
  return false;
// Attempts to constant-fold a call to an exactly-known callee. Returns true
// when the call was replaced by its (constant) result.
bool fcallTryFold(
  ISS& env,
  const FCallArgs& fca,
  const res::Func& func,
  Type context,
  bool maybeDynamic,
  uint32_t numExtraInputs
  auto const foldableFunc = func.exactFunc();
  if (!foldableFunc) return false;
  if (!shouldAttemptToFold(env, foldableFunc, fca, context, maybeDynamic)) {
    return false;
  assertx(!fca.hasUnpack() && !fca.hasGenerics() && fca.numRets() == 1);
  assertx(options.ConstantFoldBuiltins);
  auto tried_lookup = false;
  auto ty = [&] () {
    // Foldable builtins go through const_fold directly.
    if (foldableFunc->attrs & AttrBuiltin &&
        foldableFunc->attrs & AttrIsFoldable) {
      auto ret = const_fold(env, fca.numArgs(), numExtraInputs, *foldableFunc,
                            false);
      return ret ? *ret : TBottom;
    // Otherwise collect (scalarized) argument types and ask the index for
    // the foldable return type.
    CompactVector<Type> args(fca.numArgs());
    auto const firstArgPos = numExtraInputs + fca.numInputs() - 1;
    for (auto i = uint32_t{0}; i < fca.numArgs(); ++i) {
      auto const& arg = topT(env, firstArgPos - i);
      auto const isScalar = is_scalar(arg);
      if (!isScalar &&
          (env.index.func_depends_on_arg(foldableFunc, i) ||
           !arg.subtypeOf(BInitCell))) {
        return TBottom;
      args[i] = isScalar ? scalarize(arg) : arg;
    tried_lookup = true;
    return env.index.lookup_foldable_return_type(
      env.ctx, foldableFunc, context, std::move(args));
  }();
  // A constant result: replace the whole call with pops plus the constant.
  if (auto v = tv(ty)) {
    BytecodeVec repl;
    for (uint32_t i = 0; i < numExtraInputs; ++i) repl.push_back(bc::PopC {});
    for (uint32_t i = 0; i < fca.numArgs(); ++i) repl.push_back(bc::PopC {});
    repl.push_back(bc::PopU {});
    repl.push_back(bc::PopU {});
    // The callee slot may be an initialized cell or uninit.
    if (topT(env, fca.numArgs() + 2 + numExtraInputs).subtypeOf(TInitCell)) {
      repl.push_back(bc::PopC {});
    } else {
      assertx(topT(env, fca.numArgs() + 2 + numExtraInputs).subtypeOf(TUninit));
      repl.push_back(bc::PopU {});
    repl.push_back(gen_constant(*v));
    reduce(env, std::move(repl));
    return true;
  // Remember failed lookups so we don't retry them on this block.
  if (tried_lookup) {
    env.collect.unfoldableFuncs.emplace(foldableFunc, env.bid);
  return false;
3725 Type typeFromWH(Type t) {
3726 if (!t.couldBe(BObj)) {
3727 // Exceptions will be thrown if a non-object is awaited.
3728 return TBottom;
3731 // Throw away non-obj component.
3732 t &= TObj;
3734 // If we aren't even sure this is a wait handle, there's nothing we can
3735 // infer here.
3736 if (!is_specialized_wait_handle(t)) {
3737 return TInitCell;
3740 return wait_handle_inner(t);
// Pushes the return value(s) of a call whose return type is ty, handling
// multi-ret (inout) calls and async eager returns.
void pushCallReturnType(ISS& env, Type&& ty, const FCallArgs& fca) {
  if (ty == TBottom) {
    // The callee function never returns. It might throw, or loop forever.
    unreachable(env);
  auto const numRets = fca.numRets();
  if (numRets != 1) {
    assertx(fca.asyncEagerTarget() == NoBlockId);
    for (auto i = uint32_t{0}; i < numRets - 1; ++i) popU(env);
    // A specialized vec return lets us push each element; elements 1..n-1
    // go first so element 0 (the primary return) ends up on top.
    if (is_specialized_vec(ty)) {
      for (int32_t i = 1; i < numRets; i++) {
        push(env, vec_elem(ty, ival(i)).first);
      push(env, vec_elem(ty, ival(0)).first);
    } else {
      for (int32_t i = 0; i < numRets; i++) push(env, TInitCell);
    return;
  // Propagate the unpacked wait-handle type along the async eager edge.
  if (fca.asyncEagerTarget() != NoBlockId) {
    push(env, typeFromWH(ty));
    assertx(topC(env) != TBottom);
    env.propagate(fca.asyncEagerTarget(), &env.state);
    popC(env);
  return push(env, std::move(ty));
// Builtin names that get special treatment during call analysis (see
// fcallKnownImpl). NOTE(review): s_defined is not referenced in this chunk —
// presumably used elsewhere in the file; confirm before removing.
const StaticString s_defined { "defined" };
const StaticString s_function_exists { "function_exists" };
// Models a call to a known callee: computes the return type, applies
// call-related optimizations, and adjusts the stack.
template<class FCallWithFCA>
void fcallKnownImpl(
  ISS& env,
  const FCallArgs& fca,
  const res::Func& func,
  Type context,
  bool nullsafe,
  uint32_t numExtraInputs,
  FCallWithFCA fcallWithFCA
  auto const numArgs = fca.numArgs();
  auto returnType = [&] {
    CompactVector<Type> args(numArgs);
    auto const firstArgPos = numExtraInputs + fca.numInputs() - 1;
    for (auto i = uint32_t{0}; i < numArgs; ++i) {
      args[i] = topCV(env, firstArgPos - i);
    // With unpacking we can't use the per-arg context-sensitive lookup.
    auto ty = fca.hasUnpack()
      ? env.index.lookup_return_type(env.ctx, func)
      : env.index.lookup_return_type(env.ctx, args, context, func);
    // A nullsafe (?->) call on null yields null instead of invoking.
    if (nullsafe) {
      ty = union_of(std::move(ty), TInitNull);
    return ty;
  }();
  if (fca.asyncEagerTarget() != NoBlockId && typeFromWH(returnType) == TBottom) {
    // Kill the async eager target if the function never returns.
    reduce(env, fcallWithFCA(std::move(fca.withoutAsyncEagerTarget())));
    return;
  // function_exists() on a known name may let us refine state.
  if (func.name()->isame(s_function_exists.get()) &&
      (numArgs == 1 || numArgs == 2) &&
      !fca.hasUnpack() && !fca.hasGenerics()) {
    handle_function_exists(env, topT(env, numExtraInputs + numArgs - 1));
  // Pop all inputs (extra inputs, generics, unpack, args, uninits, callee).
  for (auto i = uint32_t{0}; i < numExtraInputs; ++i) popC(env);
  if (fca.hasGenerics()) popC(env);
  if (fca.hasUnpack()) popC(env);
  for (auto i = uint32_t{0}; i < numArgs; ++i) popCV(env);
  popU(env);
  popU(env);
  popCU(env);
  pushCallReturnType(env, std::move(returnType), fca);
// Models a call about which nothing is known: pops all inputs and pushes
// unconstrained results.
void fcallUnknownImpl(ISS& env, const FCallArgs& fca) {
  if (fca.hasGenerics()) popC(env);
  if (fca.hasUnpack()) popC(env);
  auto const numArgs = fca.numArgs();
  auto const numRets = fca.numRets();
  for (auto i = uint32_t{0}; i < numArgs; ++i) popCV(env);
  popU(env);
  popU(env);
  popCU(env);
  // Propagate an unknown awaited value along the async eager edge.
  if (fca.asyncEagerTarget() != NoBlockId) {
    assertx(numRets == 1);
    push(env, TInitCell);
    env.propagate(fca.asyncEagerTarget(), &env.state);
    popC(env);
  for (auto i = uint32_t{0}; i < numRets - 1; ++i) popU(env);
  for (auto i = uint32_t{0}; i < numRets; ++i) push(env, TInitCell);
// Abstract interp for FCallFuncD (call to a statically named function).
void in(ISS& env, const bc::FCallFuncD& op) {
  auto const rfunc = env.index.resolve_func(env.ctx, op.str2);
  if (op.fca.hasGenerics()) {
    // Generics must be a varray/vec; otherwise the call can't succeed.
    auto const tsList = topC(env);
    if (!tsList.couldBe(RuntimeOption::EvalHackArrDVArrs ? BVec : BVArr)) {
      return unreachable(env);
    // Callee can't use reified generics: drop them.
    if (!rfunc.couldHaveReifiedGenerics()) {
      return reduce(
        env,
        bc::PopC {},
        bc::FCallFuncD { op.fca.withoutGenerics(), op.str2 }
  auto const updateBC = [&] (FCallArgs fca) {
    return bc::FCallFuncD { std::move(fca), op.str2 };
  if (fcallOptimizeChecks(env, op.fca, rfunc, updateBC) ||
      fcallTryFold(env, op.fca, rfunc, TBottom, false, 0)) {
    return;
  // Known builtins may get specialized bytecode.
  if (auto const func = rfunc.exactFunc()) {
    if (can_emit_builtin(env, func, op.fca)) {
      return finish_builtin(env, func, op.fca);
  fcallKnownImpl(env, op.fca, rfunc, TBottom, false, 0, updateBC);
3878 namespace {
// Fallback for FCallFunc when the callable can't be analyzed: discard it
// and treat the call as completely opaque.
void fcallFuncUnknown(ISS& env, const bc::FCallFunc& op) {
  popC(env);
  fcallUnknownImpl(env, op.fca);
// FCallFunc on a ClsMeth callable.
void fcallFuncClsMeth(ISS& env, const bc::FCallFunc& op) {
  assertx(topC(env).subtypeOf(BClsMeth));
  // TODO: optimize me
  fcallFuncUnknown(env, op);
// FCallFunc on a func-pointer callable.
void fcallFuncFunc(ISS& env, const bc::FCallFunc& op) {
  assertx(topC(env).subtypeOf(BFunc));
  // TODO: optimize me
  fcallFuncUnknown(env, op);
// FCallFunc on an object callable (e.g. closure or __invoke).
void fcallFuncObj(ISS& env, const bc::FCallFunc& op) {
  assertx(topC(env).subtypeOf(BObj));
  // TODO: optimize me
  fcallFuncUnknown(env, op);
// FCallFunc on a string callable: try to resolve the name and reduce to
// the direct-call form.
void fcallFuncStr(ISS& env, const bc::FCallFunc& op) {
  assertx(topC(env).subtypeOf(BStr));
  auto funcName = getNameFromType(topC(env));
  if (!funcName) return fcallFuncUnknown(env, op);
  // Only plain, namespace-normalized function names can be resolved here.
  funcName = normalizeNS(funcName);
  if (!isNSNormalized(funcName) || !notClassMethodPair(funcName)) {
    return fcallFuncUnknown(env, op);
  auto const rfunc = env.index.resolve_func(env.ctx, funcName);
  // If dynamic-call semantics don't matter, call the function directly.
  if (!rfunc.mightCareAboutDynCalls()) {
    return reduce(env, bc::PopC {}, bc::FCallFuncD { op.fca, funcName });
  auto const updateBC = [&] (FCallArgs fca) {
    return bc::FCallFunc { std::move(fca) };
  if (fcallOptimizeChecks(env, op.fca, rfunc, updateBC)) return;
  fcallKnownImpl(env, op.fca, rfunc, TBottom, false, 1, updateBC);
3929 } // namespace
3931 void in(ISS& env, const bc::FCallFunc& op) {
3932 auto const callable = topC(env);
3933 if (callable.subtypeOf(BFunc)) return fcallFuncFunc(env, op);
3934 if (callable.subtypeOf(BClsMeth)) return fcallFuncClsMeth(env, op);
3935 if (callable.subtypeOf(BObj)) return fcallFuncObj(env, op);
3936 if (callable.subtypeOf(BStr)) return fcallFuncStr(env, op);
3937 fcallFuncUnknown(env, op);
// Abstract interp for ResolveFunc: pushes a func pointer; no specialization
// attempted here.
void in(ISS& env, const bc::ResolveFunc& op) {
  push(env, TFunc);
// Abstract interp for ResolveMethCaller: result is a func pointer.
void in(ISS& env, const bc::ResolveMethCaller& op) {
  // TODO (T29639296)
  push(env, TFunc);
// Abstract interp for ResolveRFunc: pops the reified generics; the result
// may be a Func or an RFunc.
void in(ISS& env, const bc::ResolveRFunc& op) {
  popC(env);
  push(env, TFuncLike);
3954 void in(ISS& env, const bc::ResolveObjMethod& op) {
3955 popC(env);
3956 popC(env);
3957 if (RuntimeOption::EvalHackArrDVArrs) {
3958 push(env, TVec);
3959 } else {
3960 push(env, TVArr);
3964 namespace {
3966 Type ctxCls(ISS& env) {
3967 auto const s = selfCls(env);
3968 return setctx(s ? *s : TCls);
// Maps a SpecialClsRef (static::/self::/parent::) to a class type in the
// current context; TCls when nothing better is known.
Type specialClsRefToCls(ISS& env, SpecialClsRef ref) {
  // Outside of a class none of the special refs resolve.
  if (!env.ctx.cls) return TCls;
  auto const op = [&]()-> folly::Optional<Type> {
    switch (ref) {
      case SpecialClsRef::Static: return ctxCls(env);
      case SpecialClsRef::Self: return selfClsExact(env);
      case SpecialClsRef::Parent: return parentClsExact(env);
    always_assert(false);
  }();
  return op ? *op : TCls;
// Shared implementation of ResolveClsMethodS / ResolveRClsMethodS.
// reifiedVersion indicates the R variant, which also consumes generics.
template<bool reifiedVersion = false>
void resolveClsMethodSImpl(ISS& env, SpecialClsRef ref, LSString meth_name) {
  auto const clsTy = specialClsRefToCls(env, ref);
  auto const rfunc = env.index.resolve_method(env.ctx, clsTy, meth_name);
  // Exactly known class without reified generics: reduce to the direct
  // (named) resolve op.
  if (is_specialized_cls(clsTy) && dcls_of(clsTy).type == DCls::Exact &&
      !rfunc.couldHaveReifiedGenerics()) {
    auto const clsName = dcls_of(clsTy).cls.name();
    return reduce(env, bc::ResolveClsMethodD { clsName, meth_name });
  if (reifiedVersion) popC(env);
  if (!reifiedVersion || !rfunc.couldHaveReifiedGenerics()) {
    push(env, TClsMeth);
  } else {
    push(env, TClsMethLike);
4001 } // namespace
// Abstract interp for ResolveClsMethod: pops the method name, pushes a
// ClsMeth.
void in(ISS& env, const bc::ResolveClsMethod& op) {
  popC(env);
  push(env, TClsMeth);
// Abstract interp for ResolveClsMethodD (statically named class/method).
void in(ISS& env, const bc::ResolveClsMethodD& op) {
  push(env, TClsMeth);
// Abstract interp for ResolveClsMethodS (static::/self::/parent:: form).
void in(ISS& env, const bc::ResolveClsMethodS& op) {
  resolveClsMethodSImpl<false>(env, op.subop1, op.str2);
// Abstract interp for ResolveRClsMethod: pops generics and method name;
// result may be a ClsMeth or RClsMeth.
void in(ISS& env, const bc::ResolveRClsMethod&) {
  popC(env);
  popC(env);
  push(env, TClsMethLike);
// Abstract interp for ResolveRClsMethodD: pops generics; result may be a
// ClsMeth or RClsMeth.
void in(ISS& env, const bc::ResolveRClsMethodD&) {
  popC(env);
  push(env, TClsMethLike);
// Abstract interp for ResolveRClsMethodS (reified static::/self::/parent::).
void in(ISS& env, const bc::ResolveRClsMethodS& op) {
  resolveClsMethodSImpl<true>(env, op.subop1, op.str2);
4031 void in(ISS& env, const bc::ResolveClass& op) {
4032 // TODO (T61651936)
4033 auto cls = env.index.resolve_class(env.ctx, op.str1);
4034 if (cls && cls->resolved()) {
4035 push(env, clsExact(*cls));
4036 } else {
4037 // If the class is not resolved,
4038 // it might not be unique or it might not be a valid classname.
4039 push(env, TArrKeyCompat);
// Abstract interp for LazyClass: pushes a lazy class reference.
void in(ISS& env, const bc::LazyClass&) {
  // TODO: T70712990: Specialize HHBBC types for lazy classes
  push(env, TLazyCls);
4048 namespace {
4050 Context getCallContext(const ISS& env, const FCallArgs& fca) {
4051 if (auto const name = fca.context()) {
4052 auto const rcls = env.index.resolve_class(env.ctx, name);
4053 if (rcls && rcls->cls()) {
4054 return Context { env.ctx.unit, env.ctx.func, rcls->cls() };
4056 return Context { env.ctx.unit, env.ctx.func, nullptr };
4058 return env.ctx;
// Replaces a nullsafe (?->) call on a known-null base with bytecode that
// discards every input and produces null.
void fcallObjMethodNullsafe(ISS& env, const FCallArgs& fca, bool extraInput) {
  BytecodeVec repl;
  if (extraInput) repl.push_back(bc::PopC {});
  if (fca.hasGenerics()) repl.push_back(bc::PopC {});
  if (fca.hasUnpack()) repl.push_back(bc::PopC {});
  auto const numArgs = fca.numArgs();
  for (uint32_t i = 0; i < numArgs; ++i) {
    assertx(topC(env, repl.size()).subtypeOf(BInitCell));
    repl.push_back(bc::PopC {});
  // Two uninit slots plus the (null) base.
  repl.push_back(bc::PopU {});
  repl.push_back(bc::PopU {});
  repl.push_back(bc::PopC {});
  // Extra return slots for inout calls, then the null result.
  auto const numRets = fca.numRets();
  for (uint32_t i = 0; i < numRets - 1; ++i) {
    repl.push_back(bc::PopU {});
  repl.push_back(bc::Null {});
  reduce(env, std::move(repl));
// Shared implementation for FCallObjMethod[D]: analyzes a method call on
// an object base, handling nullsafe calls and refining the base location.
template <typename Op, class UpdateBC>
void fcallObjMethodImpl(ISS& env, const Op& op, SString methName, bool dynamic,
                        bool extraInput, UpdateBC updateBC) {
  auto const nullThrows = op.subop3 == ObjMethodOp::NullThrows;
  auto const inputPos = op.fca.numInputs() + (extraInput ? 3 : 2);
  auto const input = topC(env, inputPos);
  auto const location = topStkEquiv(env, inputPos);
  auto const mayCallMethod = input.couldBe(BObj);
  auto const mayUseNullsafe = !nullThrows && input.couldBe(BNull);
  auto const mayThrowNonObj = !input.subtypeOf(nullThrows ? BObj : BOptObj);
  // After the call we can narrow the base's type at its location (object,
  // optionally null for ?->); if the intersection is empty the fallthrough
  // is unreachable.
  auto const refineLoc = [&] {
    if (location == NoLocalId) return;
    if (!refineLocation(env, location, [&] (Type t) {
      if (nullThrows) return intersection_of(t, TObj);
      if (!t.couldBe(BUninit)) return intersection_of(t, TOptObj);
      if (!t.couldBe(BObj)) return intersection_of(t, TNull);
      return t;
    })) {
      unreachable(env);
  auto const unknown = [&] {
    if (extraInput) popC(env);
    fcallUnknownImpl(env, op.fca);
    refineLoc();
  if (!mayCallMethod && !mayUseNullsafe) {
    // This FCallObjMethodD may only throw, make sure it's not optimized away.
    unknown();
    unreachable(env);
    return;
  if (!mayCallMethod && !mayThrowNonObj) {
    // Null input, this may only return null, so do that.
    return fcallObjMethodNullsafe(env, op.fca, extraInput);
  if (!mayCallMethod) {
    // May only return null, but can't fold as we may still throw.
    assertx(mayUseNullsafe && mayThrowNonObj);
    return unknown();
  auto const ctx = getCallContext(env, op.fca);
  auto const ctxTy = intersection_of(input, TObj);
  auto const clsTy = objcls(ctxTy);
  auto const rfunc = env.index.resolve_method(ctx, clsTy, methName);
  // Folding is only valid when the call can't take the nullsafe path or
  // throw on a non-object base.
  auto const canFold = !mayUseNullsafe && !mayThrowNonObj;
  if (fcallOptimizeChecks(env, op.fca, rfunc, updateBC) ||
      (canFold && fcallTryFold(env, op.fca, rfunc, ctxTy, dynamic,
                               extraInput ? 1 : 0))) {
    return;
  // Fill in the class hint when the callee is exactly known.
  if (rfunc.exactFunc() && op.str2->empty()) {
    return reduce(env, updateBC(op.fca, rfunc.exactFunc()->cls->name));
  fcallKnownImpl(env, op.fca, rfunc, ctxTy, mayUseNullsafe, extraInput ? 1 : 0,
                 updateBC);
  refineLoc();
4151 } // namespace
// Abstract interp for FCallObjMethodD (statically named method on object).
void in(ISS& env, const bc::FCallObjMethodD& op) {
  if (op.fca.hasGenerics()) {
    // Generics must be a varray/vec; otherwise the call can't succeed.
    auto const tsList = topC(env);
    if (!tsList.couldBe(RuntimeOption::EvalHackArrDVArrs ? BVec : BVArr)) {
      return unreachable(env);
    // Callee can't use reified generics: drop them.
    auto const input = topC(env, op.fca.numInputs() + 2);
    auto const clsTy = objcls(intersection_of(input, TObj));
    auto const rfunc = env.index.resolve_method(env.ctx, clsTy, op.str4);
    if (!rfunc.couldHaveReifiedGenerics()) {
      return reduce(
        env,
        bc::PopC {},
        bc::FCallObjMethodD {
          op.fca.withoutGenerics(), op.str2, op.subop3, op.str4 }
  auto const updateBC = [&] (FCallArgs fca, SString clsHint = nullptr) {
    if (!clsHint) clsHint = op.str2;
    return bc::FCallObjMethodD { std::move(fca), clsHint, op.subop3, op.str4 };
  fcallObjMethodImpl(env, op, op.str4, false, false, updateBC);
// FCallObjMethod: dynamic method call where the method name is on top of the
// stack. If the name is a known static string and the resolved method doesn't
// care about dynamic-call notices, rewrite to the cheaper FCallObjMethodD;
// otherwise fall through to fcallObjMethodImpl with dynamic=true and one
// extra stack input (the name).
4180 void in(ISS& env, const bc::FCallObjMethod& op) {
4181 auto const methName = getNameFromType(topC(env));
4182 if (!methName) {
// Unknown method name: pop it and treat the call conservatively.
4183 popC(env);
4184 fcallUnknownImpl(env, op.fca);
4185 return;
4188 auto const input = topC(env, op.fca.numInputs() + 3);
4189 auto const clsTy = objcls(intersection_of(input, TObj));
4190 auto const rfunc = env.index.resolve_method(env.ctx, clsTy, methName);
4191 if (!rfunc.mightCareAboutDynCalls()) {
4192 return reduce(
4193 env,
4194 bc::PopC {},
4195 bc::FCallObjMethodD { op.fca, op.str2, op.subop3, methName }
4199 auto const updateBC = [&] (FCallArgs fca, SString clsHint = nullptr) {
4200 if (!clsHint) clsHint = op.str2;
4201 return bc::FCallObjMethod { std::move(fca), clsHint, op.subop3 };
4203 fcallObjMethodImpl(env, op, methName, true, true, updateBC);
4206 namespace {
// Shared implementation for the FCallClsMethod* opcodes: resolve the method
// on clsTy, then try (in order) FCA check optimization, call folding, and
// setting the exact class hint, before falling back to the generic
// known-call handling.
4208 template <typename Op, class UpdateBC>
4209 void fcallClsMethodImpl(ISS& env, const Op& op, Type clsTy, SString methName,
4210 bool dynamic, uint32_t numExtraInputs,
4211 UpdateBC updateBC) {
4212 auto const ctx = getCallContext(env, op.fca);
4213 auto const rfunc = env.index.resolve_method(ctx, clsTy, methName);
4215 if (fcallOptimizeChecks(env, op.fca, rfunc, updateBC) ||
4216 fcallTryFold(env, op.fca, rfunc, clsTy, dynamic, numExtraInputs)) {
4217 return;
// We resolved the exact callee and there's no class hint yet: record it.
4220 if (rfunc.exactFunc() && op.str2->empty()) {
4221 return reduce(env, updateBC(op.fca, rfunc.exactFunc()->cls->name));
4224 fcallKnownImpl(env, op.fca, rfunc, clsTy, false /* nullsafe */,
4225 numExtraInputs, updateBC);
4228 } // namespace
// FCallClsMethodD: static-method call with class (op.str3) and method
// (op.str4) names both statically known. Drops an unusable generics list,
// tries the FCallBuiltin fast path for builtin statics, then delegates to
// fcallClsMethodImpl.
4230 void in(ISS& env, const bc::FCallClsMethodD& op) {
4231 auto const rcls = env.index.resolve_class(env.ctx, op.str3);
4232 auto const clsTy = rcls ? clsExact(*rcls) : TCls;
4233 auto const rfunc = env.index.resolve_method(env.ctx, clsTy, op.str4);
4235 if (op.fca.hasGenerics() && !rfunc.couldHaveReifiedGenerics()) {
4236 return reduce(
4237 env,
4238 bc::PopC {},
4239 bc::FCallClsMethodD {
4240 op.fca.withoutGenerics(), op.str2, op.str3, op.str4 }
4244 if (auto const func = rfunc.exactFunc()) {
4245 assertx(func->cls != nullptr);
4246 if (func->cls->name->same(op.str3) && can_emit_builtin(env, func, op.fca)) {
4247 // When we use FCallBuiltin to call a static method, the litstr method
4248 // name will be a fully qualified cls::fn (e.g. "HH\Map::fromItems").
4250 // As a result, we can only do this optimization if the name of the
4251 // builtin function's class matches this op's class name immediate.
4252 return finish_builtin(env, func, op.fca);
4256 auto const updateBC = [&] (FCallArgs fca, SString clsHint = nullptr) {
4257 if (!clsHint) clsHint = op.str2;
4258 return bc::FCallClsMethodD { std::move(fca), clsHint, op.str3, op.str4 };
4260 fcallClsMethodImpl(env, op, clsTy, op.str4, false, 0, updateBC);
// FCallClsMethod: fully dynamic static-method call — the class is on top of
// the stack and the method name just below it. When both resolve to exact
// values and dynamic-call logging is irrelevant, rewrite to FCallClsMethodD;
// otherwise go through fcallClsMethodImpl with two extra stack inputs.
4263 void in(ISS& env, const bc::FCallClsMethod& op) {
4264 auto const methName = getNameFromType(topC(env, 1));
4265 if (!methName) {
4266 popC(env);
4267 popC(env);
4268 fcallUnknownImpl(env, op.fca);
4269 return;
4272 auto const clsTy = topC(env);
4273 auto const rfunc = env.index.resolve_method(env.ctx, clsTy, methName);
// The subop can suppress dynamic-call logging; when the runtime option says
// known methods aren't logged either, the dyncall concern disappears.
4274 auto const skipLogAsDynamicCall =
4275 !RuntimeOption::EvalLogKnownMethodsAsDynamicCalls &&
4276 op.subop3 == IsLogAsDynamicCallOp::DontLogAsDynamicCall;
4277 if (is_specialized_cls(clsTy) && dcls_of(clsTy).type == DCls::Exact &&
4278 (!rfunc.mightCareAboutDynCalls() || skipLogAsDynamicCall)) {
4279 auto const clsName = dcls_of(clsTy).cls.name();
4280 return reduce(
4281 env,
4282 bc::PopC {},
4283 bc::PopC {},
4284 bc::FCallClsMethodD { op.fca, op.str2, clsName, methName }
4288 auto const updateBC = [&] (FCallArgs fca, SString clsHint = nullptr) {
4289 if (!clsHint) clsHint = op.str2;
4290 return bc::FCallClsMethod { std::move(fca), clsHint, op.subop3 };
4292 fcallClsMethodImpl(env, op, clsTy, methName, true, 2, updateBC);
4295 namespace {
// Shared implementation for FCallClsMethodS/SD (special class refs: Self,
// Parent, Static). A static::-call with an exactly known, non-dynamic class
// reduces to FCallClsMethodD; otherwise run the usual check/fold/hint
// pipeline against the current context class.
4297 template <typename Op, class UpdateBC>
4298 void fcallClsMethodSImpl(ISS& env, const Op& op, SString methName, bool dynamic,
4299 bool extraInput, UpdateBC updateBC) {
4300 auto const clsTy = specialClsRefToCls(env, op.subop3);
4301 if (is_specialized_cls(clsTy) && dcls_of(clsTy).type == DCls::Exact &&
4302 !dynamic && op.subop3 == SpecialClsRef::Static) {
4303 auto const clsName = dcls_of(clsTy).cls.name();
4304 reduce(env, bc::FCallClsMethodD { op.fca, op.str2, clsName, methName });
4305 return;
4308 auto const rfunc = env.index.resolve_method(env.ctx, clsTy, methName);
4310 if (fcallOptimizeChecks(env, op.fca, rfunc, updateBC) ||
4311 fcallTryFold(env, op.fca, rfunc, ctxCls(env), dynamic,
4312 extraInput ? 1 : 0)) {
4313 return;
4316 if (rfunc.exactFunc() && op.str2->empty()) {
4317 return reduce(env, updateBC(op.fca, rfunc.exactFunc()->cls->name));
4320 fcallKnownImpl(env, op.fca, rfunc, ctxCls(env), false /* nullsafe */,
4321 extraInput ? 1 : 0, updateBC);
4324 } // namespace
// FCallClsMethodSD: special-class-ref call with a statically known method
// name. Drop a generics list the callee cannot reify, then delegate to
// fcallClsMethodSImpl (non-dynamic, no extra input).
4326 void in(ISS& env, const bc::FCallClsMethodSD& op) {
4327 if (op.fca.hasGenerics()) {
4328 auto const clsTy = specialClsRefToCls(env, op.subop3);
4329 auto const rfunc = env.index.resolve_method(env.ctx, clsTy, op.str4);
4330 if (!rfunc.couldHaveReifiedGenerics()) {
4331 return reduce(
4332 env,
4333 bc::PopC {},
4334 bc::FCallClsMethodSD {
4335 op.fca.withoutGenerics(), op.str2, op.subop3, op.str4 }
4340 auto const updateBC = [&] (FCallArgs fca, SString clsHint = nullptr) {
4341 if (!clsHint) clsHint = op.str2;
4342 return bc::FCallClsMethodSD { std::move(fca), clsHint, op.subop3, op.str4 };
4344 fcallClsMethodSImpl(env, op, op.str4, false, false, updateBC);
// FCallClsMethodS: special-class-ref call with the method name on the stack.
// A known name that needs neither dynamic-call handling nor reified generics
// rewrites to FCallClsMethodSD; otherwise use fcallClsMethodSImpl with one
// extra input (the name).
4347 void in(ISS& env, const bc::FCallClsMethodS& op) {
4348 auto const methName = getNameFromType(topC(env));
4349 if (!methName) {
4350 popC(env);
4351 fcallUnknownImpl(env, op.fca);
4352 return;
4355 auto const clsTy = specialClsRefToCls(env, op.subop3);
4356 auto const rfunc = env.index.resolve_method(env.ctx, clsTy, methName);
4357 if (!rfunc.mightCareAboutDynCalls() && !rfunc.couldHaveReifiedGenerics()) {
4358 return reduce(
4359 env,
4360 bc::PopC {},
4361 bc::FCallClsMethodSD { op.fca, op.str2, op.subop3, methName }
4365 auto const updateBC = [&] (FCallArgs fca, SString clsHint = nullptr) {
4366 if (!clsHint) clsHint = op.str2;
4367 return bc::FCallClsMethodS { std::move(fca), clsHint, op.subop3 };
4369 fcallClsMethodSImpl(env, op, methName, true, true, updateBC);
4372 namespace {
// Shared implementation for NewObjD/NewObjRD: push the type of a new
// instance of the named class. `rflavor` means the reified flavor, which
// has a generics list on the stack to pop. An unresolvable class degrades
// to plain TObj.
4374 void newObjDImpl(ISS& env, const StringData* className, bool rflavor) {
4375 auto const rcls = env.index.resolve_class(env.ctx, className);
4376 if (!rcls) {
4377 if (rflavor) popC(env);
4378 push(env, TObj);
4379 return;
// Reified flavor but the class can't have reified generics: drop the list
// and use the plain NewObjD.
4381 if (rflavor && !rcls->couldHaveReifiedGenerics()) {
4382 return reduce(env, bc::PopC {}, bc::NewObjD { className });
// If the class can't be overridden and equals the current context's class,
// the result can be marked as the context class (setctx).
4384 auto const isCtx = !rcls->couldBeOverriden() && env.ctx.cls &&
4385 rcls->same(env.index.resolve_class(env.ctx.cls));
4386 if (rflavor) popC(env);
4387 push(env, setctx(objExact(*rcls), isCtx));
4390 } // namespace
4392 void in(ISS& env, const bc::NewObjD& op) { newObjDImpl(env, op.str1, false); }
4393 void in(ISS& env, const bc::NewObjRD& op) { newObjDImpl(env, op.str1, true); }
// NewObjS: instantiate via a special class ref (self/parent/static). When
// the class is exact, has no reified generics, and either can't be
// overridden or carries no extra context refinement, rewrite to NewObjD.
4395 void in(ISS& env, const bc::NewObjS& op) {
4396 auto const cls = specialClsRefToCls(env, op.subop1);
4397 if (!is_specialized_cls(cls)) {
4398 push(env, TObj);
4399 return;
4402 auto const dcls = dcls_of(cls);
4403 auto const exact = dcls.type == DCls::Exact;
4404 if (exact && !dcls.cls.couldHaveReifiedGenerics() &&
4405 (!dcls.cls.couldBeOverriden() || equivalently_refined(cls, unctx(cls)))) {
4406 return reduce(env, bc::NewObjD { dcls.cls.name() });
4409 push(env, toobj(cls));
// NewObj: instantiate the class whose classname/class is on top of the
// stack. An exact class that ignores dynamic-construct notices reduces to
// NewObjD; otherwise pop the class and push the corresponding object type.
4412 void in(ISS& env, const bc::NewObj& op) {
4413 auto const cls = topC(env);
4414 if (!is_specialized_cls(cls)) {
4415 popC(env);
4416 push(env, TObj);
4417 return;
4420 auto const dcls = dcls_of(cls);
4421 auto const exact = dcls.type == DCls::Exact;
4422 if (exact && !dcls.cls.mightCareAboutDynConstructs()) {
4423 return reduce(
4424 env,
4425 bc::PopC {},
4426 bc::NewObjD { dcls.cls.name() }
4430 popC(env);
4431 push(env, toobj(cls));
// NewObjR: reified NewObj — generics list on top, class beneath it. Null
// generics, or an exact class that can't have reified generics, degrade to
// plain NewObj (dropping the generics).
4434 void in(ISS& env, const bc::NewObjR& op) {
4435 auto const generics = topC(env);
4436 auto const cls = topC(env, 1);
4438 if (generics.subtypeOf(BInitNull)) {
4439 return reduce(
4440 env,
4441 bc::PopC {},
4442 bc::NewObj {}
4446 if (!is_specialized_cls(cls)) {
4447 popC(env);
4448 popC(env);
4449 push(env, TObj);
4450 return;
4453 auto const dcls = dcls_of(cls);
4454 auto const exact = dcls.type == DCls::Exact;
4455 if (exact && !dcls.cls.couldHaveReifiedGenerics()) {
4456 return reduce(
4457 env,
4458 bc::PopC {},
4459 bc::NewObj {}
4463 popC(env);
4464 popC(env);
4465 push(env, toobj(cls));
4468 namespace {
// Whether an object of type t could have const properties — exact classes
// check only themselves, Sub classes must also consider derived classes.
// Used to decide if LockObj / FCallCtor's lock-while-unwinding matter.
4470 bool objMightHaveConstProps(const Type& t) {
4471 assertx(t.subtypeOf(BObj));
4472 assertx(is_specialized_obj(t));
4473 auto const dobj = dobj_of(t);
4474 switch (dobj.type) {
4475 case DObj::Exact:
4476 return dobj.cls.couldHaveConstProp();
4477 case DObj::Sub:
4478 return dobj.cls.derivedCouldHaveConstProp();
4480 not_reached();
// FCallCtor: invoke the constructor of the object deep in the stack. Drops
// the lock-while-unwinding flag when const props are impossible, resolves
// the ctor, and runs the usual optimize/fold/hint pipeline. Folding is only
// safe if the receiver is definitely an object.
4485 void in(ISS& env, const bc::FCallCtor& op) {
4486 auto const obj = topC(env, op.fca.numInputs() + 2);
4487 assertx(op.fca.numRets() == 1);
4489 if (!is_specialized_obj(obj)) {
4490 return fcallUnknownImpl(env, op.fca);
4493 if (op.fca.lockWhileUnwinding() && !objMightHaveConstProps(obj)) {
4494 return reduce(
4495 env, bc::FCallCtor { op.fca.withoutLockWhileUnwinding(), op.str2 }
4499 auto const dobj = dobj_of(obj);
4500 auto const exact = dobj.type == DObj::Exact;
4501 auto const rfunc = env.index.resolve_ctor(env.ctx, dobj.cls, exact);
4502 if (!rfunc) {
4503 return fcallUnknownImpl(env, op.fca);
4506 auto const updateFCA = [&] (FCallArgs&& fca) {
4507 return bc::FCallCtor { std::move(fca), op.str2 };
4510 auto const canFold = obj.subtypeOf(BObj);
4511 if (fcallOptimizeChecks(env, op.fca, *rfunc, updateFCA) ||
4512 (canFold && fcallTryFold(env, op.fca, *rfunc,
4513 obj, false /* dynamic */, 0))) {
4514 return;
4517 if (rfunc->exactFunc() && op.str2->empty()) {
4518 // We've found the exact func that will be called, set the hint.
4519 return reduce(env, bc::FCallCtor { op.fca, rfunc->exactFunc()->cls->name });
4522 fcallKnownImpl(env, op.fca, *rfunc, obj, false /* nullsafe */, 0,
4523 updateFCA);
// LockObj: locks the object on top of the stack (leaving it there). If the
// object definitely has no const properties the op is a no-op and reduces
// away; if it might, we can still mark it nothrow for a known object type.
4526 void in(ISS& env, const bc::LockObj& op) {
4527 auto const t = topC(env);
4528 auto bail = [&]() {
4529 discard(env, 1);
4530 return push(env, t);
4532 if (!t.subtypeOf(BObj)) return bail();
4533 if (!is_specialized_obj(t) || objMightHaveConstProps(t)) {
4534 nothrow(env);
4535 return bail();
4537 reduce(env);
4540 namespace {
4542 // baseLoc is NoLocalId for non-local iterators.
// Shared implementation for IterInit/LIterInit: compute the element types
// the iteration can produce (iter_types), register the live iterator, set
// the value/key locals, and decide branch behavior from the base's
// possible element count (always-empty, never-empty, or unknown).
4543 void iterInitImpl(ISS& env, IterArgs ita, BlockId target, LocalId baseLoc) {
4544 auto const local = baseLoc != NoLocalId;
4545 auto const sourceLoc = local ? baseLoc : topStkLocal(env);
4546 auto const base = local ? locAsCell(env, baseLoc) : topC(env);
4547 auto ity = iter_types(base);
4549 auto const fallthrough = [&] {
4550 auto const baseCannotBeObject = !base.couldBe(BObj);
4551 setIter(env, ita.iterId, LiveIter { ity, sourceLoc, NoLocalId, env.bid,
4552 false, baseCannotBeObject });
4553 // Do this after setting the iterator, in case it clobbers the base local
4554 // equivalency.
4555 setLoc(env, ita.valId, std::move(ity.value));
4556 if (ita.hasKey()) {
4557 setLoc(env, ita.keyId, std::move(ity.key));
4558 setIterKey(env, ita.iterId, ita.keyId);
4562 assert(iterIsDead(env, ita.iterId));
// Definitely-empty bases that can't throw on init reduce to just the
// branch (popping the base first for stack iterators).
4564 if (!ity.mayThrowOnInit) {
4565 if (ity.count == IterTypes::Count::Empty && will_reduce(env)) {
4566 if (local) {
4567 reduce(env);
4568 } else {
4569 reduce(env, bc::PopC{});
4571 return jmp_setdest(env, target);
4573 nothrow(env);
4576 if (!local) popC(env);
4578 switch (ity.count) {
4579 case IterTypes::Count::Empty:
4580 mayReadLocal(env, ita.valId);
4581 if (ita.hasKey()) mayReadLocal(env, ita.keyId);
4582 jmp_setdest(env, target);
4583 return;
4584 case IterTypes::Count::Single:
4585 case IterTypes::Count::NonEmpty:
4586 fallthrough();
4587 return jmp_nevertaken(env);
4588 case IterTypes::Count::ZeroOrOne:
4589 case IterTypes::Count::Any:
4590 // Take the branch before setting locals if the iter is already
4591 // empty, but after popping. Similar for the other IterInits
4592 // below.
4593 env.propagate(target, &env.state);
4594 fallthrough();
4595 return;
4597 always_assert(false);
4600 // baseLoc is NoLocalId for non-local iterators.
// Shared implementation for IterNext/LIterNext: from the tracked iterator
// state decide whether the back-edge can be taken and whether stepping may
// throw; a provably-finished, nothrow step reduces to just freeing the
// iterator.
4601 void iterNextImpl(ISS& env, IterArgs ita, BlockId target, LocalId baseLoc) {
// Snapshot the current value/key local types so they can be restored on the
// not-taken (loop-exit) path below.
4602 auto const curVal = peekLocRaw(env, ita.valId);
4603 auto const curKey = ita.hasKey() ? peekLocRaw(env, ita.keyId) : TBottom;
4605 auto noThrow = false;
4606 auto const noTaken = match<bool>(
4607 env.state.iters[ita.iterId],
4608 [&] (DeadIter) {
4609 always_assert(false && "IterNext on dead iter");
4610 return false;
4612 [&] (const LiveIter& ti) {
4613 if (!ti.types.mayThrowOnNext) noThrow = true;
4614 if (ti.baseLocal != NoLocalId) hasInvariantIterBase(env);
4615 switch (ti.types.count) {
4616 case IterTypes::Count::Single:
4617 case IterTypes::Count::ZeroOrOne:
4618 return true;
4619 case IterTypes::Count::NonEmpty:
4620 case IterTypes::Count::Any:
4621 setLoc(env, ita.valId, ti.types.value);
4622 if (ita.hasKey()) {
4623 setLoc(env, ita.keyId, ti.types.key);
4624 setIterKey(env, ita.iterId, ita.keyId);
4626 return false;
4627 case IterTypes::Count::Empty:
4628 always_assert(false);
4630 not_reached();
4634 if (noTaken && noThrow && will_reduce(env)) {
4635 auto const iterId = safe_cast<IterId>(ita.iterId);
4636 return baseLoc == NoLocalId
4637 ? reduce(env, bc::IterFree { iterId })
4638 : reduce(env, bc::LIterFree { iterId, baseLoc });
4641 mayReadLocal(env, baseLoc);
4642 mayReadLocal(env, ita.valId);
4643 if (ita.hasKey()) mayReadLocal(env, ita.keyId);
4645 if (noThrow) nothrow(env);
4647 if (noTaken) {
4648 jmp_nevertaken(env);
4649 freeIter(env, ita.iterId);
4650 return;
4653 env.propagate(target, &env.state);
// Fall-through (iteration finished): the iterator dies and the value/key
// locals revert to their pre-step types.
4655 freeIter(env, ita.iterId);
4656 setLocRaw(env, ita.valId, curVal);
4657 if (ita.hasKey()) setLocRaw(env, ita.keyId, curKey);
// IterInit: stack-based iterator init (no base local).
4662 void in(ISS& env, const bc::IterInit& op) {
4663 iterInitImpl(env, op.ita, op.target2, NoLocalId);
// LIterInit: local-based iterator init; base stays in local op.loc2.
4666 void in(ISS& env, const bc::LIterInit& op) {
4667 iterInitImpl(env, op.ita, op.target3, op.loc2);
// IterNext: advance a stack-based iterator.
4670 void in(ISS& env, const bc::IterNext& op) {
4671 iterNextImpl(env, op.ita, op.target2, NoLocalId);
// LIterNext: advance a local-based iterator over local op.loc2.
4674 void in(ISS& env, const bc::LIterNext& op) {
4675 iterNextImpl(env, op.ita, op.target3, op.loc2);
// IterFree: release an iterator. If it is already dead it's a no-op and can
// reduce away; a live iterator with a tracked base local keeps its
// base-invariance flag before being freed.
4678 void in(ISS& env, const bc::IterFree& op) {
4679 // IterFree is used for weak iterators too, so we can't assert !iterIsDead.
4680 auto const isNop = match<bool>(
4681 env.state.iters[op.iter1],
4682 [] (DeadIter) {
4683 return true;
4685 [&] (const LiveIter& ti) {
4686 if (ti.baseLocal != NoLocalId) hasInvariantIterBase(env);
4687 return false;
4691 if (isNop && will_reduce(env)) return reduce(env);
4693 nothrow(env);
4694 freeIter(env, op.iter1);
// LIterFree: release a local-based iterator; never throws, and only reads
// the base local.
4697 void in(ISS& env, const bc::LIterFree& op) {
4698 nothrow(env);
4699 mayReadLocal(env, op.loc2);
4700 freeIter(env, op.iter1);
4704 * Any include/require (or eval) op kills all locals, and private properties.
// Conservative handler: the included/evaled code could do almost anything,
// so invalidate all tracked local and private-property state and push an
// unknown value.
4706 void inclOpImpl(ISS& env) {
4707 popC(env);
4708 killLocals(env);
4709 killThisProps(env);
4710 killSelfProps(env);
4711 push(env, TInitCell);
// All include/require/eval flavors share the same conservative handling.
4714 void in(ISS& env, const bc::Incl&) { inclOpImpl(env); }
4715 void in(ISS& env, const bc::InclOnce&) { inclOpImpl(env); }
4716 void in(ISS& env, const bc::Req&) { inclOpImpl(env); }
4717 void in(ISS& env, const bc::ReqOnce&) { inclOpImpl(env); }
4718 void in(ISS& env, const bc::ReqDoc&) { inclOpImpl(env); }
4719 void in(ISS& env, const bc::Eval&) { inclOpImpl(env); }
// This: push $this. When $this is known to be available this is equivalent
// to BareThis NeverNull; otherwise push the non-null this type, record that
// $this is now known available, and mark unreachable if it can't exist.
4721 void in(ISS& env, const bc::This&) {
4722 if (thisAvailable(env)) {
4723 return reduce(env, bc::BareThis { BareThisOp::NeverNull });
4725 auto const ty = thisTypeNonNull(env);
4726 push(env, ty, StackThisId);
4727 setThisAvailable(env);
4728 if (ty.subtypeOf(BBottom)) unreachable(env);
// LateBoundCls: push the late-static-bound class. Effect-free inside a
// class context; the self class (with context flag) is the best bound we
// have for "static".
4731 void in(ISS& env, const bc::LateBoundCls& op) {
4732 if (env.ctx.cls) effect_free(env);
4733 auto const ty = selfCls(env);
4734 push(env, setctx(ty ? *ty : TCls));
// CheckThis: no-op when $this is already known available; otherwise it may
// throw, and on success $this is available from here on.
4737 void in(ISS& env, const bc::CheckThis&) {
4738 if (thisAvailable(env)) {
4739 return reduce(env);
4741 setThisAvailable(env);
// BareThis: push $this (possibly null). When $this is known available, any
// subop can be strengthened to NeverNull. Notice may raise; NoNotice is
// effect-free; NeverNull asserts availability going forward.
4744 void in(ISS& env, const bc::BareThis& op) {
4745 if (thisAvailable(env)) {
4746 if (op.subop1 != BareThisOp::NeverNull) {
4747 return reduce(env, bc::BareThis { BareThisOp::NeverNull });
4751 auto const ty = thisType(env);
4752 switch (op.subop1) {
4753 case BareThisOp::Notice:
4754 break;
4755 case BareThisOp::NoNotice:
4756 effect_free(env);
4757 break;
4758 case BareThisOp::NeverNull:
4759 setThisAvailable(env);
4760 if (!env.state.unreachable) effect_free(env);
4761 return push(env, ty, StackThisId);
4764 push(env, ty, StackThisId);
4768 * Amongst other things, we use this to mark units non-persistent.
// OODeclExists: class_exists/interface_exists/trait_exists. With a known,
// resolvable name we may be able to answer statically; constant-propagation
// is only safe when autoload can't change the answer (systemlib or a
// superclass of the current context).
4770 void in(ISS& env, const bc::OODeclExists& op) {
4771 auto flag = popC(env);
4772 auto name = popC(env);
4773 push(env, [&] {
4774 if (!name.strictSubtypeOf(TStr)) return TBool;
4775 auto const v = tv(name);
4776 if (!v) return TBool;
4777 auto rcls = env.index.resolve_class(env.ctx, v->m_data.pstr);
4778 if (!rcls || !rcls->cls()) return TBool;
// Whether the resolved decl matches the kind being queried.
4779 auto const mayExist = [&] () -> bool {
4780 switch (op.subop1) {
4781 case OODeclExistsOp::Class:
4782 return !(rcls->cls()->attrs & (AttrInterface | AttrTrait));
4783 case OODeclExistsOp::Interface:
4784 return rcls->cls()->attrs & AttrInterface;
4785 case OODeclExistsOp::Trait:
4786 return rcls->cls()->attrs & AttrTrait;
4788 not_reached();
4789 }();
4790 auto unit = rcls->cls()->unit;
4791 auto canConstProp = [&] {
4792 // Its generally not safe to constprop this, because of
4793 // autoload. We're safe if its part of systemlib, or a
4794 // superclass of the current context.
4795 if (is_systemlib_part(*unit)) return true;
4796 if (!env.ctx.cls) return false;
4797 auto thisClass = env.index.resolve_class(env.ctx.cls);
4798 return thisClass.mustBeSubtypeOf(*rcls);
4800 if (canConstProp()) {
4801 constprop(env);
4802 return mayExist ? TTrue : TFalse;
4804 // At this point, if it mayExist, we still don't know that it
4805 // *does* exist, but if not we know that it either doesn't
4806 // exist, or it doesn't have the right type.
4807 return mayExist ? TBool : TFalse;
4808 } ());
4811 namespace {
// Whether the class/object type t could be replaced by a mock at runtime,
// in which case type-constraint-based deductions must be weakened.
4812 bool couldBeMocked(const Type& t) {
4813 if (is_specialized_cls(t)) {
4814 return dcls_of(t).cls.couldBeMocked();
4815 } else if (is_specialized_obj(t)) {
4816 return dobj_of(t).cls.couldBeMocked();
4818 // In practice this should not occur since this is used mostly on the result
4819 // of looked up type constraints.
4820 return true;
4824 using TCVec = std::vector<const TypeConstraint*>;
// VerifyParamType: enforce a parameter's type constraint (plus any upper
// bounds). Reduces away when the local already satisfies every constraint;
// otherwise, assuming the op doesn't throw, the local can be refined to the
// intersection of the constraint types.
4826 void in(ISS& env, const bc::VerifyParamType& op) {
4827 IgnoreUsedParams _{env};
4829 if (env.ctx.func->isMemoizeImpl) {
4830 // a MemoizeImpl's params have already been checked by the wrapper
4831 return reduce(env);
4834 auto const& pinfo = env.ctx.func->params[op.loc1];
4835 // Generally we won't know anything about the params, but
4836 // analyze_func_inline does - and this can help with effect-free analysis
4837 TCVec tcs = {&pinfo.typeConstraint};
4838 for (auto const& t : pinfo.upperBounds) tcs.push_back(&t);
4839 if (std::all_of(std::begin(tcs), std::end(tcs),
4840 [&](const TypeConstraint* tc) {
4841 return env.index.satisfies_constraint(env.ctx,
4842 locAsCell(env, op.loc1),
4843 *tc);
4844 })) {
// Cls values may be implicitly converted (see verifyRetImpl's TCls->TStr
// note), so don't reduce if the local could be a class.
4845 if (!locAsCell(env, op.loc1).couldBe(BCls)) {
4846 return reduce(env);
4851 * We assume that if this opcode doesn't throw, the parameter was of the
4852 * specified type.
4854 Type tcT;
4855 for (auto const& constraint : tcs) {
4856 if (constraint->hasConstraint() && !constraint->isTypeVar() &&
4857 !constraint->isTypeConstant()) {
4858 auto t = env.index.lookup_constraint(env.ctx, *constraint);
4859 if (constraint->isThis() && couldBeMocked(t)) {
4860 t = unctx(std::move(t));
4862 FTRACE(2, " {} ({})\n", constraint->fullName(), show(t));
4863 tcT = intersection_of(std::move(tcT), std::move(t));
4864 if (tcT.subtypeOf(BBottom)) unreachable(env);
4867 if (tcT != TTop) setLoc(env, op.loc1, std::move(tcT));
// VerifyParamTypeTS: reified-generics parameter check with a runtime type
// structure on the stack. When no reified types can be involved, degrade to
// plain VerifyParamType; when the type structure is a known constant, try
// to resolve it statically and re-emit with the resolved structure.
4870 void in(ISS& env, const bc::VerifyParamTypeTS& op) {
4871 auto const a = topC(env);
4872 auto const requiredTSType = RuntimeOption::EvalHackArrDVArrs ? BDict : BDArr;
4873 if (!a.couldBe(requiredTSType)) {
4874 unreachable(env);
4875 popC(env);
4876 return;
4878 auto const constraint = env.ctx.func->params[op.loc1].typeConstraint;
4879 // TODO(T31677864): We are being extremely pessimistic here, relax it
4880 if (!env.ctx.func->isReified &&
4881 (!env.ctx.cls || !env.ctx.cls->hasReifiedGenerics) &&
4882 !env.index.could_have_reified_type(env.ctx, constraint)) {
4883 return reduce(env, bc::PopC {}, bc::VerifyParamType { op.loc1 });
4886 if (auto const inputTS = tv(a)) {
4887 if (!isValidTSType(*inputTS, false)) {
4888 unreachable(env);
4889 popC(env);
4890 return;
4892 auto const resolvedTS =
4893 resolveTSStatically(env, inputTS->m_data.parr, env.ctx.cls);
4894 if (resolvedTS && resolvedTS != inputTS->m_data.parr) {
4895 reduce(env, bc::PopC {});
4896 RuntimeOption::EvalHackArrDVArrs ? reduce(env, bc::Dict { resolvedTS })
4897 : reduce(env, bc::Array { resolvedTS });
4898 reduce(env, bc::VerifyParamTypeTS { op.loc1 });
4899 return;
4902 popC(env);
// Shared implementation for return/out-parameter type verification
// (VerifyRetTypeC, VerifyRetTypeTS, VerifyOutType). `ts_flavor` means a
// type structure sits on top of the stack above the verified value;
// `reduce_this` allows rewriting an always-this constraint into
// VerifyRetNonNullC. On success the verified value can be refined to the
// intersection of the constraint types.
4905 void verifyRetImpl(ISS& env, const TCVec& tcs,
4906 bool reduce_this, bool ts_flavor) {
4907 // If it is the ts flavor, then second thing on the stack, otherwise first
4908 auto stackT = topC(env, (int)ts_flavor);
4909 auto const stackEquiv = topStkEquiv(env, (int)ts_flavor);
4911 // If there is no return type constraint, or if the return type
4912 // constraint is a typevar, or if the top of stack is the same or a
4913 // subtype of the type constraint, then this is a no-op, unless
4914 // reified types could be involved.
4915 if (std::all_of(std::begin(tcs), std::end(tcs),
4916 [&](const TypeConstraint* tc) {
4917 return env.index.satisfies_constraint(env.ctx, stackT, *tc);
4918 })) {
4919 if (ts_flavor) {
4920 // we wouldn't get here if reified types were definitely not
4921 // involved, so just bail.
4922 popC(env);
4923 popC(env);
4924 push(env, std::move(stackT), stackEquiv);
4925 return;
4927 return reduce(env);
4930 std::vector<Type> constraintTypes;
4931 auto dont_reduce = false;
4933 for (auto const& constraint : tcs) {
4934 // When the constraint is not soft.
4935 // We can safely assume that either VerifyRetTypeC will
4936 // throw or it will produce a value whose type is compatible with the
4937 // return type constraint.
4938 auto tcT = remove_uninit(env.index.lookup_constraint(env.ctx, *constraint));
4939 constraintTypes.push_back(tcT);
4941 // In some circumstances, verifyRetType can modify the type. If it
4942 // does that we can't reduce even when we know it succeeds.
4943 // VerifyRetType will convert a TCls to a TStr implicitly
4944 // (and possibly warn)
4945 if (tcT.couldBe(BStr) && stackT.couldBe(BCls)) {
4946 stackT |= TStr;
4947 dont_reduce = true;
4950 // VerifyRetType will convert TClsMeth to TVec/TVArr/TArr implicitly
4951 if (stackT.couldBe(BClsMeth)) {
4952 if (tcT.couldBe(BVec)) {
4953 stackT |= TVec;
4954 dont_reduce = true;
4956 if (tcT.couldBe(BVArr)) {
4957 stackT |= TVArr;
4958 dont_reduce = true;
4960 if (tcT.couldBe(TArr)) {
4961 stackT |= TArr;
4962 dont_reduce = true;
4966 // If the constraint is soft, then there are no optimizations we can safely
4967 // do here, so just leave the top of stack as is.
4968 if (constraint->isSoft() ||
4969 (RuntimeOption::EvalEnforceGenericsUB < 2 &&
4970 constraint->isUpperBound()))
4972 if (ts_flavor) popC(env);
4973 popC(env);
4974 push(env, std::move(stackT), stackEquiv);
4975 return;
4979 // In cases where we have a `this` hint where stackT is an TOptObj known to
4980 // be this, we can replace the check with a non null check. These cases are
4981 // likely from a BareThis that could return Null. Since the runtime will
4982 // split these translations, it will rarely in practice return null.
4983 if (reduce_this &&
4984 !dont_reduce &&
4985 is_opt(stackT) &&
4986 std::all_of(std::begin(tcs), std::end(tcs),
4987 [&](const TypeConstraint* constraint) {
4988 return constraint->isThis() &&
4989 !constraint->isNullable() &&
4990 env.index.satisfies_constraint(
4991 env.ctx, unopt(stackT), *constraint);
4995 if (ts_flavor) {
4996 return reduce(env, bc::PopC {}, bc::VerifyRetNonNullC {});
4998 return reduce(env, bc::VerifyRetNonNullC {});
// Refine the verified value; an empty intersection means the check always
// throws.
5001 auto retT = std::move(stackT);
5002 for (auto& tcT : constraintTypes) {
5003 retT = intersection_of(std::move(tcT), std::move(retT));
5004 if (retT.subtypeOf(BBottom)) {
5005 unreachable(env);
5006 if (ts_flavor) popC(env); // the type structure
5007 return;
5011 if (ts_flavor) popC(env); // the type structure
5012 popC(env);
5013 push(env, std::move(retT));
// VerifyOutType: check an outparam against its declared constraint and
// upper bounds (no this-reduction, no type structure on the stack).
5016 void in(ISS& env, const bc::VerifyOutType& op) {
5017 TCVec tcs;
5018 auto const& pinfo = env.ctx.func->params[op.arg1];
5019 tcs.push_back(&pinfo.typeConstraint);
5020 for (auto const& t : pinfo.upperBounds) tcs.push_back(&t);
5021 verifyRetImpl(env, tcs, false, false);
// VerifyRetTypeC: check the return value against the declared return
// constraint plus upper bounds; this-reduction allowed.
5024 void in(ISS& env, const bc::VerifyRetTypeC& /*op*/) {
5025 TCVec tcs;
5026 tcs.push_back(&env.ctx.func->retTypeConstraint);
5027 for (auto const& t : env.ctx.func->returnUBs) tcs.push_back(&t);
5028 verifyRetImpl(env, tcs, true, false);
// VerifyRetTypeTS: reified-generics return check with a runtime type
// structure on the stack. Mirrors VerifyParamTypeTS: degrade to
// VerifyRetTypeC when reified types can't be involved, statically resolve a
// constant type structure when possible, else run verifyRetImpl in
// ts-flavor mode.
5031 void in(ISS& env, const bc::VerifyRetTypeTS& /*op*/) {
5032 auto const a = topC(env);
5033 auto const requiredTSType = RuntimeOption::EvalHackArrDVArrs ? BDict : BDArr;
5034 if (!a.couldBe(requiredTSType)) {
5035 unreachable(env);
5036 popC(env);
5037 return;
5039 auto const constraint = env.ctx.func->retTypeConstraint;
5040 // TODO(T31677864): We are being extremely pessimistic here, relax it
5041 if (!env.ctx.func->isReified &&
5042 (!env.ctx.cls || !env.ctx.cls->hasReifiedGenerics) &&
5043 !env.index.could_have_reified_type(env.ctx, constraint)) {
5044 return reduce(env, bc::PopC {}, bc::VerifyRetTypeC {});
5046 if (auto const inputTS = tv(a)) {
5047 if (!isValidTSType(*inputTS, false)) {
5048 unreachable(env);
5049 popC(env);
5050 return;
5052 auto const resolvedTS =
5053 resolveTSStatically(env, inputTS->m_data.parr, env.ctx.cls);
5054 if (resolvedTS && resolvedTS != inputTS->m_data.parr) {
5055 reduce(env, bc::PopC {});
5056 RuntimeOption::EvalHackArrDVArrs ? reduce(env, bc::Dict { resolvedTS })
5057 : reduce(env, bc::Array { resolvedTS });
5058 reduce(env, bc::VerifyRetTypeTS {});
5059 return;
5062 TCVec tcs {&constraint};
5063 for (auto const& t : env.ctx.func->returnUBs) tcs.push_back(&t);
5064 verifyRetImpl(env, tcs, true, true);
// VerifyRetNonNullC: assert the return value is non-null. Reduces away when
// null is impossible, marks unreachable when the value is always null, and
// otherwise strips the null from the stack type.
5067 void in(ISS& env, const bc::VerifyRetNonNullC& /*op*/) {
5068 auto const constraint = env.ctx.func->retTypeConstraint;
5069 if (constraint.isSoft()) {
5070 return;
5073 auto stackT = topC(env);
5075 if (!stackT.couldBe(BInitNull)) {
5076 reduce(env);
5077 return;
5080 if (stackT.subtypeOf(BNull)) return unreachable(env);
5082 auto const equiv = topStkEquiv(env);
5084 if (is_opt(stackT)) stackT = unopt(std::move(stackT));
5086 popC(env);
5087 push(env, stackT, equiv);
// Self: push the current class, exactly when known (effect-free), otherwise
// an unknown class.
5090 void in(ISS& env, const bc::Self& op) {
5091 auto const self = selfClsExact(env);
5092 if (self) {
5093 effect_free(env);
5094 push(env, *self);
5095 } else {
5096 push(env, TCls);
// Parent: push the parent class, exactly when known (effect-free),
// otherwise an unknown class.
5100 void in(ISS& env, const bc::Parent& op) {
5101 auto const parent = parentClsExact(env);
5102 if (parent) {
5103 effect_free(env);
5104 push(env, *parent);
5105 } else {
5106 push(env, TCls);
// CreateCl: create a closure object, consuming op.arg1 captured use-vars
// from the stack. Their types are merged into the per-closure-class record
// for later analysis rounds.
5110 void in(ISS& env, const bc::CreateCl& op) {
5111 auto const nargs = op.arg1;
5112 auto const clsPair = env.index.resolve_closure_class(env.ctx, op.arg2);
5115 * Every closure should have a unique allocation site, but we may see it
5116 * multiple times in a given round of analyzing this function. Each time we
5117 * may have more information about the used variables; the types should only
5118 * possibly grow. If it's already there we need to merge the used vars in
5119 * with what we saw last time.
5121 if (nargs) {
5122 CompactVector<Type> usedVars(nargs);
// Use-vars are popped in reverse order of capture.
5123 for (auto i = uint32_t{0}; i < nargs; ++i) {
5124 usedVars[nargs - i - 1] = unctx(popCU(env));
5126 merge_closure_use_vars_into(
5127 env.collect.closureUseTypes,
5128 clsPair.second,
5129 std::move(usedVars)
5133 // Closure classes can be cloned and rescoped at runtime, so it's not safe to
5134 // assert the exact type of closure objects. The best we can do is assert
5135 // that it's a subclass of Closure.
5136 auto const closure = env.index.builtin_class(s_Closure.get());
5138 return push(env, subObj(closure));
// CreateCont: create the generator; from the generator body's perspective
// the value received is always null (first resume is next()).
5141 void in(ISS& env, const bc::CreateCont& /*op*/) {
5142 // First resume is always next() which pushes null.
5143 push(env, TInitNull);
5146 void in(ISS& env, const bc::ContEnter&) { popC(env); push(env, TInitCell); }
5147 void in(ISS& env, const bc::ContRaise&) { popC(env); push(env, TInitCell); }
// Yield: yields the popped value; the value received back on resume is
// unknown.
5149 void in(ISS& env, const bc::Yield&) {
5150 popC(env);
5151 push(env, TInitCell);
// YieldK: like Yield but also consumes the key; the resumed value is
// unknown.
5154 void in(ISS& env, const bc::YieldK&) {
5155 popC(env);
5156 popC(env);
5157 push(env, TInitCell);
// Generator query ops: ContCheck has no stack effect; the rest push their
// (unknown) result — validity is a bool, key/current/return are arbitrary.
5160 void in(ISS& /*env*/, const bc::ContCheck&) {}
5161 void in(ISS& env, const bc::ContValid&) { push(env, TBool); }
5162 void in(ISS& env, const bc::ContKey&) { push(env, TInitCell); }
5163 void in(ISS& env, const bc::ContCurrent&) { push(env, TInitCell); }
5164 void in(ISS& env, const bc::ContGetReturn&) { push(env, TInitCell); }
// Push the type produced by awaiting a wait handle of type t; an impossible
// await (non-object or WaitH<Bottom>) makes the next opcode unreachable.
5166 void pushTypeFromWH(ISS& env, Type t) {
5167 auto inner = typeFromWH(t);
5168 // The next opcode is unreachable if awaiting a non-object or WaitH<Bottom>.
5169 if (inner.subtypeOf(BBottom)) unreachable(env);
5170 push(env, std::move(inner));
// WHResult: extract the result from a finished wait handle.
5173 void in(ISS& env, const bc::WHResult&) {
5174 pushTypeFromWH(env, popC(env));
// Await: suspend on the wait handle and push its eventual result type.
5177 void in(ISS& env, const bc::Await&) {
5178 pushTypeFromWH(env, popC(env));
// AwaitAll: await every wait handle in a range of locals; pushes null.
// Canonicalize the local range to its equivalence representative so
// equivalent ranges produce identical bytecode.
5181 void in(ISS& env, const bc::AwaitAll& op) {
5182 auto const equiv = equivLocalRange(env, op.locrange);
5183 if (equiv != op.locrange.first) {
5184 return reduce(
5185 env,
5186 bc::AwaitAll {LocalRange {equiv, op.locrange.count}}
5190 for (uint32_t i = 0; i < op.locrange.count; ++i) {
5191 mayReadLocal(env, op.locrange.first + i);
5194 push(env, TInitNull);
5197 namespace {
// Shared implementation for the Idx and ArrayIdx bytecodes.
// Pops default, key and base; pushes the type of idx(base, key, default).
// arraysOnly selects ArrayIdx semantics (raises on non-array-like bases)
// versus Idx semantics (silently falls back to the default).
void idxImpl(ISS& env, bool arraysOnly) {
  auto const def  = popC(env);
  auto const key  = popC(env);
  auto const base = popC(env);

  if (key.subtypeOf(BInitNull)) {
    // A null key, regardless of whether we're ArrayIdx or Idx will always
    // silently return the default value, regardless of the base type.
    constprop(env);
    effect_free(env);
    return push(env, def);
  }

  // Push the returned type and annotate effects appropriately, taking into
  // account if the base might be null. Allowing for a possibly null base lets
  // us capture more cases.
  auto const finish = [&] (const Type& t, bool canThrow) {
    // A null base will raise if we're ArrayIdx. For Idx, it will silently
    // return the default value.
    auto const baseMaybeNull = base.couldBe(BInitNull);
    if (!canThrow && (!arraysOnly || !baseMaybeNull)) {
      constprop(env);
      effect_free(env);
    }
    if (!arraysOnly && baseMaybeNull) return push(env, union_of(t, def));
    if (t.subtypeOf(BBottom)) unreachable(env);
    return push(env, t);
  };

  if (arraysOnly) {
    // If ArrayIdx, we'll raise an error for anything other than array-like and
    // null. This op is only terminal if null isn't possible.
    if (!base.couldBe(BArr | BVec | BDict | BKeyset | BClsMeth)) {
      return finish(key.couldBe(BInitNull) ? def : TBottom, true);
    }
  } else if (
    !base.couldBe(BArr | BVec | BDict | BKeyset | BStr | BObj | BClsMeth)) {
    // Otherwise, any strange bases for Idx will just return the default value
    // without raising.
    return finish(def, false);
  }

  // Helper for Hack arrays. "validKey" is the set key types which can return a
  // value from Idx. "silentKey" is the set of key types which will silently
  // return null (anything else throws). The Hack array elem functions will
  // treat values of "silentKey" as throwing, so we must identify those cases
  // and deal with them.
  auto const hackArr = [&] (std::pair<Type, ThrowMode> elem,
                            const Type& validKey,
                            const Type& silentKey) {
    switch (elem.second) {
      case ThrowMode::None:
      case ThrowMode::MaybeMissingElement:
      case ThrowMode::MissingElement:
        assertx(key.subtypeOf(validKey));
        return finish(elem.first, false);
      case ThrowMode::MaybeBadKey:
        assertx(key.couldBe(validKey));
        // Keys that silently miss contribute the default to the result type.
        if (key.couldBe(silentKey)) elem.first |= def;
        return finish(elem.first, !key.subtypeOf(BOptArrKeyCompat));
      case ThrowMode::BadOperation:
        assertx(!key.couldBe(validKey));
        return finish(key.couldBe(silentKey) ? def : TBottom, true);
    }
  };

  if (base.subtypeOrNull(BVec)) {
    // Vecs will throw for any key other than Int, Str, or Null, and will
    // silently return the default value for the latter two.
    if (key.subtypeOrNull(BStr)) return finish(def, false);
    return hackArr(vec_elem(base, key, def), TInt, TOptStr);
  }

  if (base.subtypeOfAny(TOptDict, TOptKeyset)) {
    // Dicts and keysets will throw for any key other than Int, Str, or Null,
    // and will silently return the default value for Null.
    auto const elem = base.subtypeOrNull(BDict)
      ? dict_elem(base, key, def)
      : keyset_elem(base, key, def);
    return hackArr(elem, TArrKeyCompat, TInitNull);
  }

  if (base.subtypeOrNull(BArr)) {
    // A possibly null key is more complicated for arrays. array_elem() will
    // transform a null key into an empty string (matching the semantics of
    // array access), but that's not what Idx does. So, attempt to remove
    // nullish from the key first. If we can't, it just means we'll get a more
    // conservative value.
    auto maybeNull = false;
    auto const fixedKey = [&]{
      if (key.couldBe(TInitNull)) {
        maybeNull = true;
        if (is_nullish(key)) return unnullish(key);
      }
      return key;
    }();

    auto elem = array_elem(base, fixedKey, def);
    // If the key was null, Idx will return the default value, so add to the
    // return type.
    if (maybeNull) elem.first |= def;

    switch (elem.second) {
      case ThrowMode::None:
      case ThrowMode::MaybeMissingElement:
      case ThrowMode::MissingElement:
        return finish(elem.first, false);
      case ThrowMode::MaybeBadKey:
        return finish(elem.first, true);
      case ThrowMode::BadOperation:
        always_assert(false);
    }
  }

  if (!arraysOnly && base.subtypeOrNull(BStr)) {
    // Idx on a string always produces a string or the default value (without
    // ever raising).
    return finish(union_of(TStr, def), false);
  }

  // Objects or other unions of possible bases
  push(env, TInitCell);
}
// Idx: non-array-restricted variant; silently returns the default for
// unsupported bases.
void in(ISS& env, const bc::Idx&) { idxImpl(env, false); }
// ArrayIdx: array-restricted variant; raises on non-array-like bases.
void in(ISS& env, const bc::ArrayIdx&) { idxImpl(env, true); }
5328 void in(ISS& env, const bc::CheckProp&) {
5329 if (env.ctx.cls->attrs & AttrNoOverride) {
5330 return reduce(env, bc::False {});
5332 nothrow(env);
5333 push(env, TBool);
// InitProp: initialize a (static or non-static) property from the value on
// top of the stack. Merges the value's type into the property-state tracking,
// records whether the initial value satisfies the property's type constraint,
// and, when the value is a known constant, folds it directly into the
// property's default and reduces the op to a plain PopC.
void in(ISS& env, const bc::InitProp& op) {
  auto const t = topC(env);
  switch (op.subop2) {
    case InitPropOp::Static:
      mergeSelfProp(env, op.str1, t);
      // Static props are publicly visible; record the mutation for the
      // whole-program public-static-property analysis.
      env.collect.publicSPropMutations.merge(
        env.index, env.ctx, *env.ctx.cls, sval(op.str1), t, true
      );
      break;
    case InitPropOp::NonStatic:
      mergeThisProp(env, op.str1, t);
      break;
  }

  // Find the matching property metadata on the enclosing class.
  for (auto& prop : env.ctx.func->cls->properties) {
    if (prop.name != op.str1) continue;

    ITRACE(1, "InitProp: {} = {}\n", op.str1, show(t));

    // The initial value must satisfy the declared constraint and every
    // upper-bound constraint (with flags propagated from the main one).
    if (env.index.satisfies_constraint(env.ctx, t, prop.typeConstraint) &&
        std::all_of(prop.ubs.begin(), prop.ubs.end(),
                    [&](TypeConstraint ub) {
                      applyFlagsToUB(ub, prop.typeConstraint);
                      return env.index.satisfies_constraint(env.ctx, t, ub);
                    })) {
      prop.attrs |= AttrInitialSatisfiesTC;
    } else {
      badPropInitialValue(env);
      prop.attrs = (Attr)(prop.attrs & ~AttrInitialSatisfiesTC);
    }

    auto const v = tv(t);
    if (v || !could_contain_objects(t)) {
      // No object can be reached from this value, so deep-init is not needed.
      prop.attrs = (Attr)(prop.attrs & ~AttrDeepInit);
      if (!v) break;
      // Known constant: bake it into the property default and drop the op.
      prop.val = *v;
      env.index.update_static_prop_init_val(env.ctx.func->cls, op.str1);
      return reduce(env, bc::PopC {});
    }
  }

  popC(env);
}
5380 void in(ISS& env, const bc::Silence& op) {
5381 nothrow(env);
5382 switch (op.subop2) {
5383 case SilenceOp::Start:
5384 setLoc(env, op.loc1, TInt);
5385 break;
5386 case SilenceOp::End:
5387 locRaw(env, op.loc1);
5388 break;
5392 namespace {
// Shared implementation for MemoGet/MemoGetEager. "rebind" builds a copy of
// the op over a different LocalRange, used when an equivalent earlier range
// is available. Returns true if the op was fully handled (reduced or branch
// destination resolved); false if the caller must finish interpretation.
template <typename Op, typename Rebind>
bool memoGetImpl(ISS& env, const Op& op, Rebind&& rebind) {
  always_assert(env.ctx.func->isMemoizeWrapper);
  always_assert(op.locrange.first + op.locrange.count
                <= env.ctx.func->locals.size());

  if (will_reduce(env)) {
    // If we can use an equivalent, earlier range, then use that instead.
    auto const equiv = equivLocalRange(env, op.locrange);
    if (equiv != op.locrange.first) {
      reduce(env, rebind(LocalRange { equiv, op.locrange.count }));
      return true;
    }
  }

  // Pair of (return type of the memoized implementation, can-const-prop flag)
  // -- presumably; verify against memoizeImplRetType's declaration.
  auto retTy = memoizeImplRetType(env);

  // MemoGet can raise if we give a non arr-key local, or if we're in a method
  // and $this isn't available.
  auto allArrKey = true;
  for (uint32_t i = 0; i < op.locrange.count; ++i) {
    allArrKey &= locRaw(env, op.locrange.first + i).subtypeOf(BArrKey);
  }
  if (allArrKey &&
      (!env.ctx.func->cls ||
       (env.ctx.func->attrs & AttrStatic) ||
       thisAvailable(env))) {
    if (will_reduce(env)) {
      // Impl can never return: the cache can never hit, so always take the
      // branch.
      if (retTy.first.subtypeOf(BBottom)) {
        reduce(env);
        jmp_setdest(env, op.target1);
        return true;
      }
      // deal with constprop manually; otherwise we will propagate the
      // taken edge and *then* replace the MemoGet with a constant.
      if (retTy.second) {
        if (auto v = tv(retTy.first)) {
          reduce(env, gen_constant(*v));
          return true;
        }
      }
    }
    nothrow(env);
  }

  if (retTy.first == TBottom) {
    jmp_setdest(env, op.target1);
    return true;
  }

  // Cache miss path: propagate state to the taken edge, then push the
  // cached value's type on the fallthrough path.
  env.propagate(op.target1, &env.state);
  push(env, std::move(retTy.first));
  return false;
}
// MemoGet: thin wrapper over memoGetImpl; the rebind lambda rebuilds the op
// with the same branch target but a (possibly) different local range.
void in(ISS& env, const bc::MemoGet& op) {
  memoGetImpl(
    env, op,
    [&] (const LocalRange& l) { return bc::MemoGet { op.target1, l }; }
  );
}
// MemoGetEager: only valid inside async, non-generator memoize wrappers.
// On the suspended path (target2) the raw cached value is replaced by the
// wait handle's inner type when it is known.
void in(ISS& env, const bc::MemoGetEager& op) {
  always_assert(env.ctx.func->isAsync && !env.ctx.func->isGenerator);

  auto const reduced = memoGetImpl(
    env, op,
    [&] (const LocalRange& l) {
      return bc::MemoGetEager { op.target1, op.target2, l };
    }
  );
  if (reduced) return;

  env.propagate(op.target2, &env.state);
  auto const t = popC(env);
  push(
    env,
    is_specialized_wait_handle(t) ? wait_handle_inner(t) : TInitCell
  );
}
5477 namespace {
// Shared implementation for MemoSet/MemoSetEager: stores the value on top of
// the stack into the memo cache keyed by the local range, leaving the value
// on the stack.
template <typename Op>
void memoSetImpl(ISS& env, const Op& op) {
  always_assert(env.ctx.func->isMemoizeWrapper);
  always_assert(op.locrange.first + op.locrange.count
                <= env.ctx.func->locals.size());

  // If we can use an equivalent, earlier range, then use that instead.
  auto const equiv = equivLocalRange(env, op.locrange);
  if (equiv != op.locrange.first) {
    return reduce(
      env,
      Op { LocalRange { equiv, op.locrange.count } }
    );
  }

  // MemoSet can raise if we give a non arr-key local, or if we're in a method
  // and $this isn't available.
  auto allArrKey = true;
  for (uint32_t i = 0; i < op.locrange.count; ++i) {
    allArrKey &= locRaw(env, op.locrange.first + i).subtypeOf(BArrKey);
  }
  if (allArrKey &&
      (!env.ctx.func->cls ||
       (env.ctx.func->attrs & AttrStatic) ||
       thisAvailable(env))) {
    nothrow(env);
  }
  // The stored value is also the op's result.
  push(env, popC(env));
}
// MemoSet: plain memo-cache store.
void in(ISS& env, const bc::MemoSet& op) {
  memoSetImpl(env, op);
}
// MemoSetEager: like MemoSet but only valid in async, non-generator
// memoize wrappers.
void in(ISS& env, const bc::MemoSetEager& op) {
  always_assert(env.ctx.func->isAsync && !env.ctx.func->isGenerator);
  memoSetImpl(env, op);
}
5522 namespace {
5524 //////////////////////////////////////////////////////////////////////
// Dispatch a bytecode to its interp_step::in() handler. The O macro expands
// OPCODES into one switch case per opcode; each case forwards the matching
// immediate struct (op.<opcode>) to the handler and returns.
void dispatch(ISS& env, const Bytecode& op) {
#define O(opcode, ...) case Op::opcode: interp_step::in(env, op.opcode); return;
  switch (op.op) { OPCODES }
#undef O
  not_reached();
}
5533 //////////////////////////////////////////////////////////////////////
// Interpret a single bytecode: dispatch it, then post-process the resulting
// flags -- attempt constant propagation of the pushed values, propagate state
// across the throw exit if the instruction may raise, and record the
// (possibly rewritten) bytecode.
void interpStep(ISS& env, const Bytecode& bc) {
  ITRACE(2, " {} ({})\n",
         show(env.ctx.func, bc),
         env.unchangedBcs + env.replacedBcs.size());
  Trace::Indent _;

  // If there are throw exit edges, make a copy of the state (except
  // stacks) in case we need to propagate across throw exits (if
  // it's a PEI).
  if (!env.stateBefore && env.blk.throwExit != NoBlockId) {
    env.stateBefore.emplace(with_throwable_only(env.index, env.state));
  }

  env.flags = {};

  default_dispatch(env, bc);

  if (env.flags.reduced) return;

  // Try to replace the instruction with constants: only possible when every
  // pushed value has a statically-known TypedValue.
  auto const_prop = [&] {
    if (!options.ConstantProp || !env.flags.canConstProp) return false;

    auto const numPushed = bc.numPush();
    TinyVector<TypedValue> cells;

    auto i = size_t{0};
    while (i < numPushed) {
      auto const v = tv(topT(env, i));
      if (!v) return false;
      cells.push_back(*v);
      ++i;
    }

    // A constant-foldable instruction is treated as non-throwing and
    // effect-free for the rest of the pipeline.
    if (env.flags.wasPEI) {
      ITRACE(2, " nothrow (due to constprop)\n");
      env.flags.wasPEI = false;
    }
    if (!env.flags.effectFree) {
      ITRACE(2, " effect_free (due to constprop)\n");
      env.flags.effectFree = true;
    }

    // Undo the dispatch, then re-emit explicit pops for the consumed inputs
    // followed by pushes of the constant results.
    rewind(env, bc);

    auto const numPop = bc.numPop();
    for (auto j = 0; j < numPop; j++) {
      auto const flavor = bc.popFlavor(j);
      if (flavor == Flavor::C) {
        interpStep(env, bc::PopC {});
      } else if (flavor == Flavor::U) {
        interpStep(env, bc::PopU {});
      } else {
        // CU flavor: pick PopU vs PopC based on the actual stack type.
        assertx(flavor == Flavor::CU);
        auto const& popped = topT(env);
        if (popped.subtypeOf(BUninit)) {
          interpStep(env, bc::PopU {});
        } else {
          assertx(popped.subtypeOf(BInitCell));
          interpStep(env, bc::PopC {});
        }
      }
    }

    // i still holds numPushed; push constants back in stack order.
    while (i--) {
      push(env, from_cell(cells[i]));
      record(env, gen_constant(cells[i]));
    }
    return true;
  };

  if (const_prop()) {
    return;
  }

  assertx(!env.flags.effectFree || !env.flags.wasPEI);
  if (env.flags.wasPEI) {
    ITRACE(2, " PEI.\n");
    if (env.stateBefore) {
      env.propagate(env.blk.throwExit, &*env.stateBefore);
    }
  }
  env.stateBefore.reset();

  record(env, bc);
}
// Interpret one bytecode, first updating the current source location used
// for any bytecodes emitted while processing it.
void interpOne(ISS& env, const Bytecode& bc) {
  env.srcLoc = bc.srcLoc;
  interpStep(env, bc);
}
// Speculatively interpret a block to see where control flow would go, without
// committing any state. Returns the successor BlockId if the block is fully
// effect-free and leaves the stack at its low-water mark, or NoBlockId if we
// must bail (a propagate happened, a non-effect-free op, stack residue, or no
// fallthrough).
BlockId speculate(Interp& interp) {
  auto low_water = interp.state.stack.size();

  // Mark the collection as speculating for the duration of this call.
  interp.collect.opts = interp.collect.opts | CollectionOpts::Speculating;
  SCOPE_EXIT {
    interp.collect.opts = interp.collect.opts - CollectionOpts::Speculating;
  };

  // Any attempt to propagate state to another block aborts speculation.
  auto failed = false;
  ISS env { interp, [&] (BlockId, const State*) { failed = true; } };

  FTRACE(4, " Speculate B{}\n", interp.bid);
  for (auto const& bc : interp.blk->hhbcs) {
    assertx(!interp.state.unreachable);
    // CGetL2 peeks one deeper than its pop count; Dup re-pushes its input.
    auto const numPop = bc.numPop() +
      (bc.op == Op::CGetL2 ? 1 :
       bc.op == Op::Dup ? -1 : 0);
    if (interp.state.stack.size() - numPop < low_water) {
      low_water = interp.state.stack.size() - numPop;
    }

    interpOne(env, bc);
    if (failed) {
      env.collect.mInstrState.clear();
      FTRACE(3, " Bailing from speculate because propagate was called\n");
      return NoBlockId;
    }

    auto const& flags = env.flags;
    if (!flags.effectFree) {
      env.collect.mInstrState.clear();
      FTRACE(3, " Bailing from speculate because not effect free\n");
      return NoBlockId;
    }

    assertx(!flags.returned);

    // A taken branch at the low-water mark resolves the successor directly.
    if (flags.jmpDest != NoBlockId && interp.state.stack.size() == low_water) {
      FTRACE(2, " Speculate found target block {}\n", flags.jmpDest);
      return flags.jmpDest;
    }
  }

  if (interp.state.stack.size() != low_water) {
    FTRACE(3,
           " Bailing from speculate because the speculated block "
           "left items on the stack\n");
    return NoBlockId;
  }

  if (interp.blk->fallthrough == NoBlockId) {
    FTRACE(3,
           " Bailing from speculate because there was no fallthrough");
    return NoBlockId;
  }

  FTRACE(2, " Speculate found fallthrough block {}\n",
         interp.blk->fallthrough);

  return interp.blk->fallthrough;
}
// Chase a jump destination through dead/trivial blocks via speculation,
// returning the final target. When the current block ends in control flow and
// updateTaken is set, the taken edges of the last op are rewritten to point
// at the new target. Any stack slots consumed by the speculated blocks are
// popped here to keep the stack consistent.
BlockId speculateHelper(ISS& env, BlockId orig, bool updateTaken) {
  assertx(orig != NoBlockId);

  if (!will_reduce(env)) return orig;

  auto const last = last_op(env);
  bool endsInControlFlow = last && instrIsNonCallControlFlow(last->op);
  auto target = orig;
  auto pops = 0;

  if (options.RemoveDeadBlocks) {
    // Work on a compact copy of the state so speculation can't leak changes.
    State temp{env.state, State::Compact{}};
    while (true) {
      auto const& func = env.ctx.func;
      auto const targetBlk = func.blocks()[target].get();
      if (!targetBlk->multiPred) break;
      // Only chase through blocks ending in a conditional/switch branch.
      auto const ok = [&] {
        switch (targetBlk->hhbcs.back().op) {
          case Op::JmpZ:
          case Op::JmpNZ:
          case Op::SSwitch:
          case Op::Switch:
            return true;
          default:
            return false;
        }
      }();

      if (!ok) break;

      Interp interp {
        env.index, env.ctx, env.collect, target, targetBlk, temp
      };

      auto const old_size = temp.stack.size();
      auto const new_target = speculate(interp);
      if (new_target == NoBlockId) break;

      // delta = stack slots net-consumed by the speculated block.
      const ssize_t delta = old_size - temp.stack.size();
      assertx(delta >= 0);
      if (delta && endsInControlFlow) break;

      pops += delta;
      target = new_target;
      temp.stack.compact();
    }
  }

  if (endsInControlFlow && updateTaken) {
    assertx(!pops);
    auto needsUpdate = target != orig;
    if (!needsUpdate) {
      // Even if the primary target didn't move, other taken edges may need
      // to be cleared.
      forEachTakenEdge(
        *last,
        [&] (BlockId bid) {
          if (bid != orig) needsUpdate = true;
        }
      );
    }
    if (needsUpdate) {
      auto& bc = mutate_last_op(env);
      // Redirect the followed edge to the new target; all other taken edges
      // become NoBlockId.
      forEachTakenEdge(
        bc,
        [&] (BlockId& bid) {
          bid = bid == orig ? target : NoBlockId;
        }
      );
    }
  }

  // Emit explicit pops for the slots consumed by the speculated blocks.
  while (pops--) {
    auto const& popped = topT(env);
    if (popped.subtypeOf(BInitCell)) {
      interpStep(env, bc::PopC {});
    } else {
      assertx(popped.subtypeOf(BUninit));
      interpStep(env, bc::PopU {});
    }
  }

  return target;
}
5773 //////////////////////////////////////////////////////////////////////
// Interpret an entire block starting from state `in`, propagating resulting
// states to successors via `propagate`. Supports reprocessing: when handlers
// mutate the block (env.reprocess), the already-replaced bytecodes are
// re-interpreted from the input state. Returns RunFlags describing the
// block's outcome plus the bytecode-update info.
RunFlags run(Interp& interp, const State& in, PropagateFn propagate) {
  SCOPE_EXIT {
    FTRACE(2, "out {}{}\n",
           state_string(*interp.ctx.func, interp.state, interp.collect),
           property_state_string(interp.collect.props));
  };

  auto env = ISS { interp, propagate };
  auto ret = RunFlags {};
  // Package the fallthrough and the bytecode-rewrite bookkeeping into the
  // returned flags.
  auto finish = [&] (BlockId fallthrough) {
    ret.updateInfo.fallthrough = fallthrough;
    ret.updateInfo.unchangedBcs = env.unchangedBcs;
    ret.updateInfo.replacedBcs = std::move(env.replacedBcs);
    return ret;
  };

  // On reprocess, bytecodes [0, retryOffset) come from the original block and
  // the rest from retryBcs.
  BytecodeVec retryBcs;
  auto retryOffset = interp.blk->hhbcs.size();
  auto size = retryOffset;
  BlockId retryFallthrough = interp.blk->fallthrough;
  size_t idx = 0;

  while (true) {
    if (idx == size) {
      finish_tracked_elems(env, 0);
      if (!env.reprocess) break;
      FTRACE(2, " Reprocess mutated block {}\n", interp.bid);
      // Reprocessing must have made progress, otherwise we'd loop forever.
      assertx(env.unchangedBcs < retryOffset || env.replacedBcs.size());
      retryOffset = env.unchangedBcs;
      retryBcs = std::move(env.replacedBcs);
      env.unchangedBcs = 0;
      env.state.copy_from(in);
      env.reprocess = false;
      env.replacedBcs.clear();
      size = retryOffset + retryBcs.size();
      idx = 0;
      continue;
    }

    auto const& bc = idx < retryOffset ?
      interp.blk->hhbcs[idx] : retryBcs[idx - retryOffset];
    ++idx;

    interpOne(env, bc);
    auto const& flags = env.flags;
    if (interp.collect.effectFree && !flags.effectFree) {
      interp.collect.effectFree = false;
      // In effect-free-only mode there's no point continuing once the
      // property is lost.
      if (any(interp.collect.opts & CollectionOpts::EffectFreeOnly)) {
        env.collect.mInstrState.clear();
        FTRACE(2, " Bailing because not effect free\n");
        return finish(NoBlockId);
      }
    }

    if (flags.returned) {
      always_assert(idx == size);
      if (env.reprocess) continue;

      always_assert(interp.blk->fallthrough == NoBlockId);
      assertx(!ret.returned);
      FTRACE(2, " returned {}\n", show(*flags.returned));
      ret.retParam = flags.retParam;
      ret.returned = flags.returned;
      return finish(NoBlockId);
    }

    if (flags.jmpDest != NoBlockId) {
      always_assert(idx == size);
      // Determine whether the block still falls through in addition to (or
      // instead of) the taken branch.
      auto const hasFallthrough = [&] {
        if (flags.jmpDest != interp.blk->fallthrough) {
          FTRACE(2, " <took branch; no fallthrough>\n");
          auto const last = last_op(env);
          return !last || !instrIsNonCallControlFlow(last->op);
        } else {
          FTRACE(2, " <branch never taken>\n");
          return true;
        }
      }();
      if (hasFallthrough) retryFallthrough = flags.jmpDest;
      if (env.reprocess) continue;
      finish_tracked_elems(env, 0);
      auto const newDest = speculateHelper(env, flags.jmpDest, true);
      propagate(newDest, &interp.state);
      return finish(hasFallthrough ? newDest : NoBlockId);
    }

    if (interp.state.unreachable) {
      if (env.reprocess) {
        // Skip ahead so the reprocess logic at the loop head runs.
        idx = size;
        continue;
      }
      FTRACE(2, " <bytecode fallthrough is unreachable>\n");
      finish_tracked_elems(env, 0);
      return finish(NoBlockId);
    }
  }

  FTRACE(2, " <end block>\n");
  if (retryFallthrough != NoBlockId) {
    retryFallthrough = speculateHelper(env, retryFallthrough, false);
    propagate(retryFallthrough, &interp.state);
  }
  return finish(retryFallthrough);
}
// Interpret a single bytecode in isolation (no state propagation to other
// blocks) and return the resulting flags.
StepFlags step(Interp& interp, const Bytecode& op) {
  auto noop = [] (BlockId, const State*) {};
  ISS env { interp, noop };
  env.analyzeDepth++;
  default_dispatch(env, op);
  if (env.state.unreachable) {
    env.collect.mInstrState.clear();
  }
  assertx(env.trackedElems.empty());
  return env.flags;
}
// Dispatch wrapper that first flushes tracked array-construction elements
// the op is about to consume, then handles terminal/unreachable flags after
// the op runs.
void default_dispatch(ISS& env, const Bytecode& op) {
  if (!env.trackedElems.empty()) {
    // How many stack slots the op consumes for tracking purposes: AddElemC/
    // AddNewElemC keep their base (the tracked array) in place, and Concat*
    // only touch strings, so tracked elems below them are unaffected.
    auto const pops = [&] () -> uint32_t {
      switch (op.op) {
        case Op::AddElemC:
        case Op::AddNewElemC:
          return numPop(op) - 1;
        case Op::Concat:
        case Op::ConcatN:
          return 0;
        default:
          return numPop(op);
      }
    }();

    finish_tracked_elems(env, env.state.stack.size() - pops);
  }
  dispatch(env, op);
  // Terminal instructions with no resolved jump destination make the
  // fallthrough unreachable.
  if (instrFlags(op.op) & TF && env.flags.jmpDest == NoBlockId) {
    unreachable(env);
  } else if (env.state.unreachable) {
    env.collect.mInstrState.clear();
  }
}
// Public wrapper: the type of $this in the given context, or folly::none if
// unavailable.
folly::Optional<Type> thisType(const Index& index, Context ctx) {
  return thisTypeFromContext(index, ctx);
}
5921 //////////////////////////////////////////////////////////////////////