Don't allow TRef in equivLocs
[hiphop-php.git] / hphp / hhbbc / interp-internal.h
blob77a3c0821e1ff4e20362451764eda5a32465d163
1 /*
2 +----------------------------------------------------------------------+
3 | HipHop for PHP |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
16 #ifndef incl_HPHP_INTERP_INTERNAL_H_
17 #define incl_HPHP_INTERP_INTERNAL_H_
19 #include <algorithm>
21 #include <folly/Optional.h>
23 #include "hphp/runtime/base/type-string.h"
25 #include "hphp/hhbbc/interp-state.h"
26 #include "hphp/hhbbc/interp.h"
27 #include "hphp/hhbbc/representation.h"
28 #include "hphp/hhbbc/type-system.h"
29 #include "hphp/hhbbc/func-util.h"
30 #include "hphp/hhbbc/context.h"
32 namespace HPHP { namespace HHBBC {
34 struct LocalRange;
36 //////////////////////////////////////////////////////////////////////
38 TRACE_SET_MOD(hhbbc);
40 const StaticString s_assert("assert");
41 const StaticString s_set_frame_metadata("HH\\set_frame_metadata");
42 const StaticString s_86metadata("86metadata");
43 const StaticString s_func_num_args("func_num_args");
44 const StaticString s_func_get_args("func_get_args");
45 const StaticString s_func_get_arg("func_get_arg");
46 const StaticString s_func_slice_args("__SystemLib\\func_slice_args");
48 //////////////////////////////////////////////////////////////////////
/*
 * Interpreter Step State.
 *
 * This struct gives interpreter functions access to shared state. It's not in
 * interp-state.h because it's part of the internal implementation of
 * interpreter routines. The publicized state as results of interpretation are
 * in that header and interp.h.
 */
struct ISS {
  explicit ISS(Interp& bag,
               StepFlags& flags,
               PropagateFn propagate)
    : index(bag.index)
    , ctx(bag.ctx)
    , collect(bag.collect)
    , blk(*bag.blk)
    , state(bag.state)
    , flags(flags)
    , propagate(propagate)
  {}

  const Index& index;        // whole-program information (read-only)
  const Context ctx;         // unit/class/func being analyzed
  CollectedInfo& collect;    // accumulated per-function analysis results
  const php::Block& blk;     // block currently being interpreted
  State& state;              // mutable abstract machine state
  StepFlags& flags;          // per-instruction output flags
  PropagateFn propagate;     // pushes state to a successor block
};
80 void impl_vec(ISS& env, bool reduce, std::vector<Bytecode>&& bcs);
82 //////////////////////////////////////////////////////////////////////
namespace interp_step {

/*
 * An interp_step::in(ISS&, const bc::op&) function exists for every
 * bytecode. Most are defined in interp.cpp, but some (like FCallBuiltin and
 * member instructions) are defined elsewhere.
 */
#define O(opcode, ...) void in(ISS&, const bc::opcode&);
OPCODES
#undef O

}
/*
 * Find a contiguous local range which is equivalent to the given range and has
 * a smaller starting id. Only returns the equivalent first local because the
 * size doesn't change.
 */
LocalId equivLocalRange(ISS& env, const LocalRange& range);
104 namespace {
106 Type peekLocRaw(ISS& env, LocalId l);
108 #ifdef __clang__
109 #pragma clang diagnostic push
110 #pragma clang diagnostic ignored "-Wunused-function"
111 #endif
/*
 * impl(...)
 *
 * Utility for chaining one bytecode implementation to a series of a few
 * others. Use reduce() if you also want to enable strength reduction
 * (i.e. the bytecode can be replaced by some other bytecode as an
 * optimization).
 *
 * The chained-to bytecodes should not take branches. For impl, the
 * canConstProp flag will only be set if it was set for all the
 * bytecodes.
 */
template<class... Ts>
void impl(ISS& env, Ts&&... ts) {
  impl_vec(env, false, { std::forward<Ts>(ts)... });
}

/*
 * Reduce means that (given some situation in the execution state),
 * a given bytecode could be replaced by some other bytecode
 * sequence. Ensure that if you call reduce(), it is before any
 * state-affecting operations (like popC()).
 *
 * If env.collect.propagate_constants is set, the reduced bytecodes
 * will have been constant-propagated, and the canConstProp flag will
 * be clear; otherwise canConstProp will be set as for impl.
 */
void reduce(ISS& env, std::vector<Bytecode>&& bcs) {
  impl_vec(env, true, std::move(bcs));
}

template<class... Bytecodes>
void reduce(ISS& env, Bytecodes&&... hhbc) {
  reduce(env, { std::forward<Bytecodes>(hhbc)... });
}
150 bool fpassCanThrow(ISS& /*env*/, PrepKind kind, FPassHint hint) {
151 switch (kind) {
152 case PrepKind::Unknown: return hint != FPassHint::Any;
153 case PrepKind::Val: return hint == FPassHint::Ref;
154 case PrepKind::Ref: return hint == FPassHint::Cell;
156 not_reached();
// Mark the current instruction as unable to raise an error.
void nothrow(ISS& env) {
  FTRACE(2, " nothrow\n");
  env.flags.wasPEI = false;
}

// Mark everything after the current instruction in this block as unreachable.
void unreachable(ISS& env) {
  FTRACE(2, " unreachable\n");
  env.state.unreachable = true;
}

// Flag that this instruction may be replaced by a literal when its
// inputs are constants.
void constprop(ISS& env) {
  FTRACE(2, " constprop\n");
  env.flags.canConstProp = true;
}

// Effect-free implies nothrow as well.
void effect_free(ISS& env) {
  FTRACE(2, " effect_free\n");
  nothrow(env);
  env.flags.effectFree = true;
}

// Record the single known jump target of the current instruction.
void jmp_setdest(ISS& env, BlockId blk) {
  env.flags.jmpDest = blk;
}

// A conditional jump known never to be taken just falls through.
void jmp_nevertaken(ISS& env) {
  jmp_setdest(env, env.blk.fallthrough);
}
// Mark every declared parameter (that fits in the tracked bitset) as
// possibly read by this instruction.
void readUnknownParams(ISS& env) {
  for (LocalId p = 0; p < env.ctx.func->params.size(); p++) {
    if (p == env.flags.mayReadLocalSet.size()) break;
    env.flags.mayReadLocalSet.set(p);
  }
}

// Conservatively mark every local as possibly read.
void readUnknownLocals(ISS& env) { env.flags.mayReadLocalSet.set(); }
void readAllLocals(ISS& env) { env.flags.mayReadLocalSet.set(); }
/*
 * Fold type t into the collected type of the local static bound to `id`
 * (or of every bound local static, when id is NoLocalId). A TUninit
 * (but not TBottom) type means the binding is being removed rather than
 * written through.
 */
void modifyLocalStatic(ISS& env, LocalId id, const Type& t) {
  auto modifyOne = [&] (LocalId lid) {
    // Volatile locals are never tracked.
    if (is_volatile_local(env.ctx.func, lid)) return;
    if (env.state.localStaticBindings.size() <= lid) return;
    if (env.state.localStaticBindings[lid] == LocalStaticBinding::None) return;
    if (t.subtypeOf(TUninit) && !t.subtypeOf(TBottom)) {
      // Uninit means we are unbinding.
      env.state.localStaticBindings[lid] = id == NoLocalId ?
        LocalStaticBinding::None : LocalStaticBinding::Maybe;
      return;
    }
    if (lid >= env.collect.localStaticTypes.size()) {
      env.collect.localStaticTypes.resize(lid + 1, TBottom);
    }
    // Union in the new type; anything that isn't a cell degrades to TGen.
    env.collect.localStaticTypes[lid] = t.subtypeOf(TCell) ?
      union_of(std::move(env.collect.localStaticTypes[lid]), t) :
      TGen;
  };
  if (id != NoLocalId) {
    return modifyOne(id);
  }
  // NoLocalId: apply to every local with a static binding.
  for (LocalId i = 0; i < env.state.localStaticBindings.size(); i++) {
    modifyOne(i);
  }
}

// Record that the given local may (but is not known to) be bound to a
// local static.
void maybeBindLocalStatic(ISS& env, LocalId id) {
  if (is_volatile_local(env.ctx.func, id)) return;
  if (env.state.localStaticBindings.size() <= id) return;
  if (env.state.localStaticBindings[id] != LocalStaticBinding::None) return;
  env.state.localStaticBindings[id] = LocalStaticBinding::Maybe;
  return;
}

// Remove any local-static binding for the given local.
void unbindLocalStatic(ISS& env, LocalId id) {
  modifyLocalStatic(env, id, TUninit);
}

// Definitely bind the given local to a local static with type t.
void bindLocalStatic(ISS& env, LocalId id, const Type& t) {
  if (is_volatile_local(env.ctx.func, id)) return;
  if (env.state.localStaticBindings.size() <= id) {
    env.state.localStaticBindings.resize(id + 1);
  }
  env.state.localStaticBindings[id] = LocalStaticBinding::Bound;
  modifyLocalStatic(env, id, t);
}
/*
 * Record a return from the function with type t. When hasEffects is
 * false and no live local could run a destructor, the return is also
 * marked nothrow/effect-free.
 */
void doRet(ISS& env, Type t, bool hasEffects) {
  readAllLocals(env);
  assert(env.state.stack.empty());
  env.flags.returned = t;
  if (hasEffects) return;
  nothrow(env);
  // Local destructors can run arbitrary code, so any such local blocks
  // the effect-free marking.
  for (auto const& l : env.state.locals) {
    if (could_run_destructor(l)) {
      return;
    }
  }
  effect_free(env);
}

// Note that the function may use the variable environment (VarEnv).
void mayUseVV(ISS& env) {
  env.collect.mayUseVV = true;
}

// Note that iteration bases in this function are invariant.
void hasInvariantIterBase(ISS& env) {
  env.collect.hasInvariantIterBase = true;
}
266 //////////////////////////////////////////////////////////////////////
267 // eval stack
// Pop the top of the eval stack; the value may be of any flavor.
Type popT(ISS& env) {
  assert(!env.state.stack.empty());
  auto const ret = std::move(env.state.stack.back().type);
  FTRACE(2, " pop: {}\n", show(ret));
  assert(ret.subtypeOf(TGen));
  env.state.stack.pop_back();
  return ret;
}

// Pop a cell (initialized, unboxed value).
Type popC(ISS& env) {
  auto const v = popT(env);
  assert(v.subtypeOf(TInitCell));
  return v;
}

// Pop a reference (boxed value).
Type popV(ISS& env) {
  auto const v = popT(env);
  assert(v.subtypeOf(TRef));
  return v;
}

// Pop an uninitialized value.
Type popU(ISS& env) {
  auto const v = popT(env);
  assert(v.subtypeOf(TUninit));
  return v;
}

// Pop a cell that may also be uninit.
Type popCU(ISS& env) {
  auto const v = popT(env);
  assert(v.subtypeOf(TCell));
  return v;
}

// The R, F, and C|V flavors place no extra constraint on the type.
Type popR(ISS& env) { return popT(env); }
Type popF(ISS& env) { return popT(env); }
Type popCV(ISS& env) { return popT(env); }
306 void discard(ISS& env, int n) {
307 for (auto i = 0; i < n; ++i) {
308 popT(env);
// Reference to the type idx slots down from the top of the stack
// (0 == topmost). Does not pop.
Type& topT(ISS& env, uint32_t idx = 0) {
  assert(idx < env.state.stack.size());
  return env.state.stack[env.state.stack.size() - idx - 1].type;
}

// As topT, but asserts the slot holds a cell.
Type& topC(ISS& env, uint32_t i = 0) {
  assert(topT(env, i).subtypeOf(TInitCell));
  return topT(env, i);
}

// The R flavor places no extra constraint on the type.
Type& topR(ISS& env, uint32_t i = 0) { return topT(env, i); }

// As topT, but asserts the slot holds a reference.
Type& topV(ISS& env, uint32_t i = 0) {
  assert(topT(env, i).subtypeOf(TRef));
  return topT(env, i);
}

// Push a value with no known equivalent location.
void push(ISS& env, Type t) {
  FTRACE(2, " push: {}\n", show(t));
  env.state.stack.push_back(StackElem {std::move(t), NoLocalId});
}

// Push a value known to be equivalent to local l, or to the slot below
// it when l is StackDupId.
void push(ISS& env, Type t, LocalId l) {
  if (l != StackDupId) {
    // Can't track equivalence with a local that might be boxed.
    if (l == NoLocalId || peekLocRaw(env, l).couldBe(TRef)) {
      return push(env, t);
    }
    assertx(!is_volatile_local(env.ctx.func, l)); // volatiles are TGen
  }
  FTRACE(2, " push: {} (={})\n",
         show(t), l == StackDupId ? "Dup" : local_string(*env.ctx.func, l));
  env.state.stack.push_back(StackElem {std::move(t), l});
}
346 //////////////////////////////////////////////////////////////////////
347 // fpi
/*
 * Push an ActRec.
 *
 * nArgs should either be the number of parameters that will be passed
 * to the call, or -1 for unknown. We only need the number of args
 * when we know the exact function being called, in order to determine
 * eligibility for folding.
 *
 * returns the foldable flag as a convenience.
 */
bool fpiPush(ISS& env, ActRec ar, int32_t nArgs, bool maybeDynamic) {
  auto foldable = [&] {
    // Folding requires a known arg count and a single, exactly-known,
    // non-ctor, non-builtin callee.
    if (nArgs < 0 ||
        ar.kind == FPIKind::Ctor ||
        ar.kind == FPIKind::Builtin ||
        !ar.func || ar.fallbackFunc) {
      return false;
    }
    if (maybeDynamic && ar.func->mightCareAboutDynCalls()) return false;
    auto const func = ar.func->exactFunc();
    if (!func) return false;
    if (func->attrs & AttrTakesInOutParams) return false;
    // Give up if folding this callee from this block already failed.
    if (env.collect.unfoldableFuncs.count(std::make_pair(func, env.blk.id))) {
      return false;
    }
    // Foldable builtins are always worth trying
    if (ar.func->isFoldable()) return true;
    // Any native functions at this point are known to be
    // non-foldable, but other builtins might be, even if they
    // don't have the __Foldable attribute.
    if (func->nativeInfo) return false;

    // Don't try to fold functions which aren't guaranteed to be accessible at
    // this call site.
    if (func->attrs & AttrPrivate) {
      if (env.ctx.cls != func->cls) return false;
    } else if (func->attrs & AttrProtected) {
      if (!env.ctx.cls) return false;
      if (!env.index.must_be_derived_from(env.ctx.cls, func->cls) &&
          !env.index.must_be_derived_from(func->cls, env.ctx.cls)) return false;
    }

    if (func->params.size()) {
      // Not worth trying if we're going to warn due to missing args
      return check_nargs_in_range(func, nArgs);
    }

    // If the function has no args, we can simply check that its effect free
    // and returns a literal.
    return env.index.is_effect_free(*ar.func) &&
      is_scalar(env.index.lookup_return_type_raw(func));
  }();
  if (foldable) effect_free(env);
  ar.foldable = foldable;
  ar.pushBlk = env.blk.id;

  FTRACE(2, " fpi+: {}\n", show(ar));
  env.state.fpiStack.push_back(std::move(ar));
  return foldable;
}
// Push an ActRec that must never be considered for call folding.
void fpiPushNoFold(ISS& env, ActRec ar) {
  ar.foldable = false;
  ar.pushBlk = env.blk.id;

  FTRACE(2, " fpi+: {}\n", show(ar));
  env.state.fpiStack.push_back(std::move(ar));
}

// Pop and return the innermost ActRec.
ActRec fpiPop(ISS& env) {
  assert(!env.state.fpiStack.empty());
  auto const ret = env.state.fpiStack.back();
  FTRACE(2, " fpi-: {}\n", show(ret));
  env.state.fpiStack.pop_back();
  return ret;
}

// The innermost ActRec, without popping it.
ActRec& fpiTop(ISS& env) {
  assert(!env.state.fpiStack.empty());
  return env.state.fpiStack.back();
}
// Record that the innermost ActRec turned out not to be foldable, and
// force reanalysis of the region starting at its FPush.
void fpiNotFoldable(ISS& env) {
  // By the time we're optimizing, we should know up front which funcs
  // are foldable (the analyze phase iterates to convergence, the
  // optimize phase does not - so its too late to fix now).
  assertx(!any(env.collect.opts & CollectionOpts::Optimizing));

  auto& ar = fpiTop(env);
  assertx(ar.func && ar.foldable);
  auto const func = ar.func->exactFunc();
  assertx(func);
  env.collect.unfoldableFuncs.emplace(func, ar.pushBlk);
  env.propagate(ar.pushBlk, nullptr);
  ar.foldable = false;
  // we're going to reprocess the whole fpi region; any results we've
  // got so far are bogus, so prevent further useless work by
  // marking the next bytecode unreachable
  unreachable(env);
  FTRACE(2, " fpi: not foldable\n");
}
// How the callee accepts parameter paramId (by value, by ref, or
// unknown). As a side effect, deoptimizes a foldable ActRec whose
// parameter turns out not to be by-value.
PrepKind prepKind(ISS& env, uint32_t paramId) {
  auto& ar = fpiTop(env);
  if (ar.func && !ar.fallbackFunc) {
    auto const ret = env.index.lookup_param_prep(env.ctx, *ar.func, paramId);
    if (ar.foldable && ret != PrepKind::Val) {
      fpiNotFoldable(env);
    }
    if (ret != PrepKind::Unknown) {
      return ret;
    }
  }
  // Builtins and foldables always have a statically known PrepKind.
  assertx(ar.kind != FPIKind::Builtin && !ar.foldable);
  return PrepKind::Unknown;
}
// Replace the current FPass* with the given bytecodes, appending an
// FHandleRefMismatch when the known PrepKind contradicts the hint.
template<class... Bytecodes>
void killFPass(ISS& env, PrepKind kind, FPassHint hint, uint32_t arg,
               Bytecodes&&... bcs) {
  assert(kind != PrepKind::Unknown);

  // Since PrepKind is never Unknown for builtins or foldables we
  // should know statically if we will throw or not at runtime
  // (PrepKind and FPassHint don't match).
  if (fpassCanThrow(env, kind, hint)) {
    auto const& ar = fpiTop(env);
    assertx(ar.foldable || ar.kind == FPIKind::Builtin);
    assertx(ar.func && !ar.fallbackFunc);
    return reduce(
      env,
      std::forward<Bytecodes>(bcs)...,
      bc::FHandleRefMismatch { arg, hint, ar.func->name() }
    );
  }
  return reduce(env, std::forward<Bytecodes>(bcs)...);
}
// Decide whether an FPass* can be replaced by a cheaper bytecode. May
// mark the current ActRec not-foldable as a side effect.
bool shouldKillFPass(ISS& env, FPassHint hint, uint32_t param) {
  auto& ar = fpiTop(env);
  if (ar.kind == FPIKind::Builtin) return true;
  if (!ar.foldable) return false;
  prepKind(env, param);
  // prepKind may have cleared foldability; re-check before proceeding.
  if (!ar.foldable) return false;
  auto const ok = [&] {
    if (hint == FPassHint::Ref) return false;
    auto const& t = topT(env);
    // Only scalar arguments can be folded.
    if (!is_scalar(t)) return false;
    auto const callee = ar.func->exactFunc();
    // Extra args and variadic tails need no constraint check.
    if (param >= callee->params.size() ||
        (param + 1 == callee->params.size() &&
         callee->params.back().isVariadic)) {
      return true;
    }
    auto const constraint = callee->params[param].typeConstraint;
    if (!constraint.hasConstraint() ||
        constraint.isTypeVar() ||
        constraint.isTypeConstant()) {
      return true;
    }
    // The argument must satisfy the callee's type constraint, checked
    // in the callee's own context.
    return env.index.satisfies_constraint(
      Context { callee->unit, const_cast<php::Func*>(callee), callee->cls },
      t, constraint);
  }();
  if (ok) return true;
  fpiNotFoldable(env);
  return false;
}
517 //////////////////////////////////////////////////////////////////////
518 // locals
// Record that this instruction observed the type of local static l.
void useLocalStatic(ISS& env, LocalId l) {
  assert(env.collect.localStaticTypes.size() > l);
  if (!env.flags.usedLocalStatics) {
    env.flags.usedLocalStatics = std::make_shared<std::map<LocalId,Type>>();
  }
  // Ignore the return value, since we only want the first type used,
  // as that will be the narrowest.
  env.flags.usedLocalStatics->emplace(l, env.collect.localStaticTypes[l]);
}
530 void mayReadLocal(ISS& env, uint32_t id) {
531 if (id < env.flags.mayReadLocalSet.size()) {
532 env.flags.mayReadLocalSet.set(id);
// Find a local which is equivalent to the given local
LocalId findLocEquiv(ISS& env, LocalId l) {
  if (l >= env.state.equivLocals.size()) return NoLocalId;
  // Volatile locals must never participate in equivalence tracking.
  assert(env.state.equivLocals[l] == NoLocalId ||
         !is_volatile_local(env.ctx.func, l));
  return env.state.equivLocals[l];
}

// Determine whether two locals are equivalent
bool locsAreEquiv(ISS& env, LocalId l1, LocalId l2) {
  if (l1 >= env.state.equivLocals.size() ||
      l2 >= env.state.equivLocals.size() ||
      env.state.equivLocals[l1] == NoLocalId ||
      env.state.equivLocals[l2] == NoLocalId) {
    return false;
  }

  // Equivalence sets are circular singly-linked lists through
  // equivLocals; walk l1's cycle looking for l2.
  auto l = l1;
  while ((l = env.state.equivLocals[l]) != l1) {
    if (l == l2) return true;
  }
  return false;
}
// Remove l from its equivalence set (a circular linked list threaded
// through state.equivLocals).
void killLocEquiv(State& state, LocalId l) {
  if (l >= state.equivLocals.size()) return;
  if (state.equivLocals[l] == NoLocalId) return;

  // Find l's predecessor in the cycle.
  auto loc = l;
  do {
    loc = state.equivLocals[loc];
  } while (state.equivLocals[loc] != l);
  assert(loc != l);

  // Two-element cycle: clear the survivor; otherwise link the
  // predecessor around l.
  if (state.equivLocals[l] == loc) {
    state.equivLocals[loc] = NoLocalId;
  } else {
    state.equivLocals[loc] = state.equivLocals[l];
  }
  state.equivLocals[l] = NoLocalId;
}

void killLocEquiv(ISS& env, LocalId l) {
  killLocEquiv(env.state, l);
}

// Drop all local-equivalence information.
void killAllLocEquiv(ISS& env) {
  env.state.equivLocals.clear();
}
// Add from to to's equivalency set.
void addLocEquiv(ISS& env,
                 LocalId from,
                 LocalId to) {
  // Only unboxed, non-volatile locals may participate, and `from` must
  // not already belong to a set.
  always_assert(peekLocRaw(env, from).subtypeOf(TCell));
  always_assert(!is_volatile_local(env.ctx.func, to));
  always_assert(from != to && findLocEquiv(env, from) == NoLocalId);

  auto m = std::max(to, from);
  if (env.state.equivLocals.size() <= m) {
    env.state.equivLocals.resize(m + 1, NoLocalId);
  }

  // Splice `from` into the circular list after `to`, creating a
  // two-element cycle if `to` was previously alone.
  if (env.state.equivLocals[to] == NoLocalId) {
    env.state.equivLocals[from] = to;
    env.state.equivLocals[to] = from;
  } else {
    env.state.equivLocals[from] = env.state.equivLocals[to];
    env.state.equivLocals[to] = from;
  }
}
// Obtain a local which is equivalent to the given stack value
LocalId topStkLocal(const State& state, uint32_t idx = 0) {
  assert(idx < state.stack.size());
  auto const equiv = state.stack[state.stack.size() - idx - 1].equivLoc;
  // StackDupId is not a local; report "no local" in that case.
  return equiv == StackDupId ? NoLocalId : equiv;
}
612 LocalId topStkLocal(ISS& env, uint32_t idx = 0) {
613 return topStkLocal(env.state);
// Obtain a location which is equivalent to the given stack value
LocalId topStkEquiv(ISS& env, uint32_t idx = 0) {
  assert(idx < env.state.stack.size());
  return env.state.stack[env.state.stack.size() - idx - 1].equivLoc;
}

// Record that the stack value idx slots down is equivalent to local
// loc, following Dup chains toward the bottom of the stack as needed.
void setStkLocal(ISS& env, LocalId loc, uint32_t idx = 0) {
  assertx(loc <= MaxLocalId);
  always_assert(peekLocRaw(env, loc).subtypeOf(TCell));
  while (true) {
    auto equiv = topStkEquiv(env, idx);
    if (equiv != StackDupId) {
      if (equiv <= MaxLocalId) {
        // Slot already tied to a local: link the two locals instead.
        if (loc == equiv || locsAreEquiv(env, loc, equiv)) return;
        addLocEquiv(env, loc, equiv);
        return;
      }
      env.state.stack[env.state.stack.size() - idx - 1].equivLoc = loc;
      return;
    }
    // A Dup refers to the slot below it; keep walking down.
    idx++;
  }
}
// Forget the tracked thisLocToKill, either for a specific local l or
// (when l is NoLocalId) unconditionally.
void killThisLocToKill(ISS& env, LocalId l) {
  if (l != NoLocalId ?
      env.state.thisLocToKill == l : env.state.thisLocToKill != NoLocalId) {
    FTRACE(2, "Killing thisLocToKill: {}\n", env.state.thisLocToKill);
    env.state.thisLocToKill = NoLocalId;
  }
}

// Kill all equivalencies involving the given local to stack values
void killStkEquiv(ISS& env, LocalId l) {
  for (auto& e : env.state.stack) {
    if (e.equivLoc == l) e.equivLoc = NoLocalId;
  }
}

// Clear every stack-to-local equivalence (Dup links are preserved).
void killAllStkEquiv(ISS& env) {
  for (auto& e : env.state.stack) {
    if (e.equivLoc != StackDupId) e.equivLoc = NoLocalId;
  }
}
// Kill all iterator equivalences involving local l. If key is supplied
// and still matches the iterator's key local, the base equivalence
// survives.
void killIterEquivs(ISS& env, LocalId l, LocalId key = NoLocalId) {
  for (auto& i : env.state.iters) {
    match<void>(
      i,
      [] (DeadIter) {},
      [&] (LiveIter& iter) {
        if (iter.keyLocal == l) iter.keyLocal = NoLocalId;
        if (iter.baseLocal == l) {
          if (key == NoLocalId || key != iter.keyLocal) {
            iter.baseLocal = NoLocalId;
          }
        }
      }
    );
  }
}

// Drop base and key locals from every live iterator.
void killAllIterEquivs(ISS& env) {
  for (auto& i : env.state.iters) {
    match<void>(
      i,
      [] (DeadIter) {},
      [] (LiveIter& iter) {
        iter.baseLocal = NoLocalId;
        iter.keyLocal = NoLocalId;
      }
    );
  }
}

// Record the local holding iterator id's key (if the iterator is live).
void setIterKey(ISS& env, IterId id, LocalId key) {
  match<void>(
    env.state.iters[id],
    [] (DeadIter) {},
    [&] (LiveIter& iter) { iter.keyLocal = key; }
  );
}
// Raw type of local l, without recording the read in mayReadLocalSet.
Type peekLocRaw(ISS& env, LocalId l) {
  auto ret = env.state.locals[l];
  if (is_volatile_local(env.ctx.func, l)) {
    // Volatile locals must stay pinned at TGen.
    always_assert_flog(ret == TGen, "volatile local was not TGen");
  }
  return ret;
}

// Type at a generic location: a local, or (for StackDupId) a stack slot.
Type peekLocation(ISS& env, LocalId l, uint32_t idx = 0) {
  return l == StackDupId ? topT(env, idx) : peekLocRaw(env, l);
}

// As peekLocRaw, but records the read.
Type locRaw(ISS& env, LocalId l) {
  mayReadLocal(env, l);
  return peekLocRaw(env, l);
}

// Overwrite local l's type, invalidating every equivalence involving it.
void setLocRaw(ISS& env, LocalId l, Type t) {
  mayReadLocal(env, l);
  killLocEquiv(env, l);
  killStkEquiv(env, l);
  killIterEquivs(env, l);
  killThisLocToKill(env, l);
  if (is_volatile_local(env.ctx.func, l)) {
    // Volatile locals are never narrowed; just check the invariant.
    auto current = env.state.locals[l];
    always_assert_flog(current == TGen, "volatile local was not TGen");
    return;
  }
  modifyLocalStatic(env, l, t);
  env.state.locals[l] = std::move(t);
}

// If local l is bound to a local static whose collected type fits in
// super, return that (narrower) type and record the use; otherwise
// folly::none.
folly::Optional<Type> staticLocType(ISS& env, LocalId l, const Type& super) {
  mayReadLocal(env, l);
  if (env.state.localStaticBindings.size() > l &&
      env.state.localStaticBindings[l] == LocalStaticBinding::Bound) {
    assert(env.collect.localStaticTypes.size() > l);
    auto t = env.collect.localStaticTypes[l];
    if (t.subtypeOf(super)) {
      useLocalStatic(env, l);
      // TBottom means nothing was recorded yet; approximate as InitNull.
      if (t.subtypeOf(TBottom)) t = TInitNull;
      return std::move(t);
    }
  }
  return folly::none;
}
// Read a local type in the sense of CGetL. (TUninits turn into
// TInitNull, and potentially reffy types return the "inner" type,
// which is always a subtype of InitCell.)
Type locAsCell(ISS& env, LocalId l) {
  if (auto s = staticLocType(env, l, TInitCell)) {
    return std::move(*s);
  }
  auto t = locRaw(env, l);
  return !t.subtypeOf(TCell) ? TInitCell :
          t.subtypeOf(TUninit) ? TInitNull :
          remove_uninit(std::move(t));
}

// Read a local type, dereferencing refs, but without converting
// potential TUninits to TInitNull.
Type derefLoc(ISS& env, LocalId l) {
  if (auto s = staticLocType(env, l, TCell)) {
    return std::move(*s);
  }
  auto v = locRaw(env, l);
  if (v.subtypeOf(TCell)) return v;
  // Possibly reffy: all we know about the inner value is cell-ness.
  return v.couldBe(TUninit) ? TCell : TInitCell;
}

// Whether local l might be uninitialized (records the read).
bool locCouldBeUninit(ISS& env, LocalId l) {
  return locRaw(env, l).couldBe(TUninit);
}

// Whether local l might be boxed (records the read).
bool locCouldBeRef(ISS& env, LocalId l) {
  return locRaw(env, l).couldBe(TRef);
}
/*
 * Update the known type of a local, based on assertions
 * (VerifyParamType; or IsType/JmpCC), rather than an actual
 * modification to the local.
 */
void refineLocHelper(ISS& env, LocalId l, Type t) {
  auto v = peekLocRaw(env, l);
  // Never refine a possibly-boxed local; its inner type can change
  // behind our back.
  if (v.subtypeOf(TCell)) env.state.locals[l] = std::move(t);
}

// Apply the refinement `fun` to location l (a local, or StackDupId for
// the top of the stack), and to everything known-equivalent to it.
template<typename F>
void refineLocation(ISS& env, LocalId l, F fun) {
  auto refine = [&] (Type t) {
    always_assert(t.subtypeOf(TCell));
    auto r1 = fun(t);
    auto r2 = intersection_of(r1, t);
    // In unusual edge cases (mainly intersection of two unrelated
    // interfaces) the intersection may not be a subtype of its inputs.
    // In that case, always choose fun's type.
    if (r2.subtypeOf(r1)) return r2;
    return r1;
  };
  if (l == StackDupId) {
    // Walk down the chain of Dups, refining each slot, until we reach
    // the slot carrying the real equivalence location.
    auto stk = &env.state.stack.back();
    while (true) {
      stk->type = refine(std::move(stk->type));
      if (stk->equivLoc != StackDupId) break;
      assertx(stk != &env.state.stack.front());
      --stk;
    }
    l = stk->equivLoc;
  }
  if (l == NoLocalId) return;
  // Refine every local in l's equivalence cycle, then l itself.
  auto equiv = findLocEquiv(env, l);
  if (equiv != NoLocalId) {
    do {
      refineLocHelper(env, equiv, refine(peekLocRaw(env, equiv)));
      equiv = findLocEquiv(env, equiv);
    } while (equiv != l);
  }
  refineLocHelper(env, l, refine(peekLocRaw(env, l)));
}

// Two-way refinement for a conditional branch: propagate the `pre`
// refinement to `target`, then continue locally with `post`.
template<typename PreFun, typename PostFun>
void refineLocation(ISS& env, LocalId l,
                    PreFun pre, BlockId target, PostFun post) {
  auto state = env.state;
  refineLocation(env, l, pre);
  env.propagate(target, &env.state);
  env.state = std::move(state);
  refineLocation(env, l, post);
}
/*
 * Set a local type in the sense of tvSet. If the local is boxed or
 * not known to be not boxed, we can't change the type. May be used
 * to set locals to types that include Uninit.
 */
void setLoc(ISS& env, LocalId l, Type t, LocalId key = NoLocalId) {
  killLocEquiv(env, l);
  killStkEquiv(env, l);
  killIterEquivs(env, l, key);
  killThisLocToKill(env, l);
  modifyLocalStatic(env, l, t);
  mayReadLocal(env, l);
  refineLocHelper(env, l, std::move(t));
}
846 LocalId findLocal(ISS& env, SString name) {
847 for (auto& l : env.ctx.func->locals) {
848 if (l.name->same(name)) {
849 mayReadLocal(env, l.id);
850 return l.id;
853 return NoLocalId;
// Force non-ref locals to TCell. Used when something modifies an
// unknown local's value, without changing reffiness.
void loseNonRefLocalTypes(ISS& env) {
  readUnknownLocals(env);
  FTRACE(2, " loseNonRefLocalTypes\n");
  for (auto& l : env.state.locals) {
    if (l.subtypeOf(TCell)) l = TCell;
  }
  killAllLocEquiv(env);
  killAllStkEquiv(env);
  killAllIterEquivs(env);
  killThisLocToKill(env, NoLocalId);
  modifyLocalStatic(env, NoLocalId, TCell);
}

// An unknown local may become boxed: widen every local not already
// known to be a ref to TGen.
void boxUnknownLocal(ISS& env) {
  readUnknownLocals(env);
  FTRACE(2, " boxUnknownLocal\n");
  for (auto& l : env.state.locals) {
    if (!l.subtypeOf(TRef)) l = TGen;
  }
  killAllLocEquiv(env);
  killAllStkEquiv(env);
  killAllIterEquivs(env);
  killThisLocToKill(env, NoLocalId);
  // Don't update the local statics here; this is called both for
  // boxing and binding, and the effects on local statics are
  // different.
}

// An unknown local may be unset: add Uninit to every local's type.
void unsetUnknownLocal(ISS& env) {
  readUnknownLocals(env);
  FTRACE(2, " unsetUnknownLocal\n");
  for (auto& l : env.state.locals) l |= TUninit;
  killAllLocEquiv(env);
  killAllStkEquiv(env);
  killAllIterEquivs(env);
  killThisLocToKill(env, NoLocalId);
  unbindLocalStatic(env, NoLocalId);
}

// Give up all knowledge of every local: anything could have been
// written to any of them.
void killLocals(ISS& env) {
  FTRACE(2, " killLocals\n");
  readUnknownLocals(env);
  modifyLocalStatic(env, NoLocalId, TGen);
  for (auto& l : env.state.locals) l = TGen;
  killAllLocEquiv(env);
  killAllStkEquiv(env);
  killAllIterEquivs(env);
  killThisLocToKill(env, NoLocalId);
}
908 //////////////////////////////////////////////////////////////////////
909 // Special functions
// Model the caller-frame effects of known special builtins.
void specialFunctionEffects(ISS& env, const res::Func& func) {
  if (func.name()->isame(s_set_frame_metadata.get())) {
    /*
     * HH\set_frame_metadata can write to the local named 86metadata,
     * but doesn't require a VV.
     */
    auto const l = findLocal(env, s_86metadata.get());
    if (l != NoLocalId) setLoc(env, l, TInitCell);
    return;
  }

  if (func.name()->isame(s_assert.get())) {
    /*
     * Assert is somewhat special. In the most general case, it can read and
     * write to the caller's frame (and is marked as such). The first parameter,
     * if a string, will be evaled and can have arbitrary effects. Luckily this
     * is forbidden in RepoAuthoritative mode, so we can ignore that here. If
     * the assert fails, it may execute an arbitrary pre-registered callback
     * which still might try to write to the assert caller's frame. This can't
     * happen if calling such frame accessing functions dynamically is
     * forbidden.
     */
    if (RuntimeOption::DisallowDynamicVarEnvFuncs == HackStrictOption::ON) {
      return;
    }
  }

  /*
   * Skip-frame functions won't write or read to the caller's frame, but they
   * might dynamically call a function which can. So, skip-frame functions kill
   * our locals unless they can't call such functions.
   */
  if (func.mightWriteCallerFrame() ||
      (RuntimeOption::DisallowDynamicVarEnvFuncs != HackStrictOption::ON &&
       func.mightBeSkipFrame())) {
    readUnknownLocals(env);
    killLocals(env);
    mayUseVV(env);
    return;
  }

  if (func.mightReadCallerFrame()) {
    // func_num_args only inspects the frame's arg count, not locals.
    if (func.name()->isame(s_func_num_args.get())) return;
    if (func.name()->isame(s_func_get_args.get()) ||
        func.name()->isame(s_func_get_arg.get()) ||
        func.name()->isame(s_func_slice_args.get())) {
      readUnknownParams(env);
    } else {
      readUnknownLocals(env);
    }
    mayUseVV(env);
    return;
  }
}
// Model special-function effects given what the ActRec tells us about
// the callee.
void specialFunctionEffects(ISS& env, ActRec ar) {
  switch (ar.kind) {
  case FPIKind::Unknown:
    // fallthrough
  case FPIKind::Func:
    if (!ar.func) {
      // Unknown callee: assume the worst unless dynamic var-env
      // functions are forbidden.
      if (RuntimeOption::DisallowDynamicVarEnvFuncs != HackStrictOption::ON) {
        readUnknownLocals(env);
        killLocals(env);
        mayUseVV(env);
      }
      return;
    }
    // known func: fall through to the per-func analysis
  case FPIKind::Builtin:
    specialFunctionEffects(env, *ar.func);
    if (ar.fallbackFunc) specialFunctionEffects(env, *ar.fallbackFunc);
    break;
  case FPIKind::Ctor:
  case FPIKind::ObjMeth:
  case FPIKind::ObjMethNS:
  case FPIKind::ClsMeth:
  case FPIKind::ObjInvoke:
  case FPIKind::CallableArr:
    /*
     * Methods cannot read or write to the caller's frame, but they can be
     * skip-frame (if they're a builtin). So, its possible they'll dynamically
     * call a function which reads or writes to the caller's frame. If we don't
     * forbid this, we have to be pessimistic. Imagine something like
     * Vector::map calling assert.
     */
    if (RuntimeOption::DisallowDynamicVarEnvFuncs != HackStrictOption::ON &&
        (!ar.func || ar.func->mightBeSkipFrame())) {
      readUnknownLocals(env);
      killLocals(env);
      mayUseVV(env);
    }
    break;
  }
}
1006 //////////////////////////////////////////////////////////////////////
1007 // class-ref slots
// Read the specified class-ref slot without discarding the stored value.
const Type& peekClsRefSlot(ISS& env, ClsRefSlotId slot) {
  assert(slot != NoClsRefSlotId);
  always_assert_flog(env.state.clsRefSlots[slot].subtypeOf(TCls),
                     "class-ref slot contained non-TCls");
  return env.state.clsRefSlots[slot];
}

// Read the specified class-ref slot and discard the stored value.
Type takeClsRefSlot(ISS& env, ClsRefSlotId slot) {
  assert(slot != NoClsRefSlotId);
  auto ret = std::move(env.state.clsRefSlots[slot]);
  FTRACE(2, " read class-ref: {} -> {}\n", slot, show(ret));
  always_assert_flog(ret.subtypeOf(TCls), "class-ref slot contained non-TCls");
  // Reset the vacated slot to the generic class type.
  env.state.clsRefSlots[slot] = TCls;
  return ret;
}

// Write a class type into the specified class-ref slot.
void putClsRefSlot(ISS& env, ClsRefSlotId slot, Type ty) {
  assert(slot != NoClsRefSlotId);
  always_assert_flog(ty.subtypeOf(TCls),
                     "attempted to set class-ref slot to non-TCls");
  FTRACE(2, " write class-ref: {} -> {}\n", slot, show(ty));
  env.state.clsRefSlots[slot] = std::move(ty);
}
1035 //////////////////////////////////////////////////////////////////////
1036 // iterators
// Replace the tracked state of iterator `iter`.
void setIter(ISS& env, IterId iter, Iter iterState) {
  env.state.iters[iter] = std::move(iterState);
}

// Mark iterator `iter` as no longer live.
void freeIter(ISS& env, IterId iter) {
  env.state.iters[iter] = DeadIter {};
}
1045 bool iterIsDead(ISS& env, IterId iter) {
1046 return match<bool>(
1047 env.state.iters[iter],
1048 [] (DeadIter) { return true; },
1049 [] (const LiveIter&) { return false; }
1053 //////////////////////////////////////////////////////////////////////
1054 // $this
// Record that $this is definitely non-null from this point on.
void setThisAvailable(ISS& env) {
  FTRACE(2, " setThisAvailable\n");
  env.state.thisAvailable = true;
}

bool thisAvailable(ISS& env) { return env.state.thisAvailable; }

// Returns the type $this would have if it's not null. Generally
// you have to check thisIsAvailable() before assuming it can't be
// null.
folly::Optional<Type> thisTypeHelper(const Index& index, Context ctx) {
  // Due to `bindTo`, we can't conclude the type of $this.
  if (RuntimeOption::EvalAllowScopeBinding && ctx.func->isClosureBody) {
    return folly::none;
  }

  if (auto rcls = index.selfCls(ctx)) return setctx(subObj(*rcls));
  return folly::none;
}

folly::Optional<Type> thisType(ISS& env) {
  return thisTypeHelper(env.index, env.ctx);
}

// Subtype of the context class (self::), when known.
folly::Optional<Type> selfCls(ISS& env) {
  if (auto rcls = env.index.selfCls(env.ctx)) return subCls(*rcls);
  return folly::none;
}

// Exactly the context class, when known.
folly::Optional<Type> selfClsExact(ISS& env) {
  if (auto rcls = env.index.selfCls(env.ctx)) return clsExact(*rcls);
  return folly::none;
}

// Subtype of the parent class (parent::), when known.
folly::Optional<Type> parentCls(ISS& env) {
  if (auto rcls = env.index.parentCls(env.ctx)) return subCls(*rcls);
  return folly::none;
}

// Exactly the parent class, when known.
folly::Optional<Type> parentClsExact(ISS& env) {
  if (auto rcls = env.index.parentCls(env.ctx)) return clsExact(*rcls);
  return folly::none;
}
1100 //////////////////////////////////////////////////////////////////////
1101 // properties on $this
1104 * Note: we are only tracking control-flow insensitive types for
1105 * object properties, because it can be pretty rough to try to track
1106 * all cases that could re-enter the VM, run arbitrary code, and
1107 * potentially change the type of a property.
1109 * Because of this, the various "setter" functions for thisProps
1110 * here actually just union the new type into what we already had.
1113 Type* thisPropRaw(ISS& env, SString name) {
1114 auto& privateProperties = env.collect.props.privateProperties();
1115 auto const it = privateProperties.find(name);
1116 if (it != end(privateProperties)) {
1117 return &it->second;
1119 return nullptr;
1122 bool isTrackedThisProp(ISS& env, SString name) {
1123 return thisPropRaw(env, name);
1126 bool isNonSerializedThisProp(ISS& env, SString name) {
1127 return env.collect.props.isNonSerialized(name);
1130 void killThisProps(ISS& env) {
1131 FTRACE(2, " killThisProps\n");
1132 for (auto& kv : env.collect.props.privateProperties()) {
1133 kv.second = TGen;
1138 * This function returns a type that includes all the possible types
1139 * that could result from reading a property $this->name.
1141 * Note that this may include types that the property itself cannot
1142 * actually contain, due to the effects of a possible __get function.
1144 folly::Optional<Type> thisPropAsCell(ISS& env, SString name) {
1145 auto const t = thisPropRaw(env, name);
1146 if (!t) return folly::none;
1147 if (t->couldBe(TUninit)) {
1148 auto const rthis = thisType(env);
1149 if (!rthis || dobj_of(*rthis).cls.couldHaveMagicGet()) {
1150 return TInitCell;
1153 return !t->subtypeOf(TCell) ? TInitCell :
1154 t->subtypeOf(TUninit) ? TInitNull :
1155 remove_uninit(*t);
1159 * Merge a type into the tracked property types on $this, in the sense
1160 * of tvSet (i.e. setting the inner type on possible refs).
1162 * Note that all types we see that could go into an object property have to
1163 * loosen_all. This is because the object could be serialized and then
1164 * deserialized, losing the static-ness of a string or array member, and we
1165 * don't guarantee deserialization would preserve a constant value object
1166 * property type.
1168 void mergeThisProp(ISS& env, SString name, Type type) {
1169 auto const t = thisPropRaw(env, name);
1170 if (!t) return;
1171 *t |= (isNonSerializedThisProp(env, name) ? type : loosen_all(type));
1175 * Merge something into each this prop. Usually MapFn will be a
1176 * predicate that returns TBottom when some condition doesn't hold.
1178 * The types given to the map function are the raw tracked types
1179 * (i.e. could be TRef or TUninit).
1181 template<class MapFn>
1182 void mergeEachThisPropRaw(ISS& env, MapFn fn) {
1183 for (auto& kv : env.collect.props.privateProperties()) {
1184 mergeThisProp(env, kv.first, fn(kv.second));
1188 void unsetThisProp(ISS& env, SString name) {
1189 mergeThisProp(env, name, TUninit);
1192 void unsetUnknownThisProp(ISS& env) {
1193 for (auto& kv : env.collect.props.privateProperties()) {
1194 mergeThisProp(env, kv.first, TUninit);
1198 void boxThisProp(ISS& env, SString name) {
1199 auto const t = thisPropRaw(env, name);
1200 if (!t) return;
1201 *t |= TRef;
1205 * Forces non-ref property types up to TCell. This is used when an
1206 * operation affects an unknown property on $this, but can't change
1207 * its reffiness. This could only do TInitCell, but we're just
1208 * going to gradually get rid of the callsites of this.
1210 void loseNonRefThisPropTypes(ISS& env) {
1211 FTRACE(2, " loseNonRefThisPropTypes\n");
1212 for (auto& kv : env.collect.props.privateProperties()) {
1213 if (kv.second.subtypeOf(TCell)) kv.second = TCell;
1217 //////////////////////////////////////////////////////////////////////
1218 // properties on self::
1220 // Similar to $this properties above, we only track control-flow
1221 // insensitive types for these.
1223 Type* selfPropRaw(ISS& env, SString name) {
1224 auto& privateStatics = env.collect.props.privateStatics();
1225 auto it = privateStatics.find(name);
1226 if (it != end(privateStatics)) {
1227 return &it->second;
1229 return nullptr;
1232 void killSelfProps(ISS& env) {
1233 FTRACE(2, " killSelfProps\n");
1234 for (auto& kv : env.collect.props.privateStatics()) {
1235 kv.second = TGen;
1239 void killSelfProp(ISS& env, SString name) {
1240 FTRACE(2, " killSelfProp {}\n", name->data());
1241 if (auto t = selfPropRaw(env, name)) *t = TGen;
1244 // TODO(#3684136): self::$foo can't actually ever be uninit. Right
1245 // now uninits may find their way into here though.
1246 folly::Optional<Type> selfPropAsCell(ISS& env, SString name) {
1247 auto const t = selfPropRaw(env, name);
1248 if (!t) return folly::none;
1249 return !t->subtypeOf(TCell) ? TInitCell :
1250 t->subtypeOf(TUninit) ? TInitNull :
1251 remove_uninit(*t);
1255 * Merges a type into tracked static properties on self, in the
1256 * sense of tvSet (i.e. setting the inner type on possible refs).
1258 void mergeSelfProp(ISS& env, SString name, Type type) {
1259 auto const t = selfPropRaw(env, name);
1260 if (!t) return;
1261 // Context types might escape to other contexts here.
1262 *t |= unctx(type);
1266 * Similar to mergeEachThisPropRaw, but for self props.
1268 template<class MapFn>
1269 void mergeEachSelfPropRaw(ISS& env, MapFn fn) {
1270 for (auto& kv : env.collect.props.privateStatics()) {
1271 mergeSelfProp(env, kv.first, fn(kv.second));
1275 void boxSelfProp(ISS& env, SString name) {
1276 mergeSelfProp(env, name, TRef);
1280 * Forces non-ref static properties up to TCell. This is used when
1281 * an operation affects an unknown static property on self::, but
1282 * can't change its reffiness.
1284 * This could only do TInitCell because static properties can never
1285 * be unset. We're just going to get rid of the callers of this
1286 * function over a few more changes, though.
1288 void loseNonRefSelfPropTypes(ISS& env) {
1289 FTRACE(2, " loseNonRefSelfPropTypes\n");
1290 for (auto& kv : env.collect.props.privateStatics()) {
1291 if (kv.second.subtypeOf(TInitCell)) kv.second = TCell;
1295 #ifdef __clang__
1296 #pragma clang diagnostic pop
1297 #endif
1300 //////////////////////////////////////////////////////////////////////
1304 #endif