Move func_num_args() to an opcode
[hiphop-php.git] / hphp / hhbbc / interp-internal.h
blobc35734c04cff479bc8c7b768b65e2311ee8927b7
1 /*
2 +----------------------------------------------------------------------+
3 | HipHop for PHP |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
16 #ifndef incl_HPHP_INTERP_INTERNAL_H_
17 #define incl_HPHP_INTERP_INTERNAL_H_
19 #include <algorithm>
21 #include <folly/Optional.h>
23 #include "hphp/runtime/base/type-string.h"
25 #include "hphp/hhbbc/class-util.h"
26 #include "hphp/hhbbc/context.h"
27 #include "hphp/hhbbc/func-util.h"
28 #include "hphp/hhbbc/interp-state.h"
29 #include "hphp/hhbbc/interp.h"
30 #include "hphp/hhbbc/representation.h"
31 #include "hphp/hhbbc/type-system.h"
33 namespace HPHP { namespace HHBBC {
35 struct LocalRange;
37 //////////////////////////////////////////////////////////////////////
39 TRACE_SET_MOD(hhbbc);
41 const StaticString s_func_get_args("func_get_args");
42 const StaticString s_func_get_arg("func_get_arg");
43 const StaticString s_func_slice_args("__SystemLib\\func_slice_args");
45 //////////////////////////////////////////////////////////////////////
48 * Interpreter Step State.
50 * This struct gives interpreter functions access to shared state. It's not in
51 * interp-state.h because it's part of the internal implementation of
52 * interpreter routines. The publicized state as results of interpretation are
53 * in that header and interp.h.
55 struct ISS {
56 explicit ISS(Interp& bag,
57 StepFlags& flags,
58 PropagateFn propagate)
59 : index(bag.index)
60 , ctx(bag.ctx)
61 , collect(bag.collect)
62 , bid(bag.bid)
63 , blk(*bag.blk)
64 , state(bag.state)
65 , flags(flags)
66 , propagate(propagate)
// Read-only whole-program analysis index.
69 const Index& index;
// The unit/class/function context being interpreted.
70 const Context ctx;
// Mutable per-analysis collected results (prop types, used params, etc.).
71 CollectedInfo& collect;
// Id of the block currently being stepped through.
72 const BlockId bid;
73 const php::Block& blk;
// Flow-sensitive interpreter state, mutated as each bytecode is stepped.
74 State& state;
// Per-step output flags (may-read local set, const-prop eligibility, ...).
75 StepFlags& flags;
// Callback used to propagate state to successor blocks.
76 PropagateFn propagate;
// When false, parameter reads are not recorded into collect.usedParams
// (temporarily cleared via IgnoreUsedParams).
77 bool recordUsedParams{true};
80 void impl_vec(ISS& env, bool reduce, BytecodeVec&& bcs);
82 //////////////////////////////////////////////////////////////////////
84 namespace interp_step {
87 * An interp_step::in(ISS&, const bc::op&) function exists for every
88 * bytecode. Most are defined in interp.cpp, but some (like FCallBuiltin and
89 * member instructions) are defined elsewhere.
91 #define O(opcode, ...) void in(ISS&, const bc::opcode&);
92 OPCODES
93 #undef O
98 * Find a contiguous local range which is equivalent to the given range and has
99 * a smaller starting id. Only returns the equivalent first local because the
100 * size doesn't change.
102 LocalId equivLocalRange(ISS& env, const LocalRange& range);
104 namespace {
106 Type peekLocRaw(ISS& env, LocalId l);
108 #ifdef __clang__
109 #pragma clang diagnostic push
110 #pragma clang diagnostic ignored "-Wunused-function"
111 #endif
114 * impl(...)
116 * Utility for chaining one bytecode implementation to a series of a few
117 * others. Use reduce() if you also want to enable strength reduction
118 * (i.e. the bytecode can be replaced by some other bytecode as an
119 * optimization).
121 * The chained-to bytecodes should not take branches. For impl, the
122 * canConstProp flag will only be set if it was set for all the
123 * bytecodes.
126 template<class... Ts>
127 void impl(ISS& env, Ts&&... ts) {
128 impl_vec(env, false, { std::forward<Ts>(ts)... });
132 * Reduce means that (given some situation in the execution state),
133 * a given bytecode could be replaced by some other bytecode
134 * sequence. Ensure that if you call reduce(), it is before any
135 * state-affecting operations (like popC()).
137 * If env.collect.propagate_constants is set, the reduced bytecodes
138 * will have been constant-propagated, and the canConstProp flag will
139 * be clear; otherwise canConstProp will be set as for impl.
141 void reduce(ISS& env, BytecodeVec&& bcs) {
142 impl_vec(env, true, std::move(bcs));
145 template<class... Bytecodes>
146 void reduce(ISS& env, Bytecodes&&... hhbc) {
147 reduce(env, { std::forward<Bytecodes>(hhbc)... });
// Mark the current step as unable to raise a PHP-visible error/exception.
150 void nothrow(ISS& env) {
151 FTRACE(2, " nothrow\n");
152 env.flags.wasPEI = false;
// Mark the state after the current step as unreachable.
155 void unreachable(ISS& env) {
156 FTRACE(2, " unreachable\n");
157 env.state.unreachable = true;
// Mark the current step as eligible for constant propagation.
160 void constprop(ISS& env) {
161 FTRACE(2, " constprop\n");
162 env.flags.canConstProp = true;
// Mark the current step as having no observable effects (implies nothrow).
165 void effect_free(ISS& env) {
166 FTRACE(2, " effect_free\n");
167 nothrow(env);
168 env.flags.effectFree = true;
172 * Mark the current block as unconditionally jumping to target. The
173 * caller must arrange for env.state to reflect the state that needs
174 * to be propagated to the target, but it should not propagate that
175 * state.
// Record that the current block unconditionally jumps to target (the
// caller is responsible for propagating state there; see comment above).
177 void jmp_setdest(ISS& env, BlockId target) {
178 env.flags.jmpDest = target;
// Record that a conditional jump is never taken: control always goes to
// the block's fallthrough successor.
180 void jmp_nevertaken(ISS& env) {
181 jmp_setdest(env, env.blk.fallthrough);
// RAII guard: suppress recording of used parameters for its lifetime,
// restoring the previous recordUsedParams value on destruction.
184 struct IgnoreUsedParams {
185 explicit IgnoreUsedParams(ISS& env) :
186 env{env}, record{env.recordUsedParams} {
187 env.recordUsedParams = false;
190 ~IgnoreUsedParams() {
191 env.recordUsedParams = record;
194 ISS& env;
195 const bool record;
// Mark every parameter of the current function as possibly read by this
// step, and (unless suppressed) record all params as used.
198 void readUnknownParams(ISS& env) {
199 for (LocalId p = 0; p < env.ctx.func->params.size(); p++) {
200 if (p == env.flags.mayReadLocalSet.size()) break;
201 env.flags.mayReadLocalSet.set(p);
203 if (env.recordUsedParams) env.collect.usedParams.set();
// Mark every local as possibly read by this step.
206 void readUnknownLocals(ISS& env) {
207 env.flags.mayReadLocalSet.set();
208 if (env.recordUsedParams) env.collect.usedParams.set();
// Mark every local as read by this step. NOTE(review): currently
// byte-identical to readUnknownLocals; the two names presumably exist to
// express different caller intent — confirm before merging them.
211 void readAllLocals(ISS& env) {
212 env.flags.mayReadLocalSet.set();
213 if (env.recordUsedParams) env.collect.usedParams.set();
// Update the collected type of the static bound to local `id` after a
// modification of type t. id == NoLocalId means "all locals".
216 void modifyLocalStatic(ISS& env, LocalId id, const Type& t) {
217 auto modifyOne = [&] (LocalId lid) {
// Volatile locals and locals with no (possible) static binding are
// unaffected.
218 if (is_volatile_local(env.ctx.func, lid)) return;
219 if (env.state.localStaticBindings.size() <= lid) return;
220 if (env.state.localStaticBindings[lid] == LocalStaticBinding::None) return;
221 if (t.subtypeOf(BUninit) && !t.subtypeOf(BBottom)) {
222 // Uninit means we are unbinding.
223 env.state.localStaticBindings[lid] = id == NoLocalId ?
224 LocalStaticBinding::None : LocalStaticBinding::Maybe;
225 return;
227 if (lid >= env.collect.localStaticTypes.size()) {
228 env.collect.localStaticTypes.resize(lid + 1, TBottom);
// Union the new type in; anything not a subtype of Cell (i.e. possibly
// a ref) pessimizes the static's type to TGen.
230 env.collect.localStaticTypes[lid] = t.subtypeOf(BCell) ?
231 union_of(std::move(env.collect.localStaticTypes[lid]), t) :
232 TGen;
234 if (id != NoLocalId) {
235 return modifyOne(id);
// NoLocalId: conservatively apply the modification to every local.
237 for (LocalId i = 0; i < env.state.localStaticBindings.size(); i++) {
238 modifyOne(i);
242 void maybeBindLocalStatic(ISS& env, LocalId id) {
243 if (is_volatile_local(env.ctx.func, id)) return;
244 if (env.state.localStaticBindings.size() <= id) return;
245 if (env.state.localStaticBindings[id] != LocalStaticBinding::None) return;
246 env.state.localStaticBindings[id] = LocalStaticBinding::Maybe;
247 return;
250 void unbindLocalStatic(ISS& env, LocalId id) {
251 modifyLocalStatic(env, id, TUninit);
254 void bindLocalStatic(ISS& env, LocalId id, const Type& t) {
255 if (is_volatile_local(env.ctx.func, id)) return;
256 if (env.state.localStaticBindings.size() <= id) {
257 env.state.localStaticBindings.resize(id + 1);
259 env.state.localStaticBindings[id] = LocalStaticBinding::Bound;
260 modifyLocalStatic(env, id, t);
// Record a return of type t from the current function. A return reads
// all locals (for destructor effects), but should not count toward
// usedParams — hence the guard. If hasEffects is false the return is
// marked effect-free.
263 void doRet(ISS& env, Type t, bool hasEffects) {
264 IgnoreUsedParams _{env};
266 readAllLocals(env);
267 assert(env.state.stack.empty());
268 env.flags.retParam = NoLocalId;
269 env.flags.returned = t;
270 if (!hasEffects) {
271 effect_free(env);
275 void mayUseVV(ISS& env) {
276 env.collect.mayUseVV = true;
279 void hasInvariantIterBase(ISS& env) {
280 env.collect.hasInvariantIterBase = true;
283 //////////////////////////////////////////////////////////////////////
284 // eval stack
// Pop the top eval-stack element and return its type (must be a Gen).
286 Type popT(ISS& env) {
287 assert(!env.state.stack.empty());
288 auto const ret = std::move(env.state.stack.back().type);
289 FTRACE(2, " pop: {}\n", show(ret));
290 assert(ret.subtypeOf(BGen));
291 env.state.stack.pop_back();
292 return ret;
// Pop; assert the value is an initialized cell.
295 Type popC(ISS& env) {
296 auto const v = popT(env);
297 assert(v.subtypeOf(BInitCell));
298 return v;
// Pop; assert the value is a ref.
301 Type popV(ISS& env) {
302 auto const v = popT(env);
303 assert(v.subtypeOf(BRef));
304 return v;
// Pop; assert the value is uninit.
307 Type popU(ISS& env) {
308 auto const v = popT(env);
309 assert(v.subtypeOf(BUninit));
310 return v;
// Pop; assert the value is a cell (possibly uninit).
313 Type popCU(ISS& env) {
314 auto const v = popT(env);
315 assert(v.subtypeOf(BCell));
316 return v;
// Pop a cell-or-ref; no flavor assertion applies.
319 Type popCV(ISS& env) { return popT(env); }
// Pop and discard the top n stack elements.
321 void discard(ISS& env, int n) {
322 for (auto i = 0; i < n; ++i) {
323 popT(env);
327 Type& topT(ISS& env, uint32_t idx = 0) {
328 assert(idx < env.state.stack.size());
329 return env.state.stack[env.state.stack.size() - idx - 1].type;
332 Type& topC(ISS& env, uint32_t i = 0) {
333 assert(topT(env, i).subtypeOf(BInitCell));
334 return topT(env, i);
337 Type& topCV(ISS& env, uint32_t i = 0) { return topT(env, i); }
339 Type& topV(ISS& env, uint32_t i = 0) {
340 assert(topT(env, i).subtypeOf(BRef));
341 return topT(env, i);
344 void push(ISS& env, Type t) {
345 FTRACE(2, " push: {}\n", show(t));
346 env.state.stack.push_back(StackElem {std::move(t), NoLocalId});
349 void push(ISS& env, Type t, LocalId l) {
350 if (l == NoLocalId) return push(env, t);
351 if (l <= MaxLocalId) {
352 if (peekLocRaw(env, l).couldBe(BRef)) {
353 return push(env, t);
355 assertx(!is_volatile_local(env.ctx.func, l)); // volatiles are TGen
357 FTRACE(2, " push: {} (={})\n", show(t), local_string(*env.ctx.func, l));
358 env.state.stack.push_back(StackElem {std::move(t), l});
361 //////////////////////////////////////////////////////////////////////
362 // $this
// Assert into the state that $this is definitely available (non-null).
// If the context makes that impossible, the code path is unreachable.
364 void setThisAvailable(ISS& env) {
365 FTRACE(2, " setThisAvailable\n");
// NOTE(review): in a class context, $this can't exist in an unused trait
// or a static method; outside a class, only pseudomains can have $this.
// In those cases asserting availability contradicts the context — confirm
// this matches the ternary below.
366 if (env.ctx.cls ?
367 is_unused_trait(*env.ctx.cls) || (env.ctx.func->attrs & AttrStatic) :
368 !is_pseudomain(env.ctx.func)) {
369 return unreachable(env);
// If the tracked type can't be an object at all, the assertion fails.
371 if (!env.state.thisType.couldBe(BObj) ||
372 !env.state.thisType.subtypeOf(BOptObj)) {
373 return unreachable(env);
// Strip the nullability: $this is now known non-null.
375 if (is_opt(env.state.thisType)) {
376 env.state.thisType = unopt(env.state.thisType);
380 bool thisAvailable(ISS& env) {
381 assertx(!env.state.thisType.subtypeOf(BBottom));
382 return env.state.thisType.subtypeOf(BObj);
385 // Returns the type $this would have if it's not null. Generally
386 // you have to check thisAvailable() before assuming it can't be
387 // null.
388 folly::Optional<Type> thisTypeFromContext(const Index& index, Context ctx) {
389 // Due to `bindTo`, we can't conclude the type of $this.
390 if (RuntimeOption::EvalAllowScopeBinding && ctx.func->isClosureBody) {
391 return folly::none;
394 if (auto rcls = index.selfCls(ctx)) return setctx(subObj(*rcls));
395 return folly::none;
398 folly::Optional<Type> thisType(ISS& env) {
399 if (!is_specialized_obj(env.state.thisType)) return folly::none;
400 return is_opt(env.state.thisType) ?
401 unopt(env.state.thisType) : env.state.thisType;
404 folly::Optional<Type> selfCls(ISS& env) {
405 if (auto rcls = env.index.selfCls(env.ctx)) return subCls(*rcls);
406 return folly::none;
409 folly::Optional<Type> selfClsExact(ISS& env) {
410 if (auto rcls = env.index.selfCls(env.ctx)) return clsExact(*rcls);
411 return folly::none;
414 folly::Optional<Type> parentCls(ISS& env) {
415 if (auto rcls = env.index.parentCls(env.ctx)) return subCls(*rcls);
416 return folly::none;
419 folly::Optional<Type> parentClsExact(ISS& env) {
420 if (auto rcls = env.index.parentCls(env.ctx)) return clsExact(*rcls);
421 return folly::none;
424 //////////////////////////////////////////////////////////////////////
425 // fpi
428 * Push an ActRec.
430 * nArgs should either be the number of parameters that will be passed
431 * to the call, or -1 for unknown. We only need the number of args
432 * when we know the exact function being called, in order to determine
433 * eligibility for folding.
435 * returns the foldable flag as a convenience.
437 bool fpiPush(ISS& env, ActRec ar, int32_t nArgs, bool maybeDynamic) {
438 auto foldable = [&] {
439 if (!options.ConstantFoldBuiltins) return false;
440 if (any(env.collect.opts & CollectionOpts::Speculating)) return false;
441 if (!env.collect.propagate_constants &&
442 any(env.collect.opts & CollectionOpts::Optimizing)) {
443 // we're in the optimization phase, but we're not folding constants
444 return false;
446 if (nArgs < 0 ||
447 ar.kind == FPIKind::Ctor ||
448 ar.kind == FPIKind::Builtin ||
449 !ar.func || ar.fallbackFunc) {
450 return false;
452 if (maybeDynamic && ar.func->mightCareAboutDynCalls()) return false;
453 // Reified functions may have a mismatch of arity or reified generics
454 // so we cannot fold them
455 // TODO(T31677864): Detect the arity mismatch at HHBBC and enable them to
456 // be foldable
457 if (ar.func->couldHaveReifiedGenerics()) return false;
458 auto const func = ar.func->exactFunc();
459 if (!func) return false;
460 if (func->attrs & AttrTakesInOutParams) return false;
461 if (env.collect.unfoldableFuncs.count(std::make_pair(func, env.bid))) {
462 return false;
464 // Foldable builtins are always worth trying
465 if (ar.func->isFoldable()) return true;
466 // Any native functions at this point are known to be
467 // non-foldable, but other builtins might be, even if they
468 // don't have the __Foldable attribute.
469 if (func->nativeInfo) return false;
471 // Don't try to fold functions which aren't guaranteed to be accessible at
472 // this call site.
473 if (func->attrs & AttrPrivate) {
474 if (env.ctx.cls != func->cls) return false;
475 } else if (func->attrs & AttrProtected) {
476 if (!env.ctx.cls) return false;
477 if (!env.index.must_be_derived_from(env.ctx.cls, func->cls) &&
478 !env.index.must_be_derived_from(func->cls, env.ctx.cls)) return false;
481 if (func->params.size()) {
482 // Not worth trying if we're going to warn due to missing args
483 return check_nargs_in_range(func, nArgs);
486 auto has_better_this = [&] {
487 if (!is_specialized_obj(env.state.thisType)) return false;
488 auto const dobj = dobj_of(env.state.thisType);
489 return dobj.type == DObj::Exact || dobj.cls.cls() != func->cls;
492 if (!(func->attrs & AttrStatic) && func->cls) {
493 return thisAvailable(env) &&
494 (has_better_this() ||
495 is_scalar(env.index.lookup_return_type_raw(func)));
498 // The function has no args. Just check that it's effect free
499 // and returns a literal.
500 return env.index.is_effect_free(*ar.func) &&
501 is_scalar(env.index.lookup_return_type_raw(func));
502 }();
503 if (foldable) effect_free(env);
504 ar.foldable = foldable;
505 ar.pushBlk = env.bid;
507 FTRACE(2, " fpi+: {}\n", show(ar));
508 env.state.fpiStack.push_back(std::move(ar));
509 return foldable;
512 void fpiPushNoFold(ISS& env, ActRec ar) {
513 ar.foldable = false;
514 ar.pushBlk = env.bid;
516 FTRACE(2, " fpi+: {}\n", show(ar));
517 env.state.fpiStack.push_back(std::move(ar));
520 ActRec fpiPop(ISS& env) {
521 assert(!env.state.fpiStack.empty());
522 auto const ret = env.state.fpiStack.back();
523 FTRACE(2, " fpi-: {}\n", show(ret));
524 env.state.fpiStack.pop_back();
525 return ret;
528 ActRec& fpiTop(ISS& env) {
529 assert(!env.state.fpiStack.empty());
530 return env.state.fpiStack.back();
// Record that the function at the top of the FPI stack turned out not to
// be foldable after all, and force re-analysis of the FPI region from the
// block that pushed it.
533 void fpiNotFoldable(ISS& env) {
534 // By the time we're optimizing, we should know up front which funcs
535 // are foldable (the analyze phase iterates to convergence, the
536 // optimize phase does not - so it's too late to fix now).
537 assertx(!any(env.collect.opts & CollectionOpts::Optimizing));
539 auto& ar = fpiTop(env);
540 assertx(ar.func && ar.foldable);
541 auto const func = ar.func->exactFunc();
542 assertx(func);
// Remember the failure so the next iteration doesn't retry folding, and
// re-queue the pushing block for analysis.
543 env.collect.unfoldableFuncs.emplace(func, ar.pushBlk);
544 env.propagate(ar.pushBlk, nullptr);
545 ar.foldable = false;
546 // we're going to reprocess the whole fpi region; any results we've
547 // got so far are bogus, so prevent further useless work by
548 // marking the next bytecode unreachable
549 unreachable(env);
550 FTRACE(2, " fpi: not foldable\n");
553 //////////////////////////////////////////////////////////////////////
554 // locals
556 void useLocalStatic(ISS& env, LocalId l) {
557 assert(env.collect.localStaticTypes.size() > l);
558 if (!env.flags.usedLocalStatics) {
559 env.flags.usedLocalStatics =
560 std::make_shared<hphp_fast_map<LocalId,Type>>();
562 // Ignore the return value, since we only want the first type used,
563 // as that will be the narrowest.
564 env.flags.usedLocalStatics->emplace(l, env.collect.localStaticTypes[l]);
567 void mayReadLocal(ISS& env, uint32_t id) {
568 if (id < env.flags.mayReadLocalSet.size()) {
569 env.flags.mayReadLocalSet.set(id);
571 if (env.recordUsedParams && id < env.collect.usedParams.size()) {
572 env.collect.usedParams.set(id);
576 // Find a local which is equivalent to the given local
// (equivLocals links equivalent locals into circular lists; NoLocalId
// means l has no equivalents).
577 LocalId findLocEquiv(ISS& env, LocalId l) {
578 if (l >= env.state.equivLocals.size()) return NoLocalId;
579 assert(env.state.equivLocals[l] == NoLocalId ||
580 !is_volatile_local(env.ctx.func, l));
581 return env.state.equivLocals[l];
584 // Determine whether two locals are equivalent
585 bool locsAreEquiv(ISS& env, LocalId l1, LocalId l2) {
586 if (l1 >= env.state.equivLocals.size() ||
587 l2 >= env.state.equivLocals.size() ||
588 env.state.equivLocals[l1] == NoLocalId ||
589 env.state.equivLocals[l2] == NoLocalId) {
590 return false;
// Walk l1's circular equivalence list looking for l2.
593 auto l = l1;
594 while ((l = env.state.equivLocals[l]) != l1) {
595 if (l == l2) return true;
597 return false;
600 bool locIsThis(ISS& env, LocalId l) {
601 assertx(l <= MaxLocalId);
602 return l == env.state.thisLoc ||
603 (env.state.thisLoc <= MaxLocalId &&
604 locsAreEquiv(env, l, env.state.thisLoc));
// Remove local l from its circular equivalence list (if any), leaving
// the rest of the list intact.
607 void killLocEquiv(State& state, LocalId l) {
608 if (l >= state.equivLocals.size()) return;
609 if (state.equivLocals[l] == NoLocalId) return;
// Walk the circular list to find l's predecessor (the node whose
// successor is l).
610 auto loc = l;
611 do {
612 loc = state.equivLocals[loc];
613 } while (state.equivLocals[loc] != l);
614 assert(loc != l);
615 if (state.equivLocals[l] == loc) {
// Two-element list: the remaining local becomes a singleton.
616 state.equivLocals[loc] = NoLocalId;
617 } else {
// Splice l out: predecessor now points at l's successor.
618 state.equivLocals[loc] = state.equivLocals[l];
620 state.equivLocals[l] = NoLocalId;
623 void killLocEquiv(ISS& env, LocalId l) {
624 killLocEquiv(env.state, l);
627 void killAllLocEquiv(ISS& env) {
628 env.state.equivLocals.clear();
631 // Add from to to's equivalency set.
632 void addLocEquiv(ISS& env,
633 LocalId from,
634 LocalId to) {
635 always_assert(peekLocRaw(env, from).subtypeOf(BCell));
636 always_assert(!is_volatile_local(env.ctx.func, to));
637 always_assert(from != to && findLocEquiv(env, from) == NoLocalId);
639 auto m = std::max(to, from);
640 if (env.state.equivLocals.size() <= m) {
641 env.state.equivLocals.resize(m + 1, NoLocalId);
644 if (env.state.equivLocals[to] == NoLocalId) {
645 env.state.equivLocals[from] = to;
646 env.state.equivLocals[to] = from;
647 } else {
648 env.state.equivLocals[from] = env.state.equivLocals[to];
649 env.state.equivLocals[to] = from;
653 // Obtain a local which is equivalent to the given stack value
654 LocalId topStkLocal(const State& state, uint32_t idx = 0) {
655 assert(idx < state.stack.size());
656 auto const equiv = state.stack[state.stack.size() - idx - 1].equivLoc;
657 return equiv > MaxLocalId ? NoLocalId : equiv;
659 LocalId topStkLocal(ISS& env, uint32_t idx = 0) {
660 return topStkLocal(env.state, idx);
663 // Obtain a location which is equivalent to the given stack value
664 LocalId topStkEquiv(ISS& env, uint32_t idx = 0) {
665 assert(idx < env.state.stack.size());
666 return env.state.stack[env.state.stack.size() - idx - 1].equivLoc;
669 void setStkLocal(ISS& env, LocalId loc, uint32_t idx = 0) {
670 assertx(loc <= MaxLocalId);
671 always_assert(peekLocRaw(env, loc).subtypeOf(BCell));
672 auto const equiv = [&] {
673 while (true) {
674 auto const e = topStkEquiv(env, idx);
675 if (e != StackDupId) return e;
676 idx++;
678 }();
680 if (equiv <= MaxLocalId) {
681 if (loc == equiv || locsAreEquiv(env, loc, equiv)) return;
682 addLocEquiv(env, loc, equiv);
683 return;
685 env.state.stack[env.state.stack.size() - idx - 1].equivLoc = loc;
688 void killThisLoc(ISS& env, LocalId l) {
689 if (l != NoLocalId ?
690 env.state.thisLoc == l : env.state.thisLoc != NoLocalId) {
691 FTRACE(2, "Killing thisLoc: {}\n", env.state.thisLoc);
692 env.state.thisLoc = NoLocalId;
696 // Kill all equivalencies involving the given local to stack values
697 void killStkEquiv(ISS& env, LocalId l) {
698 for (auto& e : env.state.stack) {
699 if (e.equivLoc != l) continue;
700 e.equivLoc = findLocEquiv(env, l);
701 assertx(e.equivLoc != l);
705 void killAllStkEquiv(ISS& env) {
706 for (auto& e : env.state.stack) {
707 if (e.equivLoc <= MaxLocalId) e.equivLoc = NoLocalId;
711 void killIterEquivs(ISS& env, LocalId l, LocalId key = NoLocalId) {
712 for (auto& i : env.state.iters) {
713 match<void>(
715 [] (DeadIter) {},
716 [&] (LiveIter& iter) {
717 if (iter.keyLocal == l) iter.keyLocal = NoLocalId;
718 if (iter.baseLocal == l) {
719 if (key == NoLocalId || key != iter.keyLocal) {
720 iter.baseLocal = NoLocalId;
728 void killAllIterEquivs(ISS& env) {
729 for (auto& i : env.state.iters) {
730 match<void>(
732 [] (DeadIter) {},
733 [] (LiveIter& iter) {
734 iter.baseLocal = NoLocalId;
735 iter.keyLocal = NoLocalId;
741 void setIterKey(ISS& env, IterId id, LocalId key) {
742 match<void>(
743 env.state.iters[id],
744 [] (DeadIter) {},
745 [&] (LiveIter& iter) { iter.keyLocal = key; }
749 Type peekLocRaw(ISS& env, LocalId l) {
750 auto ret = env.state.locals[l];
751 if (is_volatile_local(env.ctx.func, l)) {
752 always_assert_flog(ret == TGen, "volatile local was not TGen");
754 return ret;
757 Type locRaw(ISS& env, LocalId l) {
758 mayReadLocal(env, l);
759 return peekLocRaw(env, l);
762 void setLocRaw(ISS& env, LocalId l, Type t) {
763 mayReadLocal(env, l);
764 killStkEquiv(env, l);
765 killLocEquiv(env, l);
766 killIterEquivs(env, l);
767 killThisLoc(env, l);
768 if (is_volatile_local(env.ctx.func, l)) {
769 auto current = env.state.locals[l];
770 always_assert_flog(current == TGen, "volatile local was not TGen");
771 return;
773 modifyLocalStatic(env, l, t);
774 env.state.locals[l] = std::move(t);
777 folly::Optional<Type> staticLocType(ISS& env, LocalId l, const Type& super) {
778 mayReadLocal(env, l);
779 if (env.state.localStaticBindings.size() > l &&
780 env.state.localStaticBindings[l] == LocalStaticBinding::Bound) {
781 assert(env.collect.localStaticTypes.size() > l);
782 auto t = env.collect.localStaticTypes[l];
783 if (t.subtypeOf(super)) {
784 useLocalStatic(env, l);
785 if (t.subtypeOf(BBottom)) t = TInitNull;
786 return std::move(t);
789 return folly::none;
792 // Read a local type in the sense of CGetL. (TUninits turn into
793 // TInitNull, and potentially reffy types return the "inner" type,
794 // which is always a subtype of InitCell.)
795 Type locAsCell(ISS& env, LocalId l) {
796 if (auto s = staticLocType(env, l, TInitCell)) {
797 return std::move(*s);
799 return to_cell(locRaw(env, l));
802 // Read a local type, dereferencing refs, but without converting
803 // potential TUninits to TInitNull.
804 Type derefLoc(ISS& env, LocalId l) {
805 if (auto s = staticLocType(env, l, TCell)) {
806 return std::move(*s);
808 auto v = locRaw(env, l);
809 if (v.subtypeOf(BCell)) return v;
810 return v.couldBe(BUninit) ? TCell : TInitCell;
813 bool locCouldBeUninit(ISS& env, LocalId l) {
814 return locRaw(env, l).couldBe(BUninit);
817 bool locCouldBeRef(ISS& env, LocalId l) {
818 return locRaw(env, l).couldBe(BRef);
822 * Update the known type of a local, based on assertions
823 * (VerifyParamType; or IsType/JmpCC), rather than an actual
824 * modification to the local.
826 void refineLocHelper(ISS& env, LocalId l, Type t) {
827 auto v = peekLocRaw(env, l);
828 if (v.subtypeOf(BCell)) env.state.locals[l] = std::move(t);
831 template<typename F>
832 bool refineLocation(ISS& env, LocalId l, F fun) {
833 bool ok = true;
834 auto refine = [&] (Type t) {
835 always_assert(t.subtypeOf(BCell));
836 auto r1 = fun(t);
837 auto r2 = intersection_of(r1, t);
838 // In unusual edge cases (mainly intersection of two unrelated
839 // interfaces) the intersection may not be a subtype of its inputs.
840 // In that case, always choose fun's type.
841 if (r2.subtypeOf(r1)) {
842 if (r2.subtypeOf(BBottom)) ok = false;
843 return r2;
845 if (r1.subtypeOf(BBottom)) ok = false;
846 return r1;
848 if (l == StackDupId) {
849 auto stk = &env.state.stack.back();
850 while (true) {
851 stk->type = refine(std::move(stk->type));
852 if (stk->equivLoc != StackDupId) break;
853 assertx(stk != &env.state.stack.front());
854 --stk;
856 l = stk->equivLoc;
858 if (l == StackThisId) {
859 if (env.state.thisLoc != NoLocalId) {
860 l = env.state.thisLoc;
863 if (l > MaxLocalId) return ok;
864 auto equiv = findLocEquiv(env, l);
865 if (equiv != NoLocalId) {
866 do {
867 refineLocHelper(env, equiv, refine(peekLocRaw(env, equiv)));
868 equiv = findLocEquiv(env, equiv);
869 } while (equiv != l);
871 refineLocHelper(env, l, refine(peekLocRaw(env, l)));
872 return ok;
875 template<typename PreFun, typename PostFun>
876 void refineLocation(ISS& env, LocalId l,
877 PreFun pre, BlockId target, PostFun post) {
878 auto state = env.state;
879 auto const target_reachable = refineLocation(env, l, pre);
880 if (!target_reachable) jmp_nevertaken(env);
881 // swap, so we can restore this state if the branch is always taken.
882 std::swap(env.state, state);
883 if (!refineLocation(env, l, post)) {
884 jmp_setdest(env, target);
885 env.state = std::move(state);
886 } else if (target_reachable) {
887 env.propagate(target, &state);
892 * Set a local type in the sense of tvSet. If the local is boxed or
893 * not known to be not boxed, we can't change the type. May be used
894 * to set locals to types that include Uninit.
896 void setLoc(ISS& env, LocalId l, Type t, LocalId key = NoLocalId) {
897 killStkEquiv(env, l);
898 killLocEquiv(env, l);
899 killIterEquivs(env, l, key);
900 killThisLoc(env, l);
901 modifyLocalStatic(env, l, t);
902 mayReadLocal(env, l);
903 refineLocHelper(env, l, std::move(t));
906 LocalId findLocal(ISS& env, SString name) {
907 for (auto& l : env.ctx.func->locals) {
908 if (l.name->same(name)) {
909 mayReadLocal(env, l.id);
910 return l.id;
913 return NoLocalId;
916 // Force non-ref locals to TCell. Used when something modifies an
917 // unknown local's value, without changing reffiness.
918 void loseNonRefLocalTypes(ISS& env) {
919 readUnknownLocals(env);
920 FTRACE(2, " loseNonRefLocalTypes\n");
921 for (auto& l : env.state.locals) {
922 if (l.subtypeOf(BCell)) l = TCell;
924 killAllLocEquiv(env);
925 killAllStkEquiv(env);
926 killAllIterEquivs(env);
927 killThisLoc(env, NoLocalId);
928 modifyLocalStatic(env, NoLocalId, TCell);
931 void killLocals(ISS& env) {
932 FTRACE(2, " killLocals\n");
933 readUnknownLocals(env);
934 modifyLocalStatic(env, NoLocalId, TGen);
935 for (auto& l : env.state.locals) l = TGen;
936 killAllLocEquiv(env);
937 killAllStkEquiv(env);
938 killAllIterEquivs(env);
939 killThisLoc(env, NoLocalId);
942 //////////////////////////////////////////////////////////////////////
943 // Special functions
// Account for frame-reading effects of calling the given resolved
// function from the current frame.
945 void specialFunctionEffects(ISS& env, const res::Func& func) {
947 * Skip-frame functions won't read from the caller's frame, but they might
948 * dynamically call a function which can. So, skip-frame functions read our
949 * locals unless they can't call such functions.
951 if ((RuntimeOption::DisallowDynamicVarEnvFuncs != HackStrictOption::ON &&
952 func.mightBeSkipFrame())) {
953 readUnknownLocals(env);
956 if (func.mightReadCallerFrame()) {
// func_get_args and friends only read the caller's parameters; anything
// else that reads the caller frame may touch any local.
957 if (func.name()->isame(s_func_get_args.get()) ||
958 func.name()->isame(s_func_get_arg.get()) ||
959 func.name()->isame(s_func_slice_args.get())) {
960 readUnknownParams(env);
961 } else {
962 readUnknownLocals(env);
964 mayUseVV(env);
968 void specialFunctionEffects(ISS& env, ActRec ar) {
969 switch (ar.kind) {
970 case FPIKind::Unknown:
971 // fallthrough
972 case FPIKind::Func:
973 if (!ar.func) {
974 if (RuntimeOption::DisallowDynamicVarEnvFuncs != HackStrictOption::ON) {
975 readUnknownLocals(env);
977 return;
979 case FPIKind::Builtin:
980 specialFunctionEffects(env, *ar.func);
981 if (ar.fallbackFunc) specialFunctionEffects(env, *ar.fallbackFunc);
982 break;
983 case FPIKind::Ctor:
984 case FPIKind::ObjMeth:
985 case FPIKind::ObjMethNS:
986 case FPIKind::ClsMeth:
987 case FPIKind::ObjInvoke:
988 case FPIKind::CallableArr:
990 * Methods cannot read or write to the caller's frame, but they can be
991 * skip-frame (if they're a builtin). So, its possible they'll dynamically
992 * call a function which reads from the caller's frame. If we don't
993 * forbid this, we have to be pessimistic. Imagine something like
994 * Vector::map calling assert.
996 if (RuntimeOption::DisallowDynamicVarEnvFuncs != HackStrictOption::ON &&
997 (!ar.func || ar.func->mightBeSkipFrame())) {
998 readUnknownLocals(env);
1000 break;
1004 //////////////////////////////////////////////////////////////////////
1005 // class-ref slots
1007 // Read the specified class-ref slot without discarding the stored value.
1008 const Type& peekClsRefSlot(ISS& env, ClsRefSlotId slot) {
1009 assert(slot != NoClsRefSlotId);
1010 always_assert_flog(env.state.clsRefSlots[slot].subtypeOf(BCls),
1011 "class-ref slot contained non-TCls");
1012 return env.state.clsRefSlots[slot];
1015 // Read the specified class-ref slot and discard the stored value.
1016 Type takeClsRefSlot(ISS& env, ClsRefSlotId slot) {
1017 assert(slot != NoClsRefSlotId);
1018 auto ret = std::move(env.state.clsRefSlots[slot]);
1019 FTRACE(2, " read class-ref: {} -> {}\n", slot, show(ret));
1020 always_assert_flog(ret.subtypeOf(BCls), "class-ref slot contained non-TCls");
1021 env.state.clsRefSlots[slot] = TCls;
1022 return ret;
1025 void putClsRefSlot(ISS& env, ClsRefSlotId slot, Type ty) {
1026 assert(slot != NoClsRefSlotId);
1027 always_assert_flog(ty.subtypeOf(BCls),
1028 "attempted to set class-ref slot to non-TCls");
1029 FTRACE(2, " write class-ref: {} -> {}\n", slot, show(ty));
1030 env.state.clsRefSlots[slot] = std::move(ty);
1033 //////////////////////////////////////////////////////////////////////
1034 // iterators
1036 void setIter(ISS& env, IterId iter, Iter iterState) {
1037 env.state.iters[iter] = std::move(iterState);
1039 void freeIter(ISS& env, IterId iter) {
1040 env.state.iters[iter] = DeadIter {};
1043 bool iterIsDead(ISS& env, IterId iter) {
1044 return match<bool>(
1045 env.state.iters[iter],
1046 [] (DeadIter) { return true; },
1047 [] (const LiveIter&) { return false; }
1051 //////////////////////////////////////////////////////////////////////
1052 // properties on $this
1055 * Note: we are only tracking control-flow insensitive types for
1056 * object properties, because it can be pretty rough to try to track
1057 * all cases that could re-enter the VM, run arbitrary code, and
1058 * potentially change the type of a property.
1060 * Because of this, the various "setter" functions for thisProps
1061 * here actually just union the new type into what we already had.
1064 PropStateElem<>* thisPropRaw(ISS& env, SString name) {
1065 auto& privateProperties = env.collect.props.privateProperties();
1066 auto const it = privateProperties.find(name);
1067 if (it != end(privateProperties)) {
1068 return &it->second;
1070 return nullptr;
1073 bool isTrackedThisProp(ISS& env, SString name) {
1074 return thisPropRaw(env, name);
1077 bool isMaybeLateInitThisProp(ISS& env, SString name) {
1078 if (!env.ctx.cls) return false;
1079 for (auto const& prop : env.ctx.cls->properties) {
1080 if (prop.name == name &&
1081 (prop.attrs & AttrPrivate) &&
1082 !(prop.attrs & AttrStatic)
1084 return prop.attrs & AttrLateInit;
1087 // Prop either doesn't exist, or is on an unflattened trait. Be conservative.
1088 return true;
1091 void killThisProps(ISS& env) {
1092 FTRACE(2, " killThisProps\n");
1093 for (auto& kv : env.collect.props.privateProperties()) {
1094 kv.second.ty |=
1095 adjust_type_for_prop(env.index, *env.ctx.cls, kv.second.tc, TGen);
1100 * This function returns a type that includes all the possible types
1101 * that could result from reading a property $this->name.
1103 * Note that this may include types that the property itself cannot
1104 * actually contain, due to the effects of a possible __get function.
1106 folly::Optional<Type> thisPropAsCell(ISS& env, SString name) {
1107 auto const elem = thisPropRaw(env, name);
1108 if (!elem) return folly::none;
1109 if (elem->ty.couldBe(BUninit)) {
1110 auto const rthis = thisType(env);
1111 if (!rthis || dobj_of(*rthis).cls.couldHaveMagicGet()) {
1112 return TInitCell;
1115 return to_cell(elem->ty);
1119 * Merge a type into the tracked property types on $this, in the sense
1120 * of tvSet (i.e. setting the inner type on possible refs).
1122 * Note that all types we see that could go into an object property have to
1123 * loosen_all. This is because the object could be serialized and then
1124 * deserialized, losing the static-ness of a string or array member, and we
1125 * don't guarantee deserialization would preserve a constant value object
1126 * property type.
1128 void mergeThisProp(ISS& env, SString name, Type type) {
1129 auto const elem = thisPropRaw(env, name);
1130 if (!elem) return;
1131 auto const adjusted =
1132 adjust_type_for_prop(env.index, *env.ctx.cls, elem->tc, loosen_all(type));
1133 elem->ty |= adjusted;
1137 * Merge something into each this prop. Usually MapFn will be a
1138 * predicate that returns TBottom when some condition doesn't hold.
1140 * The types given to the map function are the raw tracked types
1141 * (i.e. could be TRef or TUninit).
1143 template<class MapFn>
1144 void mergeEachThisPropRaw(ISS& env, MapFn fn) {
1145 for (auto& kv : env.collect.props.privateProperties()) {
1146 mergeThisProp(env, kv.first, fn(kv.second.ty));
1150 void unsetThisProp(ISS& env, SString name) {
1151 mergeThisProp(env, name, TUninit);
1154 void unsetUnknownThisProp(ISS& env) {
1155 for (auto& kv : env.collect.props.privateProperties()) {
1156 mergeThisProp(env, kv.first, TUninit);
1160 void boxThisProp(ISS& env, SString name) {
1161 auto const elem = thisPropRaw(env, name);
1162 if (!elem) return;
1163 elem->ty |=
1164 adjust_type_for_prop(env.index, *env.ctx.cls, elem->tc, TRef);
1168 * Forces non-ref property types up to TCell. This is used when an
1169 * operation affects an unknown property on $this, but can't change
1170 * its reffiness. This could only do TInitCell, but we're just
1171 * going to gradually get rid of the callsites of this.
1173 void loseNonRefThisPropTypes(ISS& env) {
1174 FTRACE(2, " loseNonRefThisPropTypes\n");
1175 for (auto& kv : env.collect.props.privateProperties()) {
1176 if (kv.second.ty.subtypeOf(BCell)) {
1177 kv.second.ty |=
1178 adjust_type_for_prop(env.index, *env.ctx.cls, kv.second.tc, TCell);
1183 //////////////////////////////////////////////////////////////////////
1184 // properties on self::
1186 // Similar to $this properties above, we only track control-flow
1187 // insensitive types for these.
1189 PropStateElem<>* selfPropRaw(ISS& env, SString name) {
1190 auto& privateStatics = env.collect.props.privateStatics();
1191 auto it = privateStatics.find(name);
1192 if (it != end(privateStatics)) {
1193 return &it->second;
1195 return nullptr;
1198 void killSelfProps(ISS& env) {
1199 FTRACE(2, " killSelfProps\n");
1200 for (auto& kv : env.collect.props.privateStatics()) {
1201 kv.second.ty |=
1202 adjust_type_for_prop(env.index, *env.ctx.cls, kv.second.tc, TGen);
1206 void killSelfProp(ISS& env, SString name) {
1207 FTRACE(2, " killSelfProp {}\n", name->data());
1208 if (auto elem = selfPropRaw(env, name)) {
1209 elem->ty |= adjust_type_for_prop(env.index, *env.ctx.cls, elem->tc, TGen);
1213 // TODO(#3684136): self::$foo can't actually ever be uninit. Right
1214 // now uninits may find their way into here though.
1215 folly::Optional<Type> selfPropAsCell(ISS& env, SString name) {
1216 auto const elem = selfPropRaw(env, name);
1217 if (!elem) return folly::none;
1218 return to_cell(elem->ty);
1222 * Merges a type into tracked static properties on self, in the
1223 * sense of tvSet (i.e. setting the inner type on possible refs).
1225 void mergeSelfProp(ISS& env, SString name, Type type) {
1226 auto const elem = selfPropRaw(env, name);
1227 if (!elem) return;
1228 // Context types might escape to other contexts here.
1229 auto const adjusted =
1230 adjust_type_for_prop(env.index, *env.ctx.cls, elem->tc, unctx(type));
1231 elem->ty |= adjusted;
1235 * Similar to mergeEachThisPropRaw, but for self props.
1237 template<class MapFn>
1238 void mergeEachSelfPropRaw(ISS& env, MapFn fn) {
1239 for (auto& kv : env.collect.props.privateStatics()) {
1240 mergeSelfProp(env, kv.first, fn(kv.second.ty));
1244 void boxSelfProp(ISS& env, SString name) {
1245 mergeSelfProp(env, name, TRef);
1249 * Forces non-ref static properties up to TCell. This is used when
1250 * an operation affects an unknown static property on self::, but
1251 * can't change its reffiness.
1253 * This could only do TInitCell because static properties can never
1254 * be unset. We're just going to get rid of the callers of this
1255 * function over a few more changes, though.
1257 void loseNonRefSelfPropTypes(ISS& env) {
1258 FTRACE(2, " loseNonRefSelfPropTypes\n");
1259 for (auto& kv : env.collect.props.privateStatics()) {
1260 if (kv.second.ty.subtypeOf(BInitCell)) {
1261 kv.second.ty |=
1262 adjust_type_for_prop(env.index, *env.ctx.cls, kv.second.tc, TCell);
1267 bool isMaybeLateInitSelfProp(ISS& env, SString name) {
1268 if (!env.ctx.cls) return false;
1269 for (auto const& prop : env.ctx.cls->properties) {
1270 if (prop.name == name &&
1271 (prop.attrs & AttrPrivate) &&
1272 (prop.attrs & AttrStatic)
1274 return prop.attrs & AttrLateInit;
1277 // Prop either doesn't exist, or is on an unflattened trait. Be conservative.
1278 return true;
1281 //////////////////////////////////////////////////////////////////////
1282 // misc
1285 * Check whether the class given by the type might raise when initialized.
1287 bool classInitMightRaise(ISS& env, const Type& cls) {
1288 if (RuntimeOption::EvalCheckPropTypeHints <= 0) return false;
1289 if (!is_specialized_cls(cls)) return true;
1290 auto const dcls = dcls_of(cls);
1291 if (dcls.type != DCls::Exact) return true;
1292 return env.index.lookup_class_init_might_raise(env.ctx, dcls.cls);
1295 void badPropInitialValue(ISS& env) {
1296 FTRACE(2, " badPropInitialValue\n");
1297 env.collect.props.setBadPropInitialValues();
1300 #ifdef __clang__
1301 #pragma clang diagnostic pop
1302 #endif
1305 //////////////////////////////////////////////////////////////////////
1309 #endif