Deshim VirtualExecutor in folly
[hiphop-php.git] / hphp / hhbbc / interp-internal.h
blob5ab82e3cc2c8675ae4501bb9b354508bb53234ea
/*
   +----------------------------------------------------------------------+
   | HipHop for PHP                                                       |
   +----------------------------------------------------------------------+
   | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com)  |
   +----------------------------------------------------------------------+
   | This source file is subject to version 3.01 of the PHP license,      |
   | that is bundled with this package in the file LICENSE, and is        |
   | available through the world-wide-web at the following url:           |
   | http://www.php.net/license/3_01.txt                                  |
   | If you did not receive a copy of the PHP license and are unable to   |
   | obtain it through the world-wide-web, please send a note to          |
   | license@php.net so we can mail you a copy immediately.               |
   +----------------------------------------------------------------------+
*/
#pragma once

#include <algorithm>
#include <utility>

#include "hphp/runtime/base/array-provenance.h"
#include "hphp/runtime/base/type-string.h"

#include "hphp/hhbbc/analyze.h"
#include "hphp/hhbbc/bc.h"
#include "hphp/hhbbc/class-util.h"
#include "hphp/hhbbc/context.h"
#include "hphp/hhbbc/func-util.h"
#include "hphp/hhbbc/index.h"
#include "hphp/hhbbc/interp-state.h"
#include "hphp/hhbbc/interp.h"
#include "hphp/hhbbc/options.h"
#include "hphp/hhbbc/representation.h"
#include "hphp/hhbbc/type-structure.h"
#include "hphp/hhbbc/type-system.h"
36 namespace HPHP::HHBBC {
38 struct LocalRange;
40 //////////////////////////////////////////////////////////////////////
42 TRACE_SET_MOD(hhbbc);
44 //////////////////////////////////////////////////////////////////////
// Bookkeeping for a pending AddElem bytecode we are tracking while
// optimizing a block.
struct TrackedElemInfo {
  TrackedElemInfo(uint32_t d, uint32_t i) : depth{d}, idx{i} {}
  // stack depth of the AddElem we're tracking
  uint32_t depth;
  // bytecode index of the previous AddElem
  uint32_t idx;
};
55 * Interpreter Step State.
57 * This struct gives interpreter functions access to shared state. It's not in
58 * interp-state.h because it's part of the internal implementation of
59 * interpreter routines. The publicized state as results of interpretation are
60 * in that header and interp.h.
62 struct ISS {
63 ISS(Interp& bag, PropagateFn propagate)
64 : index(bag.index)
65 , ctx(bag.ctx)
66 , collect(bag.collect)
67 , bid(bag.bid)
68 , blk(*bag.blk)
69 , state(bag.state)
70 , undo(bag.undo)
71 , propagate(std::move(propagate))
72 , analyzeDepth(0)
75 const IIndex& index;
76 const AnalysisContext ctx;
77 CollectedInfo& collect;
78 const BlockId bid;
79 const php::Block& blk;
80 State& state;
81 StateMutationUndo* undo;
82 StepFlags flags;
83 PropagateFn propagate;
85 Optional<State> stateBefore;
87 // If we're inside an impl (as opposed to reduce) this will be > 0
88 uint32_t analyzeDepth{0};
89 int32_t srcLoc{-1};
90 bool reprocess{false};
91 // As we process the block, we keep track of the optimized bytecode
92 // stream. We expect that in steady state, there will be no changes;
93 // so as we process the block, if the initial bytecodes are the
94 // same, we just keep track of how many are the same in
95 // unchangedBcs. Once things diverge, the replacements are stored in
96 // replacedBcs.
98 // number of unchanged bcs to take from blk.hhbcs
99 uint32_t unchangedBcs{0};
100 // new bytecodes
101 BytecodeVec replacedBcs;
102 CompactVector<TrackedElemInfo> trackedElems;
105 void impl_vec(ISS& env, bool reduce, BytecodeVec&& bcs);
106 void rewind(ISS& env, const Bytecode&);
107 void rewind(ISS& env, int);
108 const Bytecode* last_op(ISS& env, int idx = 0);
109 const Bytecode* op_from_slot(ISS& env, int, int prev = 0);
111 //////////////////////////////////////////////////////////////////////
113 namespace interp_step {
116 * An interp_step::in(ISS&, const bc::op&) function exists for every
117 * bytecode. Most are defined in interp.cpp, but some (like FCallBuiltin and
118 * member instructions) are defined elsewhere.
120 #define O(opcode, ...) void in(ISS&, const bc::opcode&);
121 OPCODES
122 #undef O
127 * Find a contiguous local range which is equivalent to the given range and has
128 * a smaller starting id. Only returns the equivalent first local because the
129 * size doesn't change.
131 LocalId equivLocalRange(ISS& env, const LocalRange& range);
133 namespace {
135 Type peekLocRaw(ISS& env, LocalId l);
136 bool peekLocCouldBeUninit(ISS& env, LocalId l);
138 #ifdef __clang__
139 #pragma clang diagnostic push
140 #pragma clang diagnostic ignored "-Wunused-function"
141 #endif
144 * impl(...)
146 * Utility for chaining one bytecode implementation to a series of a few
147 * others. Use reduce() if you also want to enable strength reduction
148 * (i.e. the bytecode can be replaced by some other bytecode as an
149 * optimization).
151 * The chained-to bytecodes should not take branches. For impl, the
152 * canConstProp flag will only be set if it was set for all the
153 * bytecodes.
156 template<class... Ts>
157 void impl(ISS& env, Ts&&... ts) {
158 impl_vec(env, false, { std::forward<Ts>(ts)... });
162 * Reduce means that (given some situation in the execution state),
163 * a given bytecode could be replaced by some other bytecode
164 * sequence. Ensure that if you call reduce(), it is before any
165 * state-affecting operations (like popC()).
167 void reduce(ISS& env, BytecodeVec&& bcs) {
168 impl_vec(env, true, std::move(bcs));
171 template<class... Bytecodes>
172 void reduce(ISS& env, Bytecodes&&... hhbc) {
173 reduce(env, { std::forward<Bytecodes>(hhbc)... });
176 bool will_reduce(ISS& env) { return env.analyzeDepth == 0; }
178 void nothrow(ISS& env) {
179 FTRACE(2, " nothrow\n");
180 env.flags.wasPEI = false;
183 void unreachable(ISS& env) {
184 FTRACE(2, " unreachable\n");
185 env.state.unreachable = true;
188 void constprop(ISS& env) {
189 FTRACE(2, " constprop\n");
190 env.flags.canConstProp = true;
193 void effect_free(ISS& env) {
194 FTRACE(2, " effect_free\n");
195 nothrow(env);
196 env.flags.effectFree = true;
200 * Mark the current block as unconditionally jumping to target. The
201 * caller must arrange for env.state to reflect the state that needs
202 * to be propagated to the target, but it should not propagate that
203 * state.
205 void jmp_setdest(ISS& env, BlockId target) {
206 env.flags.jmpDest = target;
208 void jmp_nevertaken(ISS& env) {
209 jmp_setdest(env, env.blk.fallthrough);
212 void readUnknownParams(ISS& env) {
213 for (LocalId p = 0; p < env.ctx.func->params.size(); p++) {
214 if (p == env.flags.mayReadLocalSet.size()) break;
215 env.flags.mayReadLocalSet.set(p);
217 env.flags.usedParams.set();
220 void readUnknownLocals(ISS& env) {
221 env.flags.mayReadLocalSet.set();
222 env.flags.usedParams.set();
225 void readAllLocals(ISS& env) {
226 env.flags.mayReadLocalSet.set();
227 env.flags.usedParams.set();
230 void doRet(ISS& env, Type t, bool hasEffects) {
231 assertx(env.state.stack.empty());
232 env.flags.mayReadLocalSet.set();
233 env.flags.retParam = NoLocalId;
234 env.flags.returned = t;
235 if (!hasEffects) effect_free(env);
238 void hasInvariantIterBase(ISS& env) {
239 env.collect.hasInvariantIterBase = true;
242 //////////////////////////////////////////////////////////////////////
243 // eval stack
245 Type popT(ISS& env) {
246 assertx(!env.state.stack.empty());
247 auto const ret = env.state.stack.back().type;
248 FTRACE(2, " pop: {}\n", show(ret));
249 assertx(ret.subtypeOf(BCell));
250 env.state.stack.pop_elem();
251 if (env.undo) env.undo->onPop(ret);
252 return ret;
255 Type popC(ISS& env) {
256 auto const v = popT(env);
257 assertx(v.subtypeOf(BInitCell));
258 return v;
261 Type popU(ISS& env) {
262 auto const v = popT(env);
263 assertx(v.subtypeOf(BUninit));
264 return v;
267 Type popCU(ISS& env) {
268 auto const v = popT(env);
269 assertx(v.subtypeOf(BCell));
270 return v;
273 Type popCV(ISS& env) { return popT(env); }
275 void discard(ISS& env, int n) {
276 for (auto i = 0; i < n; ++i) popT(env);
279 const Type& topT(ISS& env, uint32_t idx = 0) {
280 assertx(idx < env.state.stack.size());
281 return env.state.stack[env.state.stack.size() - idx - 1].type;
284 const Type& topC(ISS& env, uint32_t i = 0) {
285 assertx(topT(env, i).subtypeOf(BInitCell));
286 return topT(env, i);
289 const Type& topCV(ISS& env, uint32_t i = 0) { return topT(env, i); }
291 void push(ISS& env, Type t) {
292 FTRACE(2, " push: {}\n", show(t));
293 env.state.stack.push_elem(std::move(t), NoLocalId,
294 env.unchangedBcs + env.replacedBcs.size());
295 if (env.undo) env.undo->onPush();
298 void push(ISS& env, Type t, LocalId l) {
299 if (l == NoLocalId) return push(env, t);
300 if (l <= MaxLocalId && is_volatile_local(env.ctx.func, l)) {
301 return push(env, t);
303 FTRACE(2, " push: {} (={})\n", show(t), local_string(*env.ctx.func, l));
304 env.state.stack.push_elem(std::move(t), l,
305 env.unchangedBcs + env.replacedBcs.size());
306 if (env.undo) env.undo->onPush();
309 //////////////////////////////////////////////////////////////////////
310 // $this
312 void setThisAvailable(ISS& env) {
313 FTRACE(2, " setThisAvailable\n");
314 if (!env.ctx.cls || is_unused_trait(*env.ctx.cls) ||
315 (env.ctx.func->attrs & AttrStatic)) {
316 return unreachable(env);
318 if (!env.state.thisType.couldBe(BObj) ||
319 !env.state.thisType.subtypeOf(BOptObj)) {
320 return unreachable(env);
322 if (env.state.thisType.couldBe(BInitNull)) {
323 env.state.thisType = unopt(std::move(env.state.thisType));
327 bool thisAvailable(ISS& env) {
328 return
329 env.state.thisType.subtypeOf(BObj) &&
330 !env.state.thisType.is(BBottom);
333 Type thisType(ISS& env) {
334 return env.state.thisType;
337 Type thisTypeNonNull(ISS& env) {
338 if (!env.state.thisType.couldBe(TObj)) return TBottom;
339 if (env.state.thisType.couldBe(BInitNull)) return unopt(env.state.thisType);
340 return env.state.thisType;
343 //////////////////////////////////////////////////////////////////////
344 // self
346 inline Optional<Type> selfCls(ISS& env) {
347 return selfCls(env.index, env.ctx);
349 inline Optional<Type> selfClsExact(ISS& env) {
350 return selfClsExact(env.index, env.ctx);
353 inline Optional<Type> parentCls(ISS& env) {
354 return parentCls(env.index, env.ctx);
356 inline Optional<Type> parentClsExact(ISS& env) {
357 return parentClsExact(env.index, env.ctx);
360 // Like selfClsExact, but if the func is non-static, use an object
361 // type instead.
362 inline Type selfExact(ISS& env) {
363 assertx(env.ctx.func);
364 auto ty = selfClsExact(env);
365 if (env.ctx.func->attrs & AttrStatic) {
366 return ty ? *ty : TCls;
368 return ty ? toobj(*ty) : TObj;
371 //////////////////////////////////////////////////////////////////////
372 // class constants
374 inline ClsConstLookupResult lookupClsConstant(const IIndex& index,
375 const Context& ctx,
376 const CollectedInfo* collect,
377 const Type& cls,
378 const Type& name) {
379 // Check if the constant's class is definitely the current context.
380 auto const isClsCtx = [&] {
381 if (!collect || !collect->clsCns) return false;
382 if (!is_specialized_cls(cls)) return false;
383 auto const& dcls = dcls_of(cls);
384 if (!dcls.isExact()) return false;
385 auto const self = selfClsExact(index, ctx);
386 if (!self || !is_specialized_cls(*self)) return false;
387 return dcls.cls().same(dcls_of(*self).cls());
388 }();
390 if (isClsCtx && is_specialized_string(name)) {
391 auto lookup = collect->clsCns->lookup(sval_of(name));
392 if (lookup.found == TriBool::Yes) return lookup;
394 return index.lookup_class_constant(ctx, cls, name);
397 inline ClsConstLookupResult lookupClsConstant(ISS& env,
398 const Type& cls,
399 const Type& name) {
400 return lookupClsConstant(env.index, env.ctx, &env.collect, cls, name);
403 //////////////////////////////////////////////////////////////////////
404 // folding
406 const StaticString s___NEVER_INLINE("__NEVER_INLINE");
408 bool shouldAttemptToFold(ISS& env, const php::Func* func, const FCallArgs& fca,
409 Type context, bool maybeDynamic) {
410 if (!func ||
411 fca.hasUnpack() ||
412 fca.hasGenerics() ||
413 fca.numRets() != 1 ||
414 !will_reduce(env) ||
415 any(env.collect.opts & CollectionOpts::Speculating) ||
416 any(env.collect.opts & CollectionOpts::Optimizing)) {
417 return false;
420 if (maybeDynamic && (
421 (Cfg::Eval::NoticeOnBuiltinDynamicCalls &&
422 (func->attrs & AttrBuiltin)) ||
423 (dyn_call_error_level(func) > 0))) {
424 return false;
427 if (func->userAttributes.count(s___NEVER_INLINE.get())) {
428 return false;
431 // Reified functions may have a mismatch of arity or reified generics
432 // so we cannot fold them
433 // TODO(T31677864): Detect the arity mismatch at HHBBC and enable them to
434 // be foldable
435 if (func->isReified) return false;
437 // Coeffect violation may raise warning or throw an exception
438 if (!fca.skipCoeffectsCheck()) return false;
440 // Readonly violation may raise warning or throw an exception
441 if (fca.enforceReadonly() ||
442 fca.enforceMutableReturn() ||
443 fca.enforceReadonlyThis()) {
444 return false;
447 auto const funcUnit = env.index.lookup_func_unit(*func);
449 // Internal functions may raise module boundary violations
450 if ((func->attrs & AttrInternal) &&
451 env.index.lookup_func_unit(*env.ctx.func)->moduleName !=
452 funcUnit->moduleName) {
453 return false;
456 // Deployment violation may raise raise warning or throw an exception
457 auto const& packageInfo = funcUnit->packageInfo;
458 if (auto const activeDeployment = packageInfo.getActiveDeployment()) {
459 if (!packageInfo.moduleInDeployment(
460 funcUnit->moduleName, *activeDeployment, DeployKind::Hard)) {
461 return false;
465 // We only fold functions when numRets == 1
466 if (func->hasInOutArgs) return false;
468 // Can't fold if we get the wrong amount of arguments
469 if (!check_nargs_in_range(func, fca.numArgs())) return false;
471 // Don't try to fold functions which aren't guaranteed to be accessible at
472 // this call site.
473 if (func->attrs & AttrPrivate) {
474 if (env.ctx.cls != func->cls) return false;
475 } else if (func->attrs & AttrProtected) {
476 assertx(func->cls);
477 if (env.ctx.cls != func->cls) {
478 if (!env.ctx.cls) return false;
479 auto const rcls1 = env.index.resolve_class(env.ctx.cls->name);
480 auto const rcls2 = env.index.resolve_class(func->cls->name);
481 if (!rcls1 || !rcls2) return false;
482 if (!rcls1->exactSubtypeOf(*rcls2, true, true) &&
483 !rcls2->exactSubtypeOf(*rcls1, true, true)) {
484 return false;
489 // Foldable builtins are always worth trying
490 if (func->attrs & AttrIsFoldable) return true;
492 // Any native functions at this point are known to be
493 // non-foldable, but other builtins might be, even if they
494 // don't have the __Foldable attribute.
495 if (func->isNative) return false;
497 if (func->params.size()) return true;
499 auto const rfunc = env.index.resolve_func_or_method(*func);
501 // The function has no args. Check if it's effect free and returns
502 // a literal.
503 auto [retTy, effectFree] = env.index.lookup_return_type(
504 env.ctx,
505 &env.collect.methods,
506 rfunc,
507 Dep::InlineDepthLimit
509 auto const isScalar = is_scalar(retTy);
510 if (effectFree && isScalar) return true;
512 if (!(func->attrs & AttrStatic) && func->cls) {
513 // May be worth trying to fold if the method returns a scalar,
514 // assuming its only "effect" is checking for existence of $this.
515 if (isScalar) return true;
517 // The method may be foldable if we know more about $this.
518 if (is_specialized_obj(context)) {
519 auto const& dobj = dobj_of(context);
520 if (dobj.isExact() ||
521 (dobj.isSub() && dobj.cls().cls() != func->cls) ||
522 (dobj.isIsectAndExact() &&
523 dobj.isectAndExact().first.cls() != func->cls)) {
524 return true;
529 return false;
532 //////////////////////////////////////////////////////////////////////
533 // locals
535 void mayReadLocal(ISS& env, uint32_t id, bool isUse = true) {
536 if (id < env.flags.mayReadLocalSet.size()) {
537 env.flags.mayReadLocalSet.set(id);
539 if (isUse && id < env.flags.usedParams.size()) {
540 env.flags.usedParams.set(id);
544 // Find a local which is equivalent to the given local
545 LocalId findLocEquiv(State& state, const php::Func* func, LocalId l) {
546 if (l >= state.equivLocals.size()) return NoLocalId;
547 assertx(state.equivLocals[l] == NoLocalId || !is_volatile_local(func, l));
548 return state.equivLocals[l];
550 LocalId findLocEquiv(ISS& env, LocalId l) {
551 return findLocEquiv(env.state, env.ctx.func, l);
554 // Given an iterator base local, find an equivalent local that is possibly
555 // better. LIterInit/LIterNext often uses an unnamed local that came from
556 // a regular local, which would be a better choice if that local was not
557 // manipulated in an unsafe way. Regular locals have lower ids.
558 LocalId findIterBaseLoc(State& state, const php::Func* func, LocalId l) {
559 assertx(l != NoLocalId);
560 auto const locEquiv = findLocEquiv(state, func, l);
561 if (locEquiv == NoLocalId) return l;
562 return std::min(l, locEquiv);
564 LocalId findIterBaseLoc(ISS& env, LocalId l) {
565 return findIterBaseLoc(env.state, env.ctx.func, l);
568 // Find an equivalent local with minimum id
569 LocalId findMinLocEquiv(ISS& env, LocalId l, bool allowUninit) {
570 if (l >= env.state.equivLocals.size() ||
571 env.state.equivLocals[l] == NoLocalId) {
572 return NoLocalId;
575 auto min = l;
576 auto cur = env.state.equivLocals[l];
577 while (cur != l) {
578 if (cur < min && (allowUninit || !peekLocCouldBeUninit(env, cur))) {
579 min = cur;
581 cur = env.state.equivLocals[cur];
583 return min != l ? min : NoLocalId;
586 // Determine whether two locals are equivalent
587 bool locsAreEquiv(ISS& env, LocalId l1, LocalId l2) {
588 if (l1 >= env.state.equivLocals.size() ||
589 l2 >= env.state.equivLocals.size() ||
590 env.state.equivLocals[l1] == NoLocalId ||
591 env.state.equivLocals[l2] == NoLocalId) {
592 return false;
595 auto l = l1;
596 while ((l = env.state.equivLocals[l]) != l1) {
597 if (l == l2) return true;
599 return false;
602 bool locIsThis(ISS& env, LocalId l) {
603 assertx(l <= MaxLocalId);
604 return l == env.state.thisLoc ||
605 (env.state.thisLoc <= MaxLocalId &&
606 locsAreEquiv(env, l, env.state.thisLoc));
609 void killLocEquiv(State& state, LocalId l) {
610 if (l >= state.equivLocals.size()) return;
611 if (state.equivLocals[l] == NoLocalId) return;
612 auto loc = l;
613 do {
614 loc = state.equivLocals[loc];
615 } while (state.equivLocals[loc] != l);
616 assertx(loc != l);
617 if (state.equivLocals[l] == loc) {
618 state.equivLocals[loc] = NoLocalId;
619 } else {
620 state.equivLocals[loc] = state.equivLocals[l];
622 state.equivLocals[l] = NoLocalId;
625 void killLocEquiv(ISS& env, LocalId l) {
626 killLocEquiv(env.state, l);
629 void killAllLocEquiv(ISS& env) {
630 env.state.equivLocals.clear();
633 // Add from to to's equivalency set.
634 void addLocEquiv(ISS& env,
635 LocalId from,
636 LocalId to) {
637 always_assert(peekLocRaw(env, from).subtypeOf(BCell));
638 always_assert(!is_volatile_local(env.ctx.func, to));
639 always_assert(from != to && findLocEquiv(env, from) == NoLocalId);
641 auto m = std::max(to, from);
642 if (env.state.equivLocals.size() <= m) {
643 env.state.equivLocals.resize(m + 1, NoLocalId);
646 if (env.state.equivLocals[to] == NoLocalId) {
647 env.state.equivLocals[from] = to;
648 env.state.equivLocals[to] = from;
649 } else {
650 env.state.equivLocals[from] = env.state.equivLocals[to];
651 env.state.equivLocals[to] = from;
655 // Obtain a local which is equivalent to the given stack value
656 LocalId topStkLocal(const State& state, uint32_t idx = 0) {
657 assertx(idx < state.stack.size());
658 auto const equiv = state.stack[state.stack.size() - idx - 1].equivLoc;
659 return equiv > MaxLocalId ? NoLocalId : equiv;
661 LocalId topStkLocal(ISS& env, uint32_t idx = 0) {
662 return topStkLocal(env.state, idx);
665 // Obtain a location which is equivalent to the given stack value
666 LocalId topStkEquiv(ISS& env, uint32_t idx = 0) {
667 assertx(idx < env.state.stack.size());
668 return env.state.stack[env.state.stack.size() - idx - 1].equivLoc;
671 void setStkLocal(ISS& env, LocalId loc, uint32_t idx = 0) {
672 assertx(loc <= MaxLocalId);
673 always_assert(peekLocRaw(env, loc).subtypeOf(BCell));
674 auto const equiv = [&] {
675 while (true) {
676 auto const e = topStkEquiv(env, idx);
677 if (e != StackDupId) return e;
678 idx++;
680 }();
682 if (equiv <= MaxLocalId) {
683 if (loc == equiv || locsAreEquiv(env, loc, equiv)) return;
684 addLocEquiv(env, loc, equiv);
685 return;
687 env.state.stack[env.state.stack.size() - idx - 1].equivLoc = loc;
690 void killThisLoc(ISS& env, LocalId l) {
691 if (l != NoLocalId ?
692 env.state.thisLoc == l : env.state.thisLoc != NoLocalId) {
693 FTRACE(2, "Killing thisLoc: {}\n", env.state.thisLoc);
694 env.state.thisLoc = NoLocalId;
698 // Kill all equivalencies involving the given local to stack values
699 void killStkEquiv(ISS& env, LocalId l) {
700 for (auto& e : env.state.stack) {
701 if (e.equivLoc != l) continue;
702 e.equivLoc = findLocEquiv(env, l);
703 assertx(e.equivLoc != l);
707 void killAllStkEquiv(ISS& env) {
708 for (auto& e : env.state.stack) {
709 if (e.equivLoc <= MaxLocalId) e.equivLoc = NoLocalId;
713 void killIterEquivs(ISS& env, LocalId l, LocalId key = NoLocalId) {
714 for (auto& i : env.state.iters) {
715 match<void>(
717 [] (DeadIter) {},
718 [&] (LiveIter& iter) {
719 if (iter.keyLocal == l) iter.keyLocal = NoLocalId;
720 if (iter.baseLocal == l) {
721 iter.baseUpdated = true;
722 if (key == NoLocalId || key != iter.keyLocal) {
723 iter.baseLocal = NoLocalId;
731 void killAllIterEquivs(ISS& env) {
732 for (auto& i : env.state.iters) {
733 match<void>(
735 [] (DeadIter) {},
736 [] (LiveIter& iter) {
737 iter.baseUpdated = true;
738 iter.baseLocal = NoLocalId;
739 iter.keyLocal = NoLocalId;
745 void setIterKey(ISS& env, IterId id, LocalId key) {
746 match<void>(
747 env.state.iters[id],
748 [] (DeadIter) {},
749 [&] (LiveIter& iter) { iter.keyLocal = key; }
753 Type peekLocRaw(ISS& env, LocalId l) {
754 auto ret = env.state.locals[l];
755 if (is_volatile_local(env.ctx.func, l)) {
756 always_assert_flog(ret == TCell, "volatile local was not TCell");
758 return ret;
761 Type locRaw(ISS& env, LocalId l) {
762 mayReadLocal(env, l);
763 return peekLocRaw(env, l);
766 void setLocRaw(ISS& env, LocalId l, Type t) {
767 mayReadLocal(env, l);
768 killStkEquiv(env, l);
769 killLocEquiv(env, l);
770 killIterEquivs(env, l);
771 killThisLoc(env, l);
772 if (is_volatile_local(env.ctx.func, l)) {
773 auto current = env.state.locals[l];
774 always_assert_flog(current == TCell, "volatile local was not TCell");
775 return;
777 if (env.undo) env.undo->onLocalWrite(l, std::move(env.state.locals[l]));
778 env.state.locals[l] = std::move(t);
781 // Read a local type in the sense of CGetL. (TUninits turn into
782 // TInitNull)
783 Type locAsCell(ISS& env, LocalId l) {
784 return to_cell(locRaw(env, l));
787 bool peekLocCouldBeUninit(ISS& env, LocalId l) {
788 return peekLocRaw(env, l).couldBe(BUninit);
791 bool locCouldBeUninit(ISS& env, LocalId l) {
792 return locRaw(env, l).couldBe(BUninit);
796 * Update the known type of a local, based on assertions (e.g. IsType/JmpCC),
797 * rather than an actual modification to the local.
799 void refineLocHelper(ISS& env, LocalId l, Type t) {
800 auto v = peekLocRaw(env, l);
801 assertx(v.subtypeOf(BCell));
802 if (!is_volatile_local(env.ctx.func, l)) {
803 if (env.undo) env.undo->onLocalWrite(l, std::move(env.state.locals[l]));
804 env.state.locals[l] = std::move(t);
809 * Refine all locals in an equivalence class using fun. Returns false if refined
810 * local is unreachable.
812 template<typename F>
813 bool refineLocation(ISS& env, LocalId l, F fun) {
814 bool ok = true;
815 auto refine = [&] (Type t) {
816 always_assert(t.subtypeOf(BCell));
817 auto i = intersection_of(fun(t), t);
818 if (i.subtypeOf(BBottom)) ok = false;
819 return i;
821 if (l == StackDupId) {
822 auto stkIdx = env.state.stack.size();
823 while (true) {
824 --stkIdx;
825 auto& stk = env.state.stack[stkIdx];
826 if (env.undo) env.undo->onStackWrite(stkIdx, stk.type);
827 stk.type = refine(std::move(stk.type));
828 if (stk.equivLoc != StackDupId) break;
829 assertx(stkIdx > 0);
831 l = env.state.stack[stkIdx].equivLoc;
833 if (l == StackThisId) {
834 if (env.state.thisLoc != NoLocalId) {
835 l = env.state.thisLoc;
837 return ok;
839 if (l == NoLocalId) return ok;
840 assertx(l <= MaxLocalId);
841 auto fixThis = false;
842 auto equiv = findLocEquiv(env, l);
843 if (equiv != NoLocalId) {
844 do {
845 if (equiv == env.state.thisLoc) fixThis = true;
846 refineLocHelper(env, equiv, refine(peekLocRaw(env, equiv)));
847 equiv = findLocEquiv(env, equiv);
848 } while (equiv != l);
850 if (fixThis || l == env.state.thisLoc) {
851 env.state.thisType = refine(env.state.thisType);
853 refineLocHelper(env, l, refine(peekLocRaw(env, l)));
854 return ok;
858 * Refine locals along taken and fallthrough edges.
860 template<typename Taken, typename Fallthrough>
861 void refineLocation(ISS& env, LocalId l,
862 Taken taken, BlockId target, Fallthrough fallthrough) {
863 auto state = env.state;
864 auto const target_reachable = refineLocation(env, l, taken);
865 if (!target_reachable) jmp_nevertaken(env);
866 // swap, so we can restore this state if the branch is always taken.
867 env.state.swap(state);
868 if (!refineLocation(env, l, fallthrough)) { // fallthrough unreachable.
869 jmp_setdest(env, target);
870 env.state.copy_from(std::move(state));
871 } else if (target_reachable) {
872 env.propagate(target, &state);
877 * Set a local type in the sense of tvSet. If the local is boxed or
878 * not known to be not boxed, we can't change the type. May be used
879 * to set locals to types that include Uninit.
881 void setLoc(ISS& env, LocalId l, Type t, LocalId key = NoLocalId) {
882 killStkEquiv(env, l);
883 killLocEquiv(env, l);
884 killIterEquivs(env, l, key);
885 killThisLoc(env, l);
886 mayReadLocal(env, l);
887 refineLocHelper(env, l, std::move(t));
890 LocalId findLocal(ISS& env, SString name) {
891 for (auto& l : env.ctx.func->locals) {
892 if (l.name->same(name)) {
893 mayReadLocal(env, l.id);
894 return l.id;
897 return NoLocalId;
900 void killLocals(ISS& env) {
901 FTRACE(2, " killLocals\n");
902 readUnknownLocals(env);
903 for (size_t l = 0; l < env.state.locals.size(); ++l) {
904 if (env.undo) env.undo->onLocalWrite(l, std::move(env.state.locals[l]));
905 env.state.locals[l] = TCell;
907 killAllLocEquiv(env);
908 killAllStkEquiv(env);
909 killAllIterEquivs(env);
910 killThisLoc(env, NoLocalId);
913 //////////////////////////////////////////////////////////////////////
914 // iterators
916 void setIter(ISS& env, IterId iter, Iter iterState) {
917 env.state.iters[iter] = std::move(iterState);
919 void freeIter(ISS& env, IterId iter) {
920 env.state.iters[iter] = DeadIter {};
923 bool iterIsDead(ISS& env, IterId iter) {
924 return match<bool>(
925 env.state.iters[iter],
926 [] (DeadIter) { return true; },
927 [] (const LiveIter&) { return false; }
931 //////////////////////////////////////////////////////////////////////
932 // properties on $this
935 * Note: we are only tracking control-flow insensitive types for
936 * object properties, because it can be pretty rough to try to track
937 * all cases that could re-enter the VM, run arbitrary code, and
938 * potentially change the type of a property.
940 * Because of this, the various "setter" functions for thisProps
941 * here actually just union the new type into what we already had.
944 Optional<Type> thisPropType(ISS& env, SString name) {
945 if (auto const elem = env.collect.props.readPrivateProp(name)) {
946 return elem->ty;
948 return std::nullopt;
951 bool isMaybeThisPropAttr(ISS& env, SString name, Attr attr) {
952 auto const& raw = env.collect.props.privatePropertiesRaw();
953 auto const it = raw.find(name);
954 // Prop either doesn't exist, or is on an unflattened trait. Be
955 // conservative.
956 if (it == raw.end()) return true;
957 return it->second.attrs & attr;
960 bool isDefinitelyThisPropAttr(ISS& env, SString name, Attr attr) {
961 auto const& raw = env.collect.props.privatePropertiesRaw();
962 auto const it = raw.find(name);
963 // Prop either doesn't exist, or is on an unflattened trait. Be
964 // conservative.
965 if (it == raw.end()) return false;
966 return it->second.attrs & attr;
969 void killThisProps(ISS& env) {
970 FTRACE(2, " killThisProps\n");
971 env.collect.props.mergeInAllPrivateProps(env.index, TCell);
975 * This function returns a type that includes all the possible types
976 * that could result from reading a property $this->name.
978 Optional<Type> thisPropAsCell(ISS& env, SString name) {
979 auto const ty = thisPropType(env, name);
980 if (!ty) return std::nullopt;
981 return to_cell(ty.value());
985 * Merge a type into the tracked property types on $this, in the sense
986 * of tvSet.
988 * Note that all types we see that could go into an object property have to
989 * loosen_all. This is because the object could be serialized and then
990 * deserialized, losing the static-ness of a string or array member, and we
991 * don't guarantee deserialization would preserve a constant value object
992 * property type.
994 void mergeThisProp(ISS& env, SString name, Type type) {
995 env.collect.props.mergeInPrivateProp(
996 env.index,
997 name,
998 loosen_this_prop_for_serialization(*env.ctx.cls, name, std::move(type))
1003 * Merge something into each this prop. Usually MapFn will be a
1004 * predicate that returns TBottom when some condition doesn't hold.
1006 * The types given to the map function are the raw tracked types
1007 * (i.e. could be TUninit).
1009 template<typename MapFn>
1010 void mergeEachThisPropRaw(ISS& env, MapFn fn) {
1011 for (auto const& kv : env.collect.props.privatePropertiesRaw()) {
1012 auto const ty = thisPropType(env, kv.first);
1013 assertx(ty.has_value());
1014 mergeThisProp(env, kv.first, fn(*ty));
1018 void unsetThisProp(ISS& env, SString name) {
1019 mergeThisProp(env, name, TUninit);
1022 void unsetUnknownThisProp(ISS& env) {
1023 env.collect.props.mergeInAllPrivateProps(env.index, TUninit);
1026 //////////////////////////////////////////////////////////////////////
1027 // properties on self::
1029 // Similar to $this properties above, we only track control-flow
1030 // insensitive types for these.
1032 void killPrivateStatics(ISS& env) {
1033 FTRACE(2, " killPrivateStatics\n");
1034 env.collect.props.mergeInAllPrivateStatics(env.index, TInitCell, true, false);
1037 //////////////////////////////////////////////////////////////////////
1038 // misc
1040 inline void propInitialValue(ISS& env,
1041 const php::Prop& prop,
1042 TypedValue val,
1043 bool satisfies,
1044 bool deepInit) {
1045 FTRACE(2, " propInitialValue \"{}\" -> {}{}{}\n",
1046 prop.name, show(from_cell(val)),
1047 satisfies ? " (initial satisfies TC)" : "",
1048 deepInit ? " (deep init)" : "");
1049 env.collect.props.setInitialValue(prop, val, satisfies, deepInit);
1052 inline PropMergeResult mergeStaticProp(ISS& env,
1053 const Type& self,
1054 const Type& name,
1055 const Type& val,
1056 bool checkUB = false,
1057 bool ignoreConst = false,
1058 bool mustBeReadOnly = false) {
1059 FTRACE(2, " mergeStaticProp {}::{} -> {}\n",
1060 show(self), show(name), show(val));
1061 return env.index.merge_static_type(
1062 env.ctx,
1063 env.collect.publicSPropMutations,
1064 env.collect.props,
1065 self,
1066 name,
1067 val,
1068 checkUB,
1069 ignoreConst,
1070 mustBeReadOnly
1074 inline Index::ReturnType memoGet(ISS& env) {
1075 env.collect.allMemoGets.emplace(env.bid);
1076 return env.collect.allMemoSets;
1079 inline void memoSet(ISS& env, Type t, bool effectFree) {
1080 auto reflow = false;
1082 t |= env.collect.allMemoSets.t;
1083 if (env.collect.allMemoSets.t.strictSubtypeOf(t)) {
1084 env.collect.allMemoSets.t = std::move(t);
1085 reflow = true;
1087 if (!effectFree && env.collect.allMemoSets.effectFree) {
1088 env.collect.allMemoSets.effectFree = false;
1089 reflow = true;
1091 if (reflow) {
1092 for (auto const bid : env.collect.allMemoGets) {
1093 env.propagate(bid, nullptr);
1098 //////////////////////////////////////////////////////////////////////
1100 #ifdef __clang__
1101 #pragma clang diagnostic pop
1102 #endif
1105 //////////////////////////////////////////////////////////////////////