2 +----------------------------------------------------------------------+
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
20 #include <folly/Optional.h>
22 #include "hphp/runtime/base/type-string.h"
23 #include "hphp/runtime/base/array-provenance.h"
25 #include "hphp/hhbbc/bc.h"
26 #include "hphp/hhbbc/class-util.h"
27 #include "hphp/hhbbc/context.h"
28 #include "hphp/hhbbc/func-util.h"
29 #include "hphp/hhbbc/interp-state.h"
30 #include "hphp/hhbbc/interp.h"
31 #include "hphp/hhbbc/options.h"
32 #include "hphp/hhbbc/representation.h"
33 #include "hphp/hhbbc/type-system.h"
35 namespace HPHP
{ namespace HHBBC
{
39 //////////////////////////////////////////////////////////////////////
43 //////////////////////////////////////////////////////////////////////
// NOTE(review): this file is line-mangled — original statements are split
// across physical lines and the leading numbers are fused-in original line
// numbers. Comments only below; no code tokens changed.
//
// Bookkeeping for an AddElem whose result is still live on the eval stack:
// pairs a stack depth with the bytecode index that produced it.
// NOTE(review): the member declarations the two trailing comments describe
// (original lines 48/50, presumably `uint32_t depth;` / `uint32_t idx;`)
// are missing from this view — confirm against the unmangled source.
45 struct TrackedElemInfo
{
46 TrackedElemInfo(uint32_t d
, uint32_t i
) : depth
{d
}, idx
{i
} {}
47 // stack depth of the AddElem we're tracking
49 // bytecode index of the previous AddElem
// Documentation block for ISS (the surrounding /* ... */ delimiters are
// missing from this mangled view).
54 * Interpreter Step State.
56 * This struct gives interpreter functions access to shared state. It's not in
57 * interp-state.h because it's part of the internal implementation of
58 * interpreter routines. The publicized state as results of interpretation are
59 * in that header and interp.h.
// NOTE(review): the `struct ISS {` opener (original line ~61) and several
// constructor initializers/members (original lines 64-70, 73-75, 78, 80, 82,
// 85, 97-98, 101, 104-105) are absent from this view; only fragments remain.
// Constructor: adopts the Interp "bag" plus a propagate callback.
62 explicit ISS(Interp
& bag
,
63 PropagateFn propagate
)
// Reference to the collected analysis results held by the Interp bag.
66 , collect(bag
.collect
)
71 , propagate(propagate
)
// Start at analyzeDepth 1 when strength reduction is disabled, so
// will_reduce() is false and reduce() degrades to impl().
72 , analyzeDepth(options
.StrengthReduce
? 0 : 1)
// Analysis context (unit/class/func) for the function being interpreted.
76 const AnalysisContext ctx
;
77 CollectedInfo
& collect
;
// The block currently being stepped through.
79 const php::Block
& blk
;
// Optional undo log for speculative interpretation; null when not recording.
81 StateMutationUndo
* undo
;
// Callback used to propagate state to successor blocks.
83 PropagateFn propagate
;
84 bool recordUsedParams
{true};
// Snapshot of the state before the current step, when one is kept.
86 folly::Optional
<State
> stateBefore
;
88 // If we're inside an impl (as opposed to reduce) this will be > 0
89 uint32_t analyzeDepth
{0};
// Set when the block must be re-interpreted after this pass.
91 bool reprocess
{false};
92 // As we process the block, we keep track of the optimized bytecode
93 // stream. We expect that in steady state, there will be no changes;
94 // so as we process the block, if the initial bytecodes are the
95 // same, we just keep track of how many are the same in
96 // unchangedBcs. Once things diverge, the replacements are stored in
99 // number of unchanged bcs to take from blk.hhbcs
100 uint32_t unchangedBcs
{0};
// Replacement bytecodes for everything after the unchanged prefix.
102 BytecodeVec replacedBcs
;
// AddElem results still on the stack that we may be able to merge.
103 CompactVector
<TrackedElemInfo
> trackedElems
;
// Forward declarations of helpers defined elsewhere (interp.cpp et al.).
// impl_vec: run a replacement bytecode sequence; `reduce` selects whether
// this counts as a strength reduction or a plain impl chain.
106 void impl_vec(ISS
& env
, bool reduce
, BytecodeVec
&& bcs
);
// rewind: back out the last bytecode (or the last `n`) from the
// optimized stream.
107 void rewind(ISS
& env
, const Bytecode
&);
108 void rewind(ISS
& env
, int);
// last_op: peek at the idx-th most recent emitted bytecode, or nullptr.
109 const Bytecode
* last_op(ISS
& env
, int idx
= 0);
// op_from_slot: find the bytecode that produced a given stack slot.
110 const Bytecode
* op_from_slot(ISS
& env
, int, int prev
= 0);
// resolveTSStatically: try to resolve a type-structure array at
// analysis time; returns nullptr-ish on failure (see definition).
111 ArrayData
* resolveTSStatically(ISS
& env
, SArray
, const php::Class
*);
113 //////////////////////////////////////////////////////////////////////
115 namespace interp_step
{
118 * An interp_step::in(ISS&, const bc::op&) function exists for every
119 * bytecode. Most are defined in interp.cpp, but some (like FCallBuiltin and
120 * member instructions) are defined elsewhere.
// The X-macro expansion over OPCODES (original line ~123) and the matching
// `#undef O` / namespace close are missing from this mangled view.
122 #define O(opcode, ...) void in(ISS&, const bc::opcode&);
129 * Find a contiguous local range which is equivalent to the given range and has
130 * a smaller starting id. Only returns the equivalent first local because the
131 * size doesn't change.
133 LocalId
equivLocalRange(ISS
& env
, const LocalRange
& range
);
// Read a local's tracked type without marking it as read (see definition
// further down in this file).
137 Type
peekLocRaw(ISS
& env
, LocalId l
);
// True if local l's tracked type could include Uninit.
138 bool peekLocCouldBeUninit(ISS
& env
, LocalId l
);
141 #pragma clang diagnostic push
142 #pragma clang diagnostic ignored "-Wunused-function"
// Doc comment for impl() (the /* ... */ delimiters are missing from this
// mangled view).
148 * Utility for chaining one bytecode implementation to a series of a few
149 * others. Use reduce() if you also want to enable strength reduction
150 * (i.e. the bytecode can be replaced by some other bytecode as an
153 * The chained-to bytecodes should not take branches. For impl, the
154 * canConstProp flag will only be set if it was set for all the
// impl: forward the given bytecodes into impl_vec with reduce=false.
// NOTE(review): closing brace (original line ~161) is missing from this view.
158 template<class... Ts
>
159 void impl(ISS
& env
, Ts
&&... ts
) {
160 impl_vec(env
, false, { std::forward
<Ts
>(ts
)... });
164 * Reduce means that (given some situation in the execution state),
165 * a given bytecode could be replaced by some other bytecode
166 * sequence. Ensure that if you call reduce(), it is before any
167 * state-affecting operations (like popC()).
// reduce (vector form): forward into impl_vec with reduce=true.
169 void reduce(ISS
& env
, BytecodeVec
&& bcs
) {
170 impl_vec(env
, true, std::move(bcs
));
// reduce (variadic form): pack the bytecodes into a BytecodeVec and
// delegate to the vector overload above.
173 template<class... Bytecodes
>
174 void reduce(ISS
& env
, Bytecodes
&&... hhbc
) {
175 reduce(env
, { std::forward
<Bytecodes
>(hhbc
)... });
178 bool will_reduce(ISS
& env
) { return env
.analyzeDepth
== 0; }
// nothrow: record that the current instruction cannot raise (clears the
// "was possibly-exception-throwing instruction" flag).
// NOTE(review): closing braces of these small functions are missing from
// this mangled view (e.g. original line 183).
180 void nothrow(ISS
& env
) {
181 FTRACE(2, " nothrow\n");
182 env
.flags
.wasPEI
= false;
// unreachable: mark the remainder of this block as dead code.
185 void unreachable(ISS
& env
) {
186 FTRACE(2, " unreachable\n");
187 env
.state
.unreachable
= true;
// constprop: the current instruction's result may be constant-propagated.
190 void constprop(ISS
& env
) {
191 FTRACE(2, " constprop\n");
192 env
.flags
.canConstProp
= true;
// effect_free: the current instruction has no observable side effects.
195 void effect_free(ISS
& env
) {
196 FTRACE(2, " effect_free\n");
198 env
.flags
.effectFree
= true;
// Doc comment for jmp_setdest (comment delimiters missing in this view).
202 * Mark the current block as unconditionally jumping to target. The
203 * caller must arrange for env.state to reflect the state that needs
204 * to be propagated to the target, but it should not propagate that
207 void jmp_setdest(ISS
& env
, BlockId target
) {
208 env
.flags
.jmpDest
= target
;
// jmp_nevertaken: the branch is never taken; fall through instead.
210 void jmp_nevertaken(ISS
& env
) {
211 jmp_setdest(env
, env
.blk
.fallthrough
);
// RAII guard: suppresses used-param recording for its lifetime, restoring
// the previous recordUsedParams value on destruction.
// NOTE(review): the member declarations and closing brace are missing from
// this mangled view.
214 struct IgnoreUsedParams
{
215 explicit IgnoreUsedParams(ISS
& env
) :
216 env
{env
}, record
{env
.recordUsedParams
} {
217 env
.recordUsedParams
= false;
220 ~IgnoreUsedParams() {
221 env
.recordUsedParams
= record
;
// readUnknownParams: conservatively mark every parameter as read (bounded
// by the mayReadLocalSet size), and record all params as used.
228 void readUnknownParams(ISS
& env
) {
229 for (LocalId p
= 0; p
< env
.ctx
.func
->params
.size(); p
++) {
230 if (p
== env
.flags
.mayReadLocalSet
.size()) break;
231 env
.flags
.mayReadLocalSet
.set(p
);
233 if (env
.recordUsedParams
) env
.collect
.usedParams
.set();
// readUnknownLocals: conservatively mark every local as possibly read.
236 void readUnknownLocals(ISS
& env
) {
237 env
.flags
.mayReadLocalSet
.set();
238 if (env
.recordUsedParams
) env
.collect
.usedParams
.set();
// readAllLocals: identical effect to readUnknownLocals in this view —
// presumably kept distinct for call-site intent; confirm against the
// unmangled source.
241 void readAllLocals(ISS
& env
) {
242 env
.flags
.mayReadLocalSet
.set();
243 if (env
.recordUsedParams
) env
.collect
.usedParams
.set();
// doRet: record a return of type t. Stack must already be empty. Param
// reads below this point don't count as "uses" (IgnoreUsedParams guard).
// NOTE(review): lines using hasEffects (original ~248-249, 253-255) are
// missing from this view.
246 void doRet(ISS
& env
, Type t
, bool hasEffects
) {
247 IgnoreUsedParams _
{env
};
250 assertx(env
.state
.stack
.empty());
251 env
.flags
.retParam
= NoLocalId
;
252 env
.flags
.returned
= t
;
// hasInvariantIterBase: note that some iterator's base is invariant
// across the loop (collected for the whole function).
258 void hasInvariantIterBase(ISS
& env
) {
259 env
.collect
.hasInvariantIterBase
= true;
262 //////////////////////////////////////////////////////////////////////
// popT: pop the top stack element and return its type; asserts the stack
// is non-empty and the type is a subtype of Cell, and records the pop in
// the undo log when present.
// NOTE(review): the trailing `return ret;` / closing brace (original
// lines ~272-273) are missing from this mangled view.
265 Type
popT(ISS
& env
) {
266 assertx(!env
.state
.stack
.empty());
267 auto const ret
= env
.state
.stack
.back().type
;
268 FTRACE(2, " pop: {}\n", show(ret
));
269 assertx(ret
.subtypeOf(BCell
));
270 env
.state
.stack
.pop_elem();
271 if (env
.undo
) env
.undo
->onPop(ret
);
// popC: pop, asserting the value is an initialized cell.
275 Type
popC(ISS
& env
) {
276 auto const v
= popT(env
);
277 assertx(v
.subtypeOf(BInitCell
));
// popU: pop, asserting the value is Uninit.
281 Type
popU(ISS
& env
) {
282 auto const v
= popT(env
);
283 assertx(v
.subtypeOf(BUninit
));
// popCU: pop, asserting only that the value is a Cell (may be Uninit).
287 Type
popCU(ISS
& env
) {
288 auto const v
= popT(env
);
289 assertx(v
.subtypeOf(BCell
));
293 Type
popCV(ISS
& env
) { return popT(env
); }
// discard: pop and discard the top n stack elements.
295 void discard(ISS
& env
, int n
) {
296 for (auto i
= 0; i
< n
; ++i
) popT(env
);
// topT: peek at the type idx slots down from the top of the stack
// (0 == topmost) without popping.
299 const Type
& topT(ISS
& env
, uint32_t idx
= 0) {
300 assertx(idx
< env
.state
.stack
.size());
301 return env
.state
.stack
[env
.state
.stack
.size() - idx
- 1].type
;
// topC: peek, asserting the value is an initialized cell.
// NOTE(review): the `return topT(env, i);` line (original ~306) is
// missing from this mangled view.
304 const Type
& topC(ISS
& env
, uint32_t i
= 0) {
305 assertx(topT(env
, i
).subtypeOf(BInitCell
));
309 const Type
& topCV(ISS
& env
, uint32_t i
= 0) { return topT(env
, i
); }
// push: push type t with no known equivalent local. The third argument
// records which bytecode (index into the optimized stream) produced the
// value.
311 void push(ISS
& env
, Type t
) {
312 FTRACE(2, " push: {}\n", show(t
));
313 env
.state
.stack
.push_elem(std::move(t
), NoLocalId
,
314 env
.unchangedBcs
+ env
.replacedBcs
.size());
315 if (env
.undo
) env
.undo
->onPush();
// push (with local): as above, but remember that the pushed value is
// equivalent to local l — unless l is volatile, in which case the
// equivalence must not be tracked.
// NOTE(review): the body of the volatile-local early-out (original
// lines ~321-322) is missing from this mangled view.
318 void push(ISS
& env
, Type t
, LocalId l
) {
319 if (l
== NoLocalId
) return push(env
, t
);
320 if (l
<= MaxLocalId
&& is_volatile_local(env
.ctx
.func
, l
)) {
323 FTRACE(2, " push: {} (={})\n", show(t
), local_string(*env
.ctx
.func
, l
));
324 env
.state
.stack
.push_elem(std::move(t
), l
,
325 env
.unchangedBcs
+ env
.replacedBcs
.size());
326 if (env
.undo
) env
.undo
->onPush();
329 //////////////////////////////////////////////////////////////////////
// setThisAvailable: assert $this is definitely available here. If the
// context makes that impossible (no class, unused trait, static method,
// or a thisType incompatible with Obj), the code is unreachable;
// otherwise drop InitNull from the tracked thisType.
332 void setThisAvailable(ISS
& env
) {
333 FTRACE(2, " setThisAvailable\n");
334 if (!env
.ctx
.cls
|| is_unused_trait(*env
.ctx
.cls
) ||
335 (env
.ctx
.func
->attrs
& AttrStatic
)) {
336 return unreachable(env
);
338 if (!env
.state
.thisType
.couldBe(BObj
) ||
339 !env
.state
.thisType
.subtypeOf(BOptObj
)) {
340 return unreachable(env
);
342 if (env
.state
.thisType
.couldBe(BInitNull
)) {
343 env
.state
.thisType
= unopt(std::move(env
.state
.thisType
));
// thisAvailable: true when $this is known non-null (subtype of Obj).
347 bool thisAvailable(ISS
& env
) {
348 assertx(!env
.state
.thisType
.subtypeOf(BBottom
));
349 return env
.state
.thisType
.subtypeOf(BObj
);
352 // Returns the type $this would have if it's not null. Generally
353 // you have to check thisAvailable() before assuming it can't be
// thisTypeFromContext: derive $this's type from the enclosing class;
// none when there is no self class.
// NOTE(review): the fallthrough `return folly::none;` (original ~357)
// is missing from this mangled view.
355 folly::Optional
<Type
> thisTypeFromContext(const Index
& index
, Context ctx
) {
356 if (auto rcls
= index
.selfCls(ctx
)) return setctx(subObj(*rcls
));
// thisType: the currently-tracked (flow-sensitive) type of $this.
360 Type
thisType(ISS
& env
) {
361 return env
.state
.thisType
;
// thisTypeNonNull: thisType with InitNull removed; Bottom when $this
// cannot be an object at all.
364 Type
thisTypeNonNull(ISS
& env
) {
365 if (!env
.state
.thisType
.couldBe(TObj
)) return TBottom
;
366 if (env
.state
.thisType
.couldBe(BInitNull
)) return unopt(env
.state
.thisType
);
367 return env
.state
.thisType
;
// selfCls / selfClsExact / parentCls / parentClsExact: classify the
// current or parent class as a sub-class or exact-class type, when the
// Index can resolve it. (The `return folly::none;` fallthroughs are
// missing from this mangled view.)
370 folly::Optional
<Type
> selfCls(ISS
& env
) {
371 if (auto rcls
= env
.index
.selfCls(env
.ctx
)) return subCls(*rcls
);
375 folly::Optional
<Type
> selfClsExact(ISS
& env
) {
376 if (auto rcls
= env
.index
.selfCls(env
.ctx
)) return clsExact(*rcls
);
380 folly::Optional
<Type
> parentCls(ISS
& env
) {
381 if (auto rcls
= env
.index
.parentCls(env
.ctx
)) return subCls(*rcls
);
385 folly::Optional
<Type
> parentClsExact(ISS
& env
) {
386 if (auto rcls
= env
.index
.parentCls(env
.ctx
)) return clsExact(*rcls
);
390 //////////////////////////////////////////////////////////////////////
// canDefinitelyCallWithoutCoeffectViolation: conservative check that a
// call from caller to callee cannot violate coeffects. Only succeeds
// when neither side has coeffect rules and their static coeffects match
// as multisets. (The early `return false;` for the rules case, original
// ~396-397, is missing from this mangled view.)
393 bool canDefinitelyCallWithoutCoeffectViolation(const php::Func
* caller
,
394 const php::Func
* callee
) {
395 if (!caller
->coeffectRules
.empty() || !callee
->coeffectRules
.empty()) {
398 // TODO(oulgen): We can actually be smarter here and actually check for
399 // bits matching but HHBBC currently does not know about the bit patterns
400 // and enforcement levels, so just check for coeffects matching identically
401 return std::is_permutation(caller
->staticCoeffects
.begin(),
402 caller
->staticCoeffects
.end(),
403 callee
->staticCoeffects
.begin(),
404 callee
->staticCoeffects
.end());
407 const StaticString
s___NEVER_INLINE("__NEVER_INLINE");
// shouldAttemptToFold: decide whether it's worth trying to constant-fold
// a call to func with the given FCallArgs and context ($this) type.
// This is a long chain of disqualifiers (multiple returns, option off,
// speculating/optimizing passes, previously-unfoldable, dynamic-call
// notices, __NEVER_INLINE, reified generics, coeffect mismatch, inout
// args, arity mismatch, visibility) followed by foldability heuristics.
// NOTE(review): many `return false;` / `return true;` lines are missing
// from this mangled view — the surviving conditions show the intent.
408 bool shouldAttemptToFold(ISS
& env
, const php::Func
* func
, const FCallArgs
& fca
,
409 Type context
, bool maybeDynamic
) {
413 fca
.numRets() != 1 ||
414 !options
.ConstantFoldBuiltins
||
416 any(env
.collect
.opts
& CollectionOpts::Speculating
) ||
417 any(env
.collect
.opts
& CollectionOpts::Optimizing
)) {
// Previously proven unfoldable at this block: don't retry.
421 if (env
.collect
.unfoldableFuncs
.count(std::make_pair(func
, env
.bid
))) {
// Folding would suppress dynamic-call notices/errors the runtime
// would have raised.
424 if (maybeDynamic
&& (
425 (RuntimeOption::EvalNoticeOnBuiltinDynamicCalls
&&
426 (func
->attrs
& AttrBuiltin
)) ||
427 (dyn_call_error_level(func
) > 0))) {
431 if (func
->userAttributes
.count(s___NEVER_INLINE
.get())) {
435 // Reified functions may have a mismatch of arity or reified generics
436 // so we cannot fold them
437 // TODO(T31677864): Detect the arity mismatch at HHBBC and enable them to
439 if (func
->isReified
) return false;
441 // Coeffect violation may raise warning or throw an exception
442 if (!canDefinitelyCallWithoutCoeffectViolation(env
.ctx
.func
, func
)) {
446 // We only fold functions when numRets == 1
447 if (func
->hasInOutArgs
) return false;
449 // Can't fold if we get the wrong amount of arguments
450 if (!check_nargs_in_range(func
, fca
.numArgs())) return false;
452 // Don't try to fold functions which aren't guaranteed to be accessible at
454 if (func
->attrs
& AttrPrivate
) {
455 if (env
.ctx
.cls
!= func
->cls
) return false;
456 } else if (func
->attrs
& AttrProtected
) {
457 if (!env
.ctx
.cls
) return false;
458 if (!env
.index
.must_be_derived_from(env
.ctx
.cls
, func
->cls
) &&
459 !env
.index
.must_be_derived_from(func
->cls
, env
.ctx
.cls
)) return false;
462 // Foldable builtins are always worth trying
463 if (func
->attrs
& AttrIsFoldable
) return true;
465 // Any native functions at this point are known to be
466 // non-foldable, but other builtins might be, even if they
467 // don't have the __Foldable attribute.
468 if (func
->nativeInfo
) return false;
470 if (func
->params
.size()) return true;
472 // The function has no args. Check if it's effect free and returns
474 if (env
.index
.is_effect_free(func
) &&
475 is_scalar(env
.index
.lookup_return_type_raw(func
).first
)) {
479 if (!(func
->attrs
& AttrStatic
) && func
->cls
) {
480 // May be worth trying to fold if the method returns a scalar,
481 // assuming its only "effect" is checking for existence of $this.
482 if (is_scalar(env
.index
.lookup_return_type_raw(func
).first
)) return true;
484 // The method may be foldable if we know more about $this.
485 if (is_specialized_obj(context
)) {
486 auto const dobj
= dobj_of(context
);
487 if (dobj
.type
== DObj::Exact
|| dobj
.cls
.cls() != func
->cls
) {
496 //////////////////////////////////////////////////////////////////////
// mayReadLocal: flag local id as possibly-read for this step, and record
// it as a used parameter when it's in param range.
499 void mayReadLocal(ISS
& env
, uint32_t id
) {
500 if (id
< env
.flags
.mayReadLocalSet
.size()) {
501 env
.flags
.mayReadLocalSet
.set(id
);
503 if (env
.recordUsedParams
&& id
< env
.collect
.usedParams
.size()) {
504 env
.collect
.usedParams
.set(id
);
508 // Find a local which is equivalent to the given local
// equivLocals forms circular chains; NoLocalId means "no equivalent".
// Volatile locals must never appear in an equivalence chain.
509 LocalId
findLocEquiv(ISS
& env
, LocalId l
) {
510 if (l
>= env
.state
.equivLocals
.size()) return NoLocalId
;
511 assertx(env
.state
.equivLocals
[l
] == NoLocalId
||
512 !is_volatile_local(env
.ctx
.func
, l
));
513 return env
.state
.equivLocals
[l
];
516 // Find an equivalent local with minimum id
// Walks the circular equivalence chain looking for the smallest id,
// optionally skipping locals that could be Uninit. Returns NoLocalId
// when no strictly-smaller equivalent exists.
// NOTE(review): the loop header and `min` initialization (original
// ~520-523, 525, 527-528, 530) are missing from this mangled view.
517 LocalId
findMinLocEquiv(ISS
& env
, LocalId l
, bool allowUninit
) {
518 if (l
>= env
.state
.equivLocals
.size() ||
519 env
.state
.equivLocals
[l
] == NoLocalId
) {
524 auto cur
= env
.state
.equivLocals
[l
];
526 if (cur
< min
&& (allowUninit
|| !peekLocCouldBeUninit(env
, cur
))) {
529 cur
= env
.state
.equivLocals
[cur
];
531 return min
!= l
? min
: NoLocalId
;
534 // Determine whether two locals are equivalent
// Walks l1's circular chain and reports whether l2 is on it.
535 bool locsAreEquiv(ISS
& env
, LocalId l1
, LocalId l2
) {
536 if (l1
>= env
.state
.equivLocals
.size() ||
537 l2
>= env
.state
.equivLocals
.size() ||
538 env
.state
.equivLocals
[l1
] == NoLocalId
||
539 env
.state
.equivLocals
[l2
] == NoLocalId
) {
544 while ((l
= env
.state
.equivLocals
[l
]) != l1
) {
545 if (l
== l2
) return true;
// locIsThis: true when local l is (or is equivalent to) the local that
// holds $this.
550 bool locIsThis(ISS
& env
, LocalId l
) {
551 assertx(l
<= MaxLocalId
);
552 return l
== env
.state
.thisLoc
||
553 (env
.state
.thisLoc
<= MaxLocalId
&&
554 locsAreEquiv(env
, l
, env
.state
.thisLoc
));
// killLocEquiv (State form): unlink local l from its circular
// equivalence chain, repairing the predecessor's link.
// NOTE(review): the `auto loc = l;` / do-loop opener (original ~560-561)
// is missing from this mangled view.
557 void killLocEquiv(State
& state
, LocalId l
) {
558 if (l
>= state
.equivLocals
.size()) return;
559 if (state
.equivLocals
[l
] == NoLocalId
) return;
// Walk the chain until loc is l's predecessor.
562 loc
= state
.equivLocals
[loc
];
563 } while (state
.equivLocals
[loc
] != l
);
// A two-element chain collapses to "no equivalent"; otherwise splice
// l out by pointing its predecessor at l's successor.
565 if (state
.equivLocals
[l
] == loc
) {
566 state
.equivLocals
[loc
] = NoLocalId
;
568 state
.equivLocals
[loc
] = state
.equivLocals
[l
];
570 state
.equivLocals
[l
] = NoLocalId
;
// killLocEquiv (ISS convenience overload).
573 void killLocEquiv(ISS
& env
, LocalId l
) {
574 killLocEquiv(env
.state
, l
);
// killAllLocEquiv: drop every local equivalence at once.
577 void killAllLocEquiv(ISS
& env
) {
578 env
.state
.equivLocals
.clear();
581 // Add from to to's equivalency set.
// Preconditions: from holds a Cell, to is not volatile, and from is not
// already on any chain. Grows equivLocals as needed, then splices from
// into to's circular chain (creating a two-element chain when to had
// none).
// NOTE(review): the signature's parameter lines and some asserts
// (original ~583-584, 588, 592-593) are missing from this mangled view.
582 void addLocEquiv(ISS
& env
,
585 always_assert(peekLocRaw(env
, from
).subtypeOf(BCell
));
586 always_assert(!is_volatile_local(env
.ctx
.func
, to
));
587 always_assert(from
!= to
&& findLocEquiv(env
, from
) == NoLocalId
);
589 auto m
= std::max(to
, from
);
590 if (env
.state
.equivLocals
.size() <= m
) {
591 env
.state
.equivLocals
.resize(m
+ 1, NoLocalId
);
594 if (env
.state
.equivLocals
[to
] == NoLocalId
) {
595 env
.state
.equivLocals
[from
] = to
;
596 env
.state
.equivLocals
[to
] = from
;
598 env
.state
.equivLocals
[from
] = env
.state
.equivLocals
[to
];
599 env
.state
.equivLocals
[to
] = from
;
603 // Obtain a local which is equivalent to the given stack value
// Sentinel equivLoc values above MaxLocalId (e.g. StackDupId/StackThisId)
// are filtered out here — only real locals are returned.
604 LocalId
topStkLocal(const State
& state
, uint32_t idx
= 0) {
605 assertx(idx
< state
.stack
.size());
606 auto const equiv
= state
.stack
[state
.stack
.size() - idx
- 1].equivLoc
;
607 return equiv
> MaxLocalId
? NoLocalId
: equiv
;
// ISS convenience overload.
609 LocalId
topStkLocal(ISS
& env
, uint32_t idx
= 0) {
610 return topStkLocal(env
.state
, idx
);
613 // Obtain a location which is equivalent to the given stack value
// Unlike topStkLocal, this returns the raw equivLoc, which may be a
// sentinel (StackDupId/StackThisId) rather than a real local.
614 LocalId
topStkEquiv(ISS
& env
, uint32_t idx
= 0) {
615 assertx(idx
< env
.state
.stack
.size());
616 return env
.state
.stack
[env
.state
.stack
.size() - idx
- 1].equivLoc
;
// setStkLocal: record that stack slot idx holds the same value as local
// loc, adding a local-local equivalence when the slot already tracked a
// different local.
// NOTE(review): parts of the equiv lambda (original ~623, 626-629) are
// missing from this mangled view.
619 void setStkLocal(ISS
& env
, LocalId loc
, uint32_t idx
= 0) {
620 assertx(loc
<= MaxLocalId
);
621 always_assert(peekLocRaw(env
, loc
).subtypeOf(BCell
));
622 auto const equiv
= [&] {
624 auto const e
= topStkEquiv(env
, idx
);
625 if (e
!= StackDupId
) return e
;
630 if (equiv
<= MaxLocalId
) {
631 if (loc
== equiv
|| locsAreEquiv(env
, loc
, equiv
)) return;
632 addLocEquiv(env
, loc
, equiv
);
635 env
.state
.stack
[env
.state
.stack
.size() - idx
- 1].equivLoc
= loc
;
// killThisLoc: forget which local holds $this.
// NOTE(review): the condition opener (original ~639, presumably a
// `l != NoLocalId ?` ternary) is missing from this mangled view.
638 void killThisLoc(ISS
& env
, LocalId l
) {
640 env
.state
.thisLoc
== l
: env
.state
.thisLoc
!= NoLocalId
) {
641 FTRACE(2, "Killing thisLoc: {}\n", env
.state
.thisLoc
);
642 env
.state
.thisLoc
= NoLocalId
;
646 // Kill all equivalencies involving the given local to stack values
// Any stack slot tracking l is repointed at another local equivalent to
// l (or NoLocalId), so the slot never keeps a stale link.
647 void killStkEquiv(ISS
& env
, LocalId l
) {
648 for (auto& e
: env
.state
.stack
) {
649 if (e
.equivLoc
!= l
) continue;
650 e
.equivLoc
= findLocEquiv(env
, l
);
651 assertx(e
.equivLoc
!= l
);
// killAllStkEquiv: clear every stack-to-local link (sentinel values
// above MaxLocalId are left alone).
655 void killAllStkEquiv(ISS
& env
) {
656 for (auto& e
: env
.state
.stack
) {
657 if (e
.equivLoc
<= MaxLocalId
) e
.equivLoc
= NoLocalId
;
// killIterEquivs: local l is being written; any live iterator keyed or
// based on l loses that link (unless the write came from the iterator's
// own key, passed as `key`).
// NOTE(review): the match/visit wrapper around the lambda (original
// ~663-665 and trailing lines) is missing from this mangled view.
661 void killIterEquivs(ISS
& env
, LocalId l
, LocalId key
= NoLocalId
) {
662 for (auto& i
: env
.state
.iters
) {
666 [&] (LiveIter
& iter
) {
667 if (iter
.keyLocal
== l
) iter
.keyLocal
= NoLocalId
;
668 if (iter
.baseLocal
== l
) {
669 iter
.baseUpdated
= true;
670 if (key
== NoLocalId
|| key
!= iter
.keyLocal
) {
671 iter
.baseLocal
= NoLocalId
;
// killAllIterEquivs: forget base/key tracking for every live iterator.
679 void killAllIterEquivs(ISS
& env
) {
680 for (auto& i
: env
.state
.iters
) {
684 [] (LiveIter
& iter
) {
685 iter
.baseUpdated
= true;
686 iter
.baseLocal
= NoLocalId
;
687 iter
.keyLocal
= NoLocalId
;
// setIterKey: record that iterator id's key currently lives in local key.
693 void setIterKey(ISS
& env
, IterId id
, LocalId key
) {
697 [&] (LiveIter
& iter
) { iter
.keyLocal
= key
; }
// peekLocRaw: read local l's tracked type without recording the read.
// Volatile locals must always be tracked as TCell.
// NOTE(review): the `return ret;` / closing brace (original ~705-707)
// are missing from this mangled view.
701 Type
peekLocRaw(ISS
& env
, LocalId l
) {
702 auto ret
= env
.state
.locals
[l
];
703 if (is_volatile_local(env
.ctx
.func
, l
)) {
704 always_assert_flog(ret
== TCell
, "volatile local was not TCell");
// locRaw: as peekLocRaw, but records the read via mayReadLocal.
709 Type
locRaw(ISS
& env
, LocalId l
) {
710 mayReadLocal(env
, l
);
711 return peekLocRaw(env
, l
);
// setLocRaw: overwrite local l's type, killing every equivalence that
// mentioned it and logging the old value for undo. Volatile locals are
// asserted to stay TCell (the early-out body, original ~723-724, is
// missing from this mangled view).
714 void setLocRaw(ISS
& env
, LocalId l
, Type t
) {
715 mayReadLocal(env
, l
);
716 killStkEquiv(env
, l
);
717 killLocEquiv(env
, l
);
718 killIterEquivs(env
, l
);
720 if (is_volatile_local(env
.ctx
.func
, l
)) {
721 auto current
= env
.state
.locals
[l
];
722 always_assert_flog(current
== TCell
, "volatile local was not TCell");
725 if (env
.undo
) env
.undo
->onLocalWrite(l
, std::move(env
.state
.locals
[l
]));
726 env
.state
.locals
[l
] = std::move(t
);
729 // Read a local type in the sense of CGetL. (TUninits turn into
// locAsCell: the CGetL view of a local's type.
731 Type
locAsCell(ISS
& env
, LocalId l
) {
732 return to_cell(locRaw(env
, l
));
// peekLocCouldBeUninit / locCouldBeUninit: Uninit possibility, without
// and with read-recording respectively.
735 bool peekLocCouldBeUninit(ISS
& env
, LocalId l
) {
736 return peekLocRaw(env
, l
).couldBe(BUninit
);
739 bool locCouldBeUninit(ISS
& env
, LocalId l
) {
740 return locRaw(env
, l
).couldBe(BUninit
);
// Doc comment for refineLocHelper (comment delimiters missing from this
// mangled view).
744 * Update the known type of a local, based on assertions
745 * (VerifyParamType; or IsType/JmpCC), rather than an actual
746 * modification to the local.
// Only refine non-volatile locals whose current type is a Cell;
// equivalences are deliberately NOT killed here, since the value itself
// is unchanged.
748 void refineLocHelper(ISS
& env
, LocalId l
, Type t
) {
749 auto v
= peekLocRaw(env
, l
);
750 if (!is_volatile_local(env
.ctx
.func
, l
) && v
.subtypeOf(BCell
)) {
751 if (env
.undo
) env
.undo
->onLocalWrite(l
, std::move(env
.state
.locals
[l
]));
752 env
.state
.locals
[l
] = std::move(t
);
// refineLocation: refine a "location" (local, StackDupId, or
// StackThisId) through the user-supplied type function, intersecting
// with the existing type and following Dup chains, local-equivalence
// chains, and the $this location. Returns whether the refined location
// is still reachable (i.e. did not become Bottom).
// NOTE(review): this function is missing many interior lines in this
// mangled view (e.g. original ~758, 761, 768-772, 775-776, 781-784,
// 788-789, 794, 799, 802, 804-805) — the do/while structure must be
// read from the unmangled source. Template header (`template<typename
// F>`) also absent. Comments only; no tokens changed.
757 bool refineLocation(ISS
& env
, LocalId l
, F fun
) {
759 auto refine
= [&] (Type t
) {
760 always_assert(t
.subtypeOf(BCell
));
762 auto r2
= intersection_of(r1
, t
);
763 // In unusual edge cases (mainly intersection of two unrelated
764 // interfaces) the intersection may not be a subtype of its inputs.
765 // In that case, always choose fun's type.
766 if (r2
.subtypeOf(r1
)) {
767 if (r2
.subtypeOf(BBottom
)) ok
= false;
770 if (r1
.subtypeOf(BBottom
)) ok
= false;
// StackDupId: walk down the stack refining every slot in the Dup chain.
773 if (l
== StackDupId
) {
774 auto stkIdx
= env
.state
.stack
.size();
777 auto& stk
= env
.state
.stack
[stkIdx
];
778 if (env
.undo
) env
.undo
->onStackWrite(stkIdx
, stk
.type
);
779 stk
.type
= refine(std::move(stk
.type
));
780 if (stk
.equivLoc
!= StackDupId
) break;
783 l
= env
.state
.stack
[stkIdx
].equivLoc
;
// StackThisId: redirect to the local known to hold $this, if any.
785 if (l
== StackThisId
) {
786 if (env
.state
.thisLoc
!= NoLocalId
) {
787 l
= env
.state
.thisLoc
;
790 if (l
> MaxLocalId
) return ok
;
// Refine every local on l's equivalence chain; remember whether the
// $this local was among them so thisType gets refined too.
791 auto fixThis
= false;
792 auto equiv
= findLocEquiv(env
, l
);
793 if (equiv
!= NoLocalId
) {
795 if (equiv
== env
.state
.thisLoc
) fixThis
= true;
796 refineLocHelper(env
, equiv
, refine(peekLocRaw(env
, equiv
)));
797 equiv
= findLocEquiv(env
, equiv
);
798 } while (equiv
!= l
);
800 if (fixThis
|| l
== env
.state
.thisLoc
) {
801 env
.state
.thisType
= refine(env
.state
.thisType
);
803 refineLocHelper(env
, l
, refine(peekLocRaw(env
, l
)));
// refineLocation (branching form): apply `pre` on the fall-through path
// and `post` on the taken path, converting to an unconditional jump (or
// no jump) when either side becomes unreachable, and propagating state
// to the target when both sides survive.
807 template<typename PreFun
, typename PostFun
>
808 void refineLocation(ISS
& env
, LocalId l
,
809 PreFun pre
, BlockId target
, PostFun post
) {
810 auto state
= env
.state
;
811 auto const target_reachable
= refineLocation(env
, l
, pre
);
812 if (!target_reachable
) jmp_nevertaken(env
);
813 // swap, so we can restore this state if the branch is always taken.
814 env
.state
.swap(state
);
815 if (!refineLocation(env
, l
, post
)) {
816 jmp_setdest(env
, target
);
817 env
.state
.copy_from(std::move(state
));
818 } else if (target_reachable
) {
819 env
.propagate(target
, &state
);
// Doc comment for setLoc (comment delimiters missing from this view).
824 * Set a local type in the sense of tvSet. If the local is boxed or
825 * not known to be not boxed, we can't change the type. May be used
826 * to set locals to types that include Uninit.
// Kills every equivalence touching l, then writes via refineLocHelper.
// `key` lets an iterator's own key-write keep the iter's base link.
828 void setLoc(ISS
& env
, LocalId l
, Type t
, LocalId key
= NoLocalId
) {
829 killStkEquiv(env
, l
);
830 killLocEquiv(env
, l
);
831 killIterEquivs(env
, l
, key
);
833 mayReadLocal(env
, l
);
834 refineLocHelper(env
, l
, std::move(t
));
// findLocal: look up a named local in the current function, recording
// the read. NOTE(review): the `return l.id;` / not-found fallthrough
// (original ~841-844) are missing from this mangled view.
837 LocalId
findLocal(ISS
& env
, SString name
) {
838 for (auto& l
: env
.ctx
.func
->locals
) {
839 if (l
.name
->same(name
)) {
840 mayReadLocal(env
, l
.id
);
// killLocals: forget everything about locals — set them all to TCell
// (logging old values for undo) and drop every equivalence, including
// iterator links and the tracked $this local.
847 void killLocals(ISS
& env
) {
848 FTRACE(2, " killLocals\n");
849 readUnknownLocals(env
);
850 for (size_t l
= 0; l
< env
.state
.locals
.size(); ++l
) {
851 if (env
.undo
) env
.undo
->onLocalWrite(l
, std::move(env
.state
.locals
[l
]));
852 env
.state
.locals
[l
] = TCell
;
854 killAllLocEquiv(env
);
855 killAllStkEquiv(env
);
856 killAllIterEquivs(env
);
857 killThisLoc(env
, NoLocalId
);
860 //////////////////////////////////////////////////////////////////////
// setIter: install a new tracked state for iterator `iter`.
863 void setIter(ISS
& env
, IterId iter
, Iter iterState
) {
864 env
.state
.iters
[iter
] = std::move(iterState
);
// freeIter: mark iterator `iter` as dead.
866 void freeIter(ISS
& env
, IterId iter
) {
867 env
.state
.iters
[iter
] = DeadIter
{};
// iterIsDead: true when the iterator's variant state is DeadIter.
// NOTE(review): the match/visit call wrapping these lambdas (original
// ~871, 875-876) is missing from this mangled view.
870 bool iterIsDead(ISS
& env
, IterId iter
) {
872 env
.state
.iters
[iter
],
873 [] (DeadIter
) { return true; },
874 [] (const LiveIter
&) { return false; }
878 //////////////////////////////////////////////////////////////////////
879 // properties on $this
// Doc block for the $this property helpers (delimiters missing from this
// mangled view).
882 * Note: we are only tracking control-flow insensitive types for
883 * object properties, because it can be pretty rough to try to track
884 * all cases that could re-enter the VM, run arbitrary code, and
885 * potentially change the type of a property.
887 * Because of this, the various "setter" functions for thisProps
888 * here actually just union the new type into what we already had.
// thisPropType: tracked type of private prop `name`, or none when it
// isn't a tracked private property. NOTE(review): the return of
// elem->ty and the folly::none fallthrough (original ~893-895) are
// missing from this mangled view.
891 folly::Optional
<Type
> thisPropType(ISS
& env
, SString name
) {
892 if (auto const elem
= env
.collect
.props
.readPrivateProp(name
)) {
// isMaybeThisPropAttr: could prop `name` carry attribute `attr`?
// Unknown props answer true (conservative).
898 bool isMaybeThisPropAttr(ISS
& env
, SString name
, Attr attr
) {
899 auto const& raw
= env
.collect
.props
.privatePropertiesRaw();
900 auto const it
= raw
.find(name
);
901 // Prop either doesn't exist, or is on an unflattened trait. Be
903 if (it
== raw
.end()) return true;
904 return it
->second
.attrs
& attr
;
// isDefinitelyThisPropAttr: same lookup, but unknown props answer
// false (conservative in the other direction).
907 bool isDefinitelyThisPropAttr(ISS
& env
, SString name
, Attr attr
) {
908 auto const& raw
= env
.collect
.props
.privatePropertiesRaw();
909 auto const it
= raw
.find(name
);
910 // Prop either doesn't exist, or is on an unflattened trait. Be
912 if (it
== raw
.end()) return false;
913 return it
->second
.attrs
& attr
;
// killThisProps: forget everything about $this's private props by
// merging TCell into all of them.
916 void killThisProps(ISS
& env
) {
917 FTRACE(2, " killThisProps\n");
918 env
.collect
.props
.mergeInAllPrivateProps(env
.index
, TCell
)
;
// Doc comment for thisPropAsCell (delimiters missing from this view).
922 * This function returns a type that includes all the possible types
923 * that could result from reading a property $this->name.
// NOTE(review): the body of the Uninit-possible branch (original
// ~929-930) is missing from this mangled view.
925 folly::Optional
<Type
> thisPropAsCell(ISS
& env
, SString name
) {
926 auto const elem
= env
.collect
.props
.readPrivateProp(name
);
927 if (!elem
) return folly::none
;
928 if (elem
->ty
.couldBe(BUninit
) && !is_specialized_obj(thisType(env
))) {
931 return to_cell(elem
->ty
);
// Doc block for mergeThisProp (delimiters missing from this view).
935 * Merge a type into the tracked property types on $this, in the sense
936 * of tvSet (i.e. setting the inner type on possible refs).
938 * Note that all types we see that could go into an object property have to
939 * loosen_all. This is because the object could be serialized and then
940 * deserialized, losing the static-ness of a string or array member, and we
941 * don't guarantee deserialization would preserve a constant value object
// NOTE(review): the name/index arguments to mergeInPrivateProp
// (original ~946-947) are missing from this mangled view.
944 void mergeThisProp(ISS
& env
, SString name
, Type type
) {
945 env
.collect
.props
.mergeInPrivateProp(
948 loosen_vec_or_dict(loosen_all(std::move(type
)))
// Doc block for mergeEachThisPropRaw (delimiters missing from view).
953 * Merge something into each this prop. Usually MapFn will be a
954 * predicate that returns TBottom when some condition doesn't hold.
956 * The types given to the map function are the raw tracked types
957 * (i.e. could be TUninit).
959 template<typename MapFn
>
960 void mergeEachThisPropRaw(ISS
& env
, MapFn fn
) {
961 for (auto const& kv
: env
.collect
.props
.privatePropertiesRaw()) {
962 auto const ty
= thisPropType(env
, kv
.first
);
963 assertx(ty
.has_value());
964 mergeThisProp(env
, kv
.first
, fn(*ty
));
// unsetThisProp: model unset($this->name) by merging in Uninit.
968 void unsetThisProp(ISS
& env
, SString name
) {
969 mergeThisProp(env
, name
, TUninit
);
// unsetUnknownThisProp: unknown prop name — merge Uninit into all.
972 void unsetUnknownThisProp(ISS
& env
) {
973 env
.collect
.props
.mergeInAllPrivateProps(env
.index
, TUninit
);
976 //////////////////////////////////////////////////////////////////////
977 // properties on self::
979 // Similar to $this properties above, we only track control-flow
980 // insensitive types for these.
// killPrivateStatics: forget all tracked types for private static
// properties by merging TInitCell into all of them.
982 void killPrivateStatics(ISS
& env
) {
983 FTRACE(2, " killPrivateStatics\n");
984 env
.collect
.props
.mergeInAllPrivateStatics(env
.index
, TInitCell
, true, false);
987 //////////////////////////////////////////////////////////////////////
// badPropInitialValue: flag that some property's declared initial value
// doesn't satisfy its type constraint.
990 void badPropInitialValue(ISS
& env
) {
991 FTRACE(2, " badPropInitialValue\n");
992 env
.collect
.props
.setBadPropInitialValues();
996 #pragma clang diagnostic pop
1000 //////////////////////////////////////////////////////////////////////