/*
   +----------------------------------------------------------------------+
   | HipHop for PHP                                                       |
   +----------------------------------------------------------------------+
   | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com)  |
   +----------------------------------------------------------------------+
   | This source file is subject to version 3.01 of the PHP license,      |
   | that is bundled with this package in the file LICENSE, and is        |
   | available through the world-wide-web at the following url:           |
   | http://www.php.net/license/3_01.txt                                  |
   | If you did not receive a copy of the PHP license and are unable to   |
   | obtain it through the world-wide-web, please send a note to          |
   | license@php.net so we can mail you a copy immediately.               |
   +----------------------------------------------------------------------+
*/
#ifndef incl_HPHP_INTERP_INTERNAL_H_
#define incl_HPHP_INTERP_INTERNAL_H_

#include <folly/Optional.h>

#include "hphp/runtime/base/type-string.h"

#include "hphp/hhbbc/class-util.h"
#include "hphp/hhbbc/context.h"
#include "hphp/hhbbc/func-util.h"
#include "hphp/hhbbc/interp-state.h"
#include "hphp/hhbbc/interp.h"
#include "hphp/hhbbc/representation.h"
#include "hphp/hhbbc/type-system.h"
namespace HPHP { namespace HHBBC {

//////////////////////////////////////////////////////////////////////

TRACE_SET_MOD(hhbbc);

//////////////////////////////////////////////////////////////////////
struct TrackedElemInfo {
  TrackedElemInfo(uint32_t d, uint32_t i) : depth{d}, idx{i} {}
  // stack depth of the AddElem we're tracking
  uint32_t depth;
  // bytecode index of the previous AddElem
  uint32_t idx;
};
/*
 * Interpreter Step State.
 *
 * This struct gives interpreter functions access to shared state.  It's not
 * in interp-state.h because it's part of the internal implementation of
 * interpreter routines.  The publicized state as results of interpretation
 * are in that header and interp.h.
 */
struct ISS {
  explicit ISS(Interp& bag,
               PropagateFn propagate)
    : index(bag.index)
    , ctx(bag.ctx)
    , collect(bag.collect)
    , bid(bag.bid)
    , blk(*bag.blk)
    , state(bag.state)
    , propagate(propagate)
    , analyzeDepth(options.StrengthReduce ? 0 : 1)
  {}

  const Index& index;
  const Context ctx;
  CollectedInfo& collect;
  const BlockId bid;
  const php::Block& blk;
  State& state;
  StepFlags flags{};
  PropagateFn propagate;
  bool recordUsedParams{true};

  folly::Optional<State> stateBefore;

  // If we're inside an impl (as opposed to reduce) this will be > 0
  uint32_t analyzeDepth{0};

  bool reprocess{false};

  // As we process the block, we keep track of the optimized bytecode
  // stream. We expect that in steady state, there will be no changes;
  // so as we process the block, if the initial bytecodes are the
  // same, we just keep track of how many are the same in
  // unchangedBcs. Once things diverge, the replacements are stored in
  // replacedBcs.

  // number of unchanged bcs to take from blk.hhbcs
  uint32_t unchangedBcs{0};
  // replacements for the bytecodes after the first unchangedBcs
  BytecodeVec replacedBcs;
  CompactVector<TrackedElemInfo> trackedElems;
};
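
/*
 * Illustrative sketch (hypothetical, not taken from interp.cpp): every helper
 * below takes the ISS as its first argument, so a step function body is
 * written entirely against env, e.g.
 *
 *   void in(ISS& env, const bc::SomeOp& op) {   // SomeOp is made up
 *     auto const t = popC(env);                 // eval-stack helpers
 *     push(env, to_cell(t));
 *     constprop(env);                           // set step flags on env.flags
 *   }
 */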
void impl_vec(ISS& env, bool reduce, BytecodeVec&& bcs);
void rewind(ISS& env, const Bytecode&);
void rewind(ISS& env, int);
const Bytecode* last_op(ISS& env, int idx = 0);
const Bytecode* op_from_slot(ISS& env, int, int prev = 0);
ArrayData* resolveTSStatically(ISS& env, SArray, const php::Class*, bool);
//////////////////////////////////////////////////////////////////////

namespace interp_step {

/*
 * An interp_step::in(ISS&, const bc::op&) function exists for every
 * bytecode.  Most are defined in interp.cpp, but some (like FCallBuiltin and
 * member instructions) are defined elsewhere.
 */
#define O(opcode, ...) void in(ISS&, const bc::opcode&);
OPCODES
#undef O

}
/*
 * Find a contiguous local range which is equivalent to the given range and
 * has a smaller starting id.  Only returns the equivalent first local because
 * the size doesn't change.
 */
LocalId equivLocalRange(ISS& env, const LocalRange& range);

Type peekLocRaw(ISS& env, LocalId l);
bool peekLocCouldBeUninit(ISS& env, LocalId l);

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunused-function"
/*
 * Utility for chaining one bytecode implementation to a series of a few
 * others.  Use reduce() if you also want to enable strength reduction
 * (i.e. the bytecode can be replaced by some other bytecode as an
 * optimization).
 *
 * The chained-to bytecodes should not take branches.  For impl, the
 * canConstProp flag will only be set if it was set for all the chained
 * bytecodes.
 */
template<class... Ts>
void impl(ISS& env, Ts&&... ts) {
  impl_vec(env, false, { std::forward<Ts>(ts)... });
}

/*
 * Reduce means that (given some situation in the execution state),
 * a given bytecode could be replaced by some other bytecode
 * sequence.  Ensure that if you call reduce(), it is before any
 * state-affecting operations (like popC()).
 *
 * If env.collect.propagate_constants is set, the reduced bytecodes
 * will have been constant-propagated, and the canConstProp flag will
 * be clear; otherwise canConstProp will be set as for impl.
 */
void reduce(ISS& env, BytecodeVec&& bcs) {
  impl_vec(env, true, std::move(bcs));
}

template<class... Bytecodes>
void reduce(ISS& env, Bytecodes&&... hhbc) {
  reduce(env, { std::forward<Bytecodes>(hhbc)... });
}

bool will_reduce(ISS& env) { return env.analyzeDepth == 0; }
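
/*
 * Illustrative sketch (hypothetical, not taken from interp.cpp): a typical
 * strength reduction peeks at the input types before touching the stack, then
 * either reduces to cheaper bytecode or falls back to impl():
 *
 *   auto const t = topC(env);
 *   if (t.subtypeOf(BInt) && is_scalar(t)) {
 *     return reduce(env, bc::PopC {}, bc::Int { 42 });  // made-up replacement
 *   }
 *   impl(env, bc::SomeFallbackOp {});                   // SomeFallbackOp is made up
 */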
void nothrow(ISS& env) {
  FTRACE(2, "    nothrow\n");
  env.flags.wasPEI = false;
}

void unreachable(ISS& env) {
  FTRACE(2, "    unreachable\n");
  env.state.unreachable = true;
}

void constprop(ISS& env) {
  FTRACE(2, "    constprop\n");
  env.flags.canConstProp = true;
}

void effect_free(ISS& env) {
  FTRACE(2, "    effect_free\n");
  nothrow(env);
  env.flags.effectFree = true;
}

/*
 * Mark the current block as unconditionally jumping to target.  The
 * caller must arrange for env.state to reflect the state that needs
 * to be propagated to the target, but it should not propagate that
 * state itself.
 */
void jmp_setdest(ISS& env, BlockId target) {
  env.flags.jmpDest = target;
}
void jmp_nevertaken(ISS& env) {
  jmp_setdest(env, env.blk.fallthrough);
}
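
/*
 * Illustrative sketch (hypothetical): a conditional-jump handler that proves
 * its condition statically would finish with one of
 *
 *   jmp_nevertaken(env);            // branch never taken; fall through
 *   jmp_setdest(env, op.target);    // branch always taken ("target" name assumed)
 *
 * after first arranging env.state for the chosen successor.
 */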
struct IgnoreUsedParams {
  explicit IgnoreUsedParams(ISS& env) :
      env{env}, record{env.recordUsedParams} {
    env.recordUsedParams = false;
  }

  ~IgnoreUsedParams() {
    env.recordUsedParams = record;
  }

  ISS& env;
  const bool record;
};

void readUnknownParams(ISS& env) {
  for (LocalId p = 0; p < env.ctx.func->params.size(); p++) {
    if (p == env.flags.mayReadLocalSet.size()) break;
    env.flags.mayReadLocalSet.set(p);
  }
  if (env.recordUsedParams) env.collect.usedParams.set();
}

void readUnknownLocals(ISS& env) {
  env.flags.mayReadLocalSet.set();
  if (env.recordUsedParams) env.collect.usedParams.set();
}

void readAllLocals(ISS& env) {
  env.flags.mayReadLocalSet.set();
  if (env.recordUsedParams) env.collect.usedParams.set();
}

void doRet(ISS& env, Type t, bool hasEffects) {
  IgnoreUsedParams _{env};

  readAllLocals(env);
  assert(env.state.stack.empty());
  env.flags.retParam = NoLocalId;
  env.flags.returned = t;
  if (!hasEffects) effect_free(env);
}
void mayUseVV(ISS& env) {
  env.collect.mayUseVV = true;
}

void hasInvariantIterBase(ISS& env) {
  env.collect.hasInvariantIterBase = true;
}
//////////////////////////////////////////////////////////////////////

Type popT(ISS& env) {
  assert(!env.state.stack.empty());
  auto const ret = env.state.stack.back().type;
  FTRACE(2, "    pop:  {}\n", show(ret));
  assert(ret.subtypeOf(BGen));
  env.state.stack.pop_elem();
  return ret;
}

Type popC(ISS& env) {
  auto const v = popT(env);
  assert(v.subtypeOf(BInitCell));
  return v;
}

Type popV(ISS& env) {
  auto const v = popT(env);
  assert(v.subtypeOf(BRef));
  return v;
}

Type popU(ISS& env) {
  auto const v = popT(env);
  assert(v.subtypeOf(BUninit));
  return v;
}

Type popCU(ISS& env) {
  auto const v = popT(env);
  assert(v.subtypeOf(BCell));
  return v;
}

Type popCV(ISS& env) { return popT(env); }

void discard(ISS& env, int n) {
  for (auto i = 0; i < n; ++i) {
    popT(env);
  }
}
const Type& topT(ISS& env, uint32_t idx = 0) {
  assert(idx < env.state.stack.size());
  return env.state.stack[env.state.stack.size() - idx - 1].type;
}

const Type& topC(ISS& env, uint32_t i = 0) {
  assert(topT(env, i).subtypeOf(BInitCell));
  return topT(env, i);
}

const Type& topCV(ISS& env, uint32_t i = 0) { return topT(env, i); }

const Type& topV(ISS& env, uint32_t i = 0) {
  assert(topT(env, i).subtypeOf(BRef));
  return topT(env, i);
}
void push(ISS& env, Type t) {
  FTRACE(2, "    push: {}\n", show(t));
  env.state.stack.push_elem(std::move(t), NoLocalId,
                            env.unchangedBcs + env.replacedBcs.size());
}

void push(ISS& env, Type t, LocalId l) {
  if (l == NoLocalId) return push(env, t);
  if (l <= MaxLocalId) {
    if (peekLocRaw(env, l).couldBe(BRef)) {
      return push(env, t);
    }
    assertx(!is_volatile_local(env.ctx.func, l)); // volatiles are TGen
  }
  FTRACE(2, "    push: {} (={})\n", show(t), local_string(*env.ctx.func, l));
  env.state.stack.push_elem(std::move(t), l,
                            env.unchangedBcs + env.replacedBcs.size());
}
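
/*
 * Illustrative sketch (hypothetical): the usual shape of a two-input handler
 * built from these helpers is "pop the inputs, push the result":
 *
 *   auto const r = popC(env);
 *   auto const l = popC(env);
 *   push(env, union_of(l, r));   // union_of stands in for the real result type
 */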
void discardAR(ISS& env, uint32_t idx) {
  assert(topT(env, idx).subtypeOf(BUninit));
  assert(topT(env, idx + 1).subtypeOf(BUninit));
  assert(topT(env, idx + 2).subtypeOf(BCell));
  auto& stack = env.state.stack;
  stack.erase(stack.end() - idx - 3, stack.end() - idx);
  if (idx && stack[stack.size() - idx].equivLoc == StackDupId) {
    stack[stack.size() - idx].equivLoc = NoLocalId;
  }
}
//////////////////////////////////////////////////////////////////////

void setThisAvailable(ISS& env) {
  FTRACE(2, "    setThisAvailable\n");
  if (env.ctx.cls ?
      is_unused_trait(*env.ctx.cls) || (env.ctx.func->attrs & AttrStatic) :
      !is_pseudomain(env.ctx.func)) {
    return unreachable(env);
  }
  if (!env.state.thisType.couldBe(BObj) ||
      !env.state.thisType.subtypeOf(BOptObj)) {
    return unreachable(env);
  }
  if (is_opt(env.state.thisType)) {
    env.state.thisType = unopt(env.state.thisType);
  }
}

bool thisAvailable(ISS& env) {
  assertx(!env.state.thisType.subtypeOf(BBottom));
  return env.state.thisType.subtypeOf(BObj);
}
// Returns the type $this would have if it's not null.  Generally
// you have to check thisAvailable() before assuming it can't be
// null.
folly::Optional<Type> thisTypeFromContext(const Index& index, Context ctx) {
  if (auto rcls = index.selfCls(ctx)) return setctx(subObj(*rcls));
  return folly::none;
}

Type thisType(ISS& env) {
  return env.state.thisType;
}

Type thisTypeNonNull(ISS& env) {
  if (!env.state.thisType.couldBe(TObj)) return TBottom;
  if (is_opt(env.state.thisType)) return unopt(env.state.thisType);
  return env.state.thisType;
}

folly::Optional<Type> selfCls(ISS& env) {
  if (auto rcls = env.index.selfCls(env.ctx)) return subCls(*rcls);
  return folly::none;
}

folly::Optional<Type> selfClsExact(ISS& env) {
  if (auto rcls = env.index.selfCls(env.ctx)) return clsExact(*rcls);
  return folly::none;
}

folly::Optional<Type> parentCls(ISS& env) {
  if (auto rcls = env.index.parentCls(env.ctx)) return subCls(*rcls);
  return folly::none;
}

folly::Optional<Type> parentClsExact(ISS& env) {
  if (auto rcls = env.index.parentCls(env.ctx)) return clsExact(*rcls);
  return folly::none;
}
//////////////////////////////////////////////////////////////////////

bool canFold(ISS& env, const res::Func& rfunc, int32_t nArgs,
             Type context, bool maybeDynamic) {
  auto const func = rfunc.exactFunc();
  if (!func) return false;
  if (maybeDynamic && (
      (RuntimeOption::EvalNoticeOnBuiltinDynamicCalls &&
       (func->attrs & AttrBuiltin)) ||
      (RuntimeOption::EvalForbidDynamicCalls > 0 &&
       !(func->attrs & AttrDynamicallyCallable)))) {
    return false;
  }

  // Reified functions may have a mismatch of arity or reified generics
  // so we cannot fold them.
  // TODO(T31677864): Detect the arity mismatch at HHBBC and enable them to
  // be foldable.
  if (func->isReified) return false;
  if (func->attrs & AttrTakesInOutParams) return false;

  // Foldable builtins are always worth trying
  if (func->attrs & AttrIsFoldable) return true;

  // Any native functions at this point are known to be
  // non-foldable, but other builtins might be, even if they
  // don't have the __Foldable attribute.
  if (func->nativeInfo) return false;

  // Don't try to fold functions which aren't guaranteed to be accessible at
  // this call site.
  if (func->attrs & AttrPrivate) {
    if (env.ctx.cls != func->cls) return false;
  } else if (func->attrs & AttrProtected) {
    if (!env.ctx.cls) return false;
    if (!env.index.must_be_derived_from(env.ctx.cls, func->cls) &&
        !env.index.must_be_derived_from(func->cls, env.ctx.cls)) return false;
  }

  if (func->params.size()) {
    // Not worth trying if we're going to warn due to missing args
    return check_nargs_in_range(func, nArgs);
  }

  // The function has no args.  Check if it's effect free and returns
  // a literal.
  if (env.index.is_effect_free(rfunc) &&
      is_scalar(env.index.lookup_return_type_raw(func))) {
    return true;
  }

  if (!(func->attrs & AttrStatic) && func->cls) {
    // May be worth trying to fold if the method returns a scalar,
    // assuming its only "effect" is checking for existence of $this.
    if (is_scalar(env.index.lookup_return_type_raw(func))) return true;

    // The method may be foldable if we know more about $this.
    if (is_specialized_obj(context)) {
      auto const dobj = dobj_of(context);
      if (dobj.type == DObj::Exact || dobj.cls.cls() != func->cls) {
        return true;
      }
    }
  }

  return false;
}
/*
 * nArgs should either be the number of parameters that will be passed
 * to the call, or -1 for unknown.  We only need the number of args
 * when we know the exact function being called, in order to determine
 * eligibility for folding.
 *
 * Returns the foldable flag as a convenience.
 */
bool fpiPush(ISS& env, ActRec ar, int32_t nArgs, bool maybeDynamic) {
  auto foldable = [&] {
    if (!options.ConstantFoldBuiltins ||
        !will_reduce(env) ||
        any(env.collect.opts & CollectionOpts::Speculating) ||
        (!env.collect.propagate_constants &&
         any(env.collect.opts & CollectionOpts::Optimizing))) {
      return false;
    }
    if (!ar.func ||
        ar.kind == FPIKind::Ctor ||
        ar.kind == FPIKind::Builtin ||
        ar.fallbackFunc) {
      return false;
    }
    if (!canFold(env, *ar.func, nArgs, ar.context, maybeDynamic)) return false;

    auto const func = ar.func->exactFunc();
    if (env.collect.unfoldableFuncs.count(std::make_pair(func, env.bid))) {
      return false;
    }
    return true;
  }();
  ar.foldable = foldable;
  ar.pushBlk = env.bid;

  FTRACE(2, "    fpi+: {} {}\n", env.state.fpiStack.size(), show(ar));
  env.state.fpiStack.push_back(std::move(ar));
  return foldable;
}
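
/*
 * Illustrative sketch (hypothetical): an FPush* handler builds an ActRec
 * describing the callee and hands it to fpiPush together with the known
 * argument count (or -1), e.g.
 *
 *   fpiPush(env, std::move(ar), numArgs, maybeDynamic);  // ar/numArgs assumed
 *
 * while the matching FCall-side code later retrieves it with fpiTop(env) or
 * removes it with fpiPop(env).
 */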
void fpiPushNoFold(ISS& env, ActRec ar) {
  ar.foldable = false;
  ar.pushBlk = env.bid;

  FTRACE(2, "    fpi+: {} {}\n", env.state.fpiStack.size(), show(ar));
  env.state.fpiStack.push_back(std::move(ar));
}
ActRec fpiPop(ISS& env) {
  assert(!env.state.fpiStack.empty());
  auto const ret = env.state.fpiStack.back();
  FTRACE(2, "    fpi-: {} {}\n", env.state.fpiStack.size() - 1, show(ret));
  env.state.fpiStack.pop_back();
  return ret;
}

ActRec& fpiTop(ISS& env) {
  assert(!env.state.fpiStack.empty());
  return env.state.fpiStack.back();
}
void unfoldable(ISS& env, ActRec& ar) {
  env.propagate(ar.pushBlk, nullptr);
  // We're going to reprocess the whole fpi region; any results we've
  // got so far are bogus, so prevent further useless work by
  // marking the next bytecode unreachable.
  unreachable(env);
  // This also means we shouldn't reprocess any changes to the
  // bytecode, since we're pretending the block ends here, and we may
  // have already thrown away the FPush.
  env.reprocess = false;
  FTRACE(2, "     fpi: not foldable\n");
}

void fpiNotFoldable(ISS& env) {
  // By the time we're optimizing, we should know up front which funcs
  // are foldable (the analyze phase iterates to convergence, the
  // optimize phase does not - so it's too late to fix now).
  assertx(!any(env.collect.opts & CollectionOpts::Optimizing));

  auto& ar = fpiTop(env);
  assertx(ar.func && ar.foldable);
  auto const func = ar.func->exactFunc();
  assertx(func);
  env.collect.unfoldableFuncs.emplace(func, ar.pushBlk);

  unfoldable(env, ar);
}
//////////////////////////////////////////////////////////////////////

void mayReadLocal(ISS& env, uint32_t id) {
  if (id < env.flags.mayReadLocalSet.size()) {
    env.flags.mayReadLocalSet.set(id);
  }
  if (env.recordUsedParams && id < env.collect.usedParams.size()) {
    env.collect.usedParams.set(id);
  }
}
// Find a local which is equivalent to the given local
LocalId findLocEquiv(ISS& env, LocalId l) {
  if (l >= env.state.equivLocals.size()) return NoLocalId;
  assert(env.state.equivLocals[l] == NoLocalId ||
         !is_volatile_local(env.ctx.func, l));
  return env.state.equivLocals[l];
}
// Find an equivalent local with minimum id
LocalId findMinLocEquiv(ISS& env, LocalId l, bool allowUninit) {
  if (l >= env.state.equivLocals.size() ||
      env.state.equivLocals[l] == NoLocalId) {
    return NoLocalId;
  }

  auto min = l;
  auto cur = env.state.equivLocals[l];
  while (cur != l) {
    if (cur < min && (allowUninit || !peekLocCouldBeUninit(env, cur))) {
      min = cur;
    }
    cur = env.state.equivLocals[cur];
  }
  return min != l ? min : NoLocalId;
}
// Determine whether two locals are equivalent
bool locsAreEquiv(ISS& env, LocalId l1, LocalId l2) {
  if (l1 >= env.state.equivLocals.size() ||
      l2 >= env.state.equivLocals.size() ||
      env.state.equivLocals[l1] == NoLocalId ||
      env.state.equivLocals[l2] == NoLocalId) {
    return false;
  }

  auto l = l1;
  while ((l = env.state.equivLocals[l]) != l1) {
    if (l == l2) return true;
  }
  return false;
}
bool locIsThis(ISS& env, LocalId l) {
  assertx(l <= MaxLocalId);
  return l == env.state.thisLoc ||
         (env.state.thisLoc <= MaxLocalId &&
          locsAreEquiv(env, l, env.state.thisLoc));
}
void killLocEquiv(State& state, LocalId l) {
  if (l >= state.equivLocals.size()) return;
  if (state.equivLocals[l] == NoLocalId) return;

  // Remove l from its equivalence ring by pointing its predecessor past it.
  auto loc = l;
  do {
    loc = state.equivLocals[loc];
  } while (state.equivLocals[loc] != l);

  if (state.equivLocals[l] == loc) {
    state.equivLocals[loc] = NoLocalId;
  } else {
    state.equivLocals[loc] = state.equivLocals[l];
  }
  state.equivLocals[l] = NoLocalId;
}
void killLocEquiv(ISS& env, LocalId l) {
  killLocEquiv(env.state, l);
}

void killAllLocEquiv(ISS& env) {
  env.state.equivLocals.clear();
}
// Add from to to's equivalency set.
void addLocEquiv(ISS& env,
                 LocalId from,
                 LocalId to) {
  always_assert(peekLocRaw(env, from).subtypeOf(BCell));
  always_assert(!is_volatile_local(env.ctx.func, to));
  always_assert(from != to && findLocEquiv(env, from) == NoLocalId);

  auto m = std::max(to, from);
  if (env.state.equivLocals.size() <= m) {
    env.state.equivLocals.resize(m + 1, NoLocalId);
  }

  if (env.state.equivLocals[to] == NoLocalId) {
    env.state.equivLocals[from] = to;
    env.state.equivLocals[to] = from;
  } else {
    env.state.equivLocals[from] = env.state.equivLocals[to];
    env.state.equivLocals[to] = from;
  }
}
// Obtain a local which is equivalent to the given stack value
LocalId topStkLocal(const State& state, uint32_t idx = 0) {
  assert(idx < state.stack.size());
  auto const equiv = state.stack[state.stack.size() - idx - 1].equivLoc;
  return equiv > MaxLocalId ? NoLocalId : equiv;
}

LocalId topStkLocal(ISS& env, uint32_t idx = 0) {
  return topStkLocal(env.state, idx);
}
// Obtain a location which is equivalent to the given stack value
LocalId topStkEquiv(ISS& env, uint32_t idx = 0) {
  assert(idx < env.state.stack.size());
  return env.state.stack[env.state.stack.size() - idx - 1].equivLoc;
}
void setStkLocal(ISS& env, LocalId loc, uint32_t idx = 0) {
  assertx(loc <= MaxLocalId);
  always_assert(peekLocRaw(env, loc).subtypeOf(BCell));
  auto const equiv = [&] {
    while (true) {
      auto const e = topStkEquiv(env, idx);
      if (e != StackDupId) return e;
      idx++;
    }
  }();

  if (equiv <= MaxLocalId) {
    if (loc == equiv || locsAreEquiv(env, loc, equiv)) return;
    addLocEquiv(env, loc, equiv);
    return;
  }
  env.state.stack[env.state.stack.size() - idx - 1].equivLoc = loc;
}
void killThisLoc(ISS& env, LocalId l) {
  if (l != NoLocalId ?
      env.state.thisLoc == l : env.state.thisLoc != NoLocalId) {
    FTRACE(2, "Killing thisLoc: {}\n", env.state.thisLoc);
    env.state.thisLoc = NoLocalId;
  }
}
// Kill all equivalencies involving the given local to stack values
void killStkEquiv(ISS& env, LocalId l) {
  for (auto& e : env.state.stack) {
    if (e.equivLoc != l) continue;
    e.equivLoc = findLocEquiv(env, l);
    assertx(e.equivLoc != l);
  }
}

void killAllStkEquiv(ISS& env) {
  for (auto& e : env.state.stack) {
    if (e.equivLoc <= MaxLocalId) e.equivLoc = NoLocalId;
  }
}
void killIterEquivs(ISS& env, LocalId l, LocalId key = NoLocalId) {
  for (auto& i : env.state.iters) {
    match<void>(
      i,
      [] (DeadIter) {},
      [&] (LiveIter& iter) {
        if (iter.keyLocal == l) iter.keyLocal = NoLocalId;
        if (iter.baseLocal == l) {
          if (key == NoLocalId || key != iter.keyLocal) {
            iter.baseLocal = NoLocalId;
          }
        }
      }
    );
  }
}

void killAllIterEquivs(ISS& env) {
  for (auto& i : env.state.iters) {
    match<void>(
      i,
      [] (DeadIter) {},
      [] (LiveIter& iter) {
        iter.baseLocal = NoLocalId;
        iter.keyLocal = NoLocalId;
      }
    );
  }
}
void setIterKey(ISS& env, IterId id, LocalId key) {
  match<void>(
    env.state.iters[id],
    [] (DeadIter) {},
    [&] (LiveIter& iter) { iter.keyLocal = key; }
  );
}
Type peekLocRaw(ISS& env, LocalId l) {
  auto ret = env.state.locals[l];
  if (is_volatile_local(env.ctx.func, l)) {
    always_assert_flog(ret == TGen, "volatile local was not TGen");
  }
  return ret;
}

Type locRaw(ISS& env, LocalId l) {
  mayReadLocal(env, l);
  return peekLocRaw(env, l);
}

void setLocRaw(ISS& env, LocalId l, Type t) {
  mayReadLocal(env, l);
  killStkEquiv(env, l);
  killLocEquiv(env, l);
  killIterEquivs(env, l);
  killThisLoc(env, l);
  if (is_volatile_local(env.ctx.func, l)) {
    auto current = env.state.locals[l];
    always_assert_flog(current == TGen, "volatile local was not TGen");
    return;
  }
  env.state.locals[l] = std::move(t);
}
// Read a local type in the sense of CGetL.  (TUninits turn into
// TInitNull, and potentially reffy types return the "inner" type,
// which is always a subtype of InitCell.)
Type locAsCell(ISS& env, LocalId l) {
  return to_cell(locRaw(env, l));
}

// Read a local type, dereferencing refs, but without converting
// potential TUninits to TInitNull.
Type derefLoc(ISS& env, LocalId l) {
  auto v = locRaw(env, l);
  if (v.subtypeOf(BCell)) return v;
  return v.couldBe(BUninit) ? TCell : TInitCell;
}

bool peekLocCouldBeUninit(ISS& env, LocalId l) {
  return peekLocRaw(env, l).couldBe(BUninit);
}

bool locCouldBeUninit(ISS& env, LocalId l) {
  return locRaw(env, l).couldBe(BUninit);
}

bool locCouldBeRef(ISS& env, LocalId l) {
  return locRaw(env, l).couldBe(BRef);
}
/*
 * Update the known type of a local, based on assertions
 * (VerifyParamType; or IsType/JmpCC), rather than an actual
 * modification to the local.
 */
void refineLocHelper(ISS& env, LocalId l, Type t) {
  auto v = peekLocRaw(env, l);
  if (v.subtypeOf(BCell)) env.state.locals[l] = std::move(t);
}
template<typename F>
bool refineLocation(ISS& env, LocalId l, F fun) {
  bool ok = true;
  auto refine = [&] (Type t) {
    always_assert(t.subtypeOf(BCell));
    auto r1 = fun(t);
    auto r2 = intersection_of(r1, t);
    // In unusual edge cases (mainly intersection of two unrelated
    // interfaces) the intersection may not be a subtype of its inputs.
    // In that case, always choose fun's type.
    if (r2.subtypeOf(r1)) {
      if (r2.subtypeOf(BBottom)) ok = false;
      return r2;
    }
    if (r1.subtypeOf(BBottom)) ok = false;
    return r1;
  };
  if (l == StackDupId) {
    auto stk = env.state.stack.end();
    while (true) {
      --stk;
      stk->type = refine(std::move(stk->type));
      if (stk->equivLoc != StackDupId) break;
      assertx(stk != env.state.stack.begin());
    }
    return ok;
  }
  if (l == StackThisId) {
    if (env.state.thisLoc != NoLocalId) {
      l = env.state.thisLoc;
    }
  }
  if (l > MaxLocalId) return ok;
  auto fixThis = false;
  auto equiv = findLocEquiv(env, l);
  if (equiv != NoLocalId) {
    do {
      if (equiv == env.state.thisLoc) fixThis = true;
      refineLocHelper(env, equiv, refine(peekLocRaw(env, equiv)));
      equiv = findLocEquiv(env, equiv);
    } while (equiv != l);
  }
  if (fixThis || l == env.state.thisLoc) {
    env.state.thisType = refine(env.state.thisType);
  }
  refineLocHelper(env, l, refine(peekLocRaw(env, l)));
  return ok;
}
template<typename PreFun, typename PostFun>
void refineLocation(ISS& env, LocalId l,
                    PreFun pre, BlockId target, PostFun post) {
  auto state = env.state;
  auto const target_reachable = refineLocation(env, l, pre);
  if (!target_reachable) jmp_nevertaken(env);
  // swap, so we can restore this state if the branch is always taken.
  env.state.swap(state);
  if (!refineLocation(env, l, post)) {
    jmp_setdest(env, target);
    env.state.copy_from(std::move(state));
  } else if (target_reachable) {
    env.propagate(target, &state);
  }
}
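
/*
 * Illustrative sketch (hypothetical): a JmpZ/JmpNZ-style handler that tests a
 * local could use the two-callback form, refining the local differently for
 * the taken edge (pre) and the fall-through edge (post):
 *
 *   refineLocation(env, loc,
 *                  [&] (Type t) { return intersection_of(std::move(t), TInitNull); },
 *                  op.target,   // immediate name assumed
 *                  [&] (Type t) { return t; });
 *
 * The lambdas here are only stand-ins for whatever refinement the opcode
 * actually implies.
 */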
/*
 * Set a local type in the sense of tvSet.  If the local is boxed or
 * not known to be not boxed, we can't change the type.  May be used
 * to set locals to types that include Uninit.
 */
void setLoc(ISS& env, LocalId l, Type t, LocalId key = NoLocalId) {
  killStkEquiv(env, l);
  killLocEquiv(env, l);
  killIterEquivs(env, l, key);
  killThisLoc(env, l);
  mayReadLocal(env, l);
  refineLocHelper(env, l, std::move(t));
}
LocalId findLocal(ISS& env, SString name) {
  for (auto& l : env.ctx.func->locals) {
    if (l.name->same(name)) {
      mayReadLocal(env, l.id);
      return l.id;
    }
  }
  return NoLocalId;
}
// Force non-ref locals to TCell.  Used when something modifies an
// unknown local's value, without changing reffiness.
void loseNonRefLocalTypes(ISS& env) {
  readUnknownLocals(env);
  FTRACE(2, "    loseNonRefLocalTypes\n");
  for (auto& l : env.state.locals) {
    if (l.subtypeOf(BCell)) l = TCell;
  }
  killAllLocEquiv(env);
  killAllStkEquiv(env);
  killAllIterEquivs(env);
  killThisLoc(env, NoLocalId);
}

void killLocals(ISS& env) {
  FTRACE(2, "    killLocals\n");
  readUnknownLocals(env);
  for (auto& l : env.state.locals) l = TGen;
  killAllLocEquiv(env);
  killAllStkEquiv(env);
  killAllIterEquivs(env);
  killThisLoc(env, NoLocalId);
}
//////////////////////////////////////////////////////////////////////

// Read the specified class-ref slot without discarding the stored value.
const Type& peekClsRefSlot(ISS& env, ClsRefSlotId slot) {
  assert(slot != NoClsRefSlotId);
  always_assert_flog(env.state.clsRefSlots[slot].subtypeOf(BCls),
                     "class-ref slot contained non-TCls");
  return env.state.clsRefSlots[slot];
}

// Read the specified class-ref slot and discard the stored value.
Type takeClsRefSlot(ISS& env, ClsRefSlotId slot) {
  assert(slot != NoClsRefSlotId);
  auto ret = std::move(env.state.clsRefSlots[slot]);
  FTRACE(2, "    read class-ref: {} -> {}\n", slot, show(ret));
  always_assert_flog(ret.subtypeOf(BCls), "class-ref slot contained non-TCls");
  env.state.clsRefSlots[slot] = TCls;
  return ret;
}

void putClsRefSlot(ISS& env, ClsRefSlotId slot, Type ty) {
  assert(slot != NoClsRefSlotId);
  always_assert_flog(ty.subtypeOf(BCls),
                     "attempted to set class-ref slot to non-TCls");
  FTRACE(2, "    write class-ref: {} -> {}\n", slot, show(ty));
  env.state.clsRefSlots[slot] = std::move(ty);
}
//////////////////////////////////////////////////////////////////////

void setIter(ISS& env, IterId iter, Iter iterState) {
  env.state.iters[iter] = std::move(iterState);
}
void freeIter(ISS& env, IterId iter) {
  env.state.iters[iter] = DeadIter {};
}

bool iterIsDead(ISS& env, IterId iter) {
  return match<bool>(
    env.state.iters[iter],
    [] (DeadIter) { return true; },
    [] (const LiveIter&) { return false; }
  );
}
//////////////////////////////////////////////////////////////////////
// properties on $this

/*
 * Note: we are only tracking control-flow insensitive types for
 * object properties, because it can be pretty rough to try to track
 * all cases that could re-enter the VM, run arbitrary code, and
 * potentially change the type of a property.
 *
 * Because of this, the various "setter" functions for thisProps
 * here actually just union the new type into what we already had.
 */

PropStateElem<>* thisPropRaw(ISS& env, SString name) {
  auto& privateProperties = env.collect.props.privateProperties();
  auto const it = privateProperties.find(name);
  if (it != end(privateProperties)) {
    return &it->second;
  }
  return nullptr;
}
bool isTrackedThisProp(ISS& env, SString name) {
  return thisPropRaw(env, name);
}
bool isMaybeLateInitThisProp(ISS& env, SString name) {
  if (!env.ctx.cls) return false;
  for (auto const& prop : env.ctx.cls->properties) {
    if (prop.name == name &&
        (prop.attrs & AttrPrivate) &&
        !(prop.attrs & AttrStatic)
       ) {
      return prop.attrs & AttrLateInit;
    }
  }
  // Prop either doesn't exist, or is on an unflattened trait. Be conservative.
  return true;
}
void killThisProps(ISS& env) {
  FTRACE(2, "    killThisProps\n");
  for (auto& kv : env.collect.props.privateProperties()) {
    kv.second.ty |=
      adjust_type_for_prop(env.index, *env.ctx.cls, kv.second.tc, TGen);
  }
}
/*
 * This function returns a type that includes all the possible types
 * that could result from reading a property $this->name.
 *
 * Note that this may include types that the property itself cannot
 * actually contain, due to the effects of a possible __get function.
 */
folly::Optional<Type> thisPropAsCell(ISS& env, SString name) {
  auto const elem = thisPropRaw(env, name);
  if (!elem) return folly::none;
  if (elem->ty.couldBe(BUninit)) {
    auto const rthis = thisType(env);
    if (!is_specialized_obj(rthis) || dobj_of(rthis).cls.couldHaveMagicGet()) {
      return TInitCell;
    }
  }
  return to_cell(elem->ty);
}
/*
 * Merge a type into the tracked property types on $this, in the sense
 * of tvSet (i.e. setting the inner type on possible refs).
 *
 * Note that all types we see that could go into an object property have to
 * loosen_all.  This is because the object could be serialized and then
 * deserialized, losing the static-ness of a string or array member, and we
 * don't guarantee deserialization would preserve a constant value object
 * property type.
 */
void mergeThisProp(ISS& env, SString name, Type type) {
  auto const elem = thisPropRaw(env, name);
  if (!elem) return;
  auto const adjusted =
    adjust_type_for_prop(env.index, *env.ctx.cls, elem->tc, loosen_all(type));
  elem->ty |= adjusted;
}
/*
 * Merge something into each this prop.  Usually MapFn will be a
 * predicate that returns TBottom when some condition doesn't hold.
 *
 * The types given to the map function are the raw tracked types
 * (i.e. could be TRef or TUninit).
 */
template<class MapFn>
void mergeEachThisPropRaw(ISS& env, MapFn fn) {
  for (auto& kv : env.collect.props.privateProperties()) {
    mergeThisProp(env, kv.first, fn(kv.second.ty));
  }
}
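
/*
 * Illustrative sketch (hypothetical): using the map function as a predicate,
 * an operation that can only affect object-valued properties might merge
 *
 *   mergeEachThisPropRaw(env, [&] (const Type& t) {
 *     return t.couldBe(BObj) ? TInitCell : TBottom;
 *   });
 *
 * so props that cannot contain an object are left untouched (TBottom merges
 * to nothing).
 */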
void unsetThisProp(ISS& env, SString name) {
  mergeThisProp(env, name, TUninit);
}

void unsetUnknownThisProp(ISS& env) {
  for (auto& kv : env.collect.props.privateProperties()) {
    mergeThisProp(env, kv.first, TUninit);
  }
}
void boxThisProp(ISS& env, SString name) {
  auto const elem = thisPropRaw(env, name);
  if (!elem) return;
  elem->ty |=
    adjust_type_for_prop(env.index, *env.ctx.cls, elem->tc, TRef);
}
/*
 * Forces non-ref property types up to TCell.  This is used when an
 * operation affects an unknown property on $this, but can't change
 * its reffiness.  This could only do TInitCell, but we're just
 * going to gradually get rid of the callsites of this.
 */
void loseNonRefThisPropTypes(ISS& env) {
  FTRACE(2, "    loseNonRefThisPropTypes\n");
  for (auto& kv : env.collect.props.privateProperties()) {
    if (kv.second.ty.subtypeOf(BCell)) {
      kv.second.ty =
        adjust_type_for_prop(env.index, *env.ctx.cls, kv.second.tc, TCell);
    }
  }
}
//////////////////////////////////////////////////////////////////////
// properties on self::

// Similar to $this properties above, we only track control-flow
// insensitive types for these.

PropStateElem<>* selfPropRaw(ISS& env, SString name) {
  auto& privateStatics = env.collect.props.privateStatics();
  auto it = privateStatics.find(name);
  if (it != end(privateStatics)) {
    return &it->second;
  }
  return nullptr;
}
void killSelfProps(ISS& env) {
  FTRACE(2, "    killSelfProps\n");
  for (auto& kv : env.collect.props.privateStatics()) {
    kv.second.ty |=
      adjust_type_for_prop(env.index, *env.ctx.cls, kv.second.tc, TGen);
  }
}

void killSelfProp(ISS& env, SString name) {
  FTRACE(2, "    killSelfProp {}\n", name->data());
  if (auto elem = selfPropRaw(env, name)) {
    elem->ty |= adjust_type_for_prop(env.index, *env.ctx.cls, elem->tc, TGen);
  }
}
// TODO(#3684136): self::$foo can't actually ever be uninit.  Right
// now uninits may find their way into here though.
folly::Optional<Type> selfPropAsCell(ISS& env, SString name) {
  auto const elem = selfPropRaw(env, name);
  if (!elem) return folly::none;
  return to_cell(elem->ty);
}
/*
 * Merges a type into tracked static properties on self, in the
 * sense of tvSet (i.e. setting the inner type on possible refs).
 */
void mergeSelfProp(ISS& env, SString name, Type type) {
  auto const elem = selfPropRaw(env, name);
  if (!elem) return;
  // Context types might escape to other contexts here.
  auto const adjusted =
    adjust_type_for_prop(env.index, *env.ctx.cls, elem->tc, unctx(type));
  elem->ty |= adjusted;
}
/*
 * Similar to mergeEachThisPropRaw, but for self props.
 */
template<class MapFn>
void mergeEachSelfPropRaw(ISS& env, MapFn fn) {
  for (auto& kv : env.collect.props.privateStatics()) {
    mergeSelfProp(env, kv.first, fn(kv.second.ty));
  }
}
void boxSelfProp(ISS& env, SString name) {
  mergeSelfProp(env, name, TRef);
}
/*
 * Forces non-ref static properties up to TCell.  This is used when
 * an operation affects an unknown static property on self::, but
 * can't change its reffiness.
 *
 * This could only do TInitCell because static properties can never
 * be unset.  We're just going to get rid of the callers of this
 * function over a few more changes, though.
 */
void loseNonRefSelfPropTypes(ISS& env) {
  FTRACE(2, "    loseNonRefSelfPropTypes\n");
  for (auto& kv : env.collect.props.privateStatics()) {
    if (kv.second.ty.subtypeOf(BInitCell)) {
      kv.second.ty =
        adjust_type_for_prop(env.index, *env.ctx.cls, kv.second.tc, TCell);
    }
  }
}
bool isMaybeLateInitSelfProp(ISS& env, SString name) {
  if (!env.ctx.cls) return false;
  for (auto const& prop : env.ctx.cls->properties) {
    if (prop.name == name &&
        (prop.attrs & AttrPrivate) &&
        (prop.attrs & AttrStatic)
       ) {
      return prop.attrs & AttrLateInit;
    }
  }
  // Prop either doesn't exist, or is on an unflattened trait. Be conservative.
  return true;
}
//////////////////////////////////////////////////////////////////////

/*
 * Check whether the class given by the type might raise when initialized.
 */
bool classInitMightRaise(ISS& env, const Type& cls) {
  if (RuntimeOption::EvalCheckPropTypeHints <= 0) return false;
  if (!is_specialized_cls(cls)) return true;
  auto const dcls = dcls_of(cls);
  if (dcls.type != DCls::Exact) return true;
  return env.index.lookup_class_init_might_raise(env.ctx, dcls.cls);
}

void badPropInitialValue(ISS& env) {
  FTRACE(2, "    badPropInitialValue\n");
  env.collect.props.setBadPropInitialValues();
}
#pragma clang diagnostic pop

//////////////////////////////////////////////////////////////////////

}}

#endif