/*
   +----------------------------------------------------------------------+
   | HipHop for PHP                                                       |
   +----------------------------------------------------------------------+
   | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com)  |
   +----------------------------------------------------------------------+
   | This source file is subject to version 3.01 of the PHP license,      |
   | that is bundled with this package in the file LICENSE, and is        |
   | available through the world-wide-web at the following url:           |
   | http://www.php.net/license/3_01.txt                                  |
   | If you did not receive a copy of the PHP license and are unable to   |
   | obtain it through the world-wide-web, please send a note to          |
   | license@php.net so we can mail you a copy immediately.               |
   +----------------------------------------------------------------------+
*/
#include "hphp/hhbbc/analyze.h"

#include "hphp/util/configs/eval.h"
#include "hphp/util/dataflow-worklist.h"
#include "hphp/util/trace.h"

#include "hphp/hhbbc/interp-state.h"
#include "hphp/hhbbc/interp.h"
#include "hphp/hhbbc/index.h"
#include "hphp/hhbbc/options.h"
#include "hphp/hhbbc/representation.h"
#include "hphp/hhbbc/cfg.h"
#include "hphp/hhbbc/unit-util.h"
#include "hphp/hhbbc/cfg-opts.h"
#include "hphp/hhbbc/class-util.h"
#include "hphp/hhbbc/func-util.h"
#include "hphp/hhbbc/options-util.h"

#include "hphp/runtime/vm/reified-generics.h"
namespace HPHP::HHBBC {

TRACE_SET_MOD(hhbbc);

namespace {

struct KnownArgs {
  Type context;
  const CompactVector<Type>& args;
};
//////////////////////////////////////////////////////////////////////
const StaticString s_Closure("Closure");
const StaticString s_AsyncGenerator("HH\\AsyncGenerator");
const StaticString s_Generator("Generator");
//////////////////////////////////////////////////////////////////////
/*
 * Short-hand to get the rpoId of a block in a given FuncAnalysis. (The RPO
 * ids are re-assigned per analysis.)
 */
uint32_t rpoId(const FuncAnalysis& ai, BlockId blk) {
  return ai.bdata[blk].rpoId;
}
const StaticString s_reified_generics_var("0ReifiedGenerics");
const StaticString s_coeffects_var("0Coeffects");
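/*
 * Build the State at entry to a function: parameter locals (checked
 * against their type-constraints, or seeded from knownArgs when
 * inline interpreting), closure use vars, the hidden reified-generics
 * and coeffects locals, and volatile locals. Returns std::nullopt
 * when the entry state is unsatisfiable.
 */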
Optional<State> entry_state(const IIndex& index, CollectedInfo& collect,
                            const Context& ctx, const KnownArgs* knownArgs) {
  auto ret = State{};
  ret.initialized = true;
  ret.thisType = [&] {
    if (!ctx.cls) return TNull;
    if (knownArgs && !knownArgs->context.subtypeOf(BBottom)) {
      if (knownArgs->context.subtypeOf(BOptObj)) {
        return setctx(knownArgs->context);
      }
      if (knownArgs->context.subtypeOf(BCls) &&
          is_specialized_cls(knownArgs->context)) {
        return setctx(toobj(knownArgs->context));
      }
    }
    auto const maybeSelfType = selfCls(index, ctx);
    auto thisType = maybeSelfType ? setctx(toobj(*maybeSelfType)) : TObj;
    if (ctx.func->cls &&
        !(ctx.func->cls->attrs & AttrTrait) &&
        !(ctx.func->attrs & AttrStatic)) {
      return thisType;
    }
    return opt(std::move(thisType));
  }();
  ret.locals.resize(ctx.func->locals.size());
  ret.iters.resize(ctx.func->numIters);
  auto locId = uint32_t{0};
  for (; locId < ctx.func->params.size(); ++locId) {
    if (knownArgs) {
      if (locId < knownArgs->args.size()) {
        if (ctx.func->params[locId].isVariadic) {
          std::vector<Type> pack(knownArgs->args.begin() + locId,
                                 knownArgs->args.end());
          for (auto& p : pack) p = unctx(std::move(p));
          ret.locals[locId] = vec(std::move(pack));
        } else {
          auto [ty, _, effectFree] =
            verify_param_type(index, ctx, locId, unctx(knownArgs->args[locId]));

          if (ty.subtypeOf(BBottom)) {
            ret.unreachable = true;
            if (ctx.func->params[locId].dvEntryPoint == NoBlockId) {
              return std::nullopt;
            }
          }

          ret.locals[locId] = std::move(ty);
          collect.effectFree &= effectFree;
        }
      } else {
        ret.locals[locId] = ctx.func->params[locId].isVariadic ? TVec : TUninit;
      }
      continue;
    }
    if (ctx.func->params[locId].isVariadic) {
      ret.locals[locId] = TVec;
      continue;
    }
    // Because we throw a non-recoverable error for having fewer than the
    // required number of args, all function parameters must be initialized.
    auto [ty, _, effectFree] = verify_param_type(index, ctx, locId, TInitCell);

    if (ty.subtypeOf(BBottom)) {
      ret.unreachable = true;
      if (ctx.func->params[locId].dvEntryPoint == NoBlockId) {
        return std::nullopt;
      }
    }

    ret.locals[locId] = std::move(ty);
    collect.effectFree &= effectFree;
  }
  // Closures have use vars; we need to look up their types from the index.
  auto const useVars = ctx.func->isClosureBody
    ? index.lookup_closure_use_vars(ctx.func)
    : CompactVector<Type>{};
  /*
   * Reified functions have a hidden local that's always the first
   * (non-parameter) local, which stores reified generics.
   */
  if (ctx.func->isReified) {
    // Currently closures cannot be reified.
    assertx(!ctx.func->isClosureBody);
    assertx(locId < ret.locals.size());
    assertx(ctx.func->locals[locId].name->same(s_reified_generics_var.get()));
    ret.locals[locId++] = get_type_of_reified_list(ctx.func->userAttributes);
  }
  /*
   * Functions with coeffect rules have a hidden local that's always the first
   * (non-parameter) local (after reified generics, if they exist),
   * which stores the ambient coeffects.
   */
  if (has_coeffects_local(ctx.func)) {
    assertx(locId < ret.locals.size());
    assertx(ctx.func->locals[locId].name->same(s_coeffects_var.get()));
    ret.locals[locId++] = TInt;
  }
  auto afterParamsLocId = uint32_t{0};
  for (; locId < ctx.func->locals.size(); ++locId, ++afterParamsLocId) {
    /*
     * Some of the closure locals are mapped to used variables. The types of
     * use vars are looked up from the index.
     */
    if (ctx.func->isClosureBody) {
      if (afterParamsLocId < useVars.size()) {
        ret.locals[locId] = useVars[afterParamsLocId];
        continue;
      }
    }

    // Otherwise the local will start uninitialized, like normal.
    ret.locals[locId] = TUninit;
  }
  // Finally, make sure any volatile locals are set to Gen, even if they are
  // parameters.
  for (auto locId = uint32_t{0}; locId < ctx.func->locals.size(); ++locId) {
    if (is_volatile_local(ctx.func, locId)) {
      ret.locals[locId] = TCell;
    }
  }

  return ret;
}
/*
 * Helper for do_analyze to initialize the states for all function entries
 * (i.e. each dv init and the main entry). All of them count as places the
 * function could be entered, so they all must be visited at least once.
 *
 * If we're entering at a DV-init, all higher parameter locals must be
 * Uninit, with the possible exception of a final variadic param
 * (which will be an array). It is also possible that the DV-init is
 * reachable from within the function with these parameter locals
 * already initialized (although the normal php emitter can't do
 * this), but that case will be discovered when iterating.
 */
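/*
 * Illustrative example (not from any analyzed program): given
 *
 *   function f($a, $b = 1, $c = 2) { ... }
 *
 * entering at $c's DV-init seeds $a and $b with their checked
 * parameter types and $c with Uninit; entering at the main entry
 * seeds all three parameters.
 */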
dataflow_worklist<uint32_t>
prepare_incompleteQ(const IIndex& index,
                    FuncAnalysis& ai,
                    CollectedInfo& collect,
                    const KnownArgs* knownArgs) {
  auto incompleteQ = dataflow_worklist<uint32_t>(ai.rpoBlocks.size());
  auto const ctx = ai.ctx;
  auto const numParams = ctx.func->params.size();

  auto const entryState = entry_state(index, collect, ctx, knownArgs);
  if (!entryState) return incompleteQ;
  if (knownArgs) {
    // When we have known args, we only need to add one of the entry points to
    // the initial state, since we know how many arguments were passed.
    auto const useDvInit = [&] {
      if (knownArgs->args.size() >= numParams) return false;
      for (auto i = knownArgs->args.size(); i < numParams; ++i) {
        auto const dv = ctx.func->params[i].dvEntryPoint;
        if (dv != NoBlockId) {
          ai.bdata[dv].stateIn.copy_from(*entryState);
          ai.bdata[dv].stateIn.unreachable = false;
          incompleteQ.push(rpoId(ai, dv));
          return true;
        }
      }
      return false;
    }();

    if (!useDvInit && !entryState->unreachable) {
      ai.bdata[ctx.func->mainEntry].stateIn.copy_from(*entryState);
      incompleteQ.push(rpoId(ai, ctx.func->mainEntry));
    }

    return incompleteQ;
  }
  for (auto paramId = uint32_t{0}; paramId < numParams; ++paramId) {
    auto const dv = ctx.func->params[paramId].dvEntryPoint;
    if (dv != NoBlockId) {
      ai.bdata[dv].stateIn.copy_from(*entryState);
      ai.bdata[dv].stateIn.unreachable = false;
      incompleteQ.push(rpoId(ai, dv));
      for (auto locId = paramId; locId < numParams; ++locId) {
        ai.bdata[dv].stateIn.locals[locId] =
          ctx.func->params[locId].isVariadic ? TVec : TUninit;
      }
    }
    // If a DV-init's param has an entry state of Bottom, then none of
    // the following DV-inits are reachable.
    if (entryState->locals[paramId].is(BBottom)) break;
  }

  if (!entryState->unreachable) {
    ai.bdata[ctx.func->mainEntry].stateIn.copy_from(*entryState);
    incompleteQ.push(rpoId(ai, ctx.func->mainEntry));
  }

  return incompleteQ;
}
/*
 * Closures inside of classes are analyzed in the context they are
 * created in (this affects accessibility rules, access to privates,
 * etc.).
 *
 * Note that in the interpreter code, ctx.func->cls is not
 * necessarily the same as ctx.cls because of closures.
 */
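/*
 * For example (illustrative only): a closure created inside C::foo()
 * is analyzed with ctx.cls adjusted back to C, so accessibility
 * checks can see C's private members, even though ctx.func->cls is
 * the synthesized closure class.
 */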
AnalysisContext adjust_closure_context(const IIndex& index,
                                       AnalysisContext ctx) {
  if (ctx.cls) ctx.cls = index.lookup_closure_context(*ctx.cls);
  return ctx;
}
Type fixup_return_type(const IIndex& index, const php::Func& func, Type ty) {
  if (func.isGenerator) {
    if (func.isAsync) {
      // Async generators always return an AsyncGenerator object.
      return objExact(builtin_class(index, s_AsyncGenerator.get()));
    } else {
      // Non-async generators always return a Generator object.
      return objExact(builtin_class(index, s_Generator.get()));
    }
  } else if (func.isAsync) {
    // Async functions always return WaitH<T>, where T is the type returned
    // internally.
    return wait_handle(std::move(ty));
  } else {
    return ty;
  }
}
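// Illustrative example: an `async function f(): Awaitable<int>` whose
// body is inferred to return Int gets the reported return type
// WaitH<Int>; a non-async generator's return type is always the
// Generator object, regardless of what the body yields.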
FuncAnalysis do_analyze_collect(const IIndex& index,
                                const AnalysisContext& ctx,
                                CollectedInfo& collect,
                                const KnownArgs* knownArgs) {
  assertx(ctx.cls == adjust_closure_context(index, ctx).cls);
  auto ai = FuncAnalysis{ ctx };
  SCOPE_ASSERT_DETAIL("do-analyze-collect-2") {
    std::string ret;
    for (auto bid : ctx.func.blockRange()) {
      folly::format(
        &ret,
        "block #{}\nin-{}\n{}",
        bid,
        state_string(*ctx.func, ai.bdata[bid].stateIn, collect),
        show(*ctx.func, *ctx.func.blocks()[bid])
      );
    }
    return ret;
  };

  SCOPE_ASSERT_DETAIL("do-analyze-collect-1") {
    return "Analyzing: " + show(ctx);
  };
  auto const UNUSED bump =
    trace_bump(ctx, Trace::hhbbc, Trace::hhbbc_cfg, Trace::hhbbc_index);
  if (knownArgs) {
    using namespace folly::gen;
    FTRACE(
      2,
      "{:.^70}\n",
      folly::sformat(
        "Inline Interp (context: {}, args: {})",
        show(knownArgs->context),
        from(knownArgs->args)
          | map([] (const Type& t) { return show(t); })
          | unsplit<std::string>(",")
      )
    );
  }
  SCOPE_EXIT {
    if (knownArgs) FTRACE(2, "{:.^70}\n", "End Inline Interp");
  };
  FTRACE(2, "{:-^70}\n-- {}\n", "Analyze", show(ctx));
  /*
   * Set of RPO ids that still need to be visited.
   *
   * Initially, we need each entry block in this list. As we visit
   * blocks, we propagate states to their successors and across their
   * back edges---when state merges cause a change to the block
   * stateIn, we will add it to this queue so it gets visited again.
   */
  auto incompleteQ = prepare_incompleteQ(index, ai, collect, knownArgs);
  /*
   * There are potentially infinitely growing types when we're using union_of
   * to merge states, so occasionally we need to apply a widening operator.
   *
   * Currently this is done with a straightforward heuristic: if you visit
   * a block too many times, we'll start doing all the merges with the widening
   * operator. We must then continue iterating in case the actual fixed point
   * is higher than the result of widening. Likewise if we loop too much
   * because of local static types changing, we'll widen those.
   *
   * Termination is guaranteed because the widening operator has only finite
   * chains in the type lattice.
   */
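  // Illustrative example (not from any analyzed program): a loop that
  // keeps appending to a vec can grow the inferred element union on
  // every merge; once a block passes the visit limit, widening jumps
  // the merged type up the lattice so the chain of merges must
  // terminate.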
  auto totalVisits = std::vector<uint32_t>(ctx.func.blocks().size());

  // For debugging, count how many times basic blocks get interpreted.
  auto interp_counter = uint32_t{0};

  hphp_fast_map<BlockId, BlockUpdateInfo> blockUpdates;
  /*
   * Iterate until a fixed point.
   *
   * Each time a stateIn for a block changes, we re-insert the block's
   * rpo ID in incompleteQ. Since incompleteQ is ordered, we'll
   * always visit blocks with earlier RPO ids first, which hopefully
   * means fewer iterations.
   */
  do {
    while (!incompleteQ.empty()) {
      auto const bid = ai.rpoBlocks[incompleteQ.pop()];

      totalVisits[bid]++;

      FTRACE(2, "block #{}\nin {}{}", bid,
             state_string(*ctx.func, ai.bdata[bid].stateIn, collect),
             property_state_string(collect.props));
      ++interp_counter;
      // Vector for consistent iteration order.
      hphp_vector_map<BlockId, State> rollbackStates;
      hphp_fast_set<BlockId> scheduled;
      auto const propagate = [&] (BlockId target, const State* st) {
        if (!st) {
          FTRACE(2, "  Force reprocess: {}\n", target);
          incompleteQ.push(rpoId(ai, target));
          return;
        }

        auto const needsWiden =
          totalVisits[target] >= options.analyzeFuncWideningLimit;

        FTRACE(2, "  {}-> {}\n", needsWiden ? "widening " : "", target);
        FTRACE(4, "\ntarget old {}\n",
               state_string(*ctx.func, ai.bdata[target].stateIn, collect));

        // Save the before state of the target (if we don't have one
        // already). If we need to rollback this block, we'll restore
        // all targets back to their original state.
        rollbackStates.try_emplace(target, ai.bdata[target].stateIn);

        auto const changed =
          needsWiden ? widen_into(ai.bdata[target].stateIn, *st)
                     : merge_into(ai.bdata[target].stateIn, *st);
        if (changed) scheduled.emplace(target);
        FTRACE(4, "target new {}",
               state_string(*ctx.func, ai.bdata[target].stateIn, collect));
        if (changed) FTRACE(2, "  changed: {}\n", target);
      };
      auto const rollback = [&] {
        for (auto& [b, st] : rollbackStates) {
          FTRACE(4, "\nrollback block #{}:\n", b);
          FTRACE(4, "target old {}\n",
                 state_string(*ctx.func, ai.bdata[b].stateIn, collect));
          ai.bdata[b].stateIn.copy_from(std::move(st));
          FTRACE(4, "target new {}\n",
                 state_string(*ctx.func, ai.bdata[b].stateIn, collect));
        }
        rollbackStates.clear();
      };
      auto const blk = ctx.func.blocks()[bid].get();
      auto stateOut = ai.bdata[bid].stateIn;
      auto interp = Interp { index, ctx, collect, bid, blk, stateOut };
      auto flags = run(interp, ai.bdata[bid].stateIn, propagate, rollback);

      for (auto const target : scheduled) incompleteQ.push(rpoId(ai, target));
      if (any(collect.opts & CollectionOpts::EffectFreeOnly) &&
          !collect.effectFree) {
        break;
      }

      if (flags.updateInfo.replacedBcs.size() ||
          flags.updateInfo.unchangedBcs != blk->hhbcs.size() ||
          flags.updateInfo.fallthrough != blk->fallthrough) {
        blockUpdates[bid] = std::move(flags.updateInfo);
      } else {
        blockUpdates.erase(bid);
      }
      if (flags.returned) {
        ai.inferredReturn |= std::move(*flags.returned);
        if (flags.retParam == NoLocalId) {
          ai.retParam = NoLocalId;
        } else if (ai.retParam != flags.retParam) {
          if (ai.retParam != MaxLocalId) {
            ai.retParam = NoLocalId;
          } else {
            ai.retParam = flags.retParam;
          }
        }
      }

      ai.usedParams |= flags.usedParams;
      ai.bdata[bid].noThrow = flags.noThrow;
    }
    if (any(collect.opts & CollectionOpts::EffectFreeOnly) &&
        !collect.effectFree) {
      break;
    }
  } while (!incompleteQ.empty());
  ai.closureUseTypes = std::move(collect.closureUseTypes);
  ai.effectFree = collect.effectFree;
  ai.hasInvariantIterBase = collect.hasInvariantIterBase;
  ai.unfoldableFuncs = collect.unfoldableFuncs;
  ai.publicSPropMutations = std::move(collect.publicSPropMutations);
  for (auto& elm : blockUpdates) {
    ai.blockUpdates.emplace_back(elm.first, std::move(elm.second));
  }
  ai.inferredReturn =
    fixup_return_type(index, *ctx.func, std::move(ai.inferredReturn));
  if (collect.props.hasInitialValues()) {
    for (size_t i = 0, size = ctx.cls->properties.size(); i < size; ++i) {
      auto const& prop = ctx.cls->properties[i];
      auto initial = collect.props.getInitialValue(prop);
      if (!initial) continue;
      if (ai.resolvedInitializers.isNull()) {
        ai.resolvedInitializers = decltype(ai.resolvedInitializers)::makeR();
      }
      ai.resolvedInitializers.right()->emplace_back(i, std::move(*initial));
    }
  }
  /*
   * If inferredReturn is TBottom, the callee didn't execute a return
   * at all. (E.g. it unconditionally throws, or is an abstract
   * function.)
   *
   * In this case, we leave the return type as TBottom, to indicate
   * the same to callers.
   */
  assertx(ai.inferredReturn.subtypeOf(BCell));
  // For debugging, print the final input states for each block.
  FTRACE(2, "{}", [&] {
    auto const bsep = std::string(60, '=') + "\n";
    auto const sep = std::string(60, '-') + "\n";
    auto ret = folly::format(
      "{}function {} ({} block interps):\n{}",
      bsep,
      show(ctx),
      interp_counter,
      bsep
    ).str();
    for (auto& bd : ai.bdata) {
      folly::format(
        &ret,
        "{}block {}{}:\nin {}",
        sep,
        ai.rpoBlocks[bd.rpoId],
        bd.noThrow ? " (no throw)" : "",
        state_string(*ctx.func, bd.stateIn, collect)
      );
    }
    folly::format(
      &ret,
      "{}Inferred return type: {}{}\n",
      sep,
      show(ai.inferredReturn),
      ai.effectFree ? " (effect-free)" : ""
    );
    ret += bsep;
    return ret;
  }());

  return ai;
}
FuncAnalysis do_analyze(const IIndex& index,
                        const AnalysisContext& inputCtx,
                        ClassAnalysis* clsAnalysis,
                        ClsConstantWork* clsCnsWork = nullptr,
                        const KnownArgs* knownArgs = nullptr,
                        CollectionOpts opts = CollectionOpts{}) {
  auto const ctx = adjust_closure_context(index, inputCtx);
  ContextPusher _{index, ctx};

  // If this isn't an 86cinit, or if we're inline interping, just do a
  // normal analyze.
  if (ctx.func->name != s_86cinit.get() || knownArgs) {
    auto collect = CollectedInfo { index, ctx, clsAnalysis, opts, clsCnsWork };
    return do_analyze_collect(index, ctx, collect, knownArgs);
  }
  // Otherwise this is an 86cinit. We want to inline interp it for
  // each constant to resolve them. Since a constant can be defined in
  // terms of another, we want to repeat this until we reach a fixed
  // point.
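  // Illustrative example (not from any analyzed program):
  //   class C { const A = self::B + 1; const B = 2; }
  // resolving A needs B's value, so A's interp is rescheduled after B
  // resolves, and iteration stops once no constant's type changes.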
  // Use a scratch ClsConstantWork if one hasn't been provided (this
  // is the case when we're in the analyzing-constants phase).
  Optional<ClsConstantWork> temp;
  if (!clsCnsWork) {
    temp.emplace(index, *ctx.cls);
    clsCnsWork = temp.get_pointer();
  }
  assertx(!clsCnsWork->next());
  // Schedule initial work.
  for (auto const& cns : ctx.cls->constants) {
    if (cns.kind != ConstModifiers::Kind::Value) continue;
    if (!cns.val) continue;
    if (cns.val->m_type != KindOfUninit) continue;
    clsCnsWork->add(cns.name);
  }
  // Inline interp for this constant, and reschedule interp for any
  // constants which rely on this constant. Continue until there's no
  // more work.
  while (auto const cns = clsCnsWork->next()) {
    clsCnsWork->setCurrent(cns);
    SCOPE_EXIT { clsCnsWork->clearCurrent(cns); };
    auto fa = analyze_func_inline(
      index,
      ctx,
      TCls,
      { sval(cns) },
      clsCnsWork
    );
    clsCnsWork->update(cns, unctx(std::move(fa.inferredReturn)));
  }
  // Analyze the 86cinit as a normal function. We do this last so it
  // can take advantage of any resolved constants in clsCnsWork.
  auto collect = CollectedInfo { index, ctx, clsAnalysis, opts, clsCnsWork };
  auto ret = do_analyze_collect(index, ctx, collect, nullptr);
  // Propagate out the resolved constants so they can be reflected
  // in the Index.
  for (size_t i = 0, size = ctx.cls->constants.size(); i < size; ++i) {
    auto const& cns = ctx.cls->constants[i];
    if (cns.kind != ConstModifiers::Kind::Value) continue;
    if (!cns.val) continue;
    if (cns.val->m_type != KindOfUninit) continue;
    auto& info = clsCnsWork->constants.at(cns.name);
    if (ret.resolvedInitializers.isNull()) {
      ret.resolvedInitializers = decltype(ret.resolvedInitializers)::makeL();
    }
    assertx(ret.resolvedInitializers.left());
    ret.resolvedInitializers.left()->emplace_back(i, std::move(info));
  }

  return ret;
}
//////////////////////////////////////////////////////////////////////
/*
 * In the case of HNI builtin classes, private properties are
 * allowed to be mutated by native code, so we may not see all the
 * modifications.
 *
 * We are allowed to assume the type annotation on the property is
 * accurate, although nothing is currently checking that this is the
 * case. We handle this right now by doing inference as if it
 * couldn't be affected by native code, then asserting the inferred
 * type is at least a subtype of the annotated type, and expanding
 * it to be the annotated type if it is bigger.
 */
void expand_hni_prop_types(ClassAnalysis& clsAnalysis) {
  auto relax_prop = [&] (const php::Prop& prop, PropState& propState) {
    auto it = propState.find(prop.name);
    if (it == end(propState)) return;

    it->second.everModified = true;

    /*
     * When any functions are interceptable, we don't require the constraints
     * to actually match, and relax all the HNI types to Gen.
     *
     * This is because with any interceptable functions, it's quite
     * possible that some function calls in systemlib might not be
     * known to return things matching the property type hints for
     * some properties, or not to take their arguments by reference.
     */
    auto const hniTy = from_hni_constraint(prop.userType);
    if (it->second.ty.subtypeOf(hniTy)) {
      it->second.ty = hniTy;
      return;
    }

    always_assert_flog(
      false,
      "HNI class {}::{} inferred property type ({}) doesn't "
      "match annotation ({})\n",
      clsAnalysis.ctx.cls->name,
      prop.name,
      show(it->second.ty),
      show(hniTy)
    );
  };

  for (auto& prop : clsAnalysis.ctx.cls->properties) {
    relax_prop(prop, clsAnalysis.privateProperties);
    relax_prop(prop, clsAnalysis.privateStatics);
  }
}
//////////////////////////////////////////////////////////////////////
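/*
 * Resolve this class's flattened type constants to static type
 * structures where possible, recording each resolution (and whether
 * it is context sensitive) in the ClassAnalysis.
 */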
void resolve_type_constants(const IIndex& index,
                            const Context& ctx,
                            ClassAnalysis& analysis) {
  auto const UNUSED bump =
    trace_bump(ctx, Trace::hhbbc, Trace::hhbbc_cfg, Trace::hhbbc_index);

  for (auto const& pair :
       index.lookup_flattened_class_type_constants(*ctx.cls)) {
    auto const name = pair.first;
    auto const idx = pair.second;

    if (idx.cls->tsame(ctx.cls->name)) {
      assertx(idx.idx < ctx.cls->constants.size());
      auto const& cns = ctx.cls->constants[idx.idx];
      if (cns.resolvedTypeStructure) continue;
    }

    SCOPE_ASSERT_DETAIL("resolve-type-constants") {
      return folly::sformat(
        "Resolving {}::{} -> {}",
        idx.cls, name, show(ctx)
      );
    };
    FTRACE(2, "{:-^70}\n-- ({}) {}::{}\n", "Resolve", show(ctx), idx.cls, name);

    auto const lookup = index.lookup_class_type_constant(*ctx.cls, name, idx);
    auto const ts = lookup.resolution.sarray();
    if (!ts) {
      FTRACE(2, "  no resolution\n");
      continue;
    }

    FTRACE(
      2, "  resolved to {}{}\n",
      staticArrayStreamer(ts),
      lookup.resolution.contextSensitive ? " (context sensitive)" : ""
    );
    analysis.resolvedTypeConsts.emplace_back(
      ResolvedClsTypeConst{
        ts,
        !lookup.resolution.contextSensitive,
        idx
      }
    );
  }
}
//////////////////////////////////////////////////////////////////////

}

//////////////////////////////////////////////////////////////////////
const php::Func* ClassAnalysisWorklist::next() {
  if (worklist.empty()) return nullptr;
  auto f = worklist.front();
  inWorklist.erase(f);
  worklist.pop_front();
  return f;
}
bool ClassAnalysisWorklist::schedule(const php::Func& f) {
  auto const insert = inWorklist.emplace(&f);
  if (!insert.second) return false;
  worklist.emplace_back(&f);
  return true;
}

void ClassAnalysisWorklist::scheduleForProp(SString name) {
  auto const it = propDeps.find(name);
  if (it == propDeps.end()) return;
  for (auto const f : it->second) schedule(*f);
}
void ClassAnalysisWorklist::scheduleForPropMutate(SString name) {
  auto const it = propMutateDeps.find(name);
  if (it == propMutateDeps.end()) return;
  for (auto const f : it->second) schedule(*f);
}

void ClassAnalysisWorklist::scheduleForReturnType(const php::Func& callee) {
  auto const it = returnTypeDeps.find(&callee);
  if (it == returnTypeDeps.end()) return;
  for (auto const f : it->second) schedule(*f);
}
//////////////////////////////////////////////////////////////////////
ClsConstantWork::ClsConstantWork(const IIndex& index,
                                 const php::Class& cls): cls{cls} {
  auto initial = index.lookup_class_constants(cls);
  for (auto& [name, info] : initial) constants.emplace(name, std::move(info));
}
ClsConstLookupResult ClsConstantWork::lookup(SString name) {
  auto const it = constants.find(name);
  if (it == end(constants)) {
    return ClsConstLookupResult{ TBottom, TriBool::No, true };
  }
  if (current) deps[name].emplace(current);
  return ClsConstLookupResult{
    it->second.type,
    TriBool::Yes,
    !is_scalar(it->second.type) || bool(cls.attrs & AttrInternal)
  };
}
void ClsConstantWork::update(SString name, Type t) {
  auto& old = constants.at(name);
  if (t.strictlyMoreRefined(old.type)) {
    if (old.refinements < options.returnTypeRefineLimit) {
      old.type = std::move(t);
      ++old.refinements;
      schedule(name);
    } else {
      FTRACE(
        1, "maxed out refinements for class constant {}::{}\n",
        cls.name, name
      );
    }
  } else {
    always_assert_flog(
      t.moreRefined(old.type),
      "Class constant type invariant violated for {}::{}.\n"
      "  {} is not at least as refined as {}\n",
      cls.name,
      name,
      show(t),
      show(old.type)
    );
  }
}
void ClsConstantWork::add(SString name) {
  auto const emplaced = inWorklist.emplace(name);
  if (!emplaced.second) return;
  worklist.emplace_back(name);
}
void ClsConstantWork::schedule(SString name) {
  auto const it = deps.find(name);
  if (it == end(deps)) return;
  for (auto const d : it->second) {
    FTRACE(2, "Scheduling {}::{} because of {}::{}\n",
           cls.name, d, cls.name, name);
    add(d);
  }
}
SString ClsConstantWork::next() {
  if (worklist.empty()) return nullptr;
  auto n = worklist.front();
  inWorklist.erase(n);
  worklist.pop_front();
  return n;
}
//////////////////////////////////////////////////////////////////////
FuncAnalysisResult::FuncAnalysisResult(AnalysisContext ctx)
  : ctx(ctx)
  , inferredReturn(TBottom)
{}
FuncAnalysis::FuncAnalysis(AnalysisContext ctx)
  : FuncAnalysisResult{ctx}
  , rpoBlocks{rpoSortAddDVs(ctx.func)}
  , bdata{ctx.func.blocks().size()}
{
  for (auto rpoId = size_t{0}; rpoId < rpoBlocks.size(); ++rpoId) {
    bdata[rpoBlocks[rpoId]].rpoId = rpoId;
  }
}
FuncAnalysis analyze_func(const IIndex& index, const AnalysisContext& ctx,
                          CollectionOpts opts) {
  return do_analyze(index, ctx, nullptr, nullptr, nullptr, opts);
}
FuncAnalysis analyze_func_inline(const IIndex& index,
                                 const AnalysisContext& ctx,
                                 const Type& thisType,
                                 const CompactVector<Type>& args,
                                 ClsConstantWork* clsCnsWork,
                                 CollectionOpts opts) {
  auto const knownArgs = KnownArgs {
    ctx.func->isClosureBody ? TBottom : thisType,
    args
  };
  return do_analyze(index, ctx, nullptr, clsCnsWork, &knownArgs,
                    opts | CollectionOpts::Inlining);
}
ClassAnalysis analyze_class_constants(const IIndex& index, const Context& ctx) {
  assertx(!is_closure(*ctx.cls));

  auto const UNUSED bump =
    trace_bump(ctx, Trace::hhbbc, Trace::hhbbc_cfg, Trace::hhbbc_index);
  FTRACE(
    2, "{:#^70}\n-- {}\n",
    index.frozen() ? " Class (Frozen) " : " Class ",
    show(ctx)
  );
  ContextPusher _{index, ctx};
  ClassAnalysis analysis{ctx};

  if (!is_used_trait(*ctx.cls)) {
    resolve_type_constants(index, ctx, analysis);
  }

  for (auto const& m : ctx.cls->methods) {
    if (!is_86init_func(*m)) continue;
    auto const wf = php::WideFunc::cns(m.get());
    analysis.methods.emplace_back(
      analyze_func(
        index,
        AnalysisContext { ctx.unit, wf, ctx.cls },
        CollectionOpts{}
      )
    );
  }

  return analysis;
}
ClassAnalysis analyze_class(const IIndex& index, const Context& ctx) {
  assertx(ctx.cls && !ctx.func && !is_used_trait(*ctx.cls));

  auto const UNUSED bump =
    trace_bump(ctx, Trace::hhbbc, Trace::hhbbc_cfg, Trace::hhbbc_index);
  FTRACE(
    2, "{:#^70}\n-- {}\n",
    index.frozen() ? " Class (Frozen) " : " Class ",
    show(ctx)
  );

  ContextPusher _{index, ctx};
  ClassAnalysis clsAnalysis(ctx);
  auto const associatedClosures = index.lookup_closures(ctx.cls);
  auto const associatedMethods = index.lookup_extra_methods(ctx.cls);
  auto const isHNIBuiltin = ctx.cls->attrs & AttrBuiltin;
  /*
   * Initialize inferred private property types to their in-class
   * initializers.
   *
   * We need to loosen_all on instance properties, because the class could be
   * unserialized, which we don't guarantee preserves those aspects of the
   * type.
   *
   * Also, set Uninit properties to TBottom, so that analysis
   * of 86pinit methods sets them to the correct type.
   */
  auto const initialMightBeBad = [&] (const php::Prop& prop,
                                      const Type& initial) {
    if (is_closure(*ctx.cls)) return false;
    if (prop.attrs & (AttrSystemInitialValue | AttrLateInit)) return false;
    if (!initial.moreRefined(
          lookup_constraint(index, ctx, prop.typeConstraint, initial).lower
        )) {
      return true;
    }
    return std::any_of(
      begin(prop.ubs.m_constraints), end(prop.ubs.m_constraints),
      [&] (const TypeConstraint& ub) {
        return !initial.moreRefined(
          lookup_constraint(index, ctx, ub, initial).lower
        );
      }
    );
  };
  for (size_t propIdx = 0, size = ctx.cls->properties.size();
       propIdx < size; ++propIdx) {
    auto const& prop = ctx.cls->properties[propIdx];
    auto const cellTy = from_cell(prop.val);

    if (!(prop.attrs & AttrInitialSatisfiesTC) &&
        !cellTy.subtypeOf(BUninit)) {
      if (!initialMightBeBad(prop, cellTy)) {
        clsAnalysis.resolvedProps.emplace_back(
          propIdx, PropInitInfo{prop.val, true, false}
        );
      }
    }

    if (!(prop.attrs & AttrPrivate)) continue;
    if (isHNIBuiltin) {
      auto const hniTy = from_hni_constraint(prop.userType);
      always_assert_flog(
        cellTy.subtypeOf(hniTy),
        "hni {}::{} has impossible type. "
        "The annotation says it is type ({}) "
        "but the default value is type ({}).\n",
        ctx.cls->name,
        prop.name,
        show(hniTy),
        show(cellTy)
      );
    }
    if (!(prop.attrs & AttrStatic)) {
      auto t = loosen_this_prop_for_serialization(*ctx.cls, prop.name, cellTy);

      if (!is_closure(*ctx.cls) && t.subtypeOf(BUninit)) {
        /*
         * For non-closure classes, a property of type KindOfUninit
         * means that it has a non-scalar initializer which will be set
         * by an 86pinit method. For these classes, we want the
         * initial type of the property to be the type set by the
         * 86pinit method, so we set the type to TBottom.
         *
         * Closures will not have an 86pinit body, but still may have
         * properties of kind KindOfUninit (they will later contain
         * used variables). We don't want to touch those.
         */
        t = TBottom;
      } else if (!(prop.attrs & AttrSystemInitialValue)) {
        t = adjust_type_for_prop(index, *ctx.cls, &prop.typeConstraint, t);
      }
      auto& elem = clsAnalysis.privateProperties[prop.name];
      elem.ty = std::move(t);
      elem.tc = &prop.typeConstraint;
      elem.attrs = prop.attrs;
      elem.everModified = false;
    } else {
      // Same thing as the above regarding TUninit and TBottom.
      // Static properties don't need to exclude closures for this,
      // though---we use instance properties for the closure use vars.
      auto t = cellTy.subtypeOf(BUninit)
        ? TBottom
        : (prop.attrs & AttrSystemInitialValue)
          ? cellTy
          : adjust_type_for_prop(index, *ctx.cls, &prop.typeConstraint, cellTy);
      auto& elem = clsAnalysis.privateStatics[prop.name];
      elem.ty = std::move(t);
      elem.tc = &prop.typeConstraint;
      elem.attrs = prop.attrs;
      elem.everModified = false;
    }
  }
  /*
   * For builtins, we assume the runtime can write to the properties
   * in un-analyzable ways (but won't violate their type-hint). So,
   * expand the analyzed types to at least include the type-hint.
   */
  if (isHNIBuiltin) expand_hni_prop_types(clsAnalysis);

  ClsConstantWork clsCnsWork{index, *ctx.cls};
  /*
   * For classes with non-scalar initializers, the 86pinit, 86sinit,
   * 86linit, 86cinit, and 86reifiedinit methods are guaranteed to run
   * before any other method, and are never called afterwards. Thus,
   * we can analyze these methods first to determine the initial types
   * of properties with non-scalar initializers, and these need not
   * be run again as part of the fixedpoint computation.
   */
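  // Illustrative example (not from any analyzed program): in
  //   class C { public $x = self::FOO; }
  // $x's initializer is non-scalar, so it is assigned by a synthesized
  // 86pinit method; analyzing 86pinit first gives $x its real initial
  // type before the fixedpoint loop starts.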
  CompactVector<FuncAnalysis> initResults;
  auto const analyze_86init = [&] (const StaticString& name) {
    if (auto func = find_method(ctx.cls, name.get())) {
      auto const wf = php::WideFunc::cns(func);
      auto const context = AnalysisContext { ctx.unit, wf, ctx.cls };
      initResults.emplace_back(
        do_analyze(index, context, &clsAnalysis, &clsCnsWork)
      );
    }
  };
  analyze_86init(s_86pinit);
  analyze_86init(s_86sinit);
  analyze_86init(s_86linit);
  analyze_86init(s_86cinit);
  analyze_86init(s_86reifiedinit);
  // NB: Properties can still be TBottom at this point if their initial values
  // cannot possibly satisfy their type-constraints. The classes of such
  // properties cannot be instantiated.
  /*
   * Similar to the function case in do_analyze, we have to handle the
   * fact that there are infinitely growing chains in our type lattice
   * under union_of.
   *
   * So if we've visited a func some number of times and still aren't
   * at a fixed point, we'll set the property state to the result of
   * widening the old state with the new state, and then reset the
   * counter. This guarantees eventual termination.
   */
  ClassAnalysisWork work;
  clsAnalysis.work = &work;

  clsAnalysis.methods.reserve(initResults.size() + ctx.cls->methods.size());
  for (auto& m : initResults) {
    clsAnalysis.methods.emplace_back(std::move(m));
  }
  if (associatedClosures) {
    clsAnalysis.closures.reserve(associatedClosures->size());
  }

  auto const startPrivateProperties = clsAnalysis.privateProperties;
  auto const startPrivateStatics = clsAnalysis.privateStatics;
  struct FuncMeta {
    SString unit;
    const php::Class* cls;
    CompactVector<FuncAnalysisResult>* output;
    size_t startReturnRefinements;
    size_t localReturnRefinements = 0;
    int outputIdx = -1;
    size_t visits = 0;
  };
  hphp_fast_map<const php::Func*, FuncMeta> funcMeta;

  auto const getMeta = [&] (const php::Func& f) -> FuncMeta& {
    auto metaIt = funcMeta.find(&f);
    assertx(metaIt != funcMeta.end());
    return metaIt->second;
  };
  // Build up the initial worklist:
  for (auto const& f : ctx.cls->methods) {
    if (f->name == s_86pinit.get() ||
        f->name == s_86sinit.get() ||
        f->name == s_86linit.get() ||
        f->name == s_86cinit.get() ||
        f->name == s_86reifiedinit.get()) {
      continue;
    }
    auto const DEBUG_ONLY inserted = work.worklist.schedule(*f);
    assertx(inserted);
    auto [type, refinements] = index.lookup_return_type_raw(f.get());
    work.returnTypes.emplace(f.get(), std::move(type));
    funcMeta.emplace(
      f.get(),
      FuncMeta{ctx.unit, ctx.cls, &clsAnalysis.methods, refinements}
    );
  }
  if (associatedClosures) {
    for (auto const c : *associatedClosures) {
      auto const f = c->methods[0].get();
      auto const DEBUG_ONLY inserted = work.worklist.schedule(*f);
      assertx(inserted);
      auto [type, refinements] = index.lookup_return_type_raw(f);
      work.returnTypes.emplace(f, std::move(type));
      funcMeta.emplace(
        f, FuncMeta{ctx.unit, c, &clsAnalysis.closures, refinements}
      );
    }
  }
  if (associatedMethods) {
    for (auto const m : *associatedMethods) {
      auto const DEBUG_ONLY inserted = work.worklist.schedule(*m);
      assertx(inserted);
      funcMeta.emplace(
        m,
        FuncMeta{m->unit, ctx.cls, nullptr, 0, 0}
      );
    }
  }
  // Keep analyzing until we have no more functions scheduled (the fixed
  // point).
  while (!work.worklist.empty()) {
    // First analyze funcs until we hit a fixed point for the
    // properties. Until we reach that, the return types are *not*
    // guaranteed to be correct.
    while (auto const f = work.worklist.next()) {
      auto& meta = getMeta(*f);

      auto const wf = php::WideFunc::cns(f);
      auto const context = AnalysisContext { meta.unit, wf, meta.cls };
      auto results = do_analyze(index, context, &clsAnalysis, &clsCnsWork);

      if (meta.output) {
        if (meta.outputIdx < 0) {
          meta.outputIdx = meta.output->size();
          meta.output->emplace_back(std::move(results));
        } else {
          (*meta.output)[meta.outputIdx] = std::move(results);
        }
      }
      if (meta.visits++ >= options.analyzeClassWideningLimit) {
        for (auto& prop : clsAnalysis.privateProperties) {
          auto wide = widen_type(prop.second.ty);
          if (prop.second.ty.strictlyMoreRefined(wide)) {
            prop.second.ty = std::move(wide);
            work.worklist.scheduleForProp(prop.first);
          }
        }
        for (auto& prop : clsAnalysis.privateStatics) {
          auto wide = widen_type(prop.second.ty);
          if (prop.second.ty.strictlyMoreRefined(wide)) {
            prop.second.ty = std::move(wide);
            work.worklist.scheduleForProp(prop.first);
          }
        }
      }
    }
    // We've hit a fixed point for the properties. Other local
    // information (such as return type information) is now correct
    // (but might not be optimal).

    // Reflect any improved return types into the results. This will
    // make them available for local analysis and they'll eventually
    // be written back into the Index.
    for (auto& kv : funcMeta) {
      auto const f = kv.first;
      auto& meta = kv.second;
      if (!meta.output) continue;
      assertx(meta.outputIdx >= 0);
      auto& results = (*meta.output)[meta.outputIdx];

      auto const oldTypeIt = work.returnTypes.find(f);
      assertx(oldTypeIt != work.returnTypes.end());
      auto& [oldType, oldEffectFree] = oldTypeIt->second;

      // Heed the return type refinement limit.
      if (results.inferredReturn.strictlyMoreRefined(oldType)) {
        if (meta.startReturnRefinements + meta.localReturnRefinements
            < options.returnTypeRefineLimit) {
          oldType = results.inferredReturn;
          work.worklist.scheduleForReturnType(*f);
        } else if (meta.localReturnRefinements > 0) {
          results.inferredReturn = oldType;
          results.effectFree = oldEffectFree;
        }
        ++meta.localReturnRefinements;
      } else if (!results.inferredReturn.moreRefined(oldType)) {
        // If we have a monotonicity violation, bail out immediately
        // and let the Index complain.
      }

      if (results.effectFree) {
        if (!oldEffectFree) {
          oldEffectFree = true;
          work.worklist.scheduleForReturnType(*f);
        }
      } else if (oldEffectFree) {
        oldEffectFree = false;
      }

      results.localReturnRefinements = meta.localReturnRefinements;
      if (results.localReturnRefinements > 0) --results.localReturnRefinements;
    }
    hphp_fast_set<const php::Func*> changed;

    // We've made the return types available for local analysis. Now
    // iterate again and see if we can improve them.
    while (auto const f = work.worklist.next()) {
      auto& meta = getMeta(*f);

      auto const wf = php::WideFunc::cns(f);
      auto const context = AnalysisContext { meta.unit, wf, meta.cls };

      work.noPropRefine = true;
      auto results = do_analyze(index, context, &clsAnalysis, &clsCnsWork);
      work.noPropRefine = false;

      if (!meta.output) continue;

      auto returnTypeIt = work.returnTypes.find(f);
      assertx(returnTypeIt != work.returnTypes.end());

      auto& [oldReturn, oldEffectFree] = returnTypeIt->second;

      // Heed the return type refinement limit.
      if (results.inferredReturn.strictlyMoreRefined(oldReturn)) {
        if (meta.startReturnRefinements + meta.localReturnRefinements
            < options.returnTypeRefineLimit) {
          oldReturn = results.inferredReturn;
          work.worklist.scheduleForReturnType(*f);
          changed.emplace(f);
        } else if (meta.localReturnRefinements > 0) {
          results.inferredReturn = oldReturn;
          results.effectFree = oldEffectFree;
        }
        ++meta.localReturnRefinements;
      } else if (!results.inferredReturn.moreRefined(oldReturn)) {
        // If we have a monotonicity violation, bail out immediately
        // and let the Index complain.
      }

      if (results.effectFree) {
        if (!oldEffectFree) {
          oldEffectFree = true;
          work.worklist.scheduleForReturnType(*f);
        }
      } else if (oldEffectFree) {
        oldEffectFree = false;
      }

      results.localReturnRefinements = meta.localReturnRefinements;
      if (results.localReturnRefinements > 0) --results.localReturnRefinements;

      assertx(meta.outputIdx >= 0);
      (*meta.output)[meta.outputIdx] = std::move(results);
    }
    // Return types have reached a fixed point. However, this means
    // that we might be able to further improve property types. So, if
    // a method has an improved return type, examine the methods
    // which depend on that return type. Drop any property info for
    // properties those methods write to. Reschedule any methods which
    // read or write to those properties. The idea is we want to
    // re-analyze all mutations of those properties again, since the
    // refined return types may result in better property types. This
    // process may repeat multiple times, but will eventually reach a
    // fixed point.
.propMutators
.empty()) {
1337 auto const resetProp
= [&] (SString name
,
1338 const PropState
& src
,
1340 auto dstIt
= dst
.find(name
);
1341 auto const srcIt
= src
.find(name
);
1342 if (dstIt
== dst
.end()) {
1343 assertx(srcIt
== src
.end());
1346 assertx(srcIt
!= src
.end());
1347 dstIt
->second
.ty
= srcIt
->second
.ty
;
1348 dstIt
->second
.everModified
= srcIt
->second
.everModified
;
1351 hphp_fast_set
<SString
> retryProps
;
1352 for (auto const f
: changed
) {
1353 auto const deps
= work
.worklist
.depsForReturnType(*f
);
1354 if (!deps
) continue;
1355 for (auto const dep
: *deps
) {
1356 auto const propsIt
= work
.propMutators
.find(dep
);
1357 if (propsIt
== work
.propMutators
.end()) continue;
1358 for (auto const prop
: propsIt
->second
) retryProps
.emplace(prop
);
1362 // Schedule the funcs which mutate the props before the ones
1364 for (auto const prop
: retryProps
) {
1365 resetProp(prop
, startPrivateProperties
,
1366 clsAnalysis
.privateProperties
);
1367 resetProp(prop
, startPrivateStatics
,
1368 clsAnalysis
.privateStatics
);
1369 work
.worklist
.scheduleForPropMutate(prop
);
1371 for (auto const prop
: retryProps
) {
1372 work
.worklist
.scheduleForProp(prop
);
    // This entire loop will eventually terminate when we cannot
    // improve properties nor return types.
  }

  {
    auto const UNUSED bump =
      trace_bump(ctx, Trace::hhbbc, Trace::hhbbc_cfg, Trace::hhbbc_index);

    // For debugging, print the final state of the class analysis.
    FTRACE(2, "{}", [&] {
      auto const bsep = std::string(60, '+') + "\n";
      auto ret = folly::format(
        "{}class {}:\n{}",
        bsep,
        ctx.cls->name,
        bsep
      ).str();
      for (auto& kv : clsAnalysis.privateProperties) {
        ret += folly::format(
          "private ${: <14} :: {}\n",
          kv.first,
          show(kv.second.ty)
        ).str();
      }
      for (auto& kv : clsAnalysis.privateStatics) {
        ret += folly::format(
          "private static ${: <14} :: {}\n",
          kv.first,
          show(kv.second.ty)
        ).str();
      }
      ret += bsep;
      return ret;
    }());
  }

  clsAnalysis.work = nullptr;
  return clsAnalysis;
}
//////////////////////////////////////////////////////////////////////
PropagatedStates::PropagatedStates(State&& state, StateMutationUndo undos)
  : m_locals{std::move(state.locals)}
  , m_undos{std::move(undos)}
{
  for (size_t i = 0; i < state.stack.size(); ++i) {
    m_stack.emplace_back(std::move(state.stack[i].type));
  }
}
void PropagatedStates::next() {
  // The undo log shouldn't be empty, and we should be at a mark
  // (which marks an instruction boundary).
  assertx(!m_undos.events.empty());
  assertx(boost::get<StateMutationUndo::Mark>(&m_undos.events.back()));
  m_lastPush.reset();
  m_afterLocals.clear();
  m_undos.events.pop_back();
  // Use the undo log to "unwind" the current state.
  while (true) {
    assertx(!m_undos.events.empty());
    auto const stop = match<bool>(
      m_undos.events.back(),
      [] (const StateMutationUndo::Mark&) { return true; },
      [this] (StateMutationUndo::Push) {
        assertx(!m_stack.empty());
        if (!m_lastPush) m_lastPush.emplace(std::move(m_stack.back()));
        m_stack.pop_back();
        return false;
      },
      [this] (StateMutationUndo::Pop& p) {
        m_stack.emplace_back(std::move(p.t));
        return false;
      },
      [this] (StateMutationUndo::Stack& s) {
        assertx(s.idx < m_stack.size());
        m_stack[s.idx] = std::move(s.t);
        return false;
      },
      [this] (StateMutationUndo::Local& l) {
        assertx(l.id < m_locals.size());
        auto& old = m_locals[l.id];
        m_afterLocals.emplace_back(std::make_pair(l.id, std::move(old)));
        old = std::move(l.t);
        return false;
      }
    );
    if (stop) break;
    m_undos.events.pop_back();
  }
}
PropagatedStates
locally_propagated_states(const Index& index,
                          const AnalysisContext& ctx,
                          CollectedInfo& collect,
                          BlockId bid,
                          State state) {
  Trace::Bump bumper{Trace::hhbbc, 10};

  auto const blk = ctx.func.blocks()[bid].get();

  // Set up the undo log for the interp. We reserve it using this size
  // heuristic which captures typical undo log sizes.
  StateMutationUndo undos;
  undos.events.reserve((blk->hhbcs.size() + 1) * 4);

  IndexAdaptor adaptor{index};
  auto interp = Interp { adaptor, ctx, collect, bid, blk, state, &undos };
  for (auto const& op : blk->hhbcs) {
    auto const markIdx = undos.events.size();
    // Record instruction boundary.
    undos.events.emplace_back(StateMutationUndo::Mark{});
    // Interpret it. This appends more info to the undo log.
    auto const stepFlags = step(interp, op);
    // Store the step flags in the mark we recorded before the
    // interpretation.
    auto& mark = boost::get<StateMutationUndo::Mark>(undos.events[markIdx]);
    mark.wasPEI = stepFlags.wasPEI;
    mark.mayReadLocalSet = stepFlags.mayReadLocalSet;
    mark.unreachable = state.unreachable;
    state.stack.compact();
  }
  // Add a final mark to maintain invariants (this will be popped
  // immediately when the first next() is called).
  undos.events.emplace_back(StateMutationUndo::Mark{});
  return PropagatedStates{std::move(state), std::move(undos)};
}
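// Illustrative usage sketch (not a fixed contract): a caller typically
// interprets a block once via locally_propagated_states, then calls
// next() once per bytecode to unwind the undo log instruction by
// instruction, reading the exposed stack and local types between
// calls.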
State locally_propagated_bid_state(const Index& index,
                                   const AnalysisContext& ctx,
                                   CollectedInfo& collect,
                                   BlockId bid,
                                   State state,
                                   BlockId targetBid) {
  Trace::Bump bumper{Trace::hhbbc, 10};
  if (!state.initialized) return {};

  auto const originalState = state;
  auto const blk = ctx.func.blocks()[bid].get();
  IndexAdaptor adaptor{index};
  auto interp = Interp { adaptor, ctx, collect, bid, blk, state };

  auto ret = State{};
  auto const propagate = [&] (BlockId target, const State* st) {
    if (target == targetBid) merge_into(ret, *st);
  };
  run(interp, originalState, propagate, []{});

  ret.stack.compact();
  return ret;
}
//////////////////////////////////////////////////////////////////////
UnitAnalysis analyze_unit(const AnalysisIndex& index, const Context& ctx) {
  auto const UNUSED bump =
    trace_bump(ctx, Trace::hhbbc, Trace::hhbbc_cfg, Trace::hhbbc_index);
  FTRACE(
    2, "{:#^70}\n-- {}\n",
    index.frozen() ? " Unit (Frozen) " : " Unit ",
    show(ctx)
  );

  AnalysisIndexAdaptor adaptor { index };
  ContextPusher _{adaptor, ctx};

  UnitAnalysis analysis{ctx};

  InTypeCns _2{adaptor};

  auto const& unit = index.lookup_unit(ctx.unit);
  for (size_t i = 0, size = unit.typeAliases.size(); i < size; ++i) {
    auto const& ta = *unit.typeAliases[i];
    if (ta.resolvedTypeStructure) continue;

    SCOPE_ASSERT_DETAIL("resolve-type-alias") {
      return folly::sformat("Resolving {} {}", show(ctx), ta.name);
    };
    FTRACE(2, "{:-^70}\n-- ({}) {}\n", "Resolve", show(ctx), ta.name);

    auto const resolution =
      resolve_type_structure(AnalysisIndexAdaptor { index }, nullptr, ta);
    auto const ts = resolution.sarray();
    assertx(!resolution.contextSensitive);
    if (!ts) {
      FTRACE(2, "  no resolution\n");
      continue;
    }

    FTRACE(2, "  resolved to {}\n", staticArrayStreamer(ts));
    analysis.resolvedTypeAliases.emplace_back(ResolvedTypeAlias{i, ts});
  }

  return analysis;
}
//////////////////////////////////////////////////////////////////////

namespace {
template <typename Resolve, typename Self>
ConstraintType type_from_constraint_impl(const TypeConstraint& tc,
                                         const Type& candidate,
                                         const Resolve& resolve,
                                         const Self& self) {
  using C = ConstraintType;

  assertx(IMPLIES(!tc.isCheckable(), tc.isMixed()));
  auto const exact = [&] (const Type& t) {
    return C{ t, t };
  };

  if (tc.isUnion()) {
    auto range = eachTypeConstraintInUnion(tc);
    auto it = range.begin();
    auto c = type_from_constraint_impl(*it, candidate, resolve, self);
    ++it;

    for (; it != range.end(); ++it) {
      auto c2 = type_from_constraint_impl(*it, candidate, resolve, self);
      c = union_constraint(c, c2);
    }
    return c;
  }
  auto ty = [&] {
    switch (getAnnotMetaType(tc.type())) {
      case AnnotMetaType::Precise: {
        switch (getAnnotDataType(tc.type())) {
          case KindOfNull:           return exact(TInitNull);
          case KindOfBoolean:        return exact(TBool);
          case KindOfInt64:          return exact(TInt);
          case KindOfDouble:         return exact(TDbl);
          case KindOfPersistentVec:
          case KindOfVec:            return exact(TVec);
          case KindOfPersistentDict:
          case KindOfDict:           return exact(TDict);
          case KindOfPersistentKeyset:
          case KindOfKeyset:         return exact(TKeyset);
          case KindOfResource:       return exact(TRes);
          case KindOfClsMeth:        return exact(TClsMeth);
          case KindOfEnumClassLabel: return exact(TEnumClassLabel);
          case KindOfObject:         return exact(TObj);
          case KindOfPersistentString:
          case KindOfString:
            return C{
              TStr,
              union_of(TStr, TCls, TLazyCls),
              TriBool::Yes
            };
          case KindOfUninit:
          case KindOfRFunc:
          case KindOfFunc:
          case KindOfClass:
          case KindOfRClsMeth:
          case KindOfLazyClass:      break;
        }
        always_assert(false);
      }
      case AnnotMetaType::Mixed:
        return C{ TInitCell, TInitCell, TriBool::No, true };
      case AnnotMetaType::Nothing:
      case AnnotMetaType::NoReturn:  return exact(TBottom);
      case AnnotMetaType::Nonnull:   return exact(TNonNull);
      case AnnotMetaType::Number:    return exact(TNum);
      case AnnotMetaType::VecOrDict: return exact(TKVish);
      case AnnotMetaType::ArrayLike: return exact(TArrLike);
      case AnnotMetaType::SubObject: {
        auto const cls = resolve(tc.clsName());
        auto lower = cls ? subObj(*cls) : TBottom;
        auto upper = lower;

        // The "magic" interfaces cannot be represented with a single
        // type. Obj=Foo|Str, for example, is not a valid type. It is
        // safe to only provide a subset as the lower bound. We can
        // use any provided candidate type to refine the lower bound
        // and supply the subset which would allow us to optimize away
        // the check.
        if (interface_supports_arrlike(tc.clsName())) {
          if (candidate.subtypeOf(BArrLike)) lower = TArrLike;
          upper |= TArrLike;
        }
        if (interface_supports_int(tc.clsName())) {
          if (candidate.subtypeOf(BInt)) lower = TInt;
          upper |= TInt;
        }
        if (interface_supports_double(tc.clsName())) {
          if (candidate.subtypeOf(BDbl)) lower = TDbl;
          upper |= TDbl;
        }
        if (interface_supports_string(tc.clsName())) {
          if (candidate.subtypeOf(BStr)) lower = TStr;
          upper |= union_of(TStr, TCls, TLazyCls);
          return C{
            std::move(lower),
            std::move(upper),
            TriBool::Yes
          };
        }
        return C{ std::move(lower), std::move(upper) };
      }
      case AnnotMetaType::Unresolved:
        return C{ TBottom, TBottom };
      case AnnotMetaType::This:
        if (auto const s = self()) {
          auto const obj = toobj(*s);
          if (!is_specialized_cls(*s) || dcls_of(*s).cls().couldBeMocked()) {
            return C{ setctx(obj), obj };
          }
          return exact(setctx(obj));
        }
        return C{ TBottom, TObj };
      case AnnotMetaType::Callable:
        return C{
          TBottom,
          union_of(TStr, TVec, TDict, TFunc, TRFunc, TObj, TClsMeth, TRClsMeth)
        };
      case AnnotMetaType::ArrayKey:
        return C{
          TArrKey,
          union_of(TArrKey, TCls, TLazyCls),
          TriBool::Yes
        };
      case AnnotMetaType::Classname:
        if (!Cfg::Eval::ClassPassesClassname) {
          return exact(TStr);
        }
        return C{
          Cfg::Eval::ClassnameNoticesSampleRate > 0 ?
            TStr : union_of(TStr, TCls, TLazyCls),
          union_of(TStr, TCls, TLazyCls)
        };
    }
    always_assert(false);
  }();
  // Nullable constraints always include TInitNull.
  if (tc.isNullable()) {
    ty.lower = opt(std::move(ty.lower));
    ty.upper = opt(std::move(ty.upper));
  }
  // We cannot ever say an upper-bound check will succeed, so its
  // lower-bound is always empty.
  if (tc.isUpperBound()) ty.lower = TBottom;
  // Soft type-constraints can potentially allow anything, so their
  // upper-bound is maximal. We might still be able to optimize away
  // the check if the lower bound is satisfied.
  if (tc.isSoft()) {
    ty.upper = TInitCell;
    ty.maybeMixed = true;
  }
  return ty;
}

}
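// Illustrative example (not from any analyzed program): for the hint
// `?int`, both bounds come out as ?Int, so a candidate already known
// to be ?Int satisfies the lower bound and the check can be dropped;
// a soft hint instead keeps the lower bound but widens the upper
// bound to TInitCell.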
ConstraintType type_from_constraint(
  const TypeConstraint& tc,
  const Type& candidate,
  const std::function<Optional<res::Class>(SString)>& resolve,
  const std::function<Optional<Type>()>& self)
{
  return type_from_constraint_impl(tc, candidate, resolve, self);
}
ConstraintType
lookup_constraint(const IIndex& index,
                  const Context& ctx,
                  const TypeConstraint& tc,
                  const Type& candidate) {
  return type_from_constraint_impl(
    tc, candidate,
    [&] (SString name) { return index.resolve_class(name); },
    [&] { return selfCls(index, ctx); }
  );
}
std::tuple<Type, bool, bool> verify_param_type(const IIndex& index,
                                               const Context& ctx,
                                               uint32_t paramId,
                                               const Type& t) {
  // Builtins verify parameter types differently.
  if (ctx.func->isNative) return { t, true, true };

  assertx(paramId < ctx.func->params.size());
  auto const& pinfo = ctx.func->params[paramId];

  std::vector<const TypeConstraint*> tcs{&pinfo.typeConstraint};
  for (auto const& tc : pinfo.upperBounds.m_constraints) tcs.push_back(&tc);
  auto refined = TInitCell;
  auto noop = true;
  auto effectFree = true;
  for (auto const tc : tcs) {
    auto const lookup = lookup_constraint(index, ctx, *tc, t);
    if (t.moreRefined(lookup.lower)) {
      refined = intersection_of(std::move(refined), t);
      continue;
    }
    noop = false;

    if (!t.couldBe(lookup.upper)) return { TBottom, false, false };

    auto result = intersection_of(t, lookup.upper);
    if (lookup.coerceClassToString == TriBool::Yes) {
      assertx(!lookup.lower.couldBe(BCls | BLazyCls));
      assertx(lookup.upper.couldBe(BStr | BCls | BLazyCls));
      if (result.couldBe(BCls | BLazyCls)) {
        result = promote_classish(std::move(result));
        if (effectFree && (Cfg::Eval::ClassStringHintNoticesSampleRate > 0 ||
                           !promote_classish(t).moreRefined(lookup.lower))) {
          effectFree = false;
        }
      }
    } else if (lookup.coerceClassToString == TriBool::Maybe) {
      if (result.couldBe(BCls | BLazyCls)) result |= TSStr;
      effectFree = false;
    } else {
      effectFree = false;
    }

    refined = intersection_of(std::move(refined), std::move(result));
    if (refined.is(BBottom)) return { TBottom, false, false };
  }
  return { std::move(refined), noop, effectFree };
}
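// Illustrative example (not from any analyzed program): for a
// parameter declared `int`, an argument known to be Int satisfies the
// lower bound, so the check is a no-op and the refined type stays
// Int; an argument known to be Str cannot satisfy the upper bound and
// yields TBottom (the binding can never succeed).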
Type adjust_type_for_prop(const IIndex& index,
                          const php::Class& propCls,
                          const TypeConstraint* tc,
                          const Type& ty) {
  assertx(tc->validForProp());
  if (Cfg::Eval::CheckPropTypeHints <= 2) return ty;
  auto lookup = lookup_constraint(
    index,
    Context { nullptr, nullptr, &propCls },
    *tc,
    ty
  );
  auto upper = unctx(lookup.upper);
  // A property with a mixed type-hint can be unset and therefore be
  // Uninit. Any other type-hint forbids unsetting.
  if (lookup.maybeMixed) upper |= TUninit;
  auto ret = intersection_of(std::move(upper), ty);
  if (lookup.coerceClassToString == TriBool::Yes) {
    assertx(!lookup.lower.couldBe(BCls | BLazyCls));
    assertx(lookup.upper.couldBe(BStr | BCls | BLazyCls));
    ret = promote_classish(std::move(ret));
  } else if (lookup.coerceClassToString == TriBool::Maybe) {
    if (ret.couldBe(BCls | BLazyCls)) ret |= TSStr;
  }
  return ret;
}
ConstraintType union_constraint(const ConstraintType& a,
                                const ConstraintType& b) {
  return ConstraintType{
    intersection_of(a.lower, b.lower),
    union_of(a.upper, b.upper),
    a.coerceClassToString | b.coerceClassToString,
    a.maybeMixed || b.maybeMixed
  };
}
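// Illustrative note: union_constraint combines bounds conservatively.
// For `int|string` the lower bound becomes intersection_of(TInt, TStr)
// = TBottom (sound, though imprecise), while the upper bound becomes
// the union of everything either arm may accept.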
//////////////////////////////////////////////////////////////////////
Optional<Type> selfCls(const IIndex& index, const Context& ctx) {
  if (!ctx.cls || is_used_trait(*ctx.cls)) {
    return std::nullopt;
  }
  if (auto const c = index.resolve_class(ctx.cls->name)) {
    return subCls(*c, true);
  }
  return std::nullopt;
}
Optional<Type> selfClsExact(const IIndex& index, const Context& ctx) {
  if (!ctx.cls || is_used_trait(*ctx.cls)) {
    return std::nullopt;
  }
  if (auto const c = index.resolve_class(ctx.cls->name)) {
    return clsExact(*c, true);
  }
  return std::nullopt;
}
Optional<Type> parentCls(const IIndex& index, const Context& ctx) {
  if (!ctx.cls || is_used_trait(*ctx.cls) || !ctx.cls->parentName) {
    return std::nullopt;
  }
  if (auto const c = index.resolve_class(ctx.cls->parentName)) {
    return subCls(*c, true);
  }
  return std::nullopt;
}
Optional<Type> parentClsExact(const IIndex& index, const Context& ctx) {
  if (!ctx.cls || is_used_trait(*ctx.cls) || !ctx.cls->parentName) {
    return std::nullopt;
  }
  if (auto const c = index.resolve_class(ctx.cls->parentName)) {
    return clsExact(*c, true);
  }
  return std::nullopt;
}
//////////////////////////////////////////////////////////////////////
res::Class builtin_class(const IIndex& index, SString name) {
  // A builtin class may not necessarily be resolved, but it must
  // exist, and if it is resolved, must be AttrBuiltin.
  auto const rcls = index.resolve_class(name);
  always_assert_flog(
    rcls.has_value(),
    "A builtin class ({}) does not exist",
    name
  );
  always_assert_flog(
    !rcls->cls() || rcls->cls()->attrs & AttrBuiltin,
    "A builtin class ({}) resolved to non-builtin",
    name
  );
  return *rcls;
}

//////////////////////////////////////////////////////////////////////

}