Introduce the "disable_shape_and_tuple_arrays" experiment flag
[hiphop-php.git] / hphp / hhbbc / analyze.cpp

/*
   +----------------------------------------------------------------------+
   | HipHop for PHP                                                       |
   +----------------------------------------------------------------------+
   | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com)  |
   +----------------------------------------------------------------------+
   | This source file is subject to version 3.01 of the PHP license,      |
   | that is bundled with this package in the file LICENSE, and is        |
   | available through the world-wide-web at the following url:           |
   | http://www.php.net/license/3_01.txt                                  |
   | If you did not receive a copy of the PHP license and are unable to   |
   | obtain it through the world-wide-web, please send a note to          |
   | license@php.net so we can mail you a copy immediately.               |
   +----------------------------------------------------------------------+
*/
#include "hphp/hhbbc/analyze.h"

#include <cstdint>
#include <cstdio>
#include <set>
#include <algorithm>
#include <string>
#include <vector>

#include "hphp/util/trace.h"
#include "hphp/util/dataflow-worklist.h"

#include "hphp/hhbbc/interp-state.h"
#include "hphp/hhbbc/interp.h"
#include "hphp/hhbbc/index.h"
#include "hphp/hhbbc/representation.h"
#include "hphp/hhbbc/cfg.h"
#include "hphp/hhbbc/unit-util.h"
#include "hphp/hhbbc/cfg-opts.h"
#include "hphp/hhbbc/class-util.h"
#include "hphp/hhbbc/func-util.h"
#include "hphp/hhbbc/options-util.h"

namespace HPHP { namespace HHBBC {

namespace {

TRACE_SET_MOD(hhbbc);

//////////////////////////////////////////////////////////////////////

const StaticString s_86pinit("86pinit");
const StaticString s_86sinit("86sinit");
const StaticString s_AsyncGenerator("HH\\AsyncGenerator");
const StaticString s_Generator("Generator");
const StaticString s_Closure("Closure");

//////////////////////////////////////////////////////////////////////

/*
 * Short-hand to get the rpoId of a block in a given FuncAnalysis.  (The RPO
 * ids are re-assigned per analysis.)
 */
uint32_t rpoId(const FuncAnalysis& ai, BlockId blk) {
  return ai.bdata[blk].rpoId;
}
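
/*
 * Entry state for a pseudomain (top-level unit code).  A pseudomain's
 * locals are shared with the global scope, so the most we can assume
 * about any of them on entry is TGen; class-ref slots always hold a
 * class, hence TCls.
 */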
State pseudomain_entry_state(borrowed_ptr<const php::Func> func) {
  auto ret = State{};
  ret.initialized = true;
  ret.thisAvailable = false;
  ret.locals.resize(func->locals.size());
  ret.iters.resize(func->numIters);
  ret.clsRefSlots.resize(func->numClsRefSlots);
  for (auto& l : ret.locals) l = TGen;
  for (auto& s : ret.clsRefSlots) s = TCls;
  return ret;
}
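
/*
 * Entry state for an ordinary function: parameter locals get their
 * declared (or known) argument types, closure bodies additionally get
 * the hidden closure local and their use vars, and every remaining
 * local starts out Uninit (or Gen, if it's volatile).
 */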
State entry_state(const Index& index,
                  Context const ctx,
                  ClassAnalysis* clsAnalysis,
                  const std::vector<Type>* knownArgs) {
  auto ret = State{};
  ret.initialized = true;
  ret.thisAvailable = index.lookup_this_available(ctx.func);
  ret.locals.resize(ctx.func->locals.size());
  ret.iters.resize(ctx.func->numIters);
  ret.clsRefSlots.resize(ctx.func->numClsRefSlots);

  for (auto& s : ret.clsRefSlots) s = TCls;

  // TODO(#3788877): when we're doing a context sensitive analyze_func_inline,
  // thisAvailable and the specific type of $this should be able to come from
  // the call context.

  auto locId = uint32_t{0};
  for (; locId < ctx.func->params.size(); ++locId) {
    // Parameters may be Uninit (i.e. no InitCell).  Also note that if a
    // function takes a param by ref, it might still come in as a Cell if
    // FPassC was used.
    if (knownArgs) {
      if (locId < knownArgs->size()) {
        ret.locals[locId] = (*knownArgs)[locId];
      } else {
        ret.locals[locId] = ctx.func->params[locId].isVariadic ? TArr : TUninit;
      }
      continue;
    }
    auto const& param = ctx.func->params[locId];
    if (ctx.func->isMemoizeImpl &&
        !param.byRef &&
        options.HardTypeHints) {
      auto const& constraint = param.typeConstraint;
      if (constraint.hasConstraint() && !constraint.isTypeVar() &&
          !constraint.isTypeConstant()) {
        ret.locals[locId] = index.lookup_constraint(ctx, constraint);
        continue;
      }
    }
    ret.locals[locId] = param.byRef ? TGen : TCell;
  }

  /*
   * Closures have a hidden local that's always the first (non-parameter)
   * local, which stores the closure itself.  Due to Class rescoping in the
   * runtime, the strongest type we can assert here is <= Closure.  We also
   * need to look up the types of use vars from the index.
   */
  if (ctx.func->isClosureBody) {
    assert(locId < ret.locals.size());
    assert(ctx.func->cls);
    auto const rcls = index.resolve_class(ctx, s_Closure.get());
    assert(rcls && "Closure class must always be unique and must resolve");
    ret.locals[locId++] = subObj(*rcls);
  }

  auto const useVars = ctx.func->isClosureBody
    ? index.lookup_closure_use_vars(ctx.func)
    : std::vector<Type>{};

  auto afterParamsLocId = uint32_t{0};
  for (; locId < ctx.func->locals.size(); ++locId, ++afterParamsLocId) {
    /*
     * Some of the closure locals are mapped to used variables or static
     * locals.  The types of use vars are looked up from the index, but we
     * don't currently do anything to try to track closure static local types.
     */
    if (ctx.func->isClosureBody) {
      if (afterParamsLocId < useVars.size()) {
        ret.locals[locId] = useVars[afterParamsLocId];
        continue;
      }
      if (afterParamsLocId < ctx.func->staticLocals.size()) {
        ret.locals[locId] = TGen;
        continue;
      }
    }

    // Otherwise the local will start uninitialized, like normal.
    ret.locals[locId] = TUninit;
  }

  // Finally, make sure any volatile locals are set to Gen, even if they are
  // parameters.
  for (auto locId = uint32_t{0}; locId < ctx.func->locals.size(); ++locId) {
    if (is_volatile_local(ctx.func, locId)) {
      ret.locals[locId] = TGen;
    }
  }

  return ret;
}

/*
 * Helper for do_analyze to initialize the states for all function
 * entries (i.e. each dv init and the main entry).  All of these count
 * as places the function could be entered, so they all must be
 * visited at least once.
 *
 * If we're entering at a DV-init, all higher parameter locals must be
 * Uninit, with the possible exception of a final variadic param
 * (which will be an array).  It is also possible that the DV-init is
 * reachable from within the function with these parameter locals
 * already initialized (although the normal php emitter can't do
 * this), but that case will be discovered when iterating.
 */
dataflow_worklist<uint32_t>
prepare_incompleteQ(const Index& index,
                    FuncAnalysis& ai,
                    ClassAnalysis* clsAnalysis,
                    const std::vector<Type>* knownArgs) {
  auto incompleteQ     = dataflow_worklist<uint32_t>(ai.rpoBlocks.size());
  auto const ctx       = ai.ctx;
  auto const numParams = ctx.func->params.size();

  auto const entryState = [&] {
    if (!is_pseudomain(ctx.func)) {
      return entry_state(index, ctx, clsAnalysis, knownArgs);
    }

    assert(!knownArgs && !clsAnalysis);
    assert(numParams == 0);
    return pseudomain_entry_state(ctx.func);
  }();

  if (knownArgs) {
    // When we have known args, we only need to add one of the entry points to
    // the initial state, since we know how many arguments were passed.
    auto const useDvInit = [&] {
      if (knownArgs->size() >= numParams) return false;
      for (auto i = knownArgs->size(); i < numParams; ++i) {
        auto const dv = ctx.func->params[i].dvEntryPoint;
        if (dv != NoBlockId) {
          ai.bdata[dv].stateIn = entryState;
          incompleteQ.push(rpoId(ai, dv));
          return true;
        }
      }
      return false;
    }();

    if (!useDvInit) {
      ai.bdata[ctx.func->mainEntry].stateIn = entryState;
      incompleteQ.push(rpoId(ai, ctx.func->mainEntry));
    }

    return incompleteQ;
  }

  for (auto paramId = uint32_t{0}; paramId < numParams; ++paramId) {
    auto const dv = ctx.func->params[paramId].dvEntryPoint;
    if (dv != NoBlockId) {
      ai.bdata[dv].stateIn = entryState;
      incompleteQ.push(rpoId(ai, dv));
      for (auto locId = paramId; locId < numParams; ++locId) {
        ai.bdata[dv].stateIn.locals[locId] =
          ctx.func->params[locId].isVariadic ? TArr : TUninit;
      }
    }
  }

  ai.bdata[ctx.func->mainEntry].stateIn = entryState;
  incompleteQ.push(rpoId(ai, ctx.func->mainEntry));

  return incompleteQ;
}

/*
 * Closures inside of classes are analyzed in the context they are
 * created in (this affects accessibility rules, access to privates,
 * etc.).
 *
 * Note that in the interpreter code, ctx.func->cls is not necessarily
 * the same as ctx.cls because of closures.
 */
Context adjust_closure_context(Context ctx) {
  if (ctx.cls && ctx.cls->closureContextCls) {
    ctx.cls = ctx.cls->closureContextCls;
  }
  return ctx;
}
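
/*
 * Core of the per-function analysis: run the abstract interpreter
 * over the function's blocks in reverse-post-order until the block
 * input states stop changing, accumulating information into
 * `collect` along the way.
 */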
FuncAnalysis do_analyze_collect(const Index& index,
                                Context const inputCtx,
                                CollectedInfo& collect,
                                ClassAnalysis* clsAnalysis,
                                const std::vector<Type>* knownArgs) {
  auto const ctx = adjust_closure_context(inputCtx);
  FuncAnalysis ai{ctx};

  auto const bump = trace_bump_for(ctx.cls, ctx.func);
  Trace::Bump bumper1{Trace::hhbbc, bump};
  Trace::Bump bumper2{Trace::hhbbc_cfg, bump};

  if (knownArgs) {
    FTRACE(2, "{:.^70}\n", "Inline Interp");
  }
  SCOPE_EXIT {
    if (knownArgs) {
      FTRACE(2, "{:.^70}\n", "End Inline Interp");
    }
  };

  FTRACE(2, "{:-^70}\n-- {}\n", "Analyze", show(ctx));

  /*
   * Set of RPO ids that still need to be visited.
   *
   * Initially, we need each entry block in this list.  As we visit
   * blocks, we propagate states to their successors and across their
   * back edges---when state merges cause a change to the block
   * stateIn, we will add it to this queue so it gets visited again.
   */
  auto incompleteQ = prepare_incompleteQ(index, ai, clsAnalysis, knownArgs);

  /*
   * There are potentially infinitely growing types when we're using
   * union_of to merge states, so occasionally we need to apply a
   * widening operator.
   *
   * Currently this is done with a straightforward heuristic: if you
   * visit a block too many times, we'll start doing all the merges
   * with the widening operator until we've had a chance to visit the
   * block again.  We must then continue iterating in case the actual
   * fixed point is higher than the result of widening.
   *
   * Termination is guaranteed because the widening operator has only
   * finite chains in the type lattice.
   */
  auto nonWideVisits = std::vector<uint32_t>(ctx.func->blocks.size());

  // For debugging, count how many times basic blocks get interpreted.
  auto interp_counter = uint32_t{0};

  // Used to force blocks that depended on the types of local statics
  // to be re-analyzed when the local statics change.
  std::unordered_map<borrowed_ptr<const php::Block>, std::map<LocalId, Type>>
    usedLocalStatics;

  /*
   * Iterate until a fixed point.
   *
   * Each time a stateIn for a block changes, we re-insert the block's
   * rpo ID in incompleteQ.  Since incompleteQ is ordered, we'll
   * always visit blocks with earlier RPO ids first, which hopefully
   * means fewer iterations.
   */
  do {
    while (!incompleteQ.empty()) {
      auto const blk = ai.rpoBlocks[incompleteQ.pop()];

      if (nonWideVisits[blk->id]++ > options.analyzeFuncWideningLimit) {
        nonWideVisits[blk->id] = 0;
      }

      FTRACE(2, "block #{}\nin {}{}", blk->id,
             state_string(*ctx.func, ai.bdata[blk->id].stateIn, collect),
             property_state_string(collect.props));
      ++interp_counter;

      auto propagate = [&] (BlockId target, const State& st) {
        auto const needsWiden =
          nonWideVisits[target] >= options.analyzeFuncWideningLimit;

        // We haven't optimized the widening operator much, because it
        // doesn't happen in practice right now.  We want to know when
        // it starts happening:
        if (needsWiden) {
          std::fprintf(stderr, "widening in %s on %s\n",
                       ctx.unit->filename->data(),
                       ctx.func->name->data());
        }

        FTRACE(2, "  {}-> {}\n", needsWiden ? "widening " : "", target);
        FTRACE(4, "target old {}",
               state_string(*ctx.func, ai.bdata[target].stateIn, collect));

        auto const changed =
          needsWiden ? widen_into(ai.bdata[target].stateIn, st)
                     : merge_into(ai.bdata[target].stateIn, st);
        if (changed) {
          incompleteQ.push(rpoId(ai, target));
        }
        FTRACE(4, "target new {}",
               state_string(*ctx.func, ai.bdata[target].stateIn, collect));
      };

      auto stateOut = ai.bdata[blk->id].stateIn;
      auto interp = Interp { index, ctx, collect, blk, stateOut };
      auto flags = run(interp, propagate);
      // We only care about the usedLocalStatics from the last visit.
      if (flags.usedLocalStatics) {
        usedLocalStatics[blk] = std::move(*flags.usedLocalStatics);
      } else {
        usedLocalStatics.erase(blk);
      }

      if (flags.returned) {
        ai.inferredReturn |= std::move(*flags.returned);
      }
    }

    // Maybe some local statics changed type since the last time their
    // blocks were visited.
    for (auto const& elm : usedLocalStatics) {
      for (auto const& ls : elm.second) {
        if (collect.localStaticTypes[ls.first] != ls.second) {
          incompleteQ.push(rpoId(ai, elm.first->id));
          break;
        }
      }
    }
  } while (!incompleteQ.empty());

  ai.closureUseTypes = std::move(collect.closureUseTypes);
  ai.cnsMap = std::move(collect.cnsMap);
  ai.readsUntrackedConstants = collect.readsUntrackedConstants;
  ai.mayUseVV = collect.mayUseVV;

  if (ctx.func->isGenerator) {
    if (ctx.func->isAsync) {
      // Async generators always return AsyncGenerator object.
      ai.inferredReturn = objExact(index.builtin_class(s_AsyncGenerator.get()));
    } else {
      // Non-async generators always return Generator object.
      ai.inferredReturn = objExact(index.builtin_class(s_Generator.get()));
    }
  } else if (ctx.func->isAsync) {
    // Async functions always return WaitH<T>, where T is the type
    // returned internally.
    ai.inferredReturn = wait_handle(index, ai.inferredReturn);
  }

  /*
   * If inferredReturn is TBottom, the callee didn't execute a return
   * at all.  (E.g. it unconditionally throws, or is an abstract
   * function body.)
   *
   * In this case, we leave the return type as TBottom, to indicate
   * the same to callers.
   */
  assert(ai.inferredReturn.subtypeOf(TGen));

  // For debugging, print the final input states for each block.
  FTRACE(2, "{}", [&] {
    auto const bsep = std::string(60, '=') + "\n";
    auto const sep = std::string(60, '-') + "\n";
    auto ret = folly::format(
      "{}function {} ({} block interps):\n{}",
      bsep,
      show(ctx),
      interp_counter,
      bsep
    ).str();
    for (auto& bd : ai.bdata) {
      folly::format(
        &ret,
        "{}block {}:\nin {}",
        sep,
        ai.rpoBlocks[bd.rpoId]->id,
        state_string(*ctx.func, bd.stateIn, collect)
      );
    }
    ret += sep + bsep;
    folly::format(&ret, "Inferred return type: {}\n", show(ai.inferredReturn));
    ret += bsep;
    return ret;
  }());

  // Do this after the tracing above.
  ai.localStaticTypes = std::move(collect.localStaticTypes);
  return ai;
}
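
/*
 * Convenience wrapper around do_analyze_collect for callers that
 * don't need to supply their own CollectedInfo.
 */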
FuncAnalysis do_analyze(const Index& index,
                        Context const ctx,
                        ClassAnalysis* clsAnalysis,
                        const std::vector<Type>* knownArgs,
                        bool trackConstantArrays) {
  CollectedInfo collect {
    index, ctx, clsAnalysis, nullptr, trackConstantArrays
  };
  return do_analyze_collect(index, ctx, collect, clsAnalysis, knownArgs);
}

//////////////////////////////////////////////////////////////////////

/*
 * In the case of HNI builtin classes, private properties are
 * allowed to be mutated by native code, so we may not see all the
 * modifications.
 *
 * We are allowed to assume the type annotation on the property is
 * accurate, although nothing is currently checking that this is the
 * case.  We handle this right now by doing inference as if it
 * couldn't be affected by native code, then asserting that the
 * inferred type is at least a subtype of the annotated type, and
 * expanding it to the annotated type if it is bigger.
 */
void expand_hni_prop_types(ClassAnalysis& clsAnalysis) {
  auto relax_prop = [&] (const php::Prop& prop, PropState& propState) {
    auto it = propState.find(prop.name);
    if (it == end(propState)) return;

    /*
     * When HardTypeHints isn't on, when DisallowDynamicVarEnvFuncs
     * isn't on, or when any functions are interceptable, we don't
     * require the constraints to actually match, and relax all the
     * HNI types to Gen.
     *
     * This is because extensions may wish to assign to properties
     * after a typehint guard, which is going to fail without
     * HardTypeHints.  Or, with any interceptable functions, it's
     * quite possible that some function calls in systemlib might not
     * be known to return things matching the property type hints for
     * some properties, or not to take their arguments by reference.
     */
    auto const hniTy =
      !options.HardTypeHints ||
      !options.DisallowDynamicVarEnvFuncs ||
      clsAnalysis.anyInterceptable
        ? TGen
        : from_hni_constraint(prop.typeConstraint);
    if (it->second.subtypeOf(hniTy)) {
      it->second = hniTy;
      return;
    }

    std::fprintf(
      stderr,
      "HNI class %s::%s inferred property type (%s) doesn't "
      "match annotation (%s)\n",
      clsAnalysis.ctx.cls->name->data(),
      prop.name->data(),
      show(it->second).c_str(),
      show(hniTy).c_str()
    );
    always_assert(!"HNI property type annotation was wrong");
  };

  for (auto& prop : clsAnalysis.ctx.cls->properties) {
    relax_prop(prop, clsAnalysis.privateProperties);
    relax_prop(prop, clsAnalysis.privateStatics);
  }
}

//////////////////////////////////////////////////////////////////////

}

//////////////////////////////////////////////////////////////////////
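
/*
 * The RPO sort (including DV entry blocks) is computed once per
 * analysis; bdata maps each block id back to its position in
 * rpoBlocks so that rpoId lookups are O(1).
 */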
FuncAnalysis::FuncAnalysis(Context ctx)
  : ctx(ctx)
  , rpoBlocks(rpoSortAddDVs(*ctx.func))
  , bdata(ctx.func->blocks.size())
  , inferredReturn(TBottom)
{
  for (auto rpoId = size_t{0}; rpoId < rpoBlocks.size(); ++rpoId) {
    bdata[rpoBlocks[rpoId]->id].rpoId = rpoId;
  }
}
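
/*
 * If rebuild_exn_tree reports that it changed the exception tree (and
 * therefore the CFG), the analysis results are stale, so we simply
 * re-run the analysis until it stabilizes.
 */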
FuncAnalysis analyze_func(const Index& index, Context const ctx,
                          bool trackConstantArrays) {
  while (true) {
    auto ret = do_analyze(index, ctx, nullptr, nullptr, trackConstantArrays);
    if (!rebuild_exn_tree(ret)) return ret;
  }
}

FuncAnalysis analyze_func_collect(const Index& index,
                                  Context const ctx,
                                  CollectedInfo& collect) {
  return do_analyze_collect(index, ctx, collect, nullptr, nullptr);
}
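
/*
 * Analyze a function with a known set of argument types, e.g. for
 * inline interpretation of a callee at a particular call site.
 */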
FuncAnalysis analyze_func_inline(const Index& index,
                                 Context const ctx,
                                 std::vector<Type> args) {
  assert(!ctx.func->isClosureBody);
  return do_analyze(index, ctx, nullptr, &args, true);
}

ClassAnalysis analyze_class(const Index& index, Context const ctx) {
  assert(ctx.cls && !ctx.func);

  {
    Trace::Bump bumper{Trace::hhbbc, kSystemLibBump,
                       is_systemlib_part(*ctx.unit)};
    FTRACE(2, "{:#^70}\n", "Class");
  }

  ClassAnalysis clsAnalysis(ctx, index.any_interceptable_functions());
  auto const associatedClosures = index.lookup_closures(ctx.cls);
  auto const isHNIBuiltin = ctx.cls->attrs & AttrBuiltin;

  /*
   * Initialize inferred private property types to their in-class
   * initializers.
   *
   * We need to loosen_statics and loosen_values on instance
   * properties, because the class could be unserialized, which we
   * don't guarantee preserves those aspects of the type.
   *
   * Also, set Uninit properties to TBottom, so that analysis
   * of 86pinit methods sets them to the correct type.
   */
  for (auto& prop : ctx.cls->properties) {
    if (!(prop.attrs & AttrPrivate)) continue;

    auto const cellTy = from_cell(prop.val);
    if (isHNIBuiltin) {
      auto const hniTy = from_hni_constraint(prop.typeConstraint);
      if (!cellTy.subtypeOf(hniTy)) {
        std::fprintf(stderr, "hni %s::%s has impossible type. "
                     "The annotation says it is type (%s) "
                     "but the default value is type (%s).\n",
                     ctx.cls->name->data(),
                     prop.name->data(),
                     show(hniTy).c_str(),
                     show(cellTy).c_str());
        always_assert(0 && "HNI systemlib has invalid type annotations");
      }
    }

    if (!(prop.attrs & AttrStatic)) {
      auto t = loosen_statics(loosen_values(cellTy));
      if (!is_closure(*ctx.cls) && t.subtypeOf(TUninit)) {
        /*
         * For non-closure classes, a property of type KindOfUninit
         * means that it has a non-scalar initializer which will be
         * set by a 86pinit method.  For these classes, we want the
         * initial type of the property to be the type set by the
         * 86pinit method, so we set the type to TBottom.
         *
         * Closures will not have an 86pinit body, but still may have
         * properties of kind KindOfUninit (they will later contain
         * used variables or static locals for the closure body).  We
         * don't want to touch those.
         */
        t = TBottom;
      }
      clsAnalysis.privateProperties[prop.name] = t;
    } else {
      // Same thing as the above regarding TUninit and TBottom.
      // Static properties don't need to exclude closures for this,
      // though---we use instance properties for the closure
      // 86static_* properties.
      auto t = cellTy;
      if (t.subtypeOf(TUninit)) {
        t = TBottom;
      }
      clsAnalysis.privateStatics[prop.name] = t;
    }
  }

  /*
   * For classes with non-scalar initializers, the 86pinit and 86sinit
   * methods are guaranteed to run before any other method, and are
   * never called afterwards.  Thus, we can analyze these methods
   * first to determine the initial types of properties with
   * non-scalar initializers, and they need not be run again as part
   * of the fixed-point computation.
   */
  if (auto f = find_method(ctx.cls, s_86pinit.get())) {
    do_analyze(
      index,
      Context { ctx.unit, f, ctx.cls },
      &clsAnalysis,
      nullptr,
      true
    );
  }
  if (auto f = find_method(ctx.cls, s_86sinit.get())) {
    do_analyze(
      index,
      Context { ctx.unit, f, ctx.cls },
      &clsAnalysis,
      nullptr,
      true
    );
  }

  // Verify that none of the class properties are TBottom, i.e. that
  // any property of type KindOfUninit has been initialized (by
  // 86pinit or 86sinit).
  for (auto& prop : ctx.cls->properties) {
    if (!(prop.attrs & AttrPrivate)) continue;
    if (prop.attrs & AttrStatic) {
      assert(!clsAnalysis.privateStatics[prop.name].subtypeOf(TBottom));
    } else {
      assert(!clsAnalysis.privateProperties[prop.name].subtypeOf(TBottom));
    }
  }

  // TODO(#3696042): We don't have support for static properties with
  // specialized array types in the minstr functions yet, so if we
  // have one after 86sinit, throw it away.
  for (auto& kv : clsAnalysis.privateStatics) {
    if (is_specialized_array(kv.second)) {
      kv.second |= TArr;
    }
  }

  /*
   * Similar to the function case in do_analyze, we have to handle the
   * fact that there are infinitely growing chains in our type lattice
   * under union_of.
   *
   * So if we've visited the whole class some number of times and
   * still aren't at a fixed point, we'll set the property state to
   * the result of widening the old state with the new state, and then
   * reset the counter.  This guarantees eventual termination.
   */
  auto nonWideVisits = uint32_t{0};

  for (;;) {
    auto const previousProps   = clsAnalysis.privateProperties;
    auto const previousStatics = clsAnalysis.privateStatics;

    std::vector<FuncAnalysis> methodResults;
    std::vector<FuncAnalysis> closureResults;

    // Analyze every method in the class until we reach a fixed point
    // on the private property states.
    for (auto& f : ctx.cls->methods) {
      if (f->name->isame(s_86pinit.get()) ||
          f->name->isame(s_86sinit.get())) {
        continue;
      }

      methodResults.push_back(
        do_analyze(
          index,
          Context { ctx.unit, borrow(f), ctx.cls },
          &clsAnalysis,
          nullptr,
          true
        )
      );
    }

    if (associatedClosures) {
      for (auto& c : *associatedClosures) {
        auto const invoke = borrow(c->methods[0]);
        closureResults.push_back(
          do_analyze(
            index,
            Context { ctx.unit, invoke, c },
            &clsAnalysis,
            nullptr,
            true
          )
        );
      }
    }

    auto noExceptionalChanges = [&] {
      auto changes = false;
      for (auto& fa : methodResults) {
        if (rebuild_exn_tree(fa)) changes = true;
      }
      for (auto& fa : closureResults) {
        if (rebuild_exn_tree(fa)) changes = true;
      }
      return !changes;
    };

    // Check if we've reached a fixed point yet.
    if (previousProps   == clsAnalysis.privateProperties &&
        previousStatics == clsAnalysis.privateStatics &&
        noExceptionalChanges()) {
      clsAnalysis.methods  = std::move(methodResults);
      clsAnalysis.closures = std::move(closureResults);
      break;
    }

    if (nonWideVisits++ > options.analyzeClassWideningLimit) {
      auto const a = widen_into(clsAnalysis.privateProperties, previousProps);
      auto const b = widen_into(clsAnalysis.privateStatics, previousStatics);
      always_assert(a || b);
      nonWideVisits = 0;
    }
  }

  if (isHNIBuiltin) expand_hni_prop_types(clsAnalysis);

  Trace::Bump bumper{Trace::hhbbc, kSystemLibBump,
                     is_systemlib_part(*ctx.unit)};

  // For debugging, print the final state of the class analysis.
  FTRACE(2, "{}", [&] {
    auto const bsep = std::string(60, '+') + "\n";
    auto ret = folly::format(
      "{}class {}:\n{}",
      bsep,
      ctx.cls->name,
      bsep
    ).str();
    for (auto& kv : clsAnalysis.privateProperties) {
      ret += folly::format(
        "private ${: <14} :: {}\n",
        kv.first,
        show(kv.second)
      ).str();
    }
    for (auto& kv : clsAnalysis.privateStatics) {
      ret += folly::format(
        "private static ${: <14} :: {}\n",
        kv.first,
        show(kv.second)
      ).str();
    }
    ret += bsep;
    return ret;
  }());

  return clsAnalysis;
}

//////////////////////////////////////////////////////////////////////
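
/*
 * Replay the abstract interpreter over a single block, recording the
 * state before each bytecode plus the final state.  This lets later
 * passes recover per-instruction states without the analysis having
 * stored them for every block.
 */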
std::vector<std::pair<State,StepFlags>>
locally_propagated_states(const Index& index,
                          const FuncAnalysis& fa,
                          borrowed_ptr<const php::Block> blk,
                          State state) {
  Trace::Bump bumper{Trace::hhbbc, 10};

  std::vector<std::pair<State,StepFlags>> ret;
  ret.reserve(blk->hhbcs.size() + 1);

  CollectedInfo collect { index, fa.ctx, nullptr, nullptr, true, &fa };
  auto interp = Interp { index, fa.ctx, collect, blk, state };

  for (auto& op : blk->hhbcs) {
    ret.emplace_back(state, StepFlags{});
    ret.back().second = step(interp, op);
  }

  ret.emplace_back(std::move(state), StepFlags{});
  return ret;
}

//////////////////////////////////////////////////////////////////////

}}