2 +----------------------------------------------------------------------+
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
16 #include "hphp/hhbbc/optimize.h"
25 #include <boost/dynamic_bitset.hpp>
27 #include <folly/Optional.h>
28 #include <folly/gen/Base.h>
29 #include <folly/gen/String.h>
31 #include "hphp/util/trace.h"
32 #include "hphp/util/match.h"
34 #include "hphp/runtime/vm/hhbc.h"
35 #include "hphp/runtime/base/datatype.h"
37 #include "hphp/hhbbc/hhbbc.h"
38 #include "hphp/hhbbc/analyze.h"
39 #include "hphp/hhbbc/cfg.h"
40 #include "hphp/hhbbc/cfg-opts.h"
41 #include "hphp/hhbbc/dce.h"
42 #include "hphp/hhbbc/func-util.h"
43 #include "hphp/hhbbc/interp.h"
44 #include "hphp/hhbbc/interp-internal.h"
45 #include "hphp/hhbbc/interp-state.h"
46 #include "hphp/hhbbc/misc.h"
47 #include "hphp/hhbbc/options-util.h"
48 #include "hphp/hhbbc/representation.h"
49 #include "hphp/hhbbc/type-system.h"
50 #include "hphp/hhbbc/unit-util.h"
52 namespace HPHP
{ namespace HHBBC
{
54 //////////////////////////////////////////////////////////////////////
58 //////////////////////////////////////////////////////////////////////
61 * For filtering assertions, some opcodes are considered to have no
62 * use for a stack input assertion.
64 * For now this is restricted to opcodes that do literally nothing.
// Returns whether `op` has no use for a type assertion on its stack input.
// NOTE(review): only the signature survives in this extraction -- the body
// (presumably a switch over opcodes that do literally nothing, per the
// comment above) is missing; confirm against the full source.
66 bool ignoresStackInput(Op op
) {
// Builds an AssertRAT bytecode (TyBC) for `arg` asserting type `t`, or
// folly::none when the assertion would be useless. NOTE(review): the
// extraction drops part of the parameter list (an `arg` of ArgType and a
// `Type t` are used below but not visible in the signature) -- confirm.
78 template<class TyBC
, class ArgType
>
79 folly::Optional
<Bytecode
> makeAssert(ArrayTypeTable::Builder
& arrTable
,
// Bottom means unreachable; nothing useful to assert.
82 if (t
.subtypeOf(BBottom
)) return folly::none
;
// Lower the inferred type to a repo-auth-type for the assertion.
83 auto const rat
= make_repo_type(arrTable
, t
);
84 using T
= RepoAuthType::Tag
;
85 if (options
.FilterAssertions
) {
86 // Cell and InitCell don't add any useful information, so leave them
88 if (rat
== RepoAuthType
{T::Cell
}) return folly::none
;
89 if (rat
== RepoAuthType
{T::InitCell
}) return folly::none
;
91 return Bytecode
{ TyBC
{ arg
, rat
} };
// Emits AssertRATL/AssertRATStk bytecodes for one instruction, based on the
// post-step interpreter `state`. NOTE(review): this extraction is missing
// lines (parameters such as `state`/`gen`, the enclosing switch over
// bcode.op, and several case bodies are not visible) -- comments below
// describe only what is visible.
95 void insert_assertions_step(ArrayTypeTable::Builder
& arrTable
,
96 const php::Func
& func
,
97 const Bytecode
& bcode
,
99 std::bitset
<kMaxTrackedLocals
> mayReadLocalSet
,
100 std::vector
<uint8_t> obviousStackOutputs
,
// Nothing to assert on unreachable code.
102 if (state
.unreachable
) return;
// Local assertions: one AssertRATL per live, tracked, readable local.
104 for (LocalId i
= 0; i
< state
.locals
.size(); ++i
) {
105 if (func
.locals
[i
].killed
) continue;
106 if (options
.FilterAssertions
) {
107 // Do not emit assertions for untracked locals.
108 if (i
>= mayReadLocalSet
.size()) break;
109 if (!mayReadLocalSet
.test(i
)) continue;
111 auto const realT
= state
.locals
[i
];
112 auto const op
= makeAssert
<bc::AssertRATL
>(arrTable
, i
, realT
);
116 if (!options
.InsertStackAssertions
) return;
118 assert(obviousStackOutputs
.size() == state
.stack
.size());
// Stack assertions: `idx` counts from the top of the stack.
120 auto const assert_stack
= [&] (size_t idx
) {
121 assert(idx
< state
.stack
.size());
// Skip slots whose pushing instruction already had an obvious output.
122 if (obviousStackOutputs
[state
.stack
.size() - idx
- 1]) return;
123 if (ignoresStackInput(bcode
.op
)) return;
124 auto const realT
= state
.stack
[state
.stack
.size() - idx
- 1].type
;
125 auto const flav
= stack_flav(realT
);
// Only assert when the type is strictly more precise than its flavor.
127 if (options
.FilterAssertions
&& !realT
.strictSubtypeOf(flav
)) {
132 makeAssert
<bc::AssertRATStk
>(
134 static_cast<uint32_t>(idx
),
// Assert every stack slot the instruction pops.
140 for (auto i
= size_t{0}; i
< bcode
.numPop(); ++i
) assert_stack(i
);
142 // The base instructions are special in that they may read from the
143 // stack without necessarily popping it. We want type assertions on
144 // the stack slots they'll read.
146 case Op::BaseC
: assert_stack(bcode
.BaseC
.arg1
); break;
147 case Op::BaseGC
: assert_stack(bcode
.BaseGC
.arg1
); break;
149 assert_stack(bcode
.BaseSC
.arg1
);
150 assert_stack(bcode
.BaseSC
.arg2
);
// Dim reads a stack cell only for the EC/PC member codes.
153 switch (bcode
.Dim
.mkey
.mcode
) {
155 assert_stack(bcode
.Dim
.mkey
.idx
);
157 case MW
: case MEL
: case MPL
: case MEI
:
158 case MET
: case MPT
: case MQT
:
167 * When filter assertions is on, we use this to avoid putting stack
168 * assertions on some "obvious" instructions.
170 * These are instructions that push an output type that is always the
171 * same, i.e. where an AssertT is never going to add information.
172 * (E.g. "Int 3" obviously pushes an Int, and the JIT has no trouble
173 * figuring that out, so there's no reason to assert about it.)
175 * TODO(#3676101): It turns out many hhbc opcodes have known output
176 * types---there are some super polymorphic ones, but many always do
177 * bools or objects, etc. We might consider making stack flavors have
178 * subtypes and adding this to the opcode table.
// Returns true when `op` pushes an output type the JIT can always work out
// on its own, so a stack AssertT would add no information. NOTE(review):
// the extraction drops most of the switch (many case labels and the
// `return true`/`return false` arms are missing) -- only a sample of the
// "obvious output" opcodes is visible below.
180 bool hasObviousStackOutput(const Bytecode
& op
, const Interp
& interp
) {
195 case Op::NewDictArray
:
196 case Op::NewPackedArray
:
198 case Op::NewStructArray
:
199 case Op::NewStructDArray
:
200 case Op::NewStructDict
:
201 case Op::NewVecArray
:
202 case Op::NewKeysetArray
:
203 case Op::AddNewElemC
:
235 case Op::InstanceOfD
:
236 case Op::IsLateBoundCls
:
237 case Op::IsTypeStructC
:
238 case Op::CombineAndResolveTypeStruct
:
239 case Op::RecordReifiedGeneric
:
249 case Op::OODeclExists
:
// Presumably the This/BareThis-style case: obvious only when the known
// $this type is at least as precise as what's on the stack -- confirm.
254 if (auto tt
= thisType(interp
.index
, interp
.ctx
)) {
255 auto t
= interp
.state
.stack
.back().type
;
256 if (is_opt(t
)) t
= unopt(std::move(t
));
257 return !t
.strictSubtypeOf(*tt
);
268 // The output of SetL is obvious if you know what its input is
269 // (which we'll assert if we know).
273 // The output of SetM isn't quite as obvious as SetL, but the jit
274 // can work it out from the input just as well as hhbbc (if not better).
// Per-block visitor: re-interprets each bytecode in block `bid`, copying it
// into `newBCs` with type assertions inserted, then swaps the new sequence
// in if anything changed. NOTE(review): the extraction is missing lines
// (the `bid`/`state` parameters, parts of the loop body, and the call that
// copies `op` into newBCs are not visible) -- comments cover only what is
// shown.
283 void insert_assertions(const Index
& index
,
284 const FuncAnalysis
& ainfo
,
285 CollectedInfo
& collect
,
289 auto& cblk
= ainfo
.ctx
.func
->blocks
[bid
];
290 newBCs
.reserve(cblk
->hhbcs
.size());
292 auto& arrTable
= *index
.array_table_builder();
293 auto const ctx
= ainfo
.ctx
;
// Tracks, per stack slot, whether the instruction that pushed it had an
// obvious output (so we can skip asserting it later).
295 std::vector
<uint8_t> obviousStackOutputs(state
.stack
.size(), false);
297 auto interp
= Interp
{ index
, ctx
, collect
, bid
, cblk
.get(), state
};
298 for (auto& op
: cblk
->hhbcs
) {
299 FTRACE(2, " == {}\n", show(ctx
.func
, op
));
// Appends a bytecode to the rewritten stream, inheriting op's srcLoc.
301 auto gen
= [&] (const Bytecode
& newb
) {
302 newBCs
.push_back(newb
);
303 newBCs
.back().srcLoc
= op
.srcLoc
;
304 FTRACE(2, " + {}\n", show(ctx
.func
, newBCs
.back()));
// Once the interpreter state is unreachable, cut the fallthrough edge
// and terminate the block with a fatal (unless op already terminates).
307 if (state
.unreachable
) {
308 if (cblk
->fallthrough
!= NoBlockId
) {
309 auto const blk
= cblk
.mutate();
310 blk
->fallthrough
= NoBlockId
;
312 if (!(instrFlags(op
.op
) & TF
)) {
313 gen(bc::BreakTraceHint
{});
314 gen(bc::String
{ s_unreachable
.get() });
315 gen(bc::Fatal
{ FatalOp::Runtime
});
// Interpret the instruction, keeping the pre-step state for assertions.
320 auto const preState
= state
;
321 auto const flags
= step(interp
, op
);
323 insert_assertions_step(
328 flags
.mayReadLocalSet
,
// CGetL2 pushes under the top of stack, so its output flag is inserted
// one slot below the end.
333 if (op
.op
== Op::CGetL2
) {
334 obviousStackOutputs
.emplace(obviousStackOutputs
.end() - 1,
335 hasObviousStackOutput(op
, interp
));
// Keep obviousStackOutputs in sync with the simulated stack.
337 for (int i
= 0; i
< op
.numPop(); i
++) {
338 obviousStackOutputs
.pop_back();
340 for (auto i
= op
.numPush(); i
--; ) {
341 obviousStackOutputs
.emplace_back(hasObviousStackOutput(op
, interp
));
// Only mutate (and hence copy) the block if the bytecode changed.
348 if (cblk
->hhbcs
!= newBCs
) cblk
.mutate()->hhbcs
= std::move(newBCs
);
// Walks the fallthrough chain from the main entry, checking whether the
// function consists only of side-effect-free, unit-persistence-compatible
// bytecodes. NOTE(review): the switch over op kinds is mostly missing from
// this extraction (only the DefTypeAlias label and a comment survive), as
// are the return statements -- confirm against the full source.
351 bool persistence_check(const php::Func
* const func
) {
352 auto bid
= func
->mainEntry
;
353 while (bid
!= NoBlockId
) {
354 auto const& blk
= func
->blocks
[bid
];
355 for (auto& op
: blk
->hhbcs
) {
361 case Op::DefTypeAlias
:
375 // Not strictly no-side effects, but as long as the rest of
376 // the unit is limited to the above, we're fine (and we expect
377 // one following a DefCns).
385 bid
= blk
->fallthrough
;
390 //////////////////////////////////////////////////////////////////////
// If every output of `op` has a constant type in `state`, replaces the op
// with pops of its inputs followed by pushes of literal constants (via
// `gen`), and returns true. NOTE(review): the extraction drops the
// `template<class Gen>` header and several lines (the constVals capture
// into the first loop, the pop emission, and the final return) -- comments
// cover only what is visible.
393 bool propagate_constants(const Bytecode
& op
, State
& state
, Gen gen
) {
394 auto const numPop
= op
.numPop();
395 auto const numPush
= op
.numPush();
396 auto const stkSize
= state
.stack
.size();
// Small fixed buffer for the common case; outputs beyond numCells are
// re-derived from the state below.
397 constexpr auto numCells
= 4;
398 TypedValue constVals
[numCells
];
400 // All outputs of the instruction must have constant types for this
402 for (auto i
= size_t{0}; i
< numPush
; ++i
) {
403 auto const& ty
= state
.stack
[stkSize
- i
- 1].type
;
405 auto const v
= tv(ty
);
406 if (!v
) return false;
408 } else if (!is_scalar(ty
)) {
413 // Pop the inputs, and push the constants.
414 for (auto i
= size_t{0}; i
< numPop
; ++i
) {
415 DEBUG_ONLY
auto flavor
= op
.popFlavor(i
);
416 assertx(flavor
!= Flavor::U
);
417 // Even for CU we only support C's.
421 for (auto i
= size_t{0}; i
< numPush
; ++i
) {
// Outputs past the fixed buffer are recomputed from the stack types.
422 auto const v
= i
< numCells
?
423 constVals
[i
] : *tv(state
.stack
[stkSize
- i
- 1].type
);
424 gen(gen_constant(v
));
425 state
.stack
[stkSize
- i
- 1].type
= from_cell(v
);
// Convenience overload: forwards to the generic propagate_constants with a
// lambda sink. NOTE(review): the lambda body and the third parameter of
// this overload are missing from this extraction -- confirm.
431 bool propagate_constants(const Bytecode
& bc
, State
& state
,
433 return propagate_constants(bc
, state
, [&] (const Bytecode
& bc
) {
438 //////////////////////////////////////////////////////////////////////
441 * Create a block similar to another block (but with no bytecode in it yet).
// Appends a fresh, empty block to `func`, copying srcBlk's exception node
// and throw exit so it lives in the same exception region. NOTE(review):
// the `return bid;` is not visible in this extraction -- presumably the
// new block's id is returned; confirm.
443 BlockId
make_block(php::Func
* func
,
444 const php::Block
* srcBlk
) {
445 FTRACE(1, " ++ new block {}\n", func
->blocks
.size());
447 auto newBlk
= copy_ptr
<php::Block
>{php::Block
{}};
448 auto const blk
= newBlk
.mutate();
449 blk
->exnNodeId
= srcBlk
->exnNodeId
;
450 blk
->throwExit
= srcBlk
->throwExit
;
451 auto const bid
= func
->blocks
.size();
452 func
->blocks
.push_back(std::move(newBlk
));
// Creates a new block containing only String("unreachable") + Fatal,
// detached from any exception region. NOTE(review): the statement that
// inserts the two bytecodes into blk->hhbcs, and the return, are missing
// from this extraction.
457 BlockId
make_fatal_block(php::Func
* func
,
458 const php::Block
* srcBlk
) {
459 auto bid
= make_block(func
, srcBlk
);
460 auto const blk
= func
->blocks
[bid
].mutate();
// Reuse the source block's last srcLoc so the fatal is attributed sanely.
461 auto const srcLoc
= srcBlk
->hhbcs
.back().srcLoc
;
463 bc_with_loc(srcLoc
, bc::String
{ s_unreachable
.get() }),
464 bc_with_loc(srcLoc
, bc::Fatal
{ FatalOp::Runtime
})
466 blk
->fallthrough
= NoBlockId
;
467 blk
->throwExit
= NoBlockId
;
468 blk
->exnNodeId
= NoExnNodeId
;
// Registers a newly created block `bid` in the FuncAnalysis bookkeeping
// (rpo order + per-block input state). The asserts pin that bid is exactly
// the next block. NOTE(review): the tail of the BlockData initializer
// (presumably `state`) is missing from this extraction.
472 void sync_ainfo(BlockId bid
, FuncAnalysis
& ainfo
, const State
& state
) {
473 assertx(ainfo
.bdata
.size() == bid
);
474 assertx(bid
+ 1 == ainfo
.ctx
.func
->blocks
.size());
476 ainfo
.rpoBlocks
.push_back(bid
);
477 ainfo
.bdata
.push_back(FuncAnalysis::BlockData
{
478 static_cast<uint32_t>(ainfo
.rpoBlocks
.size() - 1),
484 * Create a block similar to another block (but with no bytecode in it yet).
// FuncAnalysis-aware wrapper around make_block: creates the block and
// syncs analysis bookkeeping. NOTE(review): `return bid;` not visible in
// this extraction.
486 BlockId
make_block(FuncAnalysis
& ainfo
,
487 const php::Block
* srcBlk
,
488 const State
& state
) {
489 auto const bid
= make_block(ainfo
.ctx
.func
, srcBlk
);
490 sync_ainfo(bid
, ainfo
, state
);
// FuncAnalysis-aware wrapper around make_fatal_block. NOTE(review):
// `return bid;` not visible in this extraction.
494 BlockId
make_fatal_block(FuncAnalysis
& ainfo
,
495 const php::Block
* srcBlk
,
496 const State
& state
) {
497 auto bid
= make_fatal_block(ainfo
.ctx
.func
, srcBlk
);
498 sync_ainfo(bid
, ainfo
, state
);
502 //////////////////////////////////////////////////////////////////////
// Runs `fun(index, ainfo, collect, bid, stateIn)` over each block in
// rpoBlocks, skipping blocks whose input state never became initialized
// (unreachable). SCOPE_ASSERT_DETAIL attaches the current block + its
// in-state to any assertion failure. NOTE(review): several parameter lines
// (index, ainfo, fun) and closing punctuation are missing from this
// extraction.
504 template<class BlockContainer
, class AInfo
, class Fun
>
505 void visit_blocks_impl(const char* what
,
508 CollectedInfo
& collect
,
509 const BlockContainer
& rpoBlocks
,
512 BlockId curBlk
= NoBlockId
;
513 SCOPE_ASSERT_DETAIL(what
) {
514 if (curBlk
== NoBlockId
) return std::string
{"\nNo block processed\n"};
515 return folly::sformat(
516 "block #{}\nin-{}", curBlk
,
517 state_string(*ainfo
.ctx
.func
, ainfo
.bdata
[curBlk
].stateIn
, collect
)
521 FTRACE(1, "|---- {}\n", what
);
522 for (auto const bid
: rpoBlocks
) {
524 FTRACE(2, "block #{}\n", bid
);
525 auto const& state
= ainfo
.bdata
[bid
].stateIn
;
526 if (!state
.initialized
) {
527 FTRACE(2, " unreachable\n");
530 // TODO(#3732260): this should probably spend an extra interp pass
531 // in debug builds to check that no transformation to the bytecode
532 // was made that changes the block output state.
533 fun(index
, ainfo
, collect
, bid
, state
);
535 assert(check(*ainfo
.ctx
.func
));
// Like visit_blocks, but snapshots rpoBlocks first so the visitor may add
// blocks (mutating ainfo.rpoBlocks) while we iterate. NOTE(review): the
// template header and the index/ainfo/fun parameters are missing from this
// extraction.
539 void visit_blocks_mutable(const char* what
,
542 CollectedInfo
& collect
,
544 // Make a copy of the block list so it can be mutated by the visitor.
545 auto const blocksCopy
= ainfo
.rpoBlocks
;
546 visit_blocks_impl(what
, index
, ainfo
, collect
,
547 blocksCopy
, std::forward
<Fun
>(fun
));
// Read-only variant: iterates ainfo.rpoBlocks directly (the visitor must
// not change the block list). NOTE(review): the template header and the
// index/fun parameters are missing from this extraction.
551 void visit_blocks(const char* what
,
553 const FuncAnalysis
& ainfo
,
554 CollectedInfo
& collect
,
556 visit_blocks_impl(what
, index
, ainfo
, collect
,
557 ainfo
.rpoBlocks
, std::forward
<Fun
>(fun
));
560 //////////////////////////////////////////////////////////////////////
// Returns the IterId initialized by the last bytecode of `initBlock`,
// which must be an IterInit or LIterInit (anything else is a hard
// assertion failure).
562 IterId
iterFromInit(const php::Func
& func
, BlockId initBlock
) {
563 auto const& op
= func
.blocks
[initBlock
]->hhbcs
.back();
564 if (op
.op
== Op::IterInit
) return op
.IterInit
.ita
.iterId
;
565 if (op
.op
== Op::LIterInit
) return op
.LIterInit
.ita
.iterId
;
566 always_assert(false);
570 * Attempt to convert normal iterators into liters. In order for an iterator to
571 * be converted to a liter, the following needs to be true:
573 * - The iterator is initialized with the value in a local at exactly one block.
575 * - That same local is not modified on all possible paths from the
576 * initialization to every usage of that iterator.
578 * The first condition is actually more restrictive than necessary, but
579 * enforcing that the iterator is initialized at exactly one place simplifies
580 * the bookkeeping and is always true with how we currently emit bytecode.
// Block visitor + accumulated state for the iterator-to-liter optimization:
// tracks, per initialization block, whether the loop is still `eligible`
// and whether its base local was `updated`, and records `fixups` to apply
// later. NOTE(review): this extraction is missing many lines (the bid/state
// parameters, match() scaffolding around the LiveIter lambdas, the switch
// over iteration opcodes, and parts of Fixup::show) -- comments cover only
// what is visible.
583 struct OptimizeIterState
{
584 void operator()(const Index
& index
,
585 const FuncAnalysis
& ainfo
,
586 CollectedInfo
& collect
,
589 auto const ctx
= ainfo
.ctx
;
590 auto const blk
= ctx
.func
->blocks
[bid
].get();
591 auto interp
= Interp
{ index
, ctx
, collect
, bid
, blk
, state
};
592 for (uint32_t opIdx
= 0; opIdx
< blk
->hhbcs
.size(); ++opIdx
) {
593 // If we've already determined that nothing is eligible, we can just stop.
594 if (!eligible
.any()) break;
596 auto const& op
= blk
->hhbcs
[opIdx
];
597 FTRACE(2, " == {}\n", show(ctx
.func
, op
));
599 if (state
.unreachable
) break;
601 // At every op, we check the known state of all live iterators and mark it
602 // as ineligible as necessary.
603 for (IterId it
= 0; it
< state
.iters
.size(); ++it
) {
607 [&] (const LiveIter
& ti
) {
608 FTRACE(4, " iter {: <2} :: {}\n",
609 it
, show(*ctx
.func
, state
.iters
[it
]));
610 // The init block is unknown. This can only happen if there's more
611 // than one block where this iterator was initialized. This makes
612 // tracking the iteration loop ambiguous, and can't happen with how
613 // we currently emit bytecode, so just pessimize everything.
614 if (ti
.initBlock
== NoBlockId
) {
615 FTRACE(2, " - pessimize all\n");
619 // Otherwise, if the iterator doesn't have an equivalent local,
620 // either it was never initialized with a local to begin with, or
621 // that local got changed within the loop. Either way, this
622 // iteration loop isn't eligible.
623 if (eligible
[ti
.initBlock
] && ti
.baseLocal
== NoLocalId
) {
624 FTRACE(2, " - blk:{} ineligible\n", ti
.initBlock
);
625 eligible
[ti
.initBlock
] = false;
626 } else if (ti
.baseUpdated
) {
627 FTRACE(2, " - blk:{} updated\n", ti
.initBlock
);
628 updated
[ti
.initBlock
] = true;
// Fixup for an IterInit-style op: the base is whatever local the value
// on top of the stack came from; no base means the loop is ineligible.
634 auto const fixupForInit
= [&] {
635 auto const base
= topStkLocal(state
);
636 if (base
== NoLocalId
&& eligible
[bid
]) {
637 FTRACE(2, " - blk:{} ineligible\n", bid
);
638 eligible
[bid
] = false;
640 fixups
.emplace_back(Fixup
{bid
, opIdx
, bid
, base
});
641 FTRACE(2, " + fixup ({})\n", fixups
.back().show(*ctx
.func
));
// Fixup for a Next/Free op: resolve the owning loop from the tracked
// LiveIter state of iterator `it`.
644 auto const fixupFromState
= [&] (IterId it
) {
648 [&] (const LiveIter
& ti
) {
649 if (ti
.initBlock
!= NoBlockId
) {
650 assertx(iterFromInit(*ctx
.func
, ti
.initBlock
) == it
);
652 Fixup
{bid
, opIdx
, ti
.initBlock
, ti
.baseLocal
}
654 FTRACE(2, " + fixup ({})\n", fixups
.back().show(*ctx
.func
));
660 // Record a fixup for this iteration op. This iteration loop may not be
661 // ultimately eligible, but we'll check that before actually doing the
// Init ops must terminate their block (they're the last bytecode).
665 assertx(opIdx
== blk
->hhbcs
.size() - 1);
669 fixupFromState(op
.IterNext
.ita
.iterId
);
672 fixupFromState(op
.IterFree
.iter1
);
682 // We identify iteration loops by the block of the initialization op (which we
683 // enforce is exactly one block). A fixup describes a transformation to an
684 // iteration instruction which must be applied only if its associated loop is
687 BlockId block
; // Block of the op
688 uint32_t op
; // Index into the block of the op
689 BlockId init
; // Block of the loop's initializer
690 LocalId base
; // Invariant base of the iterator
692 std::string
show(const php::Func
& f
) const {
693 return folly::sformat(
694 "blk:{},{},blk:{},{}",
696 base
!= NoLocalId
? local_string(f
, base
) : "-"
700 std::vector
<Fixup
> fixups
;
701 // All of the associated iterator operations within an iterator loop can be
702 // optimized to liter if the iterator's initialization block is eligible.
703 boost::dynamic_bitset
<> eligible
;
704 // For eligible blocks, the "updated" flag tracks whether there was *any*
705 // change to the base initialized in that block (including "safe" changes).
706 boost::dynamic_bitset
<> updated
;
// Driver for the iterator-to-liter rewrite: gathers eligibility + fixups
// via OptimizeIterState, then rewrites each recorded iteration op (IterInit
// -> PopC + LIterInit, IterNext -> LIterNext, IterFree -> LIterFree) when
// its initialization block stayed eligible. NOTE(review): this extraction
// is missing lines (the switch over op.op around the rewrite arms, the
// newOps vector setup, and parts of the trace output) -- comments cover
// only what is visible.
709 void optimize_iterators(const Index
& index
,
710 const FuncAnalysis
& ainfo
,
711 CollectedInfo
& collect
) {
712 auto const func
= ainfo
.ctx
.func
;
713 // Quick exit. If there's no iterators, or if no associated local survives to
714 // the end of the iterator, there's nothing to do.
715 if (!func
->numIters
|| !ainfo
.hasInvariantIterBase
) return;
717 OptimizeIterState state
;
718 // All blocks starts out eligible. We'll remove initialization blocks as go.
719 // Similarly, the iterator bases for all blocks start out not being updated.
720 state
.eligible
.resize(func
->blocks
.size(), true);
721 state
.updated
.resize(func
->blocks
.size(), false);
723 // Visit all the blocks and build up the fixup state.
724 visit_blocks("optimize_iterators", index
, ainfo
, collect
, state
);
725 if (!state
.eligible
.any()) return;
727 FTRACE(2, "Rewrites:\n");
728 for (auto const& fixup
: state
.fixups
) {
729 auto& cblk
= func
->blocks
[fixup
.block
];
730 auto const& op
= cblk
->hhbcs
[fixup
.op
];
732 if (!state
.eligible
[fixup
.init
]) {
733 // This iteration loop isn't eligible, so don't apply the fixup
734 FTRACE(2, " * ({}): {}\n", fixup
.show(*func
), show(func
, op
));
// A base that was never updated at all can be marked BaseConst.
738 auto const flags
= state
.updated
[fixup
.init
]
739 ? IterArgs::Flags::None
740 : IterArgs::Flags::BaseConst
;
743 assertx(fixup
.base
!= NoLocalId
);
745 // Rewrite the iteration op to its liter equivalent:
748 auto args
= op
.IterInit
.ita
;
749 auto const target
= op
.IterInit
.target2
;
// LIterInit reads the base local directly, so the value IterInit would
// have consumed is popped explicitly.
752 bc_with_loc(op
.srcLoc
, bc::PopC
{}),
753 bc_with_loc(op
.srcLoc
, bc::LIterInit
{args
, fixup
.base
, target
})
758 auto args
= op
.IterNext
.ita
;
759 auto const target
= op
.IterNext
.target2
;
762 bc_with_loc(op
.srcLoc
, bc::LIterNext
{args
, fixup
.base
, target
}),
770 bc::LIterFree
{ op
.IterFree
.iter1
, fixup
.base
}
775 always_assert(false);
779 2, " ({}): {} ==> {}\n",
780 fixup
.show(*func
), show(func
, op
),
782 using namespace folly::gen
;
784 | map([&] (const Bytecode
& bc
) { return show(func
, bc
); })
785 | unsplit
<std::string
>(",");
// Splice the replacement ops in place of the original instruction.
789 auto const blk
= cblk
.mutate();
790 blk
->hhbcs
.erase(blk
->hhbcs
.begin() + fixup
.op
);
791 blk
->hhbcs
.insert(blk
->hhbcs
.begin() + fixup
.op
,
792 newOps
.begin(), newOps
.end());
795 FTRACE(10, "{}", show(*func
));
798 //////////////////////////////////////////////////////////////////////
801 * Use the information in the index to resolve a type-constraint to its
802 * underlying type, if possible.
// Resolves an unresolved object type-constraint to its underlying annot
// type via the index, when that is safe (not a magic interface, not an
// unresolved or mocked class). NOTE(review): this extraction is missing
// lines (early returns between the resolve call and the match, and the
// value the match() is applied to) -- comments cover only what is visible.
804 void fixTypeConstraint(const Index
& index
, TypeConstraint
& tc
) {
// Only unresolved, checkable object constraints are candidates.
805 if (!tc
.isCheckable() || !tc
.isObject() || tc
.isResolved()) return;
807 if (interface_supports_non_objects(tc
.typeName())) return;
808 auto const resolved
= index
.resolve_type_name(tc
.typeName());
810 assertx(!RuntimeOption::EvalHackArrDVArrs
||
811 (resolved
.type
!= AnnotType::VArray
&&
812 resolved
.type
!= AnnotType::DArray
));
814 if (resolved
.type
== AnnotType::Object
) {
815 auto const resolvedValue
= match
<folly::Optional
<res::Class
>>(
817 [&] (boost::blank
) { return folly::none
; },
818 [&] (const res::Class
& c
) { return folly::make_optional(c
); },
819 [&] (const res::Record
&) { always_assert(false); return folly::none
; }
821 if (!resolvedValue
|| !resolvedValue
->resolved()) return;
822 // Can't resolve if it resolves to a magic interface. If we mark it as
823 // resolved, we'll think its an object and not do the special magic
824 // interface checks at runtime.
825 if (interface_supports_non_objects(resolvedValue
->name())) return;
826 if (!resolvedValue
->couldHaveMockedDerivedClass()) tc
.setNoMockObjects();
// Record the resolution, preserving nullability from either side.
829 tc
.resolveType(resolved
.type
, tc
.isNullable() || resolved
.nullable
);
830 FTRACE(1, "Retype tc {} -> {}\n", tc
.typeName(), tc
.displayName());
833 //////////////////////////////////////////////////////////////////////
// Main per-function optimization loop: repeatedly applies block updates,
// iterator opts, dead-block removal, local/global DCE and CFG opts until a
// fixed point, then (on the final pass) erases trivial 86[psl]init methods,
// inserts assertions, and resolves type constraints. NOTE(review): this
// extraction is missing many lines (the do/while loop structure, the
// CollectedInfo emplacement, and parts of the attrSetter call) -- comments
// cover only what is visible.
835 void do_optimize(const Index
& index
, FuncAnalysis
&& ainfo
, bool isFinal
) {
836 FTRACE(2, "{:-^70} {}\n", "Optimize Func", ainfo
.ctx
.func
->name
);
839 folly::Optional
<CollectedInfo
> collect
;
842 index
, ainfo
.ctx
, nullptr,
843 CollectionOpts
{}, &ainfo
846 update_bytecode(ainfo
.ctx
.func
, std::move(ainfo
.blockUpdates
), &ainfo
);
847 optimize_iterators(index
, ainfo
, *collect
);
851 FTRACE(10, "{}", show(*ainfo
.ctx
.func
));
853 * Note: it's useful to do dead block removal before DCE, so it can remove
854 * code relating to the branch to the dead block.
856 remove_unreachable_blocks(ainfo
);
858 if (options
.LocalDCE
) {
859 visit_blocks("local DCE", index
, ainfo
, *collect
, local_dce
);
861 if (options
.GlobalDCE
) {
// `again` drives another round of the enclosing fixed-point loop.
862 if (global_dce(index
, ainfo
)) again
= true;
863 if (control_flow_opts(ainfo
)) again
= true;
864 assert(check(*ainfo
.ctx
.func
));
866 * Global DCE can change types of locals across blocks. See
867 * dce.cpp for an explanation.
869 * We need to perform a final type analysis before we do
872 ainfo
= analyze_func(index
, ainfo
.ctx
, CollectionOpts
{});
873 update_bytecode(ainfo
.ctx
.func
, std::move(ainfo
.blockUpdates
), &ainfo
);
875 index
, ainfo
.ctx
, nullptr,
876 CollectionOpts
{}, &ainfo
880 // If we merged blocks, there could be new optimization opportunities
// Everything below only runs on the final optimization pass.
883 if (!isFinal
) return;
885 auto const func
= ainfo
.ctx
.func
;
// Drop 86pinit/86sinit/86linit methods that reduced to `Null; RetC`.
886 if (func
->name
== s_86pinit
.get() ||
887 func
->name
== s_86sinit
.get() ||
888 func
->name
== s_86linit
.get()) {
889 auto const& blk
= *func
->blocks
[func
->mainEntry
];
890 if (blk
.hhbcs
.size() == 2 &&
891 blk
.hhbcs
[0].op
== Op::Null
&&
892 blk
.hhbcs
[1].op
== Op::RetC
) {
893 FTRACE(2, "Erasing {}::{}\n", func
->cls
->name
, func
->name
);
894 func
->cls
->methods
.erase(
895 std::find_if(func
->cls
->methods
.begin(),
896 func
->cls
->methods
.end(),
897 [&](const std::unique_ptr
<php::Func
>& f
) {
898 return f
.get() == func
;
904 attrSetter(func
->attrs
,
905 is_pseudomain(func
) ||
906 func
->attrs
& AttrInterceptable
||
910 if (options
.InsertAssertions
) {
911 visit_blocks("insert assertions", index
, ainfo
, *collect
,
// Resolve parameter and (when enforced) return type constraints.
915 for (auto& p
: func
->params
) fixTypeConstraint(index
, p
.typeConstraint
);
917 if (RuntimeOption::EvalCheckReturnTypeHints
>= 3) {
918 fixTypeConstraint(index
, func
->retTypeConstraint
);
922 //////////////////////////////////////////////////////////////////////
926 //////////////////////////////////////////////////////////////////////
// Maps a statically-known TypedValue to the bytecode that pushes it as a
// literal. All reference-counted payloads must be static. NOTE(review):
// this extraction is missing lines (several case labels -- e.g. the
// Null/Boolean/Int/Double labels and the True/False returns -- and the
// closing of the switch) -- comments cover only what is visible.
928 Bytecode
gen_constant(const TypedValue
& cell
) {
929 switch (cell
.m_type
) {
931 return bc::NullUninit
{};
// Boolean payload selects True/False (the returns are not visible here).
935 if (cell
.m_data
.num
) {
941 return bc::Int
{ cell
.m_data
.num
};
943 return bc::Double
{ cell
.m_data
.dbl
};
945 assert(cell
.m_data
.pstr
->isStatic());
946 case KindOfPersistentString
:
947 return bc::String
{ cell
.m_data
.pstr
};
949 assert(cell
.m_data
.parr
->isStatic());
950 case KindOfPersistentVec
:
951 assert(cell
.m_data
.parr
->isVecArrayType());
952 return bc::Vec
{ cell
.m_data
.parr
};
954 assert(cell
.m_data
.parr
->isStatic());
955 case KindOfPersistentDict
:
956 assert(cell
.m_data
.parr
->isDictType());
957 return bc::Dict
{ cell
.m_data
.parr
};
959 assert(cell
.m_data
.parr
->isStatic());
960 case KindOfPersistentKeyset
:
961 assert(cell
.m_data
.parr
->isKeysetType());
962 return bc::Keyset
{ cell
.m_data
.parr
};
966 assert(cell
.m_data
.parr
->isStatic());
967 case KindOfPersistentVArray
:
968 case KindOfPersistentDArray
:
969 case KindOfPersistentArray
:
970 assert(cell
.m_data
.parr
->isPHPArrayType());
971 return bc::Array
{ cell
.m_data
.parr
};
// Non-constant kinds (objects, resources, records, ...) can never reach
// here from propagate_constants.
978 case KindOfRecord
: // TODO(arnabde)
979 always_assert(0 && "invalid constant in propagate_constants");
// Public entry point: sets up tracing (per-function trace bump across the
// hhbbc/cfg/dce modules plus an assert-detail scope) and delegates to
// do_optimize.
984 void optimize_func(const Index
& index
, FuncAnalysis
&& ainfo
, bool isFinal
) {
985 auto const bump
= trace_bump_for(ainfo
.ctx
.cls
, ainfo
.ctx
.func
);
987 SCOPE_ASSERT_DETAIL("optimize_func") {
988 return "Optimizing:" + show(ainfo
.ctx
);
991 Trace::Bump bumper1
{Trace::hhbbc
, bump
};
992 Trace::Bump bumper2
{Trace::hhbbc_cfg
, bump
};
993 Trace::Bump bumper3
{Trace::hhbbc_dce
, bump
};
994 do_optimize(index
, std::move(ainfo
), isFinal
);
// Applies per-block bytecode replacements produced by analysis: splices
// replaced bytecodes into each block, repairs dangling fallthrough/taken
// edges with a lazily created fatal block, and finally updates the unit's
// pseudomain persistence flag. NOTE(review): this extraction is missing
// lines (the `php::Func* func` parameter, the `fatal` lambda wrapper
// around the fatal-block creation, and several closing braces) -- comments
// cover only what is visible.
997 void update_bytecode(
999 CompactVector
<std::pair
<BlockId
, BlockUpdateInfo
>>&& blockUpdates
,
1000 FuncAnalysis
* ainfo
) {
1002 for (auto& ent
: blockUpdates
) {
1003 auto blk
= func
->blocks
[ent
.first
].mutate();
1004 auto const srcLoc
= blk
->hhbcs
.front().srcLoc
;
// unchangedBcs == 0 means replace the whole block (or Nop it out).
1005 if (!ent
.second
.unchangedBcs
) {
1006 if (ent
.second
.replacedBcs
.size()) {
1007 blk
->hhbcs
= std::move(ent
.second
.replacedBcs
);
1009 blk
->hhbcs
= { bc_with_loc(blk
->hhbcs
.front().srcLoc
, bc::Nop
{}) };
// Otherwise keep the first unchangedBcs ops and append the replacements.
1012 blk
->hhbcs
.erase(blk
->hhbcs
.begin() + ent
.second
.unchangedBcs
,
1014 blk
->hhbcs
.reserve(blk
->hhbcs
.size() + ent
.second
.replacedBcs
.size());
1015 for (auto& bc
: ent
.second
.replacedBcs
) {
1016 blk
->hhbcs
.push_back(std::move(bc
));
1019 if (blk
->hhbcs
.empty()) {
1020 blk
->hhbcs
.push_back(bc_with_loc(srcLoc
, bc::Nop
{}));
// Lazily created block used as the target for edges that became invalid.
1022 auto fatal_block
= NoBlockId
;
1024 if (fatal_block
== NoBlockId
) {
1025 fatal_block
= make_fatal_block(func
, blk
);
1027 sync_ainfo(fatal_block
, *ainfo
, {});
1032 blk
->fallthrough
= ent
.second
.fallthrough
;
// Redirect any taken edge that no longer points at a real block.
1034 forEachTakenEdge(blk
->hhbcs
.back(),
1035 [&] (BlockId
& bid
) {
1037 if (bid
== NoBlockId
) bid
= fatal();
// A block that neither falls through nor ends in a terminal op gets an
// explicit unreachable-fatal tail.
1039 if (blk
->fallthrough
== NoBlockId
&&
1040 !(instrFlags(blk
->hhbcs
.back().op
) & TF
)) {
1042 blk
->fallthrough
= fatal();
1044 blk
->hhbcs
.push_back(bc::BreakTraceHint
{});
1045 blk
->hhbcs
.push_back(bc::String
{ s_unreachable
.get() });
1046 blk
->hhbcs
.push_back(bc::Fatal
{ FatalOp::Runtime
});
1050 blockUpdates
.clear();
// Keep the unit's persistent_pseudomain flag in sync after rewriting.
1052 if (is_pseudomain(func
) &&
1053 func
->unit
->persistent
.load(std::memory_order_relaxed
)) {
1054 func
->unit
->persistent_pseudomain
.store(
1055 persistence_check(func
), std::memory_order_relaxed
1060 //////////////////////////////////////////////////////////////////////
// Resolves every property type-constraint of ctx.cls via fixTypeConstraint
// (the const_cast below strips the const that php::Prop exposes).
// NOTE(review): the extraction drops the fixTypeConstraint call wrapping
// the const_cast and the early-exit guard -- confirm against the full
// source.
1062 void optimize_class_prop_type_hints(const Index
& index
, Context ctx
) {
1064 auto const bump
= trace_bump_for(ctx
.cls
, nullptr);
1065 Trace::Bump bumper
{Trace::hhbbc
, bump
};
1066 for (auto& prop
: ctx
.cls
->properties
) {
1069 const_cast<TypeConstraint
&>(prop
.typeConstraint
)
1074 //////////////////////////////////////////////////////////////////////