2 +----------------------------------------------------------------------+
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
16 #include "hphp/hhbbc/optimize.h"
25 #include <boost/dynamic_bitset.hpp>
27 #include <folly/Optional.h>
28 #include <folly/gen/Base.h>
29 #include <folly/gen/String.h>
31 #include "hphp/util/trace.h"
32 #include "hphp/util/match.h"
34 #include "hphp/runtime/vm/hhbc.h"
35 #include "hphp/runtime/base/datatype.h"
37 #include "hphp/hhbbc/hhbbc.h"
38 #include "hphp/hhbbc/analyze.h"
39 #include "hphp/hhbbc/cfg.h"
40 #include "hphp/hhbbc/cfg-opts.h"
41 #include "hphp/hhbbc/dce.h"
42 #include "hphp/hhbbc/func-util.h"
43 #include "hphp/hhbbc/interp.h"
44 #include "hphp/hhbbc/interp-internal.h"
45 #include "hphp/hhbbc/interp-state.h"
46 #include "hphp/hhbbc/misc.h"
47 #include "hphp/hhbbc/options-util.h"
48 #include "hphp/hhbbc/representation.h"
49 #include "hphp/hhbbc/type-system.h"
50 #include "hphp/hhbbc/unit-util.h"
52 namespace HPHP
{ namespace HHBBC
{
54 //////////////////////////////////////////////////////////////////////
58 //////////////////////////////////////////////////////////////////////
61 * For filtering assertions, some opcodes are considered to have no
62 * use for a stack input assertion.
64 * For now this is restricted to opcodes that do literally nothing.
// Predicate used when filtering assertions: returns true for opcodes that
// make no use of a stack-input type assertion (per the comment above,
// "opcodes that do literally nothing").
// NOTE(review): the function body (original lines 67-77) was lost in
// extraction; only the signature fragment survives here.
66 bool ignoresStackInput(Op op
) {
78 template<class TyBC
, class ArgType
>
79 folly::Optional
<Bytecode
> makeAssert(ArrayTypeTable::Builder
& arrTable
,
82 if (t
.subtypeOf(BBottom
)) return folly::none
;
83 auto const rat
= make_repo_type(arrTable
, t
);
84 using T
= RepoAuthType::Tag
;
85 if (options
.FilterAssertions
) {
86 // Gen and InitGen don't add any useful information, so leave them
88 if (rat
== RepoAuthType
{T::Gen
}) return folly::none
;
89 if (rat
== RepoAuthType
{T::InitGen
}) return folly::none
;
91 return Bytecode
{ TyBC
{ arg
, rat
} };
95 void insert_assertions_step(ArrayTypeTable::Builder
& arrTable
,
96 const php::Func
& func
,
97 const Bytecode
& bcode
,
99 std::bitset
<kMaxTrackedLocals
> mayReadLocalSet
,
100 std::vector
<uint8_t> obviousStackOutputs
,
102 if (state
.unreachable
) return;
104 for (LocalId i
= 0; i
< state
.locals
.size(); ++i
) {
105 if (func
.locals
[i
].killed
) continue;
106 if (options
.FilterAssertions
) {
107 // Do not emit assertions for untracked locals.
108 if (i
>= mayReadLocalSet
.size()) break;
109 if (!mayReadLocalSet
.test(i
)) continue;
111 auto const realT
= state
.locals
[i
];
112 auto const op
= makeAssert
<bc::AssertRATL
>(arrTable
, i
, realT
);
116 if (!options
.InsertStackAssertions
) return;
118 assert(obviousStackOutputs
.size() == state
.stack
.size());
120 auto const assert_stack
= [&] (size_t idx
) {
121 assert(idx
< state
.stack
.size());
122 if (obviousStackOutputs
[state
.stack
.size() - idx
- 1]) return;
123 if (ignoresStackInput(bcode
.op
)) return;
124 auto const realT
= state
.stack
[state
.stack
.size() - idx
- 1].type
;
125 auto const flav
= stack_flav(realT
);
127 if (options
.FilterAssertions
&& !realT
.strictSubtypeOf(flav
)) {
132 makeAssert
<bc::AssertRATStk
>(
134 static_cast<uint32_t>(idx
),
140 for (auto i
= size_t{0}; i
< bcode
.numPop(); ++i
) assert_stack(i
);
142 // The base instructions are special in that they may read from the
143 // stack without necessarily popping it. We want type assertions on
144 // the stack slots they'll read.
146 case Op::BaseC
: assert_stack(bcode
.BaseC
.arg1
); break;
147 case Op::BaseGC
: assert_stack(bcode
.BaseGC
.arg1
); break;
149 assert_stack(bcode
.BaseSC
.arg1
);
150 assert_stack(bcode
.BaseSC
.arg2
);
153 switch (bcode
.Dim
.mkey
.mcode
) {
155 assert_stack(bcode
.Dim
.mkey
.idx
);
157 case MW
: case MEL
: case MPL
: case MEI
:
158 case MET
: case MPT
: case MQT
:
167 * When filter assertions is on, we use this to avoid putting stack
168 * assertions on some "obvious" instructions.
170 * These are instructions that push an output type that is always the
171 * same, i.e. where an AssertT is never going to add information.
172 * (E.g. "Int 3" obviously pushes an Int, and the JIT has no trouble
173 * figuring that out, so there's no reason to assert about it.)
175 * TODO(#3676101): It turns out many hhbc opcodes have known output
176 * types---there are some super polymorphic ones, but many always do
177 * bools or objects, etc. We might consider making stack flavors have
178 * subtypes and adding this to the opcode table.
/*
 * hasObviousStackOutput: returns whether `op' pushes a value whose type
 * the JIT can always deduce on its own, so a stack type assertion after
 * it would be redundant (see the explanatory comment above this
 * function).
 *
 * NOTE(review): this region is garbled by extraction — stray original
 * line numbers are fused into the code, statements are split across
 * lines, and many original lines (the `switch (op.op)' header, most of
 * its case labels, the returns, and closing braces) were dropped.  The
 * surviving fragments are preserved byte-for-byte below; recover the
 * full text from the canonical file before editing.
 */
180 bool hasObviousStackOutput(const Bytecode
& op
, const Interp
& interp
) {
181 // Generally consider CGetL obvious because if we knew the type of the local,
182 // we'll assert that right before the CGetL.
// Helper: a CGetL-style output is obvious unless the local could be a
// Ref, or the pushed slot's type would still be a strict subtype of
// InitCell (i.e. an assertion would add information).
183 auto cgetlObvious
= [&] (LocalId l
, int idx
) {
184 return !interp
.state
.locals
[l
].couldBe(BRef
) ||
185 !interp
.state
.stack
[interp
.state
.stack
.size() - idx
- 1].
186 type
.strictSubtypeOf(TInitCell
);
// Case labels for ops with fixed output types (array/collection
// constructors, type predicates, etc.).  Labels between the surviving
// ones were lost in extraction.
202 case Op::NewDictArray
:
203 case Op::NewPackedArray
:
205 case Op::NewStructArray
:
206 case Op::NewStructDArray
:
207 case Op::NewStructDict
:
208 case Op::NewVecArray
:
209 case Op::NewKeysetArray
:
210 case Op::AddNewElemC
:
242 case Op::InstanceOfD
:
243 case Op::IsLateBoundCls
:
244 case Op::IsTypeStructC
:
245 case Op::CombineAndResolveTypeStruct
:
246 case Op::RecordReifiedGeneric
:
259 case Op::OODeclExists
:
// NOTE(review): presumably the Op::This case — obvious when the pushed
// type is no more precise than the known $this type.  Confirm against
// the canonical source.
265 if (auto tt
= thisType(interp
.index
, interp
.ctx
)) {
266 auto t
= interp
.state
.stack
.back().type
;
267 if (is_opt(t
)) t
= unopt(std::move(t
));
268 return !t
.strictSubtypeOf(*tt
);
// CGetL-family cases delegate to the helper above; CGetL2 reads the
// slot one below the top (idx 1).
273 return cgetlObvious(op
.CGetL
.loc1
, 0);
275 return cgetlObvious(op
.CGetQuietL
.loc1
, 0);
277 return cgetlObvious(op
.CUGetL
.loc1
, 0);
279 return cgetlObvious(op
.CGetL2
.loc1
, 1);
281 return cgetlObvious(op
.PushL
.loc1
, 0);
283 // The output of SetL is obvious if you know what its input is
284 // (which we'll assert if we know).
288 // The output of SetM isn't quite as obvious as SetL, but the jit
289 // can work it out from the input just as well as hhbbc (if not better).
/*
 * insert_assertions: per-block driver.  Re-interprets each bytecode of
 * block `bid', maintaining a parallel vector recording which stack
 * slots were produced by ops with "obvious" outputs, and rebuilds the
 * block's bytecode with type assertions inserted before each op (via
 * insert_assertions_step).  If the block becomes unreachable mid-way,
 * the fallthrough is cut and a BreakTraceHint/String/Fatal tail is
 * emitted for non-terminal ops.  The block is only mutated if the new
 * bytecode actually differs.
 *
 * NOTE(review): garbled by extraction — stray original line numbers are
 * fused into the code and several original lines (signature tail,
 * closing braces, most arguments of the insert_assertions_step call)
 * were dropped.  Fragments preserved byte-for-byte.
 */
298 void insert_assertions(const Index
& index
,
299 const FuncAnalysis
& ainfo
,
300 CollectedInfo
& collect
,
304 auto& cblk
= ainfo
.ctx
.func
->blocks
[bid
];
305 newBCs
.reserve(cblk
->hhbcs
.size());
307 auto& arrTable
= *index
.array_table_builder();
308 auto const ctx
= ainfo
.ctx
;
310 std::vector
<uint8_t> obviousStackOutputs(state
.stack
.size(), false);
312 auto interp
= Interp
{ index
, ctx
, collect
, bid
, cblk
.get(), state
};
313 for (auto& op
: cblk
->hhbcs
) {
314 FTRACE(2, " == {}\n", show(ctx
.func
, op
));
// Appends a bytecode to the rebuilt block, copying the source location
// of the op currently being processed.
316 auto gen
= [&] (const Bytecode
& newb
) {
317 newBCs
.push_back(newb
);
318 newBCs
.back().srcLoc
= op
.srcLoc
;
319 FTRACE(2, " + {}\n", show(ctx
.func
, newBCs
.back()));
322 if (state
.unreachable
) {
323 if (cblk
->fallthrough
!= NoBlockId
) {
324 auto const blk
= cblk
.mutate();
325 blk
->fallthrough
= NoBlockId
;
327 if (!(instrFlags(op
.op
) & TF
)) {
328 gen(bc::BreakTraceHint
{});
329 gen(bc::String
{ s_unreachable
.get() });
330 gen(bc::Fatal
{ FatalOp::Runtime
});
// Interpret the op to get its flags and post-state; assertions are
// generated against the pre-step state.
335 auto const preState
= state
;
336 auto const flags
= step(interp
, op
);
338 insert_assertions_step(
343 flags
.mayReadLocalSet
,
// CGetL2 pushes under the top of stack, so its obviousness flag is
// spliced in one slot below the end.
348 if (op
.op
== Op::CGetL2
) {
349 obviousStackOutputs
.emplace(obviousStackOutputs
.end() - 1,
350 hasObviousStackOutput(op
, interp
));
352 for (int i
= 0; i
< op
.numPop(); i
++) {
353 obviousStackOutputs
.pop_back();
355 for (auto i
= op
.numPush(); i
--; ) {
356 obviousStackOutputs
.emplace_back(hasObviousStackOutput(op
, interp
));
363 if (cblk
->hhbcs
!= newBCs
) cblk
.mutate()->hhbcs
= std::move(newBCs
);
/*
 * persistence_check: walks the fallthrough chain of blocks from the
 * function's main entry, scanning each op to decide whether the unit
 * can remain persistent (only a limited set of definition/no-op
 * bytecodes is allowed).
 *
 * NOTE(review): most of the switch's case labels and the return paths
 * (original lines 371-399) were lost in extraction; the surviving
 * fragments are preserved byte-for-byte below.
 */
366 bool persistence_check(const php::Func
* const func
) {
367 auto bid
= func
->mainEntry
;
368 while (bid
!= NoBlockId
) {
369 auto const& blk
= func
->blocks
[bid
];
370 for (auto& op
: blk
->hhbcs
) {
376 case Op::DefTypeAlias
:
390 // Not strictly no-side effects, but as long as the rest of
391 // the unit is limited to the above, we're fine (and we expect
392 // one following a DefCns).
// Advance along the fallthrough chain.
400 bid
= blk
->fallthrough
;
405 //////////////////////////////////////////////////////////////////////
/*
 * propagate_constants (Gen form): if every value pushed by `op' has a
 * statically-known constant type, replace the instruction with pops of
 * its inputs followed by pushes of the constants (emitted through
 * `gen'), updating the stack types in `state'.  Returns false if any
 * output is not a known scalar constant.
 *
 * NOTE(review): garbled by extraction — the storing of tv() results
 * into constVals, parts of the flavor switch, and the closing braces
 * (e.g. original lines 414, 419, 422, 424-427, 443-447, 453-456) were
 * dropped.  Fragments preserved byte-for-byte.
 */
408 bool propagate_constants(const Bytecode
& op
, State
& state
, Gen gen
) {
409 auto const numPop
= op
.numPop();
410 auto const numPush
= op
.numPush();
411 auto const stkSize
= state
.stack
.size();
// Small inline buffer for the common case; pushes beyond numCells are
// re-derived from the state below.
412 constexpr auto numCells
= 4;
413 Cell constVals
[numCells
];
415 // All outputs of the instruction must have constant types for this
417 for (auto i
= size_t{0}; i
< numPush
; ++i
) {
418 auto const& ty
= state
.stack
[stkSize
- i
- 1].type
;
420 auto const v
= tv(ty
);
421 if (!v
) return false;
423 } else if (!is_scalar(ty
)) {
428 // Pop the inputs, and push the constants.
429 for (auto i
= size_t{0}; i
< numPop
; ++i
) {
430 switch (op
.popFlavor(i
)) {
431 case Flavor::C
: gen(bc::PopC
{}); break;
432 case Flavor::V
: gen(bc::PopV
{}); break;
433 case Flavor::U
: not_reached(); break;
435 // We only support C's for CU right now.
438 case Flavor::CV
: not_reached(); break;
440 // Note that we only support C's for CVU so far (this only comes up with
441 // FCallBuiltin)---we'll fail the verifier if something changes to send
442 // V's or U's through here.
// Push the constants (deepest first) and pin the stack types.
448 for (auto i
= size_t{0}; i
< numPush
; ++i
) {
449 auto const v
= i
< numCells
?
450 constVals
[i
] : *tv(state
.stack
[stkSize
- i
- 1].type
);
451 gen(gen_constant(v
));
452 state
.stack
[stkSize
- i
- 1].type
= from_cell(v
);
458 bool propagate_constants(const Bytecode
& bc
, State
& state
,
460 return propagate_constants(bc
, state
, [&] (const Bytecode
& bc
) {
465 //////////////////////////////////////////////////////////////////////
468 * Create a block similar to another block (but with no bytecode in it yet).
470 BlockId
make_block(php::Func
* func
,
471 const php::Block
* srcBlk
) {
472 FTRACE(1, " ++ new block {}\n", func
->blocks
.size());
474 auto newBlk
= copy_ptr
<php::Block
>{php::Block
{}};
475 auto const blk
= newBlk
.mutate();
476 blk
->exnNodeId
= srcBlk
->exnNodeId
;
477 blk
->throwExit
= srcBlk
->throwExit
;
478 auto const bid
= func
->blocks
.size();
479 func
->blocks
.push_back(std::move(newBlk
));
484 BlockId
make_fatal_block(php::Func
* func
,
485 const php::Block
* srcBlk
) {
486 auto bid
= make_block(func
, srcBlk
);
487 auto const blk
= func
->blocks
[bid
].mutate();
488 auto const srcLoc
= srcBlk
->hhbcs
.back().srcLoc
;
490 bc_with_loc(srcLoc
, bc::String
{ s_unreachable
.get() }),
491 bc_with_loc(srcLoc
, bc::Fatal
{ FatalOp::Runtime
})
493 blk
->fallthrough
= NoBlockId
;
494 blk
->throwExit
= NoBlockId
;
495 blk
->exnNodeId
= NoExnNodeId
;
499 void sync_ainfo(BlockId bid
, FuncAnalysis
& ainfo
, const State
& state
) {
500 assertx(ainfo
.bdata
.size() == bid
);
501 assertx(bid
+ 1 == ainfo
.ctx
.func
->blocks
.size());
503 ainfo
.rpoBlocks
.push_back(bid
);
504 ainfo
.bdata
.push_back(FuncAnalysis::BlockData
{
505 static_cast<uint32_t>(ainfo
.rpoBlocks
.size() - 1),
511 * Create a block similar to another block (but with no bytecode in it yet).
513 BlockId
make_block(FuncAnalysis
& ainfo
,
514 const php::Block
* srcBlk
,
515 const State
& state
) {
516 auto const bid
= make_block(ainfo
.ctx
.func
, srcBlk
);
517 sync_ainfo(bid
, ainfo
, state
);
521 BlockId
make_fatal_block(FuncAnalysis
& ainfo
,
522 const php::Block
* srcBlk
,
523 const State
& state
) {
524 auto bid
= make_fatal_block(ainfo
.ctx
.func
, srcBlk
);
525 sync_ainfo(bid
, ainfo
, state
);
529 //////////////////////////////////////////////////////////////////////
531 template<class BlockContainer
, class AInfo
, class Fun
>
532 void visit_blocks_impl(const char* what
,
535 CollectedInfo
& collect
,
536 const BlockContainer
& rpoBlocks
,
539 BlockId curBlk
= NoBlockId
;
540 SCOPE_ASSERT_DETAIL(what
) {
541 if (curBlk
== NoBlockId
) return std::string
{"\nNo block processed\n"};
542 return folly::sformat(
543 "block #{}\nin-{}", curBlk
,
544 state_string(*ainfo
.ctx
.func
, ainfo
.bdata
[curBlk
].stateIn
, collect
)
548 FTRACE(1, "|---- {}\n", what
);
549 for (auto const bid
: rpoBlocks
) {
551 FTRACE(2, "block #{}\n", bid
);
552 auto const& state
= ainfo
.bdata
[bid
].stateIn
;
553 if (!state
.initialized
) {
554 FTRACE(2, " unreachable\n");
557 // TODO(#3732260): this should probably spend an extra interp pass
558 // in debug builds to check that no transformation to the bytecode
559 // was made that changes the block output state.
560 fun(index
, ainfo
, collect
, bid
, state
);
562 assert(check(*ainfo
.ctx
.func
));
566 void visit_blocks_mutable(const char* what
,
569 CollectedInfo
& collect
,
571 // Make a copy of the block list so it can be mutated by the visitor.
572 auto const blocksCopy
= ainfo
.rpoBlocks
;
573 visit_blocks_impl(what
, index
, ainfo
, collect
,
574 blocksCopy
, std::forward
<Fun
>(fun
));
578 void visit_blocks(const char* what
,
580 const FuncAnalysis
& ainfo
,
581 CollectedInfo
& collect
,
583 visit_blocks_impl(what
, index
, ainfo
, collect
,
584 ainfo
.rpoBlocks
, std::forward
<Fun
>(fun
));
587 //////////////////////////////////////////////////////////////////////
589 IterId
iterFromInit(const php::Func
& func
, BlockId initBlock
) {
590 auto const& op
= func
.blocks
[initBlock
]->hhbcs
.back();
591 if (op
.op
== Op::IterInit
) return op
.IterInit
.iter1
;
592 if (op
.op
== Op::IterInitK
) return op
.IterInitK
.iter1
;
593 if (op
.op
== Op::LIterInit
) return op
.LIterInit
.iter1
;
594 if (op
.op
== Op::LIterInitK
) return op
.LIterInitK
.iter1
;
595 always_assert(false);
599 * Attempt to convert normal iterators into liters. In order for an iterator to
600 * be converted to a liter, the following needs to be true:
602 * - The iterator is initialized with the value in a local at exactly one block.
604 * - That same local is not modified on all possible paths from the
605 * initialization to every usage of that iterator.
607 * The first condition is actually more restrictive than necessary, but
608 * enforcing that the iterator is initialized at exactly one place simplifies
609 * the bookkeeping and is always true with how we currently emit bytecode.
/*
 * OptimizeIterState: visitor state for converting normal iterators into
 * local iterators ("liters"; see the comment block above).  operator()
 * interprets one block, checking at every op the known state of all
 * live iterators: loops whose base local is unknown or was changed are
 * marked ineligible, and Fixup records are appended describing rewrites
 * to apply later if the loop stays eligible.  `eligible'/`updated' are
 * indexed by the loop's initialization block.
 *
 * NOTE(review): this region is heavily garbled by extraction — stray
 * original line numbers are fused into the code and many original lines
 * (the BlockId/State parameters, the match<> dispatch, parts of the
 * fixup lambdas, the op switch headers, and closing braces) were
 * dropped.  The surviving fragments are preserved byte-for-byte below;
 * recover the full text from the canonical file before editing.
 */
612 struct OptimizeIterState
{
613 void operator()(const Index
& index
,
614 const FuncAnalysis
& ainfo
,
615 CollectedInfo
& collect
,
618 auto const ctx
= ainfo
.ctx
;
619 auto const blk
= ctx
.func
->blocks
[bid
].get();
620 auto interp
= Interp
{ index
, ctx
, collect
, bid
, blk
, state
};
621 for (uint32_t opIdx
= 0; opIdx
< blk
->hhbcs
.size(); ++opIdx
) {
622 // If we've already determined that nothing is eligible, we can just stop.
623 if (!eligible
.any()) break;
625 auto const& op
= blk
->hhbcs
[opIdx
];
626 FTRACE(2, " == {}\n", show(ctx
.func
, op
));
628 if (state
.unreachable
) break;
630 // At every op, we check the known state of all live iterators and mark it
631 // as ineligible as necessary.
632 for (IterId it
= 0; it
< state
.iters
.size(); ++it
) {
636 [&] (const LiveIter
& ti
) {
637 FTRACE(4, " iter {: <2} :: {}\n",
638 it
, show(*ctx
.func
, state
.iters
[it
]));
639 // The init block is unknown. This can only happen if there's more
640 // than one block where this iterator was initialized. This makes
641 // tracking the iteration loop ambiguous, and can't happen with how
642 // we currently emit bytecode, so just pessimize everything.
643 if (ti
.initBlock
== NoBlockId
) {
644 FTRACE(2, " - pessimize all\n");
648 // Otherwise, if the iterator doesn't have an equivalent local,
649 // either it was never initialized with a local to begin with, or
650 // that local got changed within the loop. Either way, this
651 // iteration loop isn't eligible.
652 if (eligible
[ti
.initBlock
] && ti
.baseLocal
== NoLocalId
) {
653 FTRACE(2, " - blk:{} ineligible\n", ti
.initBlock
);
654 eligible
[ti
.initBlock
] = false;
655 } else if (ti
.baseUpdated
) {
656 FTRACE(2, " - blk:{} updated\n", ti
.initBlock
);
657 updated
[ti
.initBlock
] = true;
// Helper: record a fixup for an iterator-init op; the base is whatever
// local feeds the top of the stack (NoLocalId makes this init block
// ineligible).
663 auto const fixupForInit
= [&] {
664 auto const base
= topStkLocal(state
);
665 if (base
== NoLocalId
&& eligible
[bid
]) {
666 FTRACE(2, " - blk:{} ineligible\n", bid
);
667 eligible
[bid
] = false;
669 fixups
.emplace_back(Fixup
{bid
, opIdx
, bid
, base
});
670 FTRACE(2, " + fixup ({})\n", fixups
.back().show(*ctx
.func
));
// Helper: record a fixup for a next/free/break op, taking the init
// block and base local from the iterator's tracked state.
673 auto const fixupFromState
= [&] (IterId it
) {
677 [&] (const LiveIter
& ti
) {
678 if (ti
.initBlock
!= NoBlockId
) {
679 assertx(iterFromInit(*ctx
.func
, ti
.initBlock
) == it
);
681 Fixup
{bid
, opIdx
, ti
.initBlock
, ti
.baseLocal
}
683 FTRACE(2, " + fixup ({})\n", fixups
.back().show(*ctx
.func
));
689 // Record a fixup for this iteration op. This iteration loop may not be
690 // ultimately eligible, but we'll check that before actually doing the
695 assertx(opIdx
== blk
->hhbcs
.size() - 1);
699 fixupFromState(op
.IterNext
.iter1
);
702 fixupFromState(op
.IterNextK
.iter1
);
705 fixupFromState(op
.IterFree
.iter1
);
708 for (auto const& it
: op
.IterBreak
.iterTab
) {
709 if (it
.kind
== KindOfIter
) fixupFromState(it
.id
);
720 // We identify iteration loops by the block of the initialization op (which we
721 // enforce is exactly one block). A fixup describes a transformation to an
722 // iteration instruction which must be applied only if its associated loop is
725 BlockId block
; // Block of the op
726 uint32_t op
; // Index into the block of the op
727 BlockId init
; // Block of the loop's initializer
728 LocalId base
; // Invariant base of the iterator
// Debug pretty-printer for a Fixup.
730 std::string
show(const php::Func
& f
) const {
731 return folly::sformat(
732 "blk:{},{},blk:{},{}",
734 base
!= NoLocalId
? local_string(f
, base
) : "-"
738 std::vector
<Fixup
> fixups
;
739 // All of the associated iterator operations within an iterator loop can be
740 // optimized to liter if the iterator's initialization block is eligible.
741 boost::dynamic_bitset
<> eligible
;
742 // For eligible blocks, the "updated" flag tracks whether there was *any*
743 // change to the base initialized in that block (including "safe" changes).
744 boost::dynamic_bitset
<> updated
;
/*
 * optimize_iterators: drive the iterator-to-liter conversion.  First a
 * visit_blocks pass with OptimizeIterState collects fixups and
 * eligibility; then each fixup whose init block stayed eligible is
 * applied by splicing replacement "liter" bytecode (held in `newOps')
 * over the original instruction, choosing a LocalBaseConst/
 * LocalBaseMutable subop from the `updated' bitset.
 *
 * NOTE(review): heavily garbled by extraction — the per-opcode rewrite
 * switch (IterInit/IterInitK/IterNext/IterNextK cases, the `newOps'
 * construction, `continue;' statements, and closing braces) was largely
 * dropped.  Surviving fragments preserved byte-for-byte; recover the
 * full text from the canonical file before editing.
 */
747 void optimize_iterators(const Index
& index
,
748 const FuncAnalysis
& ainfo
,
749 CollectedInfo
& collect
) {
750 auto const func
= ainfo
.ctx
.func
;
751 // Quick exit. If there's no iterators, or if no associated local survives to
752 // the end of the iterator, there's nothing to do.
753 if (!func
->numIters
|| !ainfo
.hasInvariantIterBase
) return;
755 OptimizeIterState state
;
756 // All blocks starts out eligible. We'll remove initialization blocks as go.
757 // Similarly, the iterator bases for all blocks start out not being updated.
758 state
.eligible
.resize(func
->blocks
.size(), true);
759 state
.updated
.resize(func
->blocks
.size(), false);
761 // Visit all the blocks and build up the fixup state.
762 visit_blocks("optimize_iterators", index
, ainfo
, collect
, state
);
763 if (!state
.eligible
.any()) return;
765 FTRACE(2, "Rewrites:\n");
766 for (auto const& fixup
: state
.fixups
) {
767 auto& cblk
= func
->blocks
[fixup
.block
];
768 auto const& op
= cblk
->hhbcs
[fixup
.op
];
770 if (!state
.eligible
[fixup
.init
]) {
771 // This iteration loop isn't eligible, so don't apply the fixup
772 FTRACE(2, " * ({}): {}\n", fixup
.show(*func
), show(func
, op
));
// Pick the liter subop: a mutated (but invariant) base must be
// re-checked at runtime, an untouched base can be assumed const.
776 auto const subop
= state
.updated
[fixup
.init
]
777 ? IterTypeOp::LocalBaseMutable
778 : IterTypeOp::LocalBaseConst
;
781 assertx(fixup
.base
!= NoLocalId
);
783 // Rewrite the iteration op to its liter equivalent:
787 bc_with_loc(op
.srcLoc
, bc::PopC
{}),
802 bc_with_loc(op
.srcLoc
, bc::PopC
{}),
808 op
.IterInitK
.target2
,
837 op
.IterNextK
.target2
,
849 bc::LIterFree
{ op
.IterFree
.iter1
, fixup
.base
}
853 case Op::IterBreak
: {
854 auto const iter
= iterFromInit(*func
, fixup
.init
);
856 for (auto& it
: newOps
.back().IterBreak
.iterTab
) {
858 assertx(it
.kind
== KindOfIter
);
859 it
.kind
= KindOfLIter
;
860 it
.local
= fixup
.base
;
866 always_assert(false);
// Trace the rewrite: original op ==> comma-joined replacement ops.
870 2, " ({}): {} ==> {}\n",
871 fixup
.show(*func
), show(func
, op
),
873 using namespace folly::gen
;
875 | map([&] (const Bytecode
& bc
) { return show(func
, bc
); })
876 | unsplit
<std::string
>(",");
// Splice the replacement ops over the original instruction.
880 auto const blk
= cblk
.mutate();
881 blk
->hhbcs
.erase(blk
->hhbcs
.begin() + fixup
.op
);
882 blk
->hhbcs
.insert(blk
->hhbcs
.begin() + fixup
.op
,
883 newOps
.begin(), newOps
.end());
886 FTRACE(10, "{}", show(*func
));
889 //////////////////////////////////////////////////////////////////////
892 * Use the information in the index to resolve a type-constraint to its
893 * underlying type, if possible.
895 void fixTypeConstraint(const Index
& index
, TypeConstraint
& tc
) {
896 if (!tc
.isCheckable() || !tc
.isObject() || tc
.isResolved()) return;
898 if (interface_supports_non_objects(tc
.typeName())) return;
899 auto const resolved
= index
.resolve_type_name(tc
.typeName());
901 assertx(!RuntimeOption::EvalHackArrDVArrs
||
902 (resolved
.type
!= AnnotType::VArray
&&
903 resolved
.type
!= AnnotType::DArray
));
905 if (resolved
.type
== AnnotType::Object
) {
906 if (!resolved
.value
|| !resolved
.value
->resolved()) return;
907 // Can't resolve if it resolves to a magic interface. If we mark it as
908 // resolved, we'll think its an object and not do the special magic
909 // interface checks at runtime.
910 if (interface_supports_non_objects(resolved
.value
->name())) return;
911 if (!resolved
.value
->couldHaveMockedDerivedClass()) tc
.setNoMockObjects();
914 tc
.resolveType(resolved
.type
, tc
.isNullable() || resolved
.nullable
);
915 FTRACE(1, "Retype tc {} -> {}\n", tc
.typeName(), tc
.displayName());
918 //////////////////////////////////////////////////////////////////////
/*
 * do_optimize: main per-function optimization driver.  Applies pending
 * block updates, runs the iterator optimization, removes unreachable
 * blocks, iterates local/global DCE and control-flow opts (re-analyzing
 * the function when global DCE changed local types), then — on the
 * final pass only — erases trivial 86[psl]init methods, adjusts
 * function attributes, inserts type assertions, and resolves parameter
 * and return type-constraints.
 *
 * NOTE(review): garbled by extraction — stray original line numbers are
 * fused into the code and many original lines (the `again' loop
 * structure, `collect.emplace(...)' call heads, closing braces, the
 * attrSetter arguments) were dropped.  Surviving fragments preserved
 * byte-for-byte; recover the full text from the canonical file before
 * editing.
 */
920 void do_optimize(const Index
& index
, FuncAnalysis
&& ainfo
, bool isFinal
) {
921 FTRACE(2, "{:-^70} {}\n", "Optimize Func", ainfo
.ctx
.func
->name
);
924 folly::Optional
<CollectedInfo
> collect
;
927 index
, ainfo
.ctx
, nullptr,
928 CollectionOpts
{}, &ainfo
931 update_bytecode(ainfo
.ctx
.func
, std::move(ainfo
.blockUpdates
), &ainfo
);
932 optimize_iterators(index
, ainfo
, *collect
);
936 FTRACE(10, "{}", show(*ainfo
.ctx
.func
));
938 * Note: it's useful to do dead block removal before DCE, so it can remove
939 * code relating to the branch to the dead block.
941 remove_unreachable_blocks(ainfo
);
943 if (options
.LocalDCE
) {
944 visit_blocks("local DCE", index
, ainfo
, *collect
, local_dce
);
946 if (options
.GlobalDCE
) {
947 if (global_dce(index
, ainfo
)) again
= true;
948 if (control_flow_opts(ainfo
)) again
= true;
949 assert(check(*ainfo
.ctx
.func
));
951 * Global DCE can change types of locals across blocks. See
952 * dce.cpp for an explanation.
954 * We need to perform a final type analysis before we do
957 ainfo
= analyze_func(index
, ainfo
.ctx
, CollectionOpts
{});
958 update_bytecode(ainfo
.ctx
.func
, std::move(ainfo
.blockUpdates
), &ainfo
);
960 index
, ainfo
.ctx
, nullptr,
961 CollectionOpts
{}, &ainfo
965 // If we merged blocks, there could be new optimization opportunities
968 if (!isFinal
) return;
// Final-pass-only cleanups below.
970 auto const func
= ainfo
.ctx
.func
;
971 if (func
->name
== s_86pinit
.get() ||
972 func
->name
== s_86sinit
.get() ||
973 func
->name
== s_86linit
.get()) {
974 auto const& blk
= *func
->blocks
[func
->mainEntry
];
975 if (blk
.hhbcs
.size() == 2 &&
976 blk
.hhbcs
[0].op
== Op::Null
&&
977 blk
.hhbcs
[1].op
== Op::RetC
) {
978 FTRACE(2, "Erasing {}::{}\n", func
->cls
->name
, func
->name
);
979 func
->cls
->methods
.erase(
980 std::find_if(func
->cls
->methods
.begin(),
981 func
->cls
->methods
.end(),
982 [&](const std::unique_ptr
<php::Func
>& f
) {
983 return f
.get() == func
;
989 attrSetter(func
->attrs
,
990 is_pseudomain(func
) ||
991 func
->attrs
& AttrInterceptable
||
995 if (options
.InsertAssertions
) {
996 visit_blocks("insert assertions", index
, ainfo
, *collect
,
1000 for (auto& p
: func
->params
) fixTypeConstraint(index
, p
.typeConstraint
);
1002 if (RuntimeOption::EvalCheckReturnTypeHints
>= 3) {
1003 fixTypeConstraint(index
, func
->retTypeConstraint
);
1007 //////////////////////////////////////////////////////////////////////
1011 //////////////////////////////////////////////////////////////////////
1013 Bytecode
gen_constant(const Cell
& cell
) {
1014 switch (cell
.m_type
) {
1016 return bc::NullUninit
{};
1020 if (cell
.m_data
.num
) {
1023 return bc::False
{};
1026 return bc::Int
{ cell
.m_data
.num
};
1028 return bc::Double
{ cell
.m_data
.dbl
};
1030 assert(cell
.m_data
.pstr
->isStatic());
1031 case KindOfPersistentString
:
1032 return bc::String
{ cell
.m_data
.pstr
};
1034 assert(cell
.m_data
.parr
->isStatic());
1035 case KindOfPersistentVec
:
1036 assert(cell
.m_data
.parr
->isVecArray());
1037 return bc::Vec
{ cell
.m_data
.parr
};
1039 assert(cell
.m_data
.parr
->isStatic());
1040 case KindOfPersistentDict
:
1041 assert(cell
.m_data
.parr
->isDict());
1042 return bc::Dict
{ cell
.m_data
.parr
};
1044 assert(cell
.m_data
.parr
->isStatic());
1045 case KindOfPersistentKeyset
:
1046 assert(cell
.m_data
.parr
->isKeyset());
1047 return bc::Keyset
{ cell
.m_data
.parr
};
1049 assert(cell
.m_data
.parr
->isStatic());
1050 case KindOfPersistentArray
:
1051 assert(cell
.m_data
.parr
->isPHPArray());
1052 return bc::Array
{ cell
.m_data
.parr
};
1055 case KindOfResource
:
1060 case KindOfRecord
: // TODO(arnabde)
1061 always_assert(0 && "invalid constant in propagate_constants");
1066 void optimize_func(const Index
& index
, FuncAnalysis
&& ainfo
, bool isFinal
) {
1067 auto const bump
= trace_bump_for(ainfo
.ctx
.cls
, ainfo
.ctx
.func
);
1069 SCOPE_ASSERT_DETAIL("optimize_func") {
1070 return "Optimizing:" + show(ainfo
.ctx
);
1073 Trace::Bump bumper1
{Trace::hhbbc
, bump
};
1074 Trace::Bump bumper2
{Trace::hhbbc_cfg
, bump
};
1075 Trace::Bump bumper3
{Trace::hhbbc_dce
, bump
};
1076 do_optimize(index
, std::move(ainfo
), isFinal
);
/*
 * update_bytecode: apply the per-block bytecode updates produced by
 * analysis.  For each entry, either wholesale-replaces the block's
 * bytecode or keeps the first `unchangedBcs' ops and appends the
 * replacements; empty blocks get a Nop.  Dangling taken edges and
 * missing fallthroughs are redirected to a lazily created fatal block
 * (with a BreakTraceHint/String/Fatal tail for the fallthrough case).
 * Finally, for persistent-unit pseudomains, re-evaluates and stores the
 * unit's persistent_pseudomain flag.
 *
 * NOTE(review): garbled by extraction — the `func' parameter line, the
 * `fatal' lambda wrapper around fatal_block creation, several `else'
 * branches, and closing braces were dropped.  Surviving fragments
 * preserved byte-for-byte; recover the full text from the canonical
 * file before editing.
 */
1079 void update_bytecode(
1081 CompactVector
<std::pair
<BlockId
, BlockUpdateInfo
>>&& blockUpdates
,
1082 FuncAnalysis
* ainfo
) {
1084 for (auto& ent
: blockUpdates
) {
1085 auto blk
= func
->blocks
[ent
.first
].mutate();
1086 auto const srcLoc
= blk
->hhbcs
.front().srcLoc
;
1087 if (!ent
.second
.unchangedBcs
) {
1088 if (ent
.second
.replacedBcs
.size()) {
1089 blk
->hhbcs
= std::move(ent
.second
.replacedBcs
);
1091 blk
->hhbcs
= { bc_with_loc(blk
->hhbcs
.front().srcLoc
, bc::Nop
{}) };
// Keep the unchanged prefix, drop the rest, append replacements.
1094 blk
->hhbcs
.erase(blk
->hhbcs
.begin() + ent
.second
.unchangedBcs
,
1096 blk
->hhbcs
.reserve(blk
->hhbcs
.size() + ent
.second
.replacedBcs
.size());
1097 for (auto& bc
: ent
.second
.replacedBcs
) {
1098 blk
->hhbcs
.push_back(std::move(bc
));
1101 if (blk
->hhbcs
.empty()) {
1102 blk
->hhbcs
.push_back(bc_with_loc(srcLoc
, bc::Nop
{}));
// Lazily created fatal block used to patch dangling edges.
1104 auto fatal_block
= NoBlockId
;
1106 if (fatal_block
== NoBlockId
) {
1107 fatal_block
= make_fatal_block(func
, blk
);
1109 sync_ainfo(fatal_block
, *ainfo
, {});
1114 blk
->fallthrough
= ent
.second
.fallthrough
;
1116 forEachTakenEdge(blk
->hhbcs
.back(),
1117 [&] (BlockId
& bid
) {
1119 if (bid
== NoBlockId
) bid
= fatal();
1121 if (blk
->fallthrough
== NoBlockId
&&
1122 !(instrFlags(blk
->hhbcs
.back().op
) & TF
)) {
1124 blk
->fallthrough
= fatal();
1126 blk
->hhbcs
.push_back(bc::BreakTraceHint
{});
1127 blk
->hhbcs
.push_back(bc::String
{ s_unreachable
.get() });
1128 blk
->hhbcs
.push_back(bc::Fatal
{ FatalOp::Runtime
});
1132 blockUpdates
.clear();
1134 if (is_pseudomain(func
) &&
1135 func
->unit
->persistent
.load(std::memory_order_relaxed
)) {
1136 func
->unit
->persistent_pseudomain
.store(
1137 persistence_check(func
), std::memory_order_relaxed
1142 //////////////////////////////////////////////////////////////////////
1144 void optimize_class_prop_type_hints(const Index
& index
, Context ctx
) {
1146 auto const bump
= trace_bump_for(ctx
.cls
, nullptr);
1147 Trace::Bump bumper
{Trace::hhbbc
, bump
};
1148 for (auto& prop
: ctx
.cls
->properties
) {
1151 const_cast<TypeConstraint
&>(prop
.typeConstraint
)
1156 //////////////////////////////////////////////////////////////////////