2 +----------------------------------------------------------------------+
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
16 #include "hphp/hhbbc/emit.h"
23 #include <type_traits>
25 #include <folly/gen/Base.h>
26 #include <folly/Conv.h>
27 #include <folly/Optional.h>
28 #include <folly/Memory.h>
30 #include "hphp/hhbbc/cfg.h"
31 #include "hphp/hhbbc/class-util.h"
32 #include "hphp/hhbbc/context.h"
33 #include "hphp/hhbbc/func-util.h"
34 #include "hphp/hhbbc/index.h"
35 #include "hphp/hhbbc/representation.h"
36 #include "hphp/hhbbc/unit-util.h"
38 #include "hphp/runtime/base/repo-auth-type-array.h"
39 #include "hphp/runtime/base/repo-auth-type-codec.h"
40 #include "hphp/runtime/base/repo-auth-type.h"
41 #include "hphp/runtime/base/tv-comparisons.h"
43 #include "hphp/runtime/vm/bytecode.h"
44 #include "hphp/runtime/vm/func-emitter.h"
45 #include "hphp/runtime/vm/native.h"
46 #include "hphp/runtime/vm/preclass-emitter.h"
47 #include "hphp/runtime/vm/record-emitter.h"
48 #include "hphp/runtime/vm/unit-emitter.h"
50 namespace HPHP
{ namespace HHBBC
{
52 TRACE_SET_MOD(hhbbc_emit
);
56 //////////////////////////////////////////////////////////////////////
// Interned string constants reused during emission ("" and the name of
// the "__invoke" method).
const StaticString
s_empty("");
const StaticString
s_invoke("__invoke");
61 //////////////////////////////////////////////////////////////////////
73 struct EmitUnitState
{
74 explicit EmitUnitState(const Index
& index
, const php::Unit
* unit
) :
75 index(index
), unit(unit
) {}
78 * Access to the Index for this program.
83 * Access to the unit we're emitting
85 const php::Unit
* unit
;
88 * While emitting bytecode, we keep track of the classes and funcs
91 std::vector
<Offset
> classOffsets
;
92 std::vector
<PceInfo
> pceInfo
;
93 std::vector
<FeInfo
> feInfo
;
94 std::vector
<Id
> typeAliasInfo
;
98 * Some bytecodes need to be mutated before being emitted. Pass those
99 * bytecodes by value to their respective emit_op functions.
102 struct OpInfoHelper
{
103 static constexpr bool by_value
=
104 T::op
== Op::DefCls
||
105 T::op
== Op::DefClsNop
||
106 T::op
== Op::CreateCl
||
107 T::op
== Op::DefTypeAlias
;
109 using type
= typename
std::conditional
<by_value
, T
, const T
&>::type
;
113 using OpInfo
= typename OpInfoHelper
<T
>::type
;
116 * Helper to conditionally call fun on data provided data is of the
119 template<Op op
, typename F
, typename Bc
>
120 std::enable_if_t
<std::remove_reference_t
<Bc
>::op
== op
>
121 caller(F
&& fun
, Bc
&& data
) { fun(std::forward
<Bc
>(data
)); }
123 template<Op op
, typename F
, typename Bc
>
124 std::enable_if_t
<std::remove_reference_t
<Bc
>::op
!= op
>
127 Id
recordClass(EmitUnitState
& euState
, UnitEmitter
& ue
, Id id
) {
128 auto cls
= euState
.unit
->classes
[id
].get();
129 euState
.pceInfo
.push_back(
130 { ue
.newPreClassEmitter(cls
->name
->toCppString(), cls
->hoistability
), id
}
132 return euState
.pceInfo
.back().pce
->id();
135 //////////////////////////////////////////////////////////////////////
137 php::SrcLoc
srcLoc(const php::Func
& func
, int32_t ix
) {
138 if (ix
< 0) return php::SrcLoc
{};
139 auto const unit
= func
.originalUnit
? func
.originalUnit
: func
.unit
;
140 return unit
->srcLocs
[ix
];
144 * We need to ensure that all blocks in an fpi region are
145 * contiguous. This will normally be guaranteed by rpo order (since
146 * the blocks are dominated by the FPush and post-dominated by the
147 * FCall). If there are exceptional edges from the fpi region,
148 * however, and the FCall is still reachable, it's possible that the
149 * block(s) ending in the exceptional edge get sorted after the block
150 * containing the FCall.
152 * To solve this, we need to find all exceptional exits from fpi
153 * regions, and insert "fake" edges to the block containing the
154 * corresponding call.
156 std::vector
<BlockId
> initial_sort(const php::Func
& f
) {
157 auto sorted
= rpoSortFromMain(f
);
159 FTRACE(4, "Initial sort {}\n", f
.name
);
161 // We give each fpi region a unique id
163 // Map from fpi region id to the block containing its FCall.
164 // Needs value stability because extraEdges holds pointers into this map.
165 hphp_hash_map
<int, BlockId
> fpiToCallBlkMap
;
166 // Map from the ids of terminal blocks within fpi regions to the
167 // entries in fpiToCallBlkMap corresponding to the active fpi regions.
168 hphp_fast_map
<BlockId
, std::vector
<BlockId
*>> extraEdges
;
169 // The fpi state at the start of each block
170 std::vector
<folly::Optional
<std::vector
<int>>> blkFpiState(f
.blocks
.size());
171 for (auto const bid
: sorted
) {
172 auto& fpiState
= blkFpiState
[bid
];
173 if (!fpiState
) fpiState
.emplace();
174 auto curState
= *fpiState
;
175 for (auto const& bc
: f
.blocks
[bid
]->hhbcs
) {
176 if (isLegacyFPush(bc
.op
)) {
177 FTRACE(4, "blk:{} legacy FPush {} (nesting {})\n",
178 bid
, nextFpi
, curState
.size());
179 curState
.push_back(nextFpi
++);
180 } else if (isLegacyFCall(bc
.op
)) {
181 FTRACE(4, "blk:{} legacy FCall {} (nesting {})\n",
182 bid
, curState
.back(), curState
.size() - 1);
183 fpiToCallBlkMap
[curState
.back()] = bid
;
187 auto hasNormalSucc
= false;
188 forEachNormalSuccessor(
191 hasNormalSucc
= true;
192 auto &succState
= blkFpiState
[id
];
193 FTRACE(4, "blk:{} propagate state to {}\n",
196 succState
= curState
;
198 assertx(succState
== curState
);
202 if (!hasNormalSucc
) {
203 for (auto fpi
: curState
) {
204 // We may or may not have seen the FCall yet; if we've not
205 // seen it, either there is none (in which case there's
206 // nothing to do), or the blocks happen to be in the right
207 // order anyway (so again there's apparently nothing to
208 // do). But in the latter case it's possible that adding edges
209 // for another fpi region (or another exceptional exit in this
210 // fpi region) might perturb the order; so we record a pointer
211 // to the unordered_map element, in case it gets filled in
213 auto const res
= fpiToCallBlkMap
.emplace(fpi
, NoBlockId
);
214 extraEdges
[bid
].push_back(&res
.first
->second
);
219 if (!extraEdges
.empty()) {
220 hphp_fast_map
<BlockId
, std::vector
<BlockId
>> extraIds
;
221 for (auto& elm
: extraEdges
) {
222 for (auto const bidPtr
: elm
.second
) {
223 if (*bidPtr
== NoBlockId
) {
224 // There was no FCall, so no need to do anything
227 FTRACE(4, "blk:{} add extra edge to {}\n",
229 extraIds
[elm
.first
].push_back(*bidPtr
);
232 if (extraIds
.size()) {
233 // redo the sort with the extra edges in place
234 sorted
= rpoSortFromMain(f
, &extraIds
);
242 * Order the blocks for bytecode emission.
244 * Rules about block order:
246 * - All bytecodes corresponding to a given FPI region must be
247 * contiguous. Note that an FPI region can start or end part way
248 * through a block, so this constraint is on bytecodes, not blocks
250 * - Each DV funclet must have all of its blocks contiguous, with the
253 * - Main entry point must be the first block.
255 * It is not a requirement, but we attempt to locate all the DV entry
256 * points after the rest of the primary function body. The normal
257 * case for DV initializers is that each one falls through to the
258 * next, with the block jumping back to the main entry point.
260 std::vector
<BlockId
> order_blocks(const php::Func
& f
) {
261 auto sorted
= initial_sort(f
);
263 // Get the DV blocks, without the rest of the primary function body,
264 // and then add them to the end of sorted.
265 auto const dvBlocks
= [&] {
266 auto withDVs
= rpoSortAddDVs(f
);
268 std::find(begin(withDVs
), end(withDVs
), sorted
.front()),
273 sorted
.insert(end(sorted
), begin(dvBlocks
), end(dvBlocks
));
275 FTRACE(2, " block order:{}\n",
278 for (auto const bid
: sorted
) {
280 ret
+= folly::to
<std::string
>(bid
);
288 // While emitting bytecode, we learn about some metadata that will
289 // need to be registered in the FuncEmitter.
297 struct JmpFixup
{ Offset instrOff
; Offset jmpImmedOff
; };
301 : offset(kInvalidOffset
)
302 , past(kInvalidOffset
)
306 // The offset of the block, if we've already emitted it.
307 // Otherwise kInvalidOffset.
310 // The offset past the end of this block.
313 // How many catch regions the jump at the end of this block is leaving.
314 // 0 if there is no jump or if the jump is to the same catch region or a
318 // When we emit a forward jump to a block we haven't seen yet, we
319 // write down where the jump was so we can fix it up when we get
321 std::vector
<JmpFixup
> forwardJumps
;
323 // When we see a forward jump to a block, we record the stack
324 // depth at the jump site here. This is needed to track
325 // currentStackDepth correctly (and we also assert all the jumps
326 // have the same depth).
327 folly::Optional
<uint32_t> expectedStackDepth
;
329 // Similar to expectedStackDepth, for the fpi stack. Needed to deal with
330 // terminal instructions that end an fpi region.
331 folly::Optional
<uint32_t> expectedFPIDepth
;
334 std::vector
<BlockId
> blockOrder
;
335 uint32_t maxStackDepth
;
337 std::vector
<FPI
> fpiRegions
;
338 std::vector
<BlockInfo
> blockInfo
;
341 using ExnNodePtr
= php::ExnNode
*;
343 bool handleEquivalent(const php::Func
& func
, ExnNodeId eh1
, ExnNodeId eh2
) {
344 auto entry
= [&] (ExnNodeId eid
) {
345 return func
.exnNodes
[eid
].region
.catchEntry
;
349 assertx(eh1
!= NoExnNodeId
&&
350 eh2
!= NoExnNodeId
&&
351 func
.exnNodes
[eh1
].depth
== func
.exnNodes
[eh2
].depth
);
352 if (entry(eh1
) != entry(eh2
)) return false;
353 eh1
= func
.exnNodes
[eh1
].parent
;
354 eh2
= func
.exnNodes
[eh2
].parent
;
360 // The common parent P of eh1 and eh2 is the deepest region such that
361 // eh1 and eh2 are both handle-equivalent to P or a child of P
362 ExnNodeId
commonParent(const php::Func
& func
, ExnNodeId eh1
, ExnNodeId eh2
) {
363 if (eh1
== NoExnNodeId
|| eh2
== NoExnNodeId
) return NoExnNodeId
;
364 while (func
.exnNodes
[eh1
].depth
> func
.exnNodes
[eh2
].depth
) {
365 eh1
= func
.exnNodes
[eh1
].parent
;
367 while (func
.exnNodes
[eh2
].depth
> func
.exnNodes
[eh1
].depth
) {
368 eh2
= func
.exnNodes
[eh2
].parent
;
370 while (!handleEquivalent(func
, eh1
, eh2
)) {
371 eh1
= func
.exnNodes
[eh1
].parent
;
372 eh2
= func
.exnNodes
[eh2
].parent
;
378 s_hhbbc_fail_verification("__hhvm_intrinsics\\hhbbc_fail_verification");
380 EmitBcInfo
emit_bytecode(EmitUnitState
& euState
,
382 const php::Func
& func
) {
384 auto& blockInfo
= ret
.blockInfo
;
385 blockInfo
.resize(func
.blocks
.size());
387 // Track the stack depth while emitting to determine maxStackDepth.
388 int32_t currentStackDepth
{ 0 };
390 // Stack of in-progress fpi regions.
391 std::vector
<EmitBcInfo::FPI
> fpiStack
;
393 // Temporary buffer for vector immediates. (Hoisted so it's not
394 // allocated in the loop.)
395 std::vector
<uint8_t> immVec
;
397 // Offset of the last emitted bytecode.
398 Offset lastOff
{ 0 };
400 bool traceBc
= false;
404 auto const pseudomain
= is_pseudomain(&func
);
405 auto process_mergeable
= [&] (const Bytecode
& bc
) {
406 if (!pseudomain
) return;
410 if (!ue
.m_returnSeen
) {
411 auto const& cls
= euState
.unit
->classes
[
412 bc
.op
== Op::DefCls
? bc
.DefCls
.arg1
: bc
.DefClsNop
.arg1
];
413 if (cls
->hoistability
== PreClass::NotHoistable
) {
414 cls
->hoistability
= PreClass::Mergeable
;
419 case Op::AssertRATStk
:
424 if (ue
.m_returnSeen
|| tos
.subtypeOf(BBottom
)) break;
427 auto val
= euState
.index
.lookup_persistent_constant(bc
.DefCns
.str1
);
428 // If there's a persistent constant with the same name, either
429 // this is the one and only definition, or the persistent
430 // definition is in systemlib (and this one will always fail).
431 auto const kind
= val
&& cellSame(*val
, *top
) ?
432 Unit::MergeKind::PersistentDefine
: Unit::MergeKind::Define
;
433 ue
.pushMergeableDef(kind
, bc
.DefCns
.str1
, *top
);
436 case Op::DefTypeAlias
:
437 ue
.pushMergeableTypeAlias(Unit::MergeKind::TypeAlias
,
438 bc
.DefTypeAlias
.arg1
);
441 case Op::Null
: tos
= TInitNull
; return;
442 case Op::True
: tos
= TTrue
; return;
443 case Op::False
: tos
= TFalse
; return;
444 case Op::Int
: tos
= ival(bc
.Int
.arg1
); return;
445 case Op::Double
: tos
= dval(bc
.Double
.dbl1
); return;
446 case Op::String
: tos
= sval(bc
.String
.str1
); return;
447 case Op::Vec
: tos
= vec_val(bc
.Vec
.arr1
); return;
448 case Op::Dict
: tos
= dict_val(bc
.Dict
.arr1
); return;
449 case Op::Keyset
: tos
= keyset_val(bc
.Keyset
.arr1
); return;
450 case Op::Array
: tos
= aval(bc
.Array
.arr1
); return;
455 if (ue
.m_returnSeen
|| tos
.subtypeOf(BBottom
)) break;
458 ue
.m_returnSeen
= true;
459 ue
.m_mainReturn
= *top
;
466 ue
.m_returnSeen
= true;
467 ue
.m_mainReturn
= make_tv
<KindOfUninit
>();
471 auto map_local
= [&] (LocalId id
) {
472 auto const loc
= func
.locals
[id
];
474 assert(loc
.id
<= id
);
478 auto end_fpi
= [&] (Offset off
) {
479 auto& fpi
= fpiStack
.back();
481 ret
.fpiRegions
.push_back(fpi
);
485 auto set_expected_depth
= [&] (BlockId block
) {
486 auto& info
= blockInfo
[block
];
488 if (info
.expectedStackDepth
) {
489 assert(*info
.expectedStackDepth
== currentStackDepth
);
491 info
.expectedStackDepth
= currentStackDepth
;
494 if (info
.expectedFPIDepth
) {
495 assert(*info
.expectedFPIDepth
== fpiStack
.size());
497 info
.expectedFPIDepth
= fpiStack
.size();
501 auto make_member_key
= [&] (MKey mkey
) {
502 switch (mkey
.mcode
) {
504 return MemberKey
{mkey
.mcode
, mkey
.idx
};
507 mkey
.mcode
, static_cast<int32_t>(map_local(mkey
.local
))
509 case MET
: case MPT
: case MQT
:
510 return MemberKey
{mkey
.mcode
, mkey
.litstr
};
512 return MemberKey
{mkey
.mcode
, mkey
.int64
};
519 auto emit_inst
= [&] (const Bytecode
& inst
) {
520 process_mergeable(inst
);
521 auto const startOffset
= ue
.bcPos();
522 lastOff
= startOffset
;
524 FTRACE(4, " emit: {} -- {} @ {}\n", currentStackDepth
, show(&func
, inst
),
525 show(srcLoc(func
, inst
.srcLoc
)));
527 if (options
.TraceBytecodes
.count(inst
.op
)) traceBc
= true;
529 auto emit_vsa
= [&] (const CompactVector
<LSString
>& keys
) {
530 auto n
= keys
.size();
532 for (size_t i
= 0; i
< n
; ++i
) {
533 ue
.emitInt32(ue
.mergeLitstr(keys
[i
]));
537 auto emit_branch
= [&] (BlockId id
) {
538 auto& info
= blockInfo
[id
];
539 if (info
.offset
!= kInvalidOffset
) {
540 ue
.emitInt32(info
.offset
- startOffset
);
542 info
.forwardJumps
.push_back({ startOffset
, ue
.bcPos() });
547 auto emit_switch
= [&] (const SwitchTab
& targets
) {
548 ue
.emitIVA(targets
.size());
549 for (auto t
: targets
) {
550 set_expected_depth(t
);
555 auto emit_sswitch
= [&] (const SSwitchTab
& targets
) {
556 ue
.emitIVA(targets
.size());
557 for (size_t i
= 0; i
< targets
.size() - 1; ++i
) {
558 set_expected_depth(targets
[i
].second
);
559 ue
.emitInt32(ue
.mergeLitstr(targets
[i
].first
));
560 emit_branch(targets
[i
].second
);
563 set_expected_depth(targets
[targets
.size() - 1].second
);
564 emit_branch(targets
[targets
.size() - 1].second
);
567 auto emit_itertab
= [&] (const IterTab
& iterTab
) {
568 ue
.emitIVA(iterTab
.size());
569 for (auto const& kv
: iterTab
) {
572 if (kv
.kind
== KindOfLIter
) {
573 always_assert(kv
.local
!= NoLocalId
);
574 ue
.emitIVA(map_local(kv
.local
));
576 always_assert(kv
.local
== NoLocalId
);
581 auto emit_argvec32
= [&] (const CompactVector
<uint32_t>& argv
) {
582 ue
.emitIVA(argv
.size());
583 for (auto i
: argv
) {
588 auto emit_srcloc
= [&] {
589 auto const sl
= srcLoc(func
, inst
.srcLoc
);
590 if (!sl
.isValid()) return;
591 Location::Range
loc(sl
.start
.line
, sl
.start
.col
,
592 sl
.past
.line
, sl
.past
.col
);
593 ue
.recordSourceLocation(loc
, startOffset
);
596 auto pop
= [&] (int32_t n
) {
597 currentStackDepth
-= n
;
598 assert(currentStackDepth
>= 0);
600 auto push
= [&] (int32_t n
) {
601 currentStackDepth
+= n
;
602 auto const depth
= currentStackDepth
+ fpiStack
.size() * kNumActRecCells
;
603 ret
.maxStackDepth
= std::max
<uint32_t>(ret
.maxStackDepth
, depth
);
606 auto begin_fpi
= [&] {
607 fpiStack
.push_back({startOffset
, kInvalidOffset
, currentStackDepth
});
608 auto const depth
= currentStackDepth
+ fpiStack
.size() * kNumActRecCells
;
609 ret
.maxStackDepth
= std::max
<uint32_t>(ret
.maxStackDepth
, depth
);
612 auto ret_assert
= [&] { assert(currentStackDepth
== inst
.numPop()); };
614 auto clsid_impl
= [&] (uint32_t& id
, bool closure
) {
615 if (euState
.classOffsets
[id
] != kInvalidOffset
) {
616 always_assert(closure
);
617 for (auto const& elm
: euState
.pceInfo
) {
618 if (elm
.origId
== id
) {
623 always_assert(false);
625 euState
.classOffsets
[id
] = startOffset
;
626 id
= recordClass(euState
, ue
, id
);
628 auto defcls
= [&] (auto& data
) { clsid_impl(data
.arg1
, false); };
629 auto defclsnop
= [&] (auto& data
) { clsid_impl(data
.arg1
, false); };
630 auto createcl
= [&] (auto& data
) { clsid_impl(data
.arg2
, true); };
631 auto deftype
= [&] (auto& data
) {
632 euState
.typeAliasInfo
.push_back(data
.arg1
);
633 data
.arg1
= euState
.typeAliasInfo
.size() - 1;
636 auto emit_lar
= [&](const LocalRange
& range
) {
637 always_assert(range
.first
+ range
.count
<= func
.locals
.size());
638 auto const first
= (range
.count
> 0) ? map_local(range
.first
) : 0;
639 encodeLocalRange(ue
, HPHP::LocalRange
{first
, range
.count
});
642 #define IMM_BLA(n) emit_switch(data.targets);
643 #define IMM_SLA(n) emit_sswitch(data.targets);
644 #define IMM_ILA(n) emit_itertab(data.iterTab);
645 #define IMM_I32LA(n) emit_argvec32(data.argv);
646 #define IMM_IVA(n) ue.emitIVA(data.arg##n);
647 #define IMM_I64A(n) ue.emitInt64(data.arg##n);
648 #define IMM_LA(n) ue.emitIVA(map_local(data.loc##n));
649 #define IMM_IA(n) ue.emitIVA(data.iter##n);
650 #define IMM_CAR(n) ue.emitIVA(data.slot);
651 #define IMM_CAW(n) ue.emitIVA(data.slot);
652 #define IMM_DA(n) ue.emitDouble(data.dbl##n);
653 #define IMM_SA(n) ue.emitInt32(ue.mergeLitstr(data.str##n));
654 #define IMM_RATA(n) encodeRAT(ue, data.rat);
655 #define IMM_AA(n) ue.emitInt32(ue.mergeArray(data.arr##n));
656 #define IMM_OA_IMPL(n) ue.emitByte(static_cast<uint8_t>(data.subop##n));
657 #define IMM_OA(type) IMM_OA_IMPL
658 #define IMM_BA(n) targets[numTargets++] = data.target##n; \
659 emit_branch(data.target##n);
660 #define IMM_VSA(n) emit_vsa(data.keys);
661 #define IMM_KA(n) encode_member_key(make_member_key(data.mkey), ue);
662 #define IMM_LAR(n) emit_lar(data.locrange);
663 #define IMM_FCA(n) encodeFCallArgs( \
664 ue, data.fca, data.fca.byRefs.get(), \
665 data.fca.asyncEagerTarget != NoBlockId, \
667 set_expected_depth(data.fca.asyncEagerTarget); \
668 emit_branch(data.fca.asyncEagerTarget); \
670 if (!data.fca.hasUnpack()) ret.containsCalls = true;\
673 #define IMM_ONE(x) IMM_##x(1)
674 #define IMM_TWO(x, y) IMM_##x(1); IMM_##y(2);
675 #define IMM_THREE(x, y, z) IMM_TWO(x, y); IMM_##z(3);
676 #define IMM_FOUR(x, y, z, n) IMM_THREE(x, y, z); IMM_##n(4);
677 #define IMM_FIVE(x, y, z, n, m) IMM_FOUR(x, y, z, n); IMM_##m(5);
680 #define POP_ONE(x) pop(1);
681 #define POP_TWO(x, y) pop(2);
682 #define POP_THREE(x, y, z) pop(3);
684 #define POP_MFINAL pop(data.arg1);
685 #define POP_C_MFINAL(n) pop(n); pop(data.arg1);
686 #define POP_CMANY pop(data.arg##1);
687 #define POP_SMANY pop(data.keys.size());
688 #define POP_CUMANY pop(data.arg##1);
689 #define POP_CVUMANY pop(data.arg##1);
690 #define POP_FPUSH(nin, nobj) \
691 pop(data.arg1 + nin + 3);
692 #define POP_FCALL(nin, nobj) \
693 pop(nin + data.fca.numArgsInclUnpack() + 2 + \
695 #define POP_FCALLO pop(data.fca.numArgsInclUnpack() + data.fca.numRets - 1);
698 #define PUSH_ONE(x) push(1);
699 #define PUSH_TWO(x, y) push(2);
700 #define PUSH_THREE(x, y, z) push(3);
701 #define PUSH_FPUSH push(data.arg1);
702 #define PUSH_FCALL push(data.fca.numRets);
704 #define O(opcode, imms, inputs, outputs, flags) \
705 auto emit_##opcode = [&] (OpInfo<bc::opcode> data) { \
706 if (RuntimeOption::EnableIntrinsicsExtension) { \
707 if (Op::opcode == Op::FCallBuiltin && \
708 inst.FCallBuiltin.str3->isame( \
709 s_hhbbc_fail_verification.get())) { \
710 ue.emitOp(Op::CheckProp); \
712 ue.mergeLitstr(inst.FCallBuiltin.str3)); \
713 ue.emitOp(Op::PopC); \
716 caller<Op::DefCls>(defcls, data); \
717 caller<Op::DefClsNop>(defclsnop, data); \
718 caller<Op::CreateCl>(createcl, data); \
719 caller<Op::DefTypeAlias>(deftype, data); \
721 if (isRet(Op::opcode)) ret_assert(); \
722 ue.emitOp(Op::opcode); \
724 if (isLegacyFPush(Op::opcode)) begin_fpi(); \
725 if (isLegacyFCall(Op::opcode)) end_fpi(startOffset); \
727 size_t numTargets = 0; \
728 std::array<BlockId, kMaxHhbcImms> targets; \
730 if (Op::opcode == Op::MemoGet) { \
732 assertx(numTargets == 1); \
733 set_expected_depth(targets[0]); \
735 } else if (Op::opcode == Op::MemoGetEager) { \
737 assertx(numTargets == 2); \
738 set_expected_depth(targets[0]); \
740 set_expected_depth(targets[1]); \
744 for (size_t i = 0; i < numTargets; ++i) { \
745 set_expected_depth(targets[i]); \
749 if (flags & TF) currentStackDepth = 0; \
809 #define O(opcode, ...) \
811 if (Op::opcode != Op::Nop) emit_##opcode(inst.opcode); \
813 switch (inst
.op
) { OPCODES
}
817 ret
.blockOrder
= order_blocks(func
);
818 auto blockIt
= begin(ret
.blockOrder
);
819 auto const endBlockIt
= end(ret
.blockOrder
);
820 for (; blockIt
!= endBlockIt
; ++blockIt
) {
822 auto& info
= blockInfo
[bid
];
823 auto const b
= func
.blocks
[bid
].get();
825 info
.offset
= ue
.bcPos();
826 FTRACE(2, " block {}: {}\n", bid
, info
.offset
);
828 for (auto& fixup
: info
.forwardJumps
) {
829 ue
.emitInt32(info
.offset
- fixup
.instrOff
, fixup
.jmpImmedOff
);
832 if (!info
.expectedStackDepth
) {
833 // unreachable, or entry block
834 info
.expectedStackDepth
= b
->catchEntry
? 1 : 0;
837 currentStackDepth
= *info
.expectedStackDepth
;
839 if (!info
.expectedFPIDepth
) {
840 // unreachable, or an entry block
841 info
.expectedFPIDepth
= 0;
844 // deal with fpiRegions that were ended by terminal instructions
845 assert(*info
.expectedFPIDepth
<= fpiStack
.size());
846 while (*info
.expectedFPIDepth
< fpiStack
.size()) end_fpi(lastOff
);
848 auto fallthrough
= b
->fallthrough
;
849 auto end
= b
->hhbcs
.end();
852 if (is_single_nop(*b
)) {
853 if (blockIt
== begin(ret
.blockOrder
)) {
854 // If the first block is just a Nop, this means that there is
855 // a jump to the second block from somewhere in the
856 // function. We don't want this, so we change this nop to an
857 // EntryNop so it doesn't get optimized away
858 emit_inst(bc_with_loc(b
->hhbcs
.front().srcLoc
, bc::EntryNop
{}));
861 // If the block ends with JmpZ or JmpNZ to the next block, flip
862 // the condition to make the fallthrough the next block
863 if (b
->hhbcs
.back().op
== Op::JmpZ
||
864 b
->hhbcs
.back().op
== Op::JmpNZ
) {
865 auto const& bc
= b
->hhbcs
.back();
867 bc
.op
== Op::JmpNZ
? bc
.JmpNZ
.target1
: bc
.JmpZ
.target1
;
868 if (std::next(blockIt
) != endBlockIt
&& blockIt
[1] == target
) {
869 fallthrough
= target
;
875 for (auto iit
= b
->hhbcs
.begin(); iit
!= end
; ++iit
) emit_inst(*iit
);
877 if (end
->op
== Op::JmpNZ
) {
878 emit_inst(bc_with_loc(end
->srcLoc
, bc::JmpZ
{ b
->fallthrough
}));
880 emit_inst(bc_with_loc(end
->srcLoc
, bc::JmpNZ
{ b
->fallthrough
}));
885 info
.past
= ue
.bcPos();
887 if (fallthrough
!= NoBlockId
) {
888 set_expected_depth(fallthrough
);
889 if (std::next(blockIt
) == endBlockIt
||
890 blockIt
[1] != fallthrough
) {
891 if (b
->fallthroughNS
) {
892 emit_inst(bc::JmpNS
{ fallthrough
});
894 emit_inst(bc::Jmp
{ fallthrough
});
897 auto const parent
= commonParent(func
,
898 func
.blocks
[fallthrough
]->exnNodeId
,
901 auto depth
= [&] (ExnNodeId eid
) {
902 return eid
== NoExnNodeId
? 0 : func
.exnNodes
[eid
].depth
;
904 // If we are in an exn region we pop from the current region to the
905 // common parent. If the common parent is null, we pop all regions
906 info
.regionsToPop
= depth(b
->exnNodeId
) - depth(parent
);
907 assert(info
.regionsToPop
>= 0);
908 FTRACE(4, " popped catch regions: {}\n", info
.regionsToPop
);
912 if (b
->throwExit
!= NoBlockId
) {
913 FTRACE(4, " throw: {}\n", b
->throwExit
);
915 if (fallthrough
!= NoBlockId
) {
916 FTRACE(4, " fallthrough: {}\n", fallthrough
);
918 FTRACE(2, " block {} end: {}\n", bid
, info
.past
);
921 while (fpiStack
.size()) end_fpi(lastOff
);
924 FTRACE(0, "TraceBytecode (emit): {}::{} in {}\n",
925 func
.cls
? func
.cls
->name
->data() : "",
926 func
.name
, func
.unit
->filename
);
932 void emit_locals_and_params(FuncEmitter
& fe
,
933 const php::Func
& func
,
934 const EmitBcInfo
& info
) {
937 for (auto& loc
: func
.locals
) {
938 if (loc
.id
< func
.params
.size()) {
940 auto& param
= func
.params
[id
];
941 FuncEmitter::ParamInfo pinfo
;
942 pinfo
.defaultValue
= param
.defaultValue
;
943 pinfo
.typeConstraint
= param
.typeConstraint
;
944 pinfo
.userType
= param
.userTypeConstraint
;
945 pinfo
.phpCode
= param
.phpCode
;
946 pinfo
.userAttributes
= param
.userAttributes
;
947 pinfo
.builtinType
= param
.builtinType
;
948 pinfo
.byRef
= param
.byRef
;
949 pinfo
.inout
= param
.inout
;
950 pinfo
.variadic
= param
.isVariadic
;
951 fe
.appendParam(func
.locals
[id
].name
, pinfo
);
952 auto const dv
= param
.dvEntryPoint
;
953 if (dv
!= NoBlockId
) {
954 fe
.params
[id
].funcletOff
= info
.blockInfo
[dv
].offset
;
957 } else if (!loc
.killed
) {
959 fe
.allocVarId(loc
.name
);
960 assert(fe
.lookupVarId(loc
.name
) == id
);
961 assert(loc
.id
== id
);
963 fe
.allocUnnamedLocal();
968 assert(fe
.numLocals() == id
);
969 fe
.setNumIterators(func
.numIters
);
970 fe
.setNumClsRefSlots(func
.numClsRefSlots
);
974 const php::ExnNode
* node
;
980 template<class BlockInfo
, class ParentIndexMap
>
981 void emit_eh_region(FuncEmitter
& fe
,
982 const EHRegion
* region
,
983 const BlockInfo
& blockInfo
,
984 ParentIndexMap
& parentIndexMap
) {
985 FTRACE(2, " func {}: ExnNode {}\n", fe
.name
, region
->node
->idx
);
987 auto const unreachable
= [&] (const php::ExnNode
& node
) {
988 return blockInfo
[node
.region
.catchEntry
].offset
== kInvalidOffset
;
991 // A region on a single empty block.
992 if (region
->start
== region
->past
) {
993 FTRACE(2, " Skipping\n");
995 } else if (unreachable(*region
->node
)) {
996 FTRACE(2, " Unreachable\n");
1000 FTRACE(2, " Process @ {}-{}\n", region
->start
, region
->past
);
1002 auto& eh
= fe
.addEHEnt();
1003 eh
.m_base
= region
->start
;
1004 eh
.m_past
= region
->past
;
1005 assert(eh
.m_past
>= eh
.m_base
);
1006 assert(eh
.m_base
!= kInvalidOffset
&& eh
.m_past
!= kInvalidOffset
);
1008 // An unreachable parent won't be emitted (and thus its offset won't be set),
1009 // so find the closest reachable one.
1010 auto parent
= region
->parent
;
1011 while (parent
&& unreachable(*parent
->node
)) parent
= parent
->parent
;
1013 auto parentIt
= parentIndexMap
.find(parent
);
1014 assert(parentIt
!= end(parentIndexMap
));
1015 eh
.m_parentIndex
= parentIt
->second
;
1017 eh
.m_parentIndex
= -1;
1019 parentIndexMap
[region
] = fe
.ehtab
.size() - 1;
1021 auto const& cr
= region
->node
->region
;
1022 eh
.m_handler
= blockInfo
[cr
.catchEntry
].offset
;
1023 eh
.m_end
= kInvalidOffset
;
1024 eh
.m_iterId
= cr
.iterId
;
1026 assert(eh
.m_handler
!= kInvalidOffset
);
1029 void exn_path(const php::Func
& func
,
1030 std::vector
<const php::ExnNode
*>& ret
,
1032 if (id
== NoExnNodeId
) return;
1033 auto const& n
= func
.exnNodes
[id
];
1034 exn_path(func
, ret
, n
.parent
);
1038 // Return the count of shared elements in the front of two forward
/*
 * Count the number of equal elements at the front of the two forward
 * ranges (the length of their shared prefix).
 */
template<class ForwardRange1, class ForwardRange2>
size_t shared_prefix(ForwardRange1& r1, ForwardRange2& r2) {
  size_t count = 0;
  auto it1 = begin(r1);
  auto it2 = begin(r2);
  auto const end1 = end(r1);
  auto const end2 = end(r2);
  for (; it1 != end1 && it2 != end2 && *it1 == *it2; ++it1, ++it2) {
    ++count;
  }
  return count;
}
1054 * Traverse the actual block layout, and find out the intervals for
1055 * each exception region in the tree.
1057 * The basic idea here is that we haven't constrained block layout
1058 * based on the exception tree, but adjacent blocks are still
1059 * reasonably likely to have the same ExnNode. Try to coalesce the EH
1060 * regions we create in those cases.
1062 void emit_ehent_tree(FuncEmitter
& fe
, const php::Func
& func
,
1063 const EmitBcInfo
& info
) {
1065 const php::ExnNode
*,
1066 std::vector
<std::unique_ptr
<EHRegion
>>
1070 * While walking over the blocks in layout order, we track the set
1071 * of "active" exnNodes. This are a list of exnNodes that inherit
1072 * from each other. When a new active node is pushed, begin an
1073 * EHEnt, and when it's popped, it's done.
1075 std::vector
<const php::ExnNode
*> activeList
;
1077 auto pop_active
= [&] (Offset past
) {
1078 auto p
= activeList
.back();
1079 activeList
.pop_back();
1080 exnMap
[p
].back()->past
= past
;
1083 auto push_active
= [&] (const php::ExnNode
* p
, Offset start
) {
1084 auto const parent
= activeList
.empty()
1086 : exnMap
[activeList
.back()].back().get();
1087 exnMap
[p
].push_back(
1088 std::make_unique
<EHRegion
>(
1089 EHRegion
{ p
, parent
, start
, kInvalidOffset
}
1092 activeList
.push_back(p
);
1096 * Walk over the blocks, and compare the new block's exnNode path to
1097 * the active one. Find the least common ancestor of the two paths,
1098 * then modify the active list by popping and then pushing nodes to
1099 * set it to the new block's path.
1101 for (auto const bid
: info
.blockOrder
) {
1102 auto const b
= func
.blocks
[bid
].get();
1103 auto const offset
= info
.blockInfo
[bid
].offset
;
1105 if (b
->exnNodeId
== NoExnNodeId
) {
1106 while (!activeList
.empty()) pop_active(offset
);
1110 std::vector
<const php::ExnNode
*> current
;
1111 exn_path(func
, current
, b
->exnNodeId
);
1113 auto const prefix
= shared_prefix(current
, activeList
);
1114 for (size_t i
= prefix
, sz
= activeList
.size(); i
< sz
; ++i
) {
1117 for (size_t i
= prefix
, sz
= current
.size(); i
< sz
; ++i
) {
1118 push_active(current
[i
], offset
);
1121 for (int i
= 0; i
< info
.blockInfo
[bid
].regionsToPop
; i
++) {
1122 // If the block ended in a jump out of the catch region, this effectively
1123 // ends all catch regions deeper than the one we are jumping to
1124 pop_active(info
.blockInfo
[bid
].past
);
1127 if (debug
&& !activeList
.empty()) {
1129 exn_path(func
, current
, activeList
.back()->idx
);
1130 assert(current
== activeList
);
1134 while (!activeList
.empty()) {
1135 pop_active(info
.blockInfo
[info
.blockOrder
.back()].past
);
1139 * We've created all our regions, but we need to sort them instead
1140 * of trying to get the UnitEmitter to do it.
1142 * The UnitEmitter expects EH regions that look a certain way
1143 * (basically the way emitter.cpp likes them). There are some rules
1144 * about the order it needs to have at runtime, which we set up
1147 * Essentially, an entry a is less than an entry b iff:
1149 * - a starts before b
1150 * - a starts at the same place, but encloses b entirely
1151 * - a has the same extents as b, but is a parent of b
1153 std::vector
<EHRegion
*> regions
;
1154 for (auto& mapEnt
: exnMap
) {
1155 for (auto& region
: mapEnt
.second
) {
1156 regions
.push_back(region
.get());
1160 begin(regions
), end(regions
),
1161 [&] (const EHRegion
* a
, const EHRegion
* b
) {
1162 if (a
== b
) return false;
1163 if (a
->start
== b
->start
) {
1164 if (a
->past
== b
->past
) {
1165 // When regions exactly overlap, the parent is less than the
1167 for (auto p
= b
->parent
; p
!= nullptr; p
= p
->parent
) {
1168 if (p
== a
) return true;
1170 // If a is not a parent of b, and they have the same region;
1171 // then b better be a parent of a.
1174 for (; p
!= b
&& p
!= nullptr; p
= p
->parent
) continue;
1179 return a
->past
> b
->past
;
1181 return a
->start
< b
->start
;
1185 hphp_fast_map
<const EHRegion
*,uint32_t> parentIndexMap
;
1186 for (auto& r
: regions
) {
1187 emit_eh_region(fe
, r
, info
.blockInfo
, parentIndexMap
);
1189 fe
.setEHTabIsSorted();
/*
 * Merge any string data referenced by a RepoAuthType into the
 * UnitEmitter's litstr table, so references in the encoded RAT resolve
 * at runtime.  (Most of the tag case labels are not visible in this
 * excerpt; the visible ones fall through to the shared handling below.)
 */
1192 void merge_repo_auth_type(UnitEmitter
& ue
, RepoAuthType rat
) {
// Shorthand for the RepoAuthType tag enum used by the case labels.
1193 using T
= RepoAuthType::Tag
;
// Dispatch on the RAT tag; array-like and object-like tags need merging.
1195 switch (rat
.tag()) {
1207 case T::OptUncArrKey
:
1209 case T::OptUncStrLike
:
1262 // NOTE: In repo mode, RAT's in Array's might only contain global litstr
1263 // id's. No need to merge. In non-repo mode, RAT's in Array's might contain
1264 // local litstr id's.
1265 if (RuntimeOption::RepoAuthoritative
) return;
// Recurse into the array descriptor to merge any nested element types.
1267 if (rat
.hasArrData()) {
1268 auto arr
= rat
.array();
1269 switch (arr
->tag()) {
1270 case RepoAuthType::Array::Tag::Packed
:
// Packed arrays carry a per-element type for each index.
1271 for (uint32_t i
= 0; i
< arr
->size(); ++i
) {
1272 merge_repo_auth_type(ue
, arr
->packedElem(i
));
1275 case RepoAuthType::Array::Tag::PackedN
:
// PackedN arrays carry a single element type for all indices.
1276 merge_repo_auth_type(ue
, arr
->elemType());
// Object tags carry a class-name literal that must be in the litstr table.
1283 case T::OptExactObj
:
1286 ue
.mergeLitstr(rat
.clsName());
/*
 * Finalize a FuncEmitter after its bytecode has been emitted: copy the
 * FPI regions, locals/params, and EH region tree into the emitter, copy
 * function metadata and flags, record repo-auth return types, compute
 * the max stack cells, and seal the emitter.
 *
 * NOTE(review): a `FuncEmitter& fe` parameter appears to be elided from
 * this excerpt (the body uses `fe` throughout) — confirm against the
 * full source.
 */
1291 void emit_finish_func(EmitUnitState
& state
,
1292 const php::Func
& func
,
1294 const EmitBcInfo
& info
) {
// Propagate the contains-calls bit observed during bytecode emission.
// NOTE(review): the stray second ';' here is harmless but should go.
1295 if (info
.containsCalls
) fe
.containsCalls
= true;;
// Copy each FPI (function-call parameter) region into the emitter's table.
1297 for (auto& fpi
: info
.fpiRegions
) {
1298 auto& e
= fe
.addFPIEnt();
1299 e
.m_fpushOff
= fpi
.fpushOff
;
1300 e
.m_fpiEndOff
= fpi
.fpiEndOff
;
1301 e
.m_fpOff
= fpi
.fpDelta
;
// Emit local/parameter metadata and the exception-handler region tree.
1304 emit_locals_and_params(fe
, func
, info
);
1305 emit_ehent_tree(fe
, func
, info
);
1307 // Nothing should look at the bytecode from now on. Free it up to
1308 // compensate for the UnitEmitter we're creating.
1309 const_cast<php::Func
&>(func
).blocks
.clear();
// Copy function metadata (attributes, user types, origin) and flags.
1311 fe
.userAttributes
= func
.userAttributes
;
1312 fe
.retUserType
= func
.returnUserType
;
// Prefer an explicit original filename; fall back to the original
// unit's filename, else null.
1313 fe
.originalFilename
=
1314 func
.originalFilename
? func
.originalFilename
:
1315 func
.originalUnit
? func
.originalUnit
->filename
: nullptr;
1316 fe
.isClosureBody
= func
.isClosureBody
;
1317 fe
.isAsync
= func
.isAsync
;
1318 fe
.isGenerator
= func
.isGenerator
;
1319 fe
.isPairGenerator
= func
.isPairGenerator
;
1320 fe
.isNative
= func
.nativeInfo
!= nullptr;
1321 fe
.isMemoizeWrapper
= func
.isMemoizeWrapper
;
1322 fe
.isMemoizeWrapperLSB
= func
.isMemoizeWrapperLSB
;
1323 fe
.isRxDisabled
= func
.isRxDisabled
;
// Record a repo-auth type for the inferred return type, unless the
// function can never return (Bottom).
1325 auto const retTy
= state
.index
.lookup_return_type_raw(&func
);
1326 if (!retTy
.subtypeOf(BBottom
)) {
1327 auto const rat
= make_repo_type(*state
.index
.array_table_builder(), retTy
);
1328 merge_repo_auth_type(fe
.ue(), rat
);
1329 fe
.repoReturnType
= rat
;
// For specialized wait handles, also record the awaited inner type.
1332 if (is_specialized_wait_handle(retTy
)) {
1333 auto const awaitedTy
= wait_handle_inner(retTy
);
1334 if (!awaitedTy
.subtypeOf(BBottom
)) {
1335 auto const rat
= make_repo_type(
1336 *state
.index
.array_table_builder(),
1339 merge_repo_auth_type(fe
.ue(), rat
);
1340 fe
.repoAwaitedReturnType
= rat
;
// Native (HNI) functions carry a declared return type.
1344 if (func
.nativeInfo
) {
1345 fe
.hniReturnType
= func
.nativeInfo
->returnType
;
1347 fe
.retTypeConstraint
= func
.retTypeConstraint
;
// Max stack cells: bytecode stack depth plus storage for iterators and
// class-ref slots.  (An addend line between these appears elided here.)
1349 fe
.maxStackCells
= info
.maxStackDepth
+
1351 fe
.numIterators() * kNumIterCells
+
1352 clsRefCountToCells(fe
.numClsRefSlots());
// Seal the emitter at the current bytecode position.
1354 fe
.finish(fe
.ue().bcPos(), false /* load */);
/*
 * Reassign ids to the function's locals before emission.  The
 * const_cast is deliberate: func is logically mutable at this stage of
 * the pipeline.  (The loop body is largely elided from this excerpt.)
 */
1357 void renumber_locals(const php::Func
& func
) {
1360 for (auto& loc
: const_cast<php::Func
&>(func
).locals
) {
1362 // make sure it's out of range, in case someone tries to read it.
/*
 * Initialize a FuncEmitter for the given function: renumber its locals,
 * then initialize the emitter with the function's source line range and
 * doc comment.
 */
1370 void emit_init_func(FuncEmitter
& fe
, const php::Func
& func
) {
1371 renumber_locals(func
);
// The following are arguments to the emitter's init call — presumably
// fe.init(...); the call line itself is elided from this excerpt.
// First/last source line come from the (line0, line1) srcInfo tuple.
1373 std::get
<0>(func
.srcInfo
.loc
),
1374 std::get
<1>(func
.srcInfo
.loc
),
1378 func
.srcInfo
.docComment
1382 void emit_func(EmitUnitState
& state
, UnitEmitter
& ue
,
1383 FuncEmitter
* fe
, const php::Func
& func
) {
1384 FTRACE(2, " func {}\n", func
.name
->data());
1385 emit_init_func(*fe
, func
);
1386 auto const info
= emit_bytecode(state
, ue
, func
);
1387 emit_finish_func(state
, func
, *fe
, info
);
/*
 * Emit the unit's pseudomain (its top-level code) into the
 * UnitEmitter's main FuncEmitter.
 *
 * NOTE(review): a `UnitEmitter& ue` parameter appears to be elided from
 * this excerpt (the body uses `ue` throughout) — confirm against the
 * full source.
 */
1390 void emit_pseudomain(EmitUnitState
& state
,
1392 const php::Unit
& unit
) {
1393 FTRACE(2, " pseudomain\n");
1394 auto& pm
= *unit
.pseudomain
;
1395 renumber_locals(pm
);
// Initialize the main emitter with the pseudomain's source line range.
1396 ue
.initMain(std::get
<0>(pm
.srcInfo
.loc
),
1397 std::get
<1>(pm
.srcInfo
.loc
));
1398 auto const fe
= ue
.getMain();
1399 auto const info
= emit_bytecode(state
, ue
, pm
);
// Systemlib units are merge-only, with a constant int(1) main return.
1400 if (is_systemlib_part(unit
)) {
1401 ue
.m_mergeOnly
= true;
1402 auto const tv
= make_tv
<KindOfInt64
>(1);
1403 ue
.m_mainReturn
= tv
;
// Tail of a larger assignment/expression — its head (orig lines
// 1404-1405) is elided from this excerpt.
1406 ue
.m_returnSeen
&& ue
.m_mainReturn
.m_type
!= KindOfUninit
;
1409 emit_finish_func(state
, pm
, *fe
, info
);
/*
 * Emit a record declaration: create a RecordEmitter, initialize it with
 * the record's source info, copy user attributes, and emit each field.
 */
1412 void emit_record(UnitEmitter
& ue
, const php::Record
& rec
) {
1413 auto const re
= ue
.newRecordEmitter(rec
.name
->toCppString());
// The following are arguments to the record emitter's init call —
// presumably re->init(...); the call line is elided from this excerpt.
1415 std::get
<0>(rec
.srcInfo
.loc
),
1416 std::get
<1>(rec
.srcInfo
.loc
),
1419 rec
.srcInfo
.docComment
1421 re
->setUserAttributes(rec
.userAttributes
);
// Emit each declared field (loop body elided from this excerpt).
1422 for (auto&& f
: rec
.fields
) {
/*
 * Emit a class into its PreClassEmitter: source info, attributes,
 * interfaces/traits/requirements, constants, methods, and properties
 * (with inferred repo-auth types for private/static properties and
 * closure use vars).
 *
 * NOTE(review): the `UnitEmitter& ue` and `Offset offset` parameter
 * lines appear to be elided from this excerpt (both are used in the
 * body) — confirm against the full source.
 */
1436 void emit_class(EmitUnitState
& state
,
1438 PreClassEmitter
* pce
,
1440 const php::Class
& cls
) {
1441 FTRACE(2, " class: {}\n", cls
.name
->data());
// The following are arguments to the pre-class emitter's init call
// (the call line is elided): line range, hoist offset, parent name,
// and doc comment.
1443 std::get
<0>(cls
.srcInfo
.loc
),
1444 std::get
<1>(cls
.srcInfo
.loc
),
// kInvalidOffset means the class had no DefCls; anchor it at the
// current bytecode position.
1445 offset
== kInvalidOffset
? ue
.bcPos() : offset
,
1447 cls
.parentName
? cls
.parentName
: s_empty
.get(),
1448 cls
.srcInfo
.docComment
1450 pce
->setUserAttributes(cls
.userAttributes
);
// Register implemented interfaces, used traits, requirements, and
// trait precedence/alias rules.
1452 for (auto& x
: cls
.interfaceNames
) pce
->addInterface(x
);
1453 for (auto& x
: cls
.usedTraitNames
) pce
->addUsedTrait(x
);
1454 for (auto& x
: cls
.requirements
) pce
->addClassRequirement(x
);
1455 for (auto& x
: cls
.traitPrecRules
) pce
->addTraitPrecRule(x
);
1456 for (auto& x
: cls
.traitAliasRules
) pce
->addTraitAliasRule(x
);
// Interface vtable slot assigned by the whole-program index.
1458 pce
->setIfaceVtableSlot(state
.index
.lookup_iface_vtable_slot(&cls
));
// Track whether any constant needs an 86cinit initializer method.
1460 bool needs86cinit
= false;
// Builtin classes may have natively-defined constants we must not
// re-emit from the PHP-level declarations.
1462 auto const nativeConsts
= cls
.attrs
& AttrBuiltin
?
1463 Native::getClassConstants(cls
.name
) : nullptr;
1465 for (auto& cconst
: cls
.constants
) {
// Skip constants that the native runtime already provides.
1466 if (nativeConsts
&& nativeConsts
->count(cconst
.name
)) {
// A constant with no value is abstract.
1469 if (!cconst
.val
.hasValue()) {
1470 pce
->addAbstractConstant(
1472 cconst
.typeConstraint
,
// KindOfUninit values are computed at runtime via 86cinit.
1476 needs86cinit
|= cconst
.val
->m_type
== KindOfUninit
;
1480 cconst
.typeConstraint
,
1481 &cconst
.val
.value(),
// Emit each method; drop 86cinit if no constant needs it.
1488 for (auto& m
: cls
.methods
) {
1489 if (!needs86cinit
&& m
->name
== s_86cinit
.get()) continue;
1490 FTRACE(2, " method: {}\n", m
->name
->data());
1491 auto const fe
= ue
.newMethodEmitter(m
->name
, pce
);
1492 emit_init_func(*fe
, *m
);
1494 auto const info
= emit_bytecode(state
, ue
, *m
);
1495 emit_finish_func(state
, *m
, *fe
, info
);
// For closures, fetch the inferred use-var types of __invoke; they map
// onto the closure's leading properties below.
1498 CompactVector
<Type
> useVars
;
1499 if (is_closure(cls
)) {
1500 auto f
= find_method(&cls
, s_invoke
.get());
1501 useVars
= state
.index
.lookup_closure_use_vars(f
, true);
1503 auto uvIt
= useVars
.begin();
// Inferred types for private instance/static properties.
1505 auto const privateProps
= state
.index
.lookup_private_props(&cls
, true);
1506 auto const privateStatics
= state
.index
.lookup_private_statics(&cls
, true);
1507 for (auto& prop
: cls
.properties
) {
// Convert an inferred type to a repo-auth type, merging its litstrs.
1508 auto const makeRat
= [&] (const Type
& ty
) -> RepoAuthType
{
1509 if (!ty
.subtypeOf(BGen
)) return RepoAuthType
{};
1510 if (ty
.subtypeOf(BBottom
)) {
1511 // A property can be TBottom if no sets (nor its initial value) is
1512 // compatible with its type-constraint, or if it's LateInit and there's
1513 // no sets to it. The repo auth type here doesn't particularly matter,
1514 // since such a prop will be inaccessible.
1515 return RepoAuthType
{};
1518 auto const rat
= make_repo_type(*state
.index
.array_table_builder(), ty
);
1519 merge_repo_auth_type(ue
, rat
);
// Look up the inferred type of a private property from a PropState.
1523 auto const privPropTy
= [&] (const PropState
& ps
) -> Type
{
1524 if (is_closure(cls
)) {
1525 // For closures, use variables will be the first properties of the
1526 // closure object, in declaration order
1527 if (uvIt
!= useVars
.end()) return *uvIt
++;
1531 auto it
= ps
.find(prop
.name
);
1532 if (it
== end(ps
)) return Type
{};
1533 return it
->second
.ty
;
// Pick the inferred type source by the property's access/static attrs.
1537 auto const attrs
= prop
.attrs
;
1538 if (attrs
& AttrPrivate
) {
1539 propTy
= privPropTy((attrs
& AttrStatic
) ? privateStatics
: privateProps
);
1540 } else if ((attrs
& AttrPublic
) && (attrs
& AttrStatic
)) {
1541 propTy
= state
.index
.lookup_public_static(Context
{}, &cls
, prop
.name
);
// Part of the addProperty call argument list (surrounding lines are
// elided from this excerpt).
1548 prop
.typeConstraint
,
// All closure use vars must have been consumed by the property loop.
1555 assert(uvIt
== useVars
.end());
1557 pce
->setEnumBaseTy(cls
.enumBaseTy
);
1560 void emit_typealias(UnitEmitter
& ue
, const php::TypeAlias
& alias
) {
1561 auto const id
= ue
.addTypeAlias(alias
);
1562 ue
.pushMergeableTypeAlias(HPHP::Unit::MergeKind::TypeAlias
, id
);
1565 //////////////////////////////////////////////////////////////////////
/*
 * Emit a whole analyzed unit into a fresh UnitEmitter: pseudomain
 * first, then top-level funcs, then classes/funcs reachable via
 * DefCls/DefFunc (discovered during bytecode emission), then type
 * aliases and records.  (The function's tail, including its return, is
 * elided from this excerpt.)
 */
1569 std::unique_ptr
<UnitEmitter
> emit_unit(const Index
& index
,
1570 const php::Unit
& unit
) {
// Arguments to a trace-bump guard (surrounding lines elided): systemlib
// units trace at a bumped level.
1572 Trace::hhbbc_emit
, kSystemLibBump
, is_systemlib_part(unit
)
1575 assert(check(unit
));
// Create the UnitEmitter keyed by the unit's SHA1.
1577 auto ue
= std::make_unique
<UnitEmitter
>(unit
.sha1
,
1579 Native::s_noNativeFuncs
,
1581 FTRACE(1, " unit {}\n", unit
.filename
->data());
// Copy unit-level metadata onto the emitter.
1582 ue
->m_filepath
= unit
.filename
;
1583 ue
->m_isHHFile
= unit
.isHHFile
;
1584 ue
->m_metaData
= unit
.metaData
;
1585 ue
->m_fileAttributes
= unit
.fileAttributes
;
// Per-unit emission state; classOffsets records where each class's
// DefCls occurs (kInvalidOffset until seen).
1587 EmitUnitState state
{ index
, &unit
};
1588 state
.classOffsets
.resize(unit
.classes
.size(), kInvalidOffset
);
1590 emit_pseudomain(state
, *ue
, unit
);
1592 std::vector
<std::unique_ptr
<FuncEmitter
> > top_fes
;
1594 * Top level funcs are always defined when the unit is loaded, and
1595 * don't have a DefFunc bytecode. Process them up front.
1597 for (size_t id
= 0; id
< unit
.funcs
.size(); ++id
) {
1598 auto const f
= unit
.funcs
[id
].get();
1599 assertx(f
!= unit
.pseudomain
.get());
1600 if (!f
->top
) continue;
1601 top_fes
.push_back(std::make_unique
<FuncEmitter
>(*ue
, -1, -1, f
->name
));
1602 emit_func(state
, *ue
, top_fes
.back().get(), *f
);
1606 * Find any top-level classes that need to be included due to
1607 * hoistability, even though the corresponding DefCls was not
1610 for (size_t id
= 0; id
< unit
.classes
.size(); ++id
) {
// Skip classes whose DefCls was already seen.
1611 if (state
.classOffsets
[id
] != kInvalidOffset
) continue;
1612 auto const c
= unit
.classes
[id
].get();
1613 if (c
->hoistability
!= PreClass::MaybeHoistable
&&
1614 c
->hoistability
!= PreClass::AlwaysHoistable
) {
1617 // Closures are AlwaysHoistable; but there's no need to include
1618 // them unless there's a reachable CreateCl.
1619 if (is_closure(*c
)) continue;
1620 recordClass(state
, *ue
, id
);
// Drain the class and func work queues; emitting a class can enqueue
// more classes/funcs (enclosing do/while lines are elided here).
1623 size_t pceId
= 0, feId
= 0;
1625 // Note that state.pceInfo can grow inside the loop
1626 while (pceId
< state
.pceInfo
.size()) {
1627 auto const& pceInfo
= state
.pceInfo
[pceId
++];
1628 auto const& c
= unit
.classes
[pceInfo
.origId
];
1629 emit_class(state
, *ue
, pceInfo
.pce
,
1630 state
.classOffsets
[pceInfo
.origId
], *c
);
1633 while (feId
< state
.feInfo
.size()) {
1634 auto const& feInfo
= state
.feInfo
[feId
++];
1635 // DefFunc ids are off by one wrt unit.funcs because we don't
1636 // store the pseudomain there.
1637 auto const& f
= unit
.funcs
[feInfo
.origId
- 1];
1638 emit_func(state
, *ue
, feInfo
.fe
, *f
);
1640 } while (pceId
< state
.pceInfo
.size());
// Emit recorded type aliases, then records.
1642 for (auto tid
: state
.typeAliasInfo
) {
1643 emit_typealias(*ue
, *unit
.typeAliases
[tid
]);
1646 for (size_t id
= 0; id
< unit
.records
.size(); ++id
) {
1647 emit_record(*ue
, *unit
.records
[id
]);
1650 // Top level funcs need to go after any non-top level funcs. See
1651 // Unit::merge for details.
1652 for (auto& fe
: top_fes
) ue
->appendTopEmitter(std::move(fe
));
1657 //////////////////////////////////////////////////////////////////////