+----------------------------------------------------------------------+
+----------------------------------------------------------------------+
| Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
+----------------------------------------------------------------------+
| This source file is subject to version 3.01 of the PHP license, |
| that is bundled with this package in the file LICENSE, and is |
| available through the world-wide-web at the following url: |
| http://www.php.net/license/3_01.txt |
| If you did not receive a copy of the PHP license and are unable to |
| obtain it through the world-wide-web, please send a note to |
| license@php.net so we can mail you a copy immediately. |
+----------------------------------------------------------------------+
16 #include "hphp/hhbbc/emit.h"
23 #include <type_traits>
25 #include <folly/gen/Base.h>
26 #include <folly/Conv.h>
27 #include <folly/Optional.h>
28 #include <folly/Memory.h>
30 #include "hphp/hhbbc/cfg.h"
31 #include "hphp/hhbbc/class-util.h"
32 #include "hphp/hhbbc/context.h"
33 #include "hphp/hhbbc/func-util.h"
34 #include "hphp/hhbbc/index.h"
35 #include "hphp/hhbbc/options.h"
36 #include "hphp/hhbbc/representation.h"
37 #include "hphp/hhbbc/unit-util.h"
39 #include "hphp/runtime/base/repo-auth-type-array.h"
40 #include "hphp/runtime/base/repo-auth-type-codec.h"
41 #include "hphp/runtime/base/repo-auth-type.h"
42 #include "hphp/runtime/base/tv-comparisons.h"
44 #include "hphp/runtime/vm/bytecode.h"
45 #include "hphp/runtime/vm/func-emitter.h"
46 #include "hphp/runtime/vm/native.h"
47 #include "hphp/runtime/vm/preclass-emitter.h"
48 #include "hphp/runtime/vm/record-emitter.h"
49 #include "hphp/runtime/vm/unit-emitter.h"
51 namespace HPHP
{ namespace HHBBC
{
53 TRACE_SET_MOD(hhbbc_emit
);
57 //////////////////////////////////////////////////////////////////////
59 const StaticString
s_invoke("__invoke");
61 //////////////////////////////////////////////////////////////////////
73 struct EmitUnitState
{
74 explicit EmitUnitState(const Index
& index
, const php::Unit
* unit
) :
75 index(index
), unit(unit
) {}
78 * Access to the Index for this program.
83 * Access to the unit we're emitting
85 const php::Unit
* unit
;
88 * While emitting bytecode, we keep track of the classes and funcs
91 std::vector
<Offset
> classOffsets
;
92 std::vector
<PceInfo
> pceInfo
;
93 std::vector
<FeInfo
> feInfo
;
94 std::vector
<Id
> typeAliasInfo
;
95 std::vector
<Id
> constantInfo
;
97 std::unordered_set
<Id
> processedTypeAlias
;
98 std::unordered_set
<Id
> processedConstant
;
102 * Some bytecodes need to be mutated before being emitted. Pass those
103 * bytecodes by value to their respective emit_op functions.
106 struct OpInfoHelper
{
107 static constexpr bool by_value
=
108 T::op
== Op::DefCls
||
109 T::op
== Op::DefClsNop
||
110 T::op
== Op::CreateCl
||
111 T::op
== Op::DefCns
||
112 T::op
== Op::DefTypeAlias
;
115 using type
= typename
std::conditional
<by_value
, T
, const T
&>::type
;
119 using OpInfo
= typename OpInfoHelper
<T
>::type
;
122 * Helper to conditionally call fun on data provided data is of the
125 template<Op op
, typename F
, typename Bc
>
126 std::enable_if_t
<std::remove_reference_t
<Bc
>::op
== op
>
127 caller(F
&& fun
, Bc
&& data
) { fun(std::forward
<Bc
>(data
)); }
129 template<Op op
, typename F
, typename Bc
>
130 std::enable_if_t
<std::remove_reference_t
<Bc
>::op
!= op
>
133 Id
recordClass(EmitUnitState
& euState
, UnitEmitter
& ue
, Id id
) {
134 auto cls
= euState
.unit
->classes
[id
].get();
135 euState
.pceInfo
.push_back(
136 { ue
.newPreClassEmitter(cls
->name
->toCppString(), cls
->hoistability
), id
}
138 return euState
.pceInfo
.back().pce
->id();
141 //////////////////////////////////////////////////////////////////////
143 php::SrcLoc
srcLoc(const php::Func
& func
, int32_t ix
) {
144 if (ix
< 0) return php::SrcLoc
{};
145 auto const unit
= func
.originalUnit
? func
.originalUnit
: func
.unit
;
146 return unit
->srcLocs
[ix
];
150 * Order the blocks for bytecode emission.
152 * Rules about block order:
154 * - Each DV funclet must have all of its blocks contiguous, with the
157 * - Main entry point must be the first block.
159 * It is not a requirement, but we attempt to locate all the DV entry
160 * points after the rest of the primary function body. The normal
161 * case for DV initializers is that each one falls through to the
162 * next, with the block jumping back to the main entry point.
164 std::vector
<BlockId
> order_blocks(const php::Func
& f
) {
165 auto sorted
= rpoSortFromMain(f
);
167 // Get the DV blocks, without the rest of the primary function body,
168 // and then add them to the end of sorted.
169 auto const dvBlocks
= [&] {
170 auto withDVs
= rpoSortAddDVs(f
);
172 std::find(begin(withDVs
), end(withDVs
), sorted
.front()),
177 sorted
.insert(end(sorted
), begin(dvBlocks
), end(dvBlocks
));
179 FTRACE(2, " block order:{}\n",
182 for (auto const bid
: sorted
) {
184 ret
+= folly::to
<std::string
>(bid
);
192 // While emitting bytecode, we learn about some metadata that will
193 // need to be registered in the FuncEmitter.
195 struct JmpFixup
{ Offset instrOff
; Offset jmpImmedOff
; };
199 : offset(kInvalidOffset
)
200 , past(kInvalidOffset
)
204 // The offset of the block, if we've already emitted it.
205 // Otherwise kInvalidOffset.
208 // The offset past the end of this block.
211 // How many catch regions the jump at the end of this block is leaving.
212 // 0 if there is no jump or if the jump is to the same catch region or a
216 // When we emit a forward jump to a block we haven't seen yet, we
217 // write down where the jump was so we can fix it up when we get
219 std::vector
<JmpFixup
> forwardJumps
;
221 // When we see a forward jump to a block, we record the stack
222 // depth at the jump site here. This is needed to track
223 // currentStackDepth correctly (and we also assert all the jumps
224 // have the same depth).
225 folly::Optional
<uint32_t> expectedStackDepth
;
228 std::vector
<BlockId
> blockOrder
;
229 uint32_t maxStackDepth
;
231 std::vector
<BlockInfo
> blockInfo
;
234 using ExnNodePtr
= php::ExnNode
*;
236 bool handleEquivalent(const php::Func
& func
, ExnNodeId eh1
, ExnNodeId eh2
) {
237 auto entry
= [&] (ExnNodeId eid
) {
238 return func
.exnNodes
[eid
].region
.catchEntry
;
242 assertx(eh1
!= NoExnNodeId
&&
243 eh2
!= NoExnNodeId
&&
244 func
.exnNodes
[eh1
].depth
== func
.exnNodes
[eh2
].depth
);
245 if (entry(eh1
) != entry(eh2
)) return false;
246 eh1
= func
.exnNodes
[eh1
].parent
;
247 eh2
= func
.exnNodes
[eh2
].parent
;
253 // The common parent P of eh1 and eh2 is the deepest region such that
254 // eh1 and eh2 are both handle-equivalent to P or a child of P
255 ExnNodeId
commonParent(const php::Func
& func
, ExnNodeId eh1
, ExnNodeId eh2
) {
256 if (eh1
== NoExnNodeId
|| eh2
== NoExnNodeId
) return NoExnNodeId
;
257 while (func
.exnNodes
[eh1
].depth
> func
.exnNodes
[eh2
].depth
) {
258 eh1
= func
.exnNodes
[eh1
].parent
;
260 while (func
.exnNodes
[eh2
].depth
> func
.exnNodes
[eh1
].depth
) {
261 eh2
= func
.exnNodes
[eh2
].parent
;
263 while (!handleEquivalent(func
, eh1
, eh2
)) {
264 eh1
= func
.exnNodes
[eh1
].parent
;
265 eh2
= func
.exnNodes
[eh2
].parent
;
271 s_hhbbc_fail_verification("__hhvm_intrinsics\\hhbbc_fail_verification");
273 EmitBcInfo
emit_bytecode(EmitUnitState
& euState
,
275 const php::Func
& func
) {
277 auto& blockInfo
= ret
.blockInfo
;
278 blockInfo
.resize(func
.blocks
.size());
280 // Track the stack depth while emitting to determine maxStackDepth.
281 int32_t currentStackDepth
{ 0 };
283 // Temporary buffer for vector immediates. (Hoisted so it's not
284 // allocated in the loop.)
285 std::vector
<uint8_t> immVec
;
287 // Offset of the last emitted bytecode.
288 Offset lastOff
{ 0 };
290 bool traceBc
= false;
294 SCOPE_ASSERT_DETAIL("emit") {
296 for (auto bid
: func
.blockRange()) {
300 show(func
, *func
.blocks
[bid
])
307 auto const pseudomain
= is_pseudomain(&func
);
308 auto process_mergeable
= [&] (const Bytecode
& bc
) {
309 if (!pseudomain
) return;
313 if (!ue
.m_returnSeen
) {
314 auto const& cls
= euState
.unit
->classes
[
315 bc
.op
== Op::DefCls
? bc
.DefCls
.arg1
: bc
.DefClsNop
.arg1
];
316 if (cls
->hoistability
== PreClass::NotHoistable
) {
317 cls
->hoistability
= PreClass::Mergeable
;
322 case Op::AssertRATStk
:
327 if (ue
.m_returnSeen
|| tos
.subtypeOf(BBottom
)) break;
328 auto cid
= bc
.DefCns
.arg1
;
329 ue
.pushMergeableId(Unit::MergeKind::Define
, cid
);
330 euState
.processedConstant
.insert(cid
);
333 case Op::DefTypeAlias
: {
334 auto tid
= bc
.DefTypeAlias
.arg1
;
335 ue
.pushMergeableId(Unit::MergeKind::TypeAlias
, tid
);
336 euState
.processedTypeAlias
.insert(tid
);
339 case Op::DefRecord
: {
340 auto rid
= bc
.DefRecord
.arg1
;
341 ue
.pushMergeableRecord(rid
);
345 case Op::Null
: tos
= TInitNull
; return;
346 case Op::True
: tos
= TTrue
; return;
347 case Op::False
: tos
= TFalse
; return;
348 case Op::Int
: tos
= ival(bc
.Int
.arg1
); return;
349 case Op::Double
: tos
= dval(bc
.Double
.dbl1
); return;
350 case Op::String
: tos
= sval(bc
.String
.str1
); return;
351 case Op::Vec
: tos
= vec_val(bc
.Vec
.arr1
); return;
352 case Op::Dict
: tos
= dict_val(bc
.Dict
.arr1
); return;
353 case Op::Keyset
: tos
= keyset_val(bc
.Keyset
.arr1
); return;
354 case Op::Array
: tos
= aval(bc
.Array
.arr1
); return;
359 if (ue
.m_returnSeen
|| tos
.subtypeOf(BBottom
)) break;
362 ue
.m_returnSeen
= true;
363 ue
.m_mainReturn
= *top
;
370 ue
.m_returnSeen
= true;
371 ue
.m_mainReturn
= make_tv
<KindOfUninit
>();
375 auto const map_local
= [&] (LocalId id
) {
376 if (id
>= func
.locals
.size()) return id
;
377 auto const loc
= func
.locals
[id
];
378 assertx(!loc
.killed
);
379 assertx(loc
.id
<= id
);
384 auto const map_local_name
= [&] (NamedLocal nl
) {
385 nl
.id
= map_local(nl
.id
);
386 if (nl
.name
== kInvalidLocalName
) return nl
;
387 if (nl
.name
>= func
.locals
.size()) return nl
;
388 auto const loc
= func
.locals
[nl
.name
];
390 nl
.name
= kInvalidLocalName
;
393 assert(!loc
.unusedName
);
394 nl
.name
= loc
.nameId
;
398 auto set_expected_depth
= [&] (BlockId block
) {
399 auto& info
= blockInfo
[block
];
401 if (info
.expectedStackDepth
) {
402 assert(*info
.expectedStackDepth
== currentStackDepth
);
404 info
.expectedStackDepth
= currentStackDepth
;
408 auto make_member_key
= [&] (MKey mkey
) {
409 switch (mkey
.mcode
) {
411 return MemberKey
{mkey
.mcode
, static_cast<int32_t>(mkey
.idx
)};
413 return MemberKey
{mkey
.mcode
, map_local_name(mkey
.local
)};
414 case MET
: case MPT
: case MQT
:
415 return MemberKey
{mkey
.mcode
, mkey
.litstr
};
417 return MemberKey
{mkey
.mcode
, mkey
.int64
};
424 auto emit_inst
= [&] (const Bytecode
& inst
) {
425 process_mergeable(inst
);
426 auto const startOffset
= ue
.bcPos();
427 lastOff
= startOffset
;
429 FTRACE(4, " emit: {} -- {} @ {}\n", currentStackDepth
, show(&func
, inst
),
430 show(srcLoc(func
, inst
.srcLoc
)));
432 if (options
.TraceBytecodes
.count(inst
.op
)) traceBc
= true;
434 auto emit_vsa
= [&] (const CompactVector
<LSString
>& keys
) {
435 auto n
= keys
.size();
437 for (size_t i
= 0; i
< n
; ++i
) {
438 ue
.emitInt32(ue
.mergeLitstr(keys
[i
]));
442 auto emit_branch
= [&] (BlockId id
) {
443 auto& info
= blockInfo
[id
];
444 if (info
.offset
!= kInvalidOffset
) {
445 ue
.emitInt32(info
.offset
- startOffset
);
447 info
.forwardJumps
.push_back({ startOffset
, ue
.bcPos() });
452 auto emit_switch
= [&] (const SwitchTab
& targets
) {
453 ue
.emitIVA(targets
.size());
454 for (auto t
: targets
) {
455 set_expected_depth(t
);
460 auto emit_sswitch
= [&] (const SSwitchTab
& targets
) {
461 ue
.emitIVA(targets
.size());
462 for (size_t i
= 0; i
< targets
.size() - 1; ++i
) {
463 set_expected_depth(targets
[i
].second
);
464 ue
.emitInt32(ue
.mergeLitstr(targets
[i
].first
));
465 emit_branch(targets
[i
].second
);
468 set_expected_depth(targets
[targets
.size() - 1].second
);
469 emit_branch(targets
[targets
.size() - 1].second
);
472 auto emit_srcloc
= [&] {
473 auto const sl
= srcLoc(func
, inst
.srcLoc
);
474 if (!sl
.isValid()) return;
475 Location::Range
loc(sl
.start
.line
, sl
.start
.col
,
476 sl
.past
.line
, sl
.past
.col
);
477 ue
.recordSourceLocation(loc
, startOffset
);
480 auto pop
= [&] (int32_t n
) {
481 currentStackDepth
-= n
;
482 assert(currentStackDepth
>= 0);
484 auto push
= [&] (int32_t n
) {
485 currentStackDepth
+= n
;
487 std::max
<uint32_t>(ret
.maxStackDepth
, currentStackDepth
);
490 auto ret_assert
= [&] { assert(currentStackDepth
== inst
.numPop()); };
492 auto clsid_impl
= [&] (uint32_t& id
, bool closure
) {
493 if (euState
.classOffsets
[id
] != kInvalidOffset
) {
494 always_assert(closure
);
495 for (auto const& elm
: euState
.pceInfo
) {
496 if (elm
.origId
== id
) {
501 always_assert(false);
503 euState
.classOffsets
[id
] = startOffset
;
504 id
= recordClass(euState
, ue
, id
);
506 auto defcls
= [&] (auto& data
) { clsid_impl(data
.arg1
, false); };
507 auto defclsnop
= [&] (auto& data
) { clsid_impl(data
.arg1
, false); };
508 auto createcl
= [&] (auto& data
) { clsid_impl(data
.arg2
, true); };
509 auto deftypealias
= [&] (auto& data
) {
510 euState
.typeAliasInfo
.push_back(data
.arg1
);
511 data
.arg1
= euState
.typeAliasInfo
.size() - 1;
513 auto defconstant
= [&] (auto& data
) {
514 euState
.constantInfo
.push_back(data
.arg1
);
515 data
.arg1
= euState
.constantInfo
.size() - 1;
518 auto emit_lar
= [&](const LocalRange
& range
) {
519 encodeLocalRange(ue
, HPHP::LocalRange
{
520 map_local(range
.first
), range
.count
524 auto emit_ita
= [&](IterArgs ita
) {
525 if (ita
.hasKey()) ita
.keyId
= map_local(ita
.keyId
);
526 ita
.valId
= map_local(ita
.valId
);
527 encodeIterArgs(ue
, ita
);
530 #define IMM_BLA(n) emit_switch(data.targets);
531 #define IMM_SLA(n) emit_sswitch(data.targets);
532 #define IMM_IVA(n) ue.emitIVA(data.arg##n);
533 #define IMM_I64A(n) ue.emitInt64(data.arg##n);
534 #define IMM_LA(n) ue.emitIVA(map_local(data.loc##n));
535 #define IMM_NLA(n) ue.emitNamedLocal(map_local_name(data.nloc##n));
536 #define IMM_ILA(n) ue.emitIVA(map_local(data.loc##n));
537 #define IMM_IA(n) ue.emitIVA(data.iter##n);
538 #define IMM_DA(n) ue.emitDouble(data.dbl##n);
539 #define IMM_SA(n) ue.emitInt32(ue.mergeLitstr(data.str##n));
540 #define IMM_RATA(n) encodeRAT(ue, data.rat);
541 #define IMM_AA(n) ue.emitInt32(ue.mergeArray(data.arr##n));
542 #define IMM_OA_IMPL(n) ue.emitByte(static_cast<uint8_t>(data.subop##n));
543 #define IMM_OA(type) IMM_OA_IMPL
544 #define IMM_BA(n) targets[numTargets++] = data.target##n; \
545 emit_branch(data.target##n);
546 #define IMM_VSA(n) emit_vsa(data.keys);
547 #define IMM_KA(n) encode_member_key(make_member_key(data.mkey), ue);
548 #define IMM_LAR(n) emit_lar(data.locrange);
549 #define IMM_ITA(n) emit_ita(data.ita);
550 #define IMM_FCA(n) encodeFCallArgs( \
551 ue, data.fca.base(), \
552 data.fca.enforceInOut(), \
555 [&] (int numBytes, const uint8_t* inOut) { \
556 encodeFCallArgsIO(ue, numBytes, inOut); \
560 data.fca.asyncEagerTarget() != NoBlockId, \
562 set_expected_depth(data.fca.asyncEagerTarget()); \
563 emit_branch(data.fca.asyncEagerTarget()); \
565 data.fca.context() != nullptr, \
567 ue.emitInt32(ue.mergeLitstr(data.fca.context()));\
569 if (!data.fca.hasUnpack()) ret.containsCalls = true;
572 #define IMM_ONE(x) IMM_##x(1)
573 #define IMM_TWO(x, y) IMM_##x(1); IMM_##y(2);
574 #define IMM_THREE(x, y, z) IMM_TWO(x, y); IMM_##z(3);
575 #define IMM_FOUR(x, y, z, n) IMM_THREE(x, y, z); IMM_##n(4);
576 #define IMM_FIVE(x, y, z, n, m) IMM_FOUR(x, y, z, n); IMM_##m(5);
577 #define IMM_SIX(x, y, z, n, m, o) IMM_FIVE(x, y, z, n, m); IMM_##o(6);
580 #define POP_ONE(x) pop(1);
581 #define POP_TWO(x, y) pop(2);
582 #define POP_THREE(x, y, z) pop(3);
584 #define POP_MFINAL pop(data.arg1);
585 #define POP_C_MFINAL(n) pop(n); pop(data.arg1);
586 #define POP_CMANY pop(data.arg##1);
587 #define POP_SMANY pop(data.keys.size());
588 #define POP_CUMANY pop(data.arg##1);
589 #define POP_CMANY_U3 pop(data.arg1 + 3);
590 #define POP_CALLNATIVE pop(data.arg1 + data.arg3);
591 #define POP_FCALL(nin, nobj) \
592 pop(nin + data.fca.numInputs() + 2 + data.fca.numRets());
595 #define PUSH_ONE(x) push(1);
596 #define PUSH_TWO(x, y) push(2);
597 #define PUSH_THREE(x, y, z) push(3);
598 #define PUSH_CMANY push(data.arg1);
599 #define PUSH_FCALL push(data.fca.numRets());
600 #define PUSH_CALLNATIVE push(data.arg3 + 1);
602 #define O(opcode, imms, inputs, outputs, flags) \
603 auto emit_##opcode = [&] (OpInfo<bc::opcode> data) { \
604 if (RuntimeOption::EnableIntrinsicsExtension) { \
605 if (Op::opcode == Op::FCallBuiltin && \
606 inst.FCallBuiltin.str4->isame( \
607 s_hhbbc_fail_verification.get())) { \
608 ue.emitOp(Op::CheckProp); \
610 ue.mergeLitstr(inst.FCallBuiltin.str4)); \
611 ue.emitOp(Op::PopC); \
614 caller<Op::DefCls>(defcls, data); \
615 caller<Op::DefClsNop>(defclsnop, data); \
616 caller<Op::CreateCl>(createcl, data); \
617 caller<Op::DefTypeAlias>(deftypealias, data); \
618 caller<Op::DefCns>(defconstant, data); \
620 if (isRet(Op::opcode)) ret_assert(); \
621 ue.emitOp(Op::opcode); \
624 size_t numTargets = 0; \
625 std::array<BlockId, kMaxHhbcImms> targets; \
627 if (Op::opcode == Op::MemoGet) { \
629 assertx(numTargets == 1); \
630 set_expected_depth(targets[0]); \
632 } else if (Op::opcode == Op::MemoGetEager) { \
634 assertx(numTargets == 2); \
635 set_expected_depth(targets[0]); \
637 set_expected_depth(targets[1]); \
641 for (size_t i = 0; i < numTargets; ++i) { \
642 set_expected_depth(targets[i]); \
646 if (flags & TF) currentStackDepth = 0; \
692 #undef POP_CALLNATIVE
703 #undef PUSH_CALLNATIVE
705 #define O(opcode, ...) \
707 if (Op::opcode != Op::Nop) emit_##opcode(inst.opcode); \
709 switch (inst
.op
) { OPCODES
}
713 ret
.blockOrder
= order_blocks(func
);
714 auto blockIt
= begin(ret
.blockOrder
);
715 auto const endBlockIt
= end(ret
.blockOrder
);
716 for (; blockIt
!= endBlockIt
; ++blockIt
) {
718 auto& info
= blockInfo
[bid
];
719 auto const b
= func
.blocks
[bid
].get();
721 info
.offset
= ue
.bcPos();
722 FTRACE(2, " block {}: {}\n", bid
, info
.offset
);
724 for (auto& fixup
: info
.forwardJumps
) {
725 ue
.emitInt32(info
.offset
- fixup
.instrOff
, fixup
.jmpImmedOff
);
728 if (!info
.expectedStackDepth
) {
729 // unreachable, or entry block
730 info
.expectedStackDepth
= b
->catchEntry
? 1 : 0;
733 currentStackDepth
= *info
.expectedStackDepth
;
735 auto fallthrough
= b
->fallthrough
;
736 auto end
= b
->hhbcs
.end();
739 if (is_single_nop(*b
)) {
740 if (blockIt
== begin(ret
.blockOrder
)) {
741 // If the first block is just a Nop, this means that there is
742 // a jump to the second block from somewhere in the
743 // function. We don't want this, so we change this nop to an
744 // EntryNop so it doesn't get optimized away
745 emit_inst(bc_with_loc(b
->hhbcs
.front().srcLoc
, bc::EntryNop
{}));
748 // If the block ends with JmpZ or JmpNZ to the next block, flip
749 // the condition to make the fallthrough the next block
750 if (b
->hhbcs
.back().op
== Op::JmpZ
||
751 b
->hhbcs
.back().op
== Op::JmpNZ
) {
752 auto const& bc
= b
->hhbcs
.back();
754 bc
.op
== Op::JmpNZ
? bc
.JmpNZ
.target1
: bc
.JmpZ
.target1
;
755 if (std::next(blockIt
) != endBlockIt
&& blockIt
[1] == target
) {
756 fallthrough
= target
;
762 for (auto iit
= b
->hhbcs
.begin(); iit
!= end
; ++iit
) emit_inst(*iit
);
764 if (end
->op
== Op::JmpNZ
) {
765 emit_inst(bc_with_loc(end
->srcLoc
, bc::JmpZ
{ b
->fallthrough
}));
767 emit_inst(bc_with_loc(end
->srcLoc
, bc::JmpNZ
{ b
->fallthrough
}));
772 info
.past
= ue
.bcPos();
774 if (fallthrough
!= NoBlockId
) {
775 set_expected_depth(fallthrough
);
776 if (std::next(blockIt
) == endBlockIt
||
777 blockIt
[1] != fallthrough
) {
778 if (b
->fallthroughNS
) {
779 emit_inst(bc::JmpNS
{ fallthrough
});
781 emit_inst(bc::Jmp
{ fallthrough
});
784 auto const parent
= commonParent(func
,
785 func
.blocks
[fallthrough
]->exnNodeId
,
788 auto depth
= [&] (ExnNodeId eid
) {
789 return eid
== NoExnNodeId
? 0 : func
.exnNodes
[eid
].depth
;
791 // If we are in an exn region we pop from the current region to the
792 // common parent. If the common parent is null, we pop all regions
793 info
.regionsToPop
= depth(b
->exnNodeId
) - depth(parent
);
794 assert(info
.regionsToPop
>= 0);
795 FTRACE(4, " popped catch regions: {}\n", info
.regionsToPop
);
799 if (b
->throwExit
!= NoBlockId
) {
800 FTRACE(4, " throw: {}\n", b
->throwExit
);
802 if (fallthrough
!= NoBlockId
) {
803 FTRACE(4, " fallthrough: {}\n", fallthrough
);
805 FTRACE(2, " block {} end: {}\n", bid
, info
.past
);
809 FTRACE(0, "TraceBytecode (emit): {}::{} in {}\n",
810 func
.cls
? func
.cls
->name
->data() : "",
811 func
.name
, func
.unit
->filename
);
817 void emit_locals_and_params(FuncEmitter
& fe
,
818 const php::Func
& func
,
819 const EmitBcInfo
& info
) {
821 for (auto const& loc
: func
.locals
) {
822 if (loc
.id
< func
.params
.size()) {
825 assert(!loc
.unusedName
);
826 auto& param
= func
.params
[id
];
827 FuncEmitter::ParamInfo pinfo
;
828 pinfo
.defaultValue
= param
.defaultValue
;
829 pinfo
.typeConstraint
= param
.typeConstraint
;
830 pinfo
.userType
= param
.userTypeConstraint
;
831 pinfo
.upperBounds
= param
.upperBounds
;
832 pinfo
.phpCode
= param
.phpCode
;
833 pinfo
.userAttributes
= param
.userAttributes
;
834 pinfo
.builtinType
= param
.builtinType
;
835 if (param
.inout
) pinfo
.setFlag(Func::ParamInfo::Flags::InOut
);
836 if (param
.isVariadic
) pinfo
.setFlag(Func::ParamInfo::Flags::Variadic
);
837 fe
.appendParam(func
.locals
[id
].name
, pinfo
);
838 auto const dv
= param
.dvEntryPoint
;
839 if (dv
!= NoBlockId
) {
840 fe
.params
[id
].funcletOff
= info
.blockInfo
[dv
].offset
;
844 if (loc
.killed
) continue;
845 if (loc
.name
&& !loc
.unusedName
&& loc
.name
) {
846 fe
.allocVarId(loc
.name
);
847 assert(fe
.lookupVarId(loc
.name
) == id
);
848 assert(loc
.id
== id
);
851 fe
.allocUnnamedLocal();
856 for (auto const& loc
: func
.locals
) {
857 if (loc
.killed
&& !loc
.unusedName
&& loc
.name
) {
858 fe
.allocVarId(loc
.name
, true);
863 for (auto const& loc
: func
.locals
) {
865 assertx(loc
.id
< fe
.numLocals());
867 if (!loc
.unusedName
&& loc
.name
) {
868 assertx(loc
.nameId
< fe
.numNamedLocals());
873 assert(fe
.numLocals() == id
);
874 fe
.setNumIterators(func
.numIters
);
878 const php::ExnNode
* node
;
884 template<class BlockInfo
, class ParentIndexMap
>
885 void emit_eh_region(FuncEmitter
& fe
,
886 const EHRegion
* region
,
887 const BlockInfo
& blockInfo
,
888 ParentIndexMap
& parentIndexMap
) {
889 FTRACE(2, " func {}: ExnNode {}\n", fe
.name
, region
->node
->idx
);
891 auto const unreachable
= [&] (const php::ExnNode
& node
) {
892 return blockInfo
[node
.region
.catchEntry
].offset
== kInvalidOffset
;
895 // A region on a single empty block.
896 if (region
->start
== region
->past
) {
897 FTRACE(2, " Skipping\n");
899 } else if (unreachable(*region
->node
)) {
900 FTRACE(2, " Unreachable\n");
904 FTRACE(2, " Process @ {}-{}\n", region
->start
, region
->past
);
906 auto& eh
= fe
.addEHEnt();
907 eh
.m_base
= region
->start
;
908 eh
.m_past
= region
->past
;
909 assert(eh
.m_past
>= eh
.m_base
);
910 assert(eh
.m_base
!= kInvalidOffset
&& eh
.m_past
!= kInvalidOffset
);
912 // An unreachable parent won't be emitted (and thus its offset won't be set),
913 // so find the closest reachable one.
914 auto parent
= region
->parent
;
915 while (parent
&& unreachable(*parent
->node
)) parent
= parent
->parent
;
917 auto parentIt
= parentIndexMap
.find(parent
);
918 assert(parentIt
!= end(parentIndexMap
));
919 eh
.m_parentIndex
= parentIt
->second
;
921 eh
.m_parentIndex
= -1;
923 parentIndexMap
[region
] = fe
.ehtab
.size() - 1;
925 auto const& cr
= region
->node
->region
;
926 eh
.m_handler
= blockInfo
[cr
.catchEntry
].offset
;
927 eh
.m_end
= kInvalidOffset
;
928 eh
.m_iterId
= cr
.iterId
;
930 assert(eh
.m_handler
!= kInvalidOffset
);
933 void exn_path(const php::Func
& func
,
934 std::vector
<const php::ExnNode
*>& ret
,
936 if (id
== NoExnNodeId
) return;
937 auto const& n
= func
.exnNodes
[id
];
938 exn_path(func
, ret
, n
.parent
);
// Return the count of shared elements in the front of two forward
// ranges (i.e. the length of their common prefix).
template<class ForwardRange1, class ForwardRange2>
size_t shared_prefix(ForwardRange1& r1, ForwardRange2& r2) {
  auto itA = begin(r1);
  auto itB = begin(r2);
  auto const endA = end(r1);
  auto const endB = end(r2);
  size_t count = 0;
  for (; itA != endA && itB != endB && *itA == *itB; ++itA, ++itB) {
    ++count;
  }
  return count;
}
958 * Traverse the actual block layout, and find out the intervals for
959 * each exception region in the tree.
961 * The basic idea here is that we haven't constrained block layout
962 * based on the exception tree, but adjacent blocks are still
963 * reasonably likely to have the same ExnNode. Try to coalesce the EH
964 * regions we create for in those cases.
966 void emit_ehent_tree(FuncEmitter
& fe
, const php::Func
& func
,
967 const EmitBcInfo
& info
) {
970 std::vector
<std::unique_ptr
<EHRegion
>>
974 * While walking over the blocks in layout order, we track the set
975 * of "active" exnNodes. This are a list of exnNodes that inherit
976 * from each other. When a new active node is pushed, begin an
977 * EHEnt, and when it's popped, it's done.
979 std::vector
<const php::ExnNode
*> activeList
;
981 auto pop_active
= [&] (Offset past
) {
982 auto p
= activeList
.back();
983 activeList
.pop_back();
984 exnMap
[p
].back()->past
= past
;
987 auto push_active
= [&] (const php::ExnNode
* p
, Offset start
) {
988 auto const parent
= activeList
.empty()
990 : exnMap
[activeList
.back()].back().get();
992 std::make_unique
<EHRegion
>(
993 EHRegion
{ p
, parent
, start
, kInvalidOffset
}
996 activeList
.push_back(p
);
1000 * Walk over the blocks, and compare the new block's exnNode path to
1001 * the active one. Find the least common ancestor of the two paths,
1002 * then modify the active list by popping and then pushing nodes to
1003 * set it to the new block's path.
1005 for (auto const bid
: info
.blockOrder
) {
1006 auto const b
= func
.blocks
[bid
].get();
1007 auto const offset
= info
.blockInfo
[bid
].offset
;
1009 if (b
->exnNodeId
== NoExnNodeId
) {
1010 while (!activeList
.empty()) pop_active(offset
);
1014 std::vector
<const php::ExnNode
*> current
;
1015 exn_path(func
, current
, b
->exnNodeId
);
1017 auto const prefix
= shared_prefix(current
, activeList
);
1018 for (size_t i
= prefix
, sz
= activeList
.size(); i
< sz
; ++i
) {
1021 for (size_t i
= prefix
, sz
= current
.size(); i
< sz
; ++i
) {
1022 push_active(current
[i
], offset
);
1025 for (int i
= 0; i
< info
.blockInfo
[bid
].regionsToPop
; i
++) {
1026 // If the block ended in a jump out of the catch region, this effectively
1027 // ends all catch regions deeper than the one we are jumping to
1028 pop_active(info
.blockInfo
[bid
].past
);
1031 if (debug
&& !activeList
.empty()) {
1033 exn_path(func
, current
, activeList
.back()->idx
);
1034 assert(current
== activeList
);
1038 while (!activeList
.empty()) {
1039 pop_active(info
.blockInfo
[info
.blockOrder
.back()].past
);
1043 * We've created all our regions, but we need to sort them instead
1044 * of trying to get the UnitEmitter to do it.
1046 * The UnitEmitter expects EH regions that look a certain way
1047 * (basically the way emitter.cpp likes them). There are some rules
1048 * about the order it needs to have at runtime, which we set up
1051 * Essentially, an entry a is less than an entry b iff:
1053 * - a starts before b
1054 * - a starts at the same place, but encloses b entirely
1055 * - a has the same extents as b, but is a parent of b
1057 std::vector
<EHRegion
*> regions
;
1058 for (auto& mapEnt
: exnMap
) {
1059 for (auto& region
: mapEnt
.second
) {
1060 regions
.push_back(region
.get());
1064 begin(regions
), end(regions
),
1065 [&] (const EHRegion
* a
, const EHRegion
* b
) {
1066 if (a
== b
) return false;
1067 if (a
->start
== b
->start
) {
1068 if (a
->past
== b
->past
) {
1069 // When regions exactly overlap, the parent is less than the
1071 for (auto p
= b
->parent
; p
!= nullptr; p
= p
->parent
) {
1072 if (p
== a
) return true;
1074 // If a is not a parent of b, and they have the same region;
1075 // then b better be a parent of a.
1078 for (; p
!= b
&& p
!= nullptr; p
= p
->parent
) continue;
1083 return a
->past
> b
->past
;
1085 return a
->start
< b
->start
;
1089 hphp_fast_map
<const EHRegion
*,uint32_t> parentIndexMap
;
1090 for (auto& r
: regions
) {
1091 emit_eh_region(fe
, r
, info
.blockInfo
, parentIndexMap
);
1093 fe
.setEHTabIsSorted();
1096 void merge_repo_auth_type(UnitEmitter
& ue
, RepoAuthType rat
) {
1097 using T
= RepoAuthType::Tag
;
1099 switch (rat
.tag()) {
1111 case T::OptUncArrKey
:
1113 case T::OptUncStrLike
:
1166 case T::OptPArrLike
:
1167 case T::OptVArrLike
:
1169 // NOTE: In repo mode, RAT's in Array's might only contain global litstr
1170 // id's. No need to merge. In non-repo mode, RAT's in Array's might contain
1171 // local litstr id's.
1172 if (RuntimeOption::RepoAuthoritative
) return;
1174 if (rat
.hasArrData()) {
1175 auto arr
= rat
.array();
1176 switch (arr
->tag()) {
1177 case RepoAuthType::Array::Tag::Packed
:
1178 for (uint32_t i
= 0; i
< arr
->size(); ++i
) {
1179 merge_repo_auth_type(ue
, arr
->packedElem(i
));
1182 case RepoAuthType::Array::Tag::PackedN
:
1183 merge_repo_auth_type(ue
, arr
->elemType());
1190 case T::OptExactObj
:
1194 case T::OptExactCls
:
1197 ue
.mergeLitstr(rat
.clsName());
1200 case T::OptSubRecord
:
1201 case T::OptExactRecord
:
1203 case T::ExactRecord
:
1204 ue
.mergeLitstr(rat
.recordName());
1209 void emit_finish_func(EmitUnitState
& state
,
1210 const php::Func
& func
,
1212 const EmitBcInfo
& info
) {
1213 if (info
.containsCalls
) fe
.containsCalls
= true;;
1215 emit_locals_and_params(fe
, func
, info
);
1216 emit_ehent_tree(fe
, func
, info
);
1218 // Nothing should look at the bytecode from now on. Free it up to
1219 // compensate for the UnitEmitter we're creating.
1220 const_cast<php::Func
&>(func
).blocks
.clear();
1222 fe
.userAttributes
= func
.userAttributes
;
1223 fe
.retUserType
= func
.returnUserType
;
1224 fe
.retUpperBounds
= func
.returnUBs
;
1225 fe
.originalFilename
=
1226 func
.originalFilename
? func
.originalFilename
:
1227 func
.originalUnit
? func
.originalUnit
->filename
: nullptr;
1228 fe
.isClosureBody
= func
.isClosureBody
;
1229 fe
.isAsync
= func
.isAsync
;
1230 fe
.isGenerator
= func
.isGenerator
;
1231 fe
.isPairGenerator
= func
.isPairGenerator
;
1232 fe
.isNative
= func
.nativeInfo
!= nullptr;
1233 fe
.isMemoizeWrapper
= func
.isMemoizeWrapper
;
1234 fe
.isMemoizeWrapperLSB
= func
.isMemoizeWrapperLSB
;
1235 fe
.isRxDisabled
= func
.isRxDisabled
;
1236 fe
.hasParamsWithMultiUBs
= func
.hasParamsWithMultiUBs
;
1237 fe
.hasReturnWithMultiUBs
= func
.hasReturnWithMultiUBs
;
1239 auto const retTy
= state
.index
.lookup_return_type_raw(&func
);
1240 if (!retTy
.subtypeOf(BBottom
)) {
1241 auto const rat
= make_repo_type(*state
.index
.array_table_builder(), retTy
);
1242 merge_repo_auth_type(fe
.ue(), rat
);
1243 fe
.repoReturnType
= rat
;
1246 if (is_specialized_wait_handle(retTy
)) {
1247 auto const awaitedTy
= wait_handle_inner(retTy
);
1248 if (!awaitedTy
.subtypeOf(BBottom
)) {
1249 auto const rat
= make_repo_type(
1250 *state
.index
.array_table_builder(),
1253 merge_repo_auth_type(fe
.ue(), rat
);
1254 fe
.repoAwaitedReturnType
= rat
;
1258 if (func
.nativeInfo
) {
1259 fe
.hniReturnType
= func
.nativeInfo
->returnType
;
1261 fe
.retTypeConstraint
= func
.retTypeConstraint
;
1263 fe
.maxStackCells
= info
.maxStackDepth
+
1265 fe
.numIterators() * kNumIterCells
;
1267 fe
.finish(fe
.ue().bcPos());
1270 void renumber_locals(const php::Func
& func
) {
1274 // We initialise the name ids in two passes, since locals that have not been
1275 // remapped may require their name be at the same offset as the local.
1276 // This is true for parameters, volatile locals, or locals in funcs with
1278 for (auto& loc
: const_cast<php::Func
&>(func
).locals
) {
1280 // make sure its out of range, in case someone tries to read it.
1283 loc
.nameId
= nameId
++;
1287 for (auto& loc
: const_cast<php::Func
&>(func
).locals
) {
1288 if (loc
.unusedName
|| !loc
.name
) {
1289 // make sure its out of range, in case someone tries to read it.
1290 loc
.nameId
= INT_MAX
;
1291 } else if (loc
.killed
) {
1292 // The local was moved to share another slot, but its name is still
1294 loc
.nameId
= nameId
++;
1300 void emit_init_func(FuncEmitter
& fe
, const php::Func
& func
) {
1301 renumber_locals(func
);
1303 std::get
<0>(func
.srcInfo
.loc
),
1304 std::get
<1>(func
.srcInfo
.loc
),
1308 func
.srcInfo
.docComment
1312 void emit_func(EmitUnitState
& state
, UnitEmitter
& ue
,
1313 FuncEmitter
* fe
, const php::Func
& func
) {
1314 FTRACE(2, " func {}\n", func
.name
->data());
1315 emit_init_func(*fe
, func
);
1316 auto const info
= emit_bytecode(state
, ue
, func
);
1317 emit_finish_func(state
, func
, *fe
, info
);
1320 const StaticString
s___HasTopLevelCode("__HasTopLevelCode");
1321 void emit_pseudomain(EmitUnitState
& state
,
1323 const php::Unit
& unit
) {
1324 FTRACE(2, " pseudomain\n");
1325 auto& pm
= *unit
.pseudomain
;
1326 renumber_locals(pm
);
1327 ue
.initMain(std::get
<0>(pm
.srcInfo
.loc
),
1328 std::get
<1>(pm
.srcInfo
.loc
));
1329 auto const fe
= ue
.getMain();
1330 auto const info
= emit_bytecode(state
, ue
, pm
);
1331 if (is_systemlib_part(unit
)) {
1332 ue
.m_mergeOnly
= true;
1333 auto const tv
= make_tv
<KindOfInt64
>(1);
1334 ue
.m_mainReturn
= tv
;
1336 ue
.m_mergeOnly
= ue
.m_returnSeen
1337 && ue
.m_mainReturn
.m_type
!= KindOfUninit
1338 && ue
.m_fileAttributes
.find(s___HasTopLevelCode
.get())
1339 == ue
.m_fileAttributes
.end();
1342 emit_finish_func(state
, pm
, *fe
, info
);
1345 void emit_record(UnitEmitter
& ue
, const php::Record
& rec
) {
1346 auto const re
= ue
.newRecordEmitter(rec
.name
->toCppString());
1348 std::get
<0>(rec
.srcInfo
.loc
),
1349 std::get
<1>(rec
.srcInfo
.loc
),
1351 rec
.parentName
? rec
.parentName
: staticEmptyString(),
1352 rec
.srcInfo
.docComment
1354 re
->setUserAttributes(rec
.userAttributes
);
1355 for (auto&& f
: rec
.fields
) {
1369 void emit_class(EmitUnitState
& state
,
1371 PreClassEmitter
* pce
,
1373 const php::Class
& cls
) {
1374 FTRACE(2, " class: {}\n", cls
.name
->data());
1376 std::get
<0>(cls
.srcInfo
.loc
),
1377 std::get
<1>(cls
.srcInfo
.loc
),
1378 offset
== kInvalidOffset
? ue
.bcPos() : offset
,
1380 cls
.parentName
? cls
.parentName
: staticEmptyString(),
1381 cls
.srcInfo
.docComment
1383 pce
->setUserAttributes(cls
.userAttributes
);
1385 for (auto& x
: cls
.interfaceNames
) pce
->addInterface(x
);
1386 for (auto& x
: cls
.usedTraitNames
) pce
->addUsedTrait(x
);
1387 for (auto& x
: cls
.requirements
) pce
->addClassRequirement(x
);
1388 for (auto& x
: cls
.traitPrecRules
) pce
->addTraitPrecRule(x
);
1389 for (auto& x
: cls
.traitAliasRules
) pce
->addTraitAliasRule(x
);
1391 pce
->setIfaceVtableSlot(state
.index
.lookup_iface_vtable_slot(&cls
));
1393 bool needs86cinit
= false;
1395 auto const nativeConsts
= cls
.attrs
& AttrBuiltin
?
1396 Native::getClassConstants(cls
.name
) : nullptr;
1398 for (auto& cconst
: cls
.constants
) {
1399 if (nativeConsts
&& nativeConsts
->count(cconst
.name
)) {
1402 if (!cconst
.val
.has_value()) {
1403 pce
->addAbstractConstant(
1405 cconst
.typeConstraint
,
1409 needs86cinit
|= cconst
.val
->m_type
== KindOfUninit
;
1413 cconst
.typeConstraint
,
1414 &cconst
.val
.value(),
1421 for (auto& m
: cls
.methods
) {
1422 if (!needs86cinit
&& m
->name
== s_86cinit
.get()) continue;
1423 FTRACE(2, " method: {}\n", m
->name
->data());
1424 auto const fe
= ue
.newMethodEmitter(m
->name
, pce
);
1425 emit_init_func(*fe
, *m
);
1427 auto const info
= emit_bytecode(state
, ue
, *m
);
1428 emit_finish_func(state
, *m
, *fe
, info
);
1431 CompactVector
<Type
> useVars
;
1432 if (is_closure(cls
)) {
1433 auto f
= find_method(&cls
, s_invoke
.get());
1434 useVars
= state
.index
.lookup_closure_use_vars(f
, true);
1436 auto uvIt
= useVars
.begin();
1438 auto const privateProps
= state
.index
.lookup_private_props(&cls
, true);
1439 auto const privateStatics
= state
.index
.lookup_private_statics(&cls
, true);
1440 for (auto& prop
: cls
.properties
) {
1441 auto const makeRat
= [&] (const Type
& ty
) -> RepoAuthType
{
1442 if (!ty
.subtypeOf(BCell
)) return RepoAuthType
{};
1443 if (ty
.subtypeOf(BBottom
)) {
1444 // A property can be TBottom if no sets (nor its initial value) is
1445 // compatible with its type-constraint, or if its LateInit and there's
1446 // no sets to it. The repo auth type here doesn't particularly matter,
1447 // since such a prop will be inaccessible.
1448 return RepoAuthType
{};
1451 auto const rat
= make_repo_type(*state
.index
.array_table_builder(), ty
);
1452 merge_repo_auth_type(ue
, rat
);
1456 auto const privPropTy
= [&] (const PropState
& ps
) -> Type
{
1457 if (is_closure(cls
)) {
1458 // For closures use variables will be the first properties of the
1459 // closure object, in declaration order
1460 if (uvIt
!= useVars
.end()) return *uvIt
++;
1464 auto it
= ps
.find(prop
.name
);
1465 if (it
== end(ps
)) return Type
{};
1466 return it
->second
.ty
;
1470 auto const attrs
= prop
.attrs
;
1471 if (attrs
& AttrPrivate
) {
1472 propTy
= privPropTy((attrs
& AttrStatic
) ? privateStatics
: privateProps
);
1473 } else if ((attrs
& AttrPublic
) && (attrs
& AttrStatic
)) {
1474 propTy
= state
.index
.lookup_public_static(Context
{}, &cls
, prop
.name
);
1481 prop
.typeConstraint
,
1488 assert(uvIt
== useVars
.end());
1490 pce
->setEnumBaseTy(cls
.enumBaseTy
);
1493 void emit_typealias(UnitEmitter
& ue
, const php::TypeAlias
& alias
,
1494 const EmitUnitState
& state
) {
1495 auto const id
= ue
.addTypeAlias(alias
);
1496 if (state
.processedTypeAlias
.find(id
) == state
.processedTypeAlias
.end()) {
1497 ue
.pushMergeableId(Unit::MergeKind::TypeAlias
, id
);
1501 void emit_constant(UnitEmitter
& ue
, const Constant
& constant
,
1502 const EmitUnitState
& state
) {
1503 auto const id
= ue
.addConstant(constant
);
1504 if (state
.processedConstant
.find(id
) == state
.processedConstant
.end()) {
1505 ue
.pushMergeableId(Unit::MergeKind::Define
, id
);
1509 //////////////////////////////////////////////////////////////////////
1513 std::unique_ptr
<UnitEmitter
> emit_unit(const Index
& index
,
1514 const php::Unit
& unit
) {
1516 Trace::hhbbc_emit
, kSystemLibBump
, is_systemlib_part(unit
)
1519 assert(check(unit
));
1521 auto ue
= std::make_unique
<UnitEmitter
>(unit
.sha1
,
1523 Native::s_noNativeFuncs
,
1525 FTRACE(1, " unit {}\n", unit
.filename
->data());
1527 ue
->m_filepath
= unit
.filename
;
1528 ue
->m_isHHFile
= unit
.isHHFile
;
1529 ue
->m_metaData
= unit
.metaData
;
1530 ue
->m_fileAttributes
= unit
.fileAttributes
;
1532 EmitUnitState state
{ index
, &unit
};
1533 state
.classOffsets
.resize(unit
.classes
.size(), kInvalidOffset
);
1535 emit_pseudomain(state
, *ue
, unit
);
1537 // Go thought all constant and see if they still need their matching 86cinit
1538 // func. In repo mode we are able to optimize away most of them away. And if
1539 // the const don't need them anymore we should not emit them.
1544 > const_86cinit_funcs
;
1545 for (auto cid
: state
.constantInfo
) {
1546 auto& c
= unit
.constants
[cid
];
1547 if (type(c
->val
) != KindOfUninit
) {
1548 const_86cinit_funcs
.insert(Constant::funcNameFromName(c
->name
));
1553 * Top level funcs are always defined when the unit is loaded, and
1554 * don't have a DefFunc bytecode. Process them up front.
1556 std::vector
<std::unique_ptr
<FuncEmitter
> > top_fes
;
1557 for (size_t id
= 0; id
< unit
.funcs
.size(); ++id
) {
1558 auto const f
= unit
.funcs
[id
].get();
1559 assertx(f
!= unit
.pseudomain
.get());
1560 if (!f
->top
) continue;
1561 if (const_86cinit_funcs
.find(f
->name
) != const_86cinit_funcs
.end()) {
1564 top_fes
.push_back(std::make_unique
<FuncEmitter
>(*ue
, -1, -1, f
->name
));
1565 emit_func(state
, *ue
, top_fes
.back().get(), *f
);
1569 * Find any top-level classes that need to be included due to
1570 * hoistability, even though the corresponding DefCls was not
1573 for (size_t id
= 0; id
< unit
.classes
.size(); ++id
) {
1574 if (state
.classOffsets
[id
] != kInvalidOffset
) continue;
1575 auto const c
= unit
.classes
[id
].get();
1576 if (c
->hoistability
!= PreClass::MaybeHoistable
&&
1577 c
->hoistability
!= PreClass::AlwaysHoistable
) {
1580 // Closures are AlwaysHoistable; but there's no need to include
1581 // them unless there's a reachable CreateCl.
1582 if (is_closure(*c
)) continue;
1583 recordClass(state
, *ue
, id
);
1586 size_t pceId
= 0, feId
= 0;
1588 // Note that state.pceInfo can grow inside the loop
1589 while (pceId
< state
.pceInfo
.size()) {
1590 auto const& pceInfo
= state
.pceInfo
[pceId
++];
1591 auto const& c
= unit
.classes
[pceInfo
.origId
];
1592 emit_class(state
, *ue
, pceInfo
.pce
,
1593 state
.classOffsets
[pceInfo
.origId
], *c
);
1596 while (feId
< state
.feInfo
.size()) {
1597 auto const& feInfo
= state
.feInfo
[feId
++];
1598 // DefFunc ids are off by one wrt unit.funcs because we don't
1599 // store the pseudomain there.
1600 auto const& f
= unit
.funcs
[feInfo
.origId
- 1];
1603 emit_func(state
, *ue
, feInfo
.fe
, *f
);
1605 } while (pceId
< state
.pceInfo
.size());
1607 for (auto tid
: state
.typeAliasInfo
) {
1608 emit_typealias(*ue
, *unit
.typeAliases
[tid
], state
);
1611 for (auto cid
: state
.constantInfo
) {
1612 emit_constant(*ue
, *unit
.constants
[cid
], state
);
1615 for (size_t id
= 0; id
< unit
.records
.size(); ++id
) {
1616 emit_record(*ue
, *unit
.records
[id
]);
1619 // Top level funcs need to go after any non-top level funcs. See
1620 // Unit::merge for details.
1621 for (auto& fe
: top_fes
) ue
->appendTopEmitter(std::move(fe
));
1626 //////////////////////////////////////////////////////////////////////