2 +----------------------------------------------------------------------+
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
17 #ifndef incl_HPHP_VM_VERIFIER_CFG_H_
18 #define incl_HPHP_VM_VERIFIER_CFG_H_
20 #include "hphp/runtime/vm/hhbc-codec.h"
21 #include "hphp/runtime/vm/repo.h"
22 #include "hphp/runtime/vm/verifier/util.h"
23 #include "hphp/util/arena.h"
24 #include <boost/dynamic_bitset.hpp>
30 * Block is a standard basic block. The # of ordinary successors
31 * is determined by the last instruction and the # of exception
32 * edges is determined by the Graph that owns this Block.
34 * id and rpo_id are the block id and reverse-postorder-number,
35 * respectively. Both start from 0. Pass-specific data about
36 * blocks should be stored elsewhere and indexed by one of these ids,
37 * or in a Map<Block*> if you're a sadist.
40 explicit Block(PC start
) :
41 start(start
), last(0), end(0) , id(-1), rpo_id(-1), next_linear(0),
42 next_rpo(0), succs(0), exn(0) {
46 explicit Block(const Block
&) = delete;
47 Block
& operator=(const Block
&) = delete;
49 static bool reachable(Block
* from
, Block
* to
,
50 boost::dynamic_bitset
<>& visited
);
52 PC start
; // first instruction
53 PC last
; // last instruction (inclusive)
54 PC end
; // points past last instruction (exclusive)
55 int id
; // plain block id (creation order)
56 int rpo_id
; // reverse postorder number. 0 = entry.
57 Block
* next_linear
; // next block in linear order
58 Block
* next_rpo
; // next block in reverse postorder
59 Block
** succs
; // array of succesors (can have nulls)
60 Block
* exn
; // exception edge (can be null)
64 * Graph is a control-flow-graph container of Blocks. The idea is
65 * to Arena-allocate these (and Blocks) and only store pass-agnostic
66 * stuff about the function.
68 * Iteration over blocks can be done directly if necessary but when
69 * you can, use a predefined Range, like LinearBlocks or RpoBlocks.
72 Graph() : first_linear(0), first_rpo(0), entries(0), block_count(0) {
75 explicit Graph(const Graph
&) = delete;
76 Graph
& operator=(const Graph
&) = delete;
78 Block
* first_linear
; // blocks in linear order
79 Block
* first_rpo
; // blocks in reverse postorder
80 Block
** entries
; // entry points indexed by arg count [0:param_count]
85 inline bool isTF(PC pc
) {
86 return (instrFlags(peek_op(pc
)) & TF
) != 0;
89 inline bool isCF(PC pc
) {
90 return instrIsNonCallControlFlow(peek_op(pc
));
93 inline bool isRet(PC pc
) {
94 auto const op
= peek_op(pc
);
95 return op
== Op::RetC
|| op
== Op::RetCSuspended
|| op
== Op::RetM
;
98 // Return true if pc points to an Iter instruction whose first immedate
99 // argument is an iterator id.
100 inline bool isIter(PC pc
) {
101 // IterBreak is not included, because it has a variable-length list of
102 // iterartor ids, rather than a single iterator id.
103 switch (peek_op(pc
)) {
121 inline int getImmIva(PC pc
) {
122 return getImm(pc
, 0).u_IVA
;
125 inline int numSuccBlocks(const Block
* b
) {
126 return numSuccs(b
->last
);
// Dispatch on an Either<L*, R*> d: evaluate member-expression l on the left
// alternative or r on the right one, returning the result. Used by SomeUnit
// and SomeFunc below to forward to Unit/UnitEmitter and Func/FuncEmitter.
// NOTE(review): the final line of this macro was lost in extraction; the
// visible text ends in a dangling '\'. not_reached() restores a well-formed
// macro — confirm against the original.
#define APPLY(d, l, r)                          \
  if (auto left = d.left()) return left->l;     \
  if (auto right = d.right()) return right->r;  \
  not_reached();
135 /* implicit */ SomeUnit(const Unit
* u
) : m_unit(u
) {}
136 /* implicit */ SomeUnit(const UnitEmitter
* u
) : m_unit(u
) {}
138 SomeUnit
& operator=(const Unit
* u
) { m_unit
= u
; return *this; }
139 SomeUnit
& operator=(const UnitEmitter
* u
) { m_unit
= u
; return *this; }
141 PC
entry() const { APPLY(m_unit
, entry(), bc()); }
142 PC
at(Offset o
) const { return entry() + o
; }
143 Offset
offsetOf(PC addr
) const { return static_cast<Offset
>(addr
- entry()); }
146 Either
<const Unit
*, const UnitEmitter
*> m_unit
;
150 /* implicit */ SomeFunc(const Func
* f
) : m_func(f
) {}
151 /* implicit */ SomeFunc(const FuncEmitter
* f
) : m_func(f
) {}
153 SomeFunc
& operator=(const Func
* f
) { m_func
= f
; return *this; }
154 SomeFunc
& operator=(const FuncEmitter
* f
) { m_func
= f
; return *this; }
156 SomeUnit
unit() const {
158 [&] (const Func
* f
) -> SomeUnit
{ return f
->unit(); },
159 [&] (const FuncEmitter
* f
) -> SomeUnit
{ return &f
->ue(); }
162 Offset
base() const { APPLY(m_func
, base(), base
); }
163 Offset
past() const { APPLY(m_func
, past(), past
); }
165 size_t numParams() const { APPLY(m_func
, params().size(), params
.size()); }
166 const Func::ParamInfo
& param(size_t idx
) const {
167 APPLY(m_func
, params()[idx
], params
[idx
]);
170 size_t numEHEnts() const { APPLY(m_func
, ehtab().size(), ehtab
.size()); }
171 const EHEnt
& ehent(size_t i
) const { APPLY(m_func
, ehtab()[i
], ehtab
[i
]); }
173 const EHEnt
* findEH(Offset off
) const {
175 [&] (const Func
* f
) { return Func::findEH(f
->ehtab(), off
); },
176 [&] (const FuncEmitter
* f
) { return Func::findEH(f
->ehtab
, off
); }
181 Either
<const Func
*, const FuncEmitter
*> m_func
;
187 * A GraphBuilder holds the temporary state required for building
188 * a graph and is typically not needed once a Graph is built.
190 struct GraphBuilder
{
192 typedef hphp_hash_map
<PC
, Block
*> BlockMap
;
193 enum EdgeKind
{ FallThrough
, Taken
};
196 GraphBuilder(Arena
& arena
, const F
* func
)
197 : m_arena(arena
), m_func(func
),
198 m_unit(m_func
.unit()), m_graph(0) {
201 Block
* at(Offset off
) const { return at(m_unit
.at(off
)); }
204 void createExBlocks();
207 Block
* createBlock(PC pc
);
208 Block
* createBlock(Offset off
) { return createBlock(m_unit
.at(off
)); }
209 Block
* at(PC addr
) const;
210 Offset
offset(PC addr
) const {
211 return m_unit
.offsetOf(addr
);
213 Block
** succs(Block
* b
);
217 const SomeFunc m_func
;
218 const SomeUnit m_unit
;
223 * Range over blocks in linear order
225 struct LinearBlocks
{
226 LinearBlocks(Block
* first
, Block
* end
) : b(first
), end(end
) {
228 bool empty() const { return b
== end
; }
229 Block
* front() const { assertx(!empty()); return b
; }
230 Block
* popFront() { Block
* f
= front(); b
= b
->next_linear
; return f
; }
232 Block
*b
; // The current block.
233 Block
*end
; // End marker; empty when b==end.
237 * Range over the non-null Block* pointers in array.
239 struct BlockPtrRange
{
240 BlockPtrRange(Block
** a
, int cap
) : i(a
), end(&a
[cap
]) {
244 bool empty() const { return i
>= end
; }
245 Block
* front() const { assertx(!empty()); return i
[0]; }
246 Block
* back() const { assertx(!empty()); return end
[-1]; }
258 void trimFront() { while (!empty() && !i
[0]) ++i
; }
259 void trimBack() { while (!empty() && !end
[-1]) --end
; }
261 Block
**i
; // current
262 Block
**end
; // points past end (exclusive)
266 * Iterate through a contiguous range of instructions
269 InstrRange(PC pc
, PC end
) : pc(pc
), end(end
) {
288 * Sort blocks in reverse postorder and reset each blocks's next_rpo and
291 void sortRpo(Graph
* g
);
293 inline InstrRange
funcInstrs(SomeFunc func
) {
294 return InstrRange(func
.unit().at(func
.base()),
295 func
.unit().at(func
.past()));
298 inline InstrRange
blockInstrs(const Block
* b
) {
299 return InstrRange(b
->start
, b
->end
);
302 inline BlockPtrRange
succBlocks(const Block
* b
) {
303 return BlockPtrRange(b
->succs
, numSuccBlocks(b
));
306 inline BlockPtrRange
entryBlocks(const Graph
* g
) {
307 return BlockPtrRange(g
->entries
, g
->param_count
+ 1);
310 inline LinearBlocks
linearBlocks(const Graph
* g
) {
311 return LinearBlocks(g
->first_linear
, 0);
314 // A callsite pushes 0 or more arguments on the stack, followed by a FPush* and
315 // FCall opcodes. The FPI Region protects the range of instructions that execute
316 // with the partial activation on the stack, which is the instruction after
317 // FPush* up to and including FCall. FPush* is not in the protected region. In
318 // other words, only FCall is part of the FPI region. This mechanism is being
319 // actively dismantled.
321 inline Offset
fpiBase(const FPIEnt
& fpi
, PC bc
) {
322 PC fpush
= bc
+ fpi
.m_fpushOff
;
323 return fpush
+ instrLen(fpush
) - bc
;
326 inline Offset
fpiPast(const FPIEnt
& fpi
, PC bc
) {
327 PC endFpiOp
= bc
+ fpi
.m_fpiEndOff
;
328 return endFpiOp
+ instrLen(endFpiOp
) - bc
;
333 #endif // incl_HPHP_VM_VERIFIER_CFG_H_