2 +----------------------------------------------------------------------+
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
17 #include "hphp/runtime/vm/jit/containers.h"
18 #include "hphp/runtime/vm/jit/ir-opcode.h"
19 #include "hphp/runtime/vm/jit/trans-rec.h"
20 #include "hphp/runtime/vm/jit/vasm.h"
21 #include "hphp/runtime/vm/jit/vasm-instr.h"
22 #include "hphp/runtime/vm/jit/vasm-text.h"
23 #include "hphp/runtime/vm/jit/vasm-unit.h"
25 #include "hphp/util/data-block.h"
27 #include <folly/Random.h>
35 ///////////////////////////////////////////////////////////////////////////////
37 namespace vasm_detail
{
39 ///////////////////////////////////////////////////////////////////////////////
42 * Nouned verb class used to hide mostly-debug metadata updates from the main
43 * body of vasm_emit().
45 * This is invoked on every Vinstr encountered in order to accumulate mappings
46 * from higher-level representations.
48 struct IRMetadataUpdater
{
// Construct from the emission environment; `asm_info' is the sink for
// the accumulated HHIR mappings (presumably may be null to disable
// collection — TODO confirm, the implementation is not visible here).
49 IRMetadataUpdater(const Venv
& env
, AsmInfo
* asm_info
);
52 * Update IR mappings for a Vinstr.
54 void register_inst(const Vinstr
& inst
);
57 * Update IR mappings at the end of a block.
59 void register_block_end();
62 * Update AsmInfo after the Vunit has been fully emitted.
64 void finish(const jit::vector
<Vlabel
>& labels
);
// NOTE(review): this member appears to belong to a nested snippet
// record whose enclosing declaration is not visible in this view.
68 const IRInstruction
* origin
;
73 * Get HHIR mapping info for the current block in `m_env'.
75 jit::vector
<Snippet
>& block_info();
// Origin IR instruction of the Vinstr currently being emitted;
// defaults to null.
80 const IRInstruction
* m_origin
{nullptr};
// Per-area, per-block snippet lists accumulated during emission.
81 jit::vector
<jit::vector
<jit::vector
<Snippet
>>> m_area_to_blockinfos
;
// Bytecode-to-TCA mapping being recorded, if any; defaults to null.
82 std::vector
<TransBCMapping
>* m_bcmap
{nullptr};
85 ///////////////////////////////////////////////////////////////////////////////
88 * Is `block' an empty catch block?
// Used by vasm_emit() to skip catch blocks containing no real work, so
// their exception edges can be pointed at a shared catch-helper stub.
90 bool is_empty_catch(const Vblock
& block
);
93 * Register catch blocks for fixups.
// Invoked by vasm_emit() once per entry in env.catches, after all code
// has been emitted.
95 void register_catch_block(const Venv
& env
, const Venv::LabelPatch
& p
);
98 * Arch-independent emitters.
100 * Return true if the instruction was supported.
// Generic fallback for instructions with no arch-independent lowering.
// NOTE(review): the body is truncated in this view; presumably it just
// returns false so the arch-specific Vemit backend handles the
// instruction — TODO confirm against the full file.
102 template <class Inst
>
103 bool emit(Venv
& /*env*/, const Inst
&) {
// Arch-independent emitters for service requests and smashable
// control flow; implementations are not visible in this view.
106 bool emit(Venv
& env
, const callphpfe
& i
);
107 bool emit(Venv
& env
, const callphps
& i
);
108 bool emit(Venv
& env
, const bindjmp
& i
);
109 bool emit(Venv
& env
, const bindjcc
& i
);
110 bool emit(Venv
& env
, const bindaddr
& i
);
111 bool emit(Venv
& env
, const ldbindaddr
& i
);
112 bool emit(Venv
& env
, const fallback
& i
);
113 bool emit(Venv
& env
, const fallbackcc
& i
);
114 bool emit(Venv
& env
, const movqs
& i
);
115 bool emit(Venv
& env
, const jmps
& i
);
// pushframe: one fewer inline frame is now pending publication.
117 inline bool emit(Venv
& env
, const pushframe
&) {
118 if (env
.frame
== -1) return true; // unreachable block
119 assertx(env
.pending_frames
> 0);
121 --env
.pending_frames
;
// NOTE(review): the remainder of this function (including its return)
// is not visible in this view.
// recordbasenativesp handler.
// NOTE(review): the body is not visible in this view.
125 inline bool emit(Venv
& env
, const recordbasenativesp
& i
) {
// unrecordbasenativesp handler.
// NOTE(review): the body is not visible in this view.
129 inline bool emit(Venv
& env
, const unrecordbasenativesp
& i
) {
// recordstack: snapshot the current inline frame state at the
// instruction's fake address.
133 inline bool emit(Venv
& env
, const recordstack
& i
) {
134 env
.record_inline_stack(i
.fakeAddress
);
// NOTE(review): the return statement and closing brace are not visible
// in this view.
// Attribute the bytes emitted since env.framestart to the current
// block's frame (exclusive time) and to each ancestor frame up to the
// top (inclusive time), then reset framestart to the code frontier.
138 inline void record_frame(Venv
& env
) {
139 auto const& block
= env
.unit
.blocks
[env
.current
];
140 auto const frame
= env
.frame
;
141 auto const start
= env
.framestart
;
142 auto& frames
= env
.unit
.frames
;
// Size of the code emitted for this frame within the current block.
143 auto const size
= env
.cb
->frontier() - start
;
144 // It's possible that (a) this block is empty, and (b) cb is full, so the
145 // frontier from the start of the block is actually the first byte after the
146 // block. This is particularly likely when RetranslateAll is in use as
147 // ephemeral code blocks are resizable.
148 always_assert(!size
|| env
.cb
->contains(start
));
149 always_assert((int64_t)size
>= 0);
150 auto const area
= static_cast<uint8_t>(block
.area_idx
);
151 frames
[frame
].sections
[area
].exclusive
+= size
;
// Ancestors accumulate the same bytes as inclusive time.
152 for (auto f
= block
.frame
; f
!= Vframe::Top
; f
= frames
[f
].parent
) {
153 frames
[f
].sections
[area
].inclusive
+= size
;
155 env
.framestart
= env
.cb
->frontier();
// inlinestart: a new inline frame begins; it stays pending until
// published.
158 inline bool emit(Venv
& env
, const inlinestart
& i
) {
159 if (env
.frame
== -1) return true; // unreachable block
161 ++env
.pending_frames
;
// The instruction's frame id must be a valid index into unit.frames.
162 always_assert(0 <= i
.id
&& i
.id
< env
.unit
.frames
.size());
// NOTE(review): the remainder of this function is not visible in this
// view.
// inlineend: pop back to the parent inline frame.
167 inline bool emit(Venv
& env
, const inlineend
&) {
168 if (env
.frame
== -1) return true; // unreachable block
169 assertx(env
.pending_frames
> 0);
171 --env
.pending_frames
;
// Restore the enclosing frame from the frame table.
173 env
.frame
= env
.unit
.frames
[env
.frame
].parent
;
174 always_assert(0 <= env
.frame
&& env
.frame
< env
.unit
.frames
.size());
// NOTE(review): the return statement and closing brace are not visible
// in this view.
// Emit a nop roughly every `nop_interval' instructions (randomized
// start position); an interval of 0 disables the feature entirely.
178 template<class Vemit
>
179 void check_nop_interval(Venv
& env
, const Vinstr
& inst
,
180 uint32_t& nop_counter
, uint32_t nop_interval
) {
181 if (LIKELY(nop_interval
== 0)) return;
183 if (nop_counter
== 0) {
184 // Initialize start position randomly.
185 nop_counter
= folly::Random::rand32(nop_interval
) + 1;
// NOTE(review): the switch statement enclosing the following case
// labels is not visible in this view.
189 // These instructions are for exception handling or state syncing and do
190 // not represent any actual work, so they're excluded from the nop counter.
191 case Vinstr::landingpad
:
192 case Vinstr::nothrow
:
193 case Vinstr::syncpoint
:
198 if (--nop_counter
== 0) {
199 // We use a special emit_nop() function rather than emit(nop{}) because
200 // many performance counters exclude nops from their count of retired
201 // instructions. It's up the to arch-specific backends to emit some
202 // real work with no visible side-effects.
203 Vemit(env
).emit_nop();
204 nop_counter
= nop_interval
;
// Emit one return-address stub per unique SrcKey target referenced by
// the unit's ldbindretaddr instructions (memoized in `stubs'), then
// record a Vaddr for each instruction pointing at its stub.
210 template<class Vemit
>
211 void emitLdBindRetAddrStubs(Venv
& env
) {
212 jit::fast_map
<SrcKey
, CodeAddress
, SrcKey::Hasher
> stubs
;
// Stubs are emitted into the last area of the text section.
213 env
.cb
= &env
.text
.areas().back().code
;
215 for (auto const& ldbindretaddr
: env
.ldbindretaddrs
) {
// Reuse an existing stub for this target, or emit a fresh one.
216 CodeAddress stub
= [&] {
217 auto const i
= stubs
.find(ldbindretaddr
.target
);
218 if (i
!= stubs
.end()) return i
->second
;
220 auto const start
= env
.cb
->frontier();
221 stubs
.insert({ldbindretaddr
.target
, start
});
223 // Store return value to the stack.
224 Vemit(env
).emit(store
{rret_data(), rvmsp()[TVOFF(m_data
)]});
225 Vemit(env
).emit(store
{rret_type(), rvmsp()[TVOFF(m_type
)]});
227 // Bind jump to the translation.
// NOTE(review): the call that receives the following arguments is not
// visible in this view.
229 ldbindretaddr
.target
,
231 cross_trace_regs_resumed()
// Allocate a fresh Vaddr bound to the stub's address, and queue a lea
// fixup for the originating instruction.
237 auto const addr
= env
.unit
.makeAddr();
238 assertx(env
.vaddrs
.size() == addr
);
239 env
.vaddrs
.push_back(stub
);
240 env
.leas
.push_back({ldbindretaddr
.instr
, addr
});
// Lower inlinestart/inlineend instructions to jmps and annotate blocks
// with their inlined-function parents (see the call in vasm_emit()).
244 void computeFrames(Vunit
& unit
);
246 ///////////////////////////////////////////////////////////////////////////////
250 ///////////////////////////////////////////////////////////////////////////////
// Construct the emission environment for `unit' targeting `text',
// with fixup metadata collected into `meta'.
// NOTE(review): the member-initializer list is not visible in this
// view.
252 inline Venv::Venv(Vunit
& unit
, Vtext
& text
, CGMeta
& meta
)
// Pre-size the Vaddr table so every Vaddr the unit has allocated so
// far has a slot.
257 vaddrs
.resize(unit
.next_vaddr
);
// Record the inline frame stack active at `addr', so the published
// metadata can attribute this address to its chain of inlined frames.
260 inline void Venv::record_inline_stack(TCA addr
) {
261 // Do not record stack if we are not inlining or the code is unreachable.
262 if (frame
<= 0) return;
264 // Do not record stack if all frames are already published.
265 if (pending_frames
== 0) return;
267 assertx(pending_frames
> 0);
// Walk up past the pending (unpublished) frames to find the newest
// already-published ancestor.
268 auto pubFrame
= frame
;
269 for (auto i
= pending_frames
; i
> 0; --i
) {
270 assertx(pubFrame
!= Vframe::Root
);
271 pubFrame
= unit
.frames
[pubFrame
].parent
;
// Shift the Vframe id down by one to get the IFrame id; the root frame
// maps to kRootIFrameID (same convention as vasm_emit()'s frame loop).
274 pubFrame
= pubFrame
!= Vframe::Root
275 ? pubFrame
- 1 : kRootIFrameID
;
// For func-entry SrcKeys, use the entry offset as the call offset.
277 auto const sk
= origin
->marker().sk();
278 auto const callOff
= sk
.funcEntry() ? sk
.entryOffset() : sk
.offset();
280 assertx(frame
!= pubFrame
);
281 assertx(origin
->marker().fp()->inst()->is(BeginInlining
));
// NOTE(review): the call that receives the following arguments is not
// visible in this view (presumably it appends the IStack record).
283 addr
, IStack
{frame
- 1, pubFrame
, callOff
});
// Emit the Vunit's blocks into `text' using the arch-specific Vemit
// backend: lays out blocks, emits each instruction (arch-independent
// handler first, backend otherwise), then patches addresses and
// records fixup metadata into `fixups'.
// NOTE(review): several original lines are missing from this view; see
// the NOTE(review) markers below where structure is visibly truncated.
286 template<class Vemit
>
287 void vasm_emit(Vunit
& unit
, Vtext
& text
, CGMeta
& fixups
,
289 using namespace vasm_detail
;
291 // Lower inlinestart and inlineend instructions to jmps, and annotate blocks
292 // with inlined function parents
293 if (unit
.needsFramesComputed()) computeFrames(unit
);
295 Venv env
{ unit
, text
, fixups
};
296 env
.addrs
.resize(unit
.blocks
.size());
// Decide the order in which blocks are emitted.
298 auto labels
= layoutBlocks(unit
);
300 IRMetadataUpdater
irmu(env
, asm_info
);
// Start address of the text area a given block is emitted into.
302 auto const area_start
= [&] (Vlabel b
) {
303 auto area
= unit
.blocks
[b
].area_idx
;
304 return text
.area(area
).start
;
307 // We don't want to put nops in Vunits representing stubs, and those Vunits
308 // don't have a context set.
309 auto const nop_interval
=
310 unit
.context
? RuntimeOption::EvalJitNopInterval
: uint32_t{0};
311 auto nop_counter
= uint32_t{0};
// Main emission loop over the laid-out blocks.
313 for (int i
= 0, n
= labels
.size(); i
< n
; ++i
) {
314 assertx(checkBlockEnd(unit
, labels
[i
]));
317 auto& block
= unit
.blocks
[b
];
// Point env at this block's code area and record its start address.
319 env
.cb
= &text
.area(block
.area_idx
).code
;
320 env
.addrs
[b
] = env
.cb
->frontier();
321 env
.framestart
= env
.cb
->frontier();
322 env
.frame
= block
.frame
;
323 env
.pending_frames
= std::max
<int32_t>(block
.pending_frames
, 0);
325 { // Compute the next block we will emit into the current area.
326 auto const cur_start
= area_start(labels
[i
]);
328 while (j
< labels
.size() &&
329 cur_start
!= area_start(labels
[j
])) {
332 env
.next
= j
< labels
.size() ? labels
[j
] : Vlabel(unit
.blocks
.size());
336 // We'll replace exception edges to empty catch blocks with the catch
337 // helper unique stub.
338 if (is_empty_catch(block
)) continue;
340 for (auto& inst
: block
.code
) {
341 irmu
.register_inst(inst
);
342 env
.origin
= inst
.origin
;
344 check_nop_interval
<Vemit
>(env
, inst
, nop_counter
, nop_interval
);
// Try the arch-independent emitter first; fall back to the backend.
// NOTE(review): the switch enclosing these macro-generated cases is
// not visible in this view.
347 #define O(name, imms, uses, defs) \
349 if (emit(env, inst.name##_)) break; \
350 Vemit(env).emit(inst.name##_); \
357 if (block
.frame
!= -1) record_frame(env
);
358 irmu
.register_block_end();
// Post-pass: emit stubs, veneers, and literals, then patch addresses.
361 emitLdBindRetAddrStubs
<Vemit
>(env
);
363 Vemit::emitVeneers(env
);
365 Vemit::handleLiterals(env
);
367 // Bind any Vaddrs that correspond to Vlabels.
368 for (auto const& p
: env
.pending_vaddrs
) {
369 assertx(env
.addrs
[p
.target
]);
370 env
.vaddrs
[p
.vaddr
] = env
.addrs
[p
.target
];
373 // Retarget smashable binds.
374 Vemit::retargetBinds(env
);
376 // Patch up jump targets and friends.
379 // Register catch blocks.
380 for (auto& p
: env
.catches
) register_catch_block(env
, p
);
382 // Register inline frames.
383 for (auto& f
: unit
.frames
) {
384 if (f
.parent
== Vframe::Top
) continue; // skip the top frame
// Vframe ids shift down by one to IFrame ids; root is special-cased.
385 auto const parent
= f
.parent
!= Vframe::Root
386 ? f
.parent
- 1 : kRootIFrameID
;
387 fixups
.inlineFrames
.emplace_back(
388 IFrame
{f
.func
, f
.callOff
, f
.sbToRootSbOff
.offset
, parent
});
391 // Register inline stacks.
392 fixups
.inlineStacks
= std::move(env
.stacks
);
// Optionally pad the main code area to its full size.
394 if (unit
.padding
) Vemit::pad(text
.main().code
);
399 ///////////////////////////////////////////////////////////////////////////////