/*
   +----------------------------------------------------------------------+
   | HipHop for PHP                                                       |
   +----------------------------------------------------------------------+
   | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com)  |
   +----------------------------------------------------------------------+
   | This source file is subject to version 3.01 of the PHP license,      |
   | that is bundled with this package in the file LICENSE, and is        |
   | available through the world-wide-web at the following url:           |
   | http://www.php.net/license/3_01.txt                                  |
   | If you did not receive a copy of the PHP license and are unable to   |
   | obtain it through the world-wide-web, please send a note to          |
   | license@php.net so we can mail you a copy immediately.               |
   +----------------------------------------------------------------------+
*/
#include "hphp/runtime/vm/jit/service-requests.h"

#include "hphp/runtime/vm/jit/types.h"
#include "hphp/runtime/vm/jit/abi.h"
#include "hphp/runtime/vm/jit/align.h"
#include "hphp/runtime/vm/jit/stack-offsets.h"
#include "hphp/runtime/vm/jit/stub-alloc.h"
#include "hphp/runtime/vm/jit/tc.h"
#include "hphp/runtime/vm/jit/translator-inline.h"
#include "hphp/runtime/vm/jit/unique-stubs.h"
#include "hphp/runtime/vm/jit/vasm-gen.h"
#include "hphp/runtime/vm/jit/vasm-instr.h"
#include "hphp/runtime/vm/jit/vasm-unit.h"
#include "hphp/runtime/vm/resumable.h"

#include "hphp/util/arch.h"
#include "hphp/util/data-block.h"
#include "hphp/util/trace.h"

#include "hphp/vixl/a64/macro-assembler-a64.h"
#include "hphp/vixl/a64/disasm-a64.h"

#include "hphp/ppc64-asm/decoded-instr-ppc64.h"

#include <folly/Optional.h>
namespace HPHP { namespace jit { namespace svcreq {

///////////////////////////////////////////////////////////////////////////////

TRACE_SET_MOD(servicereq);

///////////////////////////////////////////////////////////////////////////////

namespace detail {

///////////////////////////////////////////////////////////////////////////////
/*
 * Service request stub emitter.
 *
 * Emit a service request stub of type `sr' at `start' in `cb'.
 */
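/*
 * A minimal sketch of the x64 code this produces (register names below are
 * symbolic; the actual assignments come from the service-request ABI in
 * abi.h, and the offset is purely illustrative):
 *
 *   lea -0x30(%rvmfp), %rvmsp       // sync rvmsp, only if spOff is given
 *   mov $arg0, %r_svcreq_arg0       // one mov/lea/setcc per argument
 *   lea stub(%rip), %r_svcreq_stub  // stub address (0 if persistent)
 *   mov $req, %r_svcreq_req         // the ServiceRequest code
 *   jmp handleSRHelper              // enter the runtime
 */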
void emit_svcreq(CodeBlock& cb,
                 DataBlock& data,
                 CGMeta& meta,
                 TCA start,
                 bool persist,
                 folly::Optional<FPInvOffset> spOff,
                 ServiceRequest sr,
                 const ArgVec& argv) {
  FTRACE(2, "svcreq @{} {}(", start, to_name(sr));

  auto const is_reused = start != cb.frontier();

  if (!is_reused) cb.assertCanEmit(stub_size());
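
  // Reused stubs are patched in place; fresh code at the frontier is written
  // through the block's destination address, which may differ from `start'
  // if the block stages its bytes in a separate writable buffer.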
  CodeBlock stub;
  auto const realAddr = is_reused ? start : cb.toDestAddress(start);
  stub.init(start, realAddr, stub_size(), "svcreq_stub");
  {
    Vauto vasm{stub, stub, data, meta};
    auto& v = vasm.main();

    // If we have an spOff, materialize rvmsp() so that handleSRHelper() can
    // do a VM reg sync.  (When we don't have an spOff, the caller of the
    // service request was responsible for making sure rvmsp already
    // contained the top of the stack.)
    if (spOff) {
      v << lea{rvmfp()[-cellsToBytes(spOff->offset)], rvmsp()};
    }

    auto live_out = leave_trace_regs();

    assertx(argv.size() <= kMaxArgs);

    // Pick up CondCode arguments first---vasm may optimize immediate loads
    // into operations which clobber status flags.
    for (auto i = 0; i < argv.size(); ++i) {
      auto const& arg = argv[i];
      if (arg.kind != Arg::Kind::CondCode) continue;

      FTRACE(2, "c({}), ", cc_names[arg.cc]);
      v << setcc{arg.cc, r_svcreq_sf(), rbyte(r_svcreq_arg(i))};
    }
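
    // Now materialize the remaining (non-flag) arguments, each into its
    // dedicated service-request argument register.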
    for (auto i = 0; i < argv.size(); ++i) {
      auto const& arg = argv[i];
      auto const r = r_svcreq_arg(i);

      switch (arg.kind) {
        case Arg::Kind::Immed:
          FTRACE(2, "{}, ", arg.imm);
          v << copy{v.cns(arg.imm), r};
          break;
        case Arg::Kind::Address:
          FTRACE(2, "{}(%rip), ", arg.imm);
          v << leap{reg::rip[arg.imm], r};
          break;
        case Arg::Kind::CondCode:
          break;
      }
    }

    FTRACE(2, ") : stub@");

    if (persist) {
      FTRACE(2, "<none>");
      v << copy{v.cns(0), r_svcreq_stub()};
    } else {
      FTRACE(2, "{}", stub.base());
      v << leap{reg::rip[int64_t(stub.base())], r_svcreq_stub()};
    }
    v << copy{v.cns(sr), r_svcreq_req()};
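
    // handleSRHelper() reads the request type out of r_svcreq_req() and, for
    // ephemeral stubs, the stub's own address out of r_svcreq_stub(), which
    // lets the runtime locate (and eventually reclaim) the stub.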
    live_out |= r_svcreq_stub();
    live_out |= r_svcreq_req();

    v << jmpi{tc::ustubs().handleSRHelper, live_out};

    // We pad ephemeral stubs unconditionally.  This is required for
    // correctness by the x64 code relocator.
    vasm.unit().padding = !persist;
  }

  if (!is_reused) cb.skip(stub.used());
}

///////////////////////////////////////////////////////////////////////////////

}

///////////////////////////////////////////////////////////////////////////////
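
/*
 * A bind-jmp stub: the first time it runs, the runtime translates `target'
 * and smashes the jump at `jmp' to point directly at the new translation,
 * so the stub is only ever executed once per smashable jump.
 */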
TCA emit_bindjmp_stub(CodeBlock& cb, DataBlock& data, CGMeta& fixups,
                      FPInvOffset spOff,
                      TCA jmp, SrcKey target, TransFlags trflags) {
  return emit_ephemeral(
    cb,
    data,
    fixups,
    allocTCStub(cb, &fixups),
    target.resumeMode() != ResumeMode::None
      ? folly::none
      : folly::make_optional(spOff),
    REQ_BIND_JMP,
    jmp,
    target.toAtomicInt(),
    trflags.packed
  );
}

TCA emit_bindaddr_stub(CodeBlock& cb, DataBlock& data, CGMeta& fixups,
                       FPInvOffset spOff,
                       TCA* addr, SrcKey target, TransFlags trflags) {
  // Right now it's possible that addr isn't PIC addressable, as it may be
  // into the heap (SSwitchMap binds addresses directly into its heap memory,
  // see #10347945).  Passing a TCA generates an RIP relative address which
  // can be handled by the relocation logic, while a TCA* will generate an
  // immediate address which will not be remapped.
  if (deltaFits((TCA)addr - cb.frontier(), sz::dword)) {
    return emit_ephemeral(
      cb,
      data,
      fixups,
      allocTCStub(cb, &fixups),
      target.resumeMode() != ResumeMode::None
        ? folly::none
        : folly::make_optional(spOff),
      REQ_BIND_ADDR,
      (TCA)addr,  // needs to be RIP relative so that we can relocate it
      target.toAtomicInt(),
      trflags.packed
    );
  }

  return emit_ephemeral(
    cb,
    data,
    fixups,
    allocTCStub(cb, &fixups),
    target.resumeMode() != ResumeMode::None
      ? folly::none
      : folly::make_optional(spOff),
    REQ_BIND_ADDR,
    addr,
    target.toAtomicInt(),
    trflags.packed
  );
}

TCA emit_retranslate_stub(CodeBlock& cb, DataBlock& data, CGMeta& fixups,
                          FPInvOffset spOff,
                          SrcKey target, TransFlags trflags) {
  return emit_persistent(
    cb,
    data,
    fixups,
    target.resumeMode() != ResumeMode::None
      ? folly::none
      : folly::make_optional(spOff),
    REQ_RETRANSLATE,
    target.offset(),
    trflags.packed
  );
}

TCA emit_retranslate_opt_stub(CodeBlock& cb, DataBlock& data, CGMeta& fixups,
                              FPInvOffset spOff, SrcKey sk) {
  return emit_persistent(
    cb,
    data,
    fixups,
    sk.resumeMode() != ResumeMode::None
      ? folly::none
      : folly::make_optional(spOff),
    REQ_RETRANSLATE_OPT,
    sk.toAtomicInt()
  );
}

///////////////////////////////////////////////////////////////////////////////
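
/*
 * Recover the spOff that emit_svcreq() encoded into a stub by decoding the
 * stub's first instruction (the lea/add/sub that materializes rvmsp).
 */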
FPInvOffset extract_spoff(TCA stub) {
  switch (arch()) {
    case Arch::X64: {
      HPHP::jit::x64::DecodedInstruction instr(stub);

      // If it's not a lea, vasm optimized a lea{rvmfp, rvmsp} to a mov, so
      // the offset was 0.
      if (!instr.isLea()) return FPInvOffset{0};

      auto const offBytes = safe_cast<int32_t>(instr.offset());
      always_assert((offBytes % sizeof(Cell)) == 0);
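      // (Illustration: a stub emitted with spOff == 3 begins with
      // `lea -0x30(%rvmfp), %rvmsp' since Cells are 16 bytes, so offBytes
      // is -48 and we recover FPInvOffset{3} here.)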
      return FPInvOffset{-(offBytes / int32_t{sizeof(Cell)})};
    }
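
    // On ARM, vasm materializes the offset either as a single add/sub with
    // an immediate or, when the offset doesn't fit the immediate field, as a
    // movn/movz followed by a shifted add/sub; decode both shapes.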
    case Arch::ARM: {
      auto instr = reinterpret_cast<vixl::Instruction*>(stub);

      if (instr->IsAddSubImmediate()) {
        auto const offBytes = safe_cast<int32_t>(instr->ImmAddSub());
        always_assert((offBytes % sizeof(Cell)) == 0);

        if (instr->Mask(vixl::AddSubImmediateMask) == vixl::SUB_w_imm ||
            instr->Mask(vixl::AddSubImmediateMask) == vixl::SUB_x_imm) {
          return FPInvOffset{offBytes / int32_t{sizeof(Cell)}};
        } else if (instr->Mask(vixl::AddSubImmediateMask) == vixl::ADD_w_imm ||
                   instr->Mask(vixl::AddSubImmediateMask) == vixl::ADD_x_imm) {
          return FPInvOffset{-(offBytes / int32_t{sizeof(Cell)})};
        }
      } else if (instr->IsMovn()) {
        auto next = instr->NextInstruction();
        always_assert(
          next->Mask(vixl::AddSubShiftedMask) == vixl::ADD_w_shift ||
          next->Mask(vixl::AddSubShiftedMask) == vixl::ADD_x_shift
        );
        auto const offBytes = safe_cast<int32_t>(~instr->ImmMoveWide());
        always_assert((offBytes % sizeof(Cell)) == 0);
        return FPInvOffset{-(offBytes / int32_t{sizeof(Cell)})};
      } else if (instr->IsMovz()) {
        auto next = instr->NextInstruction();
        always_assert(
          next->Mask(vixl::AddSubShiftedMask) == vixl::SUB_w_shift ||
          next->Mask(vixl::AddSubShiftedMask) == vixl::SUB_x_shift
        );
        auto const offBytes = safe_cast<int32_t>(instr->ImmMoveWide());
        always_assert((offBytes % sizeof(Cell)) == 0);
        return FPInvOffset{offBytes / int32_t{sizeof(Cell)}};
      }

      always_assert(false && "Expected an instruction that offsets SP");
    }

    case Arch::PPC64: {
      ppc64_asm::DecodedInstruction instr(stub);
      if (!instr.isSpOffsetInstr()) {
        return FPInvOffset{0};
      }
      auto const offBytes = safe_cast<int32_t>(instr.offset());
      return FPInvOffset{-(offBytes / int32_t{sizeof(Cell)})};
    }
  }
  not_reached();
}

///////////////////////////////////////////////////////////////////////////////

}}}