2 +----------------------------------------------------------------------+
| HipHop for PHP                                                       |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
17 #include "hphp/runtime/vm/jit/irlower-internal.h"
19 #include "hphp/runtime/base/runtime-option.h"
21 #include "hphp/runtime/vm/jit/arg-group.h"
22 #include "hphp/runtime/vm/jit/bc-marker.h"
23 #include "hphp/runtime/vm/jit/containers.h"
24 #include "hphp/runtime/vm/jit/ir-instruction.h"
25 #include "hphp/runtime/vm/jit/memory-effects.h"
26 #include "hphp/runtime/vm/jit/native-calls.h"
27 #include "hphp/runtime/vm/jit/vasm-gen.h"
28 #include "hphp/runtime/vm/jit/vasm-instr.h"
29 #include "hphp/runtime/vm/jit/vasm-reg.h"
31 #include "hphp/util/match.h"
32 #include "hphp/util/immed.h"
33 #include "hphp/util/trace.h"
35 namespace HPHP
{ namespace jit
{ namespace irlower
{
37 TRACE_SET_MOD(irlower
);
39 ///////////////////////////////////////////////////////////////////////////////
43 ///////////////////////////////////////////////////////////////////////////////
46 * Prepare `arg' for a call by shifting or zero-extending as appropriate, then
47 * append its Vreg to `vargs'.
49 void prepareArg(const ArgDesc
& arg
,
52 VcallArgs::Spills
* spills
) {
53 assertx(IMPLIES(arg
.aux(), arg
.kind() == ArgDesc::Kind::Reg
));
56 case ArgDesc::Kind::IndRet
: {
57 auto const tmp
= v
.makeReg();
58 v
<< lea
{arg
.srcReg()[arg
.disp().l()], tmp
};
63 case ArgDesc::Kind::Reg
: {
64 auto reg
= arg
.srcReg();
65 if (arg
.isZeroExtend()) {
68 v
<< movzbq
{arg
.srcReg(), reg
};
70 } else if (auto const aux
= arg
.aux()) {
71 // DataType is signed. We're using movzbq here to clear out the upper 7
72 // bytes of the register, not to actually extend the type value.
73 auto const extended
= v
.makeReg();
74 auto const result
= v
.makeReg();
75 v
<< movzbq
{arg
.srcReg(), extended
};
78 v
.cns(auxToMask(*aux
)),
82 vargs
.push_back(result
);
89 case ArgDesc::Kind::Imm
:
90 vargs
.push_back(v
.cns(arg
.imm().q()));
93 case ArgDesc::Kind::TypeImm
:
94 vargs
.push_back(v
.cns(arg
.typeImm()));
97 case ArgDesc::Kind::Addr
: {
98 auto tmp
= v
.makeReg();
99 v
<< lea
{arg
.srcReg()[arg
.disp().l()], tmp
};
100 vargs
.push_back(tmp
);
104 case ArgDesc::Kind::DataPtr
: {
105 auto tmp
= v
.makeReg();
106 v
<< lead
{reinterpret_cast<void*>(arg
.imm().q()), tmp
};
107 vargs
.push_back(tmp
);
111 case ArgDesc::Kind::SpilledTV
: {
113 assertx(arg
.srcReg2().isValid());
114 spills
->emplace(vargs
.size(), arg
.srcReg2());
115 vargs
.push_back(arg
.srcReg());
121 ///////////////////////////////////////////////////////////////////////////////
125 ///////////////////////////////////////////////////////////////////////////////
127 Fixup
makeFixup(const BCMarker
& marker
, SyncOptions sync
) {
128 assertx(marker
.valid());
129 // We can get here if we are memory profiling, since we override the
130 // normal sync settings and sync anyway.
132 sync
== SyncOptions::Sync
||
133 RuntimeOption::EvalJitForceVMRegSync
||
134 RuntimeOption::HHProfEnabled
137 // Stublogue code operates on behalf of the caller, so it needs an indirect
138 // fixup to obtain the real savedRip from the native frame. The stack base
139 // of stublogues start at the fixup offset of their callers, so the SP offset
140 // of the marker represents the additional SP offset that needs to be added.
141 if (marker
.stublogue()) return Fixup::indirect(0, marker
.fixupBcSPOff());
143 // The rest of the prologue cannot throw exceptions, but may execute C++ code
144 // that may need a fixup. Let it point to the first opcode of the function.
145 if (marker
.prologue()) return Fixup::direct(0, marker
.fixupBcSPOff());
147 auto const bcOff
= marker
.fixupBcOff();
148 return Fixup::direct(bcOff
, marker
.fixupBcSPOff());
151 void cgCallHelper(Vout
& v
, IRLS
& env
, CallSpec call
, const CallDest
& dstInfo
,
152 SyncOptions sync
, const ArgGroup
& args
) {
153 assertx(call
.verifySignature(dstInfo
, args
.argTypes()));
154 auto const inst
= args
.inst();
155 VregList vIndRetArgs
, vargs
, vSimdArgs
, vStkArgs
;
156 VcallArgs::Spills vArgSpills
, vStkSpills
;
158 for (size_t i
= 0; i
< args
.numIndRetArgs(); ++i
) {
159 prepareArg(args
.indRetArg(i
), v
, vIndRetArgs
, nullptr);
161 for (size_t i
= 0; i
< args
.numGpArgs(); ++i
) {
162 prepareArg(args
.gpArg(i
), v
, vargs
, &vArgSpills
);
164 for (size_t i
= 0; i
< args
.numSimdArgs(); ++i
) {
165 prepareArg(args
.simdArg(i
), v
, vSimdArgs
, nullptr);
167 for (size_t i
= 0; i
< args
.numStackArgs(); ++i
) {
168 prepareArg(args
.stkArg(i
), v
, vStkArgs
, &vStkSpills
);
171 // If it is valid to sync the VMRegs within this call, we must track the load
172 // in memory-effects.
173 assertx(IMPLIES(sync
!= SyncOptions::None
, inst
->maySyncVMRegsWithSources()));
175 auto const syncFixup
= [&] {
176 if (RuntimeOption::HHProfEnabled
||
177 RuntimeOption::EvalJitForceVMRegSync
||
178 sync
!= SyncOptions::None
) {
179 // If we are profiling the heap, we always need to sync because regs need
180 // to be correct during allocations no matter what.
181 return makeFixup(inst
->marker(), sync
);
183 return Fixup::none();
187 bool nothrow
= false;
188 auto const taken
= inst
->taken();
189 auto const has_catch
= taken
&& taken
->isCatch();
190 auto const may_raise
= inst
->mayRaiseErrorWithSources();
191 assertx(IMPLIES(may_raise
, has_catch
));
192 auto const do_catch
= has_catch
&& may_raise
;
196 inst
->is(InterpOne
) || sync
!= SyncOptions::None
,
197 "cgCallHelper called with None but inst has a catch block: {}\n",
201 taken
->catchMarker() == inst
->marker(),
202 "Catch trace doesn't match fixup:\n"
207 taken
->catchMarker().show(),
208 inst
->marker().show()
211 targets
[0] = v
.makeBlock();
212 targets
[1] = env
.labels
[taken
];
214 // The current instruction claims to not throw. Register a null catch trace
215 // to indicate this to the unwinder.
220 if (dstInfo
.reg0
.isValid()) {
221 dstRegs
.push_back(dstInfo
.reg0
);
222 if (dstInfo
.reg1
.isValid()) {
223 dstRegs
.push_back(dstInfo
.reg1
);
227 auto const argsId
= v
.makeVcallArgs({
229 std::move(vSimdArgs
),
231 std::move(vIndRetArgs
),
232 std::move(vArgSpills
),
233 std::move(vStkSpills
)
235 auto const dstId
= v
.makeTuple(std::move(dstRegs
));
238 v
<< vinvoke
{call
, argsId
, dstId
, {targets
[0], targets
[1]},
239 syncFixup
, dstInfo
.type
};
242 v
<< vcall
{call
, argsId
, dstId
, syncFixup
, dstInfo
.type
, nothrow
};
246 void cgCallNative(Vout
& v
, IRLS
& env
, const IRInstruction
* inst
) {
247 using namespace NativeCalls
;
248 always_assert(CallMap::hasInfo(inst
->op()));
249 auto const& info
= CallMap::info(inst
->op());
251 ArgGroup args
= toArgGroup(info
, env
.locs
, inst
);
253 auto const dest
= [&]() -> CallDest
{
257 case DestType::Indirect
:
258 return kIndirectDest
;
261 return callDestTV(env
, inst
);
265 return callDest(env
, inst
);
270 cgCallHelper(v
, env
, info
.func
.call
, dest
, info
.sync
, args
);
273 Vreg
emitHashInt64(IRLS
& env
, const IRInstruction
* inst
, Vreg arr
) {
274 auto& v
= vmain(env
);
275 auto const hash
= v
.makeReg();
276 if (arch() == Arch::X64
) {
277 #if defined(USE_HWCRC) && defined(__SSE4_2__)
278 v
<< crc32q
{arr
, v
.cns(0), hash
};
285 CallSpec::direct(hash_int64
),
288 argGroup(env
, inst
).reg(arr
)
293 ///////////////////////////////////////////////////////////////////////////////