Compute BCMarker fixup offsets lazily
hphp/runtime/vm/jit/irlower-internal.cpp
/*
   +----------------------------------------------------------------------+
   | HipHop for PHP                                                       |
   +----------------------------------------------------------------------+
   | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com)  |
   +----------------------------------------------------------------------+
   | This source file is subject to version 3.01 of the PHP license,      |
   | that is bundled with this package in the file LICENSE, and is        |
   | available through the world-wide-web at the following url:           |
   | http://www.php.net/license/3_01.txt                                  |
   | If you did not receive a copy of the PHP license and are unable to   |
   | obtain it through the world-wide-web, please send a note to          |
   | license@php.net so we can mail you a copy immediately.               |
   +----------------------------------------------------------------------+
*/

#include "hphp/runtime/vm/jit/irlower-internal.h"

#include "hphp/runtime/base/runtime-option.h"

#include "hphp/runtime/vm/jit/arg-group.h"
#include "hphp/runtime/vm/jit/bc-marker.h"
#include "hphp/runtime/vm/jit/containers.h"
#include "hphp/runtime/vm/jit/ir-instruction.h"
#include "hphp/runtime/vm/jit/memory-effects.h"
#include "hphp/runtime/vm/jit/native-calls.h"
#include "hphp/runtime/vm/jit/vasm-gen.h"
#include "hphp/runtime/vm/jit/vasm-instr.h"
#include "hphp/runtime/vm/jit/vasm-reg.h"

#include "hphp/util/match.h"
#include "hphp/util/immed.h"
#include "hphp/util/trace.h"

namespace HPHP { namespace jit { namespace irlower {

TRACE_SET_MOD(irlower);

///////////////////////////////////////////////////////////////////////////////

namespace {

///////////////////////////////////////////////////////////////////////////////

/*
 * Prepare `arg' for a call by shifting or zero-extending as appropriate, then
 * append its Vreg to `vargs'.
 */
void prepareArg(const ArgDesc& arg,
                Vout& v,
                VregList& vargs,
                VcallArgs::Spills* spills) {
  assertx(IMPLIES(arg.aux(), arg.kind() == ArgDesc::Kind::Reg));

  switch (arg.kind()) {
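    // IndRet: pass the address of the slot that will receive the callee's
    // indirectly-returned value.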
    case ArgDesc::Kind::IndRet: {
      auto const tmp = v.makeReg();
      v << lea{arg.srcReg()[arg.disp().l()], tmp};
      vargs.push_back(tmp);
      break;
    }

    case ArgDesc::Kind::Reg: {
      auto reg = arg.srcReg();
      if (arg.isZeroExtend()) {
        assertx(!arg.aux());
        reg = v.makeReg();
        v << movzbq{arg.srcReg(), reg};
        vargs.push_back(reg);
      } else if (auto const aux = arg.aux()) {
        // DataType is signed. We're using movzbq here to clear out the upper 7
        // bytes of the register, not to actually extend the type value.
        auto const extended = v.makeReg();
        auto const result = v.makeReg();
        v << movzbq{arg.srcReg(), extended};
        v << orq{
          extended,
          v.cns(auxToMask(*aux)),
          result,
          v.makeReg()
        };
        vargs.push_back(result);
      } else {
        vargs.push_back(reg);
      }
      break;
    }
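
    // Imm and TypeImm arguments are materialized as constants in the unit.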
    case ArgDesc::Kind::Imm:
      vargs.push_back(v.cns(arg.imm().q()));
      break;

    case ArgDesc::Kind::TypeImm:
      vargs.push_back(v.cns(arg.typeImm()));
      break;

    case ArgDesc::Kind::Addr: {
      auto tmp = v.makeReg();
      v << lea{arg.srcReg()[arg.disp().l()], tmp};
      vargs.push_back(tmp);
      break;
    }
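
    // DataPtr: the immediate is a pointer into static data, so emit it with
    // lead{} (presumably so it stays tracked as a data pointer) rather than
    // as a plain 64-bit constant.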
    case ArgDesc::Kind::DataPtr: {
      auto tmp = v.makeReg();
      v << lead{reinterpret_cast<void*>(arg.imm().q()), tmp};
      vargs.push_back(tmp);
      break;
    }
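
    // SpilledTV: a TypedValue passed via the stack. Push srcReg() as the
    // argument and record srcReg2() in `spills' so the call lowering can
    // spill both registers together as a single TypedValue.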
    case ArgDesc::Kind::SpilledTV: {
      assertx(spills);
      assertx(arg.srcReg2().isValid());
      spills->emplace(vargs.size(), arg.srcReg2());
      vargs.push_back(arg.srcReg());
      break;
    }
  }
}

///////////////////////////////////////////////////////////////////////////////

}

///////////////////////////////////////////////////////////////////////////////
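
/*
 * Build the Fixup for a call at `marker': the bytecode and stack offsets the
 * unwinder and fixup map need to reconstruct the VM register state if the
 * native helper syncs or throws.
 */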
Fixup makeFixup(const BCMarker& marker, SyncOptions sync) {
  assertx(marker.valid());
  // We can get here if we are memory profiling, since we override the
  // normal sync settings and sync anyway.
  always_assert(
    sync == SyncOptions::Sync ||
    RuntimeOption::EvalJitForceVMRegSync ||
    RuntimeOption::HHProfEnabled
  );

  // Stublogue code operates on behalf of the caller, so it needs an indirect
  // fixup to obtain the real savedRip from the native frame. The stack base
  // of a stublogue starts at the fixup offset of its caller, so the SP offset
  // of the marker represents the additional SP offset that needs to be added.
  if (marker.stublogue()) return Fixup::indirect(0, marker.fixupBcSPOff());

  // The rest of the prologue cannot throw exceptions, but may execute C++ code
  // that may need a fixup. Let it point to the first opcode of the function.
  if (marker.prologue()) return Fixup::direct(0, marker.fixupBcSPOff());

  auto const bcOff = marker.fixupBcOff();
  return Fixup::direct(bcOff, marker.fixupBcSPOff());
}
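
/*
 * Lower a call to the C++ helper described by `call': prepare the arguments,
 * attach the appropriate Fixup, and emit either a vcall or, when the
 * instruction has a catch block to unwind to, a vinvoke.
 *
 * A typical use looks like the following (`some_helper' and the argument
 * list are hypothetical):
 *
 *   cgCallHelper(v, env, CallSpec::direct(some_helper), callDest(env, inst),
 *                SyncOptions::Sync, argGroup(env, inst).ssa(0).imm(42));
 */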
void cgCallHelper(Vout& v, IRLS& env, CallSpec call, const CallDest& dstInfo,
                  SyncOptions sync, const ArgGroup& args) {
  assertx(call.verifySignature(dstInfo, args.argTypes()));
  auto const inst = args.inst();
  VregList vIndRetArgs, vargs, vSimdArgs, vStkArgs;
  VcallArgs::Spills vArgSpills, vStkSpills;

  for (size_t i = 0; i < args.numIndRetArgs(); ++i) {
    prepareArg(args.indRetArg(i), v, vIndRetArgs, nullptr);
  }
  for (size_t i = 0; i < args.numGpArgs(); ++i) {
    prepareArg(args.gpArg(i), v, vargs, &vArgSpills);
  }
  for (size_t i = 0; i < args.numSimdArgs(); ++i) {
    prepareArg(args.simdArg(i), v, vSimdArgs, nullptr);
  }
  for (size_t i = 0; i < args.numStackArgs(); ++i) {
    prepareArg(args.stkArg(i), v, vStkArgs, &vStkSpills);
  }

  // If it is valid to sync the VMRegs within this call, we must track the load
  // in memory-effects.
  assertx(IMPLIES(sync != SyncOptions::None, inst->maySyncVMRegsWithSources()));

  auto const syncFixup = [&] {
    if (RuntimeOption::HHProfEnabled ||
        RuntimeOption::EvalJitForceVMRegSync ||
        sync != SyncOptions::None) {
      // If we are profiling the heap, we always need to sync because regs need
      // to be correct during allocations no matter what.
      return makeFixup(inst->marker(), sync);
    }
    return Fixup::none();
  }();

  Vlabel targets[2];
  bool nothrow = false;
  auto const taken = inst->taken();
  auto const has_catch = taken && taken->isCatch();
  auto const may_raise = inst->mayRaiseErrorWithSources();
  assertx(IMPLIES(may_raise, has_catch));
  auto const do_catch = has_catch && may_raise;

  if (do_catch) {
    always_assert_flog(
      inst->is(InterpOne) || sync != SyncOptions::None,
      "cgCallHelper called with None but inst has a catch block: {}\n",
      *inst
    );
    always_assert_flog(
      taken->catchMarker() == inst->marker(),
      "Catch trace doesn't match fixup:\n"
      "Instruction: {}\n"
      "Catch trace: {}\n"
      "Fixup      : {}\n",
      inst->toString(),
      taken->catchMarker().show(),
      inst->marker().show()
    );
    targets[0] = v.makeBlock();
    targets[1] = env.labels[taken];
  } else {
    // The current instruction claims to not throw. Register a null catch trace
    // to indicate this to the unwinder.
    nothrow = true;
  }
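
  // Collect the destination registers for the call's return value; reg1 is
  // only used for two-register (e.g. TypedValue) returns.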
  VregList dstRegs;
  if (dstInfo.reg0.isValid()) {
    dstRegs.push_back(dstInfo.reg0);
    if (dstInfo.reg1.isValid()) {
      dstRegs.push_back(dstInfo.reg1);
    }
  }

  auto const argsId = v.makeVcallArgs({
    std::move(vargs),
    std::move(vSimdArgs),
    std::move(vStkArgs),
    std::move(vIndRetArgs),
    std::move(vArgSpills),
    std::move(vStkSpills)
  });
  auto const dstId = v.makeTuple(std::move(dstRegs));
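
  // vinvoke is a call with an unwind edge: execution continues at targets[0]
  // on a normal return and at the catch block targets[1] if the callee
  // throws. A plain vcall carries the `nothrow' hint instead.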
  if (do_catch) {
    v << vinvoke{call, argsId, dstId, {targets[0], targets[1]},
                 syncFixup, dstInfo.type};
    v = targets[0];
  } else {
    v << vcall{call, argsId, dstId, syncFixup, dstInfo.type, nothrow};
  }
}

void cgCallNative(Vout& v, IRLS& env, const IRInstruction* inst) {
  using namespace NativeCalls;
  always_assert(CallMap::hasInfo(inst->op()));
  auto const& info = CallMap::info(inst->op());

  ArgGroup args = toArgGroup(info, env.locs, inst);

  auto const dest = [&]() -> CallDest {
    switch (info.dest) {
      case DestType::None:
        return kVoidDest;
      case DestType::Indirect:
        return kIndirectDest;
      case DestType::TV:
      case DestType::SIMD:
        return callDestTV(env, inst);
      case DestType::SSA:
      case DestType::Byte:
      case DestType::Dbl:
        return callDest(env, inst);
    }
    not_reached();
  }();

  cgCallHelper(v, env, info.func.call, dest, info.sync, args);
}
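
/*
 * Hash the 64-bit value in `arr', using the hardware crc32 instruction when
 * it is available and the generic hash_int64 helper otherwise. Returns the
 * Vreg holding the result.
 */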
Vreg emitHashInt64(IRLS& env, const IRInstruction* inst, Vreg arr) {
  auto& v = vmain(env);
  auto const hash = v.makeReg();
  if (arch() == Arch::X64) {
#if defined(USE_HWCRC) && defined(__SSE4_2__)
    v << crc32q{arr, v.cns(0), hash};
    return hash;
#endif
  }
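  // Either we're not on x64 or hardware CRC support wasn't compiled in, so
  // call the generic helper instead.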
  cgCallHelper(
    v,
    env,
    CallSpec::direct(hash_int64),
    callDest(hash),
    SyncOptions::Sync,
    argGroup(env, inst).reg(arr)
  );
  return hash;
}

///////////////////////////////////////////////////////////////////////////////

}}}