Add mode to EndCatch
hphp/runtime/vm/jit/irgen.cpp

/*
   +----------------------------------------------------------------------+
   | HipHop for PHP                                                       |
   +----------------------------------------------------------------------+
   | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com)  |
   +----------------------------------------------------------------------+
   | This source file is subject to version 3.01 of the PHP license,      |
   | that is bundled with this package in the file LICENSE, and is        |
   | available through the world-wide-web at the following url:           |
   | http://www.php.net/license/3_01.txt                                  |
   | If you did not receive a copy of the PHP license and are unable to   |
   | obtain it through the world-wide-web, please send a note to          |
   | license@php.net so we can mail you a copy immediately.               |
   +----------------------------------------------------------------------+
*/

#include "hphp/runtime/vm/jit/irgen.h"

#include "hphp/runtime/vm/jit/irgen-exit.h"
#include "hphp/runtime/vm/jit/irgen-control.h"
#include "hphp/runtime/vm/jit/cfg.h"
#include "hphp/runtime/vm/jit/dce.h"
#include "hphp/runtime/vm/jit/prof-data.h"

#include "hphp/runtime/vm/jit/irgen-internal.h"

namespace HPHP { namespace jit { namespace irgen {

namespace {

//////////////////////////////////////////////////////////////////////

void check_catch_stack_state(IRGS& env, const IRInstruction* inst) {
  always_assert_flog(
    !env.irb->fs().stackModified(),
    "catch block used after writing to stack\n"
    "     inst: {}\n",
    inst->toString()
  );
}

//////////////////////////////////////////////////////////////////////

}

uint64_t curProfCount(const IRGS& env) {
  auto const tid = env.profTransID;
  assertx(tid == kInvalidTransID ||
          (env.region != nullptr && profData() != nullptr));
  return env.profFactor *
    (tid != kInvalidTransID ? env.region->blockProfCount(tid) : 1);
}

uint64_t calleeProfCount(const IRGS& env, const RegionDesc& calleeRegion) {
  auto const tid = calleeRegion.entry()->id();
  if (tid == kInvalidTransID) return 0;
  return env.profFactor * calleeRegion.blockProfCount(tid);
}

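/*
 * Illustrative note (added commentary, not from the original source):
 * env.profFactor is a scale applied to raw profile counts for the current
 * translation context.  For example, assuming a callee region whose entry
 * block has a profile count of 200 and an env.profFactor of 0.5,
 * calleeProfCount() above would return 100.
 */
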
//////////////////////////////////////////////////////////////////////

/*
 * This is called when each instruction is generated during initial IR
 * creation.
 *
 * This function inspects the instruction and prepares it for potentially being
 * inserted to the instruction stream.  It then calls IRBuilder optimizeInst,
 * which may or may not insert it depending on a variety of factors.
 */
namespace detail {
SSATmp* genInstruction(IRGS& env, IRInstruction* inst) {
  if (inst->mayRaiseError() && inst->taken()) {
    FTRACE(1, "{}: asserting about catch block\n", inst->toString());
    /*
     * This assertion means you manually created a catch block, but didn't put
     * an exceptionStackBoundary after an update to the stack.  Even if you're
     * manually creating catches we require this just to make sure you're not
     * doing it on accident.
     */
    check_catch_stack_state(env, inst);
  }

  if (inst->mayRaiseError() && !inst->taken()) {
    FTRACE(1, "{}: creating catch block\n", inst->toString());
    /*
     * If you hit this assertion, you're gen'ing an IR instruction that can
     * throw after gen'ing one that could write to the evaluation stack.  This
     * is usually not what HHBC opcodes do, and could be a bug.  See the
     * documentation for exceptionStackBoundary in the header for more
     * information.
     */
    check_catch_stack_state(env, inst);
    inst->setTaken(
      create_catch_block(
        env,
        []{},
        inst->is(Call) || inst->is(CallUnpack)
          ? EndCatchData::SwitchMode
          : EndCatchData::UnwindOnly
      )
    );
  }

  if (inst->mayRaiseError()) {
    assertx(inst->taken() && inst->taken()->isCatch());
  }

  return env.irb->optimizeInst(inst, IRBuilder::CloneFlag::Yes, nullptr);
}
}

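/*
 * Sketch (added commentary, not part of the original file) of the contract
 * that genInstruction enforces through check_catch_stack_state(): an emitter
 * that writes to the evaluation stack must declare a boundary before gen'ing
 * anything that may raise, so the automatically created catch block observes
 * a consistent stack.  Schematically:
 *
 *   push(env, someValue);               // modifies the eval stack
 *   env.irb->exceptionStackBoundary();  // stack is now in sync for catches
 *   gen(env, SomeThrowingOp, ...);      // auto-created catch block is safe
 *
 * SomeThrowingOp and someValue are placeholders, not real opcodes or values.
 */
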
//////////////////////////////////////////////////////////////////////

void incProfCounter(IRGS& env, TransID transId) {
  gen(env, IncProfCounter, TransIDData(transId));
}

void checkCold(IRGS& env, TransID transId) {
  gen(env, CheckCold, makeExitOpt(env), TransIDData(transId));
}

void ringbufferEntry(IRGS& env, Trace::RingBufferType t, SrcKey sk, int level) {
  if (!Trace::moduleEnabled(Trace::ringbuffer, level)) return;
  gen(env, RBTraceEntry, RBEntryData(t, sk));
}

void ringbufferMsg(IRGS& env,
                   Trace::RingBufferType t,
                   const StringData* msg,
                   int level) {
  if (!Trace::moduleEnabled(Trace::ringbuffer, level)) return;
  gen(env, RBTraceMsg, RBMsgData(t, msg));
}

void prepareEntry(IRGS& env) {
  /*
   * If assertions are on, before we do anything, each region makes a call to a
   * C++ function that checks the state of everything.
   */
  if (RuntimeOption::EvalHHIRGenerateAsserts) {
    auto const data = IRSPRelOffsetData { spOffBCFromIRSP(env) };
    gen(env, DbgTraceCall, data, fp(env), sp(env));
  }

  /*
   * We automatically hoist a load of the context to the beginning of every
   * region.  The reason is that it's trivially CSEable, so we might as well
   * make it available everywhere.  If nothing uses it, it'll just be DCE'd.
   */
  ldCtx(env);
}

void endRegion(IRGS& env) {
  auto const curSk = curSrcKey(env);
  if (!instrAllowsFallThru(curSk.op())) return; // nothing to do here

  auto const nextSk = curSk.advanced(curUnit(env));
  endRegion(env, nextSk);
}

void endRegion(IRGS& env, SrcKey nextSk) {
  FTRACE(1, "------------------- endRegion ---------------------------\n");
  if (!fp(env)) {
    // The function already returned.  There's no reason to generate code to
    // try to go to the next part of it.
    return;
  }
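  // Added commentary: the ReqBindJmp below ends the region by jumping to a
  // service-request stub for nextSk; once a translation for nextSk exists,
  // that jump is smashed to target it directly.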
  auto const data = ReqBindJmpData {
    nextSk,
    spOffBCFromFP(env),
    spOffBCFromIRSP(env),
    TransFlags{}
  };
  gen(env, ReqBindJmp, data, sp(env), fp(env));
}

void sealUnit(IRGS& env) {
  mandatoryDCE(env.unit);
}

///////////////////////////////////////////////////////////////////////////////

Type publicTopType(const IRGS& env, BCSPRelOffset idx) {
  // It's logically const, because we're using DataTypeGeneric.
  return topType(const_cast<IRGS&>(env), idx, DataTypeGeneric);
}

Type predictedType(const IRGS& env, const Location& loc) {
  auto& fs = env.irb->fs();

  switch (loc.tag()) {
    case LTag::Stack:
      return fs.stack(offsetFromIRSP(env, loc.stackIdx())).predictedType;
    case LTag::Local:
      return fs.local(loc.localId()).predictedType;
    case LTag::MBase:
      return fs.mbase().predictedType;
    case LTag::CSlotCls:
      return fs.clsRefClsSlot(loc.clsRefClsSlot()).predictedType;
    case LTag::CSlotTS:
      return fs.clsRefTSSlot(loc.clsRefTSSlot()).predictedType;
  }
  not_reached();
}

Type provenType(const IRGS& env, const Location& loc) {
  auto& fs = env.irb->fs();

  switch (loc.tag()) {
    case LTag::Stack:
      return fs.stack(offsetFromIRSP(env, loc.stackIdx())).type;
    case LTag::Local:
      return fs.local(loc.localId()).type;
    case LTag::MBase:
      return fs.mbase().type;
    case LTag::CSlotCls:
      return fs.clsRefClsSlot(loc.clsRefClsSlot()).type;
    case LTag::CSlotTS:
      return fs.clsRefTSSlot(loc.clsRefTSSlot()).type;
  }
  not_reached();
}

///////////////////////////////////////////////////////////////////////////////

void endBlock(IRGS& env, Offset next) {
  if (!fp(env)) {
    // If there's no fp, we've already executed a RetCtrl or similar, so
    // there's no reason to try to jump anywhere now.
    return;
  }
  // Don't emit the jump if it would be unreachable.  This avoids
  // unreachable blocks appearing to be reachable, which would cause
  // translateRegion to process them.
  if (auto const curBlock = env.irb->curBlock()) {
    if (!curBlock->empty() && curBlock->back().isTerminal()) return;
  }
  jmpImpl(env, next);
}

void prepareForNextHHBC(IRGS& env,
                        const NormalizedInstruction* ni,
                        SrcKey newSk,
                        bool lastBcInst) {
  FTRACE(1, "------------------- prepareForNextHHBC ------------------\n");
  env.currentNormalizedInstruction = ni;

  always_assert_flog(
    IMPLIES(isInlining(env), !env.lastBcInst),
    "Tried to end trace while inlining."
  );

  always_assert_flog(
    IMPLIES(isInlining(env), !env.firstBcInst),
    "Inlining while still at the first region instruction."
  );

  always_assert(env.bcStateStack.size() >= env.inlineLevel + 1);
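  // Added commentary: bcStateStack holds one SrcKey state per frame (the
  // outermost frame plus one per active inline level).  The loop below
  // discards entries for inline frames we have already left; e.g. with
  // bcStateStack.size() == 2 and inlineLevel == 0, one stale callee entry
  // is popped so that back() refers to the frame newSk belongs to.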
  auto pops = env.bcStateStack.size() - 1 - env.inlineLevel;
  while (pops--) env.bcStateStack.pop_back();

  always_assert_flog(env.bcStateStack.back().func() == newSk.func(),
                     "Tried to update current SrcKey with a different func");

  env.bcStateStack.back().setOffset(newSk.offset());
  updateMarker(env);
  env.lastBcInst = lastBcInst;
  env.irb->exceptionStackBoundary();
  env.irb->resetCurIROff();
}

void finishHHBC(IRGS& env) {
  env.firstBcInst = false;
}

//////////////////////////////////////////////////////////////////////