1 //===-- DwarfEHPrepare - Prepare exception handling for code generation ---===//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This pass mulches exception handling code into a form adapted to code
11 // generation. Required if using dwarf exception handling.
13 //===----------------------------------------------------------------------===//
15 #define DEBUG_TYPE "dwarfehprepare"
16 #include "llvm/ADT/Statistic.h"
17 #include "llvm/Analysis/Dominators.h"
18 #include "llvm/CodeGen/Passes.h"
19 #include "llvm/Function.h"
20 #include "llvm/Instructions.h"
21 #include "llvm/IntrinsicInst.h"
22 #include "llvm/Module.h"
23 #include "llvm/Pass.h"
24 #include "llvm/Target/TargetLowering.h"
25 #include "llvm/Transforms/Utils/BasicBlockUtils.h"
26 #include "llvm/Transforms/Utils/PromoteMemToReg.h"
29 STATISTIC(NumLandingPadsSplit
, "Number of landing pads split");
30 STATISTIC(NumUnwindsLowered
, "Number of unwind instructions lowered");
31 STATISTIC(NumExceptionValuesMoved
, "Number of eh.exception calls moved");
32 STATISTIC(NumStackTempsIntroduced
, "Number of stack temporaries introduced");
35 class DwarfEHPrepare
: public FunctionPass
{
// Target lowering information, used to find the target's _Unwind_Resume
// equivalent and its calling convention.
36 const TargetLowering
*TLI
;
// NOTE(review): the jumps in the embedded original line numbers show that
// several member declarations are elided from this view; the code below
// references CompileFast, DT (a DominatorTree*), F (the Function being
// processed) and LandingPads (a BBSet) — confirm against the full file.
39 // The eh.exception intrinsic.
40 Function
*ExceptionValueIntrinsic
;
42 // _Unwind_Resume or the target equivalent.
43 Constant
*RewindFunction
;
45 // Dominator info is used when turning stack temporaries into registers.
47 DominanceFrontier
*DF
;
49 // The function we are running on.
52 // The landing pads for this function.
53 typedef SmallPtrSet
<BasicBlock
*, 8> BBSet
;
56 // Stack temporary used to hold eh.exception values.
57 AllocaInst
*ExceptionValueVar
;
// Pass phases, invoked in order from runOnFunction; each returns true if
// it changed the IR.
59 bool NormalizeLandingPads();
61 bool MoveExceptionValueCalls();
62 bool FinishStackTemporaries();
63 bool PromoteStackTemporaries();
// Helpers that materialize the exception value in a block, reusing an
// existing call/load at the block start when one is present.
65 Instruction
*CreateExceptionValueCall(BasicBlock
*BB
);
66 Instruction
*CreateValueLoad(BasicBlock
*BB
);
68 /// CreateReadOfExceptionValue - Return the result of the eh.exception
69 /// intrinsic by calling the intrinsic if in a landing pad, or loading
70 /// it from the exception value variable otherwise.
71 Instruction
*CreateReadOfExceptionValue(BasicBlock
*BB
) {
72 return LandingPads
.count(BB
) ?
73 CreateExceptionValueCall(BB
) : CreateValueLoad(BB
);
// NOTE(review): a 'public:' access specifier presumably sits on one of the
// elided lines above this point — confirm.
77 static char ID
; // Pass identification, replacement for typeid.
78 DwarfEHPrepare(const TargetLowering
*tli
, bool fast
) :
79 FunctionPass(&ID
), TLI(tli
), CompileFast(fast
),
80 ExceptionValueIntrinsic(0), RewindFunction(0) {}
82 virtual bool runOnFunction(Function
&Fn
);
84 // getAnalysisUsage - We need dominance frontiers for memory promotion.
85 virtual void getAnalysisUsage(AnalysisUsage
&AU
) const {
87 AU
.addRequired
<DominatorTree
>();
88 AU
.addPreserved
<DominatorTree
>();
90 AU
.addRequired
<DominanceFrontier
>();
91 AU
.addPreserved
<DominanceFrontier
>();
94 const char *getPassName() const {
95 return "Exception handling preparation";
99 } // end anonymous namespace
// Storage for the pass-identification char; its *address* (&ID) is what the
// pass manager uses to identify the pass (passed as FunctionPass(&ID)).
101 char DwarfEHPrepare::ID
= 0;
103 FunctionPass
*llvm::createDwarfEHPass(const TargetLowering
*tli
, bool fast
) {
104 return new DwarfEHPrepare(tli
, fast
);
// NOTE(review): gaps in the embedded original line numbers below show that
// some lines ('continue' statements, closing braces, the trailing
// 'Changed = true;' / 'return Changed;') are elided from this view; the
// surviving lines are kept verbatim.
107 /// NormalizeLandingPads - Normalize and discover landing pads, noting them
108 /// in the LandingPads set. A landing pad is normal if the only CFG edges
109 /// that end at it are unwind edges from invoke instructions. If we inlined
110 /// through an invoke we could have a normal branch from the previous
111 /// unwind block through to the landing pad for the original invoke.
112 /// Abnormal landing pads are fixed up by redirecting all unwind edges to
113 /// a new basic block which falls through to the original.
114 bool DwarfEHPrepare::NormalizeLandingPads() {
115 bool Changed
= false;
117 for (Function::iterator I
= F
->begin(), E
= F
->end(); I
!= E
; ++I
) {
118 TerminatorInst
*TI
= I
->getTerminator();
// Only invokes create unwind edges; successor 1 of an invoke is its
// landing pad, successor 0 its normal destination (see the check below).
119 if (!isa
<InvokeInst
>(TI
))
121 BasicBlock
*LPad
= TI
->getSuccessor(1);
122 // Skip landing pads that have already been normalized.
123 if (LandingPads
.count(LPad
))
126 // Check that only invoke unwind edges end at the landing pad.
127 bool OnlyUnwoundTo
= true;
128 for (pred_iterator PI
= pred_begin(LPad
), PE
= pred_end(LPad
);
130 TerminatorInst
*PT
= (*PI
)->getTerminator();
131 if (!isa
<InvokeInst
>(PT
) || LPad
== PT
->getSuccessor(0)) {
132 OnlyUnwoundTo
= false;
// NOTE(review): the 'if (OnlyUnwoundTo)' guard joining the two cases below
// presumably sits on an elided line — confirm.
138 // Only unwind edges lead to the landing pad. Remember the landing pad.
139 LandingPads
.insert(LPad
);
143 // At least one normal edge ends at the landing pad. Redirect the unwind
144 // edges to a new basic block which falls through into this one.
146 // Create the new basic block.
147 BasicBlock
*NewBB
= BasicBlock::Create(F
->getContext(),
148 LPad
->getName() + "_unwind_edge");
150 // Insert it into the function right before the original landing pad.
151 LPad
->getParent()->getBasicBlockList().insert(LPad
, NewBB
);
153 // Redirect unwind edges from the original landing pad to NewBB.
// The predecessor iterator is advanced (PI++) before the edge is rewritten,
// so retargeting the successor does not invalidate the iteration.
154 for (pred_iterator PI
= pred_begin(LPad
), PE
= pred_end(LPad
); PI
!= PE
; ) {
155 TerminatorInst
*PT
= (*PI
++)->getTerminator();
156 if (isa
<InvokeInst
>(PT
) && PT
->getSuccessor(1) == LPad
)
157 // Unwind to the new block.
158 PT
->setSuccessor(1, NewBB
);
161 // If there are any PHI nodes in LPad, we need to update them so that they
162 // merge incoming values from NewBB instead.
163 for (BasicBlock::iterator II
= LPad
->begin(); isa
<PHINode
>(II
); ++II
) {
164 PHINode
*PN
= cast
<PHINode
>(II
);
165 pred_iterator PB
= pred_begin(NewBB
), PE
= pred_end(NewBB
);
167 // Check to see if all of the values coming in via unwind edges are the
168 // same. If so, we don't need to create a new PHI node.
169 Value
*InVal
= PN
->getIncomingValueForBlock(*PB
);
170 for (pred_iterator PI
= PB
; PI
!= PE
; ++PI
) {
171 if (PI
!= PB
&& InVal
!= PN
->getIncomingValueForBlock(*PI
)) {
178 // Different unwind edges have different values. Create a new PHI node
// NOTE(review): the insertion-point argument of PHINode::Create and the
// 'InVal = NewPN;' style hand-off implied by the comment at line 186 sit
// on elided lines — confirm against the full file.
180 PHINode
*NewPN
= PHINode::Create(PN
->getType(), PN
->getName()+".unwind",
182 // Add an entry for each unwind edge, using the value from the old PHI.
183 for (pred_iterator PI
= PB
; PI
!= PE
; ++PI
)
184 NewPN
->addIncoming(PN
->getIncomingValueForBlock(*PI
), *PI
);
186 // Now use this new PHI as the common incoming value for NewBB in PN.
190 // Revector exactly one entry in the PHI node to come from NewBB
191 // and delete all other entries that come from unwind edges. If
192 // there are both normal and unwind edges from the same predecessor,
193 // this leaves an entry for the normal edge.
194 for (pred_iterator PI
= PB
; PI
!= PE
; ++PI
)
195 PN
->removeIncomingValue(*PI
);
196 PN
->addIncoming(InVal
, NewBB
);
199 // Add a fallthrough from NewBB to the original landing pad.
200 BranchInst::Create(LPad
, NewBB
);
202 // Now update DominatorTree and DominanceFrontier analysis information.
// NOTE(review): DT/DF come from getAnalysisIfAvailable and may be null;
// null guards presumably sit on the elided lines 203/205 — confirm.
204 DT
->splitBlock(NewBB
);
206 DF
->splitBlock(NewBB
);
208 // Remember the newly constructed landing pad. The original landing pad
209 // LPad is no longer a landing pad now that all unwind edges have been
210 // revectored to NewBB.
211 LandingPads
.insert(NewBB
);
212 ++NumLandingPadsSplit
;
// NOTE(review): elided lines in this view appear to include the second
// argument lists of FunctionType::get and CallInst::Create, loop braces,
// the '++NumUnwindsLowered' / 'Changed = true;' updates (the statistic is
// declared above but never visibly touched) and the final return — confirm.
219 /// LowerUnwinds - Turn unwind instructions into calls to _Unwind_Resume,
220 /// rethrowing any previously caught exception. This will crash horribly
221 /// at runtime if there is no such exception: using unwind to throw a new
222 /// exception is currently not supported.
223 bool DwarfEHPrepare::LowerUnwinds() {
// Collect the unwind instructions first so the rewrite loop below does not
// mutate the function while we are still scanning it.
224 SmallVector
<TerminatorInst
*, 16> UnwindInsts
;
226 for (Function::iterator I
= F
->begin(), E
= F
->end(); I
!= E
; ++I
) {
227 TerminatorInst
*TI
= I
->getTerminator();
228 if (isa
<UnwindInst
>(TI
))
229 UnwindInsts
.push_back(TI
);
232 if (UnwindInsts
.empty()) return false;
234 // Find the rewind function if we didn't already.
// RewindFunction is cached across runs; it takes a single i8* (the
// exception value) and returns void.
235 if (!RewindFunction
) {
236 LLVMContext
&Ctx
= UnwindInsts
[0]->getContext();
237 std::vector
<const Type
*>
238 Params(1, Type::getInt8PtrTy(Ctx
));
239 FunctionType
*FTy
= FunctionType::get(Type::getVoidTy(Ctx
),
241 const char *RewindName
= TLI
->getLibcallName(RTLIB::UNWIND_RESUME
);
242 RewindFunction
= F
->getParent()->getOrInsertFunction(RewindName
, FTy
);
245 bool Changed
= false;
247 for (SmallVectorImpl
<TerminatorInst
*>::iterator
248 I
= UnwindInsts
.begin(), E
= UnwindInsts
.end(); I
!= E
; ++I
) {
249 TerminatorInst
*TI
= *I
;
251 // Replace the unwind instruction with a call to _Unwind_Resume (or the
252 // appropriate target equivalent) followed by an UnreachableInst.
254 // Create the call...
255 CallInst
*CI
= CallInst::Create(RewindFunction
,
256 CreateReadOfExceptionValue(TI
->getParent()),
// Use the target's libcall calling convention for the resume call so it
// matches the declaration obtained via getLibcallName above.
258 CI
->setCallingConv(TLI
->getLibcallCallingConv(RTLIB::UNWIND_RESUME
));
259 // ...followed by an UnreachableInst.
260 new UnreachableInst(TI
->getContext(), TI
);
262 // Nuke the unwind instruction.
263 TI
->eraseFromParent();
// NOTE(review): the early 'return false;' after the intrinsic-not-declared
// check, several closing braces, 'Changed = true;' and the final
// 'return Changed;' sit on lines elided from this view — confirm.
271 /// MoveExceptionValueCalls - Ensure that eh.exception is only ever called from
272 /// landing pads by replacing calls outside of landing pads with loads from a
273 /// stack temporary. Move eh.exception calls inside landing pads to the start
274 /// of the landing pad (optional, but may make things simpler for later passes).
275 bool DwarfEHPrepare::MoveExceptionValueCalls() {
276 // If the eh.exception intrinsic is not declared in the module then there is
277 // nothing to do. Speed up compilation by checking for this common case.
278 if (!ExceptionValueIntrinsic
&&
279 !F
->getParent()->getFunction(Intrinsic::getName(Intrinsic::eh_exception
)))
282 bool Changed
= false;
284 for (Function::iterator BB
= F
->begin(), E
= F
->end(); BB
!= E
; ++BB
) {
// NOTE: the inner 'E' deliberately shadows the outer function-iterator 'E'.
// The iterator II is advanced (II++) inside the dyn_cast before any erase,
// so deleting CI below does not invalidate the iteration.
285 for (BasicBlock::iterator II
= BB
->begin(), E
= BB
->end(); II
!= E
;)
286 if (IntrinsicInst
*CI
= dyn_cast
<IntrinsicInst
>(II
++))
287 if (CI
->getIntrinsicID() == Intrinsic::eh_exception
) {
288 if (!CI
->use_empty()) {
289 Value
*ExceptionValue
= CreateReadOfExceptionValue(BB
);
290 if (CI
== ExceptionValue
) {
291 // The call was at the start of a landing pad - leave it alone.
292 assert(LandingPads
.count(BB
) &&
293 "Created eh.exception call outside landing pad!");
296 CI
->replaceAllUsesWith(ExceptionValue
);
298 CI
->eraseFromParent();
299 ++NumExceptionValuesMoved
;
// NOTE(review): the early 'return false;' when no temporary exists, the loop
// condition/increment of the landing-pad loop (original line 319),
// 'Changed = true;' and the final return are on lines elided from this view.
307 /// FinishStackTemporaries - If we introduced a stack variable to hold the
308 /// exception value then initialize it in each landing pad.
309 bool DwarfEHPrepare::FinishStackTemporaries() {
// Nothing to finish unless CreateValueLoad introduced the stack temporary.
310 if (!ExceptionValueVar
)
314 bool Changed
= false;
316 // Make sure that there is a store of the exception value at the start of
318 for (BBSet::iterator LI
= LandingPads
.begin(), LE
= LandingPads
.end();
320 Instruction
*ExceptionValue
= CreateReadOfExceptionValue(*LI
);
// Store the freshly read exception value into the temporary, immediately
// after the read so later loads in other blocks observe it.
321 Instruction
*Store
= new StoreInst(ExceptionValue
, ExceptionValueVar
);
322 Store
->insertAfter(ExceptionValue
);
// NOTE(review): the 'return true;' / 'return false;' lines are elided from
// this view — confirm against the full file.
329 /// PromoteStackTemporaries - Turn any stack temporaries we introduced into
330 /// registers if possible.
331 bool DwarfEHPrepare::PromoteStackTemporaries() {
// PromoteMemToReg needs both the dominator tree and dominance frontier
// (see the call below), which may be null since they are only fetched via
// getAnalysisIfAvailable in runOnFunction.
332 if (ExceptionValueVar
&& DT
&& DF
&& isAllocaPromotable(ExceptionValueVar
)) {
333 // Turn the exception temporary into registers and phi nodes if possible.
334 std::vector
<AllocaInst
*> Allocas(1, ExceptionValueVar
);
335 PromoteMemToReg(Allocas
, *DT
, *DF
);
// NOTE(review): the 'return CI;' for the reuse case (original line 350) and
// a closing brace are elided from this view — confirm.
341 /// CreateExceptionValueCall - Insert a call to the eh.exception intrinsic at
342 /// the start of the basic block (unless there already is one, in which case
343 /// the existing call is returned).
344 Instruction
*DwarfEHPrepare::CreateExceptionValueCall(BasicBlock
*BB
) {
// Look at the first non-PHI instruction: a previously created call would
// have been inserted exactly there.
345 Instruction
*Start
= BB
->getFirstNonPHI();
346 // Is this a call to eh.exception?
347 if (IntrinsicInst
*CI
= dyn_cast
<IntrinsicInst
>(Start
))
348 if (CI
->getIntrinsicID() == Intrinsic::eh_exception
)
349 // Reuse the existing call.
352 // Find the eh.exception intrinsic if we didn't already.
// The declaration is cached in ExceptionValueIntrinsic across calls.
353 if (!ExceptionValueIntrinsic
)
354 ExceptionValueIntrinsic
= Intrinsic::getDeclaration(F
->getParent(),
355 Intrinsic::eh_exception
);
358 return CallInst::Create(ExceptionValueIntrinsic
, "eh.value.call", Start
);
// NOTE(review): the 'return LI;' for the reuse case (original line 371) and
// a closing brace are elided from this view — confirm.
361 /// CreateValueLoad - Insert a load of the exception value stack variable
362 /// (creating it if necessary) at the start of the basic block (unless
363 /// there already is a load, in which case the existing load is returned).
364 Instruction
*DwarfEHPrepare::CreateValueLoad(BasicBlock
*BB
) {
365 Instruction
*Start
= BB
->getFirstNonPHI();
366 // Is this a load of the exception temporary?
367 if (ExceptionValueVar
)
368 if (LoadInst
* LI
= dyn_cast
<LoadInst
>(Start
))
369 if (LI
->getPointerOperand() == ExceptionValueVar
)
370 // Reuse the existing load.
373 // Create the temporary if we didn't already.
// The i8* alloca is placed at the very start of the function's entry block
// (F->begin()->begin()) so it dominates every use.
374 if (!ExceptionValueVar
) {
375 ExceptionValueVar
= new AllocaInst(PointerType::getUnqual(
376 Type::getInt8Ty(BB
->getContext())), "eh.value", F
->begin()->begin());
377 ++NumStackTempsIntroduced
;
381 return new LoadInst(ExceptionValueVar
, "eh.value.load", Start
);
// NOTE(review): lines elided from this view presumably include the
// 'F = &Fn;' assignment (the phases all use the member F), clearing of the
// LandingPads set, and the final 'return Changed;' — confirm.
384 bool DwarfEHPrepare::runOnFunction(Function
&Fn
) {
385 bool Changed
= false;
387 // Initialize internal state.
// DT/DF are optional: fetched only if some earlier pass computed them
// (PromoteStackTemporaries checks them for null before use).
388 DT
= getAnalysisIfAvailable
<DominatorTree
>();
389 DF
= getAnalysisIfAvailable
<DominanceFrontier
>();
// Reset per-function state; CreateValueLoad sets this lazily.
390 ExceptionValueVar
= 0;
393 // Ensure that only unwind edges end at landing pads (a landing pad is a
394 // basic block where an invoke unwind edge ends).
395 Changed
|= NormalizeLandingPads();
397 // Turn unwind instructions into libcalls.
398 Changed
|= LowerUnwinds();
400 // TODO: Move eh.selector calls to landing pads and combine them.
402 // Move eh.exception calls to landing pads.
403 Changed
|= MoveExceptionValueCalls();
405 // Initialize any stack temporaries we introduced.
406 Changed
|= FinishStackTemporaries();
408 // Turn any stack temporaries into registers if possible.
410 Changed
|= PromoteStackTemporaries();