[Simplify] Remove unused instructions and accesses.
polly-mirror.git — lib/Support/VirtualInstruction.cpp
blob d09c9b0376dd459dfd98b337531135d377c0b245
1 //===------ VirtualInstruction.cpp ------------------------------*- C++ -*-===//
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // Tools for determining which instructions are within a statement and the
11 // nature of their operands.
13 //===----------------------------------------------------------------------===//
15 #include "polly/Support/VirtualInstruction.h"
16 #include "polly/Support/SCEVValidator.h"
18 using namespace polly;
19 using namespace llvm;
21 VirtualUse VirtualUse ::create(Scop *S, const Use &U, LoopInfo *LI,
22 bool Virtual) {
23 auto *UserBB = getUseBlock(U);
24 auto *UserStmt = S->getStmtFor(UserBB);
25 auto *UserScope = LI->getLoopFor(UserBB);
26 return create(S, UserStmt, UserScope, U.get(), Virtual);
29 VirtualUse VirtualUse::create(Scop *S, ScopStmt *UserStmt, Loop *UserScope,
30 Value *Val, bool Virtual) {
31 assert(!isa<StoreInst>(Val) && "a StoreInst cannot be used");
33 if (isa<BasicBlock>(Val))
34 return VirtualUse(UserStmt, Val, Block, nullptr, nullptr);
36 if (isa<llvm::Constant>(Val))
37 return VirtualUse(UserStmt, Val, Constant, nullptr, nullptr);
39 // Is the value synthesizable? If the user has been pruned
40 // (UserStmt == nullptr), it is either not used anywhere or is synthesizable.
41 // We assume synthesizable which practically should have the same effect.
42 auto *SE = S->getSE();
43 if (SE->isSCEVable(Val->getType())) {
44 auto *ScevExpr = SE->getSCEVAtScope(Val, UserScope);
45 if (!UserStmt || canSynthesize(Val, *UserStmt->getParent(), SE, UserScope))
46 return VirtualUse(UserStmt, Val, Synthesizable, ScevExpr, nullptr);
49 // FIXME: Inconsistency between lookupInvariantEquivClass and
50 // getRequiredInvariantLoads. Querying one of them should be enough.
51 auto &RIL = S->getRequiredInvariantLoads();
52 if (S->lookupInvariantEquivClass(Val) || RIL.count(dyn_cast<LoadInst>(Val)))
53 return VirtualUse(UserStmt, Val, Hoisted, nullptr, nullptr);
55 // ReadOnly uses may have MemoryAccesses that we want to associate with the
56 // use. This is why we look for a MemoryAccess here already.
57 MemoryAccess *InputMA = nullptr;
58 if (UserStmt && Virtual)
59 InputMA = UserStmt->lookupValueReadOf(Val);
61 // Uses are read-only if they have been defined before the SCoP, i.e., they
62 // cannot be written to inside the SCoP. Arguments are defined before any
63 // instructions, hence also before the SCoP. If the user has been pruned
64 // (UserStmt == nullptr) and is not SCEVable, assume it is read-only as it is
65 // neither an intra- nor an inter-use.
66 if (!UserStmt || isa<Argument>(Val))
67 return VirtualUse(UserStmt, Val, ReadOnly, nullptr, InputMA);
69 auto Inst = cast<Instruction>(Val);
70 if (!S->contains(Inst))
71 return VirtualUse(UserStmt, Val, ReadOnly, nullptr, InputMA);
73 // A use is inter-statement if either it is defined in another statement, or
74 // there is a MemoryAccess that reads its value that has been written by
75 // another statement.
76 if (InputMA || (!Virtual && !UserStmt->contains(Inst->getParent())))
77 return VirtualUse(UserStmt, Val, Inter, nullptr, InputMA);
79 return VirtualUse(UserStmt, Val, Intra, nullptr, nullptr);
82 void VirtualUse::print(raw_ostream &OS, bool Reproducible) const {
83 OS << "User: [" << User->getBaseName() << "] ";
84 switch (Kind) {
85 case VirtualUse::Constant:
86 OS << "Constant Op:";
87 break;
88 case VirtualUse::Block:
89 OS << "BasicBlock Op:";
90 break;
91 case VirtualUse::Synthesizable:
92 OS << "Synthesizable Op:";
93 break;
94 case VirtualUse::Hoisted:
95 OS << "Hoisted load Op:";
96 break;
97 case VirtualUse::ReadOnly:
98 OS << "Read-Only Op:";
99 break;
100 case VirtualUse::Intra:
101 OS << "Intra Op:";
102 break;
103 case VirtualUse::Inter:
104 OS << "Inter Op:";
105 break;
108 if (Val) {
109 OS << ' ';
110 if (Reproducible)
111 OS << '"' << Val->getName() << '"';
112 else
113 Val->print(OS, true);
115 if (ScevExpr) {
116 OS << ' ';
117 ScevExpr->print(OS);
119 if (InputMA && !Reproducible)
120 OS << ' ' << InputMA;
123 #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
124 void VirtualUse::dump() const {
125 print(errs(), false);
126 errs() << '\n';
128 #endif
130 void VirtualInstruction::print(raw_ostream &OS, bool Reproducible) const {
131 if (!Stmt || !Inst) {
132 OS << "[null VirtualInstruction]";
133 return;
136 OS << "[" << Stmt->getBaseName() << "]";
137 Inst->print(OS, !Reproducible);
140 #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
141 void VirtualInstruction::dump() const {
142 print(errs(), false);
143 errs() << '\n';
145 #endif
147 /// Return true if @p Inst cannot be removed, even if it is nowhere referenced.
148 static bool isRoot(const Instruction *Inst) {
149 // The store is handled by its MemoryAccess. The load must be reached from the
150 // roots in order to be marked as used.
151 if (isa<LoadInst>(Inst) || isa<StoreInst>(Inst))
152 return false;
154 // Terminator instructions (in region statements) are required for control
155 // flow.
156 if (isa<TerminatorInst>(Inst))
157 return true;
159 // Writes to memory must be honored.
160 if (Inst->mayWriteToMemory())
161 return true;
163 return false;
166 /// Return true if @p ComputingInst is used after SCoP @p S. It must not be
167 /// removed in order for its value to be available after the SCoP.
168 static bool isEscaping(Scop *S, Instruction *ComputingInst) {
169 for (Use &Use : ComputingInst->uses()) {
170 Instruction *User = cast<Instruction>(Use.getUser());
171 if (!S->contains(User))
172 return true;
174 return false;
177 /// Return true for MemoryAccesses that cannot be removed because it represents
178 /// an llvm::Value that is used after the SCoP.
179 static bool isEscaping(MemoryAccess *MA) {
180 assert(MA->isOriginalValueKind());
181 return isEscaping(MA->getStatement()->getParent(),
182 cast<Instruction>(MA->getAccessValue()));
185 /// Add non-removable virtual instructions in @p Stmt to @p RootInsts.
186 static void
187 addInstructionRoots(ScopStmt *Stmt,
188 SmallVectorImpl<VirtualInstruction> &RootInsts) {
189 // For region statements we must keep all instructions because we do not
190 // support removing instructions from region statements.
191 if (!Stmt->isBlockStmt()) {
192 for (auto *BB : Stmt->getRegion()->blocks())
193 for (Instruction &Inst : *BB)
194 RootInsts.emplace_back(Stmt, &Inst);
197 for (Instruction *Inst : Stmt->getInstructions())
198 if (isRoot(Inst))
199 RootInsts.emplace_back(Stmt, Inst);
202 /// Add non-removable memory accesses in @p Stmt to @p RootInsts.
204 /// @param Local If true, all writes are assumed to escape. markAndSweep
205 /// algorithms can use this to be applicable to a single ScopStmt only without
206 /// the risk of removing definitions required by other statements.
207 /// If false, only writes for SCoP-escaping values are roots. This
208 /// is global mode, where such writes must be marked by theirs uses
209 /// in order to be reachable.
210 static void addAccessRoots(ScopStmt *Stmt,
211 SmallVectorImpl<MemoryAccess *> &RootAccs,
212 bool Local) {
213 for (auto *MA : *Stmt) {
214 if (!MA->isWrite())
215 continue;
217 // Writes to arrays are always used.
218 if (MA->isLatestArrayKind())
219 RootAccs.push_back(MA);
221 // Values are roots if they are escaping.
222 else if (MA->isLatestValueKind()) {
223 if (Local || isEscaping(MA))
224 RootAccs.push_back(MA);
227 // Exit phis are, by definition, escaping.
228 else if (MA->isLatestExitPHIKind())
229 RootAccs.push_back(MA);
231 // phi writes are only roots if we are not visiting the statement
232 // containing the PHINode.
233 else if (Local && MA->isLatestPHIKind())
234 RootAccs.push_back(MA);
238 /// Determine all instruction and access roots.
239 static void addRoots(ScopStmt *Stmt,
240 SmallVectorImpl<VirtualInstruction> &RootInsts,
241 SmallVectorImpl<MemoryAccess *> &RootAccs, bool Local) {
242 addInstructionRoots(Stmt, RootInsts);
243 addAccessRoots(Stmt, RootAccs, Local);
246 /// Mark accesses and instructions as used if they are reachable from a root,
247 /// walking the operand trees.
249 /// @param S The SCoP to walk.
250 /// @param LI The LoopInfo Analysis.
251 /// @param RootInsts List of root instructions.
252 /// @param RootAccs List of root accesses.
253 /// @param UsesInsts[out] Receives all reachable instructions, including the
254 /// roots.
255 /// @param UsedAccs[out] Receives all reachable accesses, including the roots.
256 /// @param OnlyLocal If non-nullptr, restricts walking to a single
257 /// statement.
258 static void walkReachable(Scop *S, LoopInfo *LI,
259 ArrayRef<VirtualInstruction> RootInsts,
260 ArrayRef<MemoryAccess *> RootAccs,
261 DenseSet<VirtualInstruction> &UsedInsts,
262 DenseSet<MemoryAccess *> &UsedAccs,
263 ScopStmt *OnlyLocal = nullptr) {
264 UsedInsts.clear();
265 UsedAccs.clear();
267 SmallVector<VirtualInstruction, 32> WorklistInsts;
268 SmallVector<MemoryAccess *, 32> WorklistAccs;
270 WorklistInsts.append(RootInsts.begin(), RootInsts.end());
271 WorklistAccs.append(RootAccs.begin(), RootAccs.end());
273 auto AddToWorklist = [&](VirtualUse VUse) {
274 switch (VUse.getKind()) {
275 case VirtualUse::Block:
276 case VirtualUse::Constant:
277 case VirtualUse::Synthesizable:
278 case VirtualUse::Hoisted:
279 break;
280 case VirtualUse::ReadOnly:
281 // Read-only scalars only have MemoryAccesses if ModelReadOnlyScalars is
282 // enabled.
283 if (!VUse.getMemoryAccess())
284 break;
285 LLVM_FALLTHROUGH;
286 case VirtualUse::Inter:
287 assert(VUse.getMemoryAccess());
288 WorklistAccs.push_back(VUse.getMemoryAccess());
289 break;
290 case VirtualUse::Intra:
291 WorklistInsts.emplace_back(VUse.getUser(),
292 cast<Instruction>(VUse.getValue()));
293 break;
297 while (true) {
298 // We have two worklists to process: Only when the MemoryAccess worklist is
299 // empty, we process the instruction worklist.
301 while (!WorklistAccs.empty()) {
302 auto *Acc = WorklistAccs.pop_back_val();
304 ScopStmt *Stmt = Acc->getStatement();
305 if (OnlyLocal && Stmt != OnlyLocal)
306 continue;
308 auto Inserted = UsedAccs.insert(Acc);
309 if (!Inserted.second)
310 continue;
312 if (Acc->isRead()) {
313 const ScopArrayInfo *SAI = Acc->getScopArrayInfo();
315 if (Acc->isOriginalValueKind()) {
316 MemoryAccess *DefAcc = S->getValueDef(SAI);
318 // Accesses to read-only values do not have a definition.
319 if (DefAcc)
320 WorklistAccs.push_back(S->getValueDef(SAI));
323 if (Acc->isOriginalAnyPHIKind()) {
324 auto IncomingMAs = S->getPHIIncomings(SAI);
325 WorklistAccs.append(IncomingMAs.begin(), IncomingMAs.end());
329 if (Acc->isWrite()) {
330 if (Acc->isOriginalValueKind() ||
331 (Acc->isOriginalArrayKind() && Acc->getAccessValue())) {
332 Loop *Scope = Stmt->getSurroundingLoop();
333 VirtualUse VUse =
334 VirtualUse::create(S, Stmt, Scope, Acc->getAccessValue(), true);
335 AddToWorklist(VUse);
338 if (Acc->isOriginalAnyPHIKind()) {
339 for (auto Incoming : Acc->getIncoming()) {
340 VirtualUse VUse = VirtualUse::create(
341 S, Stmt, LI->getLoopFor(Incoming.first), Incoming.second, true);
342 AddToWorklist(VUse);
346 if (Acc->isOriginalArrayKind())
347 WorklistInsts.emplace_back(Stmt, Acc->getAccessInstruction());
351 // If both worklists are empty, stop walking.
352 if (WorklistInsts.empty())
353 break;
355 VirtualInstruction VInst = WorklistInsts.pop_back_val();
356 ScopStmt *Stmt = VInst.getStmt();
357 Instruction *Inst = VInst.getInstruction();
359 // Do not process statements other than the local.
360 if (OnlyLocal && Stmt != OnlyLocal)
361 continue;
363 auto InsertResult = UsedInsts.insert(VInst);
364 if (!InsertResult.second)
365 continue;
367 // Add all operands to the worklists.
368 if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
369 if (MemoryAccess *PHIRead = Stmt->lookupPHIReadOf(PHI))
370 WorklistAccs.push_back(PHIRead);
371 } else {
372 for (VirtualUse VUse : VInst.operands())
373 AddToWorklist(VUse);
376 // If there is an array access, also add its MemoryAccesses to the worklist.
377 const MemoryAccessList *Accs = Stmt->lookupArrayAccessesFor(Inst);
378 if (!Accs)
379 continue;
381 for (MemoryAccess *Acc : *Accs)
382 WorklistAccs.push_back(Acc);
386 void polly::markReachable(Scop *S, LoopInfo *LI,
387 DenseSet<VirtualInstruction> &UsedInsts,
388 DenseSet<MemoryAccess *> &UsedAccs,
389 ScopStmt *OnlyLocal) {
390 SmallVector<VirtualInstruction, 32> RootInsts;
391 SmallVector<MemoryAccess *, 32> RootAccs;
393 if (OnlyLocal) {
394 addRoots(OnlyLocal, RootInsts, RootAccs, true);
395 } else {
396 for (auto &Stmt : *S)
397 addRoots(&Stmt, RootInsts, RootAccs, false);
400 walkReachable(S, LI, RootInsts, RootAccs, UsedInsts, UsedAccs, OnlyLocal);