/* Dead store elimination
   Copyright (C) 2004-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "bitmap.h"
#include "gimple.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "domwalk.h"
#include "flags.h"
#include "langhooks.h"
#include "tree-cfgcleanup.h"
/* This file implements dead store elimination.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post-dominates the first store, then the first store is dead.

   The single use of the store's virtual definition ensures that
   there are no intervening aliased loads, and the requirement that
   the second store post-dominates the first ensures that if the earlier
   store executes, then the later store will execute before the function
   exits.

   It may help to think of this as first moving the earlier store to
   the point immediately before the later store.  Again, the single
   use of the virtual definition and the post-dominance relationship
   ensure that such movement would be safe.  Clearly if there are
   back-to-back stores, then the second is redundant.

   Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
   may also help in understanding this code since it discusses the
   relationship between dead store and redundant load elimination.  In
   fact, they are the same transformation applied to different views of
   the CFG.  */
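
/* As a minimal illustration (not taken from GCC's testsuite; the names
   below are invented for the example), this is the kind of source the
   transformation targets:

     struct S { int i; } s;

     void
     f (int a, int b)
     {
       s.i = a;    <- dead: overwritten below with no intervening load
       s.i = b;
     }

   The first store's virtual definition has exactly one use, the second
   store, which writes the same location and post-dominates it, so the
   first store can be deleted.  */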
/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;

static bool gate_dse (void);
static unsigned int tree_ssa_dse (void);
/* A helper of dse_optimize_stmt.
   Given a GIMPLE_ASSIGN in STMT, find a candidate statement *USE_STMT that
   may prove STMT to be dead.
   Return TRUE if the above conditions are met, otherwise FALSE.  */

static bool
dse_possible_dead_store_p (gimple stmt, gimple *use_stmt)
{
  gimple temp;
  unsigned cnt = 0;

  *use_stmt = NULL;

  /* Self-assignments are zombies.  */
  if (operand_equal_p (gimple_assign_rhs1 (stmt), gimple_assign_lhs (stmt), 0))
    {
      *use_stmt = stmt;
      return true;
    }

  /* Find the first dominated statement that clobbers (part of) the
     memory stmt stores to with no intermediate statement that may use
     part of the memory stmt stores.  That is, find a store that may
     prove stmt to be a dead store.  */
  temp = stmt;
  do
    {
      gimple use_stmt, defvar_def;
      imm_use_iterator ui;
      bool fail = false;
      tree defvar;

      /* Limit stmt walking to be linear in the number of possibly
         dead stores.  */
      if (++cnt > 256)
        return false;

      if (gimple_code (temp) == GIMPLE_PHI)
        defvar = PHI_RESULT (temp);
      else
        defvar = gimple_vdef (temp);
      defvar_def = temp;
      temp = NULL;
      FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
        {
          cnt++;

          /* If we ever reach our DSE candidate stmt again, fail.  We
             cannot handle dead stores in loops.  */
          if (use_stmt == stmt)
            {
              fail = true;
              BREAK_FROM_IMM_USE_STMT (ui);
            }
          /* In simple cases we can look through PHI nodes, but we
             have to be careful with loops and with memory references
             containing operands that are also operands of PHI nodes.
             See gcc.c-torture/execute/20051110-*.c.  */
          else if (gimple_code (use_stmt) == GIMPLE_PHI)
            {
              if (temp
                  /* Make sure we are not in a loop latch block.  */
                  || gimple_bb (stmt) == gimple_bb (use_stmt)
                  || dominated_by_p (CDI_DOMINATORS,
                                     gimple_bb (stmt), gimple_bb (use_stmt))
                  /* We can look through PHIs to regions post-dominating
                     the DSE candidate stmt.  */
                  || !dominated_by_p (CDI_POST_DOMINATORS,
                                      gimple_bb (stmt), gimple_bb (use_stmt)))
                {
                  fail = true;
                  BREAK_FROM_IMM_USE_STMT (ui);
                }
              /* Do not consider the PHI as a use if it dominates the
                 stmt defining the virtual operand we are processing;
                 we have already processed it in that case.  */
              if (gimple_bb (defvar_def) != gimple_bb (use_stmt)
                  && !dominated_by_p (CDI_DOMINATORS,
                                      gimple_bb (defvar_def),
                                      gimple_bb (use_stmt)))
                temp = use_stmt;
            }
          /* If the statement is a use, the store is not dead.  */
          else if (ref_maybe_used_by_stmt_p (use_stmt,
                                             gimple_assign_lhs (stmt)))
            {
              fail = true;
              BREAK_FROM_IMM_USE_STMT (ui);
            }
          /* If this is a store, remember it, or bail out if we have
             multiple ones (they will be in different CFG parts then).  */
          else if (gimple_vdef (use_stmt))
            {
              if (temp)
                {
                  fail = true;
                  BREAK_FROM_IMM_USE_STMT (ui);
                }
              temp = use_stmt;
            }
        }

      if (fail)
        return false;

      /* If we didn't find any definition, this means the store is dead
         if it isn't a store to globally reachable memory.  In this case
         just pretend the stmt makes itself dead.  Otherwise fail.  */
      if (!temp)
        {
          if (stmt_may_clobber_global_p (stmt))
            return false;

          temp = stmt;
          break;
        }
    }
  /* We deliberately stop on clobbering statements and not only on
     killing ones to make walking cheaper.  Otherwise we can just
     continue walking until both stores have equal reference trees.  */
  while (!stmt_may_clobber_ref_p (temp, gimple_assign_lhs (stmt)));

  *use_stmt = temp;

  return true;
}
/* Attempt to eliminate dead stores in the statement referenced by GSI.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post-dominates the first store, then the first store is dead.  */

static void
dse_optimize_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If this statement has no virtual defs, then there is nothing
     to do.  */
  if (!gimple_vdef (stmt))
    return;

  /* We know we have virtual definitions.  Bail out for calls with a
     known callee; only plain assignments are handled below.  */
  if (is_gimple_call (stmt) && gimple_call_fndecl (stmt))
    return;

  /* Don't return early on *this_2(D) ={v} {CLOBBER}.  */
  if (gimple_has_volatile_ops (stmt)
      && (!gimple_clobber_p (stmt)
          || TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF))
    return;

  if (is_gimple_assign (stmt))
    {
      gimple use_stmt;

      if (!dse_possible_dead_store_p (stmt, &use_stmt))
        return;

      /* But only remove *this_2(D) ={v} {CLOBBER} if killed by
         another clobber stmt.  */
      if (gimple_clobber_p (stmt)
          && !gimple_clobber_p (use_stmt))
        return;

      /* If we have precisely one immediate use at this point and the
         stores are to the same memory location, or there is a chain of
         virtual uses from stmt to the stmt which stores to that same
         memory location, then we may have found a redundant store.  */
      if ((gimple_has_lhs (use_stmt)
           && (operand_equal_p (gimple_assign_lhs (stmt),
                                gimple_get_lhs (use_stmt), 0)))
          || stmt_kills_ref_p (use_stmt, gimple_assign_lhs (stmt)))
        {
          basic_block bb;

          /* If use_stmt is or might be a nop assignment, e.g. for
               struct { ... } S a, b, *p; ...
               b = a; b = b;
             or
               b = a; b = *p; where p might be &b,
             or
               *p = a; *p = b; where p might be &b,
             or
               *p = *u; *p = *v; where p might be v, then USE_STMT
             acts as a use as well as a definition, so the store in STMT
             is not dead.  */
          if (stmt != use_stmt
              && ref_maybe_used_by_stmt_p (use_stmt, gimple_assign_lhs (stmt)))
            return;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "  Deleted dead store '");
              print_gimple_stmt (dump_file, gsi_stmt (*gsi), dump_flags, 0);
              fprintf (dump_file, "'\n");
            }

          /* Then we need to fix the operand of the consuming stmt.  */
          unlink_stmt_vdef (stmt);

          /* Remove the dead store.  */
          bb = gimple_bb (stmt);
          if (gsi_remove (gsi, true))
            bitmap_set_bit (need_eh_cleanup, bb->index);

          /* And release any SSA_NAMEs set in this statement back to the
             SSA_NAME manager.  */
          release_defs (stmt);
        }
    }
}
class dse_dom_walker : public dom_walker
{
public:
  dse_dom_walker (cdi_direction direction) : dom_walker (direction) {}

  virtual void before_dom_children (basic_block);
};

void
dse_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;

  /* Walk the statements backwards.  dse_optimize_stmt may remove the
     statement GSI refers to; if that leaves GSI at the end, pick up the
     (new) last statement of the block so the backward walk continues
     with the statement before the removed one.  */
  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
    {
      dse_optimize_stmt (&gsi);
      if (gsi_end_p (gsi))
        gsi = gsi_last_bb (bb);
      else
        gsi_prev (&gsi);
    }
}
/* Main entry point.  */

static unsigned int
tree_ssa_dse (void)
{
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  renumber_gimple_stmt_uids ();

  /* We might consider making this a property of each pass so that it
     can be [re]computed on an as-needed basis.  Particularly since
     this pass could be seen as an extension of DCE which needs post
     dominators.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  /* Dead store elimination is fundamentally a walk of the post-dominator
     tree and a backwards walk of statements within each block.  */
  dse_dom_walker (CDI_POST_DOMINATORS).walk (cfun->cfg->x_exit_block_ptr);

  /* Removal of stores may make some EH edges dead.  Purge such edges from
     the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      cleanup_tree_cfg ();
    }

  BITMAP_FREE (need_eh_cleanup);

  /* For now, just wipe the post-dominator information.  */
  free_dominance_info (CDI_POST_DOMINATORS);
  return 0;
}
static bool
gate_dse (void)
{
  return flag_tree_dse != 0;
}
namespace {

const pass_data pass_data_dse =
{
  GIMPLE_PASS, /* type */
  "dse", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_DSE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

class pass_dse : public gimple_opt_pass
{
public:
  pass_dse (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dse, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dse (m_ctxt); }
  bool gate () { return gate_dse (); }
  unsigned int execute () { return tree_ssa_dse (); }

}; // class pass_dse

} // anon namespace

gimple_opt_pass *
make_pass_dse (gcc::context *ctxt)
{
  return new pass_dse (ctxt);
}
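
/* A note on how the pass is wired up (a sketch for orientation, not part
   of this file): the pass manager creates the pass via make_pass_dse and
   schedules it from gcc/passes.def with an entry along the lines of

     NEXT_PASS (pass_dse);

   gate () then decides per function whether to run (-ftree-dse) and
   execute () performs the actual post-dominator walk above.  */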