gcc/tree-ssa-dse.c
/* Dead store elimination
   Copyright (C) 2004-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "domwalk.h"
#include "flags.h"
#include "langhooks.h"
/* This file implements dead store elimination.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.

   The single use of the store's virtual definition ensures that
   there are no intervening aliased loads and the requirement that
   the second store post dominate the first ensures that if the earlier
   store executes, then the later stores will execute before the function
   exits.

   It may help to think of this as first moving the earlier store to
   the point immediately before the later store.  Again, the single
   use of the virtual definition and the post-dominance relationship
   ensure that such movement would be safe.  Clearly if there are
   back to back stores, then the second is redundant.

   Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
   may also help in understanding this code since it discusses the
   relationship between dead store and redundant load elimination.  In
   fact, they are the same transformation applied to different views of
   the CFG.  */
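/* For example (a sketch in source terms; the pass itself operates on
   the GIMPLE virtual operand form):

     struct S s;
     s.a = 3;    <- dead store: no load of s.a can intervene
     s.a = 5;    <- this post-dominating store makes the first one dead

   the first assignment to s.a can be removed.  */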
/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;

static bool gate_dse (void);
static unsigned int tree_ssa_dse (void);
static void dse_enter_block (struct dom_walk_data *, basic_block);


/* A helper of dse_optimize_stmt.
   Given a GIMPLE_ASSIGN in STMT, find a candidate statement *USE_STMT that
   may prove STMT to be dead.
   Return TRUE if the above conditions are met, otherwise FALSE.  */
static bool
dse_possible_dead_store_p (gimple stmt, gimple *use_stmt)
{
  gimple temp;
  unsigned cnt = 0;

  *use_stmt = NULL;

  /* Self-assignments are zombies.  */
  if (operand_equal_p (gimple_assign_rhs1 (stmt), gimple_assign_lhs (stmt), 0))
    {
      *use_stmt = stmt;
      return true;
    }
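  /* (E.g. a structure copy "a = a;": the store cannot change memory,
     so the statement trivially proves itself dead.)  */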
  /* Find the first dominated statement that clobbers (part of) the
     memory stmt stores to with no intermediate statement that may use
     part of the memory stmt stores.  That is, find a store that may
     prove stmt to be a dead store.  */
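  /* As a sketch of the virtual operand chains being walked (the SSA
     names and statements here are hypothetical):

       # .MEM_2 = VDEF <.MEM_1>
       a = 1;                      <- STMT, the DSE candidate
       # .MEM_3 = VDEF <.MEM_2>
       a = 2;                      <- sole immediate use of .MEM_2

     following the single immediate use of STMT's virtual definition
     leads to the clobbering store that proves STMT dead.  */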
  temp = stmt;
  do
    {
      gimple use_stmt, defvar_def;
      imm_use_iterator ui;
      bool fail = false;
      tree defvar;

      /* Limit stmt walking to be linear in the number of possibly
         dead stores.  */
      if (++cnt > 256)
        return false;

      if (gimple_code (temp) == GIMPLE_PHI)
        defvar = PHI_RESULT (temp);
      else
        defvar = gimple_vdef (temp);
      defvar_def = temp;
      temp = NULL;
      FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
        {
          cnt++;

          /* If we ever reach our DSE candidate stmt again fail.  We
             cannot handle dead stores in loops.  */
          if (use_stmt == stmt)
            {
              fail = true;
              BREAK_FROM_IMM_USE_STMT (ui);
            }
          /* In simple cases we can look through PHI nodes, but we
             have to be careful with loops and with memory references
             containing operands that are also operands of PHI nodes.
             See gcc.c-torture/execute/20051110-*.c.  */
          else if (gimple_code (use_stmt) == GIMPLE_PHI)
            {
              if (temp
                  /* Make sure we are not in a loop latch block.  */
                  || gimple_bb (stmt) == gimple_bb (use_stmt)
                  || dominated_by_p (CDI_DOMINATORS,
                                     gimple_bb (stmt), gimple_bb (use_stmt))
                  /* We can look through PHIs to regions post-dominating
                     the DSE candidate stmt.  */
                  || !dominated_by_p (CDI_POST_DOMINATORS,
                                      gimple_bb (stmt), gimple_bb (use_stmt)))
                {
                  fail = true;
                  BREAK_FROM_IMM_USE_STMT (ui);
                }
              /* Do not consider the PHI as use if it dominates the
                 stmt defining the virtual operand we are processing,
                 we have processed it already in this case.  */
              if (gimple_bb (defvar_def) != gimple_bb (use_stmt)
                  && !dominated_by_p (CDI_DOMINATORS,
                                      gimple_bb (defvar_def),
                                      gimple_bb (use_stmt)))
                temp = use_stmt;
            }
          /* If the statement is a use the store is not dead.  */
          else if (ref_maybe_used_by_stmt_p (use_stmt,
                                             gimple_assign_lhs (stmt)))
            {
              fail = true;
              BREAK_FROM_IMM_USE_STMT (ui);
            }
          /* If this is a store, remember it or bail out if we have
             multiple ones (they will be in different CFG parts then).  */
          else if (gimple_vdef (use_stmt))
            {
              if (temp)
                {
                  fail = true;
                  BREAK_FROM_IMM_USE_STMT (ui);
                }
              temp = use_stmt;
            }
        }
      if (fail)
        return false;

      /* If we didn't find any definition this means the store is dead
         if it isn't a store to global reachable memory.  In this case
         just pretend the stmt makes itself dead.  Otherwise fail.  */
      if (!temp)
        {
          if (stmt_may_clobber_global_p (stmt))
            return false;

          temp = stmt;
          break;
        }
    }
  /* We deliberately stop on clobbering statements and not only on
     killing ones to make walking cheaper.  Otherwise we can just
     continue walking until both stores have equal reference trees.  */
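  /* (A store "a = b;" kills every byte of a, while a store through a
     possibly-aliasing pointer "*p = x;" merely clobbers a; stopping on
     clobbers keeps the walk short, and dse_optimize_stmt then checks
     whether the statement we stopped on really makes STMT dead.)  */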
  while (!stmt_may_clobber_ref_p (temp, gimple_assign_lhs (stmt)));

  *use_stmt = temp;

  return true;
}
/* Attempt to eliminate dead stores in the statement referenced by GSI.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.  */
static void
dse_optimize_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If this statement has no virtual defs, then there is nothing
     to do.  */
  if (!gimple_vdef (stmt))
    return;

  /* We know we have virtual definitions.  If this is a GIMPLE_ASSIGN
     that's not also a function call, then record it into our table.  */
  if (is_gimple_call (stmt) && gimple_call_fndecl (stmt))
    return;

  /* Don't return early on *this_2(D) ={v} {CLOBBER}.  */
  if (gimple_has_volatile_ops (stmt)
      && (!gimple_clobber_p (stmt)
          || TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF))
    return;
  if (is_gimple_assign (stmt))
    {
      gimple use_stmt;

      if (!dse_possible_dead_store_p (stmt, &use_stmt))
        return;

      /* But only remove *this_2(D) ={v} {CLOBBER} if killed by
         another clobber stmt.  */
      if (gimple_clobber_p (stmt)
          && !gimple_clobber_p (use_stmt))
        return;

      /* If we have precisely one immediate use at this point and the
         stores are to the same memory location or there is a chain of
         virtual uses from stmt and the stmt which stores to that same
         memory location, then we may have found a redundant store.  */
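      /* (Same-location here covers both an identical LHS and a later
         store known to kill the whole reference; for instance, with a
         hypothetical struct s, "s.a = 1;" followed by a post-dominating
         "s = t;" kills all of s, so the partial store is dead even
         though the LHS trees differ.)  */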
      if ((gimple_has_lhs (use_stmt)
           && (operand_equal_p (gimple_assign_lhs (stmt),
                                gimple_get_lhs (use_stmt), 0)))
          || stmt_kills_ref_p (use_stmt, gimple_assign_lhs (stmt)))
        {
          basic_block bb;

          /* If use_stmt is or might be a nop assignment, e.g. for
             struct { ... } S a, b, *p; ...
             b = a; b = b;
             or
             b = a; b = *p; where p might be &b,
             or
             *p = a; *p = b; where p might be &b,
             or
             *p = *u; *p = *v; where p might be v, then USE_STMT
             acts as a use as well as definition, so store in STMT
             is not dead.  */
          if (stmt != use_stmt
              && ref_maybe_used_by_stmt_p (use_stmt, gimple_assign_lhs (stmt)))
            return;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "  Deleted dead store '");
              print_gimple_stmt (dump_file, gsi_stmt (*gsi), dump_flags, 0);
              fprintf (dump_file, "'\n");
            }

          /* Then we need to fix the operand of the consuming stmt.  */
          unlink_stmt_vdef (stmt);

          /* Remove the dead store.  */
          bb = gimple_bb (stmt);
          if (gsi_remove (gsi, true))
            bitmap_set_bit (need_eh_cleanup, bb->index);

          /* And release any SSA_NAMEs set in this statement back to the
             SSA_NAME manager.  */
          release_defs (stmt);
        }
    }
}
static void
dse_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
                 basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
    {
      dse_optimize_stmt (&gsi);
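      /* dse_optimize_stmt may have removed the statement GSI referenced,
         leaving GSI pointing past it; if GSI has walked off the end of
         the block, restart from the (new) last statement.  */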
      if (gsi_end_p (gsi))
        gsi = gsi_last_bb (bb);
      else
        gsi_prev (&gsi);
    }
}
/* Main entry point.  */

static unsigned int
tree_ssa_dse (void)
{
  struct dom_walk_data walk_data;

  need_eh_cleanup = BITMAP_ALLOC (NULL);

  renumber_gimple_stmt_uids ();

  /* We might consider making this a property of each pass so that it
     can be [re]computed on an as-needed basis.  Particularly since
     this pass could be seen as an extension of DCE which needs post
     dominators.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  /* Dead store elimination is fundamentally a walk of the post-dominator
     tree and a backwards walk of statements within each block.  */
  walk_data.dom_direction = CDI_POST_DOMINATORS;
  walk_data.initialize_block_local_data = NULL;
  walk_data.before_dom_children = dse_enter_block;
  walk_data.after_dom_children = NULL;

  walk_data.block_local_data_size = 0;
  walk_data.global_data = NULL;

  /* Initialize the dominator walker.  */
  init_walk_dominator_tree (&walk_data);

  /* Recursively walk the dominator tree.  */
  walk_dominator_tree (&walk_data, EXIT_BLOCK_PTR);

  /* Finalize the dominator walker.  */
  fini_walk_dominator_tree (&walk_data);

  /* Removal of stores may make some EH edges dead.  Purge such edges from
     the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      cleanup_tree_cfg ();
    }

  BITMAP_FREE (need_eh_cleanup);

  /* For now, just wipe the post-dominator information.  */
  free_dominance_info (CDI_POST_DOMINATORS);
  return 0;
}
static bool
gate_dse (void)
{
  return flag_tree_dse != 0;
}
namespace {

const pass_data pass_data_dse =
{
  GIMPLE_PASS, /* type */
  "dse", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_DSE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

class pass_dse : public gimple_opt_pass
{
public:
  pass_dse (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dse, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dse (ctxt_); }
  bool gate () { return gate_dse (); }
  unsigned int execute () { return tree_ssa_dse (); }

}; // class pass_dse

} // anon namespace

gimple_opt_pass *
make_pass_dse (gcc::context *ctxt)
{
  return new pass_dse (ctxt);
}