/* Dead store elimination
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "domwalk.h"
#include "flags.h"
#include "langhooks.h"
/* This file implements dead store elimination.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.

   The single use of the store's virtual definition ensures that
   there are no intervening aliased loads, and the requirement that
   the second store post dominate the first ensures that if the earlier
   store executes, then the later store will execute before the function
   exits.

   It may help to think of this as first moving the earlier store to
   the point immediately before the later store.  Again, the single
   use of the virtual definition and the post-dominance relationship
   ensure that such movement would be safe.  Clearly if there are
   back to back stores, then the second is redundant.

   Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
   may also help in understanding this code since it discusses the
   relationship between dead store and redundant load elimination.  In
   fact, they are the same transformation applied to different views of
   the CFG.  */
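
/* As a small illustration (the statements and SSA names below are
   made up for this comment, they are not taken from any test case),
   consider:

     # .MEM_2 = VDEF <.MEM_1>
     a.x = 1;                   <- dead store
     # .MEM_3 = VDEF <.MEM_2>
     a.x = 2;                   <- post-dominating store to the
                                   same location

   The virtual definition .MEM_2 of the first store has exactly one
   use, the second store, and nothing in between can read a.x, so
   the first store may be removed.  */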

/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;

static bool gate_dse (void);
static unsigned int tree_ssa_dse (void);
static void dse_enter_block (struct dom_walk_data *, basic_block);

/* A helper of dse_optimize_stmt.
   Given a GIMPLE_ASSIGN in STMT, find a candidate statement *USE_STMT that
   may prove STMT to be dead.
   Return TRUE if the above conditions are met, otherwise FALSE.  */
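
/* For example (an illustrative fragment; the references and SSA names
   are invented for this comment):

     # .MEM_2 = VDEF <.MEM_1>
     a.x = 1;                   <- STMT, the DSE candidate
     # VUSE <.MEM_2>
     t_3 = b.y;                 <- a load, but it cannot read a.x
     # .MEM_4 = VDEF <.MEM_2>
     a.x = 2;                   <- returned in *USE_STMT

   The walk below follows the virtual use chain from STMT, skips the
   load of b.y because it does not overlap a.x, and stops at the
   second store, which clobbers the stored-to location.  */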

static bool
dse_possible_dead_store_p (gimple stmt, gimple *use_stmt)
{
  gimple temp;
  unsigned cnt = 0;

  *use_stmt = NULL;

  /* Find the first dominated statement that clobbers (part of) the
     memory stmt stores to with no intermediate statement that may use
     part of the memory stmt stores.  That is, find a store that may
     prove stmt to be a dead store.  */
  temp = stmt;
  do
    {
      gimple use_stmt, defvar_def;
      imm_use_iterator ui;
      bool fail = false;
      tree defvar;

      /* Limit stmt walking to be linear in the number of possibly
         dead stores.  */
      if (++cnt > 256)
        return false;

      if (gimple_code (temp) == GIMPLE_PHI)
        defvar = PHI_RESULT (temp);
      else
        defvar = gimple_vdef (temp);
      defvar_def = temp;
      temp = NULL;
      FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
        {
          cnt++;

          /* If we ever reach our DSE candidate stmt again fail.  We
             cannot handle dead stores in loops.  */
          if (use_stmt == stmt)
            {
              fail = true;
              BREAK_FROM_IMM_USE_STMT (ui);
            }
          /* In simple cases we can look through PHI nodes, but we
             have to be careful with loops and with memory references
             containing operands that are also operands of PHI nodes.
             See gcc.c-torture/execute/20051110-*.c.  */
          else if (gimple_code (use_stmt) == GIMPLE_PHI)
            {
              if (temp
                  /* Make sure we are not in a loop latch block.  */
                  || gimple_bb (stmt) == gimple_bb (use_stmt)
                  || dominated_by_p (CDI_DOMINATORS,
                                     gimple_bb (stmt), gimple_bb (use_stmt))
                  /* We can look through PHIs to regions post-dominating
                     the DSE candidate stmt.  */
                  || !dominated_by_p (CDI_POST_DOMINATORS,
                                      gimple_bb (stmt), gimple_bb (use_stmt)))
                {
                  fail = true;
                  BREAK_FROM_IMM_USE_STMT (ui);
                }
              /* Do not consider the PHI as a use if it dominates the
                 stmt defining the virtual operand we are processing;
                 we have processed it already in this case.  */
              if (gimple_bb (defvar_def) != gimple_bb (use_stmt)
                  && !dominated_by_p (CDI_DOMINATORS,
                                      gimple_bb (defvar_def),
                                      gimple_bb (use_stmt)))
                temp = use_stmt;
            }
          /* If the statement is a use, the store is not dead.  */
          else if (ref_maybe_used_by_stmt_p (use_stmt,
                                             gimple_assign_lhs (stmt)))
            {
              fail = true;
              BREAK_FROM_IMM_USE_STMT (ui);
            }
          /* If this is a store, remember it, or bail out if we have
             multiple ones (they will then be in different parts of
             the CFG).  */
          else if (gimple_vdef (use_stmt))
            {
              if (temp)
                {
                  fail = true;
                  BREAK_FROM_IMM_USE_STMT (ui);
                }
              temp = use_stmt;
            }
        }

      if (fail)
        return false;

      /* If we didn't find any definition this means the store is dead
         if it isn't a store to global reachable memory.  In this case
         just pretend the stmt makes itself dead.  Otherwise fail.  */
      if (!temp)
        {
          if (stmt_may_clobber_global_p (stmt))
            return false;

          temp = stmt;
          break;
        }
    }
  /* We deliberately stop on clobbering statements and not only on
     killing ones to make walking cheaper.  Otherwise we can just
     continue walking until both stores have equal reference trees.  */
  while (!stmt_may_clobber_ref_p (temp, gimple_assign_lhs (stmt)));

  *use_stmt = temp;

  return true;
}

/* Attempt to eliminate dead stores in the statement referenced by GSI.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.  */
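
/* For instance (a made-up illustration, not from any test case), in

     a = b;       <- STMT
     a = c;       <- USE_STMT, stores to the same lhs

   or

     a.x = 1;     <- STMT
     a = b;       <- USE_STMT, kills all of a and thus a.x

   the earlier store is deleted, provided USE_STMT does not also read
   the location STMT stores to.  */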

static void
dse_optimize_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If this statement has no virtual defs, then there is nothing
     to do.  */
  if (!gimple_vdef (stmt))
    return;

  /* We know we have virtual definitions.  If this is a GIMPLE_ASSIGN
     that's not also a function call, then record it into our table.  */
  if (is_gimple_call (stmt) && gimple_call_fndecl (stmt))
    return;

  if (gimple_has_volatile_ops (stmt))
    return;

  if (is_gimple_assign (stmt))
    {
      gimple use_stmt;

      if (!dse_possible_dead_store_p (stmt, &use_stmt))
        return;

      /* If we have precisely one immediate use at this point and the
         stores are to the same memory location, or there is a chain of
         virtual uses from stmt and the stmt which stores to that same
         memory location, then we may have found a redundant store.  */
      if ((gimple_has_lhs (use_stmt)
           && (operand_equal_p (gimple_assign_lhs (stmt),
                                gimple_get_lhs (use_stmt), 0)))
          || stmt_kills_ref_p (use_stmt, gimple_assign_lhs (stmt)))
        {
          basic_block bb;

          /* If use_stmt is or might be a nop assignment, e.g. for
             struct { ... } S a, b, *p; ...
             b = a; b = b;
             or
             b = a; b = *p; where p might be &b,
             or
             *p = a; *p = b; where p might be &b,
             or
             *p = *u; *p = *v; where p might be v, then USE_STMT
             acts as a use as well as definition, so store in STMT
             is not dead.  */
          if (stmt != use_stmt
              && ref_maybe_used_by_stmt_p (use_stmt, gimple_assign_lhs (stmt)))
            return;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "  Deleted dead store '");
              print_gimple_stmt (dump_file, gsi_stmt (*gsi), dump_flags, 0);
              fprintf (dump_file, "'\n");
            }

          /* Then we need to fix the operand of the consuming stmt.  */
          unlink_stmt_vdef (stmt);

          /* Remove the dead store.  */
          bb = gimple_bb (stmt);
          if (gsi_remove (gsi, true))
            bitmap_set_bit (need_eh_cleanup, bb->index);

          /* And release any SSA_NAMEs set in this statement back to the
             SSA_NAME manager.  */
          release_defs (stmt);
        }
    }
}

static void
dse_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
                 basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
    {
      dse_optimize_stmt (&gsi);
      if (gsi_end_p (gsi))
        gsi = gsi_last_bb (bb);
      else
        gsi_prev (&gsi);
    }
}

/* Main entry point.  */

static unsigned int
tree_ssa_dse (void)
{
  struct dom_walk_data walk_data;

  need_eh_cleanup = BITMAP_ALLOC (NULL);

  renumber_gimple_stmt_uids ();

  /* We might consider making this a property of each pass so that it
     can be [re]computed on an as-needed basis.  Particularly since
     this pass could be seen as an extension of DCE which needs post
     dominators.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  /* Dead store elimination is fundamentally a walk of the post-dominator
     tree and a backwards walk of statements within each block.  */
  walk_data.dom_direction = CDI_POST_DOMINATORS;
  walk_data.initialize_block_local_data = NULL;
  walk_data.before_dom_children = dse_enter_block;
  walk_data.after_dom_children = NULL;

  walk_data.block_local_data_size = 0;
  walk_data.global_data = NULL;

  /* Initialize the dominator walker.  */
  init_walk_dominator_tree (&walk_data);

  /* Recursively walk the dominator tree.  */
  walk_dominator_tree (&walk_data, EXIT_BLOCK_PTR);

  /* Finalize the dominator walker.  */
  fini_walk_dominator_tree (&walk_data);

  /* Removal of stores may make some EH edges dead.  Purge such edges from
     the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      cleanup_tree_cfg ();
    }

  BITMAP_FREE (need_eh_cleanup);

  /* For now, just wipe the post-dominator information.  */
  free_dominance_info (CDI_POST_DOMINATORS);
  return 0;
}

static bool
gate_dse (void)
{
  return flag_tree_dse != 0;
}

struct gimple_opt_pass pass_dse =
{
 {
  GIMPLE_PASS,
  "dse",                        /* name */
  gate_dse,                     /* gate */
  tree_ssa_dse,                 /* execute */
  NULL,                         /* sub */
  NULL,                         /* next */
  0,                            /* static_pass_number */
  TV_TREE_DSE,                  /* tv_id */
  PROP_cfg | PROP_ssa,          /* properties_required */
  0,                            /* properties_provided */
  0,                            /* properties_destroyed */
  0,                            /* todo_flags_start */
  TODO_ggc_collect
    | TODO_verify_ssa           /* todo_flags_finish */
 }
};