1 /* Dead store elimination
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
27 #include "basic-block.h"
28 #include "gimple-pretty-print.h"
31 #include "gimple-iterator.h"
32 #include "gimple-ssa.h"
34 #include "tree-phinodes.h"
35 #include "ssa-iterators.h"
36 #include "tree-ssanames.h"
38 #include "tree-pass.h"
41 #include "langhooks.h"
42 #include "tree-cfgcleanup.h"
44 /* This file implements dead store elimination.
46 A dead store is a store into a memory location which will later be
47 overwritten by another store without any intervening loads. In this
48 case the earlier store can be deleted.
50 In our SSA + virtual operand world we use immediate uses of virtual
51 operands to detect dead stores. If a store's virtual definition
52 is used precisely once by a later store to the same location which
53 post dominates the first store, then the first store is dead.
55 The single use of the store's virtual definition ensures that
56 there are no intervening aliased loads and the requirement that
57 the second load post dominate the first ensures that if the earlier
58 store executes, then the later stores will execute before the function
61 It may help to think of this as first moving the earlier store to
62 the point immediately before the later store. Again, the single
63 use of the virtual definition and the post-dominance relationship
64 ensure that such movement would be safe. Clearly if there are
65 back to back stores, then the second is redundant.
67 Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
68 may also help in understanding this code since it discusses the
69 relationship between dead store and redundant load elimination. In
70 fact, they are the same transformation applied to different views of
74 /* Bitmap of blocks that have had EH statements cleaned. We should
75 remove their dead edges eventually. */
76 static bitmap need_eh_cleanup
;
78 static bool gate_dse (void);
79 static unsigned int tree_ssa_dse (void);
82 /* A helper of dse_optimize_stmt.
83 Given a GIMPLE_ASSIGN in STMT, find a candidate statement *USE_STMT that
84 may prove STMT to be dead.
85 Return TRUE if the above conditions are met, otherwise FALSE. */
88 dse_possible_dead_store_p (gimple stmt
, gimple
*use_stmt
)
95 /* Self-assignments are zombies. */
96 if (operand_equal_p (gimple_assign_rhs1 (stmt
), gimple_assign_lhs (stmt
), 0))
102 /* Find the first dominated statement that clobbers (part of) the
103 memory stmt stores to with no intermediate statement that may use
104 part of the memory stmt stores. That is, find a store that may
105 prove stmt to be a dead store. */
109 gimple use_stmt
, defvar_def
;
114 /* Limit stmt walking to be linear in the number of possibly
119 if (gimple_code (temp
) == GIMPLE_PHI
)
120 defvar
= PHI_RESULT (temp
);
122 defvar
= gimple_vdef (temp
);
125 FOR_EACH_IMM_USE_STMT (use_stmt
, ui
, defvar
)
129 /* If we ever reach our DSE candidate stmt again fail. We
130 cannot handle dead stores in loops. */
131 if (use_stmt
== stmt
)
134 BREAK_FROM_IMM_USE_STMT (ui
);
136 /* In simple cases we can look through PHI nodes, but we
137 have to be careful with loops and with memory references
138 containing operands that are also operands of PHI nodes.
139 See gcc.c-torture/execute/20051110-*.c. */
140 else if (gimple_code (use_stmt
) == GIMPLE_PHI
)
143 /* Make sure we are not in a loop latch block. */
144 || gimple_bb (stmt
) == gimple_bb (use_stmt
)
145 || dominated_by_p (CDI_DOMINATORS
,
146 gimple_bb (stmt
), gimple_bb (use_stmt
))
147 /* We can look through PHIs to regions post-dominating
148 the DSE candidate stmt. */
149 || !dominated_by_p (CDI_POST_DOMINATORS
,
150 gimple_bb (stmt
), gimple_bb (use_stmt
)))
153 BREAK_FROM_IMM_USE_STMT (ui
);
155 /* Do not consider the PHI as use if it dominates the
156 stmt defining the virtual operand we are processing,
157 we have processed it already in this case. */
158 if (gimple_bb (defvar_def
) != gimple_bb (use_stmt
)
159 && !dominated_by_p (CDI_DOMINATORS
,
160 gimple_bb (defvar_def
),
161 gimple_bb (use_stmt
)))
164 /* If the statement is a use the store is not dead. */
165 else if (ref_maybe_used_by_stmt_p (use_stmt
,
166 gimple_assign_lhs (stmt
)))
169 BREAK_FROM_IMM_USE_STMT (ui
);
171 /* If this is a store, remember it or bail out if we have
172 multiple ones (the will be in different CFG parts then). */
173 else if (gimple_vdef (use_stmt
))
178 BREAK_FROM_IMM_USE_STMT (ui
);
187 /* If we didn't find any definition this means the store is dead
188 if it isn't a store to global reachable memory. In this case
189 just pretend the stmt makes itself dead. Otherwise fail. */
192 if (stmt_may_clobber_global_p (stmt
))
199 /* We deliberately stop on clobbering statements and not only on
200 killing ones to make walking cheaper. Otherwise we can just
201 continue walking until both stores have equal reference trees. */
202 while (!stmt_may_clobber_ref_p (temp
, gimple_assign_lhs (stmt
)));
210 /* Attempt to eliminate dead stores in the statement referenced by BSI.
212 A dead store is a store into a memory location which will later be
213 overwritten by another store without any intervening loads. In this
214 case the earlier store can be deleted.
216 In our SSA + virtual operand world we use immediate uses of virtual
217 operands to detect dead stores. If a store's virtual definition
218 is used precisely once by a later store to the same location which
219 post dominates the first store, then the first store is dead. */
222 dse_optimize_stmt (gimple_stmt_iterator
*gsi
)
224 gimple stmt
= gsi_stmt (*gsi
);
226 /* If this statement has no virtual defs, then there is nothing
228 if (!gimple_vdef (stmt
))
231 /* We know we have virtual definitions. If this is a GIMPLE_ASSIGN
232 that's not also a function call, then record it into our table. */
233 if (is_gimple_call (stmt
) && gimple_call_fndecl (stmt
))
236 /* Don't return early on *this_2(D) ={v} {CLOBBER}. */
237 if (gimple_has_volatile_ops (stmt
)
238 && (!gimple_clobber_p (stmt
)
239 || TREE_CODE (gimple_assign_lhs (stmt
)) != MEM_REF
))
242 if (is_gimple_assign (stmt
))
246 if (!dse_possible_dead_store_p (stmt
, &use_stmt
))
249 /* But only remove *this_2(D) ={v} {CLOBBER} if killed by
250 another clobber stmt. */
251 if (gimple_clobber_p (stmt
)
252 && !gimple_clobber_p (use_stmt
))
255 /* If we have precisely one immediate use at this point and the
256 stores are to the same memory location or there is a chain of
257 virtual uses from stmt and the stmt which stores to that same
258 memory location, then we may have found redundant store. */
259 if ((gimple_has_lhs (use_stmt
)
260 && (operand_equal_p (gimple_assign_lhs (stmt
),
261 gimple_get_lhs (use_stmt
), 0)))
262 || stmt_kills_ref_p (use_stmt
, gimple_assign_lhs (stmt
)))
266 /* If use_stmt is or might be a nop assignment, e.g. for
267 struct { ... } S a, b, *p; ...
270 b = a; b = *p; where p might be &b,
272 *p = a; *p = b; where p might be &b,
274 *p = *u; *p = *v; where p might be v, then USE_STMT
275 acts as a use as well as definition, so store in STMT
278 && ref_maybe_used_by_stmt_p (use_stmt
, gimple_assign_lhs (stmt
)))
281 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
283 fprintf (dump_file
, " Deleted dead store '");
284 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
), dump_flags
, 0);
285 fprintf (dump_file
, "'\n");
288 /* Then we need to fix the operand of the consuming stmt. */
289 unlink_stmt_vdef (stmt
);
291 /* Remove the dead store. */
292 bb
= gimple_bb (stmt
);
293 if (gsi_remove (gsi
, true))
294 bitmap_set_bit (need_eh_cleanup
, bb
->index
);
296 /* And release any SSA_NAMEs set in this statement back to the
303 class dse_dom_walker
: public dom_walker
306 dse_dom_walker (cdi_direction direction
) : dom_walker (direction
) {}
308 virtual void before_dom_children (basic_block
);
312 dse_dom_walker::before_dom_children (basic_block bb
)
314 gimple_stmt_iterator gsi
;
316 for (gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
);)
318 dse_optimize_stmt (&gsi
);
320 gsi
= gsi_last_bb (bb
);
326 /* Main entry point. */
331 need_eh_cleanup
= BITMAP_ALLOC (NULL
);
333 renumber_gimple_stmt_uids ();
335 /* We might consider making this a property of each pass so that it
336 can be [re]computed on an as-needed basis. Particularly since
337 this pass could be seen as an extension of DCE which needs post
339 calculate_dominance_info (CDI_POST_DOMINATORS
);
340 calculate_dominance_info (CDI_DOMINATORS
);
342 /* Dead store elimination is fundamentally a walk of the post-dominator
343 tree and a backwards walk of statements within each block. */
344 dse_dom_walker (CDI_POST_DOMINATORS
).walk (cfun
->cfg
->x_exit_block_ptr
);
346 /* Removal of stores may make some EH edges dead. Purge such edges from
347 the CFG as needed. */
348 if (!bitmap_empty_p (need_eh_cleanup
))
350 gimple_purge_all_dead_eh_edges (need_eh_cleanup
);
354 BITMAP_FREE (need_eh_cleanup
);
356 /* For now, just wipe the post-dominator information. */
357 free_dominance_info (CDI_POST_DOMINATORS
);
364 return flag_tree_dse
!= 0;
369 const pass_data pass_data_dse
=
371 GIMPLE_PASS
, /* type */
373 OPTGROUP_NONE
, /* optinfo_flags */
375 true, /* has_execute */
376 TV_TREE_DSE
, /* tv_id */
377 ( PROP_cfg
| PROP_ssa
), /* properties_required */
378 0, /* properties_provided */
379 0, /* properties_destroyed */
380 0, /* todo_flags_start */
381 TODO_verify_ssa
, /* todo_flags_finish */
384 class pass_dse
: public gimple_opt_pass
387 pass_dse (gcc::context
*ctxt
)
388 : gimple_opt_pass (pass_data_dse
, ctxt
)
391 /* opt_pass methods: */
392 opt_pass
* clone () { return new pass_dse (m_ctxt
); }
393 bool gate () { return gate_dse (); }
394 unsigned int execute () { return tree_ssa_dse (); }
401 make_pass_dse (gcc::context
*ctxt
)
403 return new pass_dse (ctxt
);