pa64-hpux.h (FINI_SECTION_ASM_OP): Define to null string.
[official-gcc.git] / gcc / tree-ssa-dse.c
blob372a0be2ffa1310dd488bfcee61ac55724450813
1 /* Dead store elimination
2 Copyright (C) 2004-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "gimple-iterator.h"
32 #include "tree-cfg.h"
33 #include "tree-dfa.h"
34 #include "domwalk.h"
35 #include "tree-cfgcleanup.h"
37 /* This file implements dead store elimination.
39 A dead store is a store into a memory location which will later be
40 overwritten by another store without any intervening loads. In this
41 case the earlier store can be deleted.
43 In our SSA + virtual operand world we use immediate uses of virtual
44 operands to detect dead stores. If a store's virtual definition
45 is used precisely once by a later store to the same location which
46 post dominates the first store, then the first store is dead.
48 The single use of the store's virtual definition ensures that
49 there are no intervening aliased loads and the requirement that
50 the second load post dominate the first ensures that if the earlier
51 store executes, then the later stores will execute before the function
52 exits.
54 It may help to think of this as first moving the earlier store to
55 the point immediately before the later store. Again, the single
56 use of the virtual definition and the post-dominance relationship
57 ensure that such movement would be safe. Clearly if there are
58 back to back stores, then the second is redundant.
60 Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
61 may also help in understanding this code since it discusses the
62 relationship between dead store and redundant load elimination. In
63 fact, they are the same transformation applied to different views of
64 the CFG. */
/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  Set by dse_optimize_stmt when
   removing a statement makes EH edges purgeable; consumed once at the
   end of pass_dse::execute.  */
static bitmap need_eh_cleanup;
/* A helper of dse_optimize_stmt.
   Given a GIMPLE_ASSIGN in STMT that writes to REF, find a candidate
   statement *USE_STMT that may prove STMT to be dead.
   Return TRUE if the above conditions are met, otherwise FALSE.

   The walk follows the virtual def-use chain starting at STMT: at each
   step we inspect all immediate uses of the current virtual definition
   and either fail (an aliased load, a loop back-edge, multiple stores in
   different CFG parts) or advance to the single dominating store/PHI,
   until a statement that kills REF is found.  */

static bool
dse_possible_dead_store_p (ao_ref *ref, gimple *stmt, gimple **use_stmt)
{
  gimple *temp;
  unsigned cnt = 0;

  *use_stmt = NULL;

  /* Find the first dominated statement that clobbers (part of) the
     memory stmt stores to with no intermediate statement that may use
     part of the memory stmt stores.  That is, find a store that may
     prove stmt to be a dead store.  */
  temp = stmt;
  do
    {
      gimple *use_stmt, *defvar_def;
      imm_use_iterator ui;
      bool fail = false;
      tree defvar;

      /* Limit stmt walking to be linear in the number of possibly
         dead stores.  */
      if (++cnt > 256)
	return false;

      /* The virtual definition to follow: PHI result for PHIs,
	 otherwise the statement's virtual def.  */
      if (gimple_code (temp) == GIMPLE_PHI)
	defvar = PHI_RESULT (temp);
      else
	defvar = gimple_vdef (temp);
      defvar_def = temp;
      temp = NULL;
      FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
	{
	  cnt++;

	  /* If we ever reach our DSE candidate stmt again fail.  We
	     cannot handle dead stores in loops.  */
	  if (use_stmt == stmt)
	    {
	      fail = true;
	      BREAK_FROM_IMM_USE_STMT (ui);
	    }
	  /* In simple cases we can look through PHI nodes, but we
	     have to be careful with loops and with memory references
	     containing operands that are also operands of PHI nodes.
	     See gcc.c-torture/execute/20051110-*.c.  */
	  else if (gimple_code (use_stmt) == GIMPLE_PHI)
	    {
	      if (temp
		  /* Make sure we are not in a loop latch block.  */
		  || gimple_bb (stmt) == gimple_bb (use_stmt)
		  || dominated_by_p (CDI_DOMINATORS,
				     gimple_bb (stmt), gimple_bb (use_stmt))
		  /* We can look through PHIs to regions post-dominating
		     the DSE candidate stmt.  */
		  || !dominated_by_p (CDI_POST_DOMINATORS,
				      gimple_bb (stmt), gimple_bb (use_stmt)))
		{
		  fail = true;
		  BREAK_FROM_IMM_USE_STMT (ui);
		}
	      /* Do not consider the PHI as use if it dominates the
	         stmt defining the virtual operand we are processing,
		 we have processed it already in this case.  */
	      if (gimple_bb (defvar_def) != gimple_bb (use_stmt)
		  && !dominated_by_p (CDI_DOMINATORS,
				      gimple_bb (defvar_def),
				      gimple_bb (use_stmt)))
		temp = use_stmt;
	    }
	  /* If the statement is a use the store is not dead.  */
	  else if (ref_maybe_used_by_stmt_p (use_stmt, ref))
	    {
	      fail = true;
	      BREAK_FROM_IMM_USE_STMT (ui);
	    }
	  /* If this is a store, remember it or bail out if we have
	     multiple ones (they will be in different CFG parts then).  */
	  else if (gimple_vdef (use_stmt))
	    {
	      if (temp)
		{
		  fail = true;
		  BREAK_FROM_IMM_USE_STMT (ui);
		}
	      temp = use_stmt;
	    }
	}

      if (fail)
	return false;

      /* If we didn't find any definition this means the store is dead
         if it isn't a store to global reachable memory.  In this case
	 just pretend the stmt makes itself dead.  Otherwise fail.  */
      if (!temp)
	{
	  if (ref_may_alias_global_p (ref))
	    return false;

	  temp = stmt;
	  break;
	}
    }
  /* Continue walking until we reach a kill.  */
  while (!stmt_kills_ref_p (temp, ref));

  *use_stmt = temp;

  return true;
}
/* Attempt to eliminate dead stores in the statement referenced by BSI.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.

   Handles plain assignments and the memcpy/memmove/memset builtins;
   on removal the containing block is recorded in NEED_EH_CLEANUP when
   dead EH edges may result.  */

static void
dse_optimize_stmt (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If this statement has no virtual defs, then there is nothing
     to do.  */
  if (!gimple_vdef (stmt))
    return;

  /* Don't return early on *this_2(D) ={v} {CLOBBER}.  */
  if (gimple_has_volatile_ops (stmt)
      && (!gimple_clobber_p (stmt)
	  || TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF))
    return;

  /* We know we have virtual definitions.  We can handle assignments and
     some builtin calls.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
	{
	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMSET:
	    {
	      gimple *use_stmt;
	      ao_ref ref;
	      tree size = NULL_TREE;
	      if (gimple_call_num_args (stmt) == 3)
		size = gimple_call_arg (stmt, 2);
	      tree ptr = gimple_call_arg (stmt, 0);
	      /* Build the reference for the bytes the call writes.  */
	      ao_ref_init_from_ptr_and_size (&ref, ptr, size);
	      if (!dse_possible_dead_store_p (&ref, stmt, &use_stmt))
		return;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "  Deleted dead call '");
		  print_gimple_stmt (dump_file, gsi_stmt (*gsi), dump_flags, 0);
		  fprintf (dump_file, "'\n");
		}

	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs)
		{
		  /* The call's return value (the destination pointer) may
		     be used; replace the call with LHS = PTR instead of
		     removing it outright.  */
		  gimple *new_stmt = gimple_build_assign (lhs, ptr);
		  unlink_stmt_vdef (stmt);
		  if (gsi_replace (gsi, new_stmt, true))
		    bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
		}
	      else
		{
		  /* Then we need to fix the operand of the consuming stmt.  */
		  unlink_stmt_vdef (stmt);

		  /* Remove the dead store.  */
		  if (gsi_remove (gsi, true))
		    bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
		  release_defs (stmt);
		}
	      break;
	    }
	  default:
	    return;
	}
    }

  if (is_gimple_assign (stmt))
    {
      gimple *use_stmt;

      /* Self-assignments are zombies.  */
      if (operand_equal_p (gimple_assign_rhs1 (stmt),
			   gimple_assign_lhs (stmt), 0))
	use_stmt = stmt;
      else
	{
	  ao_ref ref;
	  ao_ref_init (&ref, gimple_assign_lhs (stmt));
	  if (!dse_possible_dead_store_p (&ref, stmt, &use_stmt))
	    return;
	}

      /* Now we know that use_stmt kills the LHS of stmt.  */

      /* But only remove *this_2(D) ={v} {CLOBBER} if killed by
	 another clobber stmt.  */
      if (gimple_clobber_p (stmt)
	  && !gimple_clobber_p (use_stmt))
	return;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "  Deleted dead store '");
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), dump_flags, 0);
	  fprintf (dump_file, "'\n");
	}

      /* Then we need to fix the operand of the consuming stmt.  */
      unlink_stmt_vdef (stmt);

      /* Remove the dead store.  */
      basic_block bb = gimple_bb (stmt);
      if (gsi_remove (gsi, true))
	bitmap_set_bit (need_eh_cleanup, bb->index);

      /* And release any SSA_NAMEs set in this statement back to the
	 SSA_NAME manager.  */
      release_defs (stmt);
    }
}
/* Dominator walker that visits each basic block and runs dead store
   elimination over its statements.  Instantiated over the
   post-dominator tree in pass_dse::execute.  */

class dse_dom_walker : public dom_walker
{
public:
  dse_dom_walker (cdi_direction direction) : dom_walker (direction) {}

  virtual edge before_dom_children (basic_block);
};
322 edge
323 dse_dom_walker::before_dom_children (basic_block bb)
325 gimple_stmt_iterator gsi;
327 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
329 dse_optimize_stmt (&gsi);
330 if (gsi_end_p (gsi))
331 gsi = gsi_last_bb (bb);
332 else
333 gsi_prev (&gsi);
335 return NULL;
namespace {

/* Pass descriptor for the tree-level dead store elimination pass.  */

const pass_data pass_data_dse =
{
  GIMPLE_PASS, /* type */
  "dse", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DSE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* GIMPLE optimization pass wrapper for dead store elimination.
   Gated on -ftree-dse (flag_tree_dse).  */

class pass_dse : public gimple_opt_pass
{
public:
  pass_dse (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dse, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dse (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dse != 0; }
  virtual unsigned int execute (function *);

}; // class pass_dse
/* Entry point for the DSE pass: set up dominance info, walk the
   post-dominator tree removing dead stores, then clean up any EH
   edges that removal made dead.  Returns 0 (no extra TODO flags).  */

unsigned int
pass_dse::execute (function *fun)
{
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  renumber_gimple_stmt_uids ();

  /* We might consider making this a property of each pass so that it
     can be [re]computed on an as-needed basis.  Particularly since
     this pass could be seen as an extension of DCE which needs post
     dominators.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  /* Dead store elimination is fundamentally a walk of the post-dominator
     tree and a backwards walk of statements within each block.  */
  dse_dom_walker (CDI_POST_DOMINATORS).walk (fun->cfg->x_exit_block_ptr);

  /* Removal of stores may make some EH edges dead.  Purge such edges from
     the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      cleanup_tree_cfg ();
    }

  BITMAP_FREE (need_eh_cleanup);

  /* For now, just wipe the post-dominator information.  */
  free_dominance_info (CDI_POST_DOMINATORS);
  return 0;
}
400 } // anon namespace
/* Factory used by the pass manager to create the DSE pass instance.  */

gimple_opt_pass *
make_pass_dse (gcc::context *ctxt)
{
  return new pass_dse (ctxt);
}