/* Extracted from official-gcc.git: gcc/tree-ssa-dse.c
   (blob be440c9b65708ce469090e5f59821bb72465d163).
   Related ChangeLog entry from the same commit:
   gcc.target/powerpc/altivec-volatile.c: Adjust expected warning.  */
/* Dead store elimination
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "ggc.h"
26 #include "tree.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "timevar.h"
30 #include "gimple-pretty-print.h"
31 #include "tree-flow.h"
32 #include "tree-pass.h"
33 #include "tree-dump.h"
34 #include "domwalk.h"
35 #include "flags.h"
36 #include "langhooks.h"
/* This file implements dead store elimination.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.

   The single use of the store's virtual definition ensures that
   there are no intervening aliased loads and the requirement that
   the second load post dominate the first ensures that if the earlier
   store executes, then the later stores will execute before the function
   exits.

   It may help to think of this as first moving the earlier store to
   the point immediately before the later store.  Again, the single
   use of the virtual definition and the post-dominance relationship
   ensure that such movement would be safe.  Clearly if there are
   back to back stores, then the second is redundant.

   Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
   may also help in understanding this code since it discusses the
   relationship between dead store and redundant load elimination.  In
   fact, they are the same transformation applied to different views of
   the CFG.  */
68 struct dse_global_data
70 /* This is the global bitmap for store statements.
72 Each statement has a unique ID. When we encounter a store statement
73 that we want to record, set the bit corresponding to the statement's
74 unique ID in this bitmap. */
75 bitmap stores;
78 /* We allocate a bitmap-per-block for stores which are encountered
79 during the scan of that block. This allows us to restore the
80 global bitmap of stores when we finish processing a block. */
81 struct dse_block_local_data
83 bitmap stores;
86 static bool gate_dse (void);
87 static unsigned int tree_ssa_dse (void);
88 static void dse_initialize_block_local_data (struct dom_walk_data *,
89 basic_block,
90 bool);
91 static void dse_enter_block (struct dom_walk_data *, basic_block);
92 static void dse_leave_block (struct dom_walk_data *, basic_block);
93 static void record_voperand_set (bitmap, bitmap *, unsigned int);
95 /* Returns uid of statement STMT. */
97 static unsigned
98 get_stmt_uid (gimple stmt)
100 if (gimple_code (stmt) == GIMPLE_PHI)
101 return SSA_NAME_VERSION (gimple_phi_result (stmt))
102 + gimple_stmt_max_uid (cfun);
104 return gimple_uid (stmt);
107 /* Set bit UID in bitmaps GLOBAL and *LOCAL, creating *LOCAL as needed. */
109 static void
110 record_voperand_set (bitmap global, bitmap *local, unsigned int uid)
112 /* Lazily allocate the bitmap. Note that we do not get a notification
113 when the block local data structures die, so we allocate the local
114 bitmap backed by the GC system. */
115 if (*local == NULL)
116 *local = BITMAP_GGC_ALLOC ();
118 /* Set the bit in the local and global bitmaps. */
119 bitmap_set_bit (*local, uid);
120 bitmap_set_bit (global, uid);
123 /* Initialize block local data structures. */
125 static void
126 dse_initialize_block_local_data (struct dom_walk_data *walk_data,
127 basic_block bb ATTRIBUTE_UNUSED,
128 bool recycled)
130 struct dse_block_local_data *bd
131 = (struct dse_block_local_data *)
132 VEC_last (void_p, walk_data->block_data_stack);
134 /* If we are given a recycled block local data structure, ensure any
135 bitmap associated with the block is cleared. */
136 if (recycled)
138 if (bd->stores)
139 bitmap_clear (bd->stores);
143 /* A helper of dse_optimize_stmt.
144 Given a GIMPLE_ASSIGN in STMT, find a candidate statement *USE_STMT that
145 may prove STMT to be dead.
146 Return TRUE if the above conditions are met, otherwise FALSE. */
148 static bool
149 dse_possible_dead_store_p (gimple stmt, gimple *use_stmt)
151 gimple temp;
152 unsigned cnt = 0;
154 *use_stmt = NULL;
156 /* Find the first dominated statement that clobbers (part of) the
157 memory stmt stores to with no intermediate statement that may use
158 part of the memory stmt stores. That is, find a store that may
159 prove stmt to be a dead store. */
160 temp = stmt;
163 gimple use_stmt;
164 imm_use_iterator ui;
165 bool fail = false;
166 tree defvar;
168 /* Limit stmt walking to be linear in the number of possibly
169 dead stores. */
170 if (++cnt > 256)
171 return false;
173 if (gimple_code (temp) == GIMPLE_PHI)
174 defvar = PHI_RESULT (temp);
175 else
176 defvar = gimple_vdef (temp);
177 temp = NULL;
178 FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
180 cnt++;
182 /* If we ever reach our DSE candidate stmt again fail. We
183 cannot handle dead stores in loops. */
184 if (use_stmt == stmt)
186 fail = true;
187 BREAK_FROM_IMM_USE_STMT (ui);
189 /* In simple cases we can look through PHI nodes, but we
190 have to be careful with loops and with memory references
191 containing operands that are also operands of PHI nodes.
192 See gcc.c-torture/execute/20051110-*.c. */
193 else if (gimple_code (use_stmt) == GIMPLE_PHI)
195 if (temp
196 /* Make sure we are not in a loop latch block. */
197 || gimple_bb (stmt) == gimple_bb (use_stmt)
198 || dominated_by_p (CDI_DOMINATORS,
199 gimple_bb (stmt), gimple_bb (use_stmt))
200 /* We can look through PHIs to regions post-dominating
201 the DSE candidate stmt. */
202 || !dominated_by_p (CDI_POST_DOMINATORS,
203 gimple_bb (stmt), gimple_bb (use_stmt)))
205 fail = true;
206 BREAK_FROM_IMM_USE_STMT (ui);
208 temp = use_stmt;
210 /* If the statement is a use the store is not dead. */
211 else if (ref_maybe_used_by_stmt_p (use_stmt,
212 gimple_assign_lhs (stmt)))
214 fail = true;
215 BREAK_FROM_IMM_USE_STMT (ui);
217 /* If this is a store, remember it or bail out if we have
218 multiple ones (the will be in different CFG parts then). */
219 else if (gimple_vdef (use_stmt))
221 if (temp)
223 fail = true;
224 BREAK_FROM_IMM_USE_STMT (ui);
226 temp = use_stmt;
230 if (fail)
231 return false;
233 /* If we didn't find any definition this means the store is dead
234 if it isn't a store to global reachable memory. In this case
235 just pretend the stmt makes itself dead. Otherwise fail. */
236 if (!temp)
238 if (is_hidden_global_store (stmt))
239 return false;
241 temp = stmt;
242 break;
245 /* We deliberately stop on clobbering statements and not only on
246 killing ones to make walking cheaper. Otherwise we can just
247 continue walking until both stores have equal reference trees. */
248 while (!stmt_may_clobber_ref_p (temp, gimple_assign_lhs (stmt)));
250 if (!is_gimple_assign (temp))
251 return false;
253 *use_stmt = temp;
255 return true;
259 /* Attempt to eliminate dead stores in the statement referenced by BSI.
261 A dead store is a store into a memory location which will later be
262 overwritten by another store without any intervening loads. In this
263 case the earlier store can be deleted.
265 In our SSA + virtual operand world we use immediate uses of virtual
266 operands to detect dead stores. If a store's virtual definition
267 is used precisely once by a later store to the same location which
268 post dominates the first store, then the first store is dead. */
270 static void
271 dse_optimize_stmt (struct dse_global_data *dse_gd,
272 struct dse_block_local_data *bd,
273 gimple_stmt_iterator gsi)
275 gimple stmt = gsi_stmt (gsi);
277 /* If this statement has no virtual defs, then there is nothing
278 to do. */
279 if (!gimple_vdef (stmt))
280 return;
282 /* We know we have virtual definitions. If this is a GIMPLE_ASSIGN
283 that's not also a function call, then record it into our table. */
284 if (is_gimple_call (stmt) && gimple_call_fndecl (stmt))
285 return;
287 if (gimple_has_volatile_ops (stmt))
288 return;
290 if (is_gimple_assign (stmt))
292 gimple use_stmt;
294 record_voperand_set (dse_gd->stores, &bd->stores, gimple_uid (stmt));
296 if (!dse_possible_dead_store_p (stmt, &use_stmt))
297 return;
299 /* If we have precisely one immediate use at this point and the
300 stores are to the same memory location or there is a chain of
301 virtual uses from stmt and the stmt which stores to that same
302 memory location, then we may have found redundant store. */
303 if (bitmap_bit_p (dse_gd->stores, get_stmt_uid (use_stmt))
304 && operand_equal_p (gimple_assign_lhs (stmt),
305 gimple_assign_lhs (use_stmt), 0))
307 /* If use_stmt is or might be a nop assignment, e.g. for
308 struct { ... } S a, b, *p; ...
309 b = a; b = b;
311 b = a; b = *p; where p might be &b,
313 *p = a; *p = b; where p might be &b,
315 *p = *u; *p = *v; where p might be v, then USE_STMT
316 acts as a use as well as definition, so store in STMT
317 is not dead. */
318 if (stmt != use_stmt
319 && !is_gimple_reg (gimple_assign_rhs1 (use_stmt))
320 && !is_gimple_min_invariant (gimple_assign_rhs1 (use_stmt))
321 /* ??? Should {} be invariant? */
322 && gimple_assign_rhs_code (use_stmt) != CONSTRUCTOR
323 && refs_may_alias_p (gimple_assign_lhs (use_stmt),
324 gimple_assign_rhs1 (use_stmt)))
325 return;
327 if (dump_file && (dump_flags & TDF_DETAILS))
329 fprintf (dump_file, " Deleted dead store '");
330 print_gimple_stmt (dump_file, gsi_stmt (gsi), dump_flags, 0);
331 fprintf (dump_file, "'\n");
334 /* Then we need to fix the operand of the consuming stmt. */
335 unlink_stmt_vdef (stmt);
337 /* Remove the dead store. */
338 gsi_remove (&gsi, true);
340 /* And release any SSA_NAMEs set in this statement back to the
341 SSA_NAME manager. */
342 release_defs (stmt);
347 /* Record that we have seen the PHIs at the start of BB which correspond
348 to virtual operands. */
349 static void
350 dse_record_phi (struct dse_global_data *dse_gd,
351 struct dse_block_local_data *bd,
352 gimple phi)
354 if (!is_gimple_reg (gimple_phi_result (phi)))
355 record_voperand_set (dse_gd->stores, &bd->stores, get_stmt_uid (phi));
358 static void
359 dse_enter_block (struct dom_walk_data *walk_data, basic_block bb)
361 struct dse_block_local_data *bd
362 = (struct dse_block_local_data *)
363 VEC_last (void_p, walk_data->block_data_stack);
364 struct dse_global_data *dse_gd
365 = (struct dse_global_data *) walk_data->global_data;
366 gimple_stmt_iterator gsi;
368 for (gsi = gsi_last (bb_seq (bb)); !gsi_end_p (gsi); gsi_prev (&gsi))
369 dse_optimize_stmt (dse_gd, bd, gsi);
370 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
371 dse_record_phi (dse_gd, bd, gsi_stmt (gsi));
374 static void
375 dse_leave_block (struct dom_walk_data *walk_data,
376 basic_block bb ATTRIBUTE_UNUSED)
378 struct dse_block_local_data *bd
379 = (struct dse_block_local_data *)
380 VEC_last (void_p, walk_data->block_data_stack);
381 struct dse_global_data *dse_gd
382 = (struct dse_global_data *) walk_data->global_data;
383 bitmap stores = dse_gd->stores;
384 unsigned int i;
385 bitmap_iterator bi;
387 /* Unwind the stores noted in this basic block. */
388 if (bd->stores)
389 EXECUTE_IF_SET_IN_BITMAP (bd->stores, 0, i, bi)
391 bitmap_clear_bit (stores, i);
395 /* Main entry point. */
397 static unsigned int
398 tree_ssa_dse (void)
400 struct dom_walk_data walk_data;
401 struct dse_global_data dse_gd;
403 renumber_gimple_stmt_uids ();
405 /* We might consider making this a property of each pass so that it
406 can be [re]computed on an as-needed basis. Particularly since
407 this pass could be seen as an extension of DCE which needs post
408 dominators. */
409 calculate_dominance_info (CDI_POST_DOMINATORS);
410 calculate_dominance_info (CDI_DOMINATORS);
412 /* Dead store elimination is fundamentally a walk of the post-dominator
413 tree and a backwards walk of statements within each block. */
414 walk_data.dom_direction = CDI_POST_DOMINATORS;
415 walk_data.initialize_block_local_data = dse_initialize_block_local_data;
416 walk_data.before_dom_children = dse_enter_block;
417 walk_data.after_dom_children = dse_leave_block;
419 walk_data.block_local_data_size = sizeof (struct dse_block_local_data);
421 /* This is the main hash table for the dead store elimination pass. */
422 dse_gd.stores = BITMAP_ALLOC (NULL);
423 walk_data.global_data = &dse_gd;
425 /* Initialize the dominator walker. */
426 init_walk_dominator_tree (&walk_data);
428 /* Recursively walk the dominator tree. */
429 walk_dominator_tree (&walk_data, EXIT_BLOCK_PTR);
431 /* Finalize the dominator walker. */
432 fini_walk_dominator_tree (&walk_data);
434 /* Release the main bitmap. */
435 BITMAP_FREE (dse_gd.stores);
437 /* For now, just wipe the post-dominator information. */
438 free_dominance_info (CDI_POST_DOMINATORS);
439 return 0;
442 static bool
443 gate_dse (void)
445 return flag_tree_dse != 0;
448 struct gimple_opt_pass pass_dse =
451 GIMPLE_PASS,
452 "dse", /* name */
453 gate_dse, /* gate */
454 tree_ssa_dse, /* execute */
455 NULL, /* sub */
456 NULL, /* next */
457 0, /* static_pass_number */
458 TV_TREE_DSE, /* tv_id */
459 PROP_cfg | PROP_ssa, /* properties_required */
460 0, /* properties_provided */
461 0, /* properties_destroyed */
462 0, /* todo_flags_start */
463 TODO_dump_func
464 | TODO_ggc_collect
465 | TODO_verify_ssa /* todo_flags_finish */