/* Dead store elimination
   Copyright (C) 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "errors.h"
#include "ggc.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "timevar.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-dump.h"
#include "domwalk.h"
#include "flags.h"
/* This file implements dead store elimination.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.

   The single use of the store's virtual definition ensures that
   there are no intervening aliased loads, and the requirement that
   the second store post dominate the first ensures that if the earlier
   store executes, then the later store will execute before the function
   exits.

   It may help to think of this as first moving the earlier store to
   the point immediately before the later store.  Again, the single
   use of the virtual definition and the post-dominance relationship
   ensure that such movement would be safe.  Clearly if there are
   back to back stores, then the second is redundant.

   Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
   may also help in understanding this code since it discusses the
   relationship between dead store and redundant load elimination.  In
   fact, they are the same transformation applied to different views of
   the CFG.  */
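
/* As a minimal illustration (a made-up fragment, not taken from any
   testcase), consider:

	*p = 1;		<- dead: its virtual definition has exactly one
	*p = 2;		   use, the store below, which post-dominates it

   The first store can be deleted.  If a load of *p (or of anything that
   may alias *p) appeared between the two stores, the first store's
   virtual definition would have more than one immediate use and the
   store would be kept.  */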
struct dse_global_data
{
  /* This is the global bitmap for store statements.

     Each statement has a unique ID.  When we encounter a store statement
     that we want to record, set the bit corresponding to the statement's
     unique ID in this bitmap.  */
  bitmap stores;
};
/* We allocate a bitmap-per-block for stores which are encountered
   during the scan of that block.  This allows us to restore the
   global bitmap of stores when we finish processing a block.  */
struct dse_block_local_data
{
  bitmap stores;
};
static bool gate_dse (void);
static void tree_ssa_dse (void);
static void dse_initialize_block_local_data (struct dom_walk_data *,
					     basic_block,
					     bool);
static void dse_optimize_stmt (struct dom_walk_data *,
			       basic_block,
			       block_stmt_iterator);
static void dse_record_phis (struct dom_walk_data *, basic_block);
static void dse_finalize_block (struct dom_walk_data *, basic_block);
static void fix_phi_uses (tree, tree);
static void fix_stmt_vdefs (tree, tree);
static void record_voperand_set (bitmap, bitmap *, unsigned int);
/* Function indicating whether we ought to include information for 'var'
   when calculating immediate uses.  For this pass we only want use
   information for virtual variables.  */

static bool
need_imm_uses_for (tree var)
{
  return !is_gimple_reg (var);
}
/* Replace uses in PHI which match VDEF_RESULTs in STMT with the
   corresponding VDEF_OP in STMT.  */

static void
fix_phi_uses (tree phi, tree stmt)
{
  stmt_ann_t ann = stmt_ann (stmt);
  vdef_optype vdefs;
  unsigned int i;
  int j;

  get_stmt_operands (stmt);
  vdefs = VDEF_OPS (ann);

  /* Walk each VDEF in STMT.  */
  for (i = 0; i < NUM_VDEFS (vdefs); i++)
    {
      tree vdef = VDEF_RESULT (vdefs, i);

      /* Find any uses in the PHI which match VDEF and replace
	 them with the appropriate VDEF_OP.  */
      for (j = 0; j < PHI_NUM_ARGS (phi); j++)
	if (vdef == PHI_ARG_DEF (phi, j))
	  PHI_ARG_DEF (phi, j) = VDEF_OP (vdefs, i);
    }
}
/* Replace the VDEF_OPs in STMT1 which match VDEF_RESULTs in STMT2 with
   the appropriate VDEF_OPs from STMT2.  */

static void
fix_stmt_vdefs (tree stmt1, tree stmt2)
{
  stmt_ann_t ann1 = stmt_ann (stmt1);
  stmt_ann_t ann2 = stmt_ann (stmt2);
  vdef_optype vdefs1;
  vdef_optype vdefs2;
  unsigned int i, j;

  get_stmt_operands (stmt1);
  get_stmt_operands (stmt2);
  vdefs1 = VDEF_OPS (ann1);
  vdefs2 = VDEF_OPS (ann2);

  /* Walk each VDEF_OP in STMT1.  */
  for (i = 0; i < NUM_VDEFS (vdefs1); i++)
    {
      tree vdef1 = VDEF_OP (vdefs1, i);

      /* Find the appropriate VDEF_RESULT in STMT2.  */
      for (j = 0; j < NUM_VDEFS (vdefs2); j++)
	if (vdef1 == VDEF_RESULT (vdefs2, j))
	  {
	    /* Update.  */
	    *VDEF_OP_PTR (vdefs1, i) = VDEF_OP (vdefs2, j);
	    break;
	  }

#ifdef ENABLE_CHECKING
      /* If we did not find a corresponding VDEF_RESULT, then something
	 has gone terribly wrong.  */
      if (j == NUM_VDEFS (vdefs2))
	abort ();
#endif
    }
}
/* Set bit UID in bitmaps GLOBAL and *LOCAL, creating *LOCAL as needed.  */

static void
record_voperand_set (bitmap global, bitmap *local, unsigned int uid)
{
  /* Lazily allocate the bitmap.  Note that we do not get a notification
     when the block local data structures die, so we allocate the local
     bitmap backed by the GC system.  */
  if (*local == NULL)
    *local = BITMAP_GGC_ALLOC ();

  /* Set the bit in the local and global bitmaps.  */
  bitmap_set_bit (*local, uid);
  bitmap_set_bit (global, uid);
}
/* Initialize block local data structures.  */

static void
dse_initialize_block_local_data (struct dom_walk_data *walk_data,
				 basic_block bb ATTRIBUTE_UNUSED,
				 bool recycled)
{
  struct dse_block_local_data *bd
    = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);

  /* If we are given a recycled block local data structure, ensure any
     bitmap associated with the block is cleared.  */
  if (recycled)
    {
      if (bd->stores)
	bitmap_clear (bd->stores);
    }
}
/* Attempt to eliminate dead stores in the statement referenced by BSI.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.  */
static void
dse_optimize_stmt (struct dom_walk_data *walk_data,
		   basic_block bb ATTRIBUTE_UNUSED,
		   block_stmt_iterator bsi)
{
  struct dse_block_local_data *bd
    = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);
  struct dse_global_data *dse_gd = walk_data->global_data;
  tree stmt = bsi_stmt (bsi);
  stmt_ann_t ann = stmt_ann (stmt);
  vdef_optype vdefs;

  get_stmt_operands (stmt);
  vdefs = VDEF_OPS (ann);

  /* If this statement has no virtual definitions, then there is nothing
     to do.  */
  if (NUM_VDEFS (vdefs) == 0)
    return;

  /* We know we have virtual definitions.  If this is a MODIFY_EXPR that
     is not a call, then record it into our table.  */
  if (TREE_CODE (stmt) == MODIFY_EXPR
      && TREE_CODE (TREE_OPERAND (stmt, 1)) != CALL_EXPR)
    {
      dataflow_t df = get_immediate_uses (stmt);
      unsigned int num_uses = num_immediate_uses (df);
      tree use;
      tree skipped_phi;

      /* If there are no uses then there is nothing left to do.  */
      if (num_uses == 0)
	{
	  record_voperand_set (dse_gd->stores, &bd->stores, ann->uid);
	  return;
	}

      use = immediate_use (df, 0);
      skipped_phi = NULL;

      /* Skip through any PHI nodes we have already seen if the PHI
	 represents the only use of this store.

	 Note this does not handle the case where the store has
	 multiple VDEFs which all reach a set of PHI nodes in the
	 same block.  */
      while (num_uses == 1
	     && TREE_CODE (use) == PHI_NODE
	     && bitmap_bit_p (dse_gd->stores, stmt_ann (use)->uid))
	{
	  /* Record the first PHI we skip so that we can fix its
	     uses if we find that STMT is a dead store.  */
	  if (!skipped_phi)
	    skipped_phi = use;

	  /* Skip past this PHI and loop again in case we had a PHI
	     chain.  */
	  df = get_immediate_uses (use);
	  num_uses = num_immediate_uses (df);
	  use = immediate_use (df, 0);
	}

      /* If we have precisely one immediate use at this point, then we may
	 have found a redundant store.  */
      if (num_uses == 1
	  && bitmap_bit_p (dse_gd->stores, stmt_ann (use)->uid)
	  && operand_equal_p (TREE_OPERAND (stmt, 0),
			      TREE_OPERAND (use, 0), 0))
	{
	  /* We need to fix the operands of either the first PHI we
	     skipped or, if we did not skip any PHIs, the store which
	     consumes this store's virtual definition.  */
	  if (skipped_phi)
	    fix_phi_uses (skipped_phi, stmt);
	  else
	    fix_stmt_vdefs (use, stmt);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "  Deleted dead store '");
	      print_generic_expr (dump_file, bsi_stmt (bsi), dump_flags);
	      fprintf (dump_file, "'\n");
	    }

	  /* Any immediate uses which reference STMT need to instead
	     reference the new consumer, either SKIPPED_PHI or USE.
	     This allows us to cascade dead stores.  */
	  redirect_immediate_uses (stmt, skipped_phi ? skipped_phi : use);

	  /* Finally remove the dead store.  */
	  bsi_remove (&bsi);
	}

      record_voperand_set (dse_gd->stores, &bd->stores, ann->uid);
    }
}
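
/* An illustration of the cascading mentioned above (again a made-up
   fragment): given three back to back stores

	a = 1;
	a = 2;
	a = 3;

   the statements of the block are visited backwards, so "a = 3" is
   recorded first, then "a = 2" is deleted and its immediate uses are
   redirected to "a = 3", and finally "a = 1" is seen to have a single
   immediate use by a recorded store to the same location and is deleted
   as well.  */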
/* Record that we have seen the PHIs at the start of BB which correspond
   to virtual operands.  */

static void
dse_record_phis (struct dom_walk_data *walk_data, basic_block bb)
{
  struct dse_block_local_data *bd
    = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);
  struct dse_global_data *dse_gd = walk_data->global_data;
  tree phi;

  for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
    if (need_imm_uses_for (PHI_RESULT (phi)))
      record_voperand_set (dse_gd->stores,
			   &bd->stores,
			   get_stmt_ann (phi)->uid);
}
static void
dse_finalize_block (struct dom_walk_data *walk_data,
		    basic_block bb ATTRIBUTE_UNUSED)
{
  struct dse_block_local_data *bd
    = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);
  struct dse_global_data *dse_gd = walk_data->global_data;
  bitmap stores = dse_gd->stores;
  unsigned int i;

  /* Unwind the stores noted in this basic block.  */
  if (bd->stores)
    EXECUTE_IF_SET_IN_BITMAP (bd->stores, 0, i, bitmap_clear_bit (stores, i););
}
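
/* Note that the unwinding done above is what keeps the global STORES
   bitmap restricted to statements in blocks which post-dominate the
   block currently being walked; without it a store recorded in a
   sibling subtree of the post-dominator tree could be mistaken for a
   post-dominating store in dse_optimize_stmt.  */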
static void
tree_ssa_dse (void)
{
  struct dom_walk_data walk_data;
  struct dse_global_data dse_gd;
  unsigned int uid = 0;
  basic_block bb;

  /* Create a UID for each statement in the function.  Ordering of the
     UIDs is not important for this pass.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi;
      tree phi;

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	stmt_ann (bsi_stmt (bsi))->uid = uid++;

      for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
	stmt_ann (phi)->uid = uid++;
    }

  /* We might consider making this a property of each pass so that it
     can be [re]computed on an as-needed basis.  Particularly since
     this pass could be seen as an extension of DCE which needs post
     dominators.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);

  /* We also need immediate use information for virtual operands.  */
  compute_immediate_uses (TDFA_USE_VOPS, need_imm_uses_for);

  /* Dead store elimination is fundamentally a walk of the post-dominator
     tree and a backwards walk of statements within each block.  */
  walk_data.walk_stmts_backward = true;
  walk_data.dom_direction = CDI_POST_DOMINATORS;
  walk_data.initialize_block_local_data = dse_initialize_block_local_data;
  walk_data.before_dom_children_before_stmts = NULL;
  walk_data.before_dom_children_walk_stmts = dse_optimize_stmt;
  walk_data.before_dom_children_after_stmts = dse_record_phis;
  walk_data.after_dom_children_before_stmts = NULL;
  walk_data.after_dom_children_walk_stmts = NULL;
  walk_data.after_dom_children_after_stmts = dse_finalize_block;
  walk_data.block_local_data_size = sizeof (struct dse_block_local_data);

  /* This is the main bitmap of stores for the dead store elimination pass.  */
  dse_gd.stores = BITMAP_XMALLOC ();
  walk_data.global_data = &dse_gd;

  /* Initialize the dominator walker.  */
  init_walk_dominator_tree (&walk_data);

  /* Recursively walk the dominator tree.  */
  walk_dominator_tree (&walk_data, EXIT_BLOCK_PTR);

  /* Finalize the dominator walker.  */
  fini_walk_dominator_tree (&walk_data);

  /* Release the main bitmap.  */
  BITMAP_XFREE (dse_gd.stores);

  /* Free dataflow information.  It's probably out of date now anyway.  */
  free_df ();

  /* For now, just wipe the post-dominator information.  */
  free_dominance_info (CDI_POST_DOMINATORS);
}
static bool
gate_dse (void)
{
  return flag_tree_dse != 0;
}
struct tree_opt_pass pass_dse = {
  "dse",				/* name */
  gate_dse,				/* gate */
  tree_ssa_dse,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_DSE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_ggc_collect	/* todo_flags_finish */
    | TODO_verify_ssa
};