/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002-2014 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output.  "Dead statements" have no impact
   on the program's output, while "necessary statements" may have
   impact on the output.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */
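
/* As a simplified illustration of the three phases (the SSA names
   below are invented for the example), consider:

     x_1 = a_2 + b_3;    <- x_1 has no uses
     c_4 = a_2 * a_2;
     return c_4;

   Phase 1 marks the return statement as necessary.  Phase 2
   propagates necessity to the definition of c_4 and, through its
   operands, to the definition of a_2.  The assignment to x_1 is never
   marked and is deleted in phase 3.  */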
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "tree.h"
#include "calls.h"
#include "gimple-pretty-print.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "flags.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1

static vec<gimple> worklist;
/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;
/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static control_dependences *cd;
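
/* For instance, in

     if (p_1)        <- block B1
       x_2 = 1;      <- block B2
     return;         <- block B3

   B2 is control dependent on the edge out of B1 that is taken when
   p_1 is true, whereas B3 post-dominates B1 and is therefore not
   control dependent on either outgoing edge of B1.  */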
/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;
/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */

static inline void
mark_stmt_necessary (gimple stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    worklist.safe_push (stmt);
  if (bb_contains_live_stmts && !is_gimple_debug (stmt))
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}
/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (bitmap_bit_p (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
		  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  bitmap_set_bit (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op, 0);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
  worklist.safe_push (stmt);
}
/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
{
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement could throw, it can be deemed necessary.  */
  if (cfun->can_throw_non_call_exceptions
      && !cfun->can_delete_dead_exceptions
      && stmt_could_throw_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
	tree callee = gimple_call_fndecl (stmt);
	if (callee != NULL_TREE
	    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_ALIGNED_ALLOC:
	    case BUILT_IN_CALLOC:
	    case BUILT_IN_ALLOCA:
	    case BUILT_IN_ALLOCA_WITH_ALIGN:
	      return;

	    default:;
	    }
	/* Most, but not all function calls are required.  Function calls that
	   produce no result and have no side effects (i.e. const pure
	   functions) are unnecessary.  */
	if (gimple_has_side_effects (stmt))
	  {
	    mark_stmt_necessary (stmt, true);
	    return;
	  }
	if (!gimple_call_lhs (stmt))
	  return;
	break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
	 easily locate the debug temp bind stmt for a use thereof,
	 we could refrain from marking all debug temps here, and
	 mark them only if they're used.  */
      if (!gimple_debug_bind_p (stmt)
	  || gimple_debug_bind_has_value_p (stmt)
	  || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
	mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
	mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      if (TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
	  && TREE_CLOBBER_P (gimple_assign_rhs1 (stmt)))
	return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (stmt_may_clobber_global_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}
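
/* Note that the allocation builtins (malloc, calloc, alloca, ...) in
   the GIMPLE_CALL case above are deliberately left unmarked even
   though they are calls: if the allocated pointer only ever reaches a
   matching free, the whole pair is removable.  A sketch of the idea:

     p_1 = malloc (16);   <- not obviously necessary
     free (p_1);

   The free call does not make the allocation necessary (see the
   BUILT_IN_FREE handling in propagate_necessity), and the free itself
   is un-marked again in eliminate_unnecessary_stmts once the
   definition of p_1 is known to be dead.  */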
/* Mark the last statement of BB as necessary.  */

static void
mark_last_stmt_necessary (basic_block bb)
{
  gimple stmt = last_stmt (bb);

  bitmap_set_bit (last_stmt_necessary, bb->index);
  bitmap_set_bit (bb_contains_live_stmts, bb->index);

  /* We actually mark the statement only if it is a control statement.  */
  if (stmt && is_ctrl_stmt (stmt))
    mark_stmt_necessary (stmt, true);
}
/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));

  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;

  EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
			    0, edge_number, bi)
    {
      basic_block cd_bb = cd->get_edge (edge_number)->src;

      if (ignore_self && cd_bb == bb)
	{
	  skipped = true;
	  continue;
	}

      if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
	mark_last_stmt_necessary (cd_bb);
    }

  if (!skipped)
    bitmap_set_bit (visited_control_parents, bb->index);
}
/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary; otherwise only the control statements that possibly-infinite
   or irreducible loops depend on are marked here.  */

static void
find_obviously_necessary_stmts (bool aggressive)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple phi, stmt;
  int flags;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  phi = gsi_stmt (gsi);
	  gimple_set_plf (phi, STMT_NECESSARY, false);
	}

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  gimple_set_plf (stmt, STMT_NECESSARY, false);
	  mark_stmt_if_obviously_necessary (stmt, aggressive);
	}
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent the empty possibly infinite loops from being removed.  */
  if (aggressive)
    {
      struct loop *loop;
      scev_initialize ();
      if (mark_irreducible_loops ())
	FOR_EACH_BB_FN (bb, cfun)
	  {
	    edge_iterator ei;
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      if ((e->flags & EDGE_DFS_BACK)
		  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
		{
		  if (dump_file)
		    fprintf (dump_file,
			     "Marking back edge of irreducible loop %i->%i\n",
			     e->src->index, e->dest->index);
		  mark_control_dependent_edges_necessary (e->dest, false);
		}
	  }

      FOR_EACH_LOOP (loop, 0)
	if (!finite_loop_p (loop))
	  {
	    if (dump_file)
	      fprintf (dump_file, "cannot prove finiteness of loop %i\n",
		       loop->num);
	    mark_control_dependent_edges_necessary (loop->latch, false);
	  }
      scev_finalize ();
    }
}
/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  return !(DECL_P (ref)
	   && !may_be_aliased (ref));
}
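
/* For instance, a store to a local whose address is never taken
   ('a = 1;' for a plain 'int a;') is based on a non-aliased decl,
   while an indirect store ('*p_1 = 1;') or a store to an
   address-taken, external or public variable may be aliased.  The
   loop above peels component references (a.b, a[i_1]) down to the
   base decl before asking may_be_aliased.  */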
static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;
/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ??? We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      HOST_WIDE_INT size, offset, max_size;
      ao_ref_base (ref);
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == ref->base does not always hold.  */
      if (base == ref->base)
	{
	  /* For a must-alias check we need to be able to constrain
	     the accesses properly.  */
	  if (size != -1 && size == max_size
	      && ref->max_size != -1)
	    {
	      if (offset <= ref->offset
		  && offset + size >= ref->offset + ref->max_size)
		return true;
	    }
	  /* Or they need to be exactly the same.  */
	  else if (ref->ref
		   /* Make sure there is no induction variable involved
		      in the references (gcc.c-torture/execute/pr42142.c).
		      The simplest way is to check if the kill dominates
		      the use.  */
		   /* But when both are in the same block we cannot
		      easily tell whether we came from a backedge
		      unless we decide to compute stmt UIDs
		      (see PR58246).  */
		   && (basic_block) data != gimple_bb (def_stmt)
		   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
				      gimple_bb (def_stmt))
		   && operand_equal_p (ref->ref, lhs, 0))
	    return true;
	}
    }

  /* Otherwise keep walking.  */
  return false;
}
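
/* A small sketch of the kill test above:

     a = b_1;       <- (1) may-def of 'a'
     a = c_2;       <- (2) kills (1), covers all of 'a'
     ... = a;       <- use whose vdef chain is walked

   Walking backwards from the use, (2) is marked necessary and, since
   its lhs covers the whole of 'a', the walk stops there: (1) need not
   be marked on this path.  */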
static void
mark_aliased_reaching_defs_necessary (gimple stmt, tree ref)
{
  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
			      mark_aliased_reaching_defs_necessary_1,
			      gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}
/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
				    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
		  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
	return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (is_gimple_call (def_stmt))
    {
      tree callee = gimple_call_fndecl (def_stmt);
      if (callee != NULL_TREE
	  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_MALLOC:
	  case BUILT_IN_ALIGNED_ALLOC:
	  case BUILT_IN_CALLOC:
	  case BUILT_IN_ALLOCA:
	  case BUILT_IN_ALLOCA_WITH_ALIGN:
	  case BUILT_IN_FREE:
	    return false;

	  default:;
	  }
    }

  mark_operand_necessary (vdef);

  return false;
}
static void
mark_all_reaching_defs_necessary (gimple stmt)
{
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
		      mark_all_reaching_defs_necessary_1, NULL, &visited);
}
/* Return true for PHI nodes with one or identical arguments;
   such PHIs can be removed.  */
static bool
degenerate_phi_p (gimple phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}
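
/* For example, x_4 = PHI <x_2, x_2> is degenerate: every argument is
   x_2, so the PHI carries no information beyond its single distinct
   argument.  x_4 = PHI <x_2, x_3> is not degenerate.  */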
/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode (AGGRESSIVE is false), control dependences
   are ignored.  */

static void
propagate_necessity (bool aggressive)
{
  gimple stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (worklist.length () > 0)
    {
      /* Take STMT from worklist.  */
      stmt = worklist.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "processing: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}

      if (aggressive)
	{
	  /* Mark the last statement of the basic blocks on which the block
	     containing STMT is control dependent, but only if we haven't
	     already done so.  */
	  basic_block bb = gimple_bb (stmt);
	  if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	      && !bitmap_bit_p (visited_control_parents, bb->index))
	    mark_control_dependent_edges_necessary (bb, false);
	}

      if (gimple_code (stmt) == GIMPLE_PHI
	  /* We do not process virtual PHI nodes nor do we track their
	     necessity.  */
	  && !virtual_operand_p (gimple_phi_result (stmt)))
	{
	  /* PHI nodes are somewhat special in that each PHI alternative has
	     data and control dependencies.  All the statements feeding the
	     PHI node's arguments are always necessary.  In aggressive mode,
	     we also consider the control dependent edges leading to the
	     predecessor block associated with each PHI alternative as
	     necessary.  */
	  size_t k;

	  for (k = 0; k < gimple_phi_num_args (stmt); k++)
	    {
	      tree arg = PHI_ARG_DEF (stmt, k);
	      if (TREE_CODE (arg) == SSA_NAME)
		mark_operand_necessary (arg);
	    }

	  /* For PHI operands it matters from where the control flow arrives
	     to the BB.  Consider the following example:

	     a=exp1;
	     b=exp2;
	     if (test)
		;
	     else
		;
	     c=PHI(a,b)

	     We need to mark the control dependence of the empty basic
	     blocks, since they contain the computation of the PHI operands.

	     Doing so is too restrictive in the case the predecessor block
	     is in a loop.  Consider:

	     if (b)
	       {
		 int i;
		 for (i = 0; i<1000; ++i)
		   ;
		 j = 0;
	       }
	     return j;

	     There is a PHI for J in the BB containing the return statement.
	     In this case the control dependence of the predecessor block
	     (that is within the empty loop) also contains the block
	     determining the number of iterations, which would prevent
	     removing the empty loop in this case.

	     This scenario can be avoided by splitting critical edges.
	     To save the critical edge splitting pass we identify what the
	     control dependence would look like if the edge was split.

	     Consider the modified CFG created from the current CFG by
	     splitting edge B->C.  In the postdominance tree of the modified
	     CFG, C' is always a child of C.  There are two cases for what
	     the children of C' can look like:

	     1) C' is a leaf

		In this case the only basic block C' is control dependent
		on is B.

	     2) C' has a single child that is B

		In this case the control dependence of C' is the same as the
		control dependence of B in the original CFG except for block
		B itself (since C' postdominates B in the modified CFG).

	     Now how to decide which case happens?  There are two basic
	     options:

	     a) C postdominates B.  Then C immediately postdominates B and
		case 2 happens iff there is no other way from B to C except
		the edge B->C.

		There is another way from B to C iff there is a successor of
		B that is not postdominated by B.  Testing this condition is
		somewhat expensive, because we need to iterate over all
		successors of B.  We are safe to assume that this does not
		happen: we will mark B as needed when processing the other
		path from B to C that is control dependent on B and marking
		the control dependencies of B itself is harmless because they
		will be processed anyway after processing the control
		statement in B.

	     b) C does not postdominate B.  Always case 1 happens since there
		is a path from C to exit that does not go through B and thus
		also C'.  */

	  if (aggressive && !degenerate_phi_p (stmt))
	    {
	      for (k = 0; k < gimple_phi_num_args (stmt); k++)
		{
		  basic_block arg_bb = gimple_phi_arg_edge (stmt, k)->src;

		  if (gimple_bb (stmt)
		      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
		    {
		      if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
			mark_last_stmt_necessary (arg_bb);
		    }
		  else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
			   && !bitmap_bit_p (visited_control_parents,
					     arg_bb->index))
		    mark_control_dependent_edges_necessary (arg_bb, true);
		}
	    }
	}
      else
	{
	  /* Propagate through the operands.  Examine all the USE, VUSE and
	     VDEF operands in this statement.  Mark all the statements
	     which feed this statement's uses as necessary.  */
	  ssa_op_iter iter;
	  tree use;

	  /* If this is a call to free which is directly fed by an
	     allocation function do not mark that necessary through
	     processing the argument.  */
	  if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      gimple def_stmt;
	      tree def_callee;
	      /* If the pointer we free is defined by an allocation
		 function do not add the call to the worklist.  */
	      if (TREE_CODE (ptr) == SSA_NAME
		  && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
		  && (def_callee = gimple_call_fndecl (def_stmt))
		  && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
		      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
		      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
		continue;
	    }

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
	    mark_operand_necessary (use);

	  use = gimple_vuse (stmt);
	  if (!use)
	    continue;

	  /* If we dropped to simple mode make all immediately
	     reachable definitions necessary.  */
	  if (chain_ovfl)
	    {
	      mark_all_reaching_defs_necessary (stmt);
	      continue;
	    }

	  /* For statements that may load from memory (have a VUSE) we
	     have to mark all reaching (may-)definitions as necessary.
	     We partition this task into two cases:
	      1) explicit loads based on decls that are not aliased
	      2) implicit loads (like calls) and explicit loads not
		 based on decls that are not aliased (like indirect
		 references or loads from globals)
	     For 1) we mark all reaching may-defs as necessary, stopping
	     at dominating kills.  For 2) we want to mark all dominating
	     references necessary, but non-aliased ones which we handle
	     in 1).  By keeping a global visited bitmap for references
	     we walk for 2) we avoid quadratic behavior for those.  */

	  if (is_gimple_call (stmt))
	    {
	      tree callee = gimple_call_fndecl (stmt);
	      unsigned i;

	      /* Calls to functions that are merely acting as barriers
		 or that only store to memory do not make any previous
		 stores necessary.  */
	      if (callee != NULL_TREE
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
		      || (DECL_FUNCTION_CODE (callee)
			  == BUILT_IN_ALLOCA_WITH_ALIGN)
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
		continue;

	      /* Calls implicitly load from memory, their arguments
		 in addition may explicitly perform memory loads.  */
	      mark_all_reaching_defs_necessary (stmt);
	      for (i = 0; i < gimple_call_num_args (stmt); ++i)
		{
		  tree arg = gimple_call_arg (stmt, i);
		  if (TREE_CODE (arg) == SSA_NAME
		      || is_gimple_min_invariant (arg))
		    continue;
		  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
		    arg = TREE_OPERAND (arg, 0);
		  if (!ref_may_be_aliased (arg))
		    mark_aliased_reaching_defs_necessary (stmt, arg);
		}
	    }
	  else if (gimple_assign_single_p (stmt))
	    {
	      tree rhs;
	      /* If this is a load mark things necessary.  */
	      rhs = gimple_assign_rhs1 (stmt);
	      if (TREE_CODE (rhs) != SSA_NAME
		  && !is_gimple_min_invariant (rhs)
		  && TREE_CODE (rhs) != CONSTRUCTOR)
		{
		  if (!ref_may_be_aliased (rhs))
		    mark_aliased_reaching_defs_necessary (stmt, rhs);
		  else
		    mark_all_reaching_defs_necessary (stmt);
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_RETURN)
	    {
	      tree rhs = gimple_return_retval (stmt);
	      /* A return statement may perform a load.  */
	      if (rhs
		  && TREE_CODE (rhs) != SSA_NAME
		  && !is_gimple_min_invariant (rhs)
		  && TREE_CODE (rhs) != CONSTRUCTOR)
		{
		  if (!ref_may_be_aliased (rhs))
		    mark_aliased_reaching_defs_necessary (stmt, rhs);
		  else
		    mark_all_reaching_defs_necessary (stmt);
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_ASM)
	    {
	      unsigned i;
	      mark_all_reaching_defs_necessary (stmt);
	      /* Inputs may perform loads.  */
	      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
		{
		  tree op = TREE_VALUE (gimple_asm_input_op (stmt, i));
		  if (TREE_CODE (op) != SSA_NAME
		      && !is_gimple_min_invariant (op)
		      && TREE_CODE (op) != CONSTRUCTOR
		      && !ref_may_be_aliased (op))
		    mark_aliased_reaching_defs_necessary (stmt, op);
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
	    {
	      /* The beginning of a transaction is a memory barrier.  */
	      /* ??? If we were really cool, we'd only be a barrier
		 for the memories touched within the transaction.  */
	      mark_all_reaching_defs_necessary (stmt);
	    }
	  else
	    gcc_unreachable ();

	  /* If we over-used our alias oracle budget drop to simple
	     mode.  The cost metric allows quadratic behavior
	     (number of uses times number of may-defs queries) up to
	     a constant maximal number of queries and after that falls back to
	     super-linear complexity.  */
	  if (/* Constant but quadratic for small functions.  */
	      total_chain > 128 * 128
	      /* Linear in the number of may-defs.  */
	      && total_chain > 32 * longest_chain
	      /* Linear in the number of uses.  */
	      && total_chain > nr_walks * 32)
	    {
	      chain_ovfl = true;
	      if (visited)
		bitmap_clear (visited);
	    }
	}
    }
}
/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gimple phi;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi_stmt (gsi);

      /* We do not track necessity of virtual PHI nodes.  Instead do
	 very simple dead PHI removal here.  */
      if (virtual_operand_p (gimple_phi_result (phi)))
	{
	  /* Virtual PHI nodes with one or identical arguments
	     can be removed.  */
	  if (degenerate_phi_p (phi))
	    {
	      tree vdef = gimple_phi_result (phi);
	      tree vuse = gimple_phi_arg_def (phi, 0);

	      use_operand_p use_p;
	      imm_use_iterator iter;
	      gimple use_stmt;
	      FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, vuse);
	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
		  && TREE_CODE (vuse) == SSA_NAME)
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
	    }
	  else
	    gimple_set_plf (phi, STMT_NECESSARY, true);
	}

      if (!gimple_plf (phi, STMT_NECESSARY))
	{
	  something_changed = true;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Deleting : ");
	      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  remove_phi_node (&gsi, true);
	  stats.removed_phis++;
	  continue;
	}

      gsi_next (&gsi);
    }
  return something_changed;
}
/* Forward edge E to respective POST_DOM_BB and update PHIs.  */

static edge
forward_edge_to_pdom (edge e, basic_block post_dom_bb)
{
  gimple_stmt_iterator gsi;
  edge e2 = NULL;
  edge_iterator ei;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Redirecting edge %i->%i to %i\n", e->src->index,
	     e->dest->index, post_dom_bb->index);

  e2 = redirect_edge_and_branch (e, post_dom_bb);
  cfg_altered = true;

  /* If edge was already around, no updating is necessary.  */
  if (e2 != e)
    return e2;

  if (!gimple_seq_empty_p (phi_nodes (post_dom_bb)))
    {
      /* We are sure that for every live PHI we are seeing a control
	 dependent BB.  This means that we can pick any edge to duplicate
	 PHI args from.  */
      FOR_EACH_EDGE (e2, ei, post_dom_bb->preds)
	if (e2 != e)
	  break;
      for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);)
	{
	  gimple phi = gsi_stmt (gsi);
	  tree op;
	  source_location locus;

	  /* PHIs for virtuals have no control dependency relation on them.
	     We are lost here and must force renaming of the symbol.  */
	  if (virtual_operand_p (gimple_phi_result (phi)))
	    {
	      mark_virtual_phi_result_for_renaming (phi);
	      remove_phi_node (&gsi, true);
	      continue;
	    }

	  /* Dead PHIs do not imply control dependency.  */
	  if (!gimple_plf (phi, STMT_NECESSARY))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  op = gimple_phi_arg_def (phi, e2->dest_idx);
	  locus = gimple_phi_arg_location (phi, e2->dest_idx);
	  add_phi_arg (phi, op, e, locus);
	  /* The resulting PHI if not dead can only be degenerate.  */
	  gcc_assert (degenerate_phi_p (phi));
	  gsi_next (&gsi);
	}
    }
  return e;
}
/* Remove dead statement pointed to by iterator I.  Receives the basic block BB
   containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
  gimple stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we also change
     the flow graph so that the current block will simply fall-thru to its
     immediate post-dominator.  The blocks we are circumventing will be
     removed by cleanup_tree_cfg if this change in the flow graph makes them
     unreachable.  */
  if (is_ctrl_stmt (stmt))
    {
      basic_block post_dom_bb;
      edge e, e2;
      edge_iterator ei;

      post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);

      e = find_edge (bb, post_dom_bb);

      /* If edge is already there, try to use it.  This avoids need to update
	 PHI nodes.  Also watch for cases where the post dominator does not
	 exist or is the exit block.  These can happen for infinite loops as
	 we create fake edges in the dominator tree.  */
      if (e)
	;
      else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	e = EDGE_SUCC (bb, 0);
      else
	e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb);
      gcc_assert (e);
      e->probability = REG_BR_PROB_BASE;
      e->count = bb->count;

      /* The edge is no longer associated with a conditional, so it does
	 not have TRUE/FALSE flags.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
	if (e != e2)
	  {
	    cfg_altered = true;
	    remove_edge (e2);
	  }
	else
	  ei_next (&ei);
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
	  && !DECL_IGNORED_P (lhs)
	  && is_gimple_reg_type (TREE_TYPE (lhs))
	  && !is_global_var (lhs)
	  && !DECL_HAS_VALUE_EXPR_P (lhs))
	{
	  tree rhs = gimple_assign_rhs1 (stmt);
	  gimple note
	    = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
	  gsi_insert_after (i, note, GSI_SAME_STMT);
	}
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}
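
/* For instance, when the dead store 'i = j_1;' to a user variable 'i'
   that is being optimized away is removed, a '# DEBUG i => j_1' bind
   is inserted in its place (provided debug statements are enabled),
   so debug information about 'i' survives the deletion.  */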
/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple stmt;
  tree call;
  vec<basic_block> h;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS,
				single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

  while (h.length ())
    {
      bb = h.pop ();

      /* Remove dead statements.  */
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
	{
	  stmt = gsi_stmt (gsi);

	  psi = gsi;
	  gsi_prev (&psi);

	  stats.total++;

	  /* We can mark a call to free as not necessary if the
	     defining statement of its argument is not necessary
	     (and thus is getting removed).  */
	  if (gimple_plf (stmt, STMT_NECESSARY)
	      && gimple_call_builtin_p (stmt, BUILT_IN_FREE))
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      if (TREE_CODE (ptr) == SSA_NAME)
		{
		  gimple def_stmt = SSA_NAME_DEF_STMT (ptr);
		  if (!gimple_nop_p (def_stmt)
		      && !gimple_plf (def_stmt, STMT_NECESSARY))
		    gimple_set_plf (stmt, STMT_NECESSARY, false);
		}
	    }

	  /* If GSI is not necessary then remove it.  */
	  if (!gimple_plf (stmt, STMT_NECESSARY))
	    {
	      if (!is_gimple_debug (stmt))
		something_changed = true;
	      remove_dead_stmt (&gsi, bb);
	    }
	  else if (is_gimple_call (stmt))
	    {
	      tree name = gimple_call_lhs (stmt);

	      notice_special_calls (stmt);

	      /* When LHS of var = call (); is dead, simplify it into
		 call (); saving one operand.  */
	      if (name
		  && TREE_CODE (name) == SSA_NAME
		  && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
		  /* Avoid doing so for allocation calls which we
		     did not mark as necessary, it will confuse the
		     special logic we apply to malloc/free pair removal.  */
		  && (!(call = gimple_call_fndecl (stmt))
		      || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
		      || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
			  && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
			  && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
			  && DECL_FUNCTION_CODE (call) != BUILT_IN_ALLOCA
			  && (DECL_FUNCTION_CODE (call)
			      != BUILT_IN_ALLOCA_WITH_ALIGN))))
		{
		  something_changed = true;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Deleting LHS of call: ");
		      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
		      fprintf (dump_file, "\n");
		    }

		  gimple_call_set_lhs (stmt, NULL_TREE);
		  maybe_clean_or_replace_eh_stmt (stmt, stmt);
		  update_stmt (stmt);
		  release_ssa_name (name);
		}
	    }
	}
    }

  h.release ();

  /* Since we don't track liveness of virtual PHI nodes, it is possible that we
     rendered some PHI nodes unreachable while they are still in use.
     Mark them for renaming.  */
  if (cfg_altered)
    {
      basic_block prev_bb;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
	   bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
	{
	  prev_bb = bb->prev_bb;

	  if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
	      || !(bb->flags & BB_REACHABLE))
	    {
	      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
		   gsi_next (&gsi))
		if (virtual_operand_p (gimple_phi_result (gsi_stmt (gsi))))
		  {
		    bool found = false;
		    imm_use_iterator iter;

		    FOR_EACH_IMM_USE_STMT (stmt, iter,
					   gimple_phi_result (gsi_stmt (gsi)))
		      {
			if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
			  continue;
			if (gimple_code (stmt) == GIMPLE_PHI
			    || gimple_plf (stmt, STMT_NECESSARY))
			  {
			    found = true;
			    BREAK_FROM_IMM_USE_STMT (iter);
			  }
		      }
		    if (found)
		      mark_virtual_phi_result_for_renaming (gsi_stmt (gsi));
		  }

	      if (!(bb->flags & BB_REACHABLE))
		{
		  /* Speed up the removal of blocks that don't
		     dominate others.  Walking backwards, this should
		     be the common case.  ??? Do we need to recompute
		     dominators because of cfg_altered?  */
		  if (!MAY_HAVE_DEBUG_STMTS
		      || !first_dom_son (CDI_DOMINATORS, bb))
		    delete_basic_block (bb);
		  else
		    {
		      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

		      while (h.length ())
			{
			  bb = h.pop ();
			  prev_bb = bb->prev_bb;
			  /* Rearrangements to the CFG may have failed
			     to update the dominators tree, so that
			     formerly-dominated blocks are now
			     otherwise reachable.  */
			  if (!!(bb->flags & BB_REACHABLE))
			    continue;
			  delete_basic_block (bb);
			}

		      h.release ();
		    }
		}
	    }
	}
    }
  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  return something_changed;
}
/* Print out removed statement statistics.  */

static void
print_stats (void)
{
  float percg;

  percg = ((float) stats.removed / (float) stats.total) * 100;
  fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
	   stats.removed, stats.total, (int) percg);

  if (stats.total_phis == 0)
    percg = 0;
  else
    percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;

  fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
	   stats.removed_phis, stats.total_phis, (int) percg);
}
/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (last_stmt_necessary);
      bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (bb_contains_live_stmts);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  bitmap_clear (processed);

  worklist.create (64);
  cfg_altered = false;
}
/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      delete cd;
      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
      sbitmap_free (bb_contains_live_stmts);
      bb_contains_live_stmts = NULL;
    }

  sbitmap_free (processed);

  worklist.release ();
}
/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
	  the dominance info is not invalidated after DCE1.  This is
	  not an issue right now because we only run aggressive DCE
	  as the last tree SSA pass, but keep this in mind when you
	  start experimenting with pass ordering.  */

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  bool something_changed = false;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve chances that loops will be
     proved to be finite in testcases such as loop-15.c and loop-24.c.  */
  if (aggressive)
    loop_optimizer_init (LOOPS_NORMAL
			 | LOOPS_HAVE_RECORDED_EXITS);

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      calculate_dominance_info (CDI_POST_DOMINATORS);
      cd = new control_dependences (create_edge_list ());

      visited_control_parents =
	sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (aggressive);

  if (aggressive)
    loop_optimizer_finalize ();

  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (aggressive);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  if (something_changed)
    {
      free_numbers_of_iterations_estimates ();
      if (scev_initialized_p ())
	scev_reset ();
      return TODO_update_ssa | TODO_cleanup_cfg;
    }
  return 0;
}
/* Pass entry points.  */
static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}
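
/* Note that the "cddce" entry point only enables the aggressive,
   control dependence based mode when optimizing at -O2 or above; below
   that it falls back to the same conservative algorithm as "dce".  */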
namespace {

const pass_data pass_data_dce =
{
  GIMPLE_PASS, /* type */
  "dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_dce : public gimple_opt_pass
{
public:
  pass_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_dce (); }

}; // class pass_dce

} // anon namespace

gimple_opt_pass *
make_pass_dce (gcc::context *ctxt)
{
  return new pass_dce (ctxt);
}
namespace {

const pass_data pass_data_cd_dce =
{
  GIMPLE_PASS, /* type */
  "cddce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CD_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cd_dce : public gimple_opt_pass
{
public:
  pass_cd_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cd_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_cd_dce (); }

}; // class pass_cd_dce

} // anon namespace

gimple_opt_pass *
make_pass_cd_dce (gcc::context *ctxt)
{
  return new pass_cd_dce (ctxt);
}