Rebase.
[official-gcc.git] / gcc / tree-ssa-dce.c
blobd675d187ba94233d6e39037418098943c641a1eb
1 /* Dead code elimination pass for the GNU compiler.
2 Copyright (C) 2002-2014 Free Software Foundation, Inc.
3 Contributed by Ben Elliston <bje@redhat.com>
4 and Andrew MacLeod <amacleod@redhat.com>
5 Adapted to use control dependence by Steven Bosscher, SUSE Labs.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 3, or (at your option) any
12 later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Dead code elimination.
25 References:
27 Building an Optimizing Compiler,
28 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
30 Advanced Compiler Design and Implementation,
31 Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.
33 Dead-code elimination is the removal of statements which have no
34 impact on the program's output. "Dead statements" have no impact
35 on the program's output, while "necessary statements" may have
36 impact on the output.
38 The algorithm consists of three phases:
39 1. Marking as necessary all statements known to be necessary,
40 e.g. most function calls, writing a value to memory, etc;
41 2. Propagating necessary statements, e.g., the statements
42 giving values to operands in necessary statements; and
43 3. Removing dead statements. */
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
50 #include "tree.h"
51 #include "calls.h"
52 #include "gimple-pretty-print.h"
53 #include "basic-block.h"
54 #include "tree-ssa-alias.h"
55 #include "internal-fn.h"
56 #include "tree-eh.h"
57 #include "gimple-expr.h"
58 #include "is-a.h"
59 #include "gimple.h"
60 #include "gimplify.h"
61 #include "gimple-iterator.h"
62 #include "gimple-ssa.h"
63 #include "tree-cfg.h"
64 #include "tree-phinodes.h"
65 #include "ssa-iterators.h"
66 #include "stringpool.h"
67 #include "tree-ssanames.h"
68 #include "tree-ssa-loop-niter.h"
69 #include "tree-into-ssa.h"
70 #include "expr.h"
71 #include "tree-dfa.h"
72 #include "tree-pass.h"
73 #include "flags.h"
74 #include "cfgloop.h"
75 #include "tree-scalar-evolution.h"
76 #include "tree-chkp.h"
/* Statistics gathered for the current invocation of the pass, dumped
   when statistics dumping is requested.  */
static struct stmt_stats
{
  int total;         /* Number of statements examined.  */
  int total_phis;    /* Number of PHI nodes examined.  */
  int removed;       /* Number of statements deleted.  */
  int removed_phis;  /* Number of PHI nodes deleted.  */
} stats;
/* Per-statement pass-local flag: set once the statement has been proven
   necessary (live).  */
#define STMT_NECESSARY GF_PLF_1

/* Worklist of statements whose necessity still has to be propagated to
   the statements that compute their operands.  */
static vec<gimple> worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static control_dependences *cd;

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;
122 /* If STMT is not already marked necessary, mark it, and add it to the
123 worklist if ADD_TO_WORKLIST is true. */
125 static inline void
126 mark_stmt_necessary (gimple stmt, bool add_to_worklist)
128 gcc_assert (stmt);
130 if (gimple_plf (stmt, STMT_NECESSARY))
131 return;
133 if (dump_file && (dump_flags & TDF_DETAILS))
135 fprintf (dump_file, "Marking useful stmt: ");
136 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
137 fprintf (dump_file, "\n");
140 gimple_set_plf (stmt, STMT_NECESSARY, true);
141 if (add_to_worklist)
142 worklist.safe_push (stmt);
143 if (bb_contains_live_stmts && !is_gimple_debug (stmt))
144 bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
/* Mark the statement defining operand OP as necessary.  OP must be an
   SSA name.  If the defining statement was not yet necessary it is also
   pushed on the worklist so its own operands get processed.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (bitmap_bit_p (processed, ver))
    {
      /* Already handled: the defining statement must be either a
	 GIMPLE_NOP (default definition) or already necessary.  */
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
		  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  bitmap_set_bit (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  /* Default definitions have no real defining statement to mark.  */
  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op, 0);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
  worklist.safe_push (stmt);
}
/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
{
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement could throw, it can be deemed necessary.  */
  if (cfun->can_throw_non_call_exceptions
      && !cfun->can_delete_dead_exceptions
      && stmt_could_throw_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
	tree callee = gimple_call_fndecl (stmt);
	/* Calls to allocation functions are not inherently necessary:
	   if the result turns out to be unused the allocation can be
	   removed entirely, so fall out without marking.  */
	if (callee != NULL_TREE
	    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_ALIGNED_ALLOC:
	    case BUILT_IN_CALLOC:
	    case BUILT_IN_ALLOCA:
	    case BUILT_IN_ALLOCA_WITH_ALIGN:
	      return;

	    default:;
	    }
	/* Most, but not all function calls are required.  Function calls that
	   produce no result and have no side effects (i.e. const pure
	   functions) are unnecessary.  */
	if (gimple_has_side_effects (stmt))
	  {
	    mark_stmt_necessary (stmt, true);
	    return;
	  }
	if (!gimple_call_lhs (stmt))
	  return;
	break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
	 easily locate the debug temp bind stmt for a use thereof,
	 we could refrain from marking all debug temps here, and
	 mark them only if they're used.  */
      if (!gimple_debug_bind_p (stmt)
	  || gimple_debug_bind_has_value_p (stmt)
	  || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
	mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      /* In aggressive mode control statements start out dead and are
	 only revived through control dependence.  */
      if (! aggressive)
	mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      /* A clobber of an SSA name is never necessary by itself.  */
      if (TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
	  && TREE_CLOBBER_P (gimple_assign_rhs1 (stmt)))
	return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Stores to memory visible outside the function must be kept.  */
  if (stmt_may_clobber_global_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}
310 /* Mark the last statement of BB as necessary. */
312 static void
313 mark_last_stmt_necessary (basic_block bb)
315 gimple stmt = last_stmt (bb);
317 bitmap_set_bit (last_stmt_necessary, bb->index);
318 bitmap_set_bit (bb_contains_live_stmts, bb->index);
320 /* We actually mark the statement only if it is a control statement. */
321 if (stmt && is_ctrl_stmt (stmt))
322 mark_stmt_necessary (stmt, true);
/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* The entry block is control dependent on nothing.  */
  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;

  EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
			    0, edge_number, bi)
    {
      basic_block cd_bb = cd->get_edge (edge_number)->src;

      if (ignore_self && cd_bb == bb)
	{
	  skipped = true;
	  continue;
	}

      if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
	mark_last_stmt_necessary (cd_bb);
    }

  /* Only record BB as fully processed if nothing was skipped; otherwise
     a later call without IGNORE_SELF still has work to do here.  */
  if (!skipped)
    bitmap_set_bit (visited_control_parents, bb->index);
}
/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  Otherwise control dependence analysis is used to revive them
   later, and possibly-infinite loops are protected here.  */

static void
find_obviously_necessary_stmts (bool aggressive)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple phi, stmt;
  int flags;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  phi = gsi_stmt (gsi);
	  gimple_set_plf (phi, STMT_NECESSARY, false);
	}

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  gimple_set_plf (stmt, STMT_NECESSARY, false);
	  mark_stmt_if_obviously_necessary (stmt, aggressive);
	}
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent the empty possibly infinite loops from being removed.  */
  if (aggressive)
    {
      struct loop *loop;
      scev_initialize ();
      /* Irreducible regions are not covered by the natural loop
	 analysis below, so keep their back edges alive explicitly.  */
      if (mark_irreducible_loops ())
	FOR_EACH_BB_FN (bb, cfun)
	  {
	    edge_iterator ei;
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      if ((e->flags & EDGE_DFS_BACK)
		  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
		{
		  if (dump_file)
		    fprintf (dump_file, "Marking back edge of irreducible loop %i->%i\n",
			     e->src->index, e->dest->index);
		  mark_control_dependent_edges_necessary (e->dest, false);
		}
	  }

      /* A loop we cannot prove finite may loop forever; its latch must
	 stay reachable so the loop is not deleted.  */
      FOR_EACH_LOOP (loop, 0)
	if (!finite_loop_p (loop))
	  {
	    if (dump_file)
	      fprintf (dump_file, "can not prove finiteness of loop %i\n", loop->num);
	    mark_control_dependent_edges_necessary (loop->latch, false);
	  }
      scev_finalize ();
    }
}
435 /* Return true if REF is based on an aliased base, otherwise false. */
437 static bool
438 ref_may_be_aliased (tree ref)
440 gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
441 while (handled_component_p (ref))
442 ref = TREE_OPERAND (ref, 0);
443 if (TREE_CODE (ref) == MEM_REF
444 && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
445 ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
446 return !(DECL_P (ref)
447 && !may_be_aliased (ref));
/* Bitmap of virtual-operand SSA versions already walked by
   mark_all_reaching_defs_necessary; shared across walks to avoid
   quadratic behavior.  */
static bitmap visited = NULL;

/* Alias-oracle budget counters; see the budget check at the end of
   propagate_necessity for how they interact.  */
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;

/* Set once the alias-walking budget is exhausted; from then on all
   reaching definitions are conservatively marked necessary.  */
static bool chain_ovfl = false;
/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ??? We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      HOST_WIDE_INT size, offset, max_size;
      ao_ref_base (ref);
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == refd->base does not always hold.  */
      if (base == ref->base)
	{
	  /* For a must-alias check we need to be able to constrain
	     the accesses properly.  */
	  if (size != -1 && size == max_size
	      && ref->max_size != -1)
	    {
	      /* The store covers the whole extent REF may touch.  */
	      if (offset <= ref->offset
		  && offset + size >= ref->offset + ref->max_size)
		return true;
	    }
	  /* Or they need to be exactly the same.  */
	  else if (ref->ref
		   /* Make sure there is no induction variable involved
		      in the references (gcc.c-torture/execute/pr42142.c).
		      The simplest way is to check if the kill dominates
		      the use.  */
		   /* But when both are in the same block we cannot
		      easily tell whether we came from a backedge
		      unless we decide to compute stmt UIDs
		      (see PR58246).  */
		   && (basic_block) data != gimple_bb (def_stmt)
		   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
				      gimple_bb (def_stmt))
		   && operand_equal_p (ref->ref, lhs, 0))
	    return true;
	}
    }

  /* Otherwise keep walking.  */
  return false;
}
521 static void
522 mark_aliased_reaching_defs_necessary (gimple stmt, tree ref)
524 unsigned int chain;
525 ao_ref refd;
526 gcc_assert (!chain_ovfl);
527 ao_ref_init (&refd, ref);
528 chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
529 mark_aliased_reaching_defs_necessary_1,
530 gimple_bb (stmt), NULL);
531 if (chain > longest_chain)
532 longest_chain = chain;
533 total_chain += chain;
534 nr_walks++;
/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
				    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
		  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
	return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (is_gimple_call (def_stmt))
    {
      tree callee = gimple_call_fndecl (def_stmt);
      if (callee != NULL_TREE
	  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_MALLOC:
	  case BUILT_IN_ALIGNED_ALLOC:
	  case BUILT_IN_CALLOC:
	  case BUILT_IN_ALLOCA:
	  case BUILT_IN_ALLOCA_WITH_ALIGN:
	  case BUILT_IN_FREE:
	    return false;

	  default:;
	  }
    }

  mark_operand_necessary (vdef);

  /* Always continue the walk; in this mode there are no kills.  */
  return false;
}
/* Mark all reaching may-definitions of STMT necessary, except those
   based on non-aliased decls (which the worker skips).  The shared
   VISITED bitmap makes repeated walks non-quadratic.  */

static void
mark_all_reaching_defs_necessary (gimple stmt)
{
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
		      mark_all_reaching_defs_necessary_1, NULL, &visited);
}
601 /* Return true for PHI nodes with one or identical arguments
602 can be removed. */
603 static bool
604 degenerate_phi_p (gimple phi)
606 unsigned int i;
607 tree op = gimple_phi_arg_def (phi, 0);
608 for (i = 1; i < gimple_phi_num_args (phi); i++)
609 if (gimple_phi_arg_def (phi, i) != op)
610 return false;
611 return true;
/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative (non-aggressive) mode control statements were already
   marked necessary up front, so no control dependence is consulted.  */

static void
propagate_necessity (bool aggressive)
{
  gimple stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (worklist.length () > 0)
    {
      /* Take STMT from worklist.  */
      stmt = worklist.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "processing: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}

      if (aggressive)
	{
	  /* Mark the last statement of the basic blocks on which the block
	     containing STMT is control dependent, but only if we haven't
	     already done so.  */
	  basic_block bb = gimple_bb (stmt);
	  if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	      && !bitmap_bit_p (visited_control_parents, bb->index))
	    mark_control_dependent_edges_necessary (bb, false);
	}

      if (gimple_code (stmt) == GIMPLE_PHI
	  /* We do not process virtual PHI nodes nor do we track their
	     necessity.  */
	  && !virtual_operand_p (gimple_phi_result (stmt)))
	{
	  /* PHI nodes are somewhat special in that each PHI alternative has
	     data and control dependencies.  All the statements feeding the
	     PHI node's arguments are always necessary.  In aggressive mode,
	     we also consider the control dependent edges leading to the
	     predecessor block associated with each PHI alternative as
	     necessary.  */
	  size_t k;

	  for (k = 0; k < gimple_phi_num_args (stmt); k++)
	    {
	      tree arg = PHI_ARG_DEF (stmt, k);
	      if (TREE_CODE (arg) == SSA_NAME)
		mark_operand_necessary (arg);
	    }

	  /* For PHI operands it matters from where the control flow arrives
	     to the BB.  Consider the following example:

	     a=exp1;
	     b=exp2;
	     if (test)
		;
	     else
		;
	     c=PHI(a,b)

	     We need to mark control dependence of the empty basic blocks, since
	     they contain computation of PHI operands.

	     Doing so is too restrictive in the case the predecessor block is in
	     the loop.  Consider:

	     if (b)
	       {
		 int i;
		 for (i = 0; i<1000; ++i)
		   ;
		 j = 0;
	       }
	     return j;

	     There is PHI for J in the BB containing return statement.
	     In this case the control dependence of predecessor block (that is
	     within the empty loop) also contains the block determining number
	     of iterations of the block that would prevent removing of empty
	     loop in this case.

	     This scenario can be avoided by splitting critical edges.
	     To save the critical edge splitting pass we identify how the control
	     dependence would look like if the edge was split.

	     Consider the modified CFG created from current CFG by splitting
	     edge B->C.  In the postdominance tree of modified CFG, C' is
	     always child of C.  There are two cases how children of C' can look
	     like:

	     1) C' is leaf

		In this case the only basic block C' is control dependent on is B.

	     2) C' has single child that is B

		In this case control dependence of C' is same as control
		dependence of B in original CFG except for block B itself.
		(since C' postdominates B in modified CFG)

	     Now how to decide what case happens?  There are two basic options:

	     a) C postdominates B.  Then C immediately postdominates B and
		case 2 happens iff there is no other way from B to C except
		the edge B->C.

		There is other way from B to C iff there is successor of B that
		is not postdominated by B.  Testing this condition is somewhat
		expensive, because we need to iterate all successors of B.
		We are safe to assume that this does not happen: we will mark B
		as needed when processing the other path from B to C that is
		control dependent on B and marking control dependencies of B
		itself is harmless because they will be processed anyway after
		processing control statement in B.

	     b) C does not postdominate B.  Always case 1 happens since there is
		path from C to exit that does not go through B and thus also C'.  */

	  if (aggressive && !degenerate_phi_p (stmt))
	    {
	      for (k = 0; k < gimple_phi_num_args (stmt); k++)
		{
		  basic_block arg_bb = gimple_phi_arg_edge (stmt, k)->src;

		  if (gimple_bb (stmt)
		      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
		    {
		      if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
			mark_last_stmt_necessary (arg_bb);
		    }
		  else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
			   && !bitmap_bit_p (visited_control_parents,
					     arg_bb->index))
		    mark_control_dependent_edges_necessary (arg_bb, true);
		}
	    }
	}
      else
	{
	  /* Propagate through the operands.  Examine all the USE, VUSE and
	     VDEF operands in this statement.  Mark all the statements
	     which feed this statement's uses as necessary.  */
	  ssa_op_iter iter;
	  tree use;

	  /* If this is a call to free which is directly fed by an
	     allocation function do not mark that necessary through
	     processing the argument.  */
	  if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      gimple def_stmt;
	      tree def_callee;
	      /* If the pointer we free is defined by an allocation
		 function do not add the call to the worklist.  */
	      if (TREE_CODE (ptr) == SSA_NAME
		  && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
		  && (def_callee = gimple_call_fndecl (def_stmt))
		  && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
		      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
		      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
		{
		  gimple bounds_def_stmt;
		  tree bounds;

		  /* For instrumented calls we should also check used
		     bounds are returned by the same allocation call.  */
		  if (!gimple_call_with_bounds_p (stmt)
		      || ((bounds = gimple_call_arg (stmt, 1))
			  && TREE_CODE (bounds) == SSA_NAME
			  && (bounds_def_stmt = SSA_NAME_DEF_STMT (bounds))
			  && is_gimple_call (bounds_def_stmt)
			  && chkp_gimple_call_builtin_p (bounds_def_stmt,
							 BUILT_IN_CHKP_BNDRET)
			  && gimple_call_arg (bounds_def_stmt, 0) == ptr))
		    continue;
		}
	    }

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
	    mark_operand_necessary (use);

	  use = gimple_vuse (stmt);
	  if (!use)
	    continue;

	  /* If we dropped to simple mode make all immediately
	     reachable definitions necessary.  */
	  if (chain_ovfl)
	    {
	      mark_all_reaching_defs_necessary (stmt);
	      continue;
	    }

	  /* For statements that may load from memory (have a VUSE) we
	     have to mark all reaching (may-)definitions as necessary.
	     We partition this task into two cases:
	      1) explicit loads based on decls that are not aliased
	      2) implicit loads (like calls) and explicit loads not
		 based on decls that are not aliased (like indirect
		 references or loads from globals)
	     For 1) we mark all reaching may-defs as necessary, stopping
	     at dominating kills.  For 2) we want to mark all dominating
	     references necessary, but non-aliased ones which we handle
	     in 1).  By keeping a global visited bitmap for references
	     we walk for 2) we avoid quadratic behavior for those.  */

	  if (is_gimple_call (stmt))
	    {
	      tree callee = gimple_call_fndecl (stmt);
	      unsigned i;

	      /* Calls to functions that are merely acting as barriers
		 or that only store to memory do not make any previous
		 stores necessary.  */
	      if (callee != NULL_TREE
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
		      || (DECL_FUNCTION_CODE (callee)
			  == BUILT_IN_ALLOCA_WITH_ALIGN)
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
		continue;

	      /* Calls implicitly load from memory, their arguments
		 in addition may explicitly perform memory loads.  */
	      mark_all_reaching_defs_necessary (stmt);
	      for (i = 0; i < gimple_call_num_args (stmt); ++i)
		{
		  tree arg = gimple_call_arg (stmt, i);
		  if (TREE_CODE (arg) == SSA_NAME
		      || is_gimple_min_invariant (arg))
		    continue;
		  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
		    arg = TREE_OPERAND (arg, 0);
		  if (!ref_may_be_aliased (arg))
		    mark_aliased_reaching_defs_necessary (stmt, arg);
		}
	    }
	  else if (gimple_assign_single_p (stmt))
	    {
	      tree rhs;
	      /* If this is a load mark things necessary.  */
	      rhs = gimple_assign_rhs1 (stmt);
	      if (TREE_CODE (rhs) != SSA_NAME
		  && !is_gimple_min_invariant (rhs)
		  && TREE_CODE (rhs) != CONSTRUCTOR)
		{
		  if (!ref_may_be_aliased (rhs))
		    mark_aliased_reaching_defs_necessary (stmt, rhs);
		  else
		    mark_all_reaching_defs_necessary (stmt);
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_RETURN)
	    {
	      tree rhs = gimple_return_retval (stmt);
	      /* A return statement may perform a load.  */
	      if (rhs
		  && TREE_CODE (rhs) != SSA_NAME
		  && !is_gimple_min_invariant (rhs)
		  && TREE_CODE (rhs) != CONSTRUCTOR)
		{
		  if (!ref_may_be_aliased (rhs))
		    mark_aliased_reaching_defs_necessary (stmt, rhs);
		  else
		    mark_all_reaching_defs_necessary (stmt);
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_ASM)
	    {
	      unsigned i;
	      mark_all_reaching_defs_necessary (stmt);
	      /* Inputs may perform loads.  */
	      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
		{
		  tree op = TREE_VALUE (gimple_asm_input_op (stmt, i));
		  if (TREE_CODE (op) != SSA_NAME
		      && !is_gimple_min_invariant (op)
		      && TREE_CODE (op) != CONSTRUCTOR
		      && !ref_may_be_aliased (op))
		    mark_aliased_reaching_defs_necessary (stmt, op);
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
	    {
	      /* The beginning of a transaction is a memory barrier.  */
	      /* ??? If we were really cool, we'd only be a barrier
		 for the memories touched within the transaction.  */
	      mark_all_reaching_defs_necessary (stmt);
	    }
	  else
	    gcc_unreachable ();

	  /* If we over-used our alias oracle budget drop to simple
	     mode.  The cost metric allows quadratic behavior
	     (number of uses times number of may-defs queries) up to
	     a constant maximal number of queries and after that falls back to
	     super-linear complexity.  */
	  if (/* Constant but quadratic for small functions.  */
	      total_chain > 128 * 128
	      /* Linear in the number of may-defs.  */
	      && total_chain > 32 * longest_chain
	      /* Linear in the number of uses.  */
	      && total_chain > nr_walks * 32)
	    {
	      chain_ovfl = true;
	      if (visited)
		bitmap_clear (visited);
	    }
	}
    }
}
/* Remove dead PHI nodes from block BB.  Returns true if anything was
   removed.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gimple phi;
  gimple_stmt_iterator gsi;

  /* The iterator is advanced manually: remove_phi_node already moves it
     to the next PHI, so gsi_next is only called when nothing is removed.  */
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi_stmt (gsi);

      /* We do not track necessity of virtual PHI nodes.  Instead do
	 very simple dead PHI removal here.  */
      if (virtual_operand_p (gimple_phi_result (phi)))
	{
	  /* Virtual PHI nodes with one or identical arguments
	     can be removed.  */
	  if (degenerate_phi_p (phi))
	    {
	      tree vdef = gimple_phi_result (phi);
	      tree vuse = gimple_phi_arg_def (phi, 0);

	      /* Replace all uses of the PHI result by its single
		 incoming virtual operand.  */
	      use_operand_p use_p;
	      imm_use_iterator iter;
	      gimple use_stmt;
	      FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, vuse);
	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
		  && TREE_CODE (vuse) == SSA_NAME)
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
	    }
	  else
	    gimple_set_plf (phi, STMT_NECESSARY, true);
	}

      if (!gimple_plf (phi, STMT_NECESSARY))
	{
	  something_changed = true;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Deleting : ");
	      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  remove_phi_node (&gsi, true);
	  stats.removed_phis++;
	  continue;
	}

      gsi_next (&gsi);
    }
  return something_changed;
}
/* Forward edge E to respective POST_DOM_BB and update PHIs.  Returns the
   edge that now reaches POST_DOM_BB (possibly a pre-existing one).  */

static edge
forward_edge_to_pdom (edge e, basic_block post_dom_bb)
{
  gimple_stmt_iterator gsi;
  edge e2 = NULL;
  edge_iterator ei;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Redirecting edge %i->%i to %i\n", e->src->index,
	     e->dest->index, post_dom_bb->index);

  e2 = redirect_edge_and_branch (e, post_dom_bb);
  cfg_altered = true;

  /* If edge was already around, no updating is necessary.  */
  if (e2 != e)
    return e2;

  if (!gimple_seq_empty_p (phi_nodes (post_dom_bb)))
    {
      /* We are sure that for every live PHI we are seeing control dependent BB.
	 This means that we can pick any edge to duplicate PHI args from.  */
      FOR_EACH_EDGE (e2, ei, post_dom_bb->preds)
	if (e2 != e)
	  break;
      /* Manual iterator advance: remove_phi_node steps it itself.  */
      for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);)
	{
	  gimple phi = gsi_stmt (gsi);
	  tree op;
	  source_location locus;

	  /* PHIs for virtuals have no control dependency relation on them.
	     We are lost here and must force renaming of the symbol.  */
	  if (virtual_operand_p (gimple_phi_result (phi)))
	    {
	      mark_virtual_phi_result_for_renaming (phi);
	      remove_phi_node (&gsi, true);
	      continue;
	    }

	  /* Dead PHI do not imply control dependency.  */
	  if (!gimple_plf (phi, STMT_NECESSARY))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  /* Duplicate the argument (and its location) of the chosen
	     existing edge for the newly redirected edge E.  */
	  op = gimple_phi_arg_def (phi, e2->dest_idx);
	  locus = gimple_phi_arg_location (phi, e2->dest_idx);
	  add_phi_arg (phi, op, e, locus);
	  /* The resulting PHI if not dead can only be degenerate.  */
	  gcc_assert (degenerate_phi_p (phi));
	  gsi_next (&gsi);
	}
    }
  return e;
}
1065 /* Remove dead statement pointed to by iterator I. Receives the basic block BB
1066 containing I so that we don't have to look it up. */
1068 static void
1069 remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
1071 gimple stmt = gsi_stmt (*i);
1073 if (dump_file && (dump_flags & TDF_DETAILS))
1075 fprintf (dump_file, "Deleting : ");
1076 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1077 fprintf (dump_file, "\n");
1080 stats.removed++;
1082 /* If we have determined that a conditional branch statement contributes
1083 nothing to the program, then we not only remove it, but we also change
1084 the flow graph so that the current block will simply fall-thru to its
1085 immediate post-dominator. The blocks we are circumventing will be
1086 removed by cleanup_tree_cfg if this change in the flow graph makes them
1087 unreachable. */
1088 if (is_ctrl_stmt (stmt))
1090 basic_block post_dom_bb;
1091 edge e, e2;
1092 edge_iterator ei;
1094 post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);
1096 e = find_edge (bb, post_dom_bb);
1098 /* If edge is already there, try to use it. This avoids need to update
1099 PHI nodes. Also watch for cases where post dominator does not exists
1100 or is exit block. These can happen for infinite loops as we create
1101 fake edges in the dominator tree. */
1102 if (e)
1104 else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
1105 e = EDGE_SUCC (bb, 0);
1106 else
1107 e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb);
1108 gcc_assert (e);
1109 e->probability = REG_BR_PROB_BASE;
1110 e->count = bb->count;
1112 /* The edge is no longer associated with a conditional, so it does
1113 not have TRUE/FALSE flags. */
1114 e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
1116 /* The lone outgoing edge from BB will be a fallthru edge. */
1117 e->flags |= EDGE_FALLTHRU;
1119 /* Remove the remaining outgoing edges. */
1120 for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
1121 if (e != e2)
1123 cfg_altered = true;
1124 remove_edge (e2);
1126 else
1127 ei_next (&ei);
1130 /* If this is a store into a variable that is being optimized away,
1131 add a debug bind stmt if possible. */
1132 if (MAY_HAVE_DEBUG_STMTS
1133 && gimple_assign_single_p (stmt)
1134 && is_gimple_val (gimple_assign_rhs1 (stmt)))
1136 tree lhs = gimple_assign_lhs (stmt);
1137 if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
1138 && !DECL_IGNORED_P (lhs)
1139 && is_gimple_reg_type (TREE_TYPE (lhs))
1140 && !is_global_var (lhs)
1141 && !DECL_HAS_VALUE_EXPR_P (lhs))
1143 tree rhs = gimple_assign_rhs1 (stmt);
1144 gimple note
1145 = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
1146 gsi_insert_after (i, note, GSI_SAME_STMT);
1150 unlink_stmt_vdef (stmt);
1151 gsi_remove (i, true);
1152 release_defs (stmt);
1155 /* Eliminate unnecessary statements. Any instruction not marked as necessary
1156 contributes nothing to the program, and can be deleted. */
1158 static bool
1159 eliminate_unnecessary_stmts (void)
1161 bool something_changed = false;
1162 basic_block bb;
1163 gimple_stmt_iterator gsi, psi;
1164 gimple stmt;
1165 tree call;
1166 vec<basic_block> h;
1168 if (dump_file && (dump_flags & TDF_DETAILS))
1169 fprintf (dump_file, "\nEliminating unnecessary statements:\n");
1171 clear_special_calls ();
1173 /* Walking basic blocks and statements in reverse order avoids
1174 releasing SSA names before any other DEFs that refer to them are
1175 released. This helps avoid loss of debug information, as we get
1176 a chance to propagate all RHSs of removed SSAs into debug uses,
1177 rather than only the latest ones. E.g., consider:
1179 x_3 = y_1 + z_2;
1180 a_5 = x_3 - b_4;
1181 # DEBUG a => a_5
1183 If we were to release x_3 before a_5, when we reached a_5 and
1184 tried to substitute it into the debug stmt, we'd see x_3 there,
1185 but x_3's DEF, type, etc would have already been disconnected.
1186 By going backwards, the debug stmt first changes to:
1188 # DEBUG a => x_3 - b_4
1190 and then to:
1192 # DEBUG a => y_1 + z_2 - b_4
1194 as desired. */
1195 gcc_assert (dom_info_available_p (CDI_DOMINATORS));
1196 h = get_all_dominated_blocks (CDI_DOMINATORS,
1197 single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1199 while (h.length ())
1201 bb = h.pop ();
1203 /* Remove dead statements. */
1204 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
1206 stmt = gsi_stmt (gsi);
1208 psi = gsi;
1209 gsi_prev (&psi);
1211 stats.total++;
1213 /* We can mark a call to free as not necessary if the
1214 defining statement of its argument is not necessary
1215 (and thus is getting removed). */
1216 if (gimple_plf (stmt, STMT_NECESSARY)
1217 && gimple_call_builtin_p (stmt, BUILT_IN_FREE))
1219 tree ptr = gimple_call_arg (stmt, 0);
1220 if (TREE_CODE (ptr) == SSA_NAME)
1222 gimple def_stmt = SSA_NAME_DEF_STMT (ptr);
1223 if (!gimple_nop_p (def_stmt)
1224 && !gimple_plf (def_stmt, STMT_NECESSARY))
1225 gimple_set_plf (stmt, STMT_NECESSARY, false);
1227 /* We did not propagate necessity for free calls fed
1228 by allocation function to allow unnecessary
1229 alloc-free sequence elimination. For instrumented
1230 calls it also means we did not mark bounds producer
1231 as necessary and it is time to do it in case free
1232 call is not removed. */
1233 if (gimple_call_with_bounds_p (stmt))
1235 gimple bounds_def_stmt;
1236 tree bounds = gimple_call_arg (stmt, 1);
1237 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
1238 bounds_def_stmt = SSA_NAME_DEF_STMT (bounds);
1239 if (bounds_def_stmt
1240 && !gimple_plf (bounds_def_stmt, STMT_NECESSARY))
1241 gimple_set_plf (bounds_def_stmt, STMT_NECESSARY,
1242 gimple_plf (stmt, STMT_NECESSARY));
1246 /* If GSI is not necessary then remove it. */
1247 if (!gimple_plf (stmt, STMT_NECESSARY))
1249 if (!is_gimple_debug (stmt))
1250 something_changed = true;
1251 remove_dead_stmt (&gsi, bb);
1253 else if (is_gimple_call (stmt))
1255 tree name = gimple_call_lhs (stmt);
1257 notice_special_calls (stmt);
1259 /* When LHS of var = call (); is dead, simplify it into
1260 call (); saving one operand. */
1261 if (name
1262 && TREE_CODE (name) == SSA_NAME
1263 && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
1264 /* Avoid doing so for allocation calls which we
1265 did not mark as necessary, it will confuse the
1266 special logic we apply to malloc/free pair removal. */
1267 && (!(call = gimple_call_fndecl (stmt))
1268 || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
1269 || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
1270 && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
1271 && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
1272 && DECL_FUNCTION_CODE (call) != BUILT_IN_ALLOCA
1273 && (DECL_FUNCTION_CODE (call)
1274 != BUILT_IN_ALLOCA_WITH_ALIGN)))
1275 /* Avoid doing so for bndret calls for the same reason. */
1276 && !chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
1278 something_changed = true;
1279 if (dump_file && (dump_flags & TDF_DETAILS))
1281 fprintf (dump_file, "Deleting LHS of call: ");
1282 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1283 fprintf (dump_file, "\n");
1286 gimple_call_set_lhs (stmt, NULL_TREE);
1287 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1288 update_stmt (stmt);
1289 release_ssa_name (name);
1295 h.release ();
1297 /* Since we don't track liveness of virtual PHI nodes, it is possible that we
1298 rendered some PHI nodes unreachable while they are still in use.
1299 Mark them for renaming. */
1300 if (cfg_altered)
1302 basic_block prev_bb;
1304 find_unreachable_blocks ();
1306 /* Delete all unreachable basic blocks in reverse dominator order. */
1307 for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
1308 bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
1310 prev_bb = bb->prev_bb;
1312 if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
1313 || !(bb->flags & BB_REACHABLE))
1315 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1316 if (virtual_operand_p (gimple_phi_result (gsi_stmt (gsi))))
1318 bool found = false;
1319 imm_use_iterator iter;
1321 FOR_EACH_IMM_USE_STMT (stmt, iter, gimple_phi_result (gsi_stmt (gsi)))
1323 if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
1324 continue;
1325 if (gimple_code (stmt) == GIMPLE_PHI
1326 || gimple_plf (stmt, STMT_NECESSARY))
1328 found = true;
1329 BREAK_FROM_IMM_USE_STMT (iter);
1332 if (found)
1333 mark_virtual_phi_result_for_renaming (gsi_stmt (gsi));
1336 if (!(bb->flags & BB_REACHABLE))
1338 /* Speed up the removal of blocks that don't
1339 dominate others. Walking backwards, this should
1340 be the common case. ??? Do we need to recompute
1341 dominators because of cfg_altered? */
1342 if (!MAY_HAVE_DEBUG_STMTS
1343 || !first_dom_son (CDI_DOMINATORS, bb))
1344 delete_basic_block (bb);
1345 else
1347 h = get_all_dominated_blocks (CDI_DOMINATORS, bb);
1349 while (h.length ())
1351 bb = h.pop ();
1352 prev_bb = bb->prev_bb;
1353 /* Rearrangements to the CFG may have failed
1354 to update the dominators tree, so that
1355 formerly-dominated blocks are now
1356 otherwise reachable. */
1357 if (!!(bb->flags & BB_REACHABLE))
1358 continue;
1359 delete_basic_block (bb);
1362 h.release ();
1368 FOR_EACH_BB_FN (bb, cfun)
1370 /* Remove dead PHI nodes. */
1371 something_changed |= remove_dead_phis (bb);
1374 return something_changed;
1378 /* Print out removed statement statistics. */
1380 static void
1381 print_stats (void)
1383 float percg;
1385 percg = ((float) stats.removed / (float) stats.total) * 100;
1386 fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
1387 stats.removed, stats.total, (int) percg);
1389 if (stats.total_phis == 0)
1390 percg = 0;
1391 else
1392 percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;
1394 fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
1395 stats.removed_phis, stats.total_phis, (int) percg);
1398 /* Initialization for this pass. Set up the used data structures. */
1400 static void
1401 tree_dce_init (bool aggressive)
1403 memset ((void *) &stats, 0, sizeof (stats));
1405 if (aggressive)
1407 last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
1408 bitmap_clear (last_stmt_necessary);
1409 bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
1410 bitmap_clear (bb_contains_live_stmts);
1413 processed = sbitmap_alloc (num_ssa_names + 1);
1414 bitmap_clear (processed);
1416 worklist.create (64);
1417 cfg_altered = false;
1420 /* Cleanup after this pass. */
1422 static void
1423 tree_dce_done (bool aggressive)
1425 if (aggressive)
1427 delete cd;
1428 sbitmap_free (visited_control_parents);
1429 sbitmap_free (last_stmt_necessary);
1430 sbitmap_free (bb_contains_live_stmts);
1431 bb_contains_live_stmts = NULL;
1434 sbitmap_free (processed);
1436 worklist.release ();
1439 /* Main routine to eliminate dead code.
1441 AGGRESSIVE controls the aggressiveness of the algorithm.
1442 In conservative mode, we ignore control dependence and simply declare
1443 all but the most trivially dead branches necessary. This mode is fast.
1444 In aggressive mode, control dependences are taken into account, which
1445 results in more dead code elimination, but at the cost of some time.
1447 FIXME: Aggressive mode before PRE doesn't work currently because
1448 the dominance info is not invalidated after DCE1. This is
1449 not an issue right now because we only run aggressive DCE
1450 as the last tree SSA pass, but keep this in mind when you
1451 start experimenting with pass ordering. */
1453 static unsigned int
1454 perform_tree_ssa_dce (bool aggressive)
1456 bool something_changed = 0;
1458 calculate_dominance_info (CDI_DOMINATORS);
1460 /* Preheaders are needed for SCEV to work.
1461 Simple lateches and recorded exits improve chances that loop will
1462 proved to be finite in testcases such as in loop-15.c and loop-24.c */
1463 if (aggressive)
1464 loop_optimizer_init (LOOPS_NORMAL
1465 | LOOPS_HAVE_RECORDED_EXITS);
1467 tree_dce_init (aggressive);
1469 if (aggressive)
1471 /* Compute control dependence. */
1472 calculate_dominance_info (CDI_POST_DOMINATORS);
1473 cd = new control_dependences (create_edge_list ());
1475 visited_control_parents =
1476 sbitmap_alloc (last_basic_block_for_fn (cfun));
1477 bitmap_clear (visited_control_parents);
1479 mark_dfs_back_edges ();
1482 find_obviously_necessary_stmts (aggressive);
1484 if (aggressive)
1485 loop_optimizer_finalize ();
1487 longest_chain = 0;
1488 total_chain = 0;
1489 nr_walks = 0;
1490 chain_ovfl = false;
1491 visited = BITMAP_ALLOC (NULL);
1492 propagate_necessity (aggressive);
1493 BITMAP_FREE (visited);
1495 something_changed |= eliminate_unnecessary_stmts ();
1496 something_changed |= cfg_altered;
1498 /* We do not update postdominators, so free them unconditionally. */
1499 free_dominance_info (CDI_POST_DOMINATORS);
1501 /* If we removed paths in the CFG, then we need to update
1502 dominators as well. I haven't investigated the possibility
1503 of incrementally updating dominators. */
1504 if (cfg_altered)
1505 free_dominance_info (CDI_DOMINATORS);
1507 statistics_counter_event (cfun, "Statements deleted", stats.removed);
1508 statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);
1510 /* Debugging dumps. */
1511 if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
1512 print_stats ();
1514 tree_dce_done (aggressive);
1516 if (something_changed)
1518 free_numbers_of_iterations_estimates ();
1519 if (scev_initialized_p ())
1520 scev_reset ();
1521 return TODO_update_ssa | TODO_cleanup_cfg;
1523 return 0;
1526 /* Pass entry points. */
1527 static unsigned int
1528 tree_ssa_dce (void)
1530 return perform_tree_ssa_dce (/*aggressive=*/false);
1533 static unsigned int
1534 tree_ssa_cd_dce (void)
1536 return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
1539 namespace {
1541 const pass_data pass_data_dce =
1543 GIMPLE_PASS, /* type */
1544 "dce", /* name */
1545 OPTGROUP_NONE, /* optinfo_flags */
1546 TV_TREE_DCE, /* tv_id */
1547 ( PROP_cfg | PROP_ssa ), /* properties_required */
1548 0, /* properties_provided */
1549 0, /* properties_destroyed */
1550 0, /* todo_flags_start */
1551 0, /* todo_flags_finish */
1554 class pass_dce : public gimple_opt_pass
1556 public:
1557 pass_dce (gcc::context *ctxt)
1558 : gimple_opt_pass (pass_data_dce, ctxt)
1561 /* opt_pass methods: */
1562 opt_pass * clone () { return new pass_dce (m_ctxt); }
1563 virtual bool gate (function *) { return flag_tree_dce != 0; }
1564 virtual unsigned int execute (function *) { return tree_ssa_dce (); }
1566 }; // class pass_dce
1568 } // anon namespace
1570 gimple_opt_pass *
1571 make_pass_dce (gcc::context *ctxt)
1573 return new pass_dce (ctxt);
1576 namespace {
1578 const pass_data pass_data_cd_dce =
1580 GIMPLE_PASS, /* type */
1581 "cddce", /* name */
1582 OPTGROUP_NONE, /* optinfo_flags */
1583 TV_TREE_CD_DCE, /* tv_id */
1584 ( PROP_cfg | PROP_ssa ), /* properties_required */
1585 0, /* properties_provided */
1586 0, /* properties_destroyed */
1587 0, /* todo_flags_start */
1588 0, /* todo_flags_finish */
1591 class pass_cd_dce : public gimple_opt_pass
1593 public:
1594 pass_cd_dce (gcc::context *ctxt)
1595 : gimple_opt_pass (pass_data_cd_dce, ctxt)
1598 /* opt_pass methods: */
1599 opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
1600 virtual bool gate (function *) { return flag_tree_dce != 0; }
1601 virtual unsigned int execute (function *) { return tree_ssa_cd_dce (); }
1603 }; // class pass_cd_dce
1605 } // anon namespace
1607 gimple_opt_pass *
1608 make_pass_cd_dce (gcc::context *ctxt)
1610 return new pass_cd_dce (ctxt);