/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002-2015 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output.  "Dead statements" have no impact
   on the program's output, while "necessary statements" may have
   impact.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */
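
/* As a small illustrative example (the SSA names below are invented
   for exposition), consider:

     a_1 = x_2 + 1;
     b_3 = x_2 * 2;
     return a_1;

   Phase 1 marks the return statement as necessary, phase 2 propagates
   necessity to the definition of a_1, and phase 3 removes the
   assignment to b_3, whose value is never used.  */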
#include "coretypes.h"
#include "fold-const.h"
#include "gimple-pretty-print.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "insn-config.h"
#include "tree-pass.h"
#include "tree-scalar-evolution.h"
#include "tree-chkp.h"
#include "tree-ssa-propagate.h"
#include "gimple-fold.h"
static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1

static vec<gimple> worklist;
/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;
/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static control_dependences *cd;

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;
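
/* An illustrative sketch (block and SSA names invented):

     if (p_1 != 0)
       x_2 = 1;                <- block A
     x_3 = PHI <x_2, x_4>;

   Block A is control dependent on the edge of the conditional that
   reaches it; that edge decides whether A executes at all.  So when a
   necessary statement lives in A, the statement computing p_1 must be
   kept as well.  */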
/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;
/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */

static void
mark_stmt_necessary (gimple stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    worklist.safe_push (stmt);
  if (bb_contains_live_stmts && !is_gimple_debug (stmt))
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}
/* Mark the statement defining operand OP as necessary.  */

static void
mark_operand_necessary (tree op)
{
  gimple stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (bitmap_bit_p (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
                  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  bitmap_set_bit (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op, 0);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
  worklist.safe_push (stmt);
}
/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
{
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement could throw, it can be deemed necessary.  */
  if (cfun->can_throw_non_call_exceptions
      && !cfun->can_delete_dead_exceptions
      && stmt_could_throw_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
        tree callee = gimple_call_fndecl (stmt);
        if (callee != NULL_TREE
            && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_MALLOC:
            case BUILT_IN_ALIGNED_ALLOC:
            case BUILT_IN_CALLOC:
            case BUILT_IN_ALLOCA:
            case BUILT_IN_ALLOCA_WITH_ALIGN:
              return;

            default:;
            }
        /* Most, but not all function calls are required.  Function calls that
           produce no result and have no side effects (i.e. const or pure
           functions) are unnecessary.  */
        if (gimple_has_side_effects (stmt))
          {
            mark_stmt_necessary (stmt, true);
            return;
          }
        if (!gimple_call_lhs (stmt))
          return;
        break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
         easily locate the debug temp bind stmt for a use thereof,
         we could refrain from marking all debug temps here, and
         mark them only if they're used.  */
      if (!gimple_debug_bind_p (stmt)
          || gimple_debug_bind_has_value_p (stmt)
          || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
        mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
        mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
        return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (stmt_may_clobber_global_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}
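
/* A rough illustration of the classification above (invented GIMPLE):
   "return x_1;", volatile asms and stores that may be visible outside
   the function (see stmt_may_clobber_global_p) are marked necessary
   immediately, while a plain arithmetic statement like
   "tmp_3 = a_1 + b_2;" stays unmarked here and survives only if
   propagate_necessity later finds a necessary use of tmp_3.  */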
/* Mark the last statement of BB as necessary.  */

static void
mark_last_stmt_necessary (basic_block bb)
{
  gimple stmt = last_stmt (bb);

  bitmap_set_bit (last_stmt_necessary, bb->index);
  bitmap_set_bit (bb_contains_live_stmts, bb->index);

  /* We actually mark the statement only if it is a control statement.  */
  if (stmt && is_ctrl_stmt (stmt))
    mark_stmt_necessary (stmt, true);
}
/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));

  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;

  EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
                            0, edge_number, bi)
    {
      basic_block cd_bb = cd->get_edge (edge_number)->src;

      if (ignore_self && cd_bb == bb)
        {
          skipped = true;
          continue;
        }

      if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
        mark_last_stmt_necessary (cd_bb);
    }

  if (!skipped)
    bitmap_set_bit (visited_control_parents, bb->index);
}
/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary here; otherwise control dependence analysis decides.  */

static void
find_obviously_necessary_stmts (bool aggressive)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple phi, stmt;
  int flags;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          gimple_set_plf (phi, STMT_NECESSARY, false);
        }

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          stmt = gsi_stmt (gsi);
          gimple_set_plf (stmt, STMT_NECESSARY, false);
          mark_stmt_if_obviously_necessary (stmt, aggressive);
        }
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent the empty possibly infinite loops from being removed.  */
  if (aggressive)
    {
      struct loop *loop;
      if (mark_irreducible_loops ())
        FOR_EACH_BB_FN (bb, cfun)
          {
            edge_iterator ei;
            FOR_EACH_EDGE (e, ei, bb->succs)
              if ((e->flags & EDGE_DFS_BACK)
                  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
                {
                  if (dump_file)
                    fprintf (dump_file,
                             "Marking back edge of irreducible loop %i->%i\n",
                             e->src->index, e->dest->index);
                  mark_control_dependent_edges_necessary (e->dest, false);
                }
          }

      FOR_EACH_LOOP (loop, 0)
        if (!finite_loop_p (loop))
          {
            if (dump_file)
              fprintf (dump_file, "cannot prove finiteness of loop %i\n",
                       loop->num);
            mark_control_dependent_edges_necessary (loop->latch, false);
          }
    }
}
/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  return !(DECL_P (ref)
           && !may_be_aliased (ref));
}
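
/* For example (hypothetical decls): for a component reference a.b.c
   where "a" is a local whose address is never taken, the loop strips
   the handled components down to the VAR_DECL and we return false;
   for an indirect reference *p_1 the base is a MEM_REF not based on
   a decl, so we conservatively return true.  */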
static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;
/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
         and we can catch it in the current function where we could inspect
         the previous value.
         ??? We only need to care about the RHS throwing.  For aggregate
         assignments or similar calls and non-call exceptions the LHS
         might throw as well.  */
      && !stmt_can_throw_internal (def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      HOST_WIDE_INT size, offset, max_size;
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
         so base == refd->base does not always hold.  */
      if (base == ref->base)
        {
          /* For a must-alias check we need to be able to constrain
             the accesses properly.  */
          if (size != -1 && size == max_size
              && ref->max_size != -1)
            {
              if (offset <= ref->offset
                  && offset + size >= ref->offset + ref->max_size)
                return true;
            }
          /* Or they need to be exactly the same.  */
          else if (ref->ref
                   /* Make sure there is no induction variable involved
                      in the references (gcc.c-torture/execute/pr42142.c).
                      The simplest way is to check if the kill dominates
                      the use.  */
                   /* But when both are in the same block we cannot
                      easily tell whether we came from a backedge
                      unless we decide to compute stmt UIDs.  */
                   && (basic_block) data != gimple_bb (def_stmt)
                   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
                                      gimple_bb (def_stmt))
                   && operand_equal_p (ref->ref, lhs, 0))
            return true;
        }
    }

  /* Otherwise keep walking.  */
  return false;
}
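
/* A sketch of a walk using the above callback (invented GIMPLE), for
   a load "tmp_4 = a;" of a non-aliased local "a":

     a = c;       <- never visited: the walk stopped at the kill below
     a = b;       <- visited second, marked; overwrites all of "a",
                     so the callback returns true and the walk stops
     a.x = 1;     <- visited first, marked necessary (partial may-def)
     tmp_4 = a;   <- the load whose VUSE starts the walk

   Defs above the kill stay unmarked and thus remain removable.  */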
/* Mark the reaching definitions of the memory reference REF used in
   STMT necessary, using the worker above.  */

static void
mark_aliased_reaching_defs_necessary (gimple stmt, tree ref)
{
  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
                              mark_aliased_reaching_defs_necessary_1,
                              gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}
/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
                                    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
                  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
        return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (is_gimple_call (def_stmt))
    {
      tree callee = gimple_call_fndecl (def_stmt);
      if (callee != NULL_TREE
          && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee))
          {
          case BUILT_IN_MALLOC:
          case BUILT_IN_ALIGNED_ALLOC:
          case BUILT_IN_CALLOC:
          case BUILT_IN_ALLOCA:
          case BUILT_IN_ALLOCA_WITH_ALIGN:
            return false;

          default:;
          }
    }

  mark_operand_necessary (vdef);

  return false;
}
/* Mark all reaching definitions of STMT's VUSE necessary, using the
   worker above and the global visited bitmap.  */

static void
mark_all_reaching_defs_necessary (gimple stmt)
{
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
                      mark_all_reaching_defs_necessary_1, NULL, &visited);
}
/* Return true for PHI nodes with one or identical arguments; such a
   degenerate PHI can be removed.  */

static bool
degenerate_phi_p (gimple phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}
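
/* E.g. "x_4 = PHI <a_1(3), a_1(4)>" is degenerate: every argument is
   a_1, so the PHI just copies a_1 whatever edge control arrives on.  */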
/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode (!AGGRESSIVE), control dependences are ignored.  */

static void
propagate_necessity (bool aggressive)
{
  gimple stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (worklist.length () > 0)
    {
      /* Take STMT from worklist.  */
      stmt = worklist.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "processing: ");
          print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
          fprintf (dump_file, "\n");
        }

      if (aggressive)
        {
          /* Mark the last statement of the basic blocks on which the block
             containing STMT is control dependent, but only if we haven't
             already done so.  */
          basic_block bb = gimple_bb (stmt);
          if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
              && !bitmap_bit_p (visited_control_parents, bb->index))
            mark_control_dependent_edges_necessary (bb, false);
        }
      if (gimple_code (stmt) == GIMPLE_PHI
          /* We do not process virtual PHI nodes nor do we track their
             necessity.  */
          && !virtual_operand_p (gimple_phi_result (stmt)))
        {
          /* PHI nodes are somewhat special in that each PHI alternative has
             data and control dependencies.  All the statements feeding the
             PHI node's arguments are always necessary.  In aggressive mode,
             we also consider the control dependent edges leading to the
             predecessor block associated with each PHI alternative as
             necessary.  */
          gphi *phi = as_a <gphi *> (stmt);
          size_t k;

          for (k = 0; k < gimple_phi_num_args (stmt); k++)
            {
              tree arg = PHI_ARG_DEF (stmt, k);
              if (TREE_CODE (arg) == SSA_NAME)
                mark_operand_necessary (arg);
            }

          /* For PHI operands it matters from where the control flow arrives
             to the BB.  Consider the following example:

             a = exp1;
             b = exp2;
             if (test)
                ;
             else
                ;
             c = PHI (a, b)

             We need to mark the control dependence of the empty basic
             blocks, since they contain the computation of PHI operands.

             Doing so is too restrictive in the case the predecessor block
             is in a loop.  Consider:

              if (b)
                {
                  int i;
                  for (i = 0; i < 1000; ++i)
                    ;
                  j = 0;
                }
              return j;

             There is a PHI for j in the BB containing the return statement.
             In this case the control dependence of the predecessor block
             (that is within the empty loop) also contains the block
             determining the number of iterations of the loop, which would
             prevent removing the empty loop in this case.

             This scenario can be avoided by splitting critical edges.
             To save the critical edge splitting pass we identify how the
             control dependence would look like if the edge was split.

             Consider the modified CFG created from the current CFG by
             splitting edge B->C.  In the postdominance tree of the modified
             CFG, C' is always a child of C.  There are two cases how the
             children of C' can look like:

             1) C' is a leaf

                In this case the only basic block C' is control dependent
                on is B.

             2) C' has a single child that is B

                In this case the control dependence of C' is the same as
                the control dependence of B in the original CFG except for
                block B itself (since C' postdominates B in the modified
                CFG).

             Now how to decide what case happens?  There are two basic
             options:

             a) C postdominates B.  Then C immediately postdominates B and
                case 2 happens iff there is no other way from B to C except
                the edge B->C.

                There is another way from B to C iff there is a successor
                of B that is not postdominated by B.  Testing this condition
                is somewhat expensive, because we need to iterate over all
                successors of B.  We are safe to assume that this does not
                happen: we will mark B as needed when processing the other
                path from B to C that is control dependent on B, and marking
                the control dependencies of B itself is harmless because
                they will be processed anyway after processing the control
                statement in B.

             b) C does not postdominate B.  Always case 1 happens since
                there is a path from C to the exit that does not go through
                B and thus also C'.  */

          if (aggressive && !degenerate_phi_p (stmt))
            {
              for (k = 0; k < gimple_phi_num_args (stmt); k++)
                {
                  basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;

                  if (gimple_bb (stmt)
                      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
                    {
                      if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
                        mark_last_stmt_necessary (arg_bb);
                    }
                  else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
                           && !bitmap_bit_p (visited_control_parents,
                                             arg_bb->index))
                    mark_control_dependent_edges_necessary (arg_bb, true);
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* If this is a call to free which is directly fed by an
             allocation function do not mark that necessary through
             processing the argument.  */
          if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              gimple def_stmt;
              tree def_callee;
              /* If the pointer we free is defined by an allocation
                 function do not add the call to the worklist.  */
              if (TREE_CODE (ptr) == SSA_NAME
                  && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
                  && (def_callee = gimple_call_fndecl (def_stmt))
                  && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
                      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
                {
                  gimple bounds_def_stmt;
                  tree bounds;

                  /* For instrumented calls we should also check that the
                     used bounds are returned by the same allocation call.  */
                  if (!gimple_call_with_bounds_p (stmt)
                      || ((bounds = gimple_call_arg (stmt, 1))
                          && TREE_CODE (bounds) == SSA_NAME
                          && (bounds_def_stmt = SSA_NAME_DEF_STMT (bounds))
                          && chkp_gimple_call_builtin_p (bounds_def_stmt,
                                                         BUILT_IN_CHKP_BNDRET)
                          && gimple_call_arg (bounds_def_stmt, 0) == ptr))
                    continue;
                }
            }

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
            mark_operand_necessary (use);

          use = gimple_vuse (stmt);
          if (!use)
            continue;

          /* If we dropped to simple mode make all immediately
             reachable definitions necessary.  */
          if (chain_ovfl)
            {
              mark_all_reaching_defs_necessary (stmt);
              continue;
            }
          /* For statements that may load from memory (have a VUSE) we
             have to mark all reaching (may-)definitions as necessary.
             We partition this task into two cases:
              1) explicit loads based on decls that are not aliased
              2) implicit loads (like calls) and explicit loads not
                 based on decls that are not aliased (like indirect
                 references or loads from globals)
             For 1) we mark all reaching may-defs as necessary, stopping
             at dominating kills.  For 2) we want to mark all dominating
             references necessary, but non-aliased ones which we handle
             in 1).  By keeping a global visited bitmap for references
             we walk for 2) we avoid quadratic behavior for those.  */

          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);
              unsigned i;

              /* Calls to functions that are merely acting as barriers
                 or that only store to memory do not make any previous
                 stores necessary.  */
              if (callee != NULL_TREE
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
                      || (DECL_FUNCTION_CODE (callee)
                          == BUILT_IN_ALLOCA_WITH_ALIGN)
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
                continue;

              /* Calls implicitly load from memory; their arguments
                 in addition may explicitly perform memory loads.  */
              mark_all_reaching_defs_necessary (stmt);
              for (i = 0; i < gimple_call_num_args (stmt); ++i)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if (TREE_CODE (arg) == SSA_NAME
                      || is_gimple_min_invariant (arg))
                    continue;
                  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
                    arg = TREE_OPERAND (arg, 0);
                  if (!ref_may_be_aliased (arg))
                    mark_aliased_reaching_defs_necessary (stmt, arg);
                }
            }
          else if (gimple_assign_single_p (stmt))
            {
              tree rhs;
              /* If this is a load mark things necessary.  */
              rhs = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
            {
              tree rhs = gimple_return_retval (return_stmt);
              /* A return statement may perform a load.  */
              if (rhs
                  && TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
            {
              unsigned i;
              mark_all_reaching_defs_necessary (stmt);
              /* Inputs may perform loads.  */
              for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
                {
                  tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
                  if (TREE_CODE (op) != SSA_NAME
                      && !is_gimple_min_invariant (op)
                      && TREE_CODE (op) != CONSTRUCTOR
                      && !ref_may_be_aliased (op))
                    mark_aliased_reaching_defs_necessary (stmt, op);
                }
            }
          else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
            {
              /* The beginning of a transaction is a memory barrier.  */
              /* ??? If we were really cool, we'd only be a barrier
                 for the memories touched within the transaction.  */
              mark_all_reaching_defs_necessary (stmt);
            }
          /* If we over-used our alias oracle budget drop to simple
             mode.  The cost metric allows quadratic behavior
             (number of uses times number of may-defs queries) up to
             a constant maximal number of queries and after that falls back
             to super-linear complexity.  */
          if (/* Constant but quadratic for small functions.  */
              total_chain > 128 * 128
              /* Linear in the number of may-defs.  */
              && total_chain > 32 * longest_chain
              /* Linear in the number of uses.  */
              && total_chain > nr_walks * 32)
            {
              chain_ovfl = true;
              if (visited)
                bitmap_clear (visited);
            }
        }
    }
}
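
/* A quick reading of the budget above: the pass drops to simple mode
   only once more than 128*128 = 16384 may-defs have been visited in
   total, and only if that total also exceeds 32 times both the longest
   single walk and the number of walks, i.e. once the average walk
   visits more than 32 may-defs.  */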
/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gphi *phi;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi.phi ();

      /* We do not track necessity of virtual PHI nodes.  Instead do
         very simple dead PHI removal here.  */
      if (virtual_operand_p (gimple_phi_result (phi)))
        {
          /* Virtual PHI nodes with one or identical arguments
             can be removed.  */
          if (degenerate_phi_p (phi))
            {
              tree vdef = gimple_phi_result (phi);
              tree vuse = gimple_phi_arg_def (phi, 0);

              use_operand_p use_p;
              imm_use_iterator iter;
              gimple use_stmt;
              FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, vuse);

              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
                  && TREE_CODE (vuse) == SSA_NAME)
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
            }
          else
            gimple_set_plf (phi, STMT_NECESSARY, true);
        }

      if (!gimple_plf (phi, STMT_NECESSARY))
        {
          something_changed = true;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Deleting : ");
              print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          remove_phi_node (&gsi, true);
          stats.removed_phis++;
          continue;
        }

      gsi_next (&gsi);
    }
  return something_changed;
}
/* Forward edge E to respective POST_DOM_BB and update PHIs.  */

static edge
forward_edge_to_pdom (edge e, basic_block post_dom_bb)
{
  gphi_iterator gsi;
  edge e2 = NULL;
  edge_iterator ei;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Redirecting edge %i->%i to %i\n", e->src->index,
             e->dest->index, post_dom_bb->index);

  e2 = redirect_edge_and_branch (e, post_dom_bb);

  /* If the edge was already around, no updating is necessary.  */
  if (e2 != e)
    return e2;

  if (!gimple_seq_empty_p (phi_nodes (post_dom_bb)))
    {
      /* We are sure that for every live PHI we are seeing a control
         dependent BB.  This means that we can pick any edge to duplicate
         PHI args from.  */
      FOR_EACH_EDGE (e2, ei, post_dom_bb->preds)
        if (e2 != e)
          break;

      for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);)
        {
          gphi *phi = gsi.phi ();
          tree op;
          source_location locus;

          /* PHIs for virtuals have no control dependency relation on them.
             We are lost here and must force renaming of the symbol.  */
          if (virtual_operand_p (gimple_phi_result (phi)))
            {
              mark_virtual_phi_result_for_renaming (phi);
              remove_phi_node (&gsi, true);
              continue;
            }

          /* Dead PHIs do not imply control dependency.  */
          if (!gimple_plf (phi, STMT_NECESSARY))
            {
              gsi_next (&gsi);
              continue;
            }

          op = gimple_phi_arg_def (phi, e2->dest_idx);
          locus = gimple_phi_arg_location (phi, e2->dest_idx);
          add_phi_arg (phi, op, e, locus);
          /* The resulting PHI, if not dead, can only be degenerate.  */
          gcc_assert (degenerate_phi_p (phi));
          gsi_next (&gsi);
        }
    }
  return e;
}
/* Remove dead statement pointed to by iterator I.  Receives the basic block BB
   containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
  gimple stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we also change
     the flow graph so that the current block will simply fall-thru to its
     immediate post-dominator.  The blocks we are circumventing will be
     removed by cleanup_tree_cfg if this change in the flow graph makes them
     unreachable.  */
  if (is_ctrl_stmt (stmt))
    {
      basic_block post_dom_bb;
      edge e, e2;
      edge_iterator ei;

      post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);

      e = find_edge (bb, post_dom_bb);

      /* If the edge is already there, try to use it.  This avoids the need
         to update PHI nodes.  Also watch for cases where the post dominator
         does not exist or is the exit block.  These can happen for infinite
         loops as we create fake edges in the dominator tree.  */
      if (e)
        ;
      else if (! post_dom_bb
               || post_dom_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
        e = EDGE_SUCC (bb, 0);
      else
        e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb);
      gcc_assert (e);
      e->probability = REG_BR_PROB_BASE;
      e->count = bb->count;

      /* The edge is no longer associated with a conditional, so it does
         not have TRUE/FALSE flags.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
        if (e != e2)
          {
            cfg_altered = true;
            /* If we made a BB unconditionally exit a loop then this
               transform alters the set of BBs in the loop.  Schedule
               a fixup.  */
            if (loop_exit_edge_p (bb->loop_father, e))
              loops_state_set (LOOPS_NEED_FIXUP);
            remove_edge (e2);
          }
        else
          ei_next (&ei);
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
          && !DECL_IGNORED_P (lhs)
          && is_gimple_reg_type (TREE_TYPE (lhs))
          && !is_global_var (lhs)
          && !DECL_HAS_VALUE_EXPR_P (lhs))
        {
          tree rhs = gimple_assign_rhs1 (stmt);
          gdebug *note
            = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
          gsi_insert_after (i, note, GSI_SAME_STMT);
        }
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}
/* Helper for maybe_optimize_arith_overflow.  Find in *TP if there are any
   uses of DATA (an SSA_NAME) other than a REALPART_EXPR referencing it.  */

static tree
find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
{
  if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
    *walk_subtrees = 0;
  if (*tp == (tree) data)
    return *tp;
  return NULL_TREE;
}
/* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
   but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
   into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
   uses.  */

static void
maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
                               enum tree_code subcode)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);

  if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
    return;

  imm_use_iterator imm_iter;
  use_operand_p use_p;
  bool has_debug_uses = false;
  bool has_realpart_uses = false;
  bool has_other_uses = false;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
    {
      gimple use_stmt = USE_STMT (use_p);
      if (is_gimple_debug (use_stmt))
        has_debug_uses = true;
      else if (is_gimple_assign (use_stmt)
               && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
               && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
        has_realpart_uses = true;
      else
        {
          has_other_uses = true;
          break;
        }
    }

  if (!has_realpart_uses || has_other_uses)
    return;

  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  tree type = TREE_TYPE (TREE_TYPE (lhs));
  tree utype = type;
  if (!TYPE_UNSIGNED (type))
    utype = build_nonstandard_integer_type (TYPE_PRECISION (type), 1);
  tree result = fold_build2_loc (loc, subcode, utype,
                                 fold_convert_loc (loc, utype, arg0),
                                 fold_convert_loc (loc, utype, arg1));
  result = fold_convert_loc (loc, type, result);

  if (has_debug_uses)
    {
      gimple use_stmt;
      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
        {
          if (!gimple_debug_bind_p (use_stmt))
            continue;
          tree v = gimple_debug_bind_get_value (use_stmt);
          if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
            {
              gimple_debug_bind_reset_value (use_stmt);
              update_stmt (use_stmt);
            }
        }
    }

  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
    result = drop_tree_overflow (result);
  tree overflow = build_zero_cst (type);
  tree ctype = build_complex_type (type);
  if (TREE_CODE (result) == INTEGER_CST)
    result = build_complex (ctype, result, overflow);
  else
    result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
                         ctype, result, overflow);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Transforming call: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "because the overflow result is never used into: ");
      print_generic_stmt (dump_file, result, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  if (!update_call_from_tree (gsi, result))
    gimplify_and_update_call_from_tree (gsi, result);
}
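
/* For example (invented GIMPLE), when only the sum part of

     _5 = ADD_OVERFLOW (a_1, b_2);
     sum_6 = REALPART_EXPR <_5>;

   is used and the IMAGPART_EXPR (the overflow flag) never is, the call
   is replaced by the equivalent of

     _5 = COMPLEX_EXPR <(int) ((unsigned int) a_1 + (unsigned int) b_2), 0>;

   where the addition is carried out in the corresponding unsigned type
   so that the wrapping cannot invoke undefined overflow.  */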
/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple stmt;
  tree call;
  vec<basic_block> h;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS,
                                single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

  while (h.length ())
    {
      bb = h.pop ();

      /* Remove dead statements.  */
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
        {
          stmt = gsi_stmt (gsi);

          psi = gsi;
          gsi_prev (&psi);

          stats.total++;

          /* We can mark a call to free as not necessary if the
             defining statement of its argument is not necessary
             (and thus is getting removed).  */
          if (gimple_plf (stmt, STMT_NECESSARY)
              && gimple_call_builtin_p (stmt, BUILT_IN_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              if (TREE_CODE (ptr) == SSA_NAME)
                {
                  gimple def_stmt = SSA_NAME_DEF_STMT (ptr);
                  if (!gimple_nop_p (def_stmt)
                      && !gimple_plf (def_stmt, STMT_NECESSARY))
                    gimple_set_plf (stmt, STMT_NECESSARY, false);
                }
              /* We did not propagate necessity for free calls fed
                 by an allocation function, to allow unnecessary
                 alloc-free sequence elimination.  For instrumented
                 calls it also means we did not mark the bounds producer
                 as necessary, and it is time to do it in case the free
                 call is not removed.  */
              if (gimple_call_with_bounds_p (stmt))
                {
                  gimple bounds_def_stmt;
                  tree bounds = gimple_call_arg (stmt, 1);
                  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
                  bounds_def_stmt = SSA_NAME_DEF_STMT (bounds);
                  if (bounds_def_stmt
                      && !gimple_plf (bounds_def_stmt, STMT_NECESSARY))
                    gimple_set_plf (bounds_def_stmt, STMT_NECESSARY,
                                    gimple_plf (stmt, STMT_NECESSARY));
                }
            }
          /* If GSI is not necessary then remove it.  */
          if (!gimple_plf (stmt, STMT_NECESSARY))
            {
              /* Keep clobbers that we can keep live live.  */
              if (gimple_clobber_p (stmt))
                {
                  ssa_op_iter iter;
                  use_operand_p use_p;
                  bool dead = false;

                  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
                    {
                      tree name = USE_FROM_PTR (use_p);
                      if (!SSA_NAME_IS_DEFAULT_DEF (name)
                          && !bitmap_bit_p (processed,
                                            SSA_NAME_VERSION (name)))
                        {
                          dead = true;
                          break;
                        }
                    }
                  if (!dead)
                    continue;
                }
              if (!is_gimple_debug (stmt))
                something_changed = true;
              remove_dead_stmt (&gsi, bb);
            }
          else if (is_gimple_call (stmt))
            {
              tree name = gimple_call_lhs (stmt);

              notice_special_calls (as_a <gcall *> (stmt));

              /* When LHS of var = call (); is dead, simplify it into
                 call (); saving one operand.  */
              if (name
                  && TREE_CODE (name) == SSA_NAME
                  && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
                  /* Avoid doing so for allocation calls which we
                     did not mark as necessary, it will confuse the
                     special logic we apply to malloc/free pair removal.  */
                  && (!(call = gimple_call_fndecl (stmt))
                      || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
                      || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
                          && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
                          && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
                          && DECL_FUNCTION_CODE (call) != BUILT_IN_ALLOCA
                          && (DECL_FUNCTION_CODE (call)
                              != BUILT_IN_ALLOCA_WITH_ALIGN)))
                  /* Avoid doing so for bndret calls for the same reason.  */
                  && !chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
                {
                  something_changed = true;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Deleting LHS of call: ");
                      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
                      fprintf (dump_file, "\n");
                    }

                  gimple_call_set_lhs (stmt, NULL_TREE);
                  maybe_clean_or_replace_eh_stmt (stmt, stmt);
                  update_stmt (stmt);
                  release_ssa_name (name);

                  /* GOMP_SIMD_LANE without lhs is not needed.  */
                  if (gimple_call_internal_p (stmt)
                      && gimple_call_internal_fn (stmt) == IFN_GOMP_SIMD_LANE)
                    remove_dead_stmt (&gsi, bb);
                }
              else if (gimple_call_internal_p (stmt))
                switch (gimple_call_internal_fn (stmt))
                  {
                  case IFN_ADD_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
                    break;
                  case IFN_SUB_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
                    break;
                  case IFN_MUL_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
                    break;
                  default:
                    break;
                  }
            }
        }
    }

  h.release ();
  /* Since we don't track liveness of virtual PHI nodes, it is possible that we
     rendered some PHI nodes unreachable while they are still in use.
     Mark them for renaming.  */
  if (cfg_altered)
    {
      basic_block prev_bb;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
           bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
        {
          prev_bb = bb->prev_bb;

          if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
              || !(bb->flags & BB_REACHABLE))
            {
              for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
                   gsi_next (&gsi))
                if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
                  {
                    bool found = false;
                    imm_use_iterator iter;

                    FOR_EACH_IMM_USE_STMT (stmt, iter,
                                           gimple_phi_result (gsi.phi ()))
                      {
                        if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
                          continue;
                        if (gimple_code (stmt) == GIMPLE_PHI
                            || gimple_plf (stmt, STMT_NECESSARY))
                          {
                            found = true;
                            BREAK_FROM_IMM_USE_STMT (iter);
                          }
                      }
                    if (found)
                      mark_virtual_phi_result_for_renaming (gsi.phi ());
                  }

              if (!(bb->flags & BB_REACHABLE))
                {
                  /* Speed up the removal of blocks that don't
                     dominate others.  Walking backwards, this should
                     be the common case.  ??? Do we need to recompute
                     dominators because of cfg_altered?  */
                  if (!MAY_HAVE_DEBUG_STMTS
                      || !first_dom_son (CDI_DOMINATORS, bb))
                    delete_basic_block (bb);
                  else
                    {
                      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

                      while (h.length ())
                        {
                          bb = h.pop ();
                          prev_bb = bb->prev_bb;
                          /* Rearrangements to the CFG may have failed
                             to update the dominators tree, so that
                             formerly-dominated blocks are now
                             otherwise reachable.  */
                          if (!!(bb->flags & BB_REACHABLE))
                            continue;
                          delete_basic_block (bb);
                        }

                      h.release ();
                    }
                }
            }
        }
    }

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  return something_changed;
}
/* Print out removed statement statistics.  */

static void
print_stats (void)
{
  float percg;

  percg = ((float) stats.removed / (float) stats.total) * 100;
  fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
           stats.removed, stats.total, (int) percg);

  if (stats.total_phis == 0)
    return;

  percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;
  fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
           stats.removed_phis, stats.total_phis, (int) percg);
}
/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (last_stmt_necessary);
      bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (bb_contains_live_stmts);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  bitmap_clear (processed);

  worklist.create (64);
  cfg_altered = false;
}
/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      delete cd;
      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
      sbitmap_free (bb_contains_live_stmts);
      bb_contains_live_stmts = NULL;
    }

  sbitmap_free (processed);

  worklist.release ();
}
/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
          the dominance info is not invalidated after DCE1.  This is
          not an issue right now because we only run aggressive DCE
          as the last tree SSA pass, but keep this in mind when you
          start experimenting with pass ordering.  */

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  bool something_changed = false;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve chances that the loop will
     be proved finite in testcases such as loop-15.c and loop-24.c.  */
  if (aggressive)
    loop_optimizer_init (LOOPS_NORMAL
                         | LOOPS_HAVE_RECORDED_EXITS);

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      calculate_dominance_info (CDI_POST_DOMINATORS);
      cd = new control_dependences (create_edge_list ());

      visited_control_parents =
        sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (aggressive);

  if (aggressive)
    loop_optimizer_finalize ();

  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (aggressive);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  if (something_changed)
    {
      free_numbers_of_iterations_estimates ();
      if (scev_initialized_p ())
        scev_reset ();
      return TODO_update_ssa | TODO_cleanup_cfg;
    }
  return 0;
}
/* Pass entry points.  */

static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}
namespace {

const pass_data pass_data_dce =
{
  GIMPLE_PASS, /* type */
  "dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_dce : public gimple_opt_pass
{
public:
  pass_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_dce (); }

}; // class pass_dce

} // anon namespace

gimple_opt_pass *
make_pass_dce (gcc::context *ctxt)
{
  return new pass_dce (ctxt);
}
namespace {

const pass_data pass_data_cd_dce =
{
  GIMPLE_PASS, /* type */
  "cddce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CD_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cd_dce : public gimple_opt_pass
{
public:
  pass_cd_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cd_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_cd_dce (); }

}; // class pass_cd_dce

} // anon namespace

gimple_opt_pass *
make_pass_cd_dce (gcc::context *ctxt)
{
  return new pass_cd_dce (ctxt);
}