1 /* Dead code elimination pass for the GNU compiler.
2 Copyright (C) 2002-2013 Free Software Foundation, Inc.
3 Contributed by Ben Elliston <bje@redhat.com>
4 and Andrew MacLeod <amacleod@redhat.com>
5 Adapted to use control dependence by Steven Bosscher, SUSE Labs.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 3, or (at your option) any
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Dead code elimination.
27 Building an Optimizing Compiler,
28 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
30 Advanced Compiler Design and Implementation,
31 Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.
33 Dead-code elimination is the removal of statements which have no
34 impact on the program's output. "Dead statements" have no impact
35 on the program's output, while "necessary statements" may have
38 The algorithm consists of three phases:
39 1. Marking as necessary all statements known to be necessary,
40 e.g. most function calls, writing a value to memory, etc;
41 2. Propagating necessary statements, e.g., the statements
42 giving values to operands in necessary statements; and
43 3. Removing dead statements. */
47 #include "coretypes.h"
51 #include "gimple-pretty-print.h"
52 #include "basic-block.h"
55 #include "gimple-iterator.h"
56 #include "gimple-ssa.h"
58 #include "tree-phinodes.h"
59 #include "ssa-iterators.h"
60 #include "tree-ssanames.h"
61 #include "tree-ssa-loop-niter.h"
62 #include "tree-into-ssa.h"
64 #include "tree-pass.h"
67 #include "tree-scalar-evolution.h"
69 static struct stmt_stats
77 #define STMT_NECESSARY GF_PLF_1
79 static vec
<gimple
> worklist
;
81 /* Vector indicating an SSA name has already been processed and marked
83 static sbitmap processed
;
85 /* Vector indicating that the last statement of a basic block has already
86 been marked as necessary. */
87 static sbitmap last_stmt_necessary
;
89 /* Vector indicating that BB contains statements that are live. */
90 static sbitmap bb_contains_live_stmts
;
92 /* Before we can determine whether a control branch is dead, we need to
93 compute which blocks are control dependent on which edges.
95 We expect each block to be control dependent on very few edges so we
96 use a bitmap for each block recording its edges. An array holds the
97 bitmap. The Ith bit in the bitmap is set if that block is dependent
99 static control_dependences
*cd
;
101 /* Vector indicating that a basic block has already had all the edges
102 processed that it is control dependent on. */
103 static sbitmap visited_control_parents
;
105 /* TRUE if this pass alters the CFG (by removing control statements).
108 If this pass alters the CFG, then it will arrange for the dominators
110 static bool cfg_altered
;
113 /* If STMT is not already marked necessary, mark it, and add it to the
114 worklist if ADD_TO_WORKLIST is true. */
117 mark_stmt_necessary (gimple stmt
, bool add_to_worklist
)
121 if (gimple_plf (stmt
, STMT_NECESSARY
))
124 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
126 fprintf (dump_file
, "Marking useful stmt: ");
127 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
128 fprintf (dump_file
, "\n");
131 gimple_set_plf (stmt
, STMT_NECESSARY
, true);
133 worklist
.safe_push (stmt
);
134 if (bb_contains_live_stmts
&& !is_gimple_debug (stmt
))
135 bitmap_set_bit (bb_contains_live_stmts
, gimple_bb (stmt
)->index
);
139 /* Mark the statement defining operand OP as necessary. */
142 mark_operand_necessary (tree op
)
149 ver
= SSA_NAME_VERSION (op
);
150 if (bitmap_bit_p (processed
, ver
))
152 stmt
= SSA_NAME_DEF_STMT (op
);
153 gcc_assert (gimple_nop_p (stmt
)
154 || gimple_plf (stmt
, STMT_NECESSARY
));
157 bitmap_set_bit (processed
, ver
);
159 stmt
= SSA_NAME_DEF_STMT (op
);
162 if (gimple_plf (stmt
, STMT_NECESSARY
) || gimple_nop_p (stmt
))
165 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
167 fprintf (dump_file
, "marking necessary through ");
168 print_generic_expr (dump_file
, op
, 0);
169 fprintf (dump_file
, " stmt ");
170 print_gimple_stmt (dump_file
, stmt
, 0, 0);
173 gimple_set_plf (stmt
, STMT_NECESSARY
, true);
174 if (bb_contains_live_stmts
)
175 bitmap_set_bit (bb_contains_live_stmts
, gimple_bb (stmt
)->index
);
176 worklist
.safe_push (stmt
);
180 /* Mark STMT as necessary if it obviously is. Add it to the worklist if
181 it can make other statements necessary.
183 If AGGRESSIVE is false, control statements are conservatively marked as
187 mark_stmt_if_obviously_necessary (gimple stmt
, bool aggressive
)
189 /* With non-call exceptions, we have to assume that all statements could
190 throw. If a statement could throw, it can be deemed necessary. */
191 if (cfun
->can_throw_non_call_exceptions
192 && !cfun
->can_delete_dead_exceptions
193 && stmt_could_throw_p (stmt
))
195 mark_stmt_necessary (stmt
, true);
199 /* Statements that are implicitly live. Most function calls, asm
200 and return statements are required. Labels and GIMPLE_BIND nodes
201 are kept because they are control flow, and we have no way of
202 knowing whether they can be removed. DCE can eliminate all the
203 other statements in a block, and CFG can then remove the block
205 switch (gimple_code (stmt
))
209 mark_stmt_necessary (stmt
, false);
215 mark_stmt_necessary (stmt
, true);
220 tree callee
= gimple_call_fndecl (stmt
);
221 if (callee
!= NULL_TREE
222 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
)
223 switch (DECL_FUNCTION_CODE (callee
))
225 case BUILT_IN_MALLOC
:
226 case BUILT_IN_CALLOC
:
227 case BUILT_IN_ALLOCA
:
228 case BUILT_IN_ALLOCA_WITH_ALIGN
:
233 /* Most, but not all function calls are required. Function calls that
234 produce no result and have no side effects (i.e. const pure
235 functions) are unnecessary. */
236 if (gimple_has_side_effects (stmt
))
238 mark_stmt_necessary (stmt
, true);
241 if (!gimple_call_lhs (stmt
))
247 /* Debug temps without a value are not useful. ??? If we could
248 easily locate the debug temp bind stmt for a use thereof,
249 would could refrain from marking all debug temps here, and
250 mark them only if they're used. */
251 if (!gimple_debug_bind_p (stmt
)
252 || gimple_debug_bind_has_value_p (stmt
)
253 || TREE_CODE (gimple_debug_bind_get_var (stmt
)) != DEBUG_EXPR_DECL
)
254 mark_stmt_necessary (stmt
, false);
258 gcc_assert (!simple_goto_p (stmt
));
259 mark_stmt_necessary (stmt
, true);
263 gcc_assert (EDGE_COUNT (gimple_bb (stmt
)->succs
) == 2);
268 mark_stmt_necessary (stmt
, true);
272 if (TREE_CODE (gimple_assign_lhs (stmt
)) == SSA_NAME
273 && TREE_CLOBBER_P (gimple_assign_rhs1 (stmt
)))
281 /* If the statement has volatile operands, it needs to be preserved.
282 Same for statements that can alter control flow in unpredictable
284 if (gimple_has_volatile_ops (stmt
) || is_ctrl_altering_stmt (stmt
))
286 mark_stmt_necessary (stmt
, true);
290 if (stmt_may_clobber_global_p (stmt
))
292 mark_stmt_necessary (stmt
, true);
300 /* Mark the last statement of BB as necessary. */
303 mark_last_stmt_necessary (basic_block bb
)
305 gimple stmt
= last_stmt (bb
);
307 bitmap_set_bit (last_stmt_necessary
, bb
->index
);
308 bitmap_set_bit (bb_contains_live_stmts
, bb
->index
);
310 /* We actually mark the statement only if it is a control statement. */
311 if (stmt
&& is_ctrl_stmt (stmt
))
312 mark_stmt_necessary (stmt
, true);
316 /* Mark control dependent edges of BB as necessary. We have to do this only
317 once for each basic block so we set the appropriate bit after we're done.
319 When IGNORE_SELF is true, ignore BB in the list of control dependences. */
322 mark_control_dependent_edges_necessary (basic_block bb
, bool ignore_self
)
325 unsigned edge_number
;
326 bool skipped
= false;
328 gcc_assert (bb
!= EXIT_BLOCK_PTR
);
330 if (bb
== ENTRY_BLOCK_PTR
)
333 EXECUTE_IF_SET_IN_BITMAP (cd
->get_edges_dependent_on (bb
->index
),
336 basic_block cd_bb
= cd
->get_edge (edge_number
)->src
;
338 if (ignore_self
&& cd_bb
== bb
)
344 if (!bitmap_bit_p (last_stmt_necessary
, cd_bb
->index
))
345 mark_last_stmt_necessary (cd_bb
);
349 bitmap_set_bit (visited_control_parents
, bb
->index
);
353 /* Find obviously necessary statements. These are things like most function
354 calls, and stores to file level variables.
356 If EL is NULL, control statements are conservatively marked as
357 necessary. Otherwise it contains the list of edges used by control
358 dependence analysis. */
361 find_obviously_necessary_stmts (bool aggressive
)
364 gimple_stmt_iterator gsi
;
371 /* PHI nodes are never inherently necessary. */
372 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
374 phi
= gsi_stmt (gsi
);
375 gimple_set_plf (phi
, STMT_NECESSARY
, false);
378 /* Check all statements in the block. */
379 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
381 stmt
= gsi_stmt (gsi
);
382 gimple_set_plf (stmt
, STMT_NECESSARY
, false);
383 mark_stmt_if_obviously_necessary (stmt
, aggressive
);
387 /* Pure and const functions are finite and thus have no infinite loops in
389 flags
= flags_from_decl_or_type (current_function_decl
);
390 if ((flags
& (ECF_CONST
|ECF_PURE
)) && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
393 /* Prevent the empty possibly infinite loops from being removed. */
399 if (mark_irreducible_loops ())
403 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
404 if ((e
->flags
& EDGE_DFS_BACK
)
405 && (e
->flags
& EDGE_IRREDUCIBLE_LOOP
))
408 fprintf (dump_file
, "Marking back edge of irreducible loop %i->%i\n",
409 e
->src
->index
, e
->dest
->index
);
410 mark_control_dependent_edges_necessary (e
->dest
, false);
414 FOR_EACH_LOOP (li
, loop
, 0)
415 if (!finite_loop_p (loop
))
418 fprintf (dump_file
, "can not prove finiteness of loop %i\n", loop
->num
);
419 mark_control_dependent_edges_necessary (loop
->latch
, false);
426 /* Return true if REF is based on an aliased base, otherwise false. */
429 ref_may_be_aliased (tree ref
)
431 gcc_assert (TREE_CODE (ref
) != WITH_SIZE_EXPR
);
432 while (handled_component_p (ref
))
433 ref
= TREE_OPERAND (ref
, 0);
434 if (TREE_CODE (ref
) == MEM_REF
435 && TREE_CODE (TREE_OPERAND (ref
, 0)) == ADDR_EXPR
)
436 ref
= TREE_OPERAND (TREE_OPERAND (ref
, 0), 0);
437 return !(DECL_P (ref
)
438 && !may_be_aliased (ref
));
441 static bitmap visited
= NULL
;
442 static unsigned int longest_chain
= 0;
443 static unsigned int total_chain
= 0;
444 static unsigned int nr_walks
= 0;
445 static bool chain_ovfl
= false;
447 /* Worker for the walker that marks reaching definitions of REF,
448 which is based on a non-aliased decl, necessary. It returns
449 true whenever the defining statement of the current VDEF is
450 a kill for REF, as no dominating may-defs are necessary for REF
451 anymore. DATA points to the basic-block that contains the
452 stmt that refers to REF. */
455 mark_aliased_reaching_defs_necessary_1 (ao_ref
*ref
, tree vdef
, void *data
)
457 gimple def_stmt
= SSA_NAME_DEF_STMT (vdef
);
459 /* All stmts we visit are necessary. */
460 mark_operand_necessary (vdef
);
462 /* If the stmt lhs kills ref, then we can stop walking. */
463 if (gimple_has_lhs (def_stmt
)
464 && TREE_CODE (gimple_get_lhs (def_stmt
)) != SSA_NAME
465 /* The assignment is not necessarily carried out if it can throw
466 and we can catch it in the current function where we could inspect
468 ??? We only need to care about the RHS throwing. For aggregate
469 assignments or similar calls and non-call exceptions the LHS
470 might throw as well. */
471 && !stmt_can_throw_internal (def_stmt
))
473 tree base
, lhs
= gimple_get_lhs (def_stmt
);
474 HOST_WIDE_INT size
, offset
, max_size
;
476 base
= get_ref_base_and_extent (lhs
, &offset
, &size
, &max_size
);
477 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
478 so base == refd->base does not always hold. */
479 if (base
== ref
->base
)
481 /* For a must-alias check we need to be able to constrain
482 the accesses properly. */
483 if (size
!= -1 && size
== max_size
484 && ref
->max_size
!= -1)
486 if (offset
<= ref
->offset
487 && offset
+ size
>= ref
->offset
+ ref
->max_size
)
490 /* Or they need to be exactly the same. */
492 /* Make sure there is no induction variable involved
493 in the references (gcc.c-torture/execute/pr42142.c).
494 The simplest way is to check if the kill dominates
496 /* But when both are in the same block we cannot
497 easily tell whether we came from a backedge
498 unless we decide to compute stmt UIDs
500 && (basic_block
) data
!= gimple_bb (def_stmt
)
501 && dominated_by_p (CDI_DOMINATORS
, (basic_block
) data
,
502 gimple_bb (def_stmt
))
503 && operand_equal_p (ref
->ref
, lhs
, 0))
508 /* Otherwise keep walking. */
513 mark_aliased_reaching_defs_necessary (gimple stmt
, tree ref
)
517 gcc_assert (!chain_ovfl
);
518 ao_ref_init (&refd
, ref
);
519 chain
= walk_aliased_vdefs (&refd
, gimple_vuse (stmt
),
520 mark_aliased_reaching_defs_necessary_1
,
521 gimple_bb (stmt
), NULL
);
522 if (chain
> longest_chain
)
523 longest_chain
= chain
;
524 total_chain
+= chain
;
528 /* Worker for the walker that marks reaching definitions of REF, which
529 is not based on a non-aliased decl. For simplicity we need to end
530 up marking all may-defs necessary that are not based on a non-aliased
531 decl. The only job of this walker is to skip may-defs based on
532 a non-aliased decl. */
535 mark_all_reaching_defs_necessary_1 (ao_ref
*ref ATTRIBUTE_UNUSED
,
536 tree vdef
, void *data ATTRIBUTE_UNUSED
)
538 gimple def_stmt
= SSA_NAME_DEF_STMT (vdef
);
540 /* We have to skip already visited (and thus necessary) statements
541 to make the chaining work after we dropped back to simple mode. */
543 && bitmap_bit_p (processed
, SSA_NAME_VERSION (vdef
)))
545 gcc_assert (gimple_nop_p (def_stmt
)
546 || gimple_plf (def_stmt
, STMT_NECESSARY
));
550 /* We want to skip stores to non-aliased variables. */
552 && gimple_assign_single_p (def_stmt
))
554 tree lhs
= gimple_assign_lhs (def_stmt
);
555 if (!ref_may_be_aliased (lhs
))
559 /* We want to skip statments that do not constitute stores but have
560 a virtual definition. */
561 if (is_gimple_call (def_stmt
))
563 tree callee
= gimple_call_fndecl (def_stmt
);
564 if (callee
!= NULL_TREE
565 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
)
566 switch (DECL_FUNCTION_CODE (callee
))
568 case BUILT_IN_MALLOC
:
569 case BUILT_IN_CALLOC
:
570 case BUILT_IN_ALLOCA
:
571 case BUILT_IN_ALLOCA_WITH_ALIGN
:
579 mark_operand_necessary (vdef
);
585 mark_all_reaching_defs_necessary (gimple stmt
)
587 walk_aliased_vdefs (NULL
, gimple_vuse (stmt
),
588 mark_all_reaching_defs_necessary_1
, NULL
, &visited
);
591 /* Return true for PHI nodes with one or identical arguments
594 degenerate_phi_p (gimple phi
)
597 tree op
= gimple_phi_arg_def (phi
, 0);
598 for (i
= 1; i
< gimple_phi_num_args (phi
); i
++)
599 if (gimple_phi_arg_def (phi
, i
) != op
)
604 /* Propagate necessity using the operands of necessary statements.
605 Process the uses on each statement in the worklist, and add all
606 feeding statements which contribute to the calculation of this
607 value to the worklist.
609 In conservative mode, EL is NULL. */
612 propagate_necessity (bool aggressive
)
616 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
617 fprintf (dump_file
, "\nProcessing worklist:\n");
619 while (worklist
.length () > 0)
621 /* Take STMT from worklist. */
622 stmt
= worklist
.pop ();
624 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
626 fprintf (dump_file
, "processing: ");
627 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
628 fprintf (dump_file
, "\n");
633 /* Mark the last statement of the basic blocks on which the block
634 containing STMT is control dependent, but only if we haven't
636 basic_block bb
= gimple_bb (stmt
);
637 if (bb
!= ENTRY_BLOCK_PTR
638 && !bitmap_bit_p (visited_control_parents
, bb
->index
))
639 mark_control_dependent_edges_necessary (bb
, false);
642 if (gimple_code (stmt
) == GIMPLE_PHI
643 /* We do not process virtual PHI nodes nor do we track their
645 && !virtual_operand_p (gimple_phi_result (stmt
)))
647 /* PHI nodes are somewhat special in that each PHI alternative has
648 data and control dependencies. All the statements feeding the
649 PHI node's arguments are always necessary. In aggressive mode,
650 we also consider the control dependent edges leading to the
651 predecessor block associated with each PHI alternative as
655 for (k
= 0; k
< gimple_phi_num_args (stmt
); k
++)
657 tree arg
= PHI_ARG_DEF (stmt
, k
);
658 if (TREE_CODE (arg
) == SSA_NAME
)
659 mark_operand_necessary (arg
);
662 /* For PHI operands it matters from where the control flow arrives
663 to the BB. Consider the following example:
673 We need to mark control dependence of the empty basic blocks, since they
674 contains computation of PHI operands.
676 Doing so is too restrictive in the case the predecestor block is in
682 for (i = 0; i<1000; ++i)
688 There is PHI for J in the BB containing return statement.
689 In this case the control dependence of predecestor block (that is
690 within the empty loop) also contains the block determining number
691 of iterations of the block that would prevent removing of empty
694 This scenario can be avoided by splitting critical edges.
695 To save the critical edge splitting pass we identify how the control
696 dependence would look like if the edge was split.
698 Consider the modified CFG created from current CFG by splitting
699 edge B->C. In the postdominance tree of modified CFG, C' is
700 always child of C. There are two cases how chlids of C' can look
705 In this case the only basic block C' is control dependent on is B.
707 2) C' has single child that is B
709 In this case control dependence of C' is same as control
710 dependence of B in original CFG except for block B itself.
711 (since C' postdominate B in modified CFG)
713 Now how to decide what case happens? There are two basic options:
715 a) C postdominate B. Then C immediately postdominate B and
716 case 2 happens iff there is no other way from B to C except
719 There is other way from B to C iff there is succesor of B that
720 is not postdominated by B. Testing this condition is somewhat
721 expensive, because we need to iterate all succesors of B.
722 We are safe to assume that this does not happen: we will mark B
723 as needed when processing the other path from B to C that is
724 conrol dependent on B and marking control dependencies of B
725 itself is harmless because they will be processed anyway after
726 processing control statement in B.
728 b) C does not postdominate B. Always case 1 happens since there is
729 path from C to exit that does not go through B and thus also C'. */
731 if (aggressive
&& !degenerate_phi_p (stmt
))
733 for (k
= 0; k
< gimple_phi_num_args (stmt
); k
++)
735 basic_block arg_bb
= gimple_phi_arg_edge (stmt
, k
)->src
;
738 != get_immediate_dominator (CDI_POST_DOMINATORS
, arg_bb
))
740 if (!bitmap_bit_p (last_stmt_necessary
, arg_bb
->index
))
741 mark_last_stmt_necessary (arg_bb
);
743 else if (arg_bb
!= ENTRY_BLOCK_PTR
744 && !bitmap_bit_p (visited_control_parents
,
746 mark_control_dependent_edges_necessary (arg_bb
, true);
752 /* Propagate through the operands. Examine all the USE, VUSE and
753 VDEF operands in this statement. Mark all the statements
754 which feed this statement's uses as necessary. */
758 /* If this is a call to free which is directly fed by an
759 allocation function do not mark that necessary through
760 processing the argument. */
761 if (gimple_call_builtin_p (stmt
, BUILT_IN_FREE
))
763 tree ptr
= gimple_call_arg (stmt
, 0);
766 /* If the pointer we free is defined by an allocation
767 function do not add the call to the worklist. */
768 if (TREE_CODE (ptr
) == SSA_NAME
769 && is_gimple_call (def_stmt
= SSA_NAME_DEF_STMT (ptr
))
770 && (def_callee
= gimple_call_fndecl (def_stmt
))
771 && DECL_BUILT_IN_CLASS (def_callee
) == BUILT_IN_NORMAL
772 && (DECL_FUNCTION_CODE (def_callee
) == BUILT_IN_MALLOC
773 || DECL_FUNCTION_CODE (def_callee
) == BUILT_IN_CALLOC
))
777 FOR_EACH_SSA_TREE_OPERAND (use
, stmt
, iter
, SSA_OP_USE
)
778 mark_operand_necessary (use
);
780 use
= gimple_vuse (stmt
);
784 /* If we dropped to simple mode make all immediately
785 reachable definitions necessary. */
788 mark_all_reaching_defs_necessary (stmt
);
792 /* For statements that may load from memory (have a VUSE) we
793 have to mark all reaching (may-)definitions as necessary.
794 We partition this task into two cases:
795 1) explicit loads based on decls that are not aliased
796 2) implicit loads (like calls) and explicit loads not
797 based on decls that are not aliased (like indirect
798 references or loads from globals)
799 For 1) we mark all reaching may-defs as necessary, stopping
800 at dominating kills. For 2) we want to mark all dominating
801 references necessary, but non-aliased ones which we handle
802 in 1). By keeping a global visited bitmap for references
803 we walk for 2) we avoid quadratic behavior for those. */
805 if (is_gimple_call (stmt
))
807 tree callee
= gimple_call_fndecl (stmt
);
810 /* Calls to functions that are merely acting as barriers
811 or that only store to memory do not make any previous
813 if (callee
!= NULL_TREE
814 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
815 && (DECL_FUNCTION_CODE (callee
) == BUILT_IN_MEMSET
816 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_MEMSET_CHK
817 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_MALLOC
818 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_CALLOC
819 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_FREE
820 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_VA_END
821 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_ALLOCA
822 || (DECL_FUNCTION_CODE (callee
)
823 == BUILT_IN_ALLOCA_WITH_ALIGN
)
824 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_STACK_SAVE
825 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_STACK_RESTORE
826 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_ASSUME_ALIGNED
))
829 /* Calls implicitly load from memory, their arguments
830 in addition may explicitly perform memory loads. */
831 mark_all_reaching_defs_necessary (stmt
);
832 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
834 tree arg
= gimple_call_arg (stmt
, i
);
835 if (TREE_CODE (arg
) == SSA_NAME
836 || is_gimple_min_invariant (arg
))
838 if (TREE_CODE (arg
) == WITH_SIZE_EXPR
)
839 arg
= TREE_OPERAND (arg
, 0);
840 if (!ref_may_be_aliased (arg
))
841 mark_aliased_reaching_defs_necessary (stmt
, arg
);
844 else if (gimple_assign_single_p (stmt
))
847 /* If this is a load mark things necessary. */
848 rhs
= gimple_assign_rhs1 (stmt
);
849 if (TREE_CODE (rhs
) != SSA_NAME
850 && !is_gimple_min_invariant (rhs
)
851 && TREE_CODE (rhs
) != CONSTRUCTOR
)
853 if (!ref_may_be_aliased (rhs
))
854 mark_aliased_reaching_defs_necessary (stmt
, rhs
);
856 mark_all_reaching_defs_necessary (stmt
);
859 else if (gimple_code (stmt
) == GIMPLE_RETURN
)
861 tree rhs
= gimple_return_retval (stmt
);
862 /* A return statement may perform a load. */
864 && TREE_CODE (rhs
) != SSA_NAME
865 && !is_gimple_min_invariant (rhs
)
866 && TREE_CODE (rhs
) != CONSTRUCTOR
)
868 if (!ref_may_be_aliased (rhs
))
869 mark_aliased_reaching_defs_necessary (stmt
, rhs
);
871 mark_all_reaching_defs_necessary (stmt
);
874 else if (gimple_code (stmt
) == GIMPLE_ASM
)
877 mark_all_reaching_defs_necessary (stmt
);
878 /* Inputs may perform loads. */
879 for (i
= 0; i
< gimple_asm_ninputs (stmt
); ++i
)
881 tree op
= TREE_VALUE (gimple_asm_input_op (stmt
, i
));
882 if (TREE_CODE (op
) != SSA_NAME
883 && !is_gimple_min_invariant (op
)
884 && TREE_CODE (op
) != CONSTRUCTOR
885 && !ref_may_be_aliased (op
))
886 mark_aliased_reaching_defs_necessary (stmt
, op
);
889 else if (gimple_code (stmt
) == GIMPLE_TRANSACTION
)
891 /* The beginning of a transaction is a memory barrier. */
892 /* ??? If we were really cool, we'd only be a barrier
893 for the memories touched within the transaction. */
894 mark_all_reaching_defs_necessary (stmt
);
899 /* If we over-used our alias oracle budget drop to simple
900 mode. The cost metric allows quadratic behavior
901 (number of uses times number of may-defs queries) up to
902 a constant maximal number of queries and after that falls back to
903 super-linear complexity. */
904 if (/* Constant but quadratic for small functions. */
905 total_chain
> 128 * 128
906 /* Linear in the number of may-defs. */
907 && total_chain
> 32 * longest_chain
908 /* Linear in the number of uses. */
909 && total_chain
> nr_walks
* 32)
913 bitmap_clear (visited
);
919 /* Remove dead PHI nodes from block BB. */
922 remove_dead_phis (basic_block bb
)
924 bool something_changed
= false;
926 gimple_stmt_iterator gsi
;
928 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
);)
931 phi
= gsi_stmt (gsi
);
933 /* We do not track necessity of virtual PHI nodes. Instead do
934 very simple dead PHI removal here. */
935 if (virtual_operand_p (gimple_phi_result (phi
)))
937 /* Virtual PHI nodes with one or identical arguments
939 if (degenerate_phi_p (phi
))
941 tree vdef
= gimple_phi_result (phi
);
942 tree vuse
= gimple_phi_arg_def (phi
, 0);
945 imm_use_iterator iter
;
947 FOR_EACH_IMM_USE_STMT (use_stmt
, iter
, vdef
)
948 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
949 SET_USE (use_p
, vuse
);
950 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef
)
951 && TREE_CODE (vuse
) == SSA_NAME
)
952 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse
) = 1;
955 gimple_set_plf (phi
, STMT_NECESSARY
, true);
958 if (!gimple_plf (phi
, STMT_NECESSARY
))
960 something_changed
= true;
961 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
963 fprintf (dump_file
, "Deleting : ");
964 print_gimple_stmt (dump_file
, phi
, 0, TDF_SLIM
);
965 fprintf (dump_file
, "\n");
968 remove_phi_node (&gsi
, true);
969 stats
.removed_phis
++;
975 return something_changed
;
978 /* Forward edge E to respective POST_DOM_BB and update PHIs. */
981 forward_edge_to_pdom (edge e
, basic_block post_dom_bb
)
983 gimple_stmt_iterator gsi
;
987 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
988 fprintf (dump_file
, "Redirecting edge %i->%i to %i\n", e
->src
->index
,
989 e
->dest
->index
, post_dom_bb
->index
);
991 e2
= redirect_edge_and_branch (e
, post_dom_bb
);
994 /* If edge was already around, no updating is necessary. */
998 if (!gimple_seq_empty_p (phi_nodes (post_dom_bb
)))
1000 /* We are sure that for every live PHI we are seeing control dependent BB.
1001 This means that we can pick any edge to duplicate PHI args from. */
1002 FOR_EACH_EDGE (e2
, ei
, post_dom_bb
->preds
)
1005 for (gsi
= gsi_start_phis (post_dom_bb
); !gsi_end_p (gsi
);)
1007 gimple phi
= gsi_stmt (gsi
);
1009 source_location locus
;
1011 /* PHIs for virtuals have no control dependency relation on them.
1012 We are lost here and must force renaming of the symbol. */
1013 if (virtual_operand_p (gimple_phi_result (phi
)))
1015 mark_virtual_phi_result_for_renaming (phi
);
1016 remove_phi_node (&gsi
, true);
1020 /* Dead PHI do not imply control dependency. */
1021 if (!gimple_plf (phi
, STMT_NECESSARY
))
1027 op
= gimple_phi_arg_def (phi
, e2
->dest_idx
);
1028 locus
= gimple_phi_arg_location (phi
, e2
->dest_idx
);
1029 add_phi_arg (phi
, op
, e
, locus
);
1030 /* The resulting PHI if not dead can only be degenerate. */
1031 gcc_assert (degenerate_phi_p (phi
));
1038 /* Remove dead statement pointed to by iterator I. Receives the basic block BB
1039 containing I so that we don't have to look it up. */
1042 remove_dead_stmt (gimple_stmt_iterator
*i
, basic_block bb
)
1044 gimple stmt
= gsi_stmt (*i
);
1046 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1048 fprintf (dump_file
, "Deleting : ");
1049 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
1050 fprintf (dump_file
, "\n");
1055 /* If we have determined that a conditional branch statement contributes
1056 nothing to the program, then we not only remove it, but we also change
1057 the flow graph so that the current block will simply fall-thru to its
1058 immediate post-dominator. The blocks we are circumventing will be
1059 removed by cleanup_tree_cfg if this change in the flow graph makes them
1061 if (is_ctrl_stmt (stmt
))
1063 basic_block post_dom_bb
;
1067 post_dom_bb
= get_immediate_dominator (CDI_POST_DOMINATORS
, bb
);
1069 e
= find_edge (bb
, post_dom_bb
);
1071 /* If edge is already there, try to use it. This avoids need to update
1072 PHI nodes. Also watch for cases where post dominator does not exists
1073 or is exit block. These can happen for infinite loops as we create
1074 fake edges in the dominator tree. */
1077 else if (! post_dom_bb
|| post_dom_bb
== EXIT_BLOCK_PTR
)
1078 e
= EDGE_SUCC (bb
, 0);
1080 e
= forward_edge_to_pdom (EDGE_SUCC (bb
, 0), post_dom_bb
);
1082 e
->probability
= REG_BR_PROB_BASE
;
1083 e
->count
= bb
->count
;
1085 /* The edge is no longer associated with a conditional, so it does
1086 not have TRUE/FALSE flags. */
1087 e
->flags
&= ~(EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
);
1089 /* The lone outgoing edge from BB will be a fallthru edge. */
1090 e
->flags
|= EDGE_FALLTHRU
;
1092 /* Remove the remaining outgoing edges. */
1093 for (ei
= ei_start (bb
->succs
); (e2
= ei_safe_edge (ei
)); )
1103 /* If this is a store into a variable that is being optimized away,
1104 add a debug bind stmt if possible. */
1105 if (MAY_HAVE_DEBUG_STMTS
1106 && gimple_assign_single_p (stmt
)
1107 && is_gimple_val (gimple_assign_rhs1 (stmt
)))
1109 tree lhs
= gimple_assign_lhs (stmt
);
1110 if ((TREE_CODE (lhs
) == VAR_DECL
|| TREE_CODE (lhs
) == PARM_DECL
)
1111 && !DECL_IGNORED_P (lhs
)
1112 && is_gimple_reg_type (TREE_TYPE (lhs
))
1113 && !is_global_var (lhs
)
1114 && !DECL_HAS_VALUE_EXPR_P (lhs
))
1116 tree rhs
= gimple_assign_rhs1 (stmt
);
1118 = gimple_build_debug_bind (lhs
, unshare_expr (rhs
), stmt
);
1119 gsi_insert_after (i
, note
, GSI_SAME_STMT
);
1123 unlink_stmt_vdef (stmt
);
1124 gsi_remove (i
, true);
1125 release_defs (stmt
);
1128 /* Eliminate unnecessary statements. Any instruction not marked as necessary
1129 contributes nothing to the program, and can be deleted. */
/* NOTE(review): this extract is fragmented; original lines (braces, the
   `static bool` return-type line, some declarations) are missing, so the
   comments below describe intent without restating missing structure.
   Phase 3 of DCE: walk every basic block backward, remove each statement
   whose STMT_NECESSARY pass-local flag is unset, then clean up blocks made
   unreachable and dead PHI nodes.  Returns true if anything changed.  */
1132 eliminate_unnecessary_stmts (void)
1134 bool something_changed
= false;
1136 gimple_stmt_iterator gsi
, psi
;
1141 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1142 fprintf (dump_file
, "\nEliminating unnecessary statements:\n");
1144 clear_special_calls ();
1146 /* Walking basic blocks and statements in reverse order avoids
1147 releasing SSA names before any other DEFs that refer to them are
1148 released. This helps avoid loss of debug information, as we get
1149 a chance to propagate all RHSs of removed SSAs into debug uses,
1150 rather than only the latest ones. E.g., consider:
1156 If we were to release x_3 before a_5, when we reached a_5 and
1157 tried to substitute it into the debug stmt, we'd see x_3 there,
1158 but x_3's DEF, type, etc would have already been disconnected.
1159 By going backwards, the debug stmt first changes to:
1161 # DEBUG a => x_3 - b_4
1165 # DEBUG a => y_1 + z_2 - b_4
/* Reverse order is achieved by visiting all blocks dominated by the
   entry block's single successor; dominators must be available.  */
1168 gcc_assert (dom_info_available_p (CDI_DOMINATORS
));
1169 h
= get_all_dominated_blocks (CDI_DOMINATORS
, single_succ (ENTRY_BLOCK_PTR
));
1175 /* Remove dead statements. */
/* PSI presumably holds the previous iterator position so removal of the
   current statement does not invalidate the walk — TODO confirm against
   the missing loop-body lines.  */
1176 for (gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
); gsi
= psi
)
1178 stmt
= gsi_stmt (gsi
);
1185 /* We can mark a call to free as not necessary if the
1186 defining statement of its argument is an allocation
1187 function and that is not necessary itself. */
1188 if (gimple_call_builtin_p (stmt
, BUILT_IN_FREE
))
1190 tree ptr
= gimple_call_arg (stmt
, 0);
/* Only SSA-name arguments can be traced to their allocation site.  */
1193 if (TREE_CODE (ptr
) != SSA_NAME
)
1195 def_stmt
= SSA_NAME_DEF_STMT (ptr
);
1196 if (!is_gimple_call (def_stmt
)
1197 || gimple_plf (def_stmt
, STMT_NECESSARY
))
1199 callee2
= gimple_call_fndecl (def_stmt
);
/* The pairing only applies when the pointer comes from a plain
   malloc/calloc builtin.  */
1200 if (callee2
== NULL_TREE
1201 || DECL_BUILT_IN_CLASS (callee2
) != BUILT_IN_NORMAL
1202 || (DECL_FUNCTION_CODE (callee2
) != BUILT_IN_MALLOC
1203 && DECL_FUNCTION_CODE (callee2
) != BUILT_IN_CALLOC
))
/* Demote the free() call: its matching allocation is itself dead.  */
1205 gimple_set_plf (stmt
, STMT_NECESSARY
, false);
1208 /* If GSI is not necessary then remove it. */
/* Debug statements do not count as a real change to the program.  */
1209 if (!gimple_plf (stmt
, STMT_NECESSARY
))
1211 if (!is_gimple_debug (stmt
))
1212 something_changed
= true;
1213 remove_dead_stmt (&gsi
, bb
);
1215 else if (is_gimple_call (stmt
))
1217 tree name
= gimple_call_lhs (stmt
);
1219 notice_special_calls (stmt
);
1221 /* When LHS of var = call (); is dead, simplify it into
1222 call (); saving one operand. */
1224 && TREE_CODE (name
) == SSA_NAME
1225 && !bitmap_bit_p (processed
, SSA_NAME_VERSION (name
))
1226 /* Avoid doing so for allocation calls which we
1227 did not mark as necessary, it will confuse the
1228 special logic we apply to malloc/free pair removal. */
1229 && (!(call
= gimple_call_fndecl (stmt
))
1230 || DECL_BUILT_IN_CLASS (call
) != BUILT_IN_NORMAL
1231 || (DECL_FUNCTION_CODE (call
) != BUILT_IN_MALLOC
1232 && DECL_FUNCTION_CODE (call
) != BUILT_IN_CALLOC
1233 && DECL_FUNCTION_CODE (call
) != BUILT_IN_ALLOCA
1234 && (DECL_FUNCTION_CODE (call
)
1235 != BUILT_IN_ALLOCA_WITH_ALIGN
))))
1237 something_changed
= true;
1238 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1240 fprintf (dump_file
, "Deleting LHS of call: ");
1241 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
1242 fprintf (dump_file
, "\n");
/* Strip the dead LHS, fix any EH info the change affects, and
   release the now-unused SSA name.  */
1245 gimple_call_set_lhs (stmt
, NULL_TREE
);
1246 maybe_clean_or_replace_eh_stmt (stmt
, stmt
);
1248 release_ssa_name (name
);
1256 /* Since we don't track liveness of virtual PHI nodes, it is possible that we
1257 rendered some PHI nodes unreachable while they are still in use.
1258 Mark them for renaming. */
1261 basic_block prev_bb
;
1263 find_unreachable_blocks ();
1265 /* Delete all unreachable basic blocks in reverse dominator order. */
/* PREV_BB is captured before any deletion so the walk survives removal
   of the current block.  */
1266 for (bb
= EXIT_BLOCK_PTR
->prev_bb
; bb
!= ENTRY_BLOCK_PTR
; bb
= prev_bb
)
1268 prev_bb
= bb
->prev_bb
;
/* A block with no live statements, or one no longer reachable, may
   still feed virtual PHIs elsewhere; scan them below.  */
1270 if (!bitmap_bit_p (bb_contains_live_stmts
, bb
->index
)
1271 || !(bb
->flags
& BB_REACHABLE
))
1273 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1274 if (virtual_operand_p (gimple_phi_result (gsi_stmt (gsi
))))
1277 imm_use_iterator iter
;
/* Look for a live use of this virtual PHI result in a block that
   will survive; if one exists, the result must be renamed.  */
1279 FOR_EACH_IMM_USE_STMT (stmt
, iter
, gimple_phi_result (gsi_stmt (gsi
)))
1281 if (!(gimple_bb (stmt
)->flags
& BB_REACHABLE
))
1283 if (gimple_code (stmt
) == GIMPLE_PHI
1284 || gimple_plf (stmt
, STMT_NECESSARY
))
1287 BREAK_FROM_IMM_USE_STMT (iter
);
1291 mark_virtual_phi_result_for_renaming (gsi_stmt (gsi
));
1294 if (!(bb
->flags
& BB_REACHABLE
))
1296 /* Speed up the removal of blocks that don't
1297 dominate others. Walking backwards, this should
1298 be the common case. ??? Do we need to recompute
1299 dominators because of cfg_altered? */
1300 if (!MAY_HAVE_DEBUG_STMTS
1301 || !first_dom_son (CDI_DOMINATORS
, bb
))
1302 delete_basic_block (bb
);
/* Otherwise collect and delete the whole dominated subtree; the
   iteration lines over H are missing from this extract.  */
1305 h
= get_all_dominated_blocks (CDI_DOMINATORS
, bb
);
1310 prev_bb
= bb
->prev_bb
;
1311 /* Rearrangements to the CFG may have failed
1312 to update the dominators tree, so that
1313 formerly-dominated blocks are now
1314 otherwise reachable. */
1315 if (!!(bb
->flags
& BB_REACHABLE
))
1317 delete_basic_block (bb
);
1328 /* Remove dead PHI nodes. */
1329 something_changed
|= remove_dead_phis (bb
);
1332 return something_changed
;
1336 /* Print out removed statement statistics. */
/* NOTE(review): the function header (name/return-type lines) is missing
   from this extract; the body dumps DCE counters to dump_file.  */
1343 percg
= ((float) stats
.removed
/ (float) stats
.total
) * 100;
1344 fprintf (dump_file
, "Removed %d of %d statements (%d%%)\n",
1345 stats
.removed
, stats
.total
, (int) percg
);
/* Presumably guards the division below against total_phis == 0; the
   lines between the test and the division are missing — verify.  */
1347 if (stats
.total_phis
== 0)
1350 percg
= ((float) stats
.removed_phis
/ (float) stats
.total_phis
) * 100;
1352 fprintf (dump_file
, "Removed %d of %d PHI nodes (%d%%)\n",
1353 stats
.removed_phis
, stats
.total_phis
, (int) percg
);
1356 /* Initialization for this pass. Set up the used data structures. */
/* Zeroes the statistics block, allocates the per-block and per-SSA-name
   bitmaps used by the pass, and resets the worklist/cfg_altered state.  */
1359 tree_dce_init (bool aggressive
)
1361 memset ((void *) &stats
, 0, sizeof (stats
));
/* The next two bitmaps track per-BB liveness info.  NOTE(review): the
   original numbering jumps here, so the `if (aggressive)` guard that
   presumably wraps these allocations is missing from this extract.  */
1365 last_stmt_necessary
= sbitmap_alloc (last_basic_block
);
1366 bitmap_clear (last_stmt_necessary
);
1367 bb_contains_live_stmts
= sbitmap_alloc (last_basic_block
);
1368 bitmap_clear (bb_contains_live_stmts
);
/* One bit per SSA name version (+1 for the unused version 0).  */
1371 processed
= sbitmap_alloc (num_ssa_names
+ 1);
1372 bitmap_clear (processed
);
1374 worklist
.create (64);
1375 cfg_altered
= false;
1378 /* Cleanup after this pass. */
/* Frees the bitmaps allocated by tree_dce_init and releases the worklist.
   NOTE(review): the `if (aggressive)` guard around the first group of
   frees is missing from this extract — the aggressive-only structures
   mirror the allocations in tree_dce_init.  */
1381 tree_dce_done (bool aggressive
)
1386 sbitmap_free (visited_control_parents
);
1387 sbitmap_free (last_stmt_necessary
);
1388 sbitmap_free (bb_contains_live_stmts
);
/* NULL the pointer: eliminate_unnecessary_stmts tests this bitmap.  */
1389 bb_contains_live_stmts
= NULL
;
1392 sbitmap_free (processed
);
1394 worklist
.release ();
1397 /* Main routine to eliminate dead code.
1399 AGGRESSIVE controls the aggressiveness of the algorithm.
1400 In conservative mode, we ignore control dependence and simply declare
1401 all but the most trivially dead branches necessary. This mode is fast.
1402 In aggressive mode, control dependences are taken into account, which
1403 results in more dead code elimination, but at the cost of some time.
1405 FIXME: Aggressive mode before PRE doesn't work currently because
1406 the dominance info is not invalidated after DCE1. This is
1407 not an issue right now because we only run aggressive DCE
1408 as the last tree SSA pass, but keep this in mind when you
1409 start experimenting with pass ordering. */
/* Driver: init -> mark obviously-necessary stmts -> propagate necessity
   -> eliminate unnecessary stmts -> teardown.  Returns TODO flags for
   the pass manager when something changed.  */
1412 perform_tree_ssa_dce (bool aggressive
)
1414 bool something_changed
= 0;
1416 calculate_dominance_info (CDI_DOMINATORS
);
1418 /* Preheaders are needed for SCEV to work.
1419 Simple lateches and recorded exits improve chances that loop will
1420 proved to be finite in testcases such as in loop-15.c and loop-24.c */
/* NOTE(review): the `if (aggressive)` guards around the loop-optimizer
   and control-dependence setup are missing from this extract.  */
1422 loop_optimizer_init (LOOPS_NORMAL
1423 | LOOPS_HAVE_RECORDED_EXITS
);
1425 tree_dce_init (aggressive
);
1429 /* Compute control dependence. */
1430 calculate_dominance_info (CDI_POST_DOMINATORS
);
1431 cd
= new control_dependences (create_edge_list ());
1433 visited_control_parents
= sbitmap_alloc (last_basic_block
);
1434 bitmap_clear (visited_control_parents
);
1436 mark_dfs_back_edges ();
/* Phase 1: seed the worklist with statements necessary on their own.  */
1439 find_obviously_necessary_stmts (aggressive
);
1442 loop_optimizer_finalize ();
/* Phase 2: transitively mark operands of necessary statements.  */
1448 visited
= BITMAP_ALLOC (NULL
);
1449 propagate_necessity (aggressive
);
1450 BITMAP_FREE (visited
);
/* Phase 3: actually delete what was never marked necessary.  */
1452 something_changed
|= eliminate_unnecessary_stmts ();
1453 something_changed
|= cfg_altered
;
1455 /* We do not update postdominators, so free them unconditionally. */
1456 free_dominance_info (CDI_POST_DOMINATORS
);
1458 /* If we removed paths in the CFG, then we need to update
1459 dominators as well. I haven't investigated the possibility
1460 of incrementally updating dominators. */
1462 free_dominance_info (CDI_DOMINATORS
);
1464 statistics_counter_event (cfun
, "Statements deleted", stats
.removed
);
1465 statistics_counter_event (cfun
, "PHI nodes deleted", stats
.removed_phis
);
1467 /* Debugging dumps. */
1468 if (dump_file
&& (dump_flags
& (TDF_STATS
|TDF_DETAILS
)))
1471 tree_dce_done (aggressive
);
1473 if (something_changed
)
1474 return TODO_update_ssa
| TODO_cleanup_cfg
;
1478 /* Pass entry points. */
/* tree_ssa_dce: plain conservative DCE.  NOTE(review): the function
   header lines are missing from this extract; only the body remains.  */
1482 return perform_tree_ssa_dce (/*aggressive=*/false);
/* Conservative DCE variant run inside the loop pipeline; on change it
   additionally invalidates cached niter estimates (the guard/return
   lines are missing from this extract).  */
1486 tree_ssa_dce_loop (void)
1489 todo
= perform_tree_ssa_dce (/*aggressive=*/false);
/* Removed statements can invalidate recorded loop-iteration bounds.  */
1492 free_numbers_of_iterations_estimates ();
/* Control-dependence-aware DCE; aggressive only at -O2 and above.  */
1499 tree_ssa_cd_dce (void)
1501 return perform_tree_ssa_dce (/*aggressive=*/optimize
>= 2);
/* Gate for all three DCE passes: run only when -ftree-dce is enabled.
   NOTE(review): the function header lines are missing from this extract.  */
1507 return flag_tree_dce
!= 0;
/* Pass descriptor for the plain DCE pass.  NOTE(review): the `"dce"`
   name field line is missing from this extract (numbering gap).  */
1512 const pass_data pass_data_dce
=
1514 GIMPLE_PASS
, /* type */
1516 OPTGROUP_NONE
, /* optinfo_flags */
1517 true, /* has_gate */
1518 true, /* has_execute */
1519 TV_TREE_DCE
, /* tv_id */
1520 ( PROP_cfg
| PROP_ssa
), /* properties_required */
1521 0, /* properties_provided */
1522 0, /* properties_destroyed */
1523 0, /* todo_flags_start */
1524 TODO_verify_ssa
, /* todo_flags_finish */
/* Pass-manager wrapper for plain DCE; clone() allows multiple pipeline
   instances sharing the same descriptor.  */
1527 class pass_dce
: public gimple_opt_pass
1530 pass_dce (gcc::context
*ctxt
)
1531 : gimple_opt_pass (pass_data_dce
, ctxt
)
1534 /* opt_pass methods: */
1535 opt_pass
* clone () { return new pass_dce (m_ctxt
); }
1536 bool gate () { return gate_dce (); }
1537 unsigned int execute () { return tree_ssa_dce (); }
1539 }; // class pass_dce
/* Factory used by the pass manager to instantiate pass_dce.
   NOTE(review): the return-type line is missing from this extract.  */
1544 make_pass_dce (gcc::context
*ctxt
)
1546 return new pass_dce (ctxt
);
/* Pass descriptor for the loop-pipeline DCE variant ("dceloop").  */
1551 const pass_data pass_data_dce_loop
=
1553 GIMPLE_PASS
, /* type */
1554 "dceloop", /* name */
1555 OPTGROUP_NONE
, /* optinfo_flags */
1556 true, /* has_gate */
1557 true, /* has_execute */
1558 TV_TREE_DCE
, /* tv_id */
1559 ( PROP_cfg
| PROP_ssa
), /* properties_required */
1560 0, /* properties_provided */
1561 0, /* properties_destroyed */
1562 0, /* todo_flags_start */
1563 TODO_verify_ssa
, /* todo_flags_finish */
/* Pass-manager wrapper for the loop-pipeline DCE variant.  */
1566 class pass_dce_loop
: public gimple_opt_pass
1569 pass_dce_loop (gcc::context
*ctxt
)
1570 : gimple_opt_pass (pass_data_dce_loop
, ctxt
)
1573 /* opt_pass methods: */
1574 opt_pass
* clone () { return new pass_dce_loop (m_ctxt
); }
1575 bool gate () { return gate_dce (); }
1576 unsigned int execute () { return tree_ssa_dce_loop (); }
1578 }; // class pass_dce_loop
/* Factory used by the pass manager to instantiate pass_dce_loop.
   NOTE(review): the return-type line is missing from this extract.  */
1583 make_pass_dce_loop (gcc::context
*ctxt
)
1585 return new pass_dce_loop (ctxt
);
/* Pass descriptor for control-dependence DCE.  NOTE(review): the name
   field line ("cddce") is missing from this extract (numbering gap).
   Unlike the other two descriptors it also requests flow verification.  */
1590 const pass_data pass_data_cd_dce
=
1592 GIMPLE_PASS
, /* type */
1594 OPTGROUP_NONE
, /* optinfo_flags */
1595 true, /* has_gate */
1596 true, /* has_execute */
1597 TV_TREE_CD_DCE
, /* tv_id */
1598 ( PROP_cfg
| PROP_ssa
), /* properties_required */
1599 0, /* properties_provided */
1600 0, /* properties_destroyed */
1601 0, /* todo_flags_start */
1602 ( TODO_verify_ssa
| TODO_verify_flow
), /* todo_flags_finish */
/* Pass-manager wrapper for control-dependence DCE.  */
1605 class pass_cd_dce
: public gimple_opt_pass
1608 pass_cd_dce (gcc::context
*ctxt
)
1609 : gimple_opt_pass (pass_data_cd_dce
, ctxt
)
1612 /* opt_pass methods: */
1613 opt_pass
* clone () { return new pass_cd_dce (m_ctxt
); }
1614 bool gate () { return gate_dce (); }
1615 unsigned int execute () { return tree_ssa_cd_dce (); }
1617 }; // class pass_cd_dce
/* Factory used by the pass manager to instantiate pass_cd_dce.
   NOTE(review): the return-type line is missing from this extract.  */
1622 make_pass_cd_dce (gcc::context
*ctxt
)
1624 return new pass_cd_dce (ctxt
);