1 /* Dead code elimination pass for the GNU compiler.
2 Copyright (C) 2002-2013 Free Software Foundation, Inc.
3 Contributed by Ben Elliston <bje@redhat.com>
4 and Andrew MacLeod <amacleod@redhat.com>
5 Adapted to use control dependence by Steven Bosscher, SUSE Labs.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 3, or (at your option) any
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Dead code elimination.
27 Building an Optimizing Compiler,
28 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
30 Advanced Compiler Design and Implementation,
31 Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.
33 Dead-code elimination is the removal of statements which have no
34 impact on the program's output. "Dead statements" have no impact
35 on the program's output, while "necessary statements" may have
38 The algorithm consists of three phases:
39 1. Marking as necessary all statements known to be necessary,
40 e.g. most function calls, writing a value to memory, etc;
41 2. Propagating necessary statements, e.g., the statements
42 giving values to operands in necessary statements; and
43 3. Removing dead statements. */
47 #include "coretypes.h"
52 #include "gimple-pretty-print.h"
53 #include "basic-block.h"
56 #include "gimple-iterator.h"
57 #include "gimple-ssa.h"
59 #include "tree-phinodes.h"
60 #include "ssa-iterators.h"
61 #include "stringpool.h"
62 #include "tree-ssanames.h"
63 #include "tree-ssa-loop-niter.h"
64 #include "tree-into-ssa.h"
67 #include "tree-pass.h"
70 #include "tree-scalar-evolution.h"
/* NOTE(review): this extraction is missing interior lines -- the original
   line numbers embedded in the text jump (e.g. 72 -> 80), so the stmt_stats
   struct body, braces and several declarations are not visible here.
   Comments below describe only what the visible declarations show.  */
72 static struct stmt_stats
/* Per-statement "necessary" mark, stored in a pass-local flag on the
   gimple statement (see gimple_plf/gimple_set_plf uses below).  */
80 #define STMT_NECESSARY GF_PLF_1
/* Worklist of statements whose operands still have to be processed by
   propagate_necessity; pushed in mark_stmt_necessary and
   mark_operand_necessary.  */
82 static vec
<gimple
> worklist
;
84 /* Vector indicating an SSA name has already been processed and marked
86 static sbitmap processed
;
88 /* Vector indicating that the last statement of a basic block has already
89 been marked as necessary. */
90 static sbitmap last_stmt_necessary
;
92 /* Vector indicating that BB contains statements that are live. */
93 static sbitmap bb_contains_live_stmts
;
95 /* Before we can determine whether a control branch is dead, we need to
96 compute which blocks are control dependent on which edges.
98 We expect each block to be control dependent on very few edges so we
99 use a bitmap for each block recording its edges. An array holds the
100 bitmap. The Ith bit in the bitmap is set if that block is dependent
102 static control_dependences
*cd
;
104 /* Vector indicating that a basic block has already had all the edges
105 processed that it is control dependent on. */
106 static sbitmap visited_control_parents
;
108 /* TRUE if this pass alters the CFG (by removing control statements).
111 If this pass alters the CFG, then it will arrange for the dominators
113 static bool cfg_altered
;
116 /* If STMT is not already marked necessary, mark it, and add it to the
117 worklist if ADD_TO_WORKLIST is true. */
/* NOTE(review): interior lines are missing from this extraction (the early
   return after the gimple_plf check and the surrounding braces are not
   visible), so control flow below is inferred from the fragments only.  */
120 mark_stmt_necessary (gimple stmt
, bool add_to_worklist
)
/* Already marked: presumably returns early here -- the return is in a
   dropped line; TODO confirm against upstream tree-ssa-dce.c.  */
124 if (gimple_plf (stmt
, STMT_NECESSARY
))
127 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
129 fprintf (dump_file
, "Marking useful stmt: ");
130 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
131 fprintf (dump_file
, "\n");
/* Record the mark in the pass-local statement flag.  */
134 gimple_set_plf (stmt
, STMT_NECESSARY
, true);
136 worklist
.safe_push (stmt
);
/* Debug statements do not count as live content of a block.  */
137 if (bb_contains_live_stmts
&& !is_gimple_debug (stmt
))
138 bitmap_set_bit (bb_contains_live_stmts
, gimple_bb (stmt
)->index
);
142 /* Mark the statement defining operand OP as necessary. */
/* NOTE(review): missing lines again hide the early returns; the gcc_assert
   below suggests that an already-processed name's def must already be
   necessary (or a no-op def).  */
145 mark_operand_necessary (tree op
)
152 ver
= SSA_NAME_VERSION (op
);
/* If OP was already processed, only sanity-check its defining stmt.  */
153 if (bitmap_bit_p (processed
, ver
))
155 stmt
= SSA_NAME_DEF_STMT (op
);
156 gcc_assert (gimple_nop_p (stmt
)
157 || gimple_plf (stmt
, STMT_NECESSARY
));
160 bitmap_set_bit (processed
, ver
);
162 stmt
= SSA_NAME_DEF_STMT (op
);
/* Nothing to do for defs that are already necessary or are no-ops
   (e.g. default definitions).  */
165 if (gimple_plf (stmt
, STMT_NECESSARY
) || gimple_nop_p (stmt
))
168 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
170 fprintf (dump_file
, "marking necessary through ");
171 print_generic_expr (dump_file
, op
, 0);
172 fprintf (dump_file
, " stmt ");
173 print_gimple_stmt (dump_file
, stmt
, 0, 0);
176 gimple_set_plf (stmt
, STMT_NECESSARY
, true);
177 if (bb_contains_live_stmts
)
178 bitmap_set_bit (bb_contains_live_stmts
, gimple_bb (stmt
)->index
);
/* Queue the defining statement so its own operands get processed.  */
179 worklist
.safe_push (stmt
);
183 /* Mark STMT as necessary if it obviously is. Add it to the worklist if
184 it can make other statements necessary.
186 If AGGRESSIVE is false, control statements are conservatively marked as
/* NOTE(review): the switch's case labels (GIMPLE_RESUME, GIMPLE_CALL,
   GIMPLE_DEBUG, GIMPLE_GOTO, GIMPLE_COND, GIMPLE_ASSIGN, ...) sit on lines
   dropped by this extraction; the bodies below are matched to them only by
   the surviving fragments -- confirm against upstream tree-ssa-dce.c.  */
190 mark_stmt_if_obviously_necessary (gimple stmt
, bool aggressive
)
192 /* With non-call exceptions, we have to assume that all statements could
193 throw. If a statement could throw, it can be deemed necessary. */
194 if (cfun
->can_throw_non_call_exceptions
195 && !cfun
->can_delete_dead_exceptions
196 && stmt_could_throw_p (stmt
))
198 mark_stmt_necessary (stmt
, true);
202 /* Statements that are implicitly live. Most function calls, asm
203 and return statements are required. Labels and GIMPLE_BIND nodes
204 are kept because they are control flow, and we have no way of
205 knowing whether they can be removed. DCE can eliminate all the
206 other statements in a block, and CFG can then remove the block
208 switch (gimple_code (stmt
))
212 mark_stmt_necessary (stmt
, false);
218 mark_stmt_necessary (stmt
, true);
/* Call handling: allocation builtins are treated specially so that
   dead malloc/free pairs can be removed later.  */
223 tree callee
= gimple_call_fndecl (stmt
);
224 if (callee
!= NULL_TREE
225 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
)
226 switch (DECL_FUNCTION_CODE (callee
))
228 case BUILT_IN_MALLOC
:
229 case BUILT_IN_CALLOC
:
230 case BUILT_IN_ALLOCA
:
231 case BUILT_IN_ALLOCA_WITH_ALIGN
:
236 /* Most, but not all function calls are required. Function calls that
237 produce no result and have no side effects (i.e. const pure
238 functions) are unnecessary. */
239 if (gimple_has_side_effects (stmt
))
241 mark_stmt_necessary (stmt
, true);
244 if (!gimple_call_lhs (stmt
))
250 /* Debug temps without a value are not useful. ??? If we could
251 easily locate the debug temp bind stmt for a use thereof,
252 we could refrain from marking all debug temps here, and
253 mark them only if they're used. */
254 if (!gimple_debug_bind_p (stmt
)
255 || gimple_debug_bind_has_value_p (stmt
)
256 || TREE_CODE (gimple_debug_bind_get_var (stmt
)) != DEBUG_EXPR_DECL
)
257 mark_stmt_necessary (stmt
, false);
261 gcc_assert (!simple_goto_p (stmt
));
262 mark_stmt_necessary (stmt
, true);
266 gcc_assert (EDGE_COUNT (gimple_bb (stmt
)->succs
) == 2);
271 mark_stmt_necessary (stmt
, true);
/* Clobbers of SSA-named storage: handled specially (body on dropped
   lines).  */
275 if (TREE_CODE (gimple_assign_lhs (stmt
)) == SSA_NAME
276 && TREE_CLOBBER_P (gimple_assign_rhs1 (stmt
)))
284 /* If the statement has volatile operands, it needs to be preserved.
285 Same for statements that can alter control flow in unpredictable
287 if (gimple_has_volatile_ops (stmt
) || is_ctrl_altering_stmt (stmt
))
289 mark_stmt_necessary (stmt
, true);
293 if (stmt_may_clobber_global_p (stmt
))
295 mark_stmt_necessary (stmt
, true);
303 /* Mark the last statement of BB as necessary. */
306 mark_last_stmt_necessary (basic_block bb
)
308 gimple stmt
= last_stmt (bb
);
/* Record that BB's last stmt has been handled and that BB is live,
   regardless of whether the stmt itself gets marked below.  */
310 bitmap_set_bit (last_stmt_necessary
, bb
->index
);
311 bitmap_set_bit (bb_contains_live_stmts
, bb
->index
);
313 /* We actually mark the statement only if it is a control statement. */
314 if (stmt
&& is_ctrl_stmt (stmt
))
315 mark_stmt_necessary (stmt
, true);
319 /* Mark control dependent edges of BB as necessary. We have to do this only
320 once for each basic block so we set the appropriate bit after we're done.
322 When IGNORE_SELF is true, ignore BB in the list of control dependences. */
325 mark_control_dependent_edges_necessary (basic_block bb
, bool ignore_self
)
328 unsigned edge_number
;
/* Tracks whether a self-dependence was skipped; the use of this flag is
   on lines dropped by the extraction -- presumably it suppresses setting
   visited_control_parents below; TODO confirm.  */
329 bool skipped
= false;
331 gcc_assert (bb
!= EXIT_BLOCK_PTR
);
333 if (bb
== ENTRY_BLOCK_PTR
)
/* Walk every edge BB is control dependent on.  */
336 EXECUTE_IF_SET_IN_BITMAP (cd
->get_edges_dependent_on (bb
->index
),
339 basic_block cd_bb
= cd
->get_edge (edge_number
)->src
;
341 if (ignore_self
&& cd_bb
== bb
)
347 if (!bitmap_bit_p (last_stmt_necessary
, cd_bb
->index
))
348 mark_last_stmt_necessary (cd_bb
);
352 bitmap_set_bit (visited_control_parents
, bb
->index
);
356 /* Find obviously necessary statements. These are things like most function
357 calls, and stores to file level variables.
359 If EL is NULL, control statements are conservatively marked as
360 necessary. Otherwise it contains the list of edges used by control
361 dependence analysis. */
/* NOTE(review): the FOR_EACH_BB loop header and several declarations
   (phi, stmt, flags, e, ei, loop) sit on lines dropped by this
   extraction.  */
364 find_obviously_necessary_stmts (bool aggressive
)
367 gimple_stmt_iterator gsi
;
374 /* PHI nodes are never inherently necessary. */
375 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
377 phi
= gsi_stmt (gsi
);
378 gimple_set_plf (phi
, STMT_NECESSARY
, false);
381 /* Check all statements in the block. */
382 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
384 stmt
= gsi_stmt (gsi
);
385 gimple_set_plf (stmt
, STMT_NECESSARY
, false);
386 mark_stmt_if_obviously_necessary (stmt
, aggressive
);
390 /* Pure and const functions are finite and thus have no infinite loops in
392 flags
= flags_from_decl_or_type (current_function_decl
);
393 if ((flags
& (ECF_CONST
|ECF_PURE
)) && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
396 /* Prevent the empty possibly infinite loops from being removed. */
/* Irreducible regions: keep back edges of irreducible loops live since
   loop analysis cannot reason about them.  */
401 if (mark_irreducible_loops ())
405 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
406 if ((e
->flags
& EDGE_DFS_BACK
)
407 && (e
->flags
& EDGE_IRREDUCIBLE_LOOP
))
410 fprintf (dump_file
, "Marking back edge of irreducible loop %i->%i\n",
411 e
->src
->index
, e
->dest
->index
);
412 mark_control_dependent_edges_necessary (e
->dest
, false);
/* Loops not provably finite must keep their controlling statements.  */
416 FOR_EACH_LOOP (loop
, 0)
417 if (!finite_loop_p (loop
))
420 fprintf (dump_file
, "can not prove finiteness of loop %i\n", loop
->num
);
421 mark_control_dependent_edges_necessary (loop
->latch
, false);
428 /* Return true if REF is based on an aliased base, otherwise false. */
431 ref_may_be_aliased (tree ref
)
433 gcc_assert (TREE_CODE (ref
) != WITH_SIZE_EXPR
);
/* Strip component references down to the base object.  */
434 while (handled_component_p (ref
))
435 ref
= TREE_OPERAND (ref
, 0);
/* Look through MEM_REFs of the form MEM[&decl ...].  */
436 if (TREE_CODE (ref
) == MEM_REF
437 && TREE_CODE (TREE_OPERAND (ref
, 0)) == ADDR_EXPR
)
438 ref
= TREE_OPERAND (TREE_OPERAND (ref
, 0), 0);
/* Only a non-aliased declaration makes the reference non-aliased.  */
439 return !(DECL_P (ref
)
440 && !may_be_aliased (ref
));
/* Budget bookkeeping for the alias-oracle walks done during
   propagate_necessity: once the walks get too expensive we fall back to
   the simple "mark everything reaching" mode (see chain_ovfl use).  */
443 static bitmap visited
= NULL
;
444 static unsigned int longest_chain
= 0;
445 static unsigned int total_chain
= 0;
446 static unsigned int nr_walks
= 0;
447 static bool chain_ovfl
= false;
449 /* Worker for the walker that marks reaching definitions of REF,
450 which is based on a non-aliased decl, necessary. It returns
451 true whenever the defining statement of the current VDEF is
452 a kill for REF, as no dominating may-defs are necessary for REF
453 anymore. DATA points to the basic-block that contains the
454 stmt that refers to REF. */
/* NOTE(review): the return statements and several condition lines are on
   lines dropped by this extraction; the kill logic below is only
   partially visible.  */
457 mark_aliased_reaching_defs_necessary_1 (ao_ref
*ref
, tree vdef
, void *data
)
459 gimple def_stmt
= SSA_NAME_DEF_STMT (vdef
);
461 /* All stmts we visit are necessary. */
462 mark_operand_necessary (vdef
);
464 /* If the stmt lhs kills ref, then we can stop walking. */
465 if (gimple_has_lhs (def_stmt
)
466 && TREE_CODE (gimple_get_lhs (def_stmt
)) != SSA_NAME
467 /* The assignment is not necessarily carried out if it can throw
468 and we can catch it in the current function where we could inspect
470 ??? We only need to care about the RHS throwing. For aggregate
471 assignments or similar calls and non-call exceptions the LHS
472 might throw as well. */
473 && !stmt_can_throw_internal (def_stmt
))
475 tree base
, lhs
= gimple_get_lhs (def_stmt
);
476 HOST_WIDE_INT size
, offset
, max_size
;
478 base
= get_ref_base_and_extent (lhs
, &offset
, &size
, &max_size
);
479 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
480 so base == refd->base does not always hold. */
481 if (base
== ref
->base
)
483 /* For a must-alias check we need to be able to constrain
484 the accesses properly. */
485 if (size
!= -1 && size
== max_size
486 && ref
->max_size
!= -1)
/* The store covers the whole of REF: it is a kill.  */
488 if (offset
<= ref
->offset
489 && offset
+ size
>= ref
->offset
+ ref
->max_size
)
492 /* Or they need to be exactly the same. */
494 /* Make sure there is no induction variable involved
495 in the references (gcc.c-torture/execute/pr42142.c).
496 The simplest way is to check if the kill dominates
498 /* But when both are in the same block we cannot
499 easily tell whether we came from a backedge
500 unless we decide to compute stmt UIDs
502 && (basic_block
) data
!= gimple_bb (def_stmt
)
503 && dominated_by_p (CDI_DOMINATORS
, (basic_block
) data
,
504 gimple_bb (def_stmt
))
505 && operand_equal_p (ref
->ref
, lhs
, 0))
510 /* Otherwise keep walking. */
/* Mark the reaching may-defs of the non-aliased reference REF used by
   STMT as necessary, walking the virtual use-def chain and stopping at
   dominating kills.  Also updates the alias-walk budget counters.  */
515 mark_aliased_reaching_defs_necessary (gimple stmt
, tree ref
)
/* Must not be called once we overflowed the budget and dropped to
   simple mode.  */
519 gcc_assert (!chain_ovfl
);
520 ao_ref_init (&refd
, ref
);
521 chain
= walk_aliased_vdefs (&refd
, gimple_vuse (stmt
),
522 mark_aliased_reaching_defs_necessary_1
,
523 gimple_bb (stmt
), NULL
);
524 if (chain
> longest_chain
)
525 longest_chain
= chain
;
526 total_chain
+= chain
;
530 /* Worker for the walker that marks reaching definitions of REF, which
531 is not based on a non-aliased decl. For simplicity we need to end
532 up marking all may-defs necessary that are not based on a non-aliased
533 decl. The only job of this walker is to skip may-defs based on
534 a non-aliased decl. */
/* NOTE(review): the conditions guarding the first two early-outs and the
   return statements are partly on lines dropped by this extraction.  */
537 mark_all_reaching_defs_necessary_1 (ao_ref
*ref ATTRIBUTE_UNUSED
,
538 tree vdef
, void *data ATTRIBUTE_UNUSED
)
540 gimple def_stmt
= SSA_NAME_DEF_STMT (vdef
);
542 /* We have to skip already visited (and thus necessary) statements
543 to make the chaining work after we dropped back to simple mode. */
545 && bitmap_bit_p (processed
, SSA_NAME_VERSION (vdef
)))
547 gcc_assert (gimple_nop_p (def_stmt
)
548 || gimple_plf (def_stmt
, STMT_NECESSARY
))
;
552 /* We want to skip stores to non-aliased variables. */
554 && gimple_assign_single_p (def_stmt
))
556 tree lhs
= gimple_assign_lhs (def_stmt
);
557 if (!ref_may_be_aliased (lhs
))
561 /* We want to skip statements that do not constitute stores but have
562 a virtual definition. */
563 if (is_gimple_call (def_stmt
))
565 tree callee
= gimple_call_fndecl (def_stmt
);
566 if (callee
!= NULL_TREE
567 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
)
/* Allocation builtins have a VDEF but are not stores we need to
   preserve through this walk.  */
568 switch (DECL_FUNCTION_CODE (callee
))
570 case BUILT_IN_MALLOC
:
571 case BUILT_IN_CALLOC
:
572 case BUILT_IN_ALLOCA
:
573 case BUILT_IN_ALLOCA_WITH_ALIGN
:
581 mark_operand_necessary (vdef
);
/* Mark every may-def reaching STMT's virtual use as necessary (simple
   fallback mode); the shared `visited' bitmap keeps the walk from
   revisiting statements across calls.  */
587 mark_all_reaching_defs_necessary (gimple stmt
)
589 walk_aliased_vdefs (NULL
, gimple_vuse (stmt
),
590 mark_all_reaching_defs_necessary_1
, NULL
, &visited
);
593 /* Return true for PHI nodes with one or identical arguments
/* NOTE(review): the `return false;' inside the loop body's condition and
   the final `return true;' are on lines dropped by this extraction.  */
596 degenerate_phi_p (gimple phi
)
599 tree op
= gimple_phi_arg_def (phi
, 0);
/* Compare every remaining argument against the first.  */
600 for (i
= 1; i
< gimple_phi_num_args (phi
); i
++)
601 if (gimple_phi_arg_def (phi
, i
) != op
)
606 /* Propagate necessity using the operands of necessary statements.
607 Process the uses on each statement in the worklist, and add all
608 feeding statements which contribute to the calculation of this
609 value to the worklist.
611 In conservative mode, EL is NULL. */
/* NOTE(review): many lines are missing from this extraction (declarations
   of stmt, i, k, use, def_stmt, def_callee, rhs; braces; several
   conditions).  The structure below is matched to upstream
   tree-ssa-dce.c only by the surviving fragments.  */
614 propagate_necessity (bool aggressive
)
618 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
619 fprintf (dump_file
, "\nProcessing worklist:\n");
/* Drain the worklist filled by the marking helpers.  */
621 while (worklist
.length () > 0)
623 /* Take STMT from worklist. */
624 stmt
= worklist
.pop ();
626 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
628 fprintf (dump_file
, "processing: ");
629 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
630 fprintf (dump_file
, "\n");
635 /* Mark the last statement of the basic blocks on which the block
636 containing STMT is control dependent, but only if we haven't
638 basic_block bb
= gimple_bb (stmt
);
639 if (bb
!= ENTRY_BLOCK_PTR
640 && !bitmap_bit_p (visited_control_parents
, bb
->index
))
641 mark_control_dependent_edges_necessary (bb
, false);
644 if (gimple_code (stmt
) == GIMPLE_PHI
645 /* We do not process virtual PHI nodes nor do we track their
647 && !virtual_operand_p (gimple_phi_result (stmt
)))
649 /* PHI nodes are somewhat special in that each PHI alternative has
650 data and control dependencies. All the statements feeding the
651 PHI node's arguments are always necessary. In aggressive mode,
652 we also consider the control dependent edges leading to the
653 predecessor block associated with each PHI alternative as
657 for (k
= 0; k
< gimple_phi_num_args (stmt
); k
++)
659 tree arg
= PHI_ARG_DEF (stmt
, k
);
660 if (TREE_CODE (arg
) == SSA_NAME
)
661 mark_operand_necessary (arg
);
664 /* For PHI operands it matters from where the control flow arrives
665 to the BB. Consider the following example:
675 We need to mark control dependence of the empty basic blocks, since they
676 contains computation of PHI operands.
678 Doing so is too restrictive in the case the predecessor block is in
684 for (i = 0; i<1000; ++i)
690 There is PHI for J in the BB containing return statement.
691 In this case the control dependence of predecessor block (that is
692 within the empty loop) also contains the block determining number
693 of iterations of the block that would prevent removing of empty
696 This scenario can be avoided by splitting critical edges.
697 To save the critical edge splitting pass we identify how the control
698 dependence would look like if the edge was split.
700 Consider the modified CFG created from current CFG by splitting
701 edge B->C. In the postdominance tree of modified CFG, C' is
702 always child of C. There are two cases how children of C' can look
707 In this case the only basic block C' is control dependent on is B.
709 2) C' has single child that is B
711 In this case control dependence of C' is same as control
712 dependence of B in original CFG except for block B itself.
713 (since C' postdominate B in modified CFG)
715 Now how to decide what case happens? There are two basic options:
717 a) C postdominate B. Then C immediately postdominate B and
718 case 2 happens iff there is no other way from B to C except
721 There is other way from B to C iff there is successor of B that
722 is not postdominated by B. Testing this condition is somewhat
723 expensive, because we need to iterate all successors of B.
724 We are safe to assume that this does not happen: we will mark B
725 as needed when processing the other path from B to C that is
726 control dependent on B and marking control dependencies of B
727 itself is harmless because they will be processed anyway after
728 processing control statement in B.
730 b) C does not postdominate B. Always case 1 happens since there is
731 path from C to exit that does not go through B and thus also C'. */
733 if (aggressive
&& !degenerate_phi_p (stmt
))
735 for (k
= 0; k
< gimple_phi_num_args (stmt
); k
++)
737 basic_block arg_bb
= gimple_phi_arg_edge (stmt
, k
)->src
;
740 != get_immediate_dominator (CDI_POST_DOMINATORS
, arg_bb
))
742 if (!bitmap_bit_p (last_stmt_necessary
, arg_bb
->index
))
743 mark_last_stmt_necessary (arg_bb
);
745 else if (arg_bb
!= ENTRY_BLOCK_PTR
746 && !bitmap_bit_p (visited_control_parents
,
748 mark_control_dependent_edges_necessary (arg_bb
, true);
754 /* Propagate through the operands. Examine all the USE, VUSE and
755 VDEF operands in this statement. Mark all the statements
756 which feed this statement's uses as necessary. */
760 /* If this is a call to free which is directly fed by an
761 allocation function do not mark that necessary through
762 processing the argument. */
763 if (gimple_call_builtin_p (stmt
, BUILT_IN_FREE
))
765 tree ptr
= gimple_call_arg (stmt
, 0);
768 /* If the pointer we free is defined by an allocation
769 function do not add the call to the worklist. */
770 if (TREE_CODE (ptr
) == SSA_NAME
771 && is_gimple_call (def_stmt
= SSA_NAME_DEF_STMT (ptr
))
772 && (def_callee
= gimple_call_fndecl (def_stmt
))
773 && DECL_BUILT_IN_CLASS (def_callee
) == BUILT_IN_NORMAL
774 && (DECL_FUNCTION_CODE (def_callee
) == BUILT_IN_MALLOC
775 || DECL_FUNCTION_CODE (def_callee
) == BUILT_IN_CALLOC
))
/* All real (non-virtual) SSA uses feed this statement.  */
779 FOR_EACH_SSA_TREE_OPERAND (use
, stmt
, iter
, SSA_OP_USE
)
780 mark_operand_necessary (use
);
782 use
= gimple_vuse (stmt
);
786 /* If we dropped to simple mode make all immediately
787 reachable definitions necessary. */
790 mark_all_reaching_defs_necessary (stmt
);
794 /* For statements that may load from memory (have a VUSE) we
795 have to mark all reaching (may-)definitions as necessary.
796 We partition this task into two cases:
797 1) explicit loads based on decls that are not aliased
798 2) implicit loads (like calls) and explicit loads not
799 based on decls that are not aliased (like indirect
800 references or loads from globals)
801 For 1) we mark all reaching may-defs as necessary, stopping
802 at dominating kills. For 2) we want to mark all dominating
803 references necessary, but non-aliased ones which we handle
804 in 1). By keeping a global visited bitmap for references
805 we walk for 2) we avoid quadratic behavior for those. */
807 if (is_gimple_call (stmt
))
809 tree callee
= gimple_call_fndecl (stmt
);
812 /* Calls to functions that are merely acting as barriers
813 or that only store to memory do not make any previous
815 if (callee
!= NULL_TREE
816 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
817 && (DECL_FUNCTION_CODE (callee
) == BUILT_IN_MEMSET
818 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_MEMSET_CHK
819 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_MALLOC
820 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_CALLOC
821 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_FREE
822 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_VA_END
823 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_ALLOCA
824 || (DECL_FUNCTION_CODE (callee
)
825 == BUILT_IN_ALLOCA_WITH_ALIGN
)
826 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_STACK_SAVE
827 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_STACK_RESTORE
828 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_ASSUME_ALIGNED
))
831 /* Calls implicitly load from memory, their arguments
832 in addition may explicitly perform memory loads. */
833 mark_all_reaching_defs_necessary (stmt
);
834 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
836 tree arg
= gimple_call_arg (stmt
, i
);
837 if (TREE_CODE (arg
) == SSA_NAME
838 || is_gimple_min_invariant (arg
))
840 if (TREE_CODE (arg
) == WITH_SIZE_EXPR
)
841 arg
= TREE_OPERAND (arg
, 0);
842 if (!ref_may_be_aliased (arg
))
843 mark_aliased_reaching_defs_necessary (stmt
, arg
);
846 else if (gimple_assign_single_p (stmt
))
849 /* If this is a load mark things necessary. */
850 rhs
= gimple_assign_rhs1 (stmt
);
851 if (TREE_CODE (rhs
) != SSA_NAME
852 && !is_gimple_min_invariant (rhs
)
853 && TREE_CODE (rhs
) != CONSTRUCTOR
)
855 if (!ref_may_be_aliased (rhs
))
856 mark_aliased_reaching_defs_necessary (stmt
, rhs
);
858 mark_all_reaching_defs_necessary (stmt
);
861 else if (gimple_code (stmt
) == GIMPLE_RETURN
)
863 tree rhs
= gimple_return_retval (stmt
);
864 /* A return statement may perform a load. */
866 && TREE_CODE (rhs
) != SSA_NAME
867 && !is_gimple_min_invariant (rhs
)
868 && TREE_CODE (rhs
) != CONSTRUCTOR
)
870 if (!ref_may_be_aliased (rhs
))
871 mark_aliased_reaching_defs_necessary (stmt
, rhs
);
873 mark_all_reaching_defs_necessary (stmt
);
876 else if (gimple_code (stmt
) == GIMPLE_ASM
)
879 mark_all_reaching_defs_necessary (stmt
);
880 /* Inputs may perform loads. */
881 for (i
= 0; i
< gimple_asm_ninputs (stmt
); ++i
)
883 tree op
= TREE_VALUE (gimple_asm_input_op (stmt
, i
));
884 if (TREE_CODE (op
) != SSA_NAME
885 && !is_gimple_min_invariant (op
)
886 && TREE_CODE (op
) != CONSTRUCTOR
887 && !ref_may_be_aliased (op
))
888 mark_aliased_reaching_defs_necessary (stmt
, op
);
891 else if (gimple_code (stmt
) == GIMPLE_TRANSACTION
)
893 /* The beginning of a transaction is a memory barrier. */
894 /* ??? If we were really cool, we'd only be a barrier
895 for the memories touched within the transaction. */
896 mark_all_reaching_defs_necessary (stmt
);
901 /* If we over-used our alias oracle budget drop to simple
902 mode. The cost metric allows quadratic behavior
903 (number of uses times number of may-defs queries) up to
904 a constant maximal number of queries and after that falls back to
905 super-linear complexity. */
906 if (/* Constant but quadratic for small functions. */
907 total_chain
> 128 * 128
908 /* Linear in the number of may-defs. */
909 && total_chain
> 32 * longest_chain
910 /* Linear in the number of uses. */
911 && total_chain
> nr_walks
* 32)
915 bitmap_clear (visited
);
921 /* Remove dead PHI nodes from block BB. */
/* NOTE(review): declarations of phi/use_stmt/use_p and the else branch of
   the virtual-operand test are on lines dropped by this extraction.  */
924 remove_dead_phis (basic_block bb
)
926 bool something_changed
= false;
928 gimple_stmt_iterator gsi
;
/* No gsi_next here: removal advances the iterator; presumably the
   keep-path advances it on a dropped line -- TODO confirm.  */
930 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
);)
933 phi
= gsi_stmt (gsi
);
935 /* We do not track necessity of virtual PHI nodes. Instead do
936 very simple dead PHI removal here. */
937 if (virtual_operand_p (gimple_phi_result (phi
)))
939 /* Virtual PHI nodes with one or identical arguments
941 if (degenerate_phi_p (phi
))
943 tree vdef
= gimple_phi_result (phi
);
944 tree vuse
= gimple_phi_arg_def (phi
, 0);
947 imm_use_iterator iter
;
/* Replace every use of the degenerate PHI's result with its
   single argument.  */
949 FOR_EACH_IMM_USE_STMT (use_stmt
, iter
, vdef
)
950 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
951 SET_USE (use_p
, vuse
);
952 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef
)
953 && TREE_CODE (vuse
) == SSA_NAME
)
954 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse
) = 1;
957 gimple_set_plf (phi
, STMT_NECESSARY
, true);
960 if (!gimple_plf (phi
, STMT_NECESSARY
))
962 something_changed
= true;
963 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
965 fprintf (dump_file
, "Deleting : ");
966 print_gimple_stmt (dump_file
, phi
, 0, TDF_SLIM
);
967 fprintf (dump_file
, "\n");
970 remove_phi_node (&gsi
, true);
971 stats
.removed_phis
++;
977 return something_changed
;
980 /* Forward edge E to respective POST_DOM_BB and update PHIs. */
/* NOTE(review): the declarations of e2/ei/op, the early return when the
   redirected edge already existed, and several braces are on lines
   dropped by this extraction.  */
983 forward_edge_to_pdom (edge e
, basic_block post_dom_bb
)
985 gimple_stmt_iterator gsi
;
989 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
990 fprintf (dump_file
, "Redirecting edge %i->%i to %i\n", e
->src
->index
,
991 e
->dest
->index
, post_dom_bb
->index
);
993 e2
= redirect_edge_and_branch (e
, post_dom_bb
);
996 /* If edge was already around, no updating is necessary. */
1000 if (!gimple_seq_empty_p (phi_nodes (post_dom_bb
)))
1002 /* We are sure that for every live PHI we are seeing control dependent BB.
1003 This means that we can pick any edge to duplicate PHI args from. */
1004 FOR_EACH_EDGE (e2
, ei
, post_dom_bb
->preds
)
1007 for (gsi
= gsi_start_phis (post_dom_bb
); !gsi_end_p (gsi
);)
1009 gimple phi
= gsi_stmt (gsi
);
1011 source_location locus
;
1013 /* PHIs for virtuals have no control dependency relation on them.
1014 We are lost here and must force renaming of the symbol. */
1015 if (virtual_operand_p (gimple_phi_result (phi
)))
1017 mark_virtual_phi_result_for_renaming (phi
);
1018 remove_phi_node (&gsi
, true);
1022 /* Dead PHI do not imply control dependency. */
1023 if (!gimple_plf (phi
, STMT_NECESSARY
))
/* Copy the argument for the new edge E from the chosen existing
   predecessor edge E2.  */
1029 op
= gimple_phi_arg_def (phi
, e2
->dest_idx
);
1030 locus
= gimple_phi_arg_location (phi
, e2
->dest_idx
);
1031 add_phi_arg (phi
, op
, e
, locus
);
1032 /* The resulting PHI if not dead can only be degenerate. */
1033 gcc_assert (degenerate_phi_p (phi
));
1040 /* Remove dead statement pointed to by iterator I. Receives the basic block BB
1041 containing I so that we don't have to look it up. */
/* NOTE(review): declarations of e/e2/ei/note, several braces and the body
   of the edge-removal loop are on lines dropped by this extraction.  */
1044 remove_dead_stmt (gimple_stmt_iterator
*i
, basic_block bb
)
1046 gimple stmt
= gsi_stmt (*i
);
1048 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1050 fprintf (dump_file
, "Deleting : ");
1051 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
1052 fprintf (dump_file
, "\n");
1057 /* If we have determined that a conditional branch statement contributes
1058 nothing to the program, then we not only remove it, but we also change
1059 the flow graph so that the current block will simply fall-thru to its
1060 immediate post-dominator. The blocks we are circumventing will be
1061 removed by cleanup_tree_cfg if this change in the flow graph makes them
1063 if (is_ctrl_stmt (stmt
))
1065 basic_block post_dom_bb
;
1069 post_dom_bb
= get_immediate_dominator (CDI_POST_DOMINATORS
, bb
);
1071 e
= find_edge (bb
, post_dom_bb
);
1073 /* If edge is already there, try to use it. This avoids need to update
1074 PHI nodes. Also watch for cases where post dominator does not exists
1075 or is exit block. These can happen for infinite loops as we create
1076 fake edges in the dominator tree. */
1079 else if (! post_dom_bb
|| post_dom_bb
== EXIT_BLOCK_PTR
)
1080 e
= EDGE_SUCC (bb
, 0);
1082 e
= forward_edge_to_pdom (EDGE_SUCC (bb
, 0), post_dom_bb
);
/* The surviving edge inherits the whole block's execution count.  */
1084 e
->probability
= REG_BR_PROB_BASE
;
1085 e
->count
= bb
->count
;
1087 /* The edge is no longer associated with a conditional, so it does
1088 not have TRUE/FALSE flags. */
1089 e
->flags
&= ~(EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
);
1091 /* The lone outgoing edge from BB will be a fallthru edge. */
1092 e
->flags
|= EDGE_FALLTHRU
;
1094 /* Remove the remaining outgoing edges. */
1095 for (ei
= ei_start (bb
->succs
); (e2
= ei_safe_edge (ei
)); )
1105 /* If this is a store into a variable that is being optimized away,
1106 add a debug bind stmt if possible. */
1107 if (MAY_HAVE_DEBUG_STMTS
1108 && gimple_assign_single_p (stmt
)
1109 && is_gimple_val (gimple_assign_rhs1 (stmt
)))
1111 tree lhs
= gimple_assign_lhs (stmt
);
1112 if ((TREE_CODE (lhs
) == VAR_DECL
|| TREE_CODE (lhs
) == PARM_DECL
)
1113 && !DECL_IGNORED_P (lhs
)
1114 && is_gimple_reg_type (TREE_TYPE (lhs
))
1115 && !is_global_var (lhs
)
1116 && !DECL_HAS_VALUE_EXPR_P (lhs
))
1118 tree rhs
= gimple_assign_rhs1 (stmt
);
/* Bind the dying value so debug info can still show it.  */
1120 = gimple_build_debug_bind (lhs
, unshare_expr (rhs
), stmt
);
1121 gsi_insert_after (i
, note
, GSI_SAME_STMT
);
/* Detach the virtual def and delete the statement.  */
1125 unlink_stmt_vdef (stmt
);
1126 gsi_remove (i
, true);
1127 release_defs (stmt
);
1130 /* Eliminate unnecessary statements. Any instruction not marked as necessary
1131 contributes nothing to the program, and can be deleted. */
1134 eliminate_unnecessary_stmts (void)
1136 bool something_changed
= false;
1138 gimple_stmt_iterator gsi
, psi
;
1143 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1144 fprintf (dump_file
, "\nEliminating unnecessary statements:\n");
1146 clear_special_calls ();
1148 /* Walking basic blocks and statements in reverse order avoids
1149 releasing SSA names before any other DEFs that refer to them are
1150 released. This helps avoid loss of debug information, as we get
1151 a chance to propagate all RHSs of removed SSAs into debug uses,
1152 rather than only the latest ones. E.g., consider:
1158 If we were to release x_3 before a_5, when we reached a_5 and
1159 tried to substitute it into the debug stmt, we'd see x_3 there,
1160 but x_3's DEF, type, etc would have already been disconnected.
1161 By going backwards, the debug stmt first changes to:
1163 # DEBUG a => x_3 - b_4
1167 # DEBUG a => y_1 + z_2 - b_4
1170 gcc_assert (dom_info_available_p (CDI_DOMINATORS
));
1171 h
= get_all_dominated_blocks (CDI_DOMINATORS
, single_succ (ENTRY_BLOCK_PTR
));
1177 /* Remove dead statements. */
1178 for (gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
); gsi
= psi
)
1180 stmt
= gsi_stmt (gsi
);
1187 /* We can mark a call to free as not necessary if the
1188 defining statement of its argument is an allocation
1189 function and that is not necessary itself. */
1190 if (gimple_call_builtin_p (stmt
, BUILT_IN_FREE
))
1192 tree ptr
= gimple_call_arg (stmt
, 0);
1195 if (TREE_CODE (ptr
) != SSA_NAME
)
1197 def_stmt
= SSA_NAME_DEF_STMT (ptr
);
1198 if (!is_gimple_call (def_stmt
)
1199 || gimple_plf (def_stmt
, STMT_NECESSARY
))
1201 callee2
= gimple_call_fndecl (def_stmt
);
1202 if (callee2
== NULL_TREE
1203 || DECL_BUILT_IN_CLASS (callee2
) != BUILT_IN_NORMAL
1204 || (DECL_FUNCTION_CODE (callee2
) != BUILT_IN_MALLOC
1205 && DECL_FUNCTION_CODE (callee2
) != BUILT_IN_CALLOC
))
1207 gimple_set_plf (stmt
, STMT_NECESSARY
, false);
1210 /* If GSI is not necessary then remove it. */
1211 if (!gimple_plf (stmt
, STMT_NECESSARY
))
1213 if (!is_gimple_debug (stmt
))
1214 something_changed
= true;
1215 remove_dead_stmt (&gsi
, bb
);
1217 else if (is_gimple_call (stmt
))
1219 tree name
= gimple_call_lhs (stmt
);
1221 notice_special_calls (stmt
);
1223 /* When LHS of var = call (); is dead, simplify it into
1224 call (); saving one operand. */
1226 && TREE_CODE (name
) == SSA_NAME
1227 && !bitmap_bit_p (processed
, SSA_NAME_VERSION (name
))
1228 /* Avoid doing so for allocation calls which we
1229 did not mark as necessary, it will confuse the
1230 special logic we apply to malloc/free pair removal. */
1231 && (!(call
= gimple_call_fndecl (stmt
))
1232 || DECL_BUILT_IN_CLASS (call
) != BUILT_IN_NORMAL
1233 || (DECL_FUNCTION_CODE (call
) != BUILT_IN_MALLOC
1234 && DECL_FUNCTION_CODE (call
) != BUILT_IN_CALLOC
1235 && DECL_FUNCTION_CODE (call
) != BUILT_IN_ALLOCA
1236 && (DECL_FUNCTION_CODE (call
)
1237 != BUILT_IN_ALLOCA_WITH_ALIGN
))))
1239 something_changed
= true;
1240 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1242 fprintf (dump_file
, "Deleting LHS of call: ");
1243 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
1244 fprintf (dump_file
, "\n");
1247 gimple_call_set_lhs (stmt
, NULL_TREE
);
1248 maybe_clean_or_replace_eh_stmt (stmt
, stmt
);
1250 release_ssa_name (name
);
1258 /* Since we don't track liveness of virtual PHI nodes, it is possible that we
1259 rendered some PHI nodes unreachable while they are still in use.
1260 Mark them for renaming. */
1263 basic_block prev_bb
;
1265 find_unreachable_blocks ();
1267 /* Delete all unreachable basic blocks in reverse dominator order. */
1268 for (bb
= EXIT_BLOCK_PTR
->prev_bb
; bb
!= ENTRY_BLOCK_PTR
; bb
= prev_bb
)
1270 prev_bb
= bb
->prev_bb
;
1272 if (!bitmap_bit_p (bb_contains_live_stmts
, bb
->index
)
1273 || !(bb
->flags
& BB_REACHABLE
))
1275 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1276 if (virtual_operand_p (gimple_phi_result (gsi_stmt (gsi
))))
1279 imm_use_iterator iter
;
1281 FOR_EACH_IMM_USE_STMT (stmt
, iter
, gimple_phi_result (gsi_stmt (gsi
)))
1283 if (!(gimple_bb (stmt
)->flags
& BB_REACHABLE
))
1285 if (gimple_code (stmt
) == GIMPLE_PHI
1286 || gimple_plf (stmt
, STMT_NECESSARY
))
1289 BREAK_FROM_IMM_USE_STMT (iter
);
1293 mark_virtual_phi_result_for_renaming (gsi_stmt (gsi
));
1296 if (!(bb
->flags
& BB_REACHABLE
))
1298 /* Speed up the removal of blocks that don't
1299 dominate others. Walking backwards, this should
1300 be the common case. ??? Do we need to recompute
1301 dominators because of cfg_altered? */
1302 if (!MAY_HAVE_DEBUG_STMTS
1303 || !first_dom_son (CDI_DOMINATORS
, bb
))
1304 delete_basic_block (bb
);
1307 h
= get_all_dominated_blocks (CDI_DOMINATORS
, bb
);
1312 prev_bb
= bb
->prev_bb
;
1313 /* Rearrangements to the CFG may have failed
1314 to update the dominators tree, so that
1315 formerly-dominated blocks are now
1316 otherwise reachable. */
1317 if (!!(bb
->flags
& BB_REACHABLE
))
1319 delete_basic_block (bb
);
1330 /* Remove dead PHI nodes. */
1331 something_changed
|= remove_dead_phis (bb
);
1334 return something_changed
;
1338 /* Print out removed statement statistics. */
1345 percg
= ((float) stats
.removed
/ (float) stats
.total
) * 100;
1346 fprintf (dump_file
, "Removed %d of %d statements (%d%%)\n",
1347 stats
.removed
, stats
.total
, (int) percg
);
1349 if (stats
.total_phis
== 0)
1352 percg
= ((float) stats
.removed_phis
/ (float) stats
.total_phis
) * 100;
1354 fprintf (dump_file
, "Removed %d of %d PHI nodes (%d%%)\n",
1355 stats
.removed_phis
, stats
.total_phis
, (int) percg
);
1358 /* Initialization for this pass. Set up the used data structures. */
1361 tree_dce_init (bool aggressive
)
1363 memset ((void *) &stats
, 0, sizeof (stats
));
1367 last_stmt_necessary
= sbitmap_alloc (last_basic_block
);
1368 bitmap_clear (last_stmt_necessary
);
1369 bb_contains_live_stmts
= sbitmap_alloc (last_basic_block
);
1370 bitmap_clear (bb_contains_live_stmts
);
1373 processed
= sbitmap_alloc (num_ssa_names
+ 1);
1374 bitmap_clear (processed
);
1376 worklist
.create (64);
1377 cfg_altered
= false;
1380 /* Cleanup after this pass. */
1383 tree_dce_done (bool aggressive
)
1388 sbitmap_free (visited_control_parents
);
1389 sbitmap_free (last_stmt_necessary
);
1390 sbitmap_free (bb_contains_live_stmts
);
1391 bb_contains_live_stmts
= NULL
;
1394 sbitmap_free (processed
);
1396 worklist
.release ();
1399 /* Main routine to eliminate dead code.
1401 AGGRESSIVE controls the aggressiveness of the algorithm.
1402 In conservative mode, we ignore control dependence and simply declare
1403 all but the most trivially dead branches necessary. This mode is fast.
1404 In aggressive mode, control dependences are taken into account, which
1405 results in more dead code elimination, but at the cost of some time.
1407 FIXME: Aggressive mode before PRE doesn't work currently because
1408 the dominance info is not invalidated after DCE1. This is
1409 not an issue right now because we only run aggressive DCE
1410 as the last tree SSA pass, but keep this in mind when you
1411 start experimenting with pass ordering. */
1414 perform_tree_ssa_dce (bool aggressive
)
1416 bool something_changed
= 0;
1418 calculate_dominance_info (CDI_DOMINATORS
);
1420 /* Preheaders are needed for SCEV to work.
1421 Simple lateches and recorded exits improve chances that loop will
1422 proved to be finite in testcases such as in loop-15.c and loop-24.c */
1424 loop_optimizer_init (LOOPS_NORMAL
1425 | LOOPS_HAVE_RECORDED_EXITS
);
1427 tree_dce_init (aggressive
);
1431 /* Compute control dependence. */
1432 calculate_dominance_info (CDI_POST_DOMINATORS
);
1433 cd
= new control_dependences (create_edge_list ());
1435 visited_control_parents
= sbitmap_alloc (last_basic_block
);
1436 bitmap_clear (visited_control_parents
);
1438 mark_dfs_back_edges ();
1441 find_obviously_necessary_stmts (aggressive
);
1444 loop_optimizer_finalize ();
1450 visited
= BITMAP_ALLOC (NULL
);
1451 propagate_necessity (aggressive
);
1452 BITMAP_FREE (visited
);
1454 something_changed
|= eliminate_unnecessary_stmts ();
1455 something_changed
|= cfg_altered
;
1457 /* We do not update postdominators, so free them unconditionally. */
1458 free_dominance_info (CDI_POST_DOMINATORS
);
1460 /* If we removed paths in the CFG, then we need to update
1461 dominators as well. I haven't investigated the possibility
1462 of incrementally updating dominators. */
1464 free_dominance_info (CDI_DOMINATORS
);
1466 statistics_counter_event (cfun
, "Statements deleted", stats
.removed
);
1467 statistics_counter_event (cfun
, "PHI nodes deleted", stats
.removed_phis
);
1469 /* Debugging dumps. */
1470 if (dump_file
&& (dump_flags
& (TDF_STATS
|TDF_DETAILS
)))
1473 tree_dce_done (aggressive
);
1475 if (something_changed
)
1476 return TODO_update_ssa
| TODO_cleanup_cfg
;
1480 /* Pass entry points. */
1484 return perform_tree_ssa_dce (/*aggressive=*/false);
1488 tree_ssa_dce_loop (void)
1491 todo
= perform_tree_ssa_dce (/*aggressive=*/false);
1494 free_numbers_of_iterations_estimates ();
1501 tree_ssa_cd_dce (void)
1503 return perform_tree_ssa_dce (/*aggressive=*/optimize
>= 2);
1509 return flag_tree_dce
!= 0;
1514 const pass_data pass_data_dce
=
1516 GIMPLE_PASS
, /* type */
1518 OPTGROUP_NONE
, /* optinfo_flags */
1519 true, /* has_gate */
1520 true, /* has_execute */
1521 TV_TREE_DCE
, /* tv_id */
1522 ( PROP_cfg
| PROP_ssa
), /* properties_required */
1523 0, /* properties_provided */
1524 0, /* properties_destroyed */
1525 0, /* todo_flags_start */
1526 TODO_verify_ssa
, /* todo_flags_finish */
1529 class pass_dce
: public gimple_opt_pass
1532 pass_dce (gcc::context
*ctxt
)
1533 : gimple_opt_pass (pass_data_dce
, ctxt
)
1536 /* opt_pass methods: */
1537 opt_pass
* clone () { return new pass_dce (m_ctxt
); }
1538 bool gate () { return gate_dce (); }
1539 unsigned int execute () { return tree_ssa_dce (); }
1541 }; // class pass_dce
1546 make_pass_dce (gcc::context
*ctxt
)
1548 return new pass_dce (ctxt
);
1553 const pass_data pass_data_dce_loop
=
1555 GIMPLE_PASS
, /* type */
1556 "dceloop", /* name */
1557 OPTGROUP_NONE
, /* optinfo_flags */
1558 true, /* has_gate */
1559 true, /* has_execute */
1560 TV_TREE_DCE
, /* tv_id */
1561 ( PROP_cfg
| PROP_ssa
), /* properties_required */
1562 0, /* properties_provided */
1563 0, /* properties_destroyed */
1564 0, /* todo_flags_start */
1565 TODO_verify_ssa
, /* todo_flags_finish */
1568 class pass_dce_loop
: public gimple_opt_pass
1571 pass_dce_loop (gcc::context
*ctxt
)
1572 : gimple_opt_pass (pass_data_dce_loop
, ctxt
)
1575 /* opt_pass methods: */
1576 opt_pass
* clone () { return new pass_dce_loop (m_ctxt
); }
1577 bool gate () { return gate_dce (); }
1578 unsigned int execute () { return tree_ssa_dce_loop (); }
1580 }; // class pass_dce_loop
1585 make_pass_dce_loop (gcc::context
*ctxt
)
1587 return new pass_dce_loop (ctxt
);
1592 const pass_data pass_data_cd_dce
=
1594 GIMPLE_PASS
, /* type */
1596 OPTGROUP_NONE
, /* optinfo_flags */
1597 true, /* has_gate */
1598 true, /* has_execute */
1599 TV_TREE_CD_DCE
, /* tv_id */
1600 ( PROP_cfg
| PROP_ssa
), /* properties_required */
1601 0, /* properties_provided */
1602 0, /* properties_destroyed */
1603 0, /* todo_flags_start */
1604 ( TODO_verify_ssa
| TODO_verify_flow
), /* todo_flags_finish */
1607 class pass_cd_dce
: public gimple_opt_pass
1610 pass_cd_dce (gcc::context
*ctxt
)
1611 : gimple_opt_pass (pass_data_cd_dce
, ctxt
)
1614 /* opt_pass methods: */
1615 opt_pass
* clone () { return new pass_cd_dce (m_ctxt
); }
1616 bool gate () { return gate_dce (); }
1617 unsigned int execute () { return tree_ssa_cd_dce (); }
1619 }; // class pass_cd_dce
1624 make_pass_cd_dce (gcc::context
*ctxt
)
1626 return new pass_cd_dce (ctxt
);