Daily bump.
[official-gcc.git] / gcc / tree-ssa-dce.c
blob1d887c28f866bf67b20926e49af7b18c58661b48
1 /* Dead code elimination pass for the GNU compiler.
2 Copyright (C) 2002-2014 Free Software Foundation, Inc.
3 Contributed by Ben Elliston <bje@redhat.com>
4 and Andrew MacLeod <amacleod@redhat.com>
5 Adapted to use control dependence by Steven Bosscher, SUSE Labs.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 3, or (at your option) any
12 later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Dead code elimination.
25 References:
27 Building an Optimizing Compiler,
28 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
30 Advanced Compiler Design and Implementation,
31 Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.
33 Dead-code elimination is the removal of statements which have no
34 impact on the program's output. "Dead statements" have no impact
35 on the program's output, while "necessary statements" may have
36 impact on the output.
38 The algorithm consists of three phases:
39 1. Marking as necessary all statements known to be necessary,
40 e.g. most function calls, writing a value to memory, etc;
41 2. Propagating necessary statements, e.g., the statements
42 giving values to operands in necessary statements; and
43 3. Removing dead statements. */
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
50 #include "tree.h"
51 #include "calls.h"
52 #include "gimple-pretty-print.h"
53 #include "predict.h"
54 #include "vec.h"
55 #include "hashtab.h"
56 #include "hash-set.h"
57 #include "machmode.h"
58 #include "hard-reg-set.h"
59 #include "input.h"
60 #include "function.h"
61 #include "dominance.h"
62 #include "cfg.h"
63 #include "cfganal.h"
64 #include "basic-block.h"
65 #include "tree-ssa-alias.h"
66 #include "internal-fn.h"
67 #include "tree-eh.h"
68 #include "gimple-expr.h"
69 #include "is-a.h"
70 #include "gimple.h"
71 #include "gimplify.h"
72 #include "gimple-iterator.h"
73 #include "gimple-ssa.h"
74 #include "tree-cfg.h"
75 #include "tree-phinodes.h"
76 #include "ssa-iterators.h"
77 #include "stringpool.h"
78 #include "tree-ssanames.h"
79 #include "tree-ssa-loop-niter.h"
80 #include "tree-into-ssa.h"
81 #include "expr.h"
82 #include "tree-dfa.h"
83 #include "tree-pass.h"
84 #include "flags.h"
85 #include "cfgloop.h"
86 #include "tree-scalar-evolution.h"
87 #include "tree-chkp.h"
88 #include "tree-ssa-propagate.h"
89 #include "gimple-fold.h"
/* Counters gathered over one run of the pass; dumped in the pass
   statistics.  */
static struct stmt_stats
{
  int total;		/* Statements examined.  */
  int total_phis;	/* PHI nodes examined.  */
  int removed;		/* Statements deleted as dead.  */
  int removed_phis;	/* PHI nodes deleted as dead.  */
} stats;
/* Per-statement flag (pass-local flag 1) recording that a statement has
   been proven necessary.  */
#define STMT_NECESSARY GF_PLF_1

/* Worklist of statements already marked necessary whose operands still
   have to be propagated from.  */
static vec<gimple> worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static control_dependences *cd;

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;
135 /* If STMT is not already marked necessary, mark it, and add it to the
136 worklist if ADD_TO_WORKLIST is true. */
138 static inline void
139 mark_stmt_necessary (gimple stmt, bool add_to_worklist)
141 gcc_assert (stmt);
143 if (gimple_plf (stmt, STMT_NECESSARY))
144 return;
146 if (dump_file && (dump_flags & TDF_DETAILS))
148 fprintf (dump_file, "Marking useful stmt: ");
149 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
150 fprintf (dump_file, "\n");
153 gimple_set_plf (stmt, STMT_NECESSARY, true);
154 if (add_to_worklist)
155 worklist.safe_push (stmt);
156 if (bb_contains_live_stmts && !is_gimple_debug (stmt))
157 bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
161 /* Mark the statement defining operand OP as necessary. */
163 static inline void
164 mark_operand_necessary (tree op)
166 gimple stmt;
167 int ver;
169 gcc_assert (op);
171 ver = SSA_NAME_VERSION (op);
172 if (bitmap_bit_p (processed, ver))
174 stmt = SSA_NAME_DEF_STMT (op);
175 gcc_assert (gimple_nop_p (stmt)
176 || gimple_plf (stmt, STMT_NECESSARY));
177 return;
179 bitmap_set_bit (processed, ver);
181 stmt = SSA_NAME_DEF_STMT (op);
182 gcc_assert (stmt);
184 if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
185 return;
187 if (dump_file && (dump_flags & TDF_DETAILS))
189 fprintf (dump_file, "marking necessary through ");
190 print_generic_expr (dump_file, op, 0);
191 fprintf (dump_file, " stmt ");
192 print_gimple_stmt (dump_file, stmt, 0, 0);
195 gimple_set_plf (stmt, STMT_NECESSARY, true);
196 if (bb_contains_live_stmts)
197 bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
198 worklist.safe_push (stmt);
202 /* Mark STMT as necessary if it obviously is. Add it to the worklist if
203 it can make other statements necessary.
205 If AGGRESSIVE is false, control statements are conservatively marked as
206 necessary. */
208 static void
209 mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
211 /* With non-call exceptions, we have to assume that all statements could
212 throw. If a statement could throw, it can be deemed necessary. */
213 if (cfun->can_throw_non_call_exceptions
214 && !cfun->can_delete_dead_exceptions
215 && stmt_could_throw_p (stmt))
217 mark_stmt_necessary (stmt, true);
218 return;
221 /* Statements that are implicitly live. Most function calls, asm
222 and return statements are required. Labels and GIMPLE_BIND nodes
223 are kept because they are control flow, and we have no way of
224 knowing whether they can be removed. DCE can eliminate all the
225 other statements in a block, and CFG can then remove the block
226 and labels. */
227 switch (gimple_code (stmt))
229 case GIMPLE_PREDICT:
230 case GIMPLE_LABEL:
231 mark_stmt_necessary (stmt, false);
232 return;
234 case GIMPLE_ASM:
235 case GIMPLE_RESX:
236 case GIMPLE_RETURN:
237 mark_stmt_necessary (stmt, true);
238 return;
240 case GIMPLE_CALL:
242 tree callee = gimple_call_fndecl (stmt);
243 if (callee != NULL_TREE
244 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
245 switch (DECL_FUNCTION_CODE (callee))
247 case BUILT_IN_MALLOC:
248 case BUILT_IN_ALIGNED_ALLOC:
249 case BUILT_IN_CALLOC:
250 case BUILT_IN_ALLOCA:
251 case BUILT_IN_ALLOCA_WITH_ALIGN:
252 return;
254 default:;
256 /* Most, but not all function calls are required. Function calls that
257 produce no result and have no side effects (i.e. const pure
258 functions) are unnecessary. */
259 if (gimple_has_side_effects (stmt))
261 mark_stmt_necessary (stmt, true);
262 return;
264 if (!gimple_call_lhs (stmt))
265 return;
266 break;
269 case GIMPLE_DEBUG:
270 /* Debug temps without a value are not useful. ??? If we could
271 easily locate the debug temp bind stmt for a use thereof,
272 would could refrain from marking all debug temps here, and
273 mark them only if they're used. */
274 if (!gimple_debug_bind_p (stmt)
275 || gimple_debug_bind_has_value_p (stmt)
276 || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
277 mark_stmt_necessary (stmt, false);
278 return;
280 case GIMPLE_GOTO:
281 gcc_assert (!simple_goto_p (stmt));
282 mark_stmt_necessary (stmt, true);
283 return;
285 case GIMPLE_COND:
286 gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
287 /* Fall through. */
289 case GIMPLE_SWITCH:
290 if (! aggressive)
291 mark_stmt_necessary (stmt, true);
292 break;
294 case GIMPLE_ASSIGN:
295 if (TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
296 && TREE_CLOBBER_P (gimple_assign_rhs1 (stmt)))
297 return;
298 break;
300 default:
301 break;
304 /* If the statement has volatile operands, it needs to be preserved.
305 Same for statements that can alter control flow in unpredictable
306 ways. */
307 if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
309 mark_stmt_necessary (stmt, true);
310 return;
313 if (stmt_may_clobber_global_p (stmt))
315 mark_stmt_necessary (stmt, true);
316 return;
319 return;
323 /* Mark the last statement of BB as necessary. */
325 static void
326 mark_last_stmt_necessary (basic_block bb)
328 gimple stmt = last_stmt (bb);
330 bitmap_set_bit (last_stmt_necessary, bb->index);
331 bitmap_set_bit (bb_contains_live_stmts, bb->index);
333 /* We actually mark the statement only if it is a control statement. */
334 if (stmt && is_ctrl_stmt (stmt))
335 mark_stmt_necessary (stmt, true);
339 /* Mark control dependent edges of BB as necessary. We have to do this only
340 once for each basic block so we set the appropriate bit after we're done.
342 When IGNORE_SELF is true, ignore BB in the list of control dependences. */
344 static void
345 mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
347 bitmap_iterator bi;
348 unsigned edge_number;
349 bool skipped = false;
351 gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
353 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
354 return;
356 EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
357 0, edge_number, bi)
359 basic_block cd_bb = cd->get_edge (edge_number)->src;
361 if (ignore_self && cd_bb == bb)
363 skipped = true;
364 continue;
367 if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
368 mark_last_stmt_necessary (cd_bb);
371 if (!skipped)
372 bitmap_set_bit (visited_control_parents, bb->index);
376 /* Find obviously necessary statements. These are things like most function
377 calls, and stores to file level variables.
379 If EL is NULL, control statements are conservatively marked as
380 necessary. Otherwise it contains the list of edges used by control
381 dependence analysis. */
383 static void
384 find_obviously_necessary_stmts (bool aggressive)
386 basic_block bb;
387 gimple_stmt_iterator gsi;
388 edge e;
389 gimple phi, stmt;
390 int flags;
392 FOR_EACH_BB_FN (bb, cfun)
394 /* PHI nodes are never inherently necessary. */
395 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
397 phi = gsi_stmt (gsi);
398 gimple_set_plf (phi, STMT_NECESSARY, false);
401 /* Check all statements in the block. */
402 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
404 stmt = gsi_stmt (gsi);
405 gimple_set_plf (stmt, STMT_NECESSARY, false);
406 mark_stmt_if_obviously_necessary (stmt, aggressive);
410 /* Pure and const functions are finite and thus have no infinite loops in
411 them. */
412 flags = flags_from_decl_or_type (current_function_decl);
413 if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
414 return;
416 /* Prevent the empty possibly infinite loops from being removed. */
417 if (aggressive)
419 struct loop *loop;
420 scev_initialize ();
421 if (mark_irreducible_loops ())
422 FOR_EACH_BB_FN (bb, cfun)
424 edge_iterator ei;
425 FOR_EACH_EDGE (e, ei, bb->succs)
426 if ((e->flags & EDGE_DFS_BACK)
427 && (e->flags & EDGE_IRREDUCIBLE_LOOP))
429 if (dump_file)
430 fprintf (dump_file, "Marking back edge of irreducible loop %i->%i\n",
431 e->src->index, e->dest->index);
432 mark_control_dependent_edges_necessary (e->dest, false);
436 FOR_EACH_LOOP (loop, 0)
437 if (!finite_loop_p (loop))
439 if (dump_file)
440 fprintf (dump_file, "can not prove finiteness of loop %i\n", loop->num);
441 mark_control_dependent_edges_necessary (loop->latch, false);
443 scev_finalize ();
448 /* Return true if REF is based on an aliased base, otherwise false. */
450 static bool
451 ref_may_be_aliased (tree ref)
453 gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
454 while (handled_component_p (ref))
455 ref = TREE_OPERAND (ref, 0);
456 if (TREE_CODE (ref) == MEM_REF
457 && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
458 ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
459 return !(DECL_P (ref)
460 && !may_be_aliased (ref));
/* Global bitmap of VDEFs already visited by mark_all_reaching_defs_necessary
   walks; keeping it across walks avoids quadratic behavior.  */
static bitmap visited = NULL;
/* Statistics on the precise alias walks, used by propagate_necessity to
   decide when the oracle budget is exhausted.  */
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
/* True once we dropped from precise walks to the simple mode that marks
   all reaching definitions necessary.  */
static bool chain_ovfl = false;
469 /* Worker for the walker that marks reaching definitions of REF,
470 which is based on a non-aliased decl, necessary. It returns
471 true whenever the defining statement of the current VDEF is
472 a kill for REF, as no dominating may-defs are necessary for REF
473 anymore. DATA points to the basic-block that contains the
474 stmt that refers to REF. */
476 static bool
477 mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
479 gimple def_stmt = SSA_NAME_DEF_STMT (vdef);
481 /* All stmts we visit are necessary. */
482 mark_operand_necessary (vdef);
484 /* If the stmt lhs kills ref, then we can stop walking. */
485 if (gimple_has_lhs (def_stmt)
486 && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
487 /* The assignment is not necessarily carried out if it can throw
488 and we can catch it in the current function where we could inspect
489 the previous value.
490 ??? We only need to care about the RHS throwing. For aggregate
491 assignments or similar calls and non-call exceptions the LHS
492 might throw as well. */
493 && !stmt_can_throw_internal (def_stmt))
495 tree base, lhs = gimple_get_lhs (def_stmt);
496 HOST_WIDE_INT size, offset, max_size;
497 ao_ref_base (ref);
498 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
499 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
500 so base == refd->base does not always hold. */
501 if (base == ref->base)
503 /* For a must-alias check we need to be able to constrain
504 the accesses properly. */
505 if (size != -1 && size == max_size
506 && ref->max_size != -1)
508 if (offset <= ref->offset
509 && offset + size >= ref->offset + ref->max_size)
510 return true;
512 /* Or they need to be exactly the same. */
513 else if (ref->ref
514 /* Make sure there is no induction variable involved
515 in the references (gcc.c-torture/execute/pr42142.c).
516 The simplest way is to check if the kill dominates
517 the use. */
518 /* But when both are in the same block we cannot
519 easily tell whether we came from a backedge
520 unless we decide to compute stmt UIDs
521 (see PR58246). */
522 && (basic_block) data != gimple_bb (def_stmt)
523 && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
524 gimple_bb (def_stmt))
525 && operand_equal_p (ref->ref, lhs, 0))
526 return true;
530 /* Otherwise keep walking. */
531 return false;
534 static void
535 mark_aliased_reaching_defs_necessary (gimple stmt, tree ref)
537 unsigned int chain;
538 ao_ref refd;
539 gcc_assert (!chain_ovfl);
540 ao_ref_init (&refd, ref);
541 chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
542 mark_aliased_reaching_defs_necessary_1,
543 gimple_bb (stmt), NULL);
544 if (chain > longest_chain)
545 longest_chain = chain;
546 total_chain += chain;
547 nr_walks++;
550 /* Worker for the walker that marks reaching definitions of REF, which
551 is not based on a non-aliased decl. For simplicity we need to end
552 up marking all may-defs necessary that are not based on a non-aliased
553 decl. The only job of this walker is to skip may-defs based on
554 a non-aliased decl. */
556 static bool
557 mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
558 tree vdef, void *data ATTRIBUTE_UNUSED)
560 gimple def_stmt = SSA_NAME_DEF_STMT (vdef);
562 /* We have to skip already visited (and thus necessary) statements
563 to make the chaining work after we dropped back to simple mode. */
564 if (chain_ovfl
565 && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
567 gcc_assert (gimple_nop_p (def_stmt)
568 || gimple_plf (def_stmt, STMT_NECESSARY));
569 return false;
572 /* We want to skip stores to non-aliased variables. */
573 if (!chain_ovfl
574 && gimple_assign_single_p (def_stmt))
576 tree lhs = gimple_assign_lhs (def_stmt);
577 if (!ref_may_be_aliased (lhs))
578 return false;
581 /* We want to skip statments that do not constitute stores but have
582 a virtual definition. */
583 if (is_gimple_call (def_stmt))
585 tree callee = gimple_call_fndecl (def_stmt);
586 if (callee != NULL_TREE
587 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
588 switch (DECL_FUNCTION_CODE (callee))
590 case BUILT_IN_MALLOC:
591 case BUILT_IN_ALIGNED_ALLOC:
592 case BUILT_IN_CALLOC:
593 case BUILT_IN_ALLOCA:
594 case BUILT_IN_ALLOCA_WITH_ALIGN:
595 case BUILT_IN_FREE:
596 return false;
598 default:;
602 mark_operand_necessary (vdef);
604 return false;
607 static void
608 mark_all_reaching_defs_necessary (gimple stmt)
610 walk_aliased_vdefs (NULL, gimple_vuse (stmt),
611 mark_all_reaching_defs_necessary_1, NULL, &visited);
614 /* Return true for PHI nodes with one or identical arguments
615 can be removed. */
616 static bool
617 degenerate_phi_p (gimple phi)
619 unsigned int i;
620 tree op = gimple_phi_arg_def (phi, 0);
621 for (i = 1; i < gimple_phi_num_args (phi); i++)
622 if (gimple_phi_arg_def (phi, i) != op)
623 return false;
624 return true;
627 /* Propagate necessity using the operands of necessary statements.
628 Process the uses on each statement in the worklist, and add all
629 feeding statements which contribute to the calculation of this
630 value to the worklist.
632 In conservative mode, EL is NULL. */
634 static void
635 propagate_necessity (bool aggressive)
637 gimple stmt;
639 if (dump_file && (dump_flags & TDF_DETAILS))
640 fprintf (dump_file, "\nProcessing worklist:\n");
642 while (worklist.length () > 0)
644 /* Take STMT from worklist. */
645 stmt = worklist.pop ();
647 if (dump_file && (dump_flags & TDF_DETAILS))
649 fprintf (dump_file, "processing: ");
650 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
651 fprintf (dump_file, "\n");
654 if (aggressive)
656 /* Mark the last statement of the basic blocks on which the block
657 containing STMT is control dependent, but only if we haven't
658 already done so. */
659 basic_block bb = gimple_bb (stmt);
660 if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
661 && !bitmap_bit_p (visited_control_parents, bb->index))
662 mark_control_dependent_edges_necessary (bb, false);
665 if (gimple_code (stmt) == GIMPLE_PHI
666 /* We do not process virtual PHI nodes nor do we track their
667 necessity. */
668 && !virtual_operand_p (gimple_phi_result (stmt)))
670 /* PHI nodes are somewhat special in that each PHI alternative has
671 data and control dependencies. All the statements feeding the
672 PHI node's arguments are always necessary. In aggressive mode,
673 we also consider the control dependent edges leading to the
674 predecessor block associated with each PHI alternative as
675 necessary. */
676 gphi *phi = as_a <gphi *> (stmt);
677 size_t k;
679 for (k = 0; k < gimple_phi_num_args (stmt); k++)
681 tree arg = PHI_ARG_DEF (stmt, k);
682 if (TREE_CODE (arg) == SSA_NAME)
683 mark_operand_necessary (arg);
686 /* For PHI operands it matters from where the control flow arrives
687 to the BB. Consider the following example:
689 a=exp1;
690 b=exp2;
691 if (test)
693 else
695 c=PHI(a,b)
697 We need to mark control dependence of the empty basic blocks, since they
698 contains computation of PHI operands.
700 Doing so is too restrictive in the case the predecestor block is in
701 the loop. Consider:
703 if (b)
705 int i;
706 for (i = 0; i<1000; ++i)
708 j = 0;
710 return j;
712 There is PHI for J in the BB containing return statement.
713 In this case the control dependence of predecestor block (that is
714 within the empty loop) also contains the block determining number
715 of iterations of the block that would prevent removing of empty
716 loop in this case.
718 This scenario can be avoided by splitting critical edges.
719 To save the critical edge splitting pass we identify how the control
720 dependence would look like if the edge was split.
722 Consider the modified CFG created from current CFG by splitting
723 edge B->C. In the postdominance tree of modified CFG, C' is
724 always child of C. There are two cases how chlids of C' can look
725 like:
727 1) C' is leaf
729 In this case the only basic block C' is control dependent on is B.
731 2) C' has single child that is B
733 In this case control dependence of C' is same as control
734 dependence of B in original CFG except for block B itself.
735 (since C' postdominate B in modified CFG)
737 Now how to decide what case happens? There are two basic options:
739 a) C postdominate B. Then C immediately postdominate B and
740 case 2 happens iff there is no other way from B to C except
741 the edge B->C.
743 There is other way from B to C iff there is succesor of B that
744 is not postdominated by B. Testing this condition is somewhat
745 expensive, because we need to iterate all succesors of B.
746 We are safe to assume that this does not happen: we will mark B
747 as needed when processing the other path from B to C that is
748 conrol dependent on B and marking control dependencies of B
749 itself is harmless because they will be processed anyway after
750 processing control statement in B.
752 b) C does not postdominate B. Always case 1 happens since there is
753 path from C to exit that does not go through B and thus also C'. */
755 if (aggressive && !degenerate_phi_p (stmt))
757 for (k = 0; k < gimple_phi_num_args (stmt); k++)
759 basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;
761 if (gimple_bb (stmt)
762 != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
764 if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
765 mark_last_stmt_necessary (arg_bb);
767 else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
768 && !bitmap_bit_p (visited_control_parents,
769 arg_bb->index))
770 mark_control_dependent_edges_necessary (arg_bb, true);
774 else
776 /* Propagate through the operands. Examine all the USE, VUSE and
777 VDEF operands in this statement. Mark all the statements
778 which feed this statement's uses as necessary. */
779 ssa_op_iter iter;
780 tree use;
782 /* If this is a call to free which is directly fed by an
783 allocation function do not mark that necessary through
784 processing the argument. */
785 if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
787 tree ptr = gimple_call_arg (stmt, 0);
788 gimple def_stmt;
789 tree def_callee;
790 /* If the pointer we free is defined by an allocation
791 function do not add the call to the worklist. */
792 if (TREE_CODE (ptr) == SSA_NAME
793 && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
794 && (def_callee = gimple_call_fndecl (def_stmt))
795 && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
796 && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
797 || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
798 || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
800 gimple bounds_def_stmt;
801 tree bounds;
803 /* For instrumented calls we should also check used
804 bounds are returned by the same allocation call. */
805 if (!gimple_call_with_bounds_p (stmt)
806 || ((bounds = gimple_call_arg (stmt, 1))
807 && TREE_CODE (bounds) == SSA_NAME
808 && (bounds_def_stmt = SSA_NAME_DEF_STMT (bounds))
809 && chkp_gimple_call_builtin_p (bounds_def_stmt,
810 BUILT_IN_CHKP_BNDRET)
811 && gimple_call_arg (bounds_def_stmt, 0) == ptr))
812 continue;
816 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
817 mark_operand_necessary (use);
819 use = gimple_vuse (stmt);
820 if (!use)
821 continue;
823 /* If we dropped to simple mode make all immediately
824 reachable definitions necessary. */
825 if (chain_ovfl)
827 mark_all_reaching_defs_necessary (stmt);
828 continue;
831 /* For statements that may load from memory (have a VUSE) we
832 have to mark all reaching (may-)definitions as necessary.
833 We partition this task into two cases:
834 1) explicit loads based on decls that are not aliased
835 2) implicit loads (like calls) and explicit loads not
836 based on decls that are not aliased (like indirect
837 references or loads from globals)
838 For 1) we mark all reaching may-defs as necessary, stopping
839 at dominating kills. For 2) we want to mark all dominating
840 references necessary, but non-aliased ones which we handle
841 in 1). By keeping a global visited bitmap for references
842 we walk for 2) we avoid quadratic behavior for those. */
844 if (is_gimple_call (stmt))
846 tree callee = gimple_call_fndecl (stmt);
847 unsigned i;
849 /* Calls to functions that are merely acting as barriers
850 or that only store to memory do not make any previous
851 stores necessary. */
852 if (callee != NULL_TREE
853 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
854 && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
855 || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
856 || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
857 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC
858 || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
859 || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
860 || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
861 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
862 || (DECL_FUNCTION_CODE (callee)
863 == BUILT_IN_ALLOCA_WITH_ALIGN)
864 || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
865 || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
866 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
867 continue;
869 /* Calls implicitly load from memory, their arguments
870 in addition may explicitly perform memory loads. */
871 mark_all_reaching_defs_necessary (stmt);
872 for (i = 0; i < gimple_call_num_args (stmt); ++i)
874 tree arg = gimple_call_arg (stmt, i);
875 if (TREE_CODE (arg) == SSA_NAME
876 || is_gimple_min_invariant (arg))
877 continue;
878 if (TREE_CODE (arg) == WITH_SIZE_EXPR)
879 arg = TREE_OPERAND (arg, 0);
880 if (!ref_may_be_aliased (arg))
881 mark_aliased_reaching_defs_necessary (stmt, arg);
884 else if (gimple_assign_single_p (stmt))
886 tree rhs;
887 /* If this is a load mark things necessary. */
888 rhs = gimple_assign_rhs1 (stmt);
889 if (TREE_CODE (rhs) != SSA_NAME
890 && !is_gimple_min_invariant (rhs)
891 && TREE_CODE (rhs) != CONSTRUCTOR)
893 if (!ref_may_be_aliased (rhs))
894 mark_aliased_reaching_defs_necessary (stmt, rhs);
895 else
896 mark_all_reaching_defs_necessary (stmt);
899 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
901 tree rhs = gimple_return_retval (return_stmt);
902 /* A return statement may perform a load. */
903 if (rhs
904 && TREE_CODE (rhs) != SSA_NAME
905 && !is_gimple_min_invariant (rhs)
906 && TREE_CODE (rhs) != CONSTRUCTOR)
908 if (!ref_may_be_aliased (rhs))
909 mark_aliased_reaching_defs_necessary (stmt, rhs);
910 else
911 mark_all_reaching_defs_necessary (stmt);
914 else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
916 unsigned i;
917 mark_all_reaching_defs_necessary (stmt);
918 /* Inputs may perform loads. */
919 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
921 tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
922 if (TREE_CODE (op) != SSA_NAME
923 && !is_gimple_min_invariant (op)
924 && TREE_CODE (op) != CONSTRUCTOR
925 && !ref_may_be_aliased (op))
926 mark_aliased_reaching_defs_necessary (stmt, op);
929 else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
931 /* The beginning of a transaction is a memory barrier. */
932 /* ??? If we were really cool, we'd only be a barrier
933 for the memories touched within the transaction. */
934 mark_all_reaching_defs_necessary (stmt);
936 else
937 gcc_unreachable ();
939 /* If we over-used our alias oracle budget drop to simple
940 mode. The cost metric allows quadratic behavior
941 (number of uses times number of may-defs queries) up to
942 a constant maximal number of queries and after that falls back to
943 super-linear complexity. */
944 if (/* Constant but quadratic for small functions. */
945 total_chain > 128 * 128
946 /* Linear in the number of may-defs. */
947 && total_chain > 32 * longest_chain
948 /* Linear in the number of uses. */
949 && total_chain > nr_walks * 32)
951 chain_ovfl = true;
952 if (visited)
953 bitmap_clear (visited);
959 /* Remove dead PHI nodes from block BB. */
961 static bool
962 remove_dead_phis (basic_block bb)
964 bool something_changed = false;
965 gphi *phi;
966 gphi_iterator gsi;
968 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
970 stats.total_phis++;
971 phi = gsi.phi ();
973 /* We do not track necessity of virtual PHI nodes. Instead do
974 very simple dead PHI removal here. */
975 if (virtual_operand_p (gimple_phi_result (phi)))
977 /* Virtual PHI nodes with one or identical arguments
978 can be removed. */
979 if (degenerate_phi_p (phi))
981 tree vdef = gimple_phi_result (phi);
982 tree vuse = gimple_phi_arg_def (phi, 0);
984 use_operand_p use_p;
985 imm_use_iterator iter;
986 gimple use_stmt;
987 FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
988 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
989 SET_USE (use_p, vuse);
990 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
991 && TREE_CODE (vuse) == SSA_NAME)
992 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
994 else
995 gimple_set_plf (phi, STMT_NECESSARY, true);
998 if (!gimple_plf (phi, STMT_NECESSARY))
1000 something_changed = true;
1001 if (dump_file && (dump_flags & TDF_DETAILS))
1003 fprintf (dump_file, "Deleting : ");
1004 print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
1005 fprintf (dump_file, "\n");
1008 remove_phi_node (&gsi, true);
1009 stats.removed_phis++;
1010 continue;
1013 gsi_next (&gsi);
1015 return something_changed;
1018 /* Forward edge E to respective POST_DOM_BB and update PHIs. */
1020 static edge
1021 forward_edge_to_pdom (edge e, basic_block post_dom_bb)
1023 gphi_iterator gsi;
1024 edge e2 = NULL;
1025 edge_iterator ei;
1027 if (dump_file && (dump_flags & TDF_DETAILS))
1028 fprintf (dump_file, "Redirecting edge %i->%i to %i\n", e->src->index,
1029 e->dest->index, post_dom_bb->index);
1031 e2 = redirect_edge_and_branch (e, post_dom_bb);
1032 cfg_altered = true;
1034 /* If edge was already around, no updating is necessary. */
1035 if (e2 != e)
1036 return e2;
1038 if (!gimple_seq_empty_p (phi_nodes (post_dom_bb)))
1040 /* We are sure that for every live PHI we are seeing control dependent BB.
1041 This means that we can pick any edge to duplicate PHI args from. */
1042 FOR_EACH_EDGE (e2, ei, post_dom_bb->preds)
1043 if (e2 != e)
1044 break;
1045 for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);)
1047 gphi *phi = gsi.phi ();
1048 tree op;
1049 source_location locus;
1051 /* PHIs for virtuals have no control dependency relation on them.
1052 We are lost here and must force renaming of the symbol. */
1053 if (virtual_operand_p (gimple_phi_result (phi)))
1055 mark_virtual_phi_result_for_renaming (phi);
1056 remove_phi_node (&gsi, true);
1057 continue;
1060 /* Dead PHI do not imply control dependency. */
1061 if (!gimple_plf (phi, STMT_NECESSARY))
1063 gsi_next (&gsi);
1064 continue;
1067 op = gimple_phi_arg_def (phi, e2->dest_idx);
1068 locus = gimple_phi_arg_location (phi, e2->dest_idx);
1069 add_phi_arg (phi, op, e, locus);
1070 /* The resulting PHI if not dead can only be degenerate. */
1071 gcc_assert (degenerate_phi_p (phi));
1072 gsi_next (&gsi);
1075 return e;
/* Remove dead statement pointed to by iterator I.  Receives the basic block BB
   containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
  gimple stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we also change
     the flow graph so that the current block will simply fall-thru to its
     immediate post-dominator.  The blocks we are circumventing will be
     removed by cleanup_tree_cfg if this change in the flow graph makes them
     unreachable.  */
  if (is_ctrl_stmt (stmt))
    {
      basic_block post_dom_bb;
      edge e, e2;
      edge_iterator ei;

      post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);

      e = find_edge (bb, post_dom_bb);

      /* If edge is already there, try to use it.  This avoids need to update
         PHI nodes.  Also watch for cases where post dominator does not exists
	 or is exit block.  These can happen for infinite loops as we create
	 fake edges in the dominator tree.  */
      if (e)
	;
      else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	e = EDGE_SUCC (bb, 0);
      else
	e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb);
      gcc_assert (e);
      /* The surviving edge now takes all of BB's execution count.  */
      e->probability = REG_BR_PROB_BASE;
      e->count = bb->count;

      /* The edge is no longer associated with a conditional, so it does
	 not have TRUE/FALSE flags.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
	if (e != e2)
	  {
	    cfg_altered = true;
	    remove_edge (e2);
	  }
	else
	  ei_next (&ei);
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      /* Only bind user-visible scalar locals/parameters that live in
	 pseudos; globals and vars with value-exprs are handled elsewhere.  */
      if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
	  && !DECL_IGNORED_P (lhs)
	  && is_gimple_reg_type (TREE_TYPE (lhs))
	  && !is_global_var (lhs)
	  && !DECL_HAS_VALUE_EXPR_P (lhs))
	{
	  tree rhs = gimple_assign_rhs1 (stmt);
	  gdebug *note
	    = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
	  gsi_insert_after (i, note, GSI_SAME_STMT);
	}
    }

  /* Finally disconnect the statement from virtual operands and delete it.  */
  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}
1168 /* Helper for maybe_optimize_arith_overflow. Find in *TP if there are any
1169 uses of data (SSA_NAME) other than REALPART_EXPR referencing it. */
1171 static tree
1172 find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
1174 if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
1175 *walk_subtrees = 0;
1176 if (*tp == (tree) data)
1177 return *tp;
1178 return NULL_TREE;
/* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
   but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
   into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
   uses.  */

static void
maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
			       enum tree_code subcode)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);

  if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
    return;

  /* Classify all immediate uses of the call result: debug stmts,
     REALPART_EXPR extractions, and anything else.  Any "other" use
     (including IMAGPART_EXPR, i.e. the overflow flag) blocks the
     transformation.  */
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  bool has_debug_uses = false;
  bool has_realpart_uses = false;
  bool has_other_uses = false;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
    {
      gimple use_stmt = USE_STMT (use_p);
      if (is_gimple_debug (use_stmt))
	has_debug_uses = true;
      else if (is_gimple_assign (use_stmt)
	       && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
	       && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
	has_realpart_uses = true;
      else
	{
	  has_other_uses = true;
	  break;
	}
    }

  if (!has_realpart_uses || has_other_uses)
    return;

  /* Build the plain arithmetic replacement.  The computation is done in
     the unsigned variant of the element type so that wrap-around is
     well defined.  */
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  tree type = TREE_TYPE (TREE_TYPE (lhs));
  tree utype = type;
  if (!TYPE_UNSIGNED (type))
    utype = build_nonstandard_integer_type (TYPE_PRECISION (type), 1);
  tree result = fold_build2_loc (loc, subcode, utype,
				 fold_convert_loc (loc, utype, arg0),
				 fold_convert_loc (loc, utype, arg1));
  result = fold_convert_loc (loc, type, result);

  /* Debug binds that mention LHS outside of a REALPART_EXPR would become
     wrong after the rewrite; reset their values.  */
  if (has_debug_uses)
    {
      gimple use_stmt;
      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
	{
	  if (!gimple_debug_bind_p (use_stmt))
	    continue;
	  tree v = gimple_debug_bind_get_value (use_stmt);
	  if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
	    {
	      gimple_debug_bind_reset_value (use_stmt);
	      update_stmt (use_stmt);
	    }
	}
    }

  /* Repackage the result as a complex value whose imaginary (overflow)
     part is zero, matching the original call's return type.  */
  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
    result = drop_tree_overflow (result);
  tree overflow = build_zero_cst (type);
  tree ctype = build_complex_type (type);
  if (TREE_CODE (result) == INTEGER_CST)
    result = build_complex (ctype, result, overflow);
  else
    result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
			 ctype, result, overflow);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Transforming call: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "because the overflow result is never used into: ");
      print_generic_stmt (dump_file, result, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  if (!update_call_from_tree (gsi, result))
    gimplify_and_update_call_from_tree (gsi, result);
}
/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  Returns true if
   anything was removed or simplified.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple stmt;
  tree call;
  vec<basic_block> h;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS,
				single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

  while (h.length ())
    {
      bb = h.pop ();

      /* Remove dead statements.  Iterate backwards through the block,
	 saving the previous iterator first since removal invalidates
	 the current one.  */
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
	{
	  stmt = gsi_stmt (gsi);

	  psi = gsi;
	  gsi_prev (&psi);

	  stats.total++;

	  /* We can mark a call to free as not necessary if the
	     defining statement of its argument is not necessary
	     (and thus is getting removed).  */
	  if (gimple_plf (stmt, STMT_NECESSARY)
	      && gimple_call_builtin_p (stmt, BUILT_IN_FREE))
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      if (TREE_CODE (ptr) == SSA_NAME)
		{
		  gimple def_stmt = SSA_NAME_DEF_STMT (ptr);
		  if (!gimple_nop_p (def_stmt)
		      && !gimple_plf (def_stmt, STMT_NECESSARY))
		    gimple_set_plf (stmt, STMT_NECESSARY, false);
		}
	      /* We did not propagate necessity for free calls fed
		 by allocation function to allow unnecessary
		 alloc-free sequence elimination.  For instrumented
		 calls it also means we did not mark bounds producer
		 as necessary and it is time to do it in case free
		 call is not removed.  */
	      if (gimple_call_with_bounds_p (stmt))
		{
		  gimple bounds_def_stmt;
		  tree bounds = gimple_call_arg (stmt, 1);
		  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
		  bounds_def_stmt = SSA_NAME_DEF_STMT (bounds);
		  if (bounds_def_stmt
		      && !gimple_plf (bounds_def_stmt, STMT_NECESSARY))
		    gimple_set_plf (bounds_def_stmt, STMT_NECESSARY,
				    gimple_plf (stmt, STMT_NECESSARY));
		}
	    }

	  /* If GSI is not necessary then remove it.  */
	  if (!gimple_plf (stmt, STMT_NECESSARY))
	    {
	      /* Removing a debug stmt does not count as a real change.  */
	      if (!is_gimple_debug (stmt))
		something_changed = true;
	      remove_dead_stmt (&gsi, bb);
	    }
	  else if (is_gimple_call (stmt))
	    {
	      tree name = gimple_call_lhs (stmt);

	      notice_special_calls (as_a <gcall *> (stmt));

	      /* When LHS of var = call (); is dead, simplify it into
		 call (); saving one operand.  */
	      if (name
		  && TREE_CODE (name) == SSA_NAME
		  && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
		  /* Avoid doing so for allocation calls which we
		     did not mark as necessary, it will confuse the
		     special logic we apply to malloc/free pair removal.  */
		  && (!(call = gimple_call_fndecl (stmt))
		      || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
		      || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
			  && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
			  && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
			  && DECL_FUNCTION_CODE (call) != BUILT_IN_ALLOCA
			  && (DECL_FUNCTION_CODE (call)
			      != BUILT_IN_ALLOCA_WITH_ALIGN)))
		  /* Avoid doing so for bndret calls for the same reason.  */
		  && !chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
		{
		  something_changed = true;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Deleting LHS of call: ");
		      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
		      fprintf (dump_file, "\n");
		    }

		  gimple_call_set_lhs (stmt, NULL_TREE);
		  maybe_clean_or_replace_eh_stmt (stmt, stmt);
		  update_stmt (stmt);
		  release_ssa_name (name);

		  /* GOMP_SIMD_LANE without lhs is not needed.  */
		  if (gimple_call_internal_p (stmt)
		      && gimple_call_internal_fn (stmt) == IFN_GOMP_SIMD_LANE)
		    remove_dead_stmt (&gsi, bb);
		}
	      else if (gimple_call_internal_p (stmt))
		switch (gimple_call_internal_fn (stmt))
		  {
		  case IFN_ADD_OVERFLOW:
		    maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
		    break;
		  case IFN_SUB_OVERFLOW:
		    maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
		    break;
		  case IFN_MUL_OVERFLOW:
		    maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
		    break;
		  default:
		    break;
		  }
	    }
	}
    }

  h.release ();

  /* Since we don't track liveness of virtual PHI nodes, it is possible that we
     rendered some PHI nodes unreachable while they are still in use.
     Mark them for renaming.  */
  if (cfg_altered)
    {
      basic_block prev_bb;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
	   bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
	{
	  prev_bb = bb->prev_bb;

	  if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
	      || !(bb->flags & BB_REACHABLE))
	    {
	      /* A virtual PHI in a dead block may still have necessary
		 uses in reachable code; flag its result for renaming.  */
	      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
		   gsi_next (&gsi))
		if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
		  {
		    bool found = false;
		    imm_use_iterator iter;

		    FOR_EACH_IMM_USE_STMT (stmt, iter,
					   gimple_phi_result (gsi.phi ()))
		      {
			if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
			  continue;
			if (gimple_code (stmt) == GIMPLE_PHI
			    || gimple_plf (stmt, STMT_NECESSARY))
			  {
			    found = true;
			    BREAK_FROM_IMM_USE_STMT (iter);
			  }
		      }
		    if (found)
		      mark_virtual_phi_result_for_renaming (gsi.phi ());
		  }

	      if (!(bb->flags & BB_REACHABLE))
		{
		  /* Speed up the removal of blocks that don't
		     dominate others.  Walking backwards, this should
		     be the common case.  ??? Do we need to recompute
		     dominators because of cfg_altered?  */
		  if (!MAY_HAVE_DEBUG_STMTS
		      || !first_dom_son (CDI_DOMINATORS, bb))
		    delete_basic_block (bb);
		  else
		    {
		      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

		      while (h.length ())
			{
			  bb = h.pop ();
			  prev_bb = bb->prev_bb;
			  /* Rearrangements to the CFG may have failed
			     to update the dominators tree, so that
			     formerly-dominated blocks are now
			     otherwise reachable.  */
			  if (!!(bb->flags & BB_REACHABLE))
			    continue;
			  delete_basic_block (bb);
			}

		      h.release ();
		    }
		}
	    }
	}
    }

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  return something_changed;
}
1516 /* Print out removed statement statistics. */
1518 static void
1519 print_stats (void)
1521 float percg;
1523 percg = ((float) stats.removed / (float) stats.total) * 100;
1524 fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
1525 stats.removed, stats.total, (int) percg);
1527 if (stats.total_phis == 0)
1528 percg = 0;
1529 else
1530 percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;
1532 fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
1533 stats.removed_phis, stats.total_phis, (int) percg);
1536 /* Initialization for this pass. Set up the used data structures. */
1538 static void
1539 tree_dce_init (bool aggressive)
1541 memset ((void *) &stats, 0, sizeof (stats));
1543 if (aggressive)
1545 last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
1546 bitmap_clear (last_stmt_necessary);
1547 bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
1548 bitmap_clear (bb_contains_live_stmts);
1551 processed = sbitmap_alloc (num_ssa_names + 1);
1552 bitmap_clear (processed);
1554 worklist.create (64);
1555 cfg_altered = false;
1558 /* Cleanup after this pass. */
1560 static void
1561 tree_dce_done (bool aggressive)
1563 if (aggressive)
1565 delete cd;
1566 sbitmap_free (visited_control_parents);
1567 sbitmap_free (last_stmt_necessary);
1568 sbitmap_free (bb_contains_live_stmts);
1569 bb_contains_live_stmts = NULL;
1572 sbitmap_free (processed);
1574 worklist.release ();
1577 /* Main routine to eliminate dead code.
1579 AGGRESSIVE controls the aggressiveness of the algorithm.
1580 In conservative mode, we ignore control dependence and simply declare
1581 all but the most trivially dead branches necessary. This mode is fast.
1582 In aggressive mode, control dependences are taken into account, which
1583 results in more dead code elimination, but at the cost of some time.
1585 FIXME: Aggressive mode before PRE doesn't work currently because
1586 the dominance info is not invalidated after DCE1. This is
1587 not an issue right now because we only run aggressive DCE
1588 as the last tree SSA pass, but keep this in mind when you
1589 start experimenting with pass ordering. */
1591 static unsigned int
1592 perform_tree_ssa_dce (bool aggressive)
1594 bool something_changed = 0;
1596 calculate_dominance_info (CDI_DOMINATORS);
1598 /* Preheaders are needed for SCEV to work.
1599 Simple lateches and recorded exits improve chances that loop will
1600 proved to be finite in testcases such as in loop-15.c and loop-24.c */
1601 if (aggressive)
1602 loop_optimizer_init (LOOPS_NORMAL
1603 | LOOPS_HAVE_RECORDED_EXITS);
1605 tree_dce_init (aggressive);
1607 if (aggressive)
1609 /* Compute control dependence. */
1610 calculate_dominance_info (CDI_POST_DOMINATORS);
1611 cd = new control_dependences (create_edge_list ());
1613 visited_control_parents =
1614 sbitmap_alloc (last_basic_block_for_fn (cfun));
1615 bitmap_clear (visited_control_parents);
1617 mark_dfs_back_edges ();
1620 find_obviously_necessary_stmts (aggressive);
1622 if (aggressive)
1623 loop_optimizer_finalize ();
1625 longest_chain = 0;
1626 total_chain = 0;
1627 nr_walks = 0;
1628 chain_ovfl = false;
1629 visited = BITMAP_ALLOC (NULL);
1630 propagate_necessity (aggressive);
1631 BITMAP_FREE (visited);
1633 something_changed |= eliminate_unnecessary_stmts ();
1634 something_changed |= cfg_altered;
1636 /* We do not update postdominators, so free them unconditionally. */
1637 free_dominance_info (CDI_POST_DOMINATORS);
1639 /* If we removed paths in the CFG, then we need to update
1640 dominators as well. I haven't investigated the possibility
1641 of incrementally updating dominators. */
1642 if (cfg_altered)
1643 free_dominance_info (CDI_DOMINATORS);
1645 statistics_counter_event (cfun, "Statements deleted", stats.removed);
1646 statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);
1648 /* Debugging dumps. */
1649 if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
1650 print_stats ();
1652 tree_dce_done (aggressive);
1654 if (something_changed)
1656 free_numbers_of_iterations_estimates ();
1657 if (scev_initialized_p ())
1658 scev_reset ();
1659 return TODO_update_ssa | TODO_cleanup_cfg;
1661 return 0;
1664 /* Pass entry points. */
1665 static unsigned int
1666 tree_ssa_dce (void)
1668 return perform_tree_ssa_dce (/*aggressive=*/false);
1671 static unsigned int
1672 tree_ssa_cd_dce (void)
1674 return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
namespace {

/* Pass descriptor for the conservative DCE pass.  The initializers are
   positional and must match the pass_data field order.  */
const pass_data pass_data_dce =
{
  GIMPLE_PASS, /* type */
  "dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_dce : public gimple_opt_pass
{
public:
  pass_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dce, ctxt)
  {}

  /* opt_pass methods: */
  /* The pass may be cloned so it can be scheduled multiple times.  */
  opt_pass * clone () { return new pass_dce (m_ctxt); }
  /* Run only when -ftree-dce is in effect.  */
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_dce (); }

}; // class pass_dce

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */

gimple_opt_pass *
make_pass_dce (gcc::context *ctxt)
{
  return new pass_dce (ctxt);
}
namespace {

/* Pass descriptor for the control-dependence-based DCE pass.  The
   initializers are positional and must match the pass_data field order.  */
const pass_data pass_data_cd_dce =
{
  GIMPLE_PASS, /* type */
  "cddce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CD_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cd_dce : public gimple_opt_pass
{
public:
  pass_cd_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cd_dce, ctxt)
  {}

  /* opt_pass methods: */
  /* The pass may be cloned so it can be scheduled multiple times.  */
  opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
  /* Run only when -ftree-dce is in effect.  */
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_cd_dce (); }

}; // class pass_cd_dce

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */

gimple_opt_pass *
make_pass_cd_dce (gcc::context *ctxt)
{
  return new pass_cd_dce (ctxt);
}