/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002-2016 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output.  "Dead statements" have no impact
   on the program's output, while "necessary statements" may have
   impact on the output.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */
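/* As an illustrative example (hypothetical source, not from this file):

     int foo (int b)
     {
       int x = b + 1;   // dead: x is never used
       int y = b * 2;   // necessary: feeds the return value
       return y;
     }

   Phase 1 marks the return statement as necessary, phase 2 propagates
   necessity to the statement defining y, and phase 3 removes the
   assignment to x.  */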
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "tree-chkp.h"
#include "tree-ssa-propagate.h"
#include "gimple-fold.h"
static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1

static vec<gimple *> worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
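/* For instance (illustrative only), given

     if (p)        <- block B1, with edge E from B1 to B2
       x = 1;      <- block B2

   whether B2 executes depends on which edge is taken out of B1, so the
   bit for E is set in the bitmap of B2.  */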
static control_dependences *cd;

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;

/* When non-NULL, holds a map from basic block index into the postorder.  */
static int *bb_postorder;
/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */

static inline void
mark_stmt_necessary (gimple *stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    worklist.safe_push (stmt);
  if (add_to_worklist && bb_contains_live_stmts && !is_gimple_debug (stmt))
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}
/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple *stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (bitmap_bit_p (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
                  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  bitmap_set_bit (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op, 0);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
  worklist.safe_push (stmt);
}
/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
{
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement could throw, it can be deemed necessary.  */
  if (cfun->can_throw_non_call_exceptions
      && !cfun->can_delete_dead_exceptions
      && stmt_could_throw_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
        tree callee = gimple_call_fndecl (stmt);
        if (callee != NULL_TREE
            && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_MALLOC:
            case BUILT_IN_ALIGNED_ALLOC:
            case BUILT_IN_CALLOC:
            case BUILT_IN_ALLOCA:
            case BUILT_IN_ALLOCA_WITH_ALIGN:
              return;

            default:;
            }
        /* Most, but not all, function calls are required.  Function calls
           that produce no result and have no side effects (i.e. const and
           pure functions) are unnecessary.  */
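        /* For instance (illustrative), "tmp_1 = __builtin_popcount (x_2);"
           is a const call: it neither has side effects nor traps, so it is
           necessary only if tmp_1 is used; the two checks below implement
           exactly that.  */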
        if (gimple_has_side_effects (stmt))
          {
            mark_stmt_necessary (stmt, true);
            return;
          }
        if (!gimple_call_lhs (stmt))
          return;
        break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ???  If we could
         easily locate the debug temp bind stmt for a use thereof,
         we could refrain from marking all debug temps here, and
         mark them only if they're used.  */
      if (!gimple_debug_bind_p (stmt)
          || gimple_debug_bind_has_value_p (stmt)
          || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
        mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
        mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
        return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (stmt_may_clobber_global_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}
/* Mark the last statement of BB as necessary.  */

static void
mark_last_stmt_necessary (basic_block bb)
{
  gimple *stmt = last_stmt (bb);

  bitmap_set_bit (last_stmt_necessary, bb->index);
  bitmap_set_bit (bb_contains_live_stmts, bb->index);

  /* We actually mark the statement only if it is a control statement.  */
  if (stmt && is_ctrl_stmt (stmt))
    mark_stmt_necessary (stmt, true);
}


/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));

  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;

  EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
                            0, edge_number, bi)
    {
      basic_block cd_bb = cd->get_edge_src (edge_number);

      if (ignore_self && cd_bb == bb)
        {
          skipped = true;
          continue;
        }

      if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
        mark_last_stmt_necessary (cd_bb);
    }

  if (!skipped)
    bitmap_set_bit (visited_control_parents, bb->index);
}
/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  Otherwise control dependence analysis decides which control
   statements need to be kept.  */

static void
find_obviously_necessary_stmts (bool aggressive)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple *phi, *stmt;
  int flags;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          gimple_set_plf (phi, STMT_NECESSARY, false);
        }

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          stmt = gsi_stmt (gsi);
          gimple_set_plf (stmt, STMT_NECESSARY, false);
          mark_stmt_if_obviously_necessary (stmt, aggressive);
        }
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent the empty possibly infinite loops from being removed.  */
  if (aggressive)
    {
      struct loop *loop;
      scev_initialize ();
      if (mark_irreducible_loops ())
        FOR_EACH_BB_FN (bb, cfun)
          {
            edge_iterator ei;
            FOR_EACH_EDGE (e, ei, bb->succs)
              if ((e->flags & EDGE_DFS_BACK)
                  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
                {
                  if (dump_file)
                    fprintf (dump_file,
                             "Marking back edge of irreducible loop %i->%i\n",
                             e->src->index, e->dest->index);
                  mark_control_dependent_edges_necessary (e->dest, false);
                }
          }

      FOR_EACH_LOOP (loop, 0)
        if (!finite_loop_p (loop))
          {
            if (dump_file)
              fprintf (dump_file, "cannot prove finiteness of loop %i\n",
                       loop->num);
            mark_control_dependent_edges_necessary (loop->latch, false);
          }
      scev_finalize ();
    }
}
/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  return !(DECL_P (ref)
           && !may_be_aliased (ref));
}

static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;
/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */
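/* For instance (illustrative), given a non-aliased local a:

     a = ...;       <- killing definition: covers all of a
     ...
     ... = a;       <- REF, the load the walk starts from

   the walk can stop at the store, because no earlier may-def of a can
   be observed through REF.  */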
static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
         and we can catch it in the current function where we could inspect
         the previous value.
         ???  We only need to care about the RHS throwing.  For aggregate
         assignments or similar calls and non-call exceptions the LHS
         might throw as well.  */
      && !stmt_can_throw_internal (def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      HOST_WIDE_INT size, offset, max_size;
      bool reverse;
      ao_ref_base (ref);
      base
        = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
         so base == ref->base does not always hold.  */
      if (base == ref->base)
        {
          /* For a must-alias check we need to be able to constrain
             the accesses properly.  */
          if (size != -1 && size == max_size
              && ref->max_size != -1)
            {
              if (offset <= ref->offset
                  && offset + size >= ref->offset + ref->max_size)
                return true;
            }
          /* Or they need to be exactly the same.  */
          else if (ref->ref
                   /* Make sure there is no induction variable involved
                      in the references (gcc.c-torture/execute/pr42142.c).
                      The simplest way is to check if the kill dominates
                      the use.  */
                   /* But when both are in the same block we cannot
                      easily tell whether we came from a backedge
                      unless we decide to compute stmt UIDs
                      (see PR58246).  */
                   && (basic_block) data != gimple_bb (def_stmt)
                   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
                                      gimple_bb (def_stmt))
                   && operand_equal_p (ref->ref, lhs, 0))
            return true;
        }
    }

  /* Otherwise keep walking.  */
  return false;
}
static void
mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
{
  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
                              mark_aliased_reaching_defs_necessary_1,
                              gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}
/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
                                    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
                  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
        return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (is_gimple_call (def_stmt))
    {
      tree callee = gimple_call_fndecl (def_stmt);
      if (callee != NULL_TREE
          && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee))
          {
          case BUILT_IN_MALLOC:
          case BUILT_IN_ALIGNED_ALLOC:
          case BUILT_IN_CALLOC:
          case BUILT_IN_ALLOCA:
          case BUILT_IN_ALLOCA_WITH_ALIGN:
          case BUILT_IN_FREE:
            return false;

          default:;
          }
    }

  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  return false;
}
static void
mark_all_reaching_defs_necessary (gimple *stmt)
{
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
                      mark_all_reaching_defs_necessary_1, NULL, &visited);
}
/* Return true for PHI nodes with one or identical arguments;
   these can be removed.  */
static bool
degenerate_phi_p (gimple *phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}
/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode (AGGRESSIVE is false), control dependences
   are not taken into account.  */

static void
propagate_necessity (bool aggressive)
{
  gimple *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (worklist.length () > 0)
    {
      /* Take STMT from worklist.  */
      stmt = worklist.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "processing: ");
          print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
          fprintf (dump_file, "\n");
        }

      if (aggressive)
        {
          /* Mark the last statement of the basic blocks on which the block
             containing STMT is control dependent, but only if we haven't
             already done so.  */
          basic_block bb = gimple_bb (stmt);
          if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
              && !bitmap_bit_p (visited_control_parents, bb->index))
            mark_control_dependent_edges_necessary (bb, false);
        }

      if (gimple_code (stmt) == GIMPLE_PHI
          /* We do not process virtual PHI nodes nor do we track their
             necessity.  */
          && !virtual_operand_p (gimple_phi_result (stmt)))
        {
          /* PHI nodes are somewhat special in that each PHI alternative has
             data and control dependencies.  All the statements feeding the
             PHI node's arguments are always necessary.  In aggressive mode,
             we also consider the control dependent edges leading to the
             predecessor block associated with each PHI alternative as
             necessary.  */
          gphi *phi = as_a <gphi *> (stmt);
          size_t k;

          for (k = 0; k < gimple_phi_num_args (stmt); k++)
            {
              tree arg = PHI_ARG_DEF (stmt, k);
              if (TREE_CODE (arg) == SSA_NAME)
                mark_operand_necessary (arg);
            }

          /* For PHI operands it matters from where the control flow arrives
             at the BB.  Consider the following example:

             a=exp1;
             b=exp2;
             if (test)
                ;
             else
                ;
             c=PHI(a,b)

             We need to mark the control dependence of the empty basic
             blocks, since they contain the computation of the PHI operands.

             Doing so is too restrictive in the case the predecessor block
             is inside a loop.  Consider:

             if (b)
               {
                 int i;
                 for (i = 0; i<1000; ++i)
                   ;
                 j = 0;
               }
             return j;

             There is a PHI for j in the BB containing the return statement.
             In this case the control dependence of the predecessor block
             (that is within the empty loop) also contains the block
             determining the number of iterations of the loop, which would
             prevent removing the empty loop here.

             This scenario can be avoided by splitting critical edges.
             To save the critical edge splitting pass, we identify how the
             control dependence would look if the edge were split.

             Consider the modified CFG created from the current CFG by
             splitting edge B->C.  In the postdominance tree of the modified
             CFG, C' is always a child of C.  There are two cases for the
             children of C':

             1) C' is a leaf

                In this case the only basic block C' is control dependent
                on is B.

             2) C' has a single child that is B

                In this case the control dependence of C' is the same as the
                control dependence of B in the original CFG, except for
                block B itself (since C' postdominates B in the modified
                CFG).

             Now how to decide which case happens?  There are two basic
             options:

             a) C postdominates B.  Then C immediately postdominates B and
                case 2 happens iff there is no other way from B to C except
                the edge B->C.

                There is another way from B to C iff there is a successor of
                B that is not postdominated by B.  Testing this condition is
                somewhat expensive, because we need to iterate over all
                successors of B.  We are safe to assume that this does not
                happen: we will mark B as needed when processing the other
                path from B to C that is control dependent on B, and marking
                the control dependencies of B itself is harmless because
                they will be processed anyway after processing the control
                statement in B.

             b) C does not postdominate B.  Then case 1 always happens,
                since there is a path from C to exit that does not go
                through B and thus also not through C'.  */
          if (aggressive && !degenerate_phi_p (stmt))
            {
              for (k = 0; k < gimple_phi_num_args (stmt); k++)
                {
                  basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;

                  if (gimple_bb (stmt)
                      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
                    {
                      if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
                        mark_last_stmt_necessary (arg_bb);
                    }
                  else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
                           && !bitmap_bit_p (visited_control_parents,
                                             arg_bb->index))
                    mark_control_dependent_edges_necessary (arg_bb, true);
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* If this is a call to free which is directly fed by an
             allocation function do not mark that necessary through
             processing the argument.  */
          if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              gimple *def_stmt;
              tree def_callee;
              /* If the pointer we free is defined by an allocation
                 function do not add the call to the worklist.  */
              if (TREE_CODE (ptr) == SSA_NAME
                  && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
                  && (def_callee = gimple_call_fndecl (def_stmt))
                  && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
                      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
                {
                  gimple *bounds_def_stmt;
                  tree bounds;

                  /* For instrumented calls we should also check used
                     bounds are returned by the same allocation call.  */
                  if (!gimple_call_with_bounds_p (stmt)
                      || ((bounds = gimple_call_arg (stmt, 1))
                          && TREE_CODE (bounds) == SSA_NAME
                          && (bounds_def_stmt = SSA_NAME_DEF_STMT (bounds))
                          && chkp_gimple_call_builtin_p (bounds_def_stmt,
                                                         BUILT_IN_CHKP_BNDRET)
                          && gimple_call_arg (bounds_def_stmt, 0) == ptr))
                    continue;
                }
            }

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
            mark_operand_necessary (use);

          use = gimple_vuse (stmt);
          if (!use)
            continue;

          /* If we dropped to simple mode make all immediately
             reachable definitions necessary.  */
          if (chain_ovfl)
            {
              mark_all_reaching_defs_necessary (stmt);
              continue;
            }

          /* For statements that may load from memory (have a VUSE) we
             have to mark all reaching (may-)definitions as necessary.
             We partition this task into two cases:
              1) explicit loads based on decls that are not aliased
              2) implicit loads (like calls) and explicit loads not
                 based on decls that are not aliased (like indirect
                 references or loads from globals)
             For 1) we mark all reaching may-defs as necessary, stopping
             at dominating kills.  For 2) we want to mark all dominating
             references necessary, but non-aliased ones which we handle
             in 1).  By keeping a global visited bitmap for references
             we walk for 2) we avoid quadratic behavior for those.  */
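          /* Illustrative example: for "tmp_1 = a;" with a non-aliased
             local a we are in case 1) and can stop at the first dominating
             store to a, while for "tmp_2 = *p_3;" or a call we are in
             case 2) and must consider every aliased may-def.  */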
          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);
              unsigned i;

              /* Calls to functions that are merely acting as barriers
                 or that only store to memory do not make any previous
                 stores necessary.  */
              if (callee != NULL_TREE
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
                      || (DECL_FUNCTION_CODE (callee)
                          == BUILT_IN_ALLOCA_WITH_ALIGN)
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
                continue;

              /* Calls implicitly load from memory, their arguments
                 in addition may explicitly perform memory loads.  */
              mark_all_reaching_defs_necessary (stmt);
              for (i = 0; i < gimple_call_num_args (stmt); ++i)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if (TREE_CODE (arg) == SSA_NAME
                      || is_gimple_min_invariant (arg))
                    continue;
                  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
                    arg = TREE_OPERAND (arg, 0);
                  if (!ref_may_be_aliased (arg))
                    mark_aliased_reaching_defs_necessary (stmt, arg);
                }
            }
          else if (gimple_assign_single_p (stmt))
            {
              tree rhs;
              /* If this is a load mark things necessary.  */
              rhs = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
            {
              tree rhs = gimple_return_retval (return_stmt);
              /* A return statement may perform a load.  */
              if (rhs
                  && TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
            {
              unsigned i;
              mark_all_reaching_defs_necessary (stmt);
              /* Inputs may perform loads.  */
              for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
                {
                  tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
                  if (TREE_CODE (op) != SSA_NAME
                      && !is_gimple_min_invariant (op)
                      && TREE_CODE (op) != CONSTRUCTOR
                      && !ref_may_be_aliased (op))
                    mark_aliased_reaching_defs_necessary (stmt, op);
                }
            }
          else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
            {
              /* The beginning of a transaction is a memory barrier.  */
              /* ??? If we were really cool, we'd only be a barrier
                 for the memories touched within the transaction.  */
              mark_all_reaching_defs_necessary (stmt);
            }
          else
            gcc_unreachable ();

          /* If we over-used our alias oracle budget drop to simple
             mode.  The cost metric allows quadratic behavior
             (number of uses times number of may-defs queries) up to
             a constant maximal number of queries and after that falls back
             to super-linear complexity.  */
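          /* Concretely: the fallback triggers only after more than
             128 * 128 = 16384 queries in total, and only if the total
             exceeds 32 times both the longest single walk and the number
             of walks, i.e. the average walk length exceeds 32.  */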
          if (/* Constant but quadratic for small functions.  */
              total_chain > 128 * 128
              /* Linear in the number of may-defs.  */
              && total_chain > 32 * longest_chain
              /* Linear in the number of uses.  */
              && total_chain > nr_walks * 32)
            {
              chain_ovfl = true;
              if (visited)
                bitmap_clear (visited);
            }
        }
    }
}
/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gphi *phi;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi.phi ();

      /* We do not track necessity of virtual PHI nodes.  Instead do
         very simple dead PHI removal here.  */
      if (virtual_operand_p (gimple_phi_result (phi)))
        {
          /* Virtual PHI nodes with one or identical arguments
             can be removed.  */
          if (degenerate_phi_p (phi))
            {
              tree vdef = gimple_phi_result (phi);
              tree vuse = gimple_phi_arg_def (phi, 0);

              use_operand_p use_p;
              imm_use_iterator iter;
              gimple *use_stmt;
              FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, vuse);
              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
                  && TREE_CODE (vuse) == SSA_NAME)
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
            }
          else
            gimple_set_plf (phi, STMT_NECESSARY, true);
        }

      if (!gimple_plf (phi, STMT_NECESSARY))
        {
          something_changed = true;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Deleting : ");
              print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          remove_phi_node (&gsi, true);
          stats.removed_phis++;
          continue;
        }

      gsi_next (&gsi);
    }
  return something_changed;
}
/* Remove dead statement pointed to by iterator I.  Receives the basic block
   BB containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
  gimple *stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we need to
     update the CFG.  We can choose any of the edges out of BB as long as
     we are sure not to close infinite loops.  This is done by always
     choosing the edge closer to exit in inverted_post_order_compute
     order.  */
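  /* For instance (purely illustrative): if BB ends in "if (cond)" with one
     edge towards a loop latch and one towards the loop exit, keeping only
     the edge to the latch could turn a terminating loop into an endless
     one.  The inverted post order computed below, seeded from blocks with
     live statements, prefers the successor closer to a live statement or
     to the exit, which avoids that.  */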
  if (is_ctrl_stmt (stmt))
    {
      edge_iterator ei;
      edge e = NULL, e2;

      /* See if there is only one non-abnormal edge.  */
      if (single_succ_p (bb))
        e = single_succ_edge (bb);
      /* Otherwise choose one that is closer to a bb with a live statement
         in it.  To be able to choose one, we compute an inverted post order
         starting from all BBs with live statements.  */
      if (!e)
        {
          if (!bb_postorder)
            {
              int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
              int postorder_num
                = inverted_post_order_compute (postorder,
                                               &bb_contains_live_stmts);
              bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
              for (int i = 0; i < postorder_num; ++i)
                bb_postorder[postorder[i]] = i;
              free (postorder);
            }
          FOR_EACH_EDGE (e2, ei, bb->succs)
            if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
                || bb_postorder [e->dest->index]
                   < bb_postorder [e2->dest->index])
              e = e2;
        }
      gcc_assert (e);
      e->probability = REG_BR_PROB_BASE;
      e->count = bb->count;

      /* The edge is no longer associated with a conditional, so it does
         not have TRUE/FALSE flags.
         We are also safe to drop EH/ABNORMAL flags and turn them into
         normal control flow, because we know that all the destinations
         (including those odd edges) are equivalent for program
         execution.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE | EDGE_EH | EDGE_ABNORMAL);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
        if (e != e2)
          {
            cfg_altered = true;
            /* If we made a BB unconditionally exit a loop or removed
               an entry into an irreducible region, then this transform
               alters the set of BBs in the loop.  Schedule a fixup.  */
            if (loop_exit_edge_p (bb->loop_father, e)
                || (e2->dest->flags & BB_IRREDUCIBLE_LOOP))
              loops_state_set (LOOPS_NEED_FIXUP);
            remove_edge (e2);
          }
        else
          ei_next (&ei);
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
          && !DECL_IGNORED_P (lhs)
          && is_gimple_reg_type (TREE_TYPE (lhs))
          && !is_global_var (lhs)
          && !DECL_HAS_VALUE_EXPR_P (lhs))
        {
          tree rhs = gimple_assign_rhs1 (stmt);
          gdebug *note
            = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
          gsi_insert_after (i, note, GSI_SAME_STMT);
        }
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}
/* Helper for maybe_optimize_arith_overflow.  Find in *TP if there are any
   uses of DATA (an SSA_NAME) other than a REALPART_EXPR referencing it.  */

static tree
find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
{
  if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
    *walk_subtrees = 0;
  if (*tp == (tree) data)
    return *tp;
  return NULL_TREE;
}


/* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
   but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
   into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
   uses.  */
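/* For example (illustrative GIMPLE): given

     _1 = ADD_OVERFLOW (a_2, b_3);
     sum_4 = REALPART_EXPR <_1>;

   where IMAGPART_EXPR <_1> is never used, the internal call is replaced by
   a plain unsigned addition wrapped in a COMPLEX_EXPR with a zero overflow
   part, which later passes can simplify further.  */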
static void
maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
                               enum tree_code subcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);

  if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
    return;

  imm_use_iterator imm_iter;
  use_operand_p use_p;
  bool has_debug_uses = false;
  bool has_realpart_uses = false;
  bool has_other_uses = false;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
    {
      gimple *use_stmt = USE_STMT (use_p);
      if (is_gimple_debug (use_stmt))
        has_debug_uses = true;
      else if (is_gimple_assign (use_stmt)
               && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
               && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
        has_realpart_uses = true;
      else
        {
          has_other_uses = true;
          break;
        }
    }

  if (!has_realpart_uses || has_other_uses)
    return;

  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  tree type = TREE_TYPE (TREE_TYPE (lhs));
  tree utype = type;
  if (!TYPE_UNSIGNED (type))
    utype = build_nonstandard_integer_type (TYPE_PRECISION (type), 1);
  tree result = fold_build2_loc (loc, subcode, utype,
                                 fold_convert_loc (loc, utype, arg0),
                                 fold_convert_loc (loc, utype, arg1));
  result = fold_convert_loc (loc, type, result);

  if (has_debug_uses)
    {
      gimple *use_stmt;
      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
        {
          if (!gimple_debug_bind_p (use_stmt))
            continue;
          tree v = gimple_debug_bind_get_value (use_stmt);
          if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
            {
              gimple_debug_bind_reset_value (use_stmt);
              update_stmt (use_stmt);
            }
        }
    }

  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
    result = drop_tree_overflow (result);
  tree overflow = build_zero_cst (type);
  tree ctype = build_complex_type (type);
  if (TREE_CODE (result) == INTEGER_CST)
    result = build_complex (ctype, result, overflow);
  else
    result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
                         ctype, result, overflow);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Transforming call: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "because the overflow result is never used into: ");
      print_generic_stmt (dump_file, result, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  if (!update_call_from_tree (gsi, result))
    gimplify_and_update_call_from_tree (gsi, result);
}
/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple *stmt;
  tree call;
  vec<basic_block> h;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS,
                                single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

  while (h.length ())
    {
      bb = h.pop ();

      /* Remove dead statements.  */
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
        {
          stmt = gsi_stmt (gsi);

          psi = gsi;
          gsi_prev (&psi);

          stats.total++;

          /* We can mark a call to free as not necessary if the
             defining statement of its argument is not necessary
             (and thus is getting removed).  */
          if (gimple_plf (stmt, STMT_NECESSARY)
              && gimple_call_builtin_p (stmt, BUILT_IN_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              if (TREE_CODE (ptr) == SSA_NAME)
                {
                  gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
                  if (!gimple_nop_p (def_stmt)
                      && !gimple_plf (def_stmt, STMT_NECESSARY))
                    gimple_set_plf (stmt, STMT_NECESSARY, false);
                }
              /* We did not propagate necessity for free calls fed
                 by an allocation function, to allow unnecessary
                 alloc-free sequence elimination.  For instrumented
                 calls it also means we did not mark the bounds producer
                 as necessary and it is time to do it in case the free
                 call is not removed.  */
              if (gimple_call_with_bounds_p (stmt))
                {
                  gimple *bounds_def_stmt;
                  tree bounds = gimple_call_arg (stmt, 1);
                  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
                  bounds_def_stmt = SSA_NAME_DEF_STMT (bounds);
                  if (bounds_def_stmt
                      && !gimple_plf (bounds_def_stmt, STMT_NECESSARY))
                    gimple_set_plf (bounds_def_stmt, STMT_NECESSARY,
                                    gimple_plf (stmt, STMT_NECESSARY));
                }
            }

          /* If GSI is not necessary then remove it.  */
          if (!gimple_plf (stmt, STMT_NECESSARY))
            {
              /* Keep clobbers that we can keep live, live.  */
              if (gimple_clobber_p (stmt))
                {
                  ssa_op_iter iter;
                  use_operand_p use_p;
                  bool dead = false;
                  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
                    {
                      tree name = USE_FROM_PTR (use_p);
                      if (!SSA_NAME_IS_DEFAULT_DEF (name)
                          && !bitmap_bit_p (processed,
                                            SSA_NAME_VERSION (name)))
                        {
                          dead = true;
                          break;
                        }
                    }
                  if (!dead)
                    continue;
                }
              if (!is_gimple_debug (stmt))
                something_changed = true;
              remove_dead_stmt (&gsi, bb);
            }
          else if (is_gimple_call (stmt))
            {
              tree name = gimple_call_lhs (stmt);

              notice_special_calls (as_a <gcall *> (stmt));

              /* When the LHS of var = call (); is dead, simplify it into
                 call (); saving one operand.  */
              if (name
                  && TREE_CODE (name) == SSA_NAME
                  && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
                  /* Avoid doing so for allocation calls which we
                     did not mark as necessary, it will confuse the
                     special logic we apply to malloc/free pair removal.  */
                  && (!(call = gimple_call_fndecl (stmt))
                      || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
                      || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
                          && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
                          && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
                          && DECL_FUNCTION_CODE (call) != BUILT_IN_ALLOCA
                          && (DECL_FUNCTION_CODE (call)
                              != BUILT_IN_ALLOCA_WITH_ALIGN)))
                  /* Avoid doing so for bndret calls for the same reason.  */
                  && !chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
                {
                  something_changed = true;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Deleting LHS of call: ");
                      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
                      fprintf (dump_file, "\n");
                    }

                  gimple_call_set_lhs (stmt, NULL_TREE);
                  maybe_clean_or_replace_eh_stmt (stmt, stmt);
                  update_stmt (stmt);
                  release_ssa_name (name);

                  /* GOMP_SIMD_LANE without lhs is not needed.  */
                  if (gimple_call_internal_p (stmt)
                      && gimple_call_internal_fn (stmt) == IFN_GOMP_SIMD_LANE)
                    remove_dead_stmt (&gsi, bb);
                }
              else if (gimple_call_internal_p (stmt))
                switch (gimple_call_internal_fn (stmt))
                  {
                  case IFN_ADD_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
                    break;
                  case IFN_SUB_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
                    break;
                  case IFN_MUL_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
                    break;
                  default:
                    break;
                  }
            }
        }
    }

  h.release ();
  /* Since we don't track liveness of virtual PHI nodes, it is possible that
     we rendered some PHI nodes unreachable while they are still in use.
     Mark them for renaming.  */
  if (cfg_altered)
    {
      basic_block prev_bb;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
           bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
        {
          prev_bb = bb->prev_bb;

          if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
              || !(bb->flags & BB_REACHABLE))
            {
              for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
                   gsi_next (&gsi))
                if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
                  {
                    bool found = false;
                    imm_use_iterator iter;

                    FOR_EACH_IMM_USE_STMT (stmt, iter,
                                           gimple_phi_result (gsi.phi ()))
                      {
                        if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
                          continue;
                        if (gimple_code (stmt) == GIMPLE_PHI
                            || gimple_plf (stmt, STMT_NECESSARY))
                          {
                            found = true;
                            BREAK_FROM_IMM_USE_STMT (iter);
                          }
                      }
                    if (found)
                      mark_virtual_phi_result_for_renaming (gsi.phi ());
                  }

              if (!(bb->flags & BB_REACHABLE))
                {
                  /* Speed up the removal of blocks that don't
                     dominate others.  Walking backwards, this should
                     be the common case.  ???  Do we need to recompute
                     dominators because of cfg_altered?  */
                  if (!MAY_HAVE_DEBUG_STMTS
                      || !first_dom_son (CDI_DOMINATORS, bb))
                    delete_basic_block (bb);
                  else
                    {
                      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

                      while (h.length ())
                        {
                          bb = h.pop ();
                          prev_bb = bb->prev_bb;
                          /* Rearrangements to the CFG may have failed
                             to update the dominators tree, so that
                             formerly-dominated blocks are now
                             otherwise reachable.  */
                          if (!!(bb->flags & BB_REACHABLE))
                            continue;
                          delete_basic_block (bb);
                        }

                      h.release ();
                    }
                }
            }
        }
    }

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  if (bb_postorder)
    free (bb_postorder);
  bb_postorder = NULL;

  return something_changed;
}
/* Print out removed statement statistics.  */

static void
print_stats (void)
{
  float percg;

  percg = ((float) stats.removed / (float) stats.total) * 100;
  fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
           stats.removed, stats.total, (int) percg);

  if (stats.total_phis == 0)
    percg = 0;
  else
    percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;

  fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
           stats.removed_phis, stats.total_phis, (int) percg);
}
/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (last_stmt_necessary);
      bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (bb_contains_live_stmts);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  bitmap_clear (processed);

  worklist.create (64);
  cfg_altered = false;
}

/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      delete cd;
      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
      sbitmap_free (bb_contains_live_stmts);
      bb_contains_live_stmts = NULL;
    }

  sbitmap_free (processed);

  worklist.release ();
}
/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
          the dominance info is not invalidated after DCE1.  This is
          not an issue right now because we only run aggressive DCE
          as the last tree SSA pass, but keep this in mind when you
          start experimenting with pass ordering.  */

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  bool something_changed = 0;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve the chances that a loop
     will be proved finite in testcases such as loop-15.c and loop-24.c.  */
  if (aggressive)
    loop_optimizer_init (LOOPS_NORMAL
                         | LOOPS_HAVE_RECORDED_EXITS);

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      calculate_dominance_info (CDI_POST_DOMINATORS);
      cd = new control_dependences ();

      visited_control_parents =
        sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (aggressive);

  if (aggressive)
    loop_optimizer_finalize ();

  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (aggressive);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  if (something_changed)
    {
      free_numbers_of_iterations_estimates (cfun);
      if (scev_initialized_p ())
        scev_reset ();
      return TODO_update_ssa | TODO_cleanup_cfg;
    }
  return 0;
}
/* Pass entry points.  */
static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}

namespace {

const pass_data pass_data_dce =
{
  GIMPLE_PASS, /* type */
  "dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_dce : public gimple_opt_pass
{
public:
  pass_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_dce (); }

}; // class pass_dce

} // anon namespace

gimple_opt_pass *
make_pass_dce (gcc::context *ctxt)
{
  return new pass_dce (ctxt);
}

namespace {

const pass_data pass_data_cd_dce =
{
  GIMPLE_PASS, /* type */
  "cddce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CD_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cd_dce : public gimple_opt_pass
{
public:
  pass_cd_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cd_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_cd_dce (); }

}; // class pass_cd_dce

} // anon namespace

gimple_opt_pass *
make_pass_cd_dce (gcc::context *ctxt)
{
  return new pass_cd_dce (ctxt);
}