/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002-2014 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output.  "Dead statements" have no impact
   on the program's output, while "necessary statements" may have
   impact on the output.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */
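
/* As a small, purely illustrative example (not taken from any testcase),
   consider:

     x = a + b;   <-- dead: x is never used afterwards
     y = c;
     return y;

   Phase 1 marks the return statement as necessary, phase 2 propagates
   necessity to the definition of y (and in turn to c), and phase 3
   deletes the assignment to x, whose value never reaches the output.  */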

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "tree.h"
#include "calls.h"
#include "gimple-pretty-print.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "flags.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "tree-chkp.h"
#include "tree-ssa-propagate.h"
#include "gimple-fold.h"

static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1

static vec<gimple> worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static control_dependences *cd;
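
/* As an illustrative example (hypothetical block names), in

     A: if (p) goto B; else goto C;
     B: x = 1;
     C: ...

   block B is control dependent on the edge A->B: taking that edge
   guarantees B executes, while taking A->C bypasses it.  So the bitmap
   for B would have the bit for edge A->B set.  */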

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;


/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */

static inline void
mark_stmt_necessary (gimple stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    worklist.safe_push (stmt);
  if (bb_contains_live_stmts && !is_gimple_debug (stmt))
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}


/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (bitmap_bit_p (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
		  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  bitmap_set_bit (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op, 0);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
  worklist.safe_push (stmt);
}


/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
{
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement could throw, it can be deemed necessary.  */
  if (cfun->can_throw_non_call_exceptions
      && !cfun->can_delete_dead_exceptions
      && stmt_could_throw_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
	tree callee = gimple_call_fndecl (stmt);
	if (callee != NULL_TREE
	    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_ALIGNED_ALLOC:
	    case BUILT_IN_CALLOC:
	    case BUILT_IN_ALLOCA:
	    case BUILT_IN_ALLOCA_WITH_ALIGN:
	      return;

	    default:;
	    }
	/* Most, but not all function calls are required.  Function calls
	   that produce no result and have no side effects (i.e. const or
	   pure functions) are unnecessary.  */
	if (gimple_has_side_effects (stmt))
	  {
	    mark_stmt_necessary (stmt, true);
	    return;
	  }
	if (!gimple_call_lhs (stmt))
	  return;
	break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
	 easily locate the debug temp bind stmt for a use thereof,
	 we could refrain from marking all debug temps here, and
	 mark them only if they're used.  */
      if (!gimple_debug_bind_p (stmt)
	  || gimple_debug_bind_has_value_p (stmt)
	  || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
	mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
	mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      if (TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
	  && TREE_CLOBBER_P (gimple_assign_rhs1 (stmt)))
	return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (stmt_may_clobber_global_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}


/* Mark the last statement of BB as necessary.  */

static void
mark_last_stmt_necessary (basic_block bb)
{
  gimple stmt = last_stmt (bb);

  bitmap_set_bit (last_stmt_necessary, bb->index);
  bitmap_set_bit (bb_contains_live_stmts, bb->index);

  /* We actually mark the statement only if it is a control statement.  */
  if (stmt && is_ctrl_stmt (stmt))
    mark_stmt_necessary (stmt, true);
}


/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));

  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;

  EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
			    0, edge_number, bi)
    {
      basic_block cd_bb = cd->get_edge (edge_number)->src;

      if (ignore_self && cd_bb == bb)
	{
	  skipped = true;
	  continue;
	}

      if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
	mark_last_stmt_necessary (cd_bb);
    }

  if (!skipped)
    bitmap_set_bit (visited_control_parents, bb->index);
}


/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary; otherwise control dependence analysis is used to determine
   which edges control statements depend on.  */

static void
find_obviously_necessary_stmts (bool aggressive)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple phi, stmt;
  int flags;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  phi = gsi_stmt (gsi);
	  gimple_set_plf (phi, STMT_NECESSARY, false);
	}

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  gimple_set_plf (stmt, STMT_NECESSARY, false);
	  mark_stmt_if_obviously_necessary (stmt, aggressive);
	}
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent the empty possibly infinite loops from being removed.  */
  if (aggressive)
    {
      struct loop *loop;
      scev_initialize ();
      if (mark_irreducible_loops ())
	FOR_EACH_BB_FN (bb, cfun)
	  {
	    edge_iterator ei;
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      if ((e->flags & EDGE_DFS_BACK)
		  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
		{
		  if (dump_file)
		    fprintf (dump_file,
			     "Marking back edge of irreducible loop %i->%i\n",
			     e->src->index, e->dest->index);
		  mark_control_dependent_edges_necessary (e->dest, false);
		}
	  }

      FOR_EACH_LOOP (loop, 0)
	if (!finite_loop_p (loop))
	  {
	    if (dump_file)
	      fprintf (dump_file, "cannot prove finiteness of loop %i\n",
		       loop->num);
	    mark_control_dependent_edges_necessary (loop->latch, false);
	  }
      scev_finalize ();
    }
}


/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  return !(DECL_P (ref)
	   && !may_be_aliased (ref));
}
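
/* For example (illustrative only): given a local `struct S s' whose
   address is never taken, a reference like `s.f' strips down to the
   DECL `s', which may_be_aliased reports as not aliased, so this
   returns false.  An indirect reference such as `*p_1' never reaches
   a DECL and is conservatively treated as aliased.  */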

static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;

/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ??? We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      HOST_WIDE_INT size, offset, max_size;
      ao_ref_base (ref);
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == ref->base does not always hold.  */
      if (base == ref->base)
	{
	  /* For a must-alias check we need to be able to constrain
	     the accesses properly.  */
	  if (size != -1 && size == max_size
	      && ref->max_size != -1)
	    {
	      if (offset <= ref->offset
		  && offset + size >= ref->offset + ref->max_size)
		return true;
	    }
	  /* Or they need to be exactly the same.  */
	  else if (ref->ref
		   /* Make sure there is no induction variable involved
		      in the references (gcc.c-torture/execute/pr42142.c).
		      The simplest way is to check if the kill dominates
		      the use.  */
		   /* But when both are in the same block we cannot
		      easily tell whether we came from a backedge
		      unless we decide to compute stmt UIDs
		      (see PR58246).  */
		   && (basic_block) data != gimple_bb (def_stmt)
		   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
				      gimple_bb (def_stmt))
		   && operand_equal_p (ref->ref, lhs, 0))
	    return true;
	}
    }

  /* Otherwise keep walking.  */
  return false;
}

static void
mark_aliased_reaching_defs_necessary (gimple stmt, tree ref)
{
  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
			      mark_aliased_reaching_defs_necessary_1,
			      gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}

/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
				    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
		  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
	return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (is_gimple_call (def_stmt))
    {
      tree callee = gimple_call_fndecl (def_stmt);
      if (callee != NULL_TREE
	  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_MALLOC:
	  case BUILT_IN_ALIGNED_ALLOC:
	  case BUILT_IN_CALLOC:
	  case BUILT_IN_ALLOCA:
	  case BUILT_IN_ALLOCA_WITH_ALIGN:
	  case BUILT_IN_FREE:
	    return false;

	  default:;
	  }
    }

  mark_operand_necessary (vdef);

  return false;
}

static void
mark_all_reaching_defs_necessary (gimple stmt)
{
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
		      mark_all_reaching_defs_necessary_1, NULL, &visited);
}

/* Return true if PHI is a PHI node with a single argument or with
   identical arguments, and thus can be removed.  */
static bool
degenerate_phi_p (gimple phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}
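
/* E.g. (illustrative only) x_3 = PHI <a_1(2), a_1(3)> is degenerate:
   both arguments are a_1, so every use of x_3 could just as well use
   a_1 directly.  A single-argument PHI is trivially degenerate.  */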

/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode, AGGRESSIVE is false and control dependences
   are not taken into account.  */

static void
propagate_necessity (bool aggressive)
{
  gimple stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (worklist.length () > 0)
    {
      /* Take STMT from worklist.  */
      stmt = worklist.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "processing: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}

      if (aggressive)
	{
	  /* Mark the last statement of the basic blocks on which the block
	     containing STMT is control dependent, but only if we haven't
	     already done so.  */
	  basic_block bb = gimple_bb (stmt);
	  if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	      && !bitmap_bit_p (visited_control_parents, bb->index))
	    mark_control_dependent_edges_necessary (bb, false);
	}

      if (gimple_code (stmt) == GIMPLE_PHI
	  /* We do not process virtual PHI nodes nor do we track their
	     necessity.  */
	  && !virtual_operand_p (gimple_phi_result (stmt)))
	{
	  /* PHI nodes are somewhat special in that each PHI alternative has
	     data and control dependencies.  All the statements feeding the
	     PHI node's arguments are always necessary.  In aggressive mode,
	     we also consider the control dependent edges leading to the
	     predecessor block associated with each PHI alternative as
	     necessary.  */
	  size_t k;

	  for (k = 0; k < gimple_phi_num_args (stmt); k++)
	    {
	      tree arg = PHI_ARG_DEF (stmt, k);
	      if (TREE_CODE (arg) == SSA_NAME)
		mark_operand_necessary (arg);
	    }

	  /* For PHI operands it matters from where the control flow arrives
	     to the BB.  Consider the following example:

	     a=exp1;
	     b=exp2;
	     if (test)
		;
	     else
		;
	     c=PHI(a,b)

	     We need to mark the control dependences of the empty basic
	     blocks, since they contain the computations of the PHI operands.

	     Doing so is too restrictive in the case the predecessor
	     block is in a loop.  Consider:

	     if (b)
	       {
		 int i;
		 for (i = 0; i<1000; ++i)
		   ;
		 j = 0;
	       }
	     return j;

	     There is a PHI for J in the BB containing the return statement.
	     In this case the control dependences of the predecessor block
	     (which is within the empty loop) also contain the block
	     determining the number of iterations, and that would prevent
	     the empty loop from being removed.

	     This scenario can be avoided by splitting critical edges.
	     To save the critical edge splitting pass we identify how the
	     control dependence would look if the edge were split.

	     Consider the modified CFG created from the current CFG by
	     splitting edge B->C.  In the postdominance tree of the modified
	     CFG, C' is always a child of C.  There are two cases how the
	     children of C' can look:

	     1) C' is a leaf.

		In this case the only basic block C' is control dependent
		on is B.

	     2) C' has a single child that is B.

		In this case the control dependence of C' is the same as the
		control dependence of B in the original CFG, except for
		block B itself (since C' postdominates B in the modified
		CFG).

	     Now how to decide which case happens?  There are two basic
	     options:

		a) C postdominates B.  Then C immediately postdominates B,
		   and case 2 happens iff there is no other way from B to C
		   except the edge B->C.

		   There is another way from B to C iff there is a successor
		   of B that is not postdominated by B.  Testing this
		   condition is somewhat expensive, because we need to
		   iterate over all successors of B.  We are safe to assume
		   that this does not happen: we will mark B as needed when
		   processing the other path from B to C that is control
		   dependent on B, and marking the control dependencies of B
		   itself is harmless because they will be processed anyway
		   after processing the control statement in B.

		b) C does not postdominate B.  Then case 1 always happens,
		   since there is a path from C to the exit that does not go
		   through B and thus also not through C'.  */

	  if (aggressive && !degenerate_phi_p (stmt))
	    {
	      for (k = 0; k < gimple_phi_num_args (stmt); k++)
		{
		  basic_block arg_bb = gimple_phi_arg_edge (stmt, k)->src;

		  if (gimple_bb (stmt)
		      != get_immediate_dominator (CDI_POST_DOMINATORS,
						  arg_bb))
		    {
		      if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
			mark_last_stmt_necessary (arg_bb);
		    }
		  else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
			   && !bitmap_bit_p (visited_control_parents,
					     arg_bb->index))
		    mark_control_dependent_edges_necessary (arg_bb, true);
		}
	    }
	}
      else
	{
	  /* Propagate through the operands.  Examine all the USE, VUSE and
	     VDEF operands in this statement.  Mark all the statements
	     which feed this statement's uses as necessary.  */
	  ssa_op_iter iter;
	  tree use;

	  /* If this is a call to free which is directly fed by an
	     allocation function, do not mark that necessary through
	     processing the argument.  */
	  if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      gimple def_stmt;
	      tree def_callee;
	      /* If the pointer we free is defined by an allocation
		 function do not add the call to the worklist.  */
	      if (TREE_CODE (ptr) == SSA_NAME
		  && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
		  && (def_callee = gimple_call_fndecl (def_stmt))
		  && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
		      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
		      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
		{
		  gimple bounds_def_stmt;
		  tree bounds;

		  /* For instrumented calls we should also check that the
		     used bounds are returned by the same allocation call.  */
		  if (!gimple_call_with_bounds_p (stmt)
		      || ((bounds = gimple_call_arg (stmt, 1))
			  && TREE_CODE (bounds) == SSA_NAME
			  && (bounds_def_stmt = SSA_NAME_DEF_STMT (bounds))
			  && chkp_gimple_call_builtin_p (bounds_def_stmt,
							 BUILT_IN_CHKP_BNDRET)
			  && gimple_call_arg (bounds_def_stmt, 0) == ptr))
		    continue;
		}
	    }

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
	    mark_operand_necessary (use);

	  use = gimple_vuse (stmt);
	  if (!use)
	    continue;

	  /* If we dropped to simple mode make all immediately
	     reachable definitions necessary.  */
	  if (chain_ovfl)
	    {
	      mark_all_reaching_defs_necessary (stmt);
	      continue;
	    }

	  /* For statements that may load from memory (have a VUSE) we
	     have to mark all reaching (may-)definitions as necessary.
	     We partition this task into two cases:
	      1) explicit loads based on decls that are not aliased
	      2) implicit loads (like calls) and explicit loads not
		 based on decls that are not aliased (like indirect
		 references or loads from globals)
	     For 1) we mark all reaching may-defs as necessary, stopping
	     at dominating kills.  For 2) we want to mark all dominating
	     references necessary, but non-aliased ones which we handle
	     in 1).  By keeping a global visited bitmap for references
	     we walk for 2) we avoid quadratic behavior for those.  */
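	  /* To illustrate the two cases (the names below are made up):
	     `tmp = s.f;' with a non-aliased `s' falls under 1) and walks
	     may-defs only up to a dominating kill such as `s.f = 0;',
	     while `tmp = *p_1;' or `tmp = global;' falls under 2) and
	     must treat every reaching may-def as necessary.  */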

	  if (is_gimple_call (stmt))
	    {
	      tree callee = gimple_call_fndecl (stmt);
	      unsigned i;

	      /* Calls to functions that are merely acting as barriers
		 or that only store to memory do not make any previous
		 stores necessary.  */
	      if (callee != NULL_TREE
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
		      || (DECL_FUNCTION_CODE (callee)
			  == BUILT_IN_ALLOCA_WITH_ALIGN)
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
		continue;

	      /* Calls implicitly load from memory, their arguments
		 in addition may explicitly perform memory loads.  */
	      mark_all_reaching_defs_necessary (stmt);
	      for (i = 0; i < gimple_call_num_args (stmt); ++i)
		{
		  tree arg = gimple_call_arg (stmt, i);
		  if (TREE_CODE (arg) == SSA_NAME
		      || is_gimple_min_invariant (arg))
		    continue;
		  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
		    arg = TREE_OPERAND (arg, 0);
		  if (!ref_may_be_aliased (arg))
		    mark_aliased_reaching_defs_necessary (stmt, arg);
		}
	    }
	  else if (gimple_assign_single_p (stmt))
	    {
	      tree rhs;
	      /* If this is a load mark things necessary.  */
	      rhs = gimple_assign_rhs1 (stmt);
	      if (TREE_CODE (rhs) != SSA_NAME
		  && !is_gimple_min_invariant (rhs)
		  && TREE_CODE (rhs) != CONSTRUCTOR)
		{
		  if (!ref_may_be_aliased (rhs))
		    mark_aliased_reaching_defs_necessary (stmt, rhs);
		  else
		    mark_all_reaching_defs_necessary (stmt);
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_RETURN)
	    {
	      tree rhs = gimple_return_retval (stmt);
	      /* A return statement may perform a load.  */
	      if (rhs
		  && TREE_CODE (rhs) != SSA_NAME
		  && !is_gimple_min_invariant (rhs)
		  && TREE_CODE (rhs) != CONSTRUCTOR)
		{
		  if (!ref_may_be_aliased (rhs))
		    mark_aliased_reaching_defs_necessary (stmt, rhs);
		  else
		    mark_all_reaching_defs_necessary (stmt);
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_ASM)
	    {
	      unsigned i;
	      mark_all_reaching_defs_necessary (stmt);
	      /* Inputs may perform loads.  */
	      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
		{
		  tree op = TREE_VALUE (gimple_asm_input_op (stmt, i));
		  if (TREE_CODE (op) != SSA_NAME
		      && !is_gimple_min_invariant (op)
		      && TREE_CODE (op) != CONSTRUCTOR
		      && !ref_may_be_aliased (op))
		    mark_aliased_reaching_defs_necessary (stmt, op);
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
	    {
	      /* The beginning of a transaction is a memory barrier.  */
	      /* ??? If we were really cool, we'd only be a barrier
		 for the memories touched within the transaction.  */
	      mark_all_reaching_defs_necessary (stmt);
	    }
	  else
	    gcc_unreachable ();

	  /* If we over-used our alias oracle budget drop to simple
	     mode.  The cost metric allows quadratic behavior
	     (number of uses times number of may-defs queries) up to
	     a constant maximal number of queries and after that falls back
	     to super-linear complexity.  */
	  if (/* Constant but quadratic for small functions.  */
	      total_chain > 128 * 128
	      /* Linear in the number of may-defs.  */
	      && total_chain > 32 * longest_chain
	      /* Linear in the number of uses.  */
	      && total_chain > nr_walks * 32)
	    {
	      chain_ovfl = true;
	      if (visited)
		bitmap_clear (visited);
	    }
	}
    }
}

/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gimple phi;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi_stmt (gsi);

      /* We do not track necessity of virtual PHI nodes.  Instead do
	 very simple dead PHI removal here.  */
      if (virtual_operand_p (gimple_phi_result (phi)))
	{
	  /* Virtual PHI nodes with one or identical arguments
	     can be removed.  */
	  if (degenerate_phi_p (phi))
	    {
	      tree vdef = gimple_phi_result (phi);
	      tree vuse = gimple_phi_arg_def (phi, 0);

	      use_operand_p use_p;
	      imm_use_iterator iter;
	      gimple use_stmt;
	      FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, vuse);
	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
		  && TREE_CODE (vuse) == SSA_NAME)
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
	    }
	  else
	    gimple_set_plf (phi, STMT_NECESSARY, true);
	}

      if (!gimple_plf (phi, STMT_NECESSARY))
	{
	  something_changed = true;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Deleting : ");
	      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  remove_phi_node (&gsi, true);
	  stats.removed_phis++;
	  continue;
	}

      gsi_next (&gsi);
    }
  return something_changed;
}

/* Forward edge E to the respective POST_DOM_BB and update PHIs.  */

static edge
forward_edge_to_pdom (edge e, basic_block post_dom_bb)
{
  gimple_stmt_iterator gsi;
  edge e2 = NULL;
  edge_iterator ei;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Redirecting edge %i->%i to %i\n", e->src->index,
	     e->dest->index, post_dom_bb->index);

  e2 = redirect_edge_and_branch (e, post_dom_bb);
  cfg_altered = true;

  /* If the edge was already around, no updating is necessary.  */
  if (e2 != e)
    return e2;

  if (!gimple_seq_empty_p (phi_nodes (post_dom_bb)))
    {
      /* We are sure that for every live PHI we are seeing a control
	 dependent BB.  This means that we can pick any edge to duplicate
	 PHI args from.  */
      FOR_EACH_EDGE (e2, ei, post_dom_bb->preds)
	if (e2 != e)
	  break;
      for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);)
	{
	  gimple phi = gsi_stmt (gsi);
	  tree op;
	  source_location locus;

	  /* PHIs for virtuals have no control dependency relation on them.
	     We are lost here and must force renaming of the symbol.  */
	  if (virtual_operand_p (gimple_phi_result (phi)))
	    {
	      mark_virtual_phi_result_for_renaming (phi);
	      remove_phi_node (&gsi, true);
	      continue;
	    }

	  /* Dead PHIs do not imply control dependency.  */
	  if (!gimple_plf (phi, STMT_NECESSARY))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  op = gimple_phi_arg_def (phi, e2->dest_idx);
	  locus = gimple_phi_arg_location (phi, e2->dest_idx);
	  add_phi_arg (phi, op, e, locus);
	  /* The resulting PHI, if not dead, can only be degenerate.  */
	  gcc_assert (degenerate_phi_p (phi));
	  gsi_next (&gsi);
	}
    }
  return e;
}

/* Remove dead statement pointed to by iterator I.  Receives the basic block
   BB containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
  gimple stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we also change
     the flow graph so that the current block will simply fall-thru to its
     immediate post-dominator.  The blocks we are circumventing will be
     removed by cleanup_tree_cfg if this change in the flow graph makes them
     unreachable.  */
  if (is_ctrl_stmt (stmt))
    {
      basic_block post_dom_bb;
      edge e, e2;
      edge_iterator ei;

      post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);

      e = find_edge (bb, post_dom_bb);

      /* If the edge is already there, try to use it.  This avoids the need
	 to update PHI nodes.  Also watch for cases where the post dominator
	 does not exist or is the exit block.  These can happen for infinite
	 loops as we create fake edges in the dominator tree.  */
      if (e)
	;
      else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
	e = EDGE_SUCC (bb, 0);
      else
	e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb);
      gcc_assert (e);
      e->probability = REG_BR_PROB_BASE;
      e->count = bb->count;

      /* The edge is no longer associated with a conditional, so it does
	 not have TRUE/FALSE flags.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
	if (e != e2)
	  {
	    cfg_altered = true;
	    remove_edge (e2);
	  }
	else
	  ei_next (&ei);
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
	  && !DECL_IGNORED_P (lhs)
	  && is_gimple_reg_type (TREE_TYPE (lhs))
	  && !is_global_var (lhs)
	  && !DECL_HAS_VALUE_EXPR_P (lhs))
	{
	  tree rhs = gimple_assign_rhs1 (stmt);
	  gimple note
	    = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
	  gsi_insert_after (i, note, GSI_SAME_STMT);
	}
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}

/* Helper for maybe_optimize_arith_overflow.  Find in *TP any uses of the
   SSA_NAME passed in DATA other than REALPART_EXPRs referencing it.  */

static tree
find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
{
  if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
    *walk_subtrees = 0;
  if (*tp == (tree) data)
    return *tp;
  return NULL_TREE;
}

/* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
   but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
   into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
   uses.  */

static void
maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
			       enum tree_code subcode)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);

  if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
    return;

  imm_use_iterator imm_iter;
  use_operand_p use_p;
  bool has_debug_uses = false;
  bool has_realpart_uses = false;
  bool has_other_uses = false;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
    {
      gimple use_stmt = USE_STMT (use_p);
      if (is_gimple_debug (use_stmt))
	has_debug_uses = true;
      else if (is_gimple_assign (use_stmt)
	       && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
	       && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
	has_realpart_uses = true;
      else
	{
	  has_other_uses = true;
	  break;
	}
    }

  if (!has_realpart_uses || has_other_uses)
    return;

  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  tree type = TREE_TYPE (TREE_TYPE (lhs));
  tree utype = type;
  if (!TYPE_UNSIGNED (type))
    utype = build_nonstandard_integer_type (TYPE_PRECISION (type), 1);
  tree result = fold_build2_loc (loc, subcode, utype,
				 fold_convert_loc (loc, utype, arg0),
				 fold_convert_loc (loc, utype, arg1));
  result = fold_convert_loc (loc, type, result);

  if (has_debug_uses)
    {
      gimple use_stmt;
      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
	{
	  if (!gimple_debug_bind_p (use_stmt))
	    continue;
	  tree v = gimple_debug_bind_get_value (use_stmt);
	  if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
	    {
	      gimple_debug_bind_reset_value (use_stmt);
	      update_stmt (use_stmt);
	    }
	}
    }

  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
    result = drop_tree_overflow (result);
  tree overflow = build_zero_cst (type);
  tree ctype = build_complex_type (type);
  if (TREE_CODE (result) == INTEGER_CST)
    result = build_complex (ctype, result, overflow);
  else
    result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
			 ctype, result, overflow);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Transforming call: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "because the overflow result is never used into: ");
      print_generic_stmt (dump_file, result, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  if (!update_call_from_tree (gsi, result))
    gimplify_and_update_call_from_tree (gsi, result);
}
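
/* As an illustrative sketch of the transformation above: for

     _1 = ADD_OVERFLOW (x_2, y_3);
     r_4 = REALPART_EXPR <_1>;

   with IMAGPART_EXPR <_1> (the overflow flag) unused, the call is
   replaced by a COMPLEX_EXPR of the plain unsigned addition and a zero
   overflow part, which subsequent folding reduces to an ordinary
   addition feeding r_4.  */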

/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple stmt;
  tree call;
  vec<basic_block> h;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS,
				single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

  while (h.length ())
    {
      bb = h.pop ();

      /* Remove dead statements.  */
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
	{
	  stmt = gsi_stmt (gsi);

	  psi = gsi;
	  gsi_prev (&psi);

	  stats.total++;

	  /* We can mark a call to free as not necessary if the
	     defining statement of its argument is not necessary
	     (and thus is getting removed).  */
	  if (gimple_plf (stmt, STMT_NECESSARY)
	      && gimple_call_builtin_p (stmt, BUILT_IN_FREE))
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      if (TREE_CODE (ptr) == SSA_NAME)
		{
		  gimple def_stmt = SSA_NAME_DEF_STMT (ptr);
		  if (!gimple_nop_p (def_stmt)
		      && !gimple_plf (def_stmt, STMT_NECESSARY))
		    gimple_set_plf (stmt, STMT_NECESSARY, false);
		}
	      /* We did not propagate necessity for free calls fed
		 by allocation functions to allow unnecessary
		 alloc-free sequence elimination.  For instrumented
		 calls it also means we did not mark the bounds producer
		 as necessary, and it is time to do it in case the free
		 call is not removed.  */
	      if (gimple_call_with_bounds_p (stmt))
		{
		  gimple bounds_def_stmt;
		  tree bounds = gimple_call_arg (stmt, 1);
		  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
		  bounds_def_stmt = SSA_NAME_DEF_STMT (bounds);
		  if (bounds_def_stmt
		      && !gimple_plf (bounds_def_stmt, STMT_NECESSARY))
		    gimple_set_plf (bounds_def_stmt, STMT_NECESSARY,
				    gimple_plf (stmt, STMT_NECESSARY));
		}
	    }

	  /* If GSI is not necessary then remove it.  */
	  if (!gimple_plf (stmt, STMT_NECESSARY))
	    {
	      if (!is_gimple_debug (stmt))
		something_changed = true;
	      remove_dead_stmt (&gsi, bb);
	    }
	  else if (is_gimple_call (stmt))
	    {
	      tree name = gimple_call_lhs (stmt);

	      notice_special_calls (stmt);

	      /* When the LHS of var = call (); is dead, simplify it into
		 call (); saving one operand.  */
	      if (name
		  && TREE_CODE (name) == SSA_NAME
		  && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
		  /* Avoid doing so for allocation calls which we
		     did not mark as necessary, it will confuse the
		     special logic we apply to malloc/free pair removal.  */
		  && (!(call = gimple_call_fndecl (stmt))
		      || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
		      || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
			  && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
			  && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
			  && DECL_FUNCTION_CODE (call) != BUILT_IN_ALLOCA
			  && (DECL_FUNCTION_CODE (call)
			      != BUILT_IN_ALLOCA_WITH_ALIGN)))
		  /* Avoid doing so for bndret calls for the same reason.  */
		  && !chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
		{
		  something_changed = true;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Deleting LHS of call: ");
		      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
		      fprintf (dump_file, "\n");
		    }

		  gimple_call_set_lhs (stmt, NULL_TREE);
		  maybe_clean_or_replace_eh_stmt (stmt, stmt);
		  update_stmt (stmt);
		  release_ssa_name (name);
		}
	      else if (gimple_call_internal_p (stmt))
		switch (gimple_call_internal_fn (stmt))
		  {
		  case IFN_ADD_OVERFLOW:
		    maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
		    break;
		  case IFN_SUB_OVERFLOW:
		    maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
		    break;
		  case IFN_MUL_OVERFLOW:
		    maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
		    break;
		  default:
		    break;
		  }
	    }
	}
    }

  h.release ();

  /* Since we don't track liveness of virtual PHI nodes, it is possible
     that we rendered some PHI nodes unreachable while they are still
     in use.  Mark them for renaming.  */
  if (cfg_altered)
    {
      basic_block prev_bb;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
	   bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
	{
	  prev_bb = bb->prev_bb;

	  if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
	      || !(bb->flags & BB_REACHABLE))
	    {
	      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
		   gsi_next (&gsi))
		if (virtual_operand_p (gimple_phi_result (gsi_stmt (gsi))))
		  {
		    bool found = false;
		    imm_use_iterator iter;

		    FOR_EACH_IMM_USE_STMT (stmt, iter,
					   gimple_phi_result (gsi_stmt (gsi)))
		      {
			if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
			  continue;
			if (gimple_code (stmt) == GIMPLE_PHI
			    || gimple_plf (stmt, STMT_NECESSARY))
			  {
			    found = true;
			    BREAK_FROM_IMM_USE_STMT (iter);
			  }
		      }
		    if (found)
		      mark_virtual_phi_result_for_renaming (gsi_stmt (gsi));
		  }

	      if (!(bb->flags & BB_REACHABLE))
		{
		  /* Speed up the removal of blocks that don't
		     dominate others.  Walking backwards, this should
		     be the common case.  ??? Do we need to recompute
		     dominators because of cfg_altered?  */
		  if (!MAY_HAVE_DEBUG_STMTS
		      || !first_dom_son (CDI_DOMINATORS, bb))
		    delete_basic_block (bb);
		  else
		    {
		      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

		      while (h.length ())
			{
			  bb = h.pop ();
			  prev_bb = bb->prev_bb;
			  /* Rearrangements to the CFG may have failed
			     to update the dominators tree, so that
			     formerly-dominated blocks are now
			     otherwise reachable.  */
			  if (!!(bb->flags & BB_REACHABLE))
			    continue;
			  delete_basic_block (bb);
			}

		      h.release ();
		    }
		}
	    }
	}
    }
  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  return something_changed;
}


/* Print out removed statement statistics.  */

static void
print_stats (void)
{
  float percg;

  percg = ((float) stats.removed / (float) stats.total) * 100;
  fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
	   stats.removed, stats.total, (int) percg);

  if (stats.total_phis == 0)
    percg = 0;
  else
    percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;

  fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
	   stats.removed_phis, stats.total_phis, (int) percg);
}

/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (last_stmt_necessary);
      bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (bb_contains_live_stmts);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  bitmap_clear (processed);

  worklist.create (64);
  cfg_altered = false;
}

/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      delete cd;
      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
      sbitmap_free (bb_contains_live_stmts);
      bb_contains_live_stmts = NULL;
    }

  sbitmap_free (processed);

  worklist.release ();
}

/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
	  the dominance info is not invalidated after DCE1.  This is
	  not an issue right now because we only run aggressive DCE
	  as the last tree SSA pass, but keep this in mind when you
	  start experimenting with pass ordering.  */

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  bool something_changed = false;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve the chances that a loop
     will be proved finite in testcases such as loop-15.c and loop-24.c.  */
  if (aggressive)
    loop_optimizer_init (LOOPS_NORMAL
			 | LOOPS_HAVE_RECORDED_EXITS);

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      calculate_dominance_info (CDI_POST_DOMINATORS);
      cd = new control_dependences (create_edge_list ());

      visited_control_parents =
	sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (aggressive);

  if (aggressive)
    loop_optimizer_finalize ();

  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (aggressive);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  if (something_changed)
    {
      free_numbers_of_iterations_estimates ();
      if (scev_initialized_p ())
	scev_reset ();
      return TODO_update_ssa | TODO_cleanup_cfg;
    }
  return 0;
}

/* Pass entry points.  */
static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}

namespace {

const pass_data pass_data_dce =
{
  GIMPLE_PASS, /* type */
  "dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_dce : public gimple_opt_pass
{
public:
  pass_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_dce (); }

}; // class pass_dce

} // anon namespace

gimple_opt_pass *
make_pass_dce (gcc::context *ctxt)
{
  return new pass_dce (ctxt);
}

namespace {

const pass_data pass_data_cd_dce =
{
  GIMPLE_PASS, /* type */
  "cddce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CD_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cd_dce : public gimple_opt_pass
{
public:
  pass_cd_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cd_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_cd_dce (); }

}; // class pass_cd_dce

} // anon namespace

gimple_opt_pass *
make_pass_cd_dce (gcc::context *ctxt)
{
  return new pass_cd_dce (ctxt);
}