gcc/tree-ssa-dce.c
1 /* Dead code elimination pass for the GNU compiler.
2 Copyright (C) 2002-2017 Free Software Foundation, Inc.
3 Contributed by Ben Elliston <bje@redhat.com>
4 and Andrew MacLeod <amacleod@redhat.com>
5 Adapted to use control dependence by Steven Bosscher, SUSE Labs.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 3, or (at your option) any
12 later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Dead code elimination.
25 References:
27 Building an Optimizing Compiler,
28 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
30 Advanced Compiler Design and Implementation,
31 Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.
33 Dead-code elimination is the removal of statements which have no
34 impact on the program's output. "Dead statements" have no impact
35 on the program's output, while "necessary statements" may have
36 impact on the output.
38 The algorithm consists of three phases:
39 1. Marking as necessary all statements known to be necessary,
40 e.g. most function calls, writing a value to memory, etc;
41 2. Propagating necessary statements, e.g., the statements
42 giving values to operands in necessary statements; and
43 3. Removing dead statements. */
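/* As a small illustration (hypothetical input, not from this file): in

     int f (int x) { int y = x + 1; return x; }

   the assignment to Y feeds no necessary statement, so phase 1 never
   marks it, phase 2 never reaches it, and phase 3 deletes it.  */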
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "backend.h"
49 #include "rtl.h"
50 #include "tree.h"
51 #include "gimple.h"
52 #include "cfghooks.h"
53 #include "tree-pass.h"
54 #include "ssa.h"
55 #include "gimple-pretty-print.h"
56 #include "fold-const.h"
57 #include "calls.h"
58 #include "cfganal.h"
59 #include "tree-eh.h"
60 #include "gimplify.h"
61 #include "gimple-iterator.h"
62 #include "tree-cfg.h"
63 #include "tree-ssa-loop-niter.h"
64 #include "tree-into-ssa.h"
65 #include "tree-dfa.h"
66 #include "cfgloop.h"
67 #include "tree-scalar-evolution.h"
68 #include "tree-chkp.h"
69 #include "tree-ssa-propagate.h"
70 #include "gimple-fold.h"
72 static struct stmt_stats
74 int total;
75 int total_phis;
76 int removed;
77 int removed_phis;
78 } stats;
80 #define STMT_NECESSARY GF_PLF_1
82 static vec<gimple *> worklist;
84 /* Vector indicating an SSA name has already been processed and marked
85 as necessary. */
86 static sbitmap processed;
88 /* Vector indicating that the last statement of a basic block has already
89 been marked as necessary. */
90 static sbitmap last_stmt_necessary;
92 /* Vector indicating that BB contains statements that are live. */
93 static sbitmap bb_contains_live_stmts;
95 /* Before we can determine whether a control branch is dead, we need to
96 compute which blocks are control dependent on which edges.
98 We expect each block to be control dependent on very few edges so we
99 use a bitmap for each block recording its edges. An array holds the
100 bitmap. The Ith bit in the bitmap is set if that block is dependent
101 on the Ith edge. */
102 static control_dependences *cd;
104 /* Vector indicating that a basic block has already had all the edges
105 processed that it is control dependent on. */
106 static sbitmap visited_control_parents;
108 /* TRUE if this pass alters the CFG (by removing control statements).
109 FALSE otherwise.
111 If this pass alters the CFG, then it will arrange for the dominators
112 to be recomputed. */
113 static bool cfg_altered;
115 /* When non-NULL holds map from basic block index into the postorder. */
116 static int *bb_postorder;
119 /* If STMT is not already marked necessary, mark it, and add it to the
120 worklist if ADD_TO_WORKLIST is true. */
122 static inline void
123 mark_stmt_necessary (gimple *stmt, bool add_to_worklist)
125 gcc_assert (stmt);
127 if (gimple_plf (stmt, STMT_NECESSARY))
128 return;
130 if (dump_file && (dump_flags & TDF_DETAILS))
132 fprintf (dump_file, "Marking useful stmt: ");
133 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
134 fprintf (dump_file, "\n");
137 gimple_set_plf (stmt, STMT_NECESSARY, true);
138 if (add_to_worklist)
139 worklist.safe_push (stmt);
140 if (add_to_worklist && bb_contains_live_stmts && !is_gimple_debug (stmt))
141 bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
145 /* Mark the statement defining operand OP as necessary. */
147 static inline void
148 mark_operand_necessary (tree op)
150 gimple *stmt;
151 int ver;
153 gcc_assert (op);
155 ver = SSA_NAME_VERSION (op);
156 if (bitmap_bit_p (processed, ver))
158 stmt = SSA_NAME_DEF_STMT (op);
159 gcc_assert (gimple_nop_p (stmt)
160 || gimple_plf (stmt, STMT_NECESSARY));
161 return;
163 bitmap_set_bit (processed, ver);
165 stmt = SSA_NAME_DEF_STMT (op);
166 gcc_assert (stmt);
168 if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
169 return;
171 if (dump_file && (dump_flags & TDF_DETAILS))
173 fprintf (dump_file, "marking necessary through ");
174 print_generic_expr (dump_file, op);
175 fprintf (dump_file, " stmt ");
176 print_gimple_stmt (dump_file, stmt, 0);
179 gimple_set_plf (stmt, STMT_NECESSARY, true);
180 if (bb_contains_live_stmts)
181 bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
182 worklist.safe_push (stmt);
186 /* Mark STMT as necessary if it obviously is. Add it to the worklist if
187 it can make other statements necessary.
189 If AGGRESSIVE is false, control statements are conservatively marked as
190 necessary. */
192 static void
193 mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
195 /* With non-call exceptions, we have to assume that all statements could
196 throw. If a statement could throw, it can be deemed necessary. */
197 if (cfun->can_throw_non_call_exceptions
198 && !cfun->can_delete_dead_exceptions
199 && stmt_could_throw_p (stmt))
201 mark_stmt_necessary (stmt, true);
202 return;
205 /* Statements that are implicitly live. Most function calls, asm
206 and return statements are required. Labels and GIMPLE_BIND nodes
207 are kept because they are control flow, and we have no way of
208 knowing whether they can be removed. DCE can eliminate all the
209 other statements in a block, and CFG can then remove the block
210 and labels. */
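/* For instance (an illustrative note, not taken from the cases below):
   an asm statement or a store to a volatile location is marked necessary
   immediately by the code below, while a plain assignment such as
   t_1 = x_2 + 1 falls through unmarked and survives only if a necessary
   statement later uses t_1.  */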
211 switch (gimple_code (stmt))
213 case GIMPLE_PREDICT:
214 case GIMPLE_LABEL:
215 mark_stmt_necessary (stmt, false);
216 return;
218 case GIMPLE_ASM:
219 case GIMPLE_RESX:
220 case GIMPLE_RETURN:
221 mark_stmt_necessary (stmt, true);
222 return;
224 case GIMPLE_CALL:
226 tree callee = gimple_call_fndecl (stmt);
227 if (callee != NULL_TREE
228 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
229 switch (DECL_FUNCTION_CODE (callee))
231 case BUILT_IN_MALLOC:
232 case BUILT_IN_ALIGNED_ALLOC:
233 case BUILT_IN_CALLOC:
234 CASE_BUILT_IN_ALLOCA:
235 case BUILT_IN_STRDUP:
236 case BUILT_IN_STRNDUP:
237 return;
239 default:;
 241 /* Most, but not all, function calls are required.  Function calls that
 242 produce no result and have no side effects (i.e. calls to const and
 243 pure functions) are unnecessary. */
244 if (gimple_has_side_effects (stmt))
246 mark_stmt_necessary (stmt, true);
247 return;
249 if (!gimple_call_lhs (stmt))
250 return;
251 break;
254 case GIMPLE_DEBUG:
255 /* Debug temps without a value are not useful. ??? If we could
256 easily locate the debug temp bind stmt for a use thereof,
 257 we could refrain from marking all debug temps here, and
 258 mark them only if they're used. */
259 if (!gimple_debug_bind_p (stmt)
260 || gimple_debug_bind_has_value_p (stmt)
261 || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
262 mark_stmt_necessary (stmt, false);
263 return;
265 case GIMPLE_GOTO:
266 gcc_assert (!simple_goto_p (stmt));
267 mark_stmt_necessary (stmt, true);
268 return;
270 case GIMPLE_COND:
271 gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
272 /* Fall through. */
274 case GIMPLE_SWITCH:
275 if (! aggressive)
276 mark_stmt_necessary (stmt, true);
277 break;
279 case GIMPLE_ASSIGN:
280 if (gimple_clobber_p (stmt))
281 return;
282 break;
284 default:
285 break;
288 /* If the statement has volatile operands, it needs to be preserved.
289 Same for statements that can alter control flow in unpredictable
290 ways. */
291 if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
293 mark_stmt_necessary (stmt, true);
294 return;
297 if (stmt_may_clobber_global_p (stmt))
299 mark_stmt_necessary (stmt, true);
300 return;
303 return;
307 /* Mark the last statement of BB as necessary. */
309 static void
310 mark_last_stmt_necessary (basic_block bb)
312 gimple *stmt = last_stmt (bb);
314 bitmap_set_bit (last_stmt_necessary, bb->index);
315 bitmap_set_bit (bb_contains_live_stmts, bb->index);
317 /* We actually mark the statement only if it is a control statement. */
318 if (stmt && is_ctrl_stmt (stmt))
319 mark_stmt_necessary (stmt, true);
323 /* Mark control dependent edges of BB as necessary. We have to do this only
324 once for each basic block so we set the appropriate bit after we're done.
326 When IGNORE_SELF is true, ignore BB in the list of control dependences. */
328 static void
329 mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
331 bitmap_iterator bi;
332 unsigned edge_number;
333 bool skipped = false;
335 gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
337 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
338 return;
340 EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
341 0, edge_number, bi)
343 basic_block cd_bb = cd->get_edge_src (edge_number);
345 if (ignore_self && cd_bb == bb)
347 skipped = true;
348 continue;
351 if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
352 mark_last_stmt_necessary (cd_bb);
355 if (!skipped)
356 bitmap_set_bit (visited_control_parents, bb->index);
360 /* Find obviously necessary statements. These are things like most function
361 calls, and stores to file level variables.
 363 If AGGRESSIVE is false, control statements are conservatively marked as
 364 necessary.  Otherwise their necessity is determined later through
 365 control dependence analysis. */
367 static void
368 find_obviously_necessary_stmts (bool aggressive)
370 basic_block bb;
371 gimple_stmt_iterator gsi;
372 edge e;
373 gimple *phi, *stmt;
374 int flags;
376 FOR_EACH_BB_FN (bb, cfun)
378 /* PHI nodes are never inherently necessary. */
379 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
381 phi = gsi_stmt (gsi);
382 gimple_set_plf (phi, STMT_NECESSARY, false);
385 /* Check all statements in the block. */
386 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
388 stmt = gsi_stmt (gsi);
389 gimple_set_plf (stmt, STMT_NECESSARY, false);
390 mark_stmt_if_obviously_necessary (stmt, aggressive);
394 /* Pure and const functions are finite and thus have no infinite loops in
395 them. */
396 flags = flags_from_decl_or_type (current_function_decl);
397 if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
398 return;
400 /* Prevent the empty possibly infinite loops from being removed. */
401 if (aggressive)
403 struct loop *loop;
404 if (mark_irreducible_loops ())
405 FOR_EACH_BB_FN (bb, cfun)
407 edge_iterator ei;
408 FOR_EACH_EDGE (e, ei, bb->succs)
409 if ((e->flags & EDGE_DFS_BACK)
410 && (e->flags & EDGE_IRREDUCIBLE_LOOP))
412 if (dump_file)
413 fprintf (dump_file, "Marking back edge of irreducible loop %i->%i\n",
414 e->src->index, e->dest->index);
415 mark_control_dependent_edges_necessary (e->dest, false);
419 FOR_EACH_LOOP (loop, 0)
420 if (!finite_loop_p (loop))
422 if (dump_file)
423 fprintf (dump_file, "can not prove finiteness of loop %i\n", loop->num);
424 mark_control_dependent_edges_necessary (loop->latch, false);
430 /* Return true if REF is based on an aliased base, otherwise false. */
432 static bool
433 ref_may_be_aliased (tree ref)
435 gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
436 while (handled_component_p (ref))
437 ref = TREE_OPERAND (ref, 0);
438 if (TREE_CODE (ref) == MEM_REF
439 && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
440 ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
441 return !(DECL_P (ref)
442 && !may_be_aliased (ref));
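/* E.g. (hypothetical names) a reference a[i_1] to a local array A whose
   address never escapes is not considered aliased by ref_may_be_aliased,
   while *p_2 or any reference based on a global variable is.  */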
445 static bitmap visited = NULL;
446 static unsigned int longest_chain = 0;
447 static unsigned int total_chain = 0;
448 static unsigned int nr_walks = 0;
449 static bool chain_ovfl = false;
451 /* Worker for the walker that marks reaching definitions of REF,
452 which is based on a non-aliased decl, necessary. It returns
453 true whenever the defining statement of the current VDEF is
454 a kill for REF, as no dominating may-defs are necessary for REF
455 anymore. DATA points to the basic-block that contains the
456 stmt that refers to REF. */
458 static bool
459 mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
461 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
463 /* All stmts we visit are necessary. */
464 if (! gimple_clobber_p (def_stmt))
465 mark_operand_necessary (vdef);
467 /* If the stmt lhs kills ref, then we can stop walking. */
468 if (gimple_has_lhs (def_stmt)
469 && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
470 /* The assignment is not necessarily carried out if it can throw
471 and we can catch it in the current function where we could inspect
472 the previous value.
473 ??? We only need to care about the RHS throwing. For aggregate
474 assignments or similar calls and non-call exceptions the LHS
475 might throw as well. */
476 && !stmt_can_throw_internal (def_stmt))
478 tree base, lhs = gimple_get_lhs (def_stmt);
479 HOST_WIDE_INT size, offset, max_size;
480 bool reverse;
481 ao_ref_base (ref);
482 base
483 = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
484 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
485 so base == refd->base does not always hold. */
486 if (base == ref->base)
488 /* For a must-alias check we need to be able to constrain
489 the accesses properly. */
490 if (size != -1 && size == max_size
491 && ref->max_size != -1)
493 if (offset <= ref->offset
494 && offset + size >= ref->offset + ref->max_size)
495 return true;
497 /* Or they need to be exactly the same. */
498 else if (ref->ref
499 /* Make sure there is no induction variable involved
500 in the references (gcc.c-torture/execute/pr42142.c).
501 The simplest way is to check if the kill dominates
502 the use. */
503 /* But when both are in the same block we cannot
504 easily tell whether we came from a backedge
505 unless we decide to compute stmt UIDs
506 (see PR58246). */
507 && (basic_block) data != gimple_bb (def_stmt)
508 && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
509 gimple_bb (def_stmt))
510 && operand_equal_p (ref->ref, lhs, 0))
511 return true;
515 /* Otherwise keep walking. */
516 return false;
519 static void
520 mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
522 unsigned int chain;
523 ao_ref refd;
524 gcc_assert (!chain_ovfl);
525 ao_ref_init (&refd, ref);
526 chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
527 mark_aliased_reaching_defs_necessary_1,
528 gimple_bb (stmt), NULL);
529 if (chain > longest_chain)
530 longest_chain = chain;
531 total_chain += chain;
532 nr_walks++;
535 /* Worker for the walker that marks reaching definitions of REF, which
536 is not based on a non-aliased decl. For simplicity we need to end
537 up marking all may-defs necessary that are not based on a non-aliased
538 decl. The only job of this walker is to skip may-defs based on
539 a non-aliased decl. */
541 static bool
542 mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
543 tree vdef, void *data ATTRIBUTE_UNUSED)
545 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
547 /* We have to skip already visited (and thus necessary) statements
548 to make the chaining work after we dropped back to simple mode. */
549 if (chain_ovfl
550 && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
552 gcc_assert (gimple_nop_p (def_stmt)
553 || gimple_plf (def_stmt, STMT_NECESSARY));
554 return false;
557 /* We want to skip stores to non-aliased variables. */
558 if (!chain_ovfl
559 && gimple_assign_single_p (def_stmt))
561 tree lhs = gimple_assign_lhs (def_stmt);
562 if (!ref_may_be_aliased (lhs))
563 return false;
 566 /* We want to skip statements that do not constitute stores but have
567 a virtual definition. */
568 if (is_gimple_call (def_stmt))
570 tree callee = gimple_call_fndecl (def_stmt);
571 if (callee != NULL_TREE
572 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
573 switch (DECL_FUNCTION_CODE (callee))
575 case BUILT_IN_MALLOC:
576 case BUILT_IN_ALIGNED_ALLOC:
577 case BUILT_IN_CALLOC:
578 CASE_BUILT_IN_ALLOCA:
579 case BUILT_IN_FREE:
580 return false;
582 default:;
586 if (! gimple_clobber_p (def_stmt))
587 mark_operand_necessary (vdef);
589 return false;
592 static void
593 mark_all_reaching_defs_necessary (gimple *stmt)
595 walk_aliased_vdefs (NULL, gimple_vuse (stmt),
596 mark_all_reaching_defs_necessary_1, NULL, &visited);
 599 /* Return true for PHI nodes that have one argument or whose arguments
 600 are all identical; such PHI nodes can be removed. */
601 static bool
602 degenerate_phi_p (gimple *phi)
604 unsigned int i;
605 tree op = gimple_phi_arg_def (phi, 0);
606 for (i = 1; i < gimple_phi_num_args (phi); i++)
607 if (gimple_phi_arg_def (phi, i) != op)
608 return false;
609 return true;
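/* E.g. x_4 = PHI <a_1, a_1, a_1> is degenerate in the above sense,
   while x_4 = PHI <a_1, b_2> is not (hypothetical SSA names).  */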
612 /* Propagate necessity using the operands of necessary statements.
613 Process the uses on each statement in the worklist, and add all
614 feeding statements which contribute to the calculation of this
615 value to the worklist.
 617 In conservative mode (AGGRESSIVE is false), control dependences are ignored. */
619 static void
620 propagate_necessity (bool aggressive)
622 gimple *stmt;
624 if (dump_file && (dump_flags & TDF_DETAILS))
625 fprintf (dump_file, "\nProcessing worklist:\n");
627 while (worklist.length () > 0)
629 /* Take STMT from worklist. */
630 stmt = worklist.pop ();
632 if (dump_file && (dump_flags & TDF_DETAILS))
634 fprintf (dump_file, "processing: ");
635 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
636 fprintf (dump_file, "\n");
639 if (aggressive)
641 /* Mark the last statement of the basic blocks on which the block
642 containing STMT is control dependent, but only if we haven't
643 already done so. */
644 basic_block bb = gimple_bb (stmt);
645 if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
646 && !bitmap_bit_p (visited_control_parents, bb->index))
647 mark_control_dependent_edges_necessary (bb, false);
650 if (gimple_code (stmt) == GIMPLE_PHI
651 /* We do not process virtual PHI nodes nor do we track their
652 necessity. */
653 && !virtual_operand_p (gimple_phi_result (stmt)))
655 /* PHI nodes are somewhat special in that each PHI alternative has
656 data and control dependencies. All the statements feeding the
657 PHI node's arguments are always necessary. In aggressive mode,
658 we also consider the control dependent edges leading to the
659 predecessor block associated with each PHI alternative as
660 necessary. */
661 gphi *phi = as_a <gphi *> (stmt);
662 size_t k;
664 for (k = 0; k < gimple_phi_num_args (stmt); k++)
666 tree arg = PHI_ARG_DEF (stmt, k);
667 if (TREE_CODE (arg) == SSA_NAME)
668 mark_operand_necessary (arg);
671 /* For PHI operands it matters from where the control flow arrives
672 to the BB. Consider the following example:
 674 a=exp1;
 675 b=exp2;
 676 if (test)
 677 ;
 678 else
 679 ;
 680 c=PHI(a,b)
 682 We need to mark the control dependence of the empty basic blocks, since they
 683 contain the computation of the PHI operands.
 685 Doing so is too restrictive in the case the predecessor block is inside
 686 a loop. Consider:
 688 if (b)
 689 {
 690 int i;
 691 for (i = 0; i<1000; ++i)
 692 ;
 693 j = 0;
 694 }
 695 return j;
 697 There is a PHI for J in the BB containing the return statement.
 698 In this case the control dependence of the predecessor block (that is
 699 within the empty loop) also contains the block determining the number
 700 of iterations of the loop, which would prevent removal of the empty
 701 loop in this case.
703 This scenario can be avoided by splitting critical edges.
 704 To save the critical edge splitting pass we identify what the control
 705 dependence would look like if the edge were split.
707 Consider the modified CFG created from current CFG by splitting
 708 edge B->C. In the postdominance tree of the modified CFG, C' is
 709 always a child of C. There are two ways the children of C' can
 710 look:
 712 1) C' is a leaf
 714 In this case the only basic block C' is control dependent on is B.
 716 2) C' has a single child, which is B
 718 In this case the control dependence of C' is the same as the control
 719 dependence of B in the original CFG, except for block B itself
 720 (since C' postdominates B in the modified CFG).
 722 Now how do we decide which case happens? There are two basic options:
 724 a) C postdominates B. Then C immediately postdominates B and
 725 case 2 happens iff there is no other way from B to C except
 726 the edge B->C.
 728 There is another way from B to C iff there is a successor of B that
 729 is not postdominated by B. Testing this condition is somewhat
 730 expensive, because we would need to iterate over all successors of B.
 731 We are safe to assume that this does not happen: we will mark B
 732 as needed when processing the other path from B to C that is
 733 control dependent on B, and marking the control dependencies of B
 734 itself is harmless because they will be processed anyway after
 735 processing the control statement in B.
 737 b) C does not postdominate B. Case 1 always happens since there is a
 738 path from C to the exit that does not go through B and thus also not through C'. */
740 if (aggressive && !degenerate_phi_p (stmt))
742 for (k = 0; k < gimple_phi_num_args (stmt); k++)
744 basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;
746 if (gimple_bb (stmt)
747 != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
749 if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
750 mark_last_stmt_necessary (arg_bb);
752 else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
753 && !bitmap_bit_p (visited_control_parents,
754 arg_bb->index))
755 mark_control_dependent_edges_necessary (arg_bb, true);
759 else
761 /* Propagate through the operands. Examine all the USE, VUSE and
762 VDEF operands in this statement. Mark all the statements
763 which feed this statement's uses as necessary. */
764 ssa_op_iter iter;
765 tree use;
767 /* If this is a call to free which is directly fed by an
768 allocation function do not mark that necessary through
769 processing the argument. */
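/* E.g. (illustrative GIMPLE) for p_1 = malloc (n_2); ... free (p_1);
   where p_1 has no other necessary use, not marking the malloc here
   allows the whole allocation/free pair to be removed.  */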
770 if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
772 tree ptr = gimple_call_arg (stmt, 0);
773 gimple *def_stmt;
774 tree def_callee;
775 /* If the pointer we free is defined by an allocation
776 function do not add the call to the worklist. */
777 if (TREE_CODE (ptr) == SSA_NAME
778 && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
779 && (def_callee = gimple_call_fndecl (def_stmt))
780 && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
781 && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
782 || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
783 || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
785 gimple *bounds_def_stmt;
786 tree bounds;
 788 /* For instrumented calls we should also check that the bounds
 789 used are returned by the same allocation call. */
790 if (!gimple_call_with_bounds_p (stmt)
791 || ((bounds = gimple_call_arg (stmt, 1))
792 && TREE_CODE (bounds) == SSA_NAME
793 && (bounds_def_stmt = SSA_NAME_DEF_STMT (bounds))
794 && chkp_gimple_call_builtin_p (bounds_def_stmt,
795 BUILT_IN_CHKP_BNDRET)
796 && gimple_call_arg (bounds_def_stmt, 0) == ptr))
797 continue;
801 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
802 mark_operand_necessary (use);
804 use = gimple_vuse (stmt);
805 if (!use)
806 continue;
808 /* If we dropped to simple mode make all immediately
809 reachable definitions necessary. */
810 if (chain_ovfl)
812 mark_all_reaching_defs_necessary (stmt);
813 continue;
816 /* For statements that may load from memory (have a VUSE) we
817 have to mark all reaching (may-)definitions as necessary.
818 We partition this task into two cases:
819 1) explicit loads based on decls that are not aliased
820 2) implicit loads (like calls) and explicit loads not
821 based on decls that are not aliased (like indirect
822 references or loads from globals)
823 For 1) we mark all reaching may-defs as necessary, stopping
824 at dominating kills. For 2) we want to mark all dominating
 825 references necessary, except for non-aliased ones, which we handle
826 in 1). By keeping a global visited bitmap for references
827 we walk for 2) we avoid quadratic behavior for those. */
829 if (is_gimple_call (stmt))
831 tree callee = gimple_call_fndecl (stmt);
832 unsigned i;
834 /* Calls to functions that are merely acting as barriers
835 or that only store to memory do not make any previous
836 stores necessary. */
837 if (callee != NULL_TREE
838 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
839 && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
840 || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
841 || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
842 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC
843 || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
844 || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
845 || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
846 || ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callee))
847 || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
848 || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
849 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
850 continue;
852 /* Calls implicitly load from memory, their arguments
853 in addition may explicitly perform memory loads. */
854 mark_all_reaching_defs_necessary (stmt);
855 for (i = 0; i < gimple_call_num_args (stmt); ++i)
857 tree arg = gimple_call_arg (stmt, i);
858 if (TREE_CODE (arg) == SSA_NAME
859 || is_gimple_min_invariant (arg))
860 continue;
861 if (TREE_CODE (arg) == WITH_SIZE_EXPR)
862 arg = TREE_OPERAND (arg, 0);
863 if (!ref_may_be_aliased (arg))
864 mark_aliased_reaching_defs_necessary (stmt, arg);
867 else if (gimple_assign_single_p (stmt))
869 tree rhs;
870 /* If this is a load mark things necessary. */
871 rhs = gimple_assign_rhs1 (stmt);
872 if (TREE_CODE (rhs) != SSA_NAME
873 && !is_gimple_min_invariant (rhs)
874 && TREE_CODE (rhs) != CONSTRUCTOR)
876 if (!ref_may_be_aliased (rhs))
877 mark_aliased_reaching_defs_necessary (stmt, rhs);
878 else
879 mark_all_reaching_defs_necessary (stmt);
882 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
884 tree rhs = gimple_return_retval (return_stmt);
885 /* A return statement may perform a load. */
886 if (rhs
887 && TREE_CODE (rhs) != SSA_NAME
888 && !is_gimple_min_invariant (rhs)
889 && TREE_CODE (rhs) != CONSTRUCTOR)
891 if (!ref_may_be_aliased (rhs))
892 mark_aliased_reaching_defs_necessary (stmt, rhs);
893 else
894 mark_all_reaching_defs_necessary (stmt);
897 else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
899 unsigned i;
900 mark_all_reaching_defs_necessary (stmt);
901 /* Inputs may perform loads. */
902 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
904 tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
905 if (TREE_CODE (op) != SSA_NAME
906 && !is_gimple_min_invariant (op)
907 && TREE_CODE (op) != CONSTRUCTOR
908 && !ref_may_be_aliased (op))
909 mark_aliased_reaching_defs_necessary (stmt, op);
912 else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
914 /* The beginning of a transaction is a memory barrier. */
915 /* ??? If we were really cool, we'd only be a barrier
916 for the memories touched within the transaction. */
917 mark_all_reaching_defs_necessary (stmt);
919 else
920 gcc_unreachable ();
922 /* If we over-used our alias oracle budget drop to simple
923 mode. The cost metric allows quadratic behavior
924 (number of uses times number of may-defs queries) up to
925 a constant maximal number of queries and after that falls back to
926 super-linear complexity. */
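/* As a worked example with purely illustrative numbers: 1000 walks
   with an average chain of 50 and a longest chain of 100 give
   total_chain = 50000, which exceeds 128*128 = 16384, 32*100 = 3200
   and 1000*32 = 32000, so the check below drops us to simple mode.  */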
927 if (/* Constant but quadratic for small functions. */
928 total_chain > 128 * 128
929 /* Linear in the number of may-defs. */
930 && total_chain > 32 * longest_chain
931 /* Linear in the number of uses. */
932 && total_chain > nr_walks * 32)
934 chain_ovfl = true;
935 if (visited)
936 bitmap_clear (visited);
942 /* Remove dead PHI nodes from block BB. */
944 static bool
945 remove_dead_phis (basic_block bb)
947 bool something_changed = false;
948 gphi *phi;
949 gphi_iterator gsi;
951 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
953 stats.total_phis++;
954 phi = gsi.phi ();
956 /* We do not track necessity of virtual PHI nodes. Instead do
957 very simple dead PHI removal here. */
958 if (virtual_operand_p (gimple_phi_result (phi)))
960 /* Virtual PHI nodes with one or identical arguments
961 can be removed. */
962 if (degenerate_phi_p (phi))
964 tree vdef = gimple_phi_result (phi);
965 tree vuse = gimple_phi_arg_def (phi, 0);
967 use_operand_p use_p;
968 imm_use_iterator iter;
969 gimple *use_stmt;
970 FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
971 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
972 SET_USE (use_p, vuse);
973 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
974 && TREE_CODE (vuse) == SSA_NAME)
975 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
977 else
978 gimple_set_plf (phi, STMT_NECESSARY, true);
981 if (!gimple_plf (phi, STMT_NECESSARY))
983 something_changed = true;
984 if (dump_file && (dump_flags & TDF_DETAILS))
986 fprintf (dump_file, "Deleting : ");
987 print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
988 fprintf (dump_file, "\n");
991 remove_phi_node (&gsi, true);
992 stats.removed_phis++;
993 continue;
996 gsi_next (&gsi);
998 return something_changed;
1002 /* Remove dead statement pointed to by iterator I. Receives the basic block BB
1003 containing I so that we don't have to look it up. */
1005 static void
1006 remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
1008 gimple *stmt = gsi_stmt (*i);
1010 if (dump_file && (dump_flags & TDF_DETAILS))
1012 fprintf (dump_file, "Deleting : ");
1013 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1014 fprintf (dump_file, "\n");
1017 stats.removed++;
1019 /* If we have determined that a conditional branch statement contributes
 1020 nothing to the program, then we not only remove it, but we also need to update
 1021 the CFG.  We can choose any of the edges out of BB as long as we are sure not
 1022 to close infinite loops.  This is done by always choosing the edge closer to
1023 exit in inverted_post_order_compute order. */
1024 if (is_ctrl_stmt (stmt))
1026 edge_iterator ei;
1027 edge e = NULL, e2;
1029 /* See if there is only one non-abnormal edge. */
1030 if (single_succ_p (bb))
1031 e = single_succ_edge (bb);
 1032 /* Otherwise choose one that is closer to a bb with a live statement in it.
 1033 To be able to choose one, we compute an inverted post order starting from
1034 all BBs with live statements. */
1035 if (!e)
1037 if (!bb_postorder)
1039 auto_vec<int, 20> postorder;
1040 inverted_post_order_compute (&postorder,
1041 &bb_contains_live_stmts);
1042 bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
1043 for (unsigned int i = 0; i < postorder.length (); ++i)
1044 bb_postorder[postorder[i]] = i;
1046 FOR_EACH_EDGE (e2, ei, bb->succs)
1047 if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
1048 || bb_postorder [e->dest->index]
1049 < bb_postorder [e2->dest->index])
1050 e = e2;
1052 gcc_assert (e);
1053 e->probability = profile_probability::always ();
1054 e->count = bb->count;
1056 /* The edge is no longer associated with a conditional, so it does
1057 not have TRUE/FALSE flags.
1058 We are also safe to drop EH/ABNORMAL flags and turn them into
1059 normal control flow, because we know that all the destinations (including
1060 those odd edges) are equivalent for program execution. */
1061 e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE | EDGE_EH | EDGE_ABNORMAL);
1063 /* The lone outgoing edge from BB will be a fallthru edge. */
1064 e->flags |= EDGE_FALLTHRU;
1066 /* Remove the remaining outgoing edges. */
1067 for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
1068 if (e != e2)
1070 cfg_altered = true;
1071 /* If we made a BB unconditionally exit a loop or removed
1072 an entry into an irreducible region, then this transform
1073 alters the set of BBs in the loop. Schedule a fixup. */
1074 if (loop_exit_edge_p (bb->loop_father, e)
1075 || (e2->dest->flags & BB_IRREDUCIBLE_LOOP))
1076 loops_state_set (LOOPS_NEED_FIXUP);
1077 remove_edge (e2);
1079 else
1080 ei_next (&ei);
1083 /* If this is a store into a variable that is being optimized away,
1084 add a debug bind stmt if possible. */
1085 if (MAY_HAVE_DEBUG_STMTS
1086 && gimple_assign_single_p (stmt)
1087 && is_gimple_val (gimple_assign_rhs1 (stmt)))
1089 tree lhs = gimple_assign_lhs (stmt);
1090 if ((VAR_P (lhs) || TREE_CODE (lhs) == PARM_DECL)
1091 && !DECL_IGNORED_P (lhs)
1092 && is_gimple_reg_type (TREE_TYPE (lhs))
1093 && !is_global_var (lhs)
1094 && !DECL_HAS_VALUE_EXPR_P (lhs))
1096 tree rhs = gimple_assign_rhs1 (stmt);
1097 gdebug *note
1098 = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
1099 gsi_insert_after (i, note, GSI_SAME_STMT);
1103 unlink_stmt_vdef (stmt);
1104 gsi_remove (i, true);
1105 release_defs (stmt);
1108 /* Helper for maybe_optimize_arith_overflow. Find in *TP if there are any
1109 uses of data (SSA_NAME) other than REALPART_EXPR referencing it. */
1111 static tree
1112 find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
1114 if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
1115 *walk_subtrees = 0;
1116 if (*tp == (tree) data)
1117 return *tp;
1118 return NULL_TREE;
1121 /* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
1122 but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
1123 into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
1124 uses. */
1126 static void
1127 maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
1128 enum tree_code subcode)
1130 gimple *stmt = gsi_stmt (*gsi);
1131 tree lhs = gimple_call_lhs (stmt);
1133 if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
1134 return;
1136 imm_use_iterator imm_iter;
1137 use_operand_p use_p;
1138 bool has_debug_uses = false;
1139 bool has_realpart_uses = false;
1140 bool has_other_uses = false;
1141 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
1143 gimple *use_stmt = USE_STMT (use_p);
1144 if (is_gimple_debug (use_stmt))
1145 has_debug_uses = true;
1146 else if (is_gimple_assign (use_stmt)
1147 && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
1148 && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
1149 has_realpart_uses = true;
1150 else
1152 has_other_uses = true;
1153 break;
1157 if (!has_realpart_uses || has_other_uses)
1158 return;
1160 tree arg0 = gimple_call_arg (stmt, 0);
1161 tree arg1 = gimple_call_arg (stmt, 1);
1162 location_t loc = gimple_location (stmt);
1163 tree type = TREE_TYPE (TREE_TYPE (lhs));
1164 tree utype = type;
1165 if (!TYPE_UNSIGNED (type))
1166 utype = build_nonstandard_integer_type (TYPE_PRECISION (type), 1);
1167 tree result = fold_build2_loc (loc, subcode, utype,
1168 fold_convert_loc (loc, utype, arg0),
1169 fold_convert_loc (loc, utype, arg1));
1170 result = fold_convert_loc (loc, type, result);
1172 if (has_debug_uses)
1174 gimple *use_stmt;
1175 FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
1177 if (!gimple_debug_bind_p (use_stmt))
1178 continue;
1179 tree v = gimple_debug_bind_get_value (use_stmt);
1180 if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
1182 gimple_debug_bind_reset_value (use_stmt);
1183 update_stmt (use_stmt);
1188 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
1189 result = drop_tree_overflow (result);
1190 tree overflow = build_zero_cst (type);
1191 tree ctype = build_complex_type (type);
1192 if (TREE_CODE (result) == INTEGER_CST)
1193 result = build_complex (ctype, result, overflow);
1194 else
1195 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
1196 ctype, result, overflow);
1198 if (dump_file && (dump_flags & TDF_DETAILS))
1200 fprintf (dump_file, "Transforming call: ");
1201 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1202 fprintf (dump_file, "because the overflow result is never used into: ");
1203 print_generic_stmt (dump_file, result, TDF_SLIM);
1204 fprintf (dump_file, "\n");
1207 if (!update_call_from_tree (gsi, result))
1208 gimplify_and_update_call_from_tree (gsi, result);
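/* Example of the transformation done by maybe_optimize_arith_overflow
   above (illustrative GIMPLE): when only the real part of

     _5 = ADD_OVERFLOW (a_1, b_2);
     r_6 = REALPART_EXPR <_5>;

   is used, the call is replaced by _5 = COMPLEX_EXPR <a_1 + b_2, 0>,
   with the addition carried out in the corresponding unsigned type.  */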
1211 /* Eliminate unnecessary statements. Any instruction not marked as necessary
1212 contributes nothing to the program, and can be deleted. */
1214 static bool
1215 eliminate_unnecessary_stmts (void)
1217 bool something_changed = false;
1218 basic_block bb;
1219 gimple_stmt_iterator gsi, psi;
1220 gimple *stmt;
1221 tree call;
1222 vec<basic_block> h;
1224 if (dump_file && (dump_flags & TDF_DETAILS))
1225 fprintf (dump_file, "\nEliminating unnecessary statements:\n");
1227 clear_special_calls ();
1229 /* Walking basic blocks and statements in reverse order avoids
1230 releasing SSA names before any other DEFs that refer to them are
1231 released. This helps avoid loss of debug information, as we get
1232 a chance to propagate all RHSs of removed SSAs into debug uses,
1233 rather than only the latest ones. E.g., consider:
1235 x_3 = y_1 + z_2;
1236 a_5 = x_3 - b_4;
1237 # DEBUG a => a_5
1239 If we were to release x_3 before a_5, when we reached a_5 and
1240 tried to substitute it into the debug stmt, we'd see x_3 there,
1241 but x_3's DEF, type, etc would have already been disconnected.
1242 By going backwards, the debug stmt first changes to:
1244 # DEBUG a => x_3 - b_4
1246 and then to:
1248 # DEBUG a => y_1 + z_2 - b_4
1250 as desired. */
1251 gcc_assert (dom_info_available_p (CDI_DOMINATORS));
1252 h = get_all_dominated_blocks (CDI_DOMINATORS,
1253 single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1255 while (h.length ())
1257 bb = h.pop ();
1259 /* Remove dead statements. */
1260 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
1262 stmt = gsi_stmt (gsi);
1264 psi = gsi;
1265 gsi_prev (&psi);
1267 stats.total++;
1269 /* We can mark a call to free as not necessary if the
1270 defining statement of its argument is not necessary
1271 (and thus is getting removed). */
1272 if (gimple_plf (stmt, STMT_NECESSARY)
1273 && gimple_call_builtin_p (stmt, BUILT_IN_FREE))
1275 tree ptr = gimple_call_arg (stmt, 0);
1276 if (TREE_CODE (ptr) == SSA_NAME)
1278 gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
1279 if (!gimple_nop_p (def_stmt)
1280 && !gimple_plf (def_stmt, STMT_NECESSARY))
1281 gimple_set_plf (stmt, STMT_NECESSARY, false);
 1283 /* We did not propagate necessity for free calls fed
 1284 by an allocation function, to allow unnecessary
 1285 alloc-free sequences to be eliminated. For instrumented
 1286 calls this also means we did not mark the bounds producer
 1287 as necessary, and it is time to do so in case the free
 1288 call is not removed. */
1289 if (gimple_call_with_bounds_p (stmt))
1291 gimple *bounds_def_stmt;
1292 tree bounds = gimple_call_arg (stmt, 1);
1293 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
1294 bounds_def_stmt = SSA_NAME_DEF_STMT (bounds);
1295 if (bounds_def_stmt
1296 && !gimple_plf (bounds_def_stmt, STMT_NECESSARY))
1297 gimple_set_plf (bounds_def_stmt, STMT_NECESSARY,
1298 gimple_plf (stmt, STMT_NECESSARY));
1302 /* If GSI is not necessary then remove it. */
1303 if (!gimple_plf (stmt, STMT_NECESSARY))
 1305 /* Keep clobbers that we can keep live, i.e. those whose SSA uses are still live. */
1306 if (gimple_clobber_p (stmt))
1308 ssa_op_iter iter;
1309 use_operand_p use_p;
1310 bool dead = false;
1311 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
1313 tree name = USE_FROM_PTR (use_p);
1314 if (!SSA_NAME_IS_DEFAULT_DEF (name)
1315 && !bitmap_bit_p (processed, SSA_NAME_VERSION (name)))
1317 dead = true;
1318 break;
1321 if (!dead)
1322 continue;
1324 if (!is_gimple_debug (stmt))
1325 something_changed = true;
1326 remove_dead_stmt (&gsi, bb);
1328 else if (is_gimple_call (stmt))
1330 tree name = gimple_call_lhs (stmt);
1332 notice_special_calls (as_a <gcall *> (stmt));
1334 /* When LHS of var = call (); is dead, simplify it into
1335 call (); saving one operand. */
1336 if (name
1337 && TREE_CODE (name) == SSA_NAME
1338 && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
1339 /* Avoid doing so for allocation calls which we
1340 did not mark as necessary, it will confuse the
1341 special logic we apply to malloc/free pair removal. */
1342 && (!(call = gimple_call_fndecl (stmt))
1343 || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
1344 || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
1345 && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
1346 && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
1347 && !ALLOCA_FUNCTION_CODE_P
1348 (DECL_FUNCTION_CODE (call))))
1349 /* Avoid doing so for bndret calls for the same reason. */
1350 && !chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
1352 something_changed = true;
1353 if (dump_file && (dump_flags & TDF_DETAILS))
1355 fprintf (dump_file, "Deleting LHS of call: ");
1356 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1357 fprintf (dump_file, "\n");
1360 gimple_call_set_lhs (stmt, NULL_TREE);
1361 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1362 update_stmt (stmt);
1363 release_ssa_name (name);
1365 /* GOMP_SIMD_LANE or ASAN_POISON without lhs is not
1366 needed. */
1367 if (gimple_call_internal_p (stmt))
1368 switch (gimple_call_internal_fn (stmt))
1370 case IFN_GOMP_SIMD_LANE:
1371 case IFN_ASAN_POISON:
1372 remove_dead_stmt (&gsi, bb);
1373 break;
1374 default:
1375 break;
1378 else if (gimple_call_internal_p (stmt))
1379 switch (gimple_call_internal_fn (stmt))
1381 case IFN_ADD_OVERFLOW:
1382 maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
1383 break;
1384 case IFN_SUB_OVERFLOW:
1385 maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
1386 break;
1387 case IFN_MUL_OVERFLOW:
1388 maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
1389 break;
1390 default:
1391 break;
1397 h.release ();
1399 /* Since we don't track liveness of virtual PHI nodes, it is possible that we
1400 rendered some PHI nodes unreachable while they are still in use.
1401 Mark them for renaming. */
1402 if (cfg_altered)
1404 basic_block prev_bb;
1406 find_unreachable_blocks ();
1408 /* Delete all unreachable basic blocks in reverse dominator order. */
1409 for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
1410 bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
1412 prev_bb = bb->prev_bb;
1414 if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
1415 || !(bb->flags & BB_REACHABLE))
1417 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1418 gsi_next (&gsi))
1419 if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
1421 bool found = false;
1422 imm_use_iterator iter;
1424 FOR_EACH_IMM_USE_STMT (stmt, iter,
1425 gimple_phi_result (gsi.phi ()))
1427 if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
1428 continue;
1429 if (gimple_code (stmt) == GIMPLE_PHI
1430 || gimple_plf (stmt, STMT_NECESSARY))
1432 found = true;
1433 BREAK_FROM_IMM_USE_STMT (iter);
1436 if (found)
1437 mark_virtual_phi_result_for_renaming (gsi.phi ());
1440 if (!(bb->flags & BB_REACHABLE))
1442 /* Speed up the removal of blocks that don't
1443 dominate others. Walking backwards, this should
1444 be the common case. ??? Do we need to recompute
1445 dominators because of cfg_altered? */
1446 if (!MAY_HAVE_DEBUG_STMTS
1447 || !first_dom_son (CDI_DOMINATORS, bb))
1448 delete_basic_block (bb);
1449 else
1451 h = get_all_dominated_blocks (CDI_DOMINATORS, bb);
1453 while (h.length ())
1455 bb = h.pop ();
1456 prev_bb = bb->prev_bb;
1457 /* Rearrangements to the CFG may have failed
1458 to update the dominators tree, so that
1459 formerly-dominated blocks are now
1460 otherwise reachable. */
1461 if (!!(bb->flags & BB_REACHABLE))
1462 continue;
1463 delete_basic_block (bb);
1466 h.release ();
1472 FOR_EACH_BB_FN (bb, cfun)
1474 /* Remove dead PHI nodes. */
1475 something_changed |= remove_dead_phis (bb);
1478 if (bb_postorder)
1479 free (bb_postorder);
1480 bb_postorder = NULL;
1482 return something_changed;
1486 /* Print out removed statement statistics. */
1488 static void
1489 print_stats (void)
1491 float percg;
1493 percg = ((float) stats.removed / (float) stats.total) * 100;
1494 fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
1495 stats.removed, stats.total, (int) percg);
1497 if (stats.total_phis == 0)
1498 percg = 0;
1499 else
1500 percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;
1502 fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
1503 stats.removed_phis, stats.total_phis, (int) percg);
1506 /* Initialization for this pass. Set up the used data structures. */
1508 static void
1509 tree_dce_init (bool aggressive)
1511 memset ((void *) &stats, 0, sizeof (stats));
1513 if (aggressive)
1515 last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
1516 bitmap_clear (last_stmt_necessary);
1517 bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
1518 bitmap_clear (bb_contains_live_stmts);
1521 processed = sbitmap_alloc (num_ssa_names + 1);
1522 bitmap_clear (processed);
1524 worklist.create (64);
1525 cfg_altered = false;
1528 /* Cleanup after this pass. */
1530 static void
1531 tree_dce_done (bool aggressive)
1533 if (aggressive)
1535 delete cd;
1536 sbitmap_free (visited_control_parents);
1537 sbitmap_free (last_stmt_necessary);
1538 sbitmap_free (bb_contains_live_stmts);
1539 bb_contains_live_stmts = NULL;
1542 sbitmap_free (processed);
1544 worklist.release ();
1547 /* Main routine to eliminate dead code.
1549 AGGRESSIVE controls the aggressiveness of the algorithm.
1550 In conservative mode, we ignore control dependence and simply declare
1551 all but the most trivially dead branches necessary. This mode is fast.
1552 In aggressive mode, control dependences are taken into account, which
1553 results in more dead code elimination, but at the cost of some time.
1555 FIXME: Aggressive mode before PRE doesn't work currently because
1556 the dominance info is not invalidated after DCE1. This is
1557 not an issue right now because we only run aggressive DCE
1558 as the last tree SSA pass, but keep this in mind when you
1559 start experimenting with pass ordering. */
1561 static unsigned int
1562 perform_tree_ssa_dce (bool aggressive)
1564 bool something_changed = 0;
1566 calculate_dominance_info (CDI_DOMINATORS);
1568 /* Preheaders are needed for SCEV to work.
 1569 Simple latches and recorded exits improve the chances that a loop will be
 1570 proved to be finite in testcases such as loop-15.c and loop-24.c. */
1571 bool in_loop_pipeline = scev_initialized_p ();
1572 if (aggressive && ! in_loop_pipeline)
1574 scev_initialize ();
1575 loop_optimizer_init (LOOPS_NORMAL
1576 | LOOPS_HAVE_RECORDED_EXITS);
1579 tree_dce_init (aggressive);
1581 if (aggressive)
1583 /* Compute control dependence. */
1584 calculate_dominance_info (CDI_POST_DOMINATORS);
1585 cd = new control_dependences ();
1587 visited_control_parents =
1588 sbitmap_alloc (last_basic_block_for_fn (cfun));
1589 bitmap_clear (visited_control_parents);
1591 mark_dfs_back_edges ();
1594 find_obviously_necessary_stmts (aggressive);
1596 if (aggressive && ! in_loop_pipeline)
1598 loop_optimizer_finalize ();
1599 scev_finalize ();
1602 longest_chain = 0;
1603 total_chain = 0;
1604 nr_walks = 0;
1605 chain_ovfl = false;
1606 visited = BITMAP_ALLOC (NULL);
1607 propagate_necessity (aggressive);
1608 BITMAP_FREE (visited);
1610 something_changed |= eliminate_unnecessary_stmts ();
1611 something_changed |= cfg_altered;
1613 /* We do not update postdominators, so free them unconditionally. */
1614 free_dominance_info (CDI_POST_DOMINATORS);
1616 /* If we removed paths in the CFG, then we need to update
1617 dominators as well. I haven't investigated the possibility
1618 of incrementally updating dominators. */
1619 if (cfg_altered)
1620 free_dominance_info (CDI_DOMINATORS);
1622 statistics_counter_event (cfun, "Statements deleted", stats.removed);
1623 statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);
1625 /* Debugging dumps. */
1626 if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
1627 print_stats ();
1629 tree_dce_done (aggressive);
1631 if (something_changed)
1633 free_numbers_of_iterations_estimates (cfun);
1634 if (in_loop_pipeline)
1635 scev_reset ();
1636 return TODO_update_ssa | TODO_cleanup_cfg;
1638 return 0;
1641 /* Pass entry points. */
1642 static unsigned int
1643 tree_ssa_dce (void)
1645 return perform_tree_ssa_dce (/*aggressive=*/false);
1648 static unsigned int
1649 tree_ssa_cd_dce (void)
1651 return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
1654 namespace {
1656 const pass_data pass_data_dce =
1658 GIMPLE_PASS, /* type */
1659 "dce", /* name */
1660 OPTGROUP_NONE, /* optinfo_flags */
1661 TV_TREE_DCE, /* tv_id */
1662 ( PROP_cfg | PROP_ssa ), /* properties_required */
1663 0, /* properties_provided */
1664 0, /* properties_destroyed */
1665 0, /* todo_flags_start */
1666 0, /* todo_flags_finish */
1669 class pass_dce : public gimple_opt_pass
1671 public:
1672 pass_dce (gcc::context *ctxt)
1673 : gimple_opt_pass (pass_data_dce, ctxt)
1676 /* opt_pass methods: */
1677 opt_pass * clone () { return new pass_dce (m_ctxt); }
1678 virtual bool gate (function *) { return flag_tree_dce != 0; }
1679 virtual unsigned int execute (function *) { return tree_ssa_dce (); }
1681 }; // class pass_dce
1683 } // anon namespace
1685 gimple_opt_pass *
1686 make_pass_dce (gcc::context *ctxt)
1688 return new pass_dce (ctxt);
1691 namespace {
1693 const pass_data pass_data_cd_dce =
1695 GIMPLE_PASS, /* type */
1696 "cddce", /* name */
1697 OPTGROUP_NONE, /* optinfo_flags */
1698 TV_TREE_CD_DCE, /* tv_id */
1699 ( PROP_cfg | PROP_ssa ), /* properties_required */
1700 0, /* properties_provided */
1701 0, /* properties_destroyed */
1702 0, /* todo_flags_start */
1703 0, /* todo_flags_finish */
1706 class pass_cd_dce : public gimple_opt_pass
1708 public:
1709 pass_cd_dce (gcc::context *ctxt)
1710 : gimple_opt_pass (pass_data_cd_dce, ctxt)
1713 /* opt_pass methods: */
1714 opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
1715 virtual bool gate (function *) { return flag_tree_dce != 0; }
1716 virtual unsigned int execute (function *) { return tree_ssa_cd_dce (); }
1718 }; // class pass_cd_dce
1720 } // anon namespace
1722 gimple_opt_pass *
1723 make_pass_cd_dce (gcc::context *ctxt)
1725 return new pass_cd_dce (ctxt);