/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002-2017 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output.  "Dead statements" have no impact
   on the program's output, while "necessary statements" may have
   such an impact.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */
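
/* As a small illustrative example (not taken from the sources), in

     int f (int x)
     {
       int a = x + 1;   // dead: 'a' is never used
       return 2 * x;
     }

   phase 1 marks the return statement as necessary, phase 2 propagates
   necessity to the statements computing its operands, and phase 3
   deletes the assignment to 'a'.  */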
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "tree-chkp.h"
#include "tree-ssa-propagate.h"
#include "gimple-fold.h"
static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1

static vec<gimple *> worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;
/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static control_dependences *cd;
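
/* As an illustrative example (not from the sources): given

     if (p)
       s1;
     s2;

   the block containing s1 is control dependent on the edge taken out
   of the block testing p when p is true, while the block containing
   s2 executes regardless of p and is control dependent on neither
   outgoing edge.  */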
/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;

/* When non-NULL, holds a map from basic block index into the postorder.  */
static int *bb_postorder;
/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */

static inline void
mark_stmt_necessary (gimple *stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    worklist.safe_push (stmt);
  if (add_to_worklist && bb_contains_live_stmts && !is_gimple_debug (stmt))
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}
/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple *stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (bitmap_bit_p (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
                  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  bitmap_set_bit (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
  worklist.safe_push (stmt);
}
/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
{
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement could throw, it can be deemed necessary.  */
  if (cfun->can_throw_non_call_exceptions
      && !cfun->can_delete_dead_exceptions
      && stmt_could_throw_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG cleanup can then remove the
     block and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
        tree callee = gimple_call_fndecl (stmt);
        if (callee != NULL_TREE
            && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_MALLOC:
            case BUILT_IN_ALIGNED_ALLOC:
            case BUILT_IN_CALLOC:
            CASE_BUILT_IN_ALLOCA:
            case BUILT_IN_STRDUP:
            case BUILT_IN_STRNDUP:
              return;

            default:;
            }
        /* Most, but not all function calls are required.  Function calls that
           produce no result and have no side effects (i.e. const and pure
           functions) are unnecessary.  */
        if (gimple_has_side_effects (stmt))
          {
            mark_stmt_necessary (stmt, true);
            return;
          }
        if (!gimple_call_lhs (stmt))
          return;
        break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
         easily locate the debug temp bind stmt for a use thereof,
         we could refrain from marking all debug temps here, and
         mark them only if they're used.  */
      if (gimple_debug_nonbind_marker_p (stmt)
          || !gimple_debug_bind_p (stmt)
          || gimple_debug_bind_has_value_p (stmt)
          || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
        mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
        mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
        return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (stmt_may_clobber_global_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}
/* Mark the last statement of BB as necessary.  */

static void
mark_last_stmt_necessary (basic_block bb)
{
  gimple *stmt = last_stmt (bb);

  bitmap_set_bit (last_stmt_necessary, bb->index);
  bitmap_set_bit (bb_contains_live_stmts, bb->index);

  /* We actually mark the statement only if it is a control statement.  */
  if (stmt && is_ctrl_stmt (stmt))
    mark_stmt_necessary (stmt, true);
}
/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));

  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;

  EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
                            0, edge_number, bi)
    {
      basic_block cd_bb = cd->get_edge_src (edge_number);

      if (ignore_self && cd_bb == bb)
        {
          skipped = true;
          continue;
        }

      if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
        mark_last_stmt_necessary (cd_bb);
    }

  if (!skipped)
    bitmap_set_bit (visited_control_parents, bb->index);
}
/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  Otherwise control dependence analysis decides which of them
   are needed.  */

static void
find_obviously_necessary_stmts (bool aggressive)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple *phi, *stmt;
  int flags;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          gimple_set_plf (phi, STMT_NECESSARY, false);
        }

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          stmt = gsi_stmt (gsi);
          gimple_set_plf (stmt, STMT_NECESSARY, false);
          mark_stmt_if_obviously_necessary (stmt, aggressive);
        }
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent empty, possibly infinite loops from being removed.  */
  if (aggressive)
    {
      struct loop *loop;
      if (mark_irreducible_loops ())
        FOR_EACH_BB_FN (bb, cfun)
          {
            edge_iterator ei;
            FOR_EACH_EDGE (e, ei, bb->succs)
              if ((e->flags & EDGE_DFS_BACK)
                  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
                {
                  if (dump_file)
                    fprintf (dump_file,
                             "Marking back edge of irreducible loop %i->%i\n",
                             e->src->index, e->dest->index);
                  mark_control_dependent_edges_necessary (e->dest, false);
                }
          }

      FOR_EACH_LOOP (loop, 0)
        if (!finite_loop_p (loop))
          {
            if (dump_file)
              fprintf (dump_file, "cannot prove finiteness of loop %i\n",
                       loop->num);
            mark_control_dependent_edges_necessary (loop->latch, false);
          }
    }
}
/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  return !(DECL_P (ref)
           && !may_be_aliased (ref));
}
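
/* For instance (illustrative, not from the sources): a local 'int x'
   whose address is never taken is a DECL rejected by may_be_aliased,
   so a reference to it is not aliased; a MEM_REF through an arbitrary
   pointer may be.  */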
static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;
/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
         and we can catch it in the current function where we could inspect
         the previous value.
         ??? We only need to care about the RHS throwing.  For aggregate
         assignments or similar calls and non-call exceptions the LHS
         might throw as well.  */
      && !stmt_can_throw_internal (def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      HOST_WIDE_INT size, offset, max_size;
      bool reverse;
      ao_ref_base (ref);
      base
        = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
         so base == ref->base does not always hold.  */
      if (base == ref->base)
        {
          /* For a must-alias check we need to be able to constrain
             the accesses properly.  */
          if (size != -1 && size == max_size
              && ref->max_size != -1)
            {
              if (offset <= ref->offset
                  && offset + size >= ref->offset + ref->max_size)
                return true;
            }
          /* Or they need to be exactly the same.  */
          else if (ref->ref
                   /* Make sure there is no induction variable involved
                      in the references (gcc.c-torture/execute/pr42142.c).
                      The simplest way is to check if the kill dominates
                      the use.  */
                   /* But when both are in the same block we cannot
                      easily tell whether we came from a backedge
                      unless we decide to compute stmt UIDs
                      (see PR58246).  */
                   && (basic_block) data != gimple_bb (def_stmt)
                   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
                                      gimple_bb (def_stmt))
                   && operand_equal_p (ref->ref, lhs, 0))
            return true;
        }
    }

  /* Otherwise keep walking.  */
  return false;
}
static void
mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
{
  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
                              mark_aliased_reaching_defs_necessary_1,
                              gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}
/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
                                    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
                  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
        return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (is_gimple_call (def_stmt))
    {
      tree callee = gimple_call_fndecl (def_stmt);
      if (callee != NULL_TREE
          && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee))
          {
          case BUILT_IN_MALLOC:
          case BUILT_IN_ALIGNED_ALLOC:
          case BUILT_IN_CALLOC:
          CASE_BUILT_IN_ALLOCA:
          case BUILT_IN_FREE:
            return false;

          default:;
          }
    }

  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  return false;
}
static void
mark_all_reaching_defs_necessary (gimple *stmt)
{
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
                      mark_all_reaching_defs_necessary_1, NULL, &visited);
}
/* Return true if PHI is a PHI node with a single argument, or with all
   arguments identical, so that it can be removed.  */
static bool
degenerate_phi_p (gimple *phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}
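
/* For example (illustrative, not from the sources), 'x_3 = PHI <a_1, a_1>'
   is degenerate: both arguments are a_1, so the PHI computes no new value
   and carries no control dependence of its own.  */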
/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode (AGGRESSIVE is false), control dependences
   are ignored.  */

static void
propagate_necessity (bool aggressive)
{
  gimple *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (worklist.length () > 0)
    {
      /* Take STMT from worklist.  */
      stmt = worklist.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "processing: ");
          print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
          fprintf (dump_file, "\n");
        }

      if (aggressive)
        {
          /* Mark the last statement of the basic blocks on which the block
             containing STMT is control dependent, but only if we haven't
             already done so.  */
          basic_block bb = gimple_bb (stmt);
          if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
              && !bitmap_bit_p (visited_control_parents, bb->index))
            mark_control_dependent_edges_necessary (bb, false);
        }
      if (gimple_code (stmt) == GIMPLE_PHI
          /* We do not process virtual PHI nodes nor do we track their
             necessity.  */
          && !virtual_operand_p (gimple_phi_result (stmt)))
        {
          /* PHI nodes are somewhat special in that each PHI alternative has
             data and control dependencies.  All the statements feeding the
             PHI node's arguments are always necessary.  In aggressive mode,
             we also consider the control dependent edges leading to the
             predecessor block associated with each PHI alternative as
             necessary.  */
          gphi *phi = as_a <gphi *> (stmt);
          size_t k;

          for (k = 0; k < gimple_phi_num_args (stmt); k++)
            {
              tree arg = PHI_ARG_DEF (stmt, k);
              if (TREE_CODE (arg) == SSA_NAME)
                mark_operand_necessary (arg);
            }

          /* For PHI operands it matters from where the control flow arrives
             to the BB.  Consider the following example:

             a=exp1;
             b=exp2;
             if (test)
                ;
             else
                ;
             c=PHI(a,b)

             We need to mark control dependence of the empty basic blocks,
             since they contain computations of PHI operands.

             Doing so is too restrictive in the case the predecessor block
             is in a loop.  Consider:

             if (b)
               {
                 int i;
                 for (i = 0; i<1000; ++i)
                   ;
                 j = 0;
               }
             return j;

             There is a PHI for J in the BB containing the return statement.
             In this case the control dependence of the predecessor block
             (that is within the empty loop) also contains the block that
             determines the number of iterations of the loop, which would
             prevent removing the empty loop in this case.

             This scenario can be avoided by splitting critical edges.
             To save the critical edge splitting pass we identify how the
             control dependence would look like if the edge was split.

             Consider the modified CFG created from current CFG by splitting
             edge B->C.  In the postdominance tree of modified CFG, C' is
             always a child of C.  There are two cases how the children of C'
             can look like:

             1) C' is leaf

                In this case the only basic block C' is control dependent on is B.

             2) C' has single child that is B

                In this case control dependence of C' is same as control
                dependence of B in original CFG except for block B itself.
                (since C' postdominates B in modified CFG)

             Now how to decide what case happens?  There are two basic options:

             a) C postdominates B.  Then C immediately postdominates B and
                case 2 happens iff there is no other way from B to C except
                the edge B->C.

                There is another way from B to C iff there is a successor of B
                that is not postdominated by B.  Testing this condition is
                somewhat expensive, because we need to iterate over all
                successors of B.  We are safe to assume that this does not
                happen: we will mark B as needed when processing the other
                path from B to C that is control dependent on B and marking
                control dependencies of B itself is harmless because they will
                be processed anyway after processing the control statement
                in B.

             b) C does not postdominate B.  Then case 1 always happens since
                there is a path from C to exit that does not go through B and
                thus also C'.  */

          if (aggressive && !degenerate_phi_p (stmt))
            {
              for (k = 0; k < gimple_phi_num_args (stmt); k++)
                {
                  basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;

                  if (gimple_bb (stmt)
                      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
                    {
                      if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
                        mark_last_stmt_necessary (arg_bb);
                    }
                  else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
                           && !bitmap_bit_p (visited_control_parents,
                                             arg_bb->index))
                    mark_control_dependent_edges_necessary (arg_bb, true);
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* If this is a call to free which is directly fed by an
             allocation function, do not mark it necessary through
             processing the argument.  */
          if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              gimple *def_stmt;
              tree def_callee;
              /* If the pointer we free is defined by an allocation
                 function do not add the call to the worklist.  */
              if (TREE_CODE (ptr) == SSA_NAME
                  && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
                  && (def_callee = gimple_call_fndecl (def_stmt))
                  && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
                      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
                {
                  gimple *bounds_def_stmt;
                  tree bounds;

                  /* For instrumented calls we should also check used
                     bounds are returned by the same allocation call.  */
                  if (!gimple_call_with_bounds_p (stmt)
                      || ((bounds = gimple_call_arg (stmt, 1))
                          && TREE_CODE (bounds) == SSA_NAME
                          && (bounds_def_stmt = SSA_NAME_DEF_STMT (bounds))
                          && chkp_gimple_call_builtin_p (bounds_def_stmt,
                                                         BUILT_IN_CHKP_BNDRET)
                          && gimple_call_arg (bounds_def_stmt, 0) == ptr))
                    continue;
                }
            }
          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
            mark_operand_necessary (use);

          use = gimple_vuse (stmt);
          if (!use)
            continue;

          /* If we dropped to simple mode make all immediately
             reachable definitions necessary.  */
          if (chain_ovfl)
            {
              mark_all_reaching_defs_necessary (stmt);
              continue;
            }

          /* For statements that may load from memory (have a VUSE) we
             have to mark all reaching (may-)definitions as necessary.
             We partition this task into two cases:
              1) explicit loads based on decls that are not aliased
              2) implicit loads (like calls) and explicit loads not
                 based on decls that are not aliased (like indirect
                 references or loads from globals)
             For 1) we mark all reaching may-defs as necessary, stopping
             at dominating kills.  For 2) we want to mark all dominating
             references necessary, but non-aliased ones which we handle
             in 1).  By keeping a global visited bitmap for references
             we walk for 2) we avoid quadratic behavior for those.  */
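
          /* Illustrative example (not from the sources): a load
             'tmp_1 = local_array[i_2]', where local_array is a local
             whose address is never taken, falls into 1) and its walk
             can stop at a dominating store that covers the read, while
             'tmp_3 = *ptr_4' or a load from a global falls into 2) and
             must make all reaching may-defs necessary.  */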
          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);
              unsigned i;

              /* Calls to functions that are merely acting as barriers
                 or that only store to memory do not make any previous
                 stores necessary.  */
              if (callee != NULL_TREE
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
                      || ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callee))
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
                continue;

              /* Calls implicitly load from memory, their arguments
                 in addition may explicitly perform memory loads.  */
              mark_all_reaching_defs_necessary (stmt);
              for (i = 0; i < gimple_call_num_args (stmt); ++i)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if (TREE_CODE (arg) == SSA_NAME
                      || is_gimple_min_invariant (arg))
                    continue;
                  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
                    arg = TREE_OPERAND (arg, 0);
                  if (!ref_may_be_aliased (arg))
                    mark_aliased_reaching_defs_necessary (stmt, arg);
                }
            }
          else if (gimple_assign_single_p (stmt))
            {
              tree rhs;
              /* If this is a load mark things necessary.  */
              rhs = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
            {
              tree rhs = gimple_return_retval (return_stmt);
              /* A return statement may perform a load.  */
              if (rhs
                  && TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
            {
              unsigned i;
              mark_all_reaching_defs_necessary (stmt);
              /* Inputs may perform loads.  */
              for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
                {
                  tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
                  if (TREE_CODE (op) != SSA_NAME
                      && !is_gimple_min_invariant (op)
                      && TREE_CODE (op) != CONSTRUCTOR
                      && !ref_may_be_aliased (op))
                    mark_aliased_reaching_defs_necessary (stmt, op);
                }
            }
          else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
            {
              /* The beginning of a transaction is a memory barrier.  */
              /* ??? If we were really cool, we'd only be a barrier
                 for the memories touched within the transaction.  */
              mark_all_reaching_defs_necessary (stmt);
            }
          else
            gcc_unreachable ();

          /* If we over-used our alias oracle budget, drop to simple
             mode.  The cost metric allows quadratic behavior
             (number of uses times number of may-defs queries) up to
             a constant maximal number of queries and after that falls back to
             super-linear complexity.  */
          if (/* Constant but quadratic for small functions.  */
              total_chain > 128 * 128
              /* Linear in the number of may-defs.  */
              && total_chain > 32 * longest_chain
              /* Linear in the number of uses.  */
              && total_chain > nr_walks * 32)
            {
              chain_ovfl = true;
              if (visited)
                bitmap_clear (visited);
            }
        }
    }
}
/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gphi *phi;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi.phi ();

      /* We do not track necessity of virtual PHI nodes.  Instead do
         very simple dead PHI removal here.  */
      if (virtual_operand_p (gimple_phi_result (phi)))
        {
          /* Virtual PHI nodes with one or identical arguments
             can be removed.  */
          if (degenerate_phi_p (phi))
            {
              tree vdef = gimple_phi_result (phi);
              tree vuse = gimple_phi_arg_def (phi, 0);

              use_operand_p use_p;
              imm_use_iterator iter;
              gimple *use_stmt;
              FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, vuse);
              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
                  && TREE_CODE (vuse) == SSA_NAME)
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
            }
          else
            gimple_set_plf (phi, STMT_NECESSARY, true);
        }

      if (!gimple_plf (phi, STMT_NECESSARY))
        {
          something_changed = true;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Deleting : ");
              print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          remove_phi_node (&gsi, true);
          stats.removed_phis++;
          continue;
        }

      gsi_next (&gsi);
    }
  return something_changed;
}
/* Remove dead statement pointed to by iterator I.  Receives the basic block BB
   containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
  gimple *stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we need to update
     the CFG.  We can choose any of the edges out of BB as long as we are sure
     not to close infinite loops.  This is done by always choosing the edge
     closer to exit in inverted_post_order_compute order.  */
  if (is_ctrl_stmt (stmt))
    {
      edge_iterator ei;
      edge e = NULL, e2;

      /* See if there is only one non-abnormal edge.  */
      if (single_succ_p (bb))
        e = single_succ_edge (bb);
      /* Otherwise choose one that is closer to a BB with a live statement
         in it.  To be able to choose one, we compute the inverted post
         order starting from all BBs with live statements.  */
      if (!e)
        {
          if (!bb_postorder)
            {
              auto_vec<int, 20> postorder;
              inverted_post_order_compute (&postorder,
                                           &bb_contains_live_stmts);
              bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
              for (unsigned int i = 0; i < postorder.length (); ++i)
                bb_postorder[postorder[i]] = i;
            }
          FOR_EACH_EDGE (e2, ei, bb->succs)
            if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
                || bb_postorder [e->dest->index]
                   < bb_postorder [e2->dest->index])
              e = e2;
        }
      gcc_assert (e);
      e->probability = profile_probability::always ();

      /* The edge is no longer associated with a conditional, so it does
         not have TRUE/FALSE flags.
         We are also safe to drop EH/ABNORMAL flags and turn them into
         normal control flow, because we know that all the destinations
         (including those odd edges) are equivalent for program execution.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE | EDGE_EH | EDGE_ABNORMAL);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
        if (e != e2)
          {
            cfg_altered = true;
            /* If we made a BB unconditionally exit a loop or removed
               an entry into an irreducible region, then this transform
               alters the set of BBs in the loop.  Schedule a fixup.  */
            if (loop_exit_edge_p (bb->loop_father, e)
                || (e2->dest->flags & BB_IRREDUCIBLE_LOOP))
              loops_state_set (LOOPS_NEED_FIXUP);
            remove_edge (e2);
          }
        else
          ei_next (&ei);
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_BIND_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if ((VAR_P (lhs) || TREE_CODE (lhs) == PARM_DECL)
          && !DECL_IGNORED_P (lhs)
          && is_gimple_reg_type (TREE_TYPE (lhs))
          && !is_global_var (lhs)
          && !DECL_HAS_VALUE_EXPR_P (lhs))
        {
          tree rhs = gimple_assign_rhs1 (stmt);
          gdebug *note
            = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
          gsi_insert_after (i, note, GSI_SAME_STMT);
        }
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}
/* Helper for maybe_optimize_arith_overflow.  Find in *TP if there are any
   uses of DATA (an SSA_NAME) other than through a REALPART_EXPR
   referencing it.  */

static tree
find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
{
  if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
    *walk_subtrees = 0;
  if (*tp == (tree) data)
    return *tp;
  return NULL_TREE;
}
/* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
   but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
   into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
   uses.  */

static void
maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
                               enum tree_code subcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);

  if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
    return;

  imm_use_iterator imm_iter;
  use_operand_p use_p;
  bool has_debug_uses = false;
  bool has_realpart_uses = false;
  bool has_other_uses = false;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
    {
      gimple *use_stmt = USE_STMT (use_p);
      if (is_gimple_debug (use_stmt))
        has_debug_uses = true;
      else if (is_gimple_assign (use_stmt)
               && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
               && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
        has_realpart_uses = true;
      else
        {
          has_other_uses = true;
          break;
        }
    }

  if (!has_realpart_uses || has_other_uses)
    return;

  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  tree type = TREE_TYPE (TREE_TYPE (lhs));
  tree utype = type;
  if (!TYPE_UNSIGNED (type))
    utype = build_nonstandard_integer_type (TYPE_PRECISION (type), 1);
  tree result = fold_build2_loc (loc, subcode, utype,
                                 fold_convert_loc (loc, utype, arg0),
                                 fold_convert_loc (loc, utype, arg1));
  result = fold_convert_loc (loc, type, result);

  if (has_debug_uses)
    {
      gimple *use_stmt;
      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
        {
          if (!gimple_debug_bind_p (use_stmt))
            continue;
          tree v = gimple_debug_bind_get_value (use_stmt);
          if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
            {
              gimple_debug_bind_reset_value (use_stmt);
              update_stmt (use_stmt);
            }
        }
    }

  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
    result = drop_tree_overflow (result);
  tree overflow = build_zero_cst (type);
  tree ctype = build_complex_type (type);
  if (TREE_CODE (result) == INTEGER_CST)
    result = build_complex (ctype, result, overflow);
  else
    result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
                         ctype, result, overflow);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Transforming call: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "because the overflow result is never used into: ");
      print_generic_stmt (dump_file, result, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  if (!update_call_from_tree (gsi, result))
    gimplify_and_update_call_from_tree (gsi, result);
}
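
/* As an illustrative sketch of the transformation above (GIMPLE shown
   informally, not taken from a real dump): when only the real part of
   the result is used, as in

     _1 = ADD_OVERFLOW (a_2, b_3);
     sum_4 = REALPART_EXPR <_1>;

   the call is replaced by COMPLEX_EXPR <(T)((UT) a_2 + (UT) b_3), 0>,
   i.e. the addition is carried out in the corresponding unsigned type
   UT, where overflow is well defined, and the unused overflow flag
   becomes the constant 0.  */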
/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple *stmt;
  tree call;
  vec<basic_block> h;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS,
                                single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

  while (h.length ())
    {
      bb = h.pop ();

      /* Remove dead statements.  */
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
        {
          stmt = gsi_stmt (gsi);

          psi = gsi;
          gsi_prev (&psi);

          stats.total++;

          /* We can mark a call to free as not necessary if the
             defining statement of its argument is not necessary
             (and thus is getting removed).  */
          if (gimple_plf (stmt, STMT_NECESSARY)
              && gimple_call_builtin_p (stmt, BUILT_IN_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              if (TREE_CODE (ptr) == SSA_NAME)
                {
                  gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
                  if (!gimple_nop_p (def_stmt)
                      && !gimple_plf (def_stmt, STMT_NECESSARY))
                    gimple_set_plf (stmt, STMT_NECESSARY, false);
                }
              /* We did not propagate necessity for free calls fed
                 by allocation functions, to allow elimination of
                 unnecessary alloc-free sequences.  For instrumented
                 calls this also means we did not mark the bounds
                 producer as necessary; do that now, in case the
                 free call is not removed.  */
              if (gimple_call_with_bounds_p (stmt))
                {
                  gimple *bounds_def_stmt;
                  tree bounds = gimple_call_arg (stmt, 1);
                  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
                  bounds_def_stmt = SSA_NAME_DEF_STMT (bounds);
                  if (bounds_def_stmt
                      && !gimple_plf (bounds_def_stmt, STMT_NECESSARY))
                    gimple_set_plf (bounds_def_stmt, STMT_NECESSARY,
                                    gimple_plf (stmt, STMT_NECESSARY));
                }
            }

          /* If the statement is not necessary then remove it.  */
          if (!gimple_plf (stmt, STMT_NECESSARY))
            {
              /* Keep live those clobbers that we can keep live.  */
              if (gimple_clobber_p (stmt))
                {
                  ssa_op_iter iter;
                  use_operand_p use_p;
                  bool dead = false;
                  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
                    {
                      tree name = USE_FROM_PTR (use_p);
                      if (!SSA_NAME_IS_DEFAULT_DEF (name)
                          && !bitmap_bit_p (processed, SSA_NAME_VERSION (name)))
                        {
                          dead = true;
                          break;
                        }
                    }
                  if (!dead)
                    continue;
                }
              if (!is_gimple_debug (stmt))
                something_changed = true;
              remove_dead_stmt (&gsi, bb);
            }
          else if (is_gimple_call (stmt))
            {
              tree name = gimple_call_lhs (stmt);

              notice_special_calls (as_a <gcall *> (stmt));

              /* When the LHS of "var = call ();" is dead, simplify the
                 statement into "call ();", saving one operand.  */
              if (name
                  && TREE_CODE (name) == SSA_NAME
                  && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
                  /* Avoid doing so for allocation calls which we
                     did not mark as necessary; it would confuse the
                     special logic we apply to malloc/free pair removal.  */
                  && (!(call = gimple_call_fndecl (stmt))
                      || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
                      || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
                          && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
                          && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
                          && !ALLOCA_FUNCTION_CODE_P
                                (DECL_FUNCTION_CODE (call))))
                  /* Avoid doing so for bndret calls for the same reason.  */
                  && !chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
                {
                  something_changed = true;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Deleting LHS of call: ");
                      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
                      fprintf (dump_file, "\n");
                    }

                  gimple_call_set_lhs (stmt, NULL_TREE);
                  maybe_clean_or_replace_eh_stmt (stmt, stmt);
                  update_stmt (stmt);
                  release_ssa_name (name);

                  /* GOMP_SIMD_LANE or ASAN_POISON without lhs is not
                     needed.  */
                  if (gimple_call_internal_p (stmt))
                    switch (gimple_call_internal_fn (stmt))
                      {
                      case IFN_GOMP_SIMD_LANE:
                      case IFN_ASAN_POISON:
                        remove_dead_stmt (&gsi, bb);
                        break;
                      default:
                        break;
                      }
                }
              else if (gimple_call_internal_p (stmt))
                switch (gimple_call_internal_fn (stmt))
                  {
                  case IFN_ADD_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
                    break;
                  case IFN_SUB_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
                    break;
                  case IFN_MUL_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
                    break;
                  default:
                    break;
                  }
            }
        }
    }

  h.release ();

  /* Since we don't track liveness of virtual PHI nodes, it is possible that we
     rendered some PHI nodes unreachable while they are still in use.
     Mark them for renaming.  */
  if (cfg_altered)
    {
      basic_block prev_bb;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
           bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
        {
          prev_bb = bb->prev_bb;

          if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
              || !(bb->flags & BB_REACHABLE))
            {
              for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
                   gsi_next (&gsi))
                if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
                  {
                    bool found = false;
                    imm_use_iterator iter;

                    FOR_EACH_IMM_USE_STMT (stmt, iter,
                                           gimple_phi_result (gsi.phi ()))
                      {
                        if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
                          continue;
                        if (gimple_code (stmt) == GIMPLE_PHI
                            || gimple_plf (stmt, STMT_NECESSARY))
                          {
                            found = true;
                            BREAK_FROM_IMM_USE_STMT (iter);
                          }
                      }
                    if (found)
                      mark_virtual_phi_result_for_renaming (gsi.phi ());
                  }

              if (!(bb->flags & BB_REACHABLE))
                {
                  /* Speed up the removal of blocks that don't
                     dominate others.  Walking backwards, this should
                     be the common case.  ??? Do we need to recompute
                     dominators because of cfg_altered?  */
                  if (!first_dom_son (CDI_DOMINATORS, bb))
                    delete_basic_block (bb);
                  else
                    {
                      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

                      while (h.length ())
                        {
                          bb = h.pop ();
                          prev_bb = bb->prev_bb;
                          /* Rearrangements to the CFG may have failed
                             to update the dominators tree, so that
                             formerly-dominated blocks are now
                             otherwise reachable.  */
                          if (!!(bb->flags & BB_REACHABLE))
                            continue;
                          delete_basic_block (bb);
                        }

                      h.release ();
                    }
                }
            }
        }
    }

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  if (bb_postorder)
    free (bb_postorder);
  bb_postorder = NULL;

  return something_changed;
}
/* Print out removed statement statistics.  */

static void
print_stats (void)
{
  float percg;

  percg = ((float) stats.removed / (float) stats.total) * 100;
  fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
           stats.removed, stats.total, (int) percg);

  if (stats.total_phis == 0)
    percg = 0;
  else
    percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;

  fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
           stats.removed_phis, stats.total_phis, (int) percg);
}
/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (last_stmt_necessary);
      bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (bb_contains_live_stmts);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  bitmap_clear (processed);

  worklist.create (64);
  cfg_altered = false;
}
/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      delete cd;
      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
      sbitmap_free (bb_contains_live_stmts);
      bb_contains_live_stmts = NULL;
    }

  sbitmap_free (processed);

  worklist.release ();
}
/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
          the dominance info is not invalidated after DCE1.  This is
          not an issue right now because we only run aggressive DCE
          as the last tree SSA pass, but keep this in mind when you
          start experimenting with pass ordering.  */

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  bool something_changed = 0;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve the chances that a loop
     will be proved finite in testcases such as loop-15.c and loop-24.c.  */
  bool in_loop_pipeline = scev_initialized_p ();
  if (aggressive && ! in_loop_pipeline)
    {
      scev_initialize ();
      loop_optimizer_init (LOOPS_NORMAL
                           | LOOPS_HAVE_RECORDED_EXITS);
    }

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      calculate_dominance_info (CDI_POST_DOMINATORS);
      cd = new control_dependences ();

      visited_control_parents =
        sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (aggressive);

  if (aggressive && ! in_loop_pipeline)
    {
      loop_optimizer_finalize ();
      scev_finalize ();
    }

  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (aggressive);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  if (something_changed)
    {
      free_numbers_of_iterations_estimates (cfun);
      if (in_loop_pipeline)
        scev_reset ();
      return TODO_update_ssa | TODO_cleanup_cfg;
    }
  return 0;
}
/* Pass entry points.  */
static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}

namespace {

const pass_data pass_data_dce =
{
  GIMPLE_PASS, /* type */
  "dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_dce : public gimple_opt_pass
{
public:
  pass_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_dce (); }

}; // class pass_dce

} // anon namespace

gimple_opt_pass *
make_pass_dce (gcc::context *ctxt)
{
  return new pass_dce (ctxt);
}

namespace {

const pass_data pass_data_cd_dce =
{
  GIMPLE_PASS, /* type */
  "cddce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CD_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cd_dce : public gimple_opt_pass
{
public:
  pass_cd_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cd_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_cd_dce (); }

}; // class pass_cd_dce

} // anon namespace

gimple_opt_pass *
make_pass_cd_dce (gcc::context *ctxt)
{
  return new pass_cd_dce (ctxt);
}
/* A cheap DCE interface.  WORKLIST is a list of possibly dead stmts and
   is consumed by this function.  The function has linear complexity in
   the number of dead stmts, with a constant factor proportional to the
   average number of SSA use operands per statement.  */

void
simple_dce_from_worklist (bitmap worklist)
{
  while (! bitmap_empty_p (worklist))
    {
      /* Pop item.  */
      unsigned i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);

      tree def = ssa_name (i);
      /* Removed by somebody else or still in use.  */
      if (! def || ! has_zero_uses (def))
        continue;

      gimple *t = SSA_NAME_DEF_STMT (def);
      if (gimple_has_side_effects (t))
        continue;

      /* Add uses to the worklist.  */
      ssa_op_iter iter;
      use_operand_p use_p;
      FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
        {
          tree use = USE_FROM_PTR (use_p);
          if (TREE_CODE (use) == SSA_NAME
              && ! SSA_NAME_IS_DEFAULT_DEF (use))
            bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
        }

      /* Remove stmt.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Removing dead stmt:");
          print_gimple_stmt (dump_file, t, 0);
        }
      gimple_stmt_iterator gsi = gsi_for_stmt (t);
      if (gimple_code (t) == GIMPLE_PHI)
        remove_phi_node (&gsi, true);
      else
        {
          gsi_remove (&gsi, true);
          release_defs (t);
        }
    }
}
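
/* Example usage (illustrative, from a hypothetical caller): after
   replacing all uses of some SSA name OLD_NAME, collect its version in
   a bitmap and let the cheap DCE above remove the now-dead definition
   chain:

     bitmap dce_worklist = BITMAP_ALLOC (NULL);
     bitmap_set_bit (dce_worklist, SSA_NAME_VERSION (old_name));
     simple_dce_from_worklist (dce_worklist);
     BITMAP_FREE (dce_worklist);
*/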