/* gcc/tree-ssa-dce.c */
/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002-2018 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output.  "Dead statements" have no impact
   on the program's output, while "necessary statements" may have
   such an impact.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */
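/* As a small illustration (not taken from the pass itself), consider
   this GIMPLE-like sequence in a function returning b_2:

       a_1 = x_0 + 1;
       b_2 = x_0 * 2;
       return b_2;

   Phase 1 marks the return statement as necessary; phase 2 propagates
   necessity to the definition of b_2 (and from there to x_0's
   definition, if any); phase 3 then removes the definition of a_1,
   which was never marked.  */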
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "tree-chkp.h"
#include "tree-ssa-propagate.h"
#include "gimple-fold.h"
static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1

static vec<gimple *> worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
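/* For instance (illustrative numbers only): if block 7 is control
   dependent on edges 2 and 5, then bits 2 and 5 are set in the bitmap
   stored at index 7 of the array.  */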
static control_dependences *cd;

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;

/* When non-NULL, holds a map from basic block index into the postorder.  */
static int *bb_postorder;
/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */

static inline void
mark_stmt_necessary (gimple *stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    worklist.safe_push (stmt);
  if (add_to_worklist && bb_contains_live_stmts && !is_gimple_debug (stmt))
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}
/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple *stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (bitmap_bit_p (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
                  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  bitmap_set_bit (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
  worklist.safe_push (stmt);
}
/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
{
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement could throw, it can be deemed necessary.  */
  if (cfun->can_throw_non_call_exceptions
      && !cfun->can_delete_dead_exceptions
      && stmt_could_throw_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
        tree callee = gimple_call_fndecl (stmt);
        if (callee != NULL_TREE
            && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_MALLOC:
            case BUILT_IN_ALIGNED_ALLOC:
            case BUILT_IN_CALLOC:
            CASE_BUILT_IN_ALLOCA:
            case BUILT_IN_STRDUP:
            case BUILT_IN_STRNDUP:
              return;

            default:;
            }
        /* Most, but not all function calls are required.  Function calls
           that produce no result and have no side effects (i.e. calls to
           const or pure functions) are unnecessary.  */
        if (gimple_has_side_effects (stmt))
          {
            mark_stmt_necessary (stmt, true);
            return;
          }
        if (!gimple_call_lhs (stmt))
          return;
        break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
         easily locate the debug temp bind stmt for a use thereof,
         we could refrain from marking all debug temps here, and
         mark them only if they're used.  */
      if (gimple_debug_nonbind_marker_p (stmt)
          || !gimple_debug_bind_p (stmt)
          || gimple_debug_bind_has_value_p (stmt)
          || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
        mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
        mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
        return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (stmt_may_clobber_global_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}
/* Mark the last statement of BB as necessary.  */

static void
mark_last_stmt_necessary (basic_block bb)
{
  gimple *stmt = last_stmt (bb);

  bitmap_set_bit (last_stmt_necessary, bb->index);
  bitmap_set_bit (bb_contains_live_stmts, bb->index);

  /* We actually mark the statement only if it is a control statement.  */
  if (stmt && is_ctrl_stmt (stmt))
    mark_stmt_necessary (stmt, true);
}
/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));

  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;

  EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
                            0, edge_number, bi)
    {
      basic_block cd_bb = cd->get_edge_src (edge_number);

      if (ignore_self && cd_bb == bb)
        {
          skipped = true;
          continue;
        }

      if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
        mark_last_stmt_necessary (cd_bb);
    }

  if (!skipped)
    bitmap_set_bit (visited_control_parents, bb->index);
}
/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  Otherwise control dependence analysis is used to decide
   which control statements to keep.  */

static void
find_obviously_necessary_stmts (bool aggressive)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple *phi, *stmt;
  int flags;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          gimple_set_plf (phi, STMT_NECESSARY, false);
        }

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          stmt = gsi_stmt (gsi);
          gimple_set_plf (stmt, STMT_NECESSARY, false);
          mark_stmt_if_obviously_necessary (stmt, aggressive);
        }
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent empty, possibly infinite loops from being removed.  */
  if (aggressive)
    {
      struct loop *loop;

      if (mark_irreducible_loops ())
        FOR_EACH_BB_FN (bb, cfun)
          {
            edge_iterator ei;
            FOR_EACH_EDGE (e, ei, bb->succs)
              if ((e->flags & EDGE_DFS_BACK)
                  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
                {
                  if (dump_file)
                    fprintf (dump_file,
                             "Marking back edge of irreducible loop %i->%i\n",
                             e->src->index, e->dest->index);
                  mark_control_dependent_edges_necessary (e->dest, false);
                }
          }

      FOR_EACH_LOOP (loop, 0)
        if (!finite_loop_p (loop))
          {
            if (dump_file)
              fprintf (dump_file, "cannot prove finiteness of loop %i\n",
                       loop->num);
            mark_control_dependent_edges_necessary (loop->latch, false);
          }
    }
}
/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  return !(DECL_P (ref)
           && !may_be_aliased (ref));
}
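/* Illustrative example (not from the sources): for a reference such as
   a.b[i].c, the loop above strips the handled components down to the
   base decl "a"; the reference may then be aliased iff "a" itself may
   be aliased, e.g. because its address has been taken.  */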
static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;
/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
         and we can catch it in the current function where we could inspect
         the previous value.
         ??? We only need to care about the RHS throwing.  For aggregate
         assignments or similar calls and non-call exceptions the LHS
         might throw as well.  */
      && !stmt_can_throw_internal (def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      poly_int64 size, offset, max_size;
      bool reverse;
      ao_ref_base (ref);
      base
        = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
         so base == ref->base does not always hold.  */
      if (base == ref->base)
        {
          /* For a must-alias check we need to be able to constrain
             the accesses properly.  */
          if (known_eq (size, max_size)
              && known_subrange_p (ref->offset, ref->max_size, offset, size))
            return true;
          /* Or they need to be exactly the same.  */
          else if (ref->ref
                   /* Make sure there is no induction variable involved
                      in the references (gcc.c-torture/execute/pr42142.c).
                      The simplest way is to check if the kill dominates
                      the use.  */
                   /* But when both are in the same block we cannot
                      easily tell whether we came from a backedge
                      unless we decide to compute stmt UIDs
                      (see PR58246).  */
                   && (basic_block) data != gimple_bb (def_stmt)
                   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
                                      gimple_bb (def_stmt))
                   && operand_equal_p (ref->ref, lhs, 0))
            return true;
        }
    }

  /* Otherwise keep walking.  */
  return false;
}
static void
mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
{
  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
                              mark_aliased_reaching_defs_necessary_1,
                              gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}
/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
                                    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
                  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
        return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (is_gimple_call (def_stmt))
    {
      tree callee = gimple_call_fndecl (def_stmt);
      if (callee != NULL_TREE
          && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee))
          {
          case BUILT_IN_MALLOC:
          case BUILT_IN_ALIGNED_ALLOC:
          case BUILT_IN_CALLOC:
          CASE_BUILT_IN_ALLOCA:
          case BUILT_IN_FREE:
            return false;

          default:;
          }
    }

  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  return false;
}
static void
mark_all_reaching_defs_necessary (gimple *stmt)
{
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
                      mark_all_reaching_defs_necessary_1, NULL, &visited);
}
/* Return true for PHI nodes with one or identical arguments; these
   can be removed.  */

static bool
degenerate_phi_p (gimple *phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}
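/* For example (illustrative), the PHI node

       x_3 = PHI <a_1 (bb 2), a_1 (bb 3)>

   is degenerate, since every argument is a_1; x_3 can simply be
   replaced by a_1.  */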
/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode (AGGRESSIVE is false), control dependence
   is ignored.  */
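/* Illustrative propagation (not from the sources): given

       y_2 = x_1 + 1;
       z_3 = y_2 * 2;
       return z_3;

   the return statement is already necessary, so popping it from the
   worklist marks the definition of z_3, which in turn marks the
   definition of y_2, which finally marks x_1's definition (if any).  */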
static void
propagate_necessity (bool aggressive)
{
  gimple *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (worklist.length () > 0)
    {
      /* Take STMT from worklist.  */
      stmt = worklist.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "processing: ");
          print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
          fprintf (dump_file, "\n");
        }

      if (aggressive)
        {
          /* Mark the last statement of the basic blocks on which the block
             containing STMT is control dependent, but only if we haven't
             already done so.  */
          basic_block bb = gimple_bb (stmt);
          if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
              && !bitmap_bit_p (visited_control_parents, bb->index))
            mark_control_dependent_edges_necessary (bb, false);
        }

      if (gimple_code (stmt) == GIMPLE_PHI
          /* We do not process virtual PHI nodes nor do we track their
             necessity.  */
          && !virtual_operand_p (gimple_phi_result (stmt)))
        {
          /* PHI nodes are somewhat special in that each PHI alternative has
             data and control dependencies.  All the statements feeding the
             PHI node's arguments are always necessary.  In aggressive mode,
             we also consider the control dependent edges leading to the
             predecessor block associated with each PHI alternative as
             necessary.  */
          gphi *phi = as_a <gphi *> (stmt);
          size_t k;

          for (k = 0; k < gimple_phi_num_args (stmt); k++)
            {
              tree arg = PHI_ARG_DEF (stmt, k);
              if (TREE_CODE (arg) == SSA_NAME)
                mark_operand_necessary (arg);
            }

          /* For PHI operands it matters from where the control flow arrives
             to the BB.  Consider the following example:

             a=exp1;
             b=exp2;
             if (test)
                ;
             else
                ;
             c=PHI(a,b)

             We need to mark the control dependence of the empty basic
             blocks, since they contain the computations of the PHI operands.

             Doing so is too restrictive in the case the predecessor block
             is in a loop.  Consider:

             if (b)
               {
                 int i;
                 for (i = 0; i<1000; ++i)
                   ;
                 j = 0;
               }
             return j;

             There is a PHI for j in the BB containing the return statement.
             In this case the control dependence of the predecessor block
             (that is within the empty loop) also contains the block
             determining the number of iterations of the loop, which would
             prevent removing the empty loop.

             This scenario can be avoided by splitting critical edges.
             To save the critical edge splitting pass we identify how the
             control dependence would look like if the edge was split.

             Consider the modified CFG created from the current CFG by
             splitting edge B->C.  In the postdominance tree of the modified
             CFG, C' is always a child of C.  There are two cases how the
             children of C' can look like:

             1) C' is a leaf

                In this case the only basic block C' is control dependent
                on is B.

             2) C' has a single child that is B

                In this case the control dependence of C' is the same as the
                control dependence of B in the original CFG except for block
                B itself (since C' postdominates B in the modified CFG).

             Now how to decide which case happens?  There are two basic
             options:

             a) C postdominates B.  Then C immediately postdominates B and
                case 2 happens iff there is no other way from B to C except
                the edge B->C.

                There is another way from B to C iff there is a successor
                of B that is not postdominated by B.  Testing this condition
                is somewhat expensive, because we need to iterate over all
                successors of B.  We are safe to assume that this does not
                happen: we will mark B as needed when processing the other
                path from B to C that is control dependent on B, and marking
                control dependencies of B itself is harmless because they
                will be processed anyway after processing the control
                statement in B.

             b) C does not postdominate B.  Always case 1 happens since
                there is a path from C to the exit that does not go through
                B and thus also not through C'.  */

          if (aggressive && !degenerate_phi_p (stmt))
            {
              for (k = 0; k < gimple_phi_num_args (stmt); k++)
                {
                  basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;

                  if (gimple_bb (stmt)
                      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
                    {
                      if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
                        mark_last_stmt_necessary (arg_bb);
                    }
                  else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
                           && !bitmap_bit_p (visited_control_parents,
                                             arg_bb->index))
                    mark_control_dependent_edges_necessary (arg_bb, true);
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* If this is a call to free which is directly fed by an
             allocation function do not mark that necessary through
             processing the argument.  */
          if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              gimple *def_stmt;
              tree def_callee;
              /* If the pointer we free is defined by an allocation
                 function do not add the call to the worklist.  */
              if (TREE_CODE (ptr) == SSA_NAME
                  && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
                  && (def_callee = gimple_call_fndecl (def_stmt))
                  && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
                      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
                {
                  gimple *bounds_def_stmt;
                  tree bounds;

                  /* For instrumented calls we should also check used
                     bounds are returned by the same allocation call.  */
                  if (!gimple_call_with_bounds_p (stmt)
                      || ((bounds = gimple_call_arg (stmt, 1))
                          && TREE_CODE (bounds) == SSA_NAME
                          && (bounds_def_stmt = SSA_NAME_DEF_STMT (bounds))
                          && chkp_gimple_call_builtin_p (bounds_def_stmt,
                                                         BUILT_IN_CHKP_BNDRET)
                          && gimple_call_arg (bounds_def_stmt, 0) == ptr))
                    continue;
                }
            }

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
            mark_operand_necessary (use);

          use = gimple_vuse (stmt);
          if (!use)
            continue;

          /* If we dropped to simple mode make all immediately
             reachable definitions necessary.  */
          if (chain_ovfl)
            {
              mark_all_reaching_defs_necessary (stmt);
              continue;
            }

          /* For statements that may load from memory (have a VUSE) we
             have to mark all reaching (may-)definitions as necessary.
             We partition this task into two cases:
              1) explicit loads based on decls that are not aliased
              2) implicit loads (like calls) and explicit loads not
                 based on decls that are not aliased (like indirect
                 references or loads from globals)
             For 1) we mark all reaching may-defs as necessary, stopping
             at dominating kills.  For 2) we want to mark all dominating
             references necessary, but non-aliased ones which we handle
             in 1).  By keeping a global visited bitmap for references
             we walk for 2) we avoid quadratic behavior for those.  */

          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);
              unsigned i;

              /* Calls to functions that are merely acting as barriers
                 or that only store to memory do not make any previous
                 stores necessary.  */
              if (callee != NULL_TREE
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
                      || ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callee))
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
                continue;

              /* Calls implicitly load from memory, their arguments
                 in addition may explicitly perform memory loads.  */
              mark_all_reaching_defs_necessary (stmt);
              for (i = 0; i < gimple_call_num_args (stmt); ++i)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if (TREE_CODE (arg) == SSA_NAME
                      || is_gimple_min_invariant (arg))
                    continue;
                  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
                    arg = TREE_OPERAND (arg, 0);
                  if (!ref_may_be_aliased (arg))
                    mark_aliased_reaching_defs_necessary (stmt, arg);
                }
            }
          else if (gimple_assign_single_p (stmt))
            {
              tree rhs;
              /* If this is a load mark things necessary.  */
              rhs = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
            {
              tree rhs = gimple_return_retval (return_stmt);
              /* A return statement may perform a load.  */
              if (rhs
                  && TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
            {
              unsigned i;
              mark_all_reaching_defs_necessary (stmt);
              /* Inputs may perform loads.  */
              for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
                {
                  tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
                  if (TREE_CODE (op) != SSA_NAME
                      && !is_gimple_min_invariant (op)
                      && TREE_CODE (op) != CONSTRUCTOR
                      && !ref_may_be_aliased (op))
                    mark_aliased_reaching_defs_necessary (stmt, op);
                }
            }
          else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
            {
              /* The beginning of a transaction is a memory barrier.  */
              /* ??? If we were really cool, we'd only be a barrier
                 for the memories touched within the transaction.  */
              mark_all_reaching_defs_necessary (stmt);
            }
          else
            gcc_unreachable ();

          /* If we over-used our alias oracle budget drop to simple
             mode.  The cost metric allows quadratic behavior
             (number of uses times number of may-defs queries) up to
             a constant maximal number of queries and after that falls back
             to super-linear complexity.  */
          if (/* Constant but quadratic for small functions.  */
              total_chain > 128 * 128
              /* Linear in the number of may-defs.  */
              && total_chain > 32 * longest_chain
              /* Linear in the number of uses.  */
              && total_chain > nr_walks * 32)
            {
              chain_ovfl = true;
              if (visited)
                bitmap_clear (visited);
            }
        }
    }
}
/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gphi *phi;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi.phi ();

      /* We do not track necessity of virtual PHI nodes.  Instead do
         very simple dead PHI removal here.  */
      if (virtual_operand_p (gimple_phi_result (phi)))
        {
          /* Virtual PHI nodes with one or identical arguments
             can be removed.  */
          if (degenerate_phi_p (phi))
            {
              tree vdef = gimple_phi_result (phi);
              tree vuse = gimple_phi_arg_def (phi, 0);

              use_operand_p use_p;
              imm_use_iterator iter;
              gimple *use_stmt;
              FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, vuse);
              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
                  && TREE_CODE (vuse) == SSA_NAME)
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
            }
          else
            gimple_set_plf (phi, STMT_NECESSARY, true);
        }

      if (!gimple_plf (phi, STMT_NECESSARY))
        {
          something_changed = true;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Deleting : ");
              print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          remove_phi_node (&gsi, true);
          stats.removed_phis++;
          continue;
        }

      gsi_next (&gsi);
    }
  return something_changed;
}
/* Remove dead statement pointed to by iterator I.  Receives the basic block
   BB containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
  gimple *stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we need to update
     the CFG.  We can choose any of the edges out of BB as long as we are sure
     to not close infinite loops.  This is done by always choosing the edge
     closer to exit in inverted_post_order_compute order.  */
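  /* For instance (illustrative): when a dead 'if (x_1 > 0)' ends BB,
     one of its two outgoing edges is kept and redirected as an
     unconditional fallthru while the other is removed; preferring the
     successor closer to the exit in the inverted postorder avoids
     accidentally creating a new infinite loop.  */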
  if (is_ctrl_stmt (stmt))
    {
      edge_iterator ei;
      edge e = NULL, e2;

      /* See if there is only one non-abnormal edge.  */
      if (single_succ_p (bb))
        e = single_succ_edge (bb);
      /* Otherwise choose one that is closer to a bb with a live statement in
         it.  To be able to choose one, we compute the inverted post order
         starting from all BBs with live statements.  */
      if (!e)
        {
          if (!bb_postorder)
            {
              auto_vec<int, 20> postorder;
              inverted_post_order_compute (&postorder,
                                           &bb_contains_live_stmts);
              bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
              for (unsigned int i = 0; i < postorder.length (); ++i)
                bb_postorder[postorder[i]] = i;
            }
          FOR_EACH_EDGE (e2, ei, bb->succs)
            if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
                || bb_postorder [e->dest->index]
                   < bb_postorder [e2->dest->index])
              e = e2;
        }
      gcc_assert (e);
      e->probability = profile_probability::always ();

      /* The edge is no longer associated with a conditional, so it does
         not have TRUE/FALSE flags.
         We are also safe to drop EH/ABNORMAL flags and turn them into
         normal control flow, because we know that all the destinations
         (including those odd edges) are equivalent for program execution.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE | EDGE_EH | EDGE_ABNORMAL);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
        if (e != e2)
          {
            cfg_altered = true;
            /* If we made a BB unconditionally exit a loop or removed
               an entry into an irreducible region, then this transform
               alters the set of BBs in the loop.  Schedule a fixup.  */
            if (loop_exit_edge_p (bb->loop_father, e)
                || (e2->dest->flags & BB_IRREDUCIBLE_LOOP))
              loops_state_set (LOOPS_NEED_FIXUP);
            remove_edge (e2);
          }
        else
          ei_next (&ei);
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_BIND_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if ((VAR_P (lhs) || TREE_CODE (lhs) == PARM_DECL)
          && !DECL_IGNORED_P (lhs)
          && is_gimple_reg_type (TREE_TYPE (lhs))
          && !is_global_var (lhs)
          && !DECL_HAS_VALUE_EXPR_P (lhs))
        {
          tree rhs = gimple_assign_rhs1 (stmt);
          gdebug *note
            = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
          gsi_insert_after (i, note, GSI_SAME_STMT);
        }
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}
/* Helper for maybe_optimize_arith_overflow.  Find in *TP any uses of
   DATA (an SSA_NAME) other than through a REALPART_EXPR referencing it.  */

static tree
find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
{
  if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
    *walk_subtrees = 0;
  if (*tp == (tree) data)
    return *tp;
  return NULL_TREE;
}
/* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
   but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
   into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
   uses.  */
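/* Illustrative transformation (SSA names made up): when only the real
   part of the result is used,

       _5 = ADD_OVERFLOW (a_1, b_2);
       sum_6 = REALPART_EXPR <_5>;

   is rewritten into the equivalent of

       _5 = COMPLEX_EXPR <(unsigned) a_1 + (unsigned) b_2, 0>;
       sum_6 = REALPART_EXPR <_5>;

   so that forwarding and further DCE can eliminate the complex
   temporary entirely.  */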
static void
maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
                               enum tree_code subcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);

  if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
    return;

  imm_use_iterator imm_iter;
  use_operand_p use_p;
  bool has_debug_uses = false;
  bool has_realpart_uses = false;
  bool has_other_uses = false;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
    {
      gimple *use_stmt = USE_STMT (use_p);
      if (is_gimple_debug (use_stmt))
        has_debug_uses = true;
      else if (is_gimple_assign (use_stmt)
               && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
               && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
        has_realpart_uses = true;
      else
        {
          has_other_uses = true;
          break;
        }
    }

  if (!has_realpart_uses || has_other_uses)
    return;

  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  tree type = TREE_TYPE (TREE_TYPE (lhs));
  tree utype = type;
  if (!TYPE_UNSIGNED (type))
    utype = build_nonstandard_integer_type (TYPE_PRECISION (type), 1);
  tree result = fold_build2_loc (loc, subcode, utype,
                                 fold_convert_loc (loc, utype, arg0),
                                 fold_convert_loc (loc, utype, arg1));
  result = fold_convert_loc (loc, type, result);

  if (has_debug_uses)
    {
      gimple *use_stmt;
      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
        {
          if (!gimple_debug_bind_p (use_stmt))
            continue;
          tree v = gimple_debug_bind_get_value (use_stmt);
          if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
            {
              gimple_debug_bind_reset_value (use_stmt);
              update_stmt (use_stmt);
            }
        }
    }

  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
    result = drop_tree_overflow (result);
  tree overflow = build_zero_cst (type);
  tree ctype = build_complex_type (type);
  if (TREE_CODE (result) == INTEGER_CST)
    result = build_complex (ctype, result, overflow);
  else
    result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
                         ctype, result, overflow);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Transforming call: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "because the overflow result is never used into: ");
      print_generic_stmt (dump_file, result, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  if (!update_call_from_tree (gsi, result))
    gimplify_and_update_call_from_tree (gsi, result);
}
/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple *stmt;
  tree call;
  vec<basic_block> h;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS,
                                single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

  while (h.length ())
    {
      bb = h.pop ();

      /* Remove dead statements.  */
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
        {
          stmt = gsi_stmt (gsi);

          psi = gsi;
          gsi_prev (&psi);

          stats.total++;

          /* We can mark a call to free as not necessary if the
             defining statement of its argument is not necessary
             (and thus is getting removed).  */
          if (gimple_plf (stmt, STMT_NECESSARY)
              && gimple_call_builtin_p (stmt, BUILT_IN_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              if (TREE_CODE (ptr) == SSA_NAME)
                {
                  gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
                  if (!gimple_nop_p (def_stmt)
                      && !gimple_plf (def_stmt, STMT_NECESSARY))
                    gimple_set_plf (stmt, STMT_NECESSARY, false);
                }
              /* We did not propagate necessity for free calls fed
                 by allocation functions to allow unnecessary
                 alloc-free sequence elimination.  For instrumented
                 calls it also means we did not mark the bounds producer
                 as necessary and it is time to do it in case the free
                 call is not removed.  */
              if (gimple_call_with_bounds_p (stmt))
                {
                  gimple *bounds_def_stmt;
                  tree bounds = gimple_call_arg (stmt, 1);
                  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
                  bounds_def_stmt = SSA_NAME_DEF_STMT (bounds);
                  if (bounds_def_stmt
                      && !gimple_plf (bounds_def_stmt, STMT_NECESSARY))
                    gimple_set_plf (bounds_def_stmt, STMT_NECESSARY,
                                    gimple_plf (stmt, STMT_NECESSARY));
                }
            }

          /* If GSI is not necessary then remove it.  */
          if (!gimple_plf (stmt, STMT_NECESSARY))
            {
              /* Keep clobbers whose SSA uses are all still live.  */
              if (gimple_clobber_p (stmt))
                {
                  ssa_op_iter iter;
                  use_operand_p use_p;
                  bool dead = false;
                  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
                    {
                      tree name = USE_FROM_PTR (use_p);
                      if (!SSA_NAME_IS_DEFAULT_DEF (name)
                          && !bitmap_bit_p (processed,
                                            SSA_NAME_VERSION (name)))
                        {
                          dead = true;
                          break;
                        }
                    }
                  if (!dead)
                    continue;
                }
              if (!is_gimple_debug (stmt))
                something_changed = true;
              remove_dead_stmt (&gsi, bb);
            }
          else if (is_gimple_call (stmt))
            {
              tree name = gimple_call_lhs (stmt);

              notice_special_calls (as_a <gcall *> (stmt));

              /* When the LHS of var = call (); is dead, simplify it into
                 call (); saving one operand.  */
              if (name
                  && TREE_CODE (name) == SSA_NAME
                  && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
                  /* Avoid doing so for allocation calls which we
                     did not mark as necessary, it will confuse the
                     special logic we apply to malloc/free pair removal.  */
                  && (!(call = gimple_call_fndecl (stmt))
                      || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
                      || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
                          && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
                          && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
                          && !ALLOCA_FUNCTION_CODE_P
                                (DECL_FUNCTION_CODE (call))))
                  /* Avoid doing so for bndret calls for the same reason.  */
                  && !chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
                {
                  something_changed = true;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Deleting LHS of call: ");
                      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
                      fprintf (dump_file, "\n");
                    }

                  gimple_call_set_lhs (stmt, NULL_TREE);
                  maybe_clean_or_replace_eh_stmt (stmt, stmt);
                  update_stmt (stmt);
                  release_ssa_name (name);

                  /* GOMP_SIMD_LANE or ASAN_POISON without lhs is not
                     needed.  */
                  if (gimple_call_internal_p (stmt))
                    switch (gimple_call_internal_fn (stmt))
                      {
                      case IFN_GOMP_SIMD_LANE:
                      case IFN_ASAN_POISON:
                        remove_dead_stmt (&gsi, bb);
                        break;
                      default:
                        break;
                      }
                }
              else if (gimple_call_internal_p (stmt))
                switch (gimple_call_internal_fn (stmt))
                  {
                  case IFN_ADD_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
                    break;
                  case IFN_SUB_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
                    break;
                  case IFN_MUL_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
                    break;
                  default:
                    break;
                  }
            }
        }
    }

  h.release ();
  /* Since we don't track liveness of virtual PHI nodes, it is possible that
     we rendered some PHI nodes unreachable while they are still in use.
     Mark them for renaming.  */
  if (cfg_altered)
    {
      basic_block prev_bb;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
           bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
        {
          prev_bb = bb->prev_bb;

          if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
              || !(bb->flags & BB_REACHABLE))
            {
              for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
                   gsi_next (&gsi))
                if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
                  {
                    bool found = false;
                    imm_use_iterator iter;

                    FOR_EACH_IMM_USE_STMT (stmt, iter,
                                           gimple_phi_result (gsi.phi ()))
                      {
                        if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
                          continue;
                        if (gimple_code (stmt) == GIMPLE_PHI
                            || gimple_plf (stmt, STMT_NECESSARY))
                          {
                            found = true;
                            BREAK_FROM_IMM_USE_STMT (iter);
                          }
                      }
                    if (found)
                      mark_virtual_phi_result_for_renaming (gsi.phi ());
                  }

              if (!(bb->flags & BB_REACHABLE))
                {
                  /* Speed up the removal of blocks that don't
                     dominate others.  Walking backwards, this should
                     be the common case.  ??? Do we need to recompute
                     dominators because of cfg_altered?  */
                  if (!first_dom_son (CDI_DOMINATORS, bb))
                    delete_basic_block (bb);
                  else
                    {
                      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

                      while (h.length ())
                        {
                          bb = h.pop ();
                          prev_bb = bb->prev_bb;
                          /* Rearrangements to the CFG may have failed
                             to update the dominators tree, so that
                             formerly-dominated blocks are now
                             otherwise reachable.  */
                          if (!!(bb->flags & BB_REACHABLE))
                            continue;
                          delete_basic_block (bb);
                        }

                      h.release ();
                    }
                }
            }
        }
    }

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  if (bb_postorder)
    free (bb_postorder);
  bb_postorder = NULL;

  return something_changed;
}
/* Print out removed statement statistics.  */

static void
print_stats (void)
{
  float percg;

  percg = ((float) stats.removed / (float) stats.total) * 100;
  fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
           stats.removed, stats.total, (int) percg);

  if (stats.total_phis == 0)
    percg = 0;
  else
    percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;

  fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
           stats.removed_phis, stats.total_phis, (int) percg);
}
/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (last_stmt_necessary);
      bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (bb_contains_live_stmts);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  bitmap_clear (processed);

  worklist.create (64);
  cfg_altered = false;
}
/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      delete cd;
      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
      sbitmap_free (bb_contains_live_stmts);
      bb_contains_live_stmts = NULL;
    }

  sbitmap_free (processed);

  worklist.release ();
}
/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
          the dominance info is not invalidated after DCE1.  This is
          not an issue right now because we only run aggressive DCE
          as the last tree SSA pass, but keep this in mind when you
          start experimenting with pass ordering.  */

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  bool something_changed = false;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve the chances that loops
     will be proved to be finite in testcases such as loop-15.c and
     loop-24.c.  */
  bool in_loop_pipeline = scev_initialized_p ();
  if (aggressive && ! in_loop_pipeline)
    {
      scev_initialize ();
      loop_optimizer_init (LOOPS_NORMAL
                           | LOOPS_HAVE_RECORDED_EXITS);
    }

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      calculate_dominance_info (CDI_POST_DOMINATORS);
      cd = new control_dependences ();

      visited_control_parents =
        sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (aggressive);

  if (aggressive && ! in_loop_pipeline)
    {
      loop_optimizer_finalize ();
      scev_finalize ();
    }

  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (aggressive);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  if (something_changed)
    {
      free_numbers_of_iterations_estimates (cfun);
      if (in_loop_pipeline)
        scev_reset ();
      return TODO_update_ssa | TODO_cleanup_cfg;
    }
  return 0;
}
/* Pass entry points.  */
static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}

namespace {

const pass_data pass_data_dce =
{
  GIMPLE_PASS, /* type */
  "dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_dce : public gimple_opt_pass
{
public:
  pass_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_dce (); }

}; // class pass_dce

} // anon namespace

gimple_opt_pass *
make_pass_dce (gcc::context *ctxt)
{
  return new pass_dce (ctxt);
}

namespace {

const pass_data pass_data_cd_dce =
{
  GIMPLE_PASS, /* type */
  "cddce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CD_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cd_dce : public gimple_opt_pass
{
public:
  pass_cd_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cd_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_cd_dce (); }

}; // class pass_cd_dce

} // anon namespace

gimple_opt_pass *
make_pass_cd_dce (gcc::context *ctxt)
{
  return new pass_cd_dce (ctxt);
}
/* A cheap DCE interface.  WORKLIST is a list of possibly dead stmts and
   is consumed by this function.  The function has linear complexity in
   the number of dead stmts, with a constant factor proportional to the
   average number of SSA use operands per statement.  */
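/* Sketch of a typical use from another pass (hypothetical caller code):

       bitmap maybe_dead = BITMAP_ALLOC (NULL);
       ...
       bitmap_set_bit (maybe_dead, SSA_NAME_VERSION (lhs));
       ...
       simple_dce_from_worklist (maybe_dead);
       BITMAP_FREE (maybe_dead);

   The caller seeds the bitmap with the SSA_NAME_VERSIONs of names it
   suspects are dead; the function removes their defining statements
   when they really have zero uses and no side effects, transitively
   queueing the operands of removed statements.  */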
void
simple_dce_from_worklist (bitmap worklist)
{
  while (! bitmap_empty_p (worklist))
    {
      /* Pop item.  */
      unsigned i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);

      tree def = ssa_name (i);
      /* Removed by somebody else or still in use.  */
      if (! def || ! has_zero_uses (def))
        continue;

      gimple *t = SSA_NAME_DEF_STMT (def);
      if (gimple_has_side_effects (t))
        continue;

      /* Add uses to the worklist.  */
      ssa_op_iter iter;
      use_operand_p use_p;
      FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
        {
          tree use = USE_FROM_PTR (use_p);
          if (TREE_CODE (use) == SSA_NAME
              && ! SSA_NAME_IS_DEFAULT_DEF (use))
            bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
        }

      /* Remove stmt.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Removing dead stmt:");
          print_gimple_stmt (dump_file, t, 0);
        }
      gimple_stmt_iterator gsi = gsi_for_stmt (t);
      if (gimple_code (t) == GIMPLE_PHI)
        remove_phi_node (&gsi, true);
      else
        {
          gsi_remove (&gsi, true);
          release_defs (t);
        }
    }
}