/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002-2015 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output.  "Dead statements" have no impact
   on the program's output, while "necessary statements" may have
   impact on the output.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */
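
/* For illustration only (this example is not part of the pass), consider
   a function where the value computed into X is never used:

     int
     f (int a, int b)
     {
       int x = a * b;
       return a + b;
     }

   Phase 1 marks the return statement necessary; phase 2 marks the
   statements computing a + b necessary because the return uses their
   result; phase 3 then removes the assignment to X, whose value never
   reaches any necessary statement.  */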
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "tree-chkp.h"
#include "tree-ssa-propagate.h"
#include "gimple-fold.h"
static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1
static vec<gimple *> worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static control_dependences *cd;
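
/* As an illustrative sketch (not part of the pass), in

     if (p)
       a ();
     else
       b ();
     c ();

   the blocks containing the calls to a () and b () are each control
   dependent on one outgoing edge of the block ending in the condition,
   while the block containing c () executes regardless and is not
   control dependent on either of those edges.  */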
/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;
/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */

static inline void
mark_stmt_necessary (gimple *stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    worklist.safe_push (stmt);
  if (bb_contains_live_stmts && !is_gimple_debug (stmt))
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}
/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple *stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (bitmap_bit_p (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
                  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  bitmap_set_bit (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op, 0);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
  worklist.safe_push (stmt);
}
/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
{
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement could throw, it can be deemed necessary.  */
  if (cfun->can_throw_non_call_exceptions
      && !cfun->can_delete_dead_exceptions
      && stmt_could_throw_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
        tree callee = gimple_call_fndecl (stmt);
        if (callee != NULL_TREE
            && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_MALLOC:
            case BUILT_IN_ALIGNED_ALLOC:
            case BUILT_IN_CALLOC:
            case BUILT_IN_ALLOCA:
            case BUILT_IN_ALLOCA_WITH_ALIGN:
              return;

            default:;
            }
        /* Most, but not all function calls are required.  Function calls
           that produce no result and have no side effects (i.e. const and
           pure functions) are unnecessary.  */
        if (gimple_has_side_effects (stmt))
          {
            mark_stmt_necessary (stmt, true);
            return;
          }
        if (!gimple_call_lhs (stmt))
          return;
        break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
         easily locate the debug temp bind stmt for a use thereof,
         we could refrain from marking all debug temps here, and
         mark them only if they're used.  */
      if (!gimple_debug_bind_p (stmt)
          || gimple_debug_bind_has_value_p (stmt)
          || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
        mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
        mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
        return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (stmt_may_clobber_global_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}
/* Mark the last statement of BB as necessary.  */

static void
mark_last_stmt_necessary (basic_block bb)
{
  gimple *stmt = last_stmt (bb);

  bitmap_set_bit (last_stmt_necessary, bb->index);
  bitmap_set_bit (bb_contains_live_stmts, bb->index);

  /* We actually mark the statement only if it is a control statement.  */
  if (stmt && is_ctrl_stmt (stmt))
    mark_stmt_necessary (stmt, true);
}
/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));

  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;

  EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
                            0, edge_number, bi)
    {
      basic_block cd_bb = cd->get_edge (edge_number)->src;

      if (ignore_self && cd_bb == bb)
        {
          skipped = true;
          continue;
        }

      if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
        mark_last_stmt_necessary (cd_bb);
    }

  if (!skipped)
    bitmap_set_bit (visited_control_parents, bb->index);
}
/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  Otherwise their necessity is determined later via control
   dependences.  */

static void
find_obviously_necessary_stmts (bool aggressive)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple *phi, *stmt;
  int flags;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          gimple_set_plf (phi, STMT_NECESSARY, false);
        }

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          stmt = gsi_stmt (gsi);
          gimple_set_plf (stmt, STMT_NECESSARY, false);
          mark_stmt_if_obviously_necessary (stmt, aggressive);
        }
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent the empty possibly infinite loops from being removed.  */
  if (aggressive)
    {
      struct loop *loop;
      scev_initialize ();
      if (mark_irreducible_loops ())
        FOR_EACH_BB_FN (bb, cfun)
          {
            edge_iterator ei;
            FOR_EACH_EDGE (e, ei, bb->succs)
              if ((e->flags & EDGE_DFS_BACK)
                  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
                {
                  if (dump_file)
                    fprintf (dump_file,
                             "Marking back edge of irreducible loop %i->%i\n",
                             e->src->index, e->dest->index);
                  mark_control_dependent_edges_necessary (e->dest, false);
                }
          }

      FOR_EACH_LOOP (loop, 0)
        if (!finite_loop_p (loop))
          {
            if (dump_file)
              fprintf (dump_file, "cannot prove finiteness of loop %i\n",
                       loop->num);
            mark_control_dependent_edges_necessary (loop->latch, false);
          }
      scev_finalize ();
    }
}
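
/* To illustrate why the loop handling above matters (example only):

     void
     g (void)
     {
       for (;;)
         ;
     }

   The empty loop contains no necessary statements, yet removing it
   would change the program's termination behavior.  When finiteness
   cannot be proved, the latch's control dependences are therefore
   marked necessary above, keeping the loop in place.  */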
/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  return !(DECL_P (ref)
           && !may_be_aliased (ref));
}
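
/* For example (illustration only), given

     int g;
     int
     h (int *p)
     {
       int l = 0;
       return l + g + *p;
     }

   a reference to the local L, whose address is never taken, is not
   aliased, while references to the global G or through the pointer P
   may be; ref_may_be_aliased would return false for the former and
   true for the latter two.  */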
static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;
/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
         and we can catch it in the current function where we could inspect
         the previous value.
         ??? We only need to care about the RHS throwing.  For aggregate
         assignments or similar calls and non-call exceptions the LHS
         might throw as well.  */
      && !stmt_can_throw_internal (def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      HOST_WIDE_INT size, offset, max_size;
      bool reverse;
      ao_ref_base (ref);
      base
        = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
         so base == ref->base does not always hold.  */
      if (base == ref->base)
        {
          /* For a must-alias check we need to be able to constrain
             the accesses properly.  */
          if (size != -1 && size == max_size
              && ref->max_size != -1)
            {
              if (offset <= ref->offset
                  && offset + size >= ref->offset + ref->max_size)
                return true;
            }
          /* Or they need to be exactly the same.  */
          else if (ref->ref
                   /* Make sure there is no induction variable involved
                      in the references (gcc.c-torture/execute/pr42142.c).
                      The simplest way is to check if the kill dominates
                      the use.  */
                   /* But when both are in the same block we cannot
                      easily tell whether we came from a backedge
                      unless we decide to compute stmt UIDs
                      (see PR58246).  */
                   && (basic_block) data != gimple_bb (def_stmt)
                   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
                                      gimple_bb (def_stmt))
                   && operand_equal_p (ref->ref, lhs, 0))
            return true;
        }
    }

  /* Otherwise keep walking.  */
  return false;
}
static void
mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
{
  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
                              mark_aliased_reaching_defs_necessary_1,
                              gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}
/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
                                    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
                  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
        return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (is_gimple_call (def_stmt))
    {
      tree callee = gimple_call_fndecl (def_stmt);
      if (callee != NULL_TREE
          && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee))
          {
          case BUILT_IN_MALLOC:
          case BUILT_IN_ALIGNED_ALLOC:
          case BUILT_IN_CALLOC:
          case BUILT_IN_ALLOCA:
          case BUILT_IN_ALLOCA_WITH_ALIGN:
          case BUILT_IN_FREE:
            return false;

          default:;
          }
    }

  mark_operand_necessary (vdef);

  return false;
}
static void
mark_all_reaching_defs_necessary (gimple *stmt)
{
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
                      mark_all_reaching_defs_necessary_1, NULL, &visited);
}
/* Return true for PHI nodes with one or identical arguments;
   such PHIs can be removed.  */

static bool
degenerate_phi_p (gimple *phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}
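
/* An illustrative example (the SSA names are made up): the PHI

     # x_4 = PHI <x_2(3), x_2(4)>

   is degenerate because both arguments are x_2, so every incoming path
   provides the same value and uses of x_4 could just as well use x_2
   directly.  */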
/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   If AGGRESSIVE is true, control dependences are also taken into
   account.  */

static void
propagate_necessity (bool aggressive)
{
  gimple *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (worklist.length () > 0)
    {
      /* Take STMT from worklist.  */
      stmt = worklist.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "processing: ");
          print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
          fprintf (dump_file, "\n");
        }

      if (aggressive)
        {
          /* Mark the last statement of the basic blocks on which the block
             containing STMT is control dependent, but only if we haven't
             already done so.  */
          basic_block bb = gimple_bb (stmt);
          if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
              && !bitmap_bit_p (visited_control_parents, bb->index))
            mark_control_dependent_edges_necessary (bb, false);
        }

      if (gimple_code (stmt) == GIMPLE_PHI
          /* We do not process virtual PHI nodes nor do we track their
             necessity.  */
          && !virtual_operand_p (gimple_phi_result (stmt)))
        {
          /* PHI nodes are somewhat special in that each PHI alternative has
             data and control dependencies.  All the statements feeding the
             PHI node's arguments are always necessary.  In aggressive mode,
             we also consider the control dependent edges leading to the
             predecessor block associated with each PHI alternative as
             necessary.  */
          gphi *phi = as_a <gphi *> (stmt);
          size_t k;

          for (k = 0; k < gimple_phi_num_args (stmt); k++)
            {
              tree arg = PHI_ARG_DEF (stmt, k);
              if (TREE_CODE (arg) == SSA_NAME)
                mark_operand_necessary (arg);
            }

          /* For PHI operands it matters from where the control flow arrives
             to the BB.  Consider the following example:

             a = exp1;
             b = exp2;
             if (test)
               ;
             else
               ;
             c = PHI (a, b)

             We need to mark the control dependences of the empty basic
             blocks, since they contain the computation of the PHI operands.

             Doing so is too restrictive in the case the predecessor block
             is in a loop.  Consider:

             if (b)
               {
                 int i;
                 for (i = 0; i < 1000; ++i)
                   ;
                 j = 0;
               }
             return j;

             There is a PHI for J in the BB containing the return statement.
             In this case the control dependence of the predecessor block
             (that is within the empty loop) also contains the block
             determining the number of iterations of the loop, which would
             prevent removal of the empty loop.

             This scenario can be avoided by splitting critical edges.
             To save the critical edge splitting pass we identify how the
             control dependence would look like if the edge was split.

             Consider the modified CFG created from the current CFG by
             splitting edge B->C.  In the postdominance tree of the modified
             CFG, C' is always a child of C.  There are two cases how the
             children of C' can look like:

             1) C' is a leaf

                In this case the only basic block C' is control dependent
                on is B.

             2) C' has a single child that is B

                In this case the control dependence of C' is the same as
                the control dependence of B in the original CFG except for
                block B itself (since C' postdominates B in the modified
                CFG).

             Now how to decide which case happens?  There are two basic
             options:

             a) C postdominates B.  Then C immediately postdominates B and
                case 2 happens iff there is no other way from B to C except
                the edge B->C.

                There is another way from B to C iff there is a successor
                of B that is not postdominated by B.  Testing this
                condition is somewhat expensive, because we need to iterate
                over all successors of B.  We are safe to assume that this
                does not happen: we will mark B as needed when processing
                the other path from B to C that is control dependent on B
                and marking the control dependences of B itself is harmless
                because they will be processed anyway after processing the
                control statement in B.

             b) C does not postdominate B.  Then case 1 always happens
                since there is a path from C to the exit that does not go
                through B and thus also not through C'.  */
          if (aggressive && !degenerate_phi_p (stmt))
            {
              for (k = 0; k < gimple_phi_num_args (stmt); k++)
                {
                  basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;

                  if (gimple_bb (stmt)
                      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
                    {
                      if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
                        mark_last_stmt_necessary (arg_bb);
                    }
                  else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
                           && !bitmap_bit_p (visited_control_parents,
                                             arg_bb->index))
                    mark_control_dependent_edges_necessary (arg_bb, true);
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* If this is a call to free which is directly fed by an
             allocation function do not mark that necessary through
             processing the argument.  */
          if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              gimple *def_stmt;
              tree def_callee;
              /* If the pointer we free is defined by an allocation
                 function do not add the call to the worklist.  */
              if (TREE_CODE (ptr) == SSA_NAME
                  && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
                  && (def_callee = gimple_call_fndecl (def_stmt))
                  && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
                      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
                {
                  gimple *bounds_def_stmt;
                  tree bounds;

                  /* For instrumented calls we should also check used
                     bounds are returned by the same allocation call.  */
                  if (!gimple_call_with_bounds_p (stmt)
                      || ((bounds = gimple_call_arg (stmt, 1))
                          && TREE_CODE (bounds) == SSA_NAME
                          && (bounds_def_stmt = SSA_NAME_DEF_STMT (bounds))
                          && chkp_gimple_call_builtin_p (bounds_def_stmt,
                                                         BUILT_IN_CHKP_BNDRET)
                          && gimple_call_arg (bounds_def_stmt, 0) == ptr))
                    continue;
                }
            }

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
            mark_operand_necessary (use);

          use = gimple_vuse (stmt);
          if (!use)
            continue;

          /* If we dropped to simple mode make all immediately
             reachable definitions necessary.  */
          if (chain_ovfl)
            {
              mark_all_reaching_defs_necessary (stmt);
              continue;
            }

          /* For statements that may load from memory (have a VUSE) we
             have to mark all reaching (may-)definitions as necessary.
             We partition this task into two cases:
              1) explicit loads based on decls that are not aliased
              2) implicit loads (like calls) and explicit loads not
                 based on decls that are not aliased (like indirect
                 references or loads from globals)
             For 1) we mark all reaching may-defs as necessary, stopping
             at dominating kills.  For 2) we want to mark all dominating
             references necessary, but non-aliased ones which we handle
             in 1).  By keeping a global visited bitmap for references
             we walk for 2) we avoid quadratic behavior for those.  */

          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);
              unsigned i;

              /* Calls to functions that are merely acting as barriers
                 or that only store to memory do not make any previous
                 stores necessary.  */
              if (callee != NULL_TREE
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
                      || (DECL_FUNCTION_CODE (callee)
                          == BUILT_IN_ALLOCA_WITH_ALIGN)
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
                continue;

              /* Calls implicitly load from memory, their arguments
                 in addition may explicitly perform memory loads.  */
              mark_all_reaching_defs_necessary (stmt);
              for (i = 0; i < gimple_call_num_args (stmt); ++i)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if (TREE_CODE (arg) == SSA_NAME
                      || is_gimple_min_invariant (arg))
                    continue;
                  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
                    arg = TREE_OPERAND (arg, 0);
                  if (!ref_may_be_aliased (arg))
                    mark_aliased_reaching_defs_necessary (stmt, arg);
                }
            }
          else if (gimple_assign_single_p (stmt))
            {
              tree rhs;
              /* If this is a load mark things necessary.  */
              rhs = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
            {
              tree rhs = gimple_return_retval (return_stmt);
              /* A return statement may perform a load.  */
              if (rhs
                  && TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
            {
              unsigned i;
              mark_all_reaching_defs_necessary (stmt);
              /* Inputs may perform loads.  */
              for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
                {
                  tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
                  if (TREE_CODE (op) != SSA_NAME
                      && !is_gimple_min_invariant (op)
                      && TREE_CODE (op) != CONSTRUCTOR
                      && !ref_may_be_aliased (op))
                    mark_aliased_reaching_defs_necessary (stmt, op);
                }
            }
          else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
            {
              /* The beginning of a transaction is a memory barrier.  */
              /* ??? If we were really cool, we'd only be a barrier
                 for the memories touched within the transaction.  */
              mark_all_reaching_defs_necessary (stmt);
            }
          else
            gcc_unreachable ();

          /* If we over-used our alias oracle budget drop to simple
             mode.  The cost metric allows quadratic behavior
             (number of uses times number of may-defs queries) up to
             a constant maximal number of queries and after that falls back
             to super-linear complexity.  */
          if (/* Constant but quadratic for small functions.  */
              total_chain > 128 * 128
              /* Linear in the number of may-defs.  */
              && total_chain > 32 * longest_chain
              /* Linear in the number of uses.  */
              && total_chain > nr_walks * 32)
            {
              chain_ovfl = true;
              if (visited)
                bitmap_clear (visited);
            }
        }
    }
}
/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gphi *phi;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi.phi ();

      /* We do not track necessity of virtual PHI nodes.  Instead do
         very simple dead PHI removal here.  */
      if (virtual_operand_p (gimple_phi_result (phi)))
        {
          /* Virtual PHI nodes with one or identical arguments
             can be removed.  */
          if (degenerate_phi_p (phi))
            {
              tree vdef = gimple_phi_result (phi);
              tree vuse = gimple_phi_arg_def (phi, 0);

              use_operand_p use_p;
              imm_use_iterator iter;
              gimple *use_stmt;
              FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, vuse);
              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
                  && TREE_CODE (vuse) == SSA_NAME)
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
            }
          else
            gimple_set_plf (phi, STMT_NECESSARY, true);
        }

      if (!gimple_plf (phi, STMT_NECESSARY))
        {
          something_changed = true;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Deleting : ");
              print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          remove_phi_node (&gsi, true);
          stats.removed_phis++;
          continue;
        }

      gsi_next (&gsi);
    }
  return something_changed;
}
/* Forward edge E to respective POST_DOM_BB and update PHIs.  */

static edge
forward_edge_to_pdom (edge e, basic_block post_dom_bb)
{
  gphi_iterator gsi;
  edge e2 = NULL;
  edge_iterator ei;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Redirecting edge %i->%i to %i\n", e->src->index,
             e->dest->index, post_dom_bb->index);

  e2 = redirect_edge_and_branch (e, post_dom_bb);
  cfg_altered = true;

  /* If the edge was already around, no updating is necessary.  */
  if (e2 != e)
    return e2;

  if (!gimple_seq_empty_p (phi_nodes (post_dom_bb)))
    {
      /* We are sure that for every live PHI we are seeing a control
         dependent BB.  This means that we can pick any edge to duplicate
         PHI args from.  */
      FOR_EACH_EDGE (e2, ei, post_dom_bb->preds)
        if (e2 != e)
          break;
      for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);)
        {
          gphi *phi = gsi.phi ();
          tree op;
          source_location locus;

          /* PHIs for virtuals have no control dependency relation on them.
             We are lost here and must force renaming of the symbol.  */
          if (virtual_operand_p (gimple_phi_result (phi)))
            {
              mark_virtual_phi_result_for_renaming (phi);
              remove_phi_node (&gsi, true);
              continue;
            }

          /* Dead PHIs do not imply control dependency.  */
          if (!gimple_plf (phi, STMT_NECESSARY))
            {
              gsi_next (&gsi);
              continue;
            }

          op = gimple_phi_arg_def (phi, e2->dest_idx);
          locus = gimple_phi_arg_location (phi, e2->dest_idx);
          add_phi_arg (phi, op, e, locus);
          /* The resulting PHI, if not dead, can only be degenerate.  */
          gcc_assert (degenerate_phi_p (phi));
          gsi_next (&gsi);
        }
    }
  return e;
}
/* Remove dead statement pointed to by iterator I.  Receives the basic block BB
   containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
  gimple *stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we also change
     the flow graph so that the current block will simply fall-thru to its
     immediate post-dominator.  The blocks we are circumventing will be
     removed by cleanup_tree_cfg if this change in the flow graph makes them
     unreachable.  */
  if (is_ctrl_stmt (stmt))
    {
      basic_block post_dom_bb;
      edge e, e2;
      edge_iterator ei;

      post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);

      e = find_edge (bb, post_dom_bb);

      /* If the edge is already there, try to use it.  This avoids the need
         to update PHI nodes.  Also watch for cases where the post dominator
         does not exist or is the exit block.  These can happen for infinite
         loops as we create fake edges in the dominator tree.  */
      if (e)
        ;
      else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
        e = EDGE_SUCC (bb, 0);
      else
        e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb);
      gcc_assert (e);
      e->probability = REG_BR_PROB_BASE;
      e->count = bb->count;

      /* The edge is no longer associated with a conditional, so it does
         not have TRUE/FALSE flags.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
        if (e != e2)
          {
            cfg_altered = true;
            /* If we made a BB unconditionally exit a loop or removed
               an entry into an irreducible region, then this transform
               alters the set of BBs in the loop.  Schedule a fixup.  */
            if (loop_exit_edge_p (bb->loop_father, e)
                || (e2->dest->flags & BB_IRREDUCIBLE_LOOP))
              loops_state_set (LOOPS_NEED_FIXUP);
            remove_edge (e2);
          }
        else
          ei_next (&ei);
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
          && !DECL_IGNORED_P (lhs)
          && is_gimple_reg_type (TREE_TYPE (lhs))
          && !is_global_var (lhs)
          && !DECL_HAS_VALUE_EXPR_P (lhs))
        {
          tree rhs = gimple_assign_rhs1 (stmt);
          gdebug *note
            = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
          gsi_insert_after (i, note, GSI_SAME_STMT);
        }
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}
/* Helper for maybe_optimize_arith_overflow.  Find in *TP any uses of
   DATA (an SSA_NAME) other than through a REALPART_EXPR referencing it.  */

static tree
find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
{
  if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
    *walk_subtrees = 0;
  if (*tp == (tree) data)
    return *tp;
  return NULL_TREE;
}
/* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
   but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
   into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
   uses.  */

static void
maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
                               enum tree_code subcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);

  if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
    return;

  imm_use_iterator imm_iter;
  use_operand_p use_p;
  bool has_debug_uses = false;
  bool has_realpart_uses = false;
  bool has_other_uses = false;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
    {
      gimple *use_stmt = USE_STMT (use_p);
      if (is_gimple_debug (use_stmt))
        has_debug_uses = true;
      else if (is_gimple_assign (use_stmt)
               && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
               && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
        has_realpart_uses = true;
      else
        {
          has_other_uses = true;
          break;
        }
    }

  if (!has_realpart_uses || has_other_uses)
    return;

  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  tree type = TREE_TYPE (TREE_TYPE (lhs));
  tree utype = type;
  if (!TYPE_UNSIGNED (type))
    utype = build_nonstandard_integer_type (TYPE_PRECISION (type), 1);
  tree result = fold_build2_loc (loc, subcode, utype,
                                 fold_convert_loc (loc, utype, arg0),
                                 fold_convert_loc (loc, utype, arg1));
  result = fold_convert_loc (loc, type, result);

  if (has_debug_uses)
    {
      gimple *use_stmt;
      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
        {
          if (!gimple_debug_bind_p (use_stmt))
            continue;
          tree v = gimple_debug_bind_get_value (use_stmt);
          if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
            {
              gimple_debug_bind_reset_value (use_stmt);
              update_stmt (use_stmt);
            }
        }
    }

  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
    result = drop_tree_overflow (result);
  tree overflow = build_zero_cst (type);
  tree ctype = build_complex_type (type);
  if (TREE_CODE (result) == INTEGER_CST)
    result = build_complex (ctype, result, overflow);
  else
    result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
                         ctype, result, overflow);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Transforming call: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "because the overflow result is never used into: ");
      print_generic_stmt (dump_file, result, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  if (!update_call_from_tree (gsi, result))
    gimplify_and_update_call_from_tree (gsi, result);
}
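
/* A sketch of the transformation above on GIMPLE-like pseudo code
   (illustration only; the SSA names are made up):

     _1 = ADD_OVERFLOW (a_2, b_3);
     sum_4 = REALPART_EXPR <_1>;

   with no use of IMAGPART_EXPR <_1> becomes, for signed int operands,

     _5 = (unsigned int) a_2;
     _6 = (unsigned int) b_3;
     _7 = _5 + _6;
     _8 = (int) _7;
     _1 = COMPLEX_EXPR <_8, 0>;
     sum_4 = REALPART_EXPR <_1>;

   computing the sum in the unsigned type, where wrapping is well
   defined, and materializing a zero overflow flag that later passes
   can fold away.  */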
/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple *stmt;
  tree call;
  vec<basic_block> h;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS,
                                single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

  while (h.length ())
    {
      bb = h.pop ();

      /* Remove dead statements.  */
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
        {
          stmt = gsi_stmt (gsi);

          psi = gsi;
          gsi_prev (&psi);

          stats.total++;

          /* We can mark a call to free as not necessary if the
             defining statement of its argument is not necessary
             (and thus is getting removed).  */
          if (gimple_plf (stmt, STMT_NECESSARY)
              && gimple_call_builtin_p (stmt, BUILT_IN_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              if (TREE_CODE (ptr) == SSA_NAME)
                {
                  gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
                  if (!gimple_nop_p (def_stmt)
                      && !gimple_plf (def_stmt, STMT_NECESSARY))
                    gimple_set_plf (stmt, STMT_NECESSARY, false);
                }
              /* We did not propagate necessity for free calls fed
                 by allocation functions to allow unnecessary
                 alloc-free sequence elimination.  For instrumented
                 calls it also means we did not mark the bounds producer
                 as necessary and it is time to do it in case the free
                 call is not removed.  */
              if (gimple_call_with_bounds_p (stmt))
                {
                  gimple *bounds_def_stmt;
                  tree bounds = gimple_call_arg (stmt, 1);
                  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
                  bounds_def_stmt = SSA_NAME_DEF_STMT (bounds);
                  if (bounds_def_stmt
                      && !gimple_plf (bounds_def_stmt, STMT_NECESSARY))
                    gimple_set_plf (bounds_def_stmt, STMT_NECESSARY,
                                    gimple_plf (stmt, STMT_NECESSARY));
                }
            }

          /* If GSI is not necessary then remove it.  */
          if (!gimple_plf (stmt, STMT_NECESSARY))
            {
              /* Keep clobbers whose SSA operands are all still live.  */
              if (gimple_clobber_p (stmt))
                {
                  ssa_op_iter iter;
                  use_operand_p use_p;
                  bool dead = false;
                  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
                    {
                      tree name = USE_FROM_PTR (use_p);
                      if (!SSA_NAME_IS_DEFAULT_DEF (name)
                          && !bitmap_bit_p (processed,
                                            SSA_NAME_VERSION (name)))
                        {
                          dead = true;
                          break;
                        }
                    }
                  if (!dead)
                    continue;
                }
              if (!is_gimple_debug (stmt))
                something_changed = true;
              remove_dead_stmt (&gsi, bb);
            }
          else if (is_gimple_call (stmt))
            {
              tree name = gimple_call_lhs (stmt);

              notice_special_calls (as_a <gcall *> (stmt));

              /* When LHS of var = call (); is dead, simplify it into
                 call (); saving one operand.  */
              if (name
                  && TREE_CODE (name) == SSA_NAME
                  && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
                  /* Avoid doing so for allocation calls which we
                     did not mark as necessary, it will confuse the
                     special logic we apply to malloc/free pair removal.  */
                  && (!(call = gimple_call_fndecl (stmt))
                      || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
                      || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
                          && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
                          && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
                          && DECL_FUNCTION_CODE (call) != BUILT_IN_ALLOCA
                          && (DECL_FUNCTION_CODE (call)
                              != BUILT_IN_ALLOCA_WITH_ALIGN)))
                  /* Avoid doing so for bndret calls for the same reason.  */
                  && !chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
                {
                  something_changed = true;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Deleting LHS of call: ");
                      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
                      fprintf (dump_file, "\n");
                    }

                  gimple_call_set_lhs (stmt, NULL_TREE);
                  maybe_clean_or_replace_eh_stmt (stmt, stmt);
                  update_stmt (stmt);
                  release_ssa_name (name);

                  /* GOMP_SIMD_LANE without lhs is not needed.  */
                  if (gimple_call_internal_p (stmt)
                      && gimple_call_internal_fn (stmt) == IFN_GOMP_SIMD_LANE)
                    remove_dead_stmt (&gsi, bb);
                }
              else if (gimple_call_internal_p (stmt))
                switch (gimple_call_internal_fn (stmt))
                  {
                  case IFN_ADD_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
                    break;
                  case IFN_SUB_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
                    break;
                  case IFN_MUL_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
                    break;
                  default:
                    break;
                  }
            }
        }
    }

  h.release ();

  /* Since we don't track liveness of virtual PHI nodes, it is possible that
     we rendered some PHI nodes unreachable while they are still in use.
     Mark them for renaming.  */
  if (cfg_altered)
    {
      basic_block prev_bb;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
           bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
        {
          prev_bb = bb->prev_bb;

          if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
              || !(bb->flags & BB_REACHABLE))
            {
              for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
                   gsi_next (&gsi))
                if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
                  {
                    bool found = false;
                    imm_use_iterator iter;

                    FOR_EACH_IMM_USE_STMT (stmt, iter,
                                           gimple_phi_result (gsi.phi ()))
                      {
                        if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
                          continue;
                        if (gimple_code (stmt) == GIMPLE_PHI
                            || gimple_plf (stmt, STMT_NECESSARY))
                          {
                            found = true;
                            BREAK_FROM_IMM_USE_STMT (iter);
                          }
                      }
                    if (found)
                      mark_virtual_phi_result_for_renaming (gsi.phi ());
                  }

              if (!(bb->flags & BB_REACHABLE))
                {
                  /* Speed up the removal of blocks that don't
                     dominate others.  Walking backwards, this should
                     be the common case.  ??? Do we need to recompute
                     dominators because of cfg_altered?  */
                  if (!MAY_HAVE_DEBUG_STMTS
                      || !first_dom_son (CDI_DOMINATORS, bb))
                    delete_basic_block (bb);
                  else
                    {
                      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

                      while (h.length ())
                        {
                          bb = h.pop ();
                          prev_bb = bb->prev_bb;
                          /* Rearrangements to the CFG may have failed
                             to update the dominators tree, so that
                             formerly-dominated blocks are now
                             otherwise reachable.  */
                          if (!!(bb->flags & BB_REACHABLE))
                            continue;
                          delete_basic_block (bb);
                        }

                      h.release ();
                    }
                }
            }
        }
    }

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  return something_changed;
}
/* Print out removed statement statistics.  */

static void
print_stats (void)
{
  float percg;

  percg = ((float) stats.removed / (float) stats.total) * 100;
  fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
           stats.removed, stats.total, (int) percg);

  if (stats.total_phis == 0)
    percg = 0;
  else
    percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;

  fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
           stats.removed_phis, stats.total_phis, (int) percg);
}
/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (last_stmt_necessary);
      bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (bb_contains_live_stmts);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  bitmap_clear (processed);

  worklist.create (64);
  cfg_altered = false;
}
/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      delete cd;
      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
      sbitmap_free (bb_contains_live_stmts);
      bb_contains_live_stmts = NULL;
    }

  sbitmap_free (processed);

  worklist.release ();
}
/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
          the dominance info is not invalidated after DCE1.  This is
          not an issue right now because we only run aggressive DCE
          as the last tree SSA pass, but keep this in mind when you
          start experimenting with pass ordering.  */

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  bool something_changed = false;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve the chances that loops
     will be proved finite in testcases such as loop-15.c and loop-24.c.  */
  if (aggressive)
    loop_optimizer_init (LOOPS_NORMAL
                         | LOOPS_HAVE_RECORDED_EXITS);

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      calculate_dominance_info (CDI_POST_DOMINATORS);
      cd = new control_dependences (create_edge_list ());

      visited_control_parents =
        sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (aggressive);

  if (aggressive)
    loop_optimizer_finalize ();

  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (aggressive);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  if (something_changed)
    {
      free_numbers_of_iterations_estimates (cfun);
      if (scev_initialized_p ())
        scev_reset ();
      return TODO_update_ssa | TODO_cleanup_cfg;
    }
  return 0;
}
/* Pass entry points.  */
static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}
namespace {

const pass_data pass_data_dce =
{
  GIMPLE_PASS, /* type */
  "dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_dce : public gimple_opt_pass
{
public:
  pass_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_dce (); }

}; // class pass_dce

} // anon namespace

gimple_opt_pass *
make_pass_dce (gcc::context *ctxt)
{
  return new pass_dce (ctxt);
}
namespace {

const pass_data pass_data_cd_dce =
{
  GIMPLE_PASS, /* type */
  "cddce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CD_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cd_dce : public gimple_opt_pass
{
public:
  pass_cd_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cd_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_cd_dce (); }

}; // class pass_cd_dce

} // anon namespace

gimple_opt_pass *
make_pass_cd_dce (gcc::context *ctxt)
{
  return new pass_cd_dce (ctxt);
}