/* gcc/tree-ssa-dce.c */
/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002-2021 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output.  "Dead statements" have no impact
   on the program's output, while "necessary statements" may have an
   impact on the output.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */
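
/* Illustrative sketch of the three phases (an editorial example, not
   part of the pass): for a function like

     int f (int x)
     {
       int y = x + 1;   // defines y, which has no uses
       return x;
     }

   phase 1 marks only the return statement as necessary, phase 2 finds
   that the return only needs the parameter X, and phase 3 deletes the
   dead assignment to Y.  */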
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-propagate.h"
#include "gimple-fold.h"
#include "tree-ssa.h"
static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;
#define STMT_NECESSARY GF_PLF_1

static vec<gimple *> worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static control_dependences *cd;

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;

/* When non-NULL holds map from basic block index into the postorder.  */
static int *bb_postorder;
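
/* Editorial example of control dependence (not part of the pass): in

     if (p)        // block B ends in this condition
       x = 1;      // block X
     use (x);

   block X executes iff the true edge out of B is taken, so X is control
   dependent on that edge and the bit for that edge is set in X's bitmap.
   The block containing use (x) runs no matter how the test goes, so it
   is not control dependent on either edge out of B.  */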
/* True if we should treat any stmt with a vdef as necessary.  */

static inline bool
keep_all_vdefs_p ()
{
  return optimize_debug;
}
/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */

static inline void
mark_stmt_necessary (gimple *stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    worklist.safe_push (stmt);
  if (add_to_worklist && bb_contains_live_stmts && !is_gimple_debug (stmt))
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}
/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple *stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (bitmap_bit_p (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
                  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  bitmap_set_bit (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
  worklist.safe_push (stmt);
}
/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
{
  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
        tree callee = gimple_call_fndecl (stmt);
        if (callee != NULL_TREE
            && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_MALLOC:
            case BUILT_IN_ALIGNED_ALLOC:
            case BUILT_IN_CALLOC:
            CASE_BUILT_IN_ALLOCA:
            case BUILT_IN_STRDUP:
            case BUILT_IN_STRNDUP:
            case BUILT_IN_GOMP_ALLOC:
              return;

            default:;
            }

        if (callee != NULL_TREE
            && flag_allocation_dce
            && DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee))
          return;

        /* IFN_GOACC_LOOP calls are necessary in that they are used to
           represent parameters (i.e. step, bound) of a lowered OpenACC
           partitioned loop.  But this kind of partitioned loop might not
           survive aggressive loop removal because it has a loop exit and
           is assumed to be finite.  Therefore, we need to explicitly mark
           these calls.  (An example is libgomp.oacc-c-c++-common/pr84955.c) */
        if (gimple_call_internal_p (stmt, IFN_GOACC_LOOP))
          {
            mark_stmt_necessary (stmt, true);
            return;
          }
        break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
         easily locate the debug temp bind stmt for a use thereof,
         we could refrain from marking all debug temps here, and
         mark them only if they're used.  */
      if (gimple_debug_nonbind_marker_p (stmt)
          || !gimple_debug_bind_p (stmt)
          || gimple_debug_bind_has_value_p (stmt)
          || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
        mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
        mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
        return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_side_effects (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* If a statement could throw, it can be deemed necessary unless we
     are allowed to remove dead EH.  Test this after checking for
     new/delete operators since we always elide their EH.  */
  if (!cfun->can_delete_dead_exceptions
      && stmt_could_throw_p (cfun, stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if ((gimple_vdef (stmt) && keep_all_vdefs_p ())
      || stmt_may_clobber_global_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}
/* Mark the last statement of BB as necessary.  */

static void
mark_last_stmt_necessary (basic_block bb)
{
  gimple *stmt = last_stmt (bb);

  bitmap_set_bit (last_stmt_necessary, bb->index);
  bitmap_set_bit (bb_contains_live_stmts, bb->index);

  /* We actually mark the statement only if it is a control statement.  */
  if (stmt && is_ctrl_stmt (stmt))
    mark_stmt_necessary (stmt, true);
}
/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));

  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;

  EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
                            0, edge_number, bi)
    {
      basic_block cd_bb = cd->get_edge_src (edge_number);

      if (ignore_self && cd_bb == bb)
        {
          skipped = true;
          continue;
        }

      if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
        mark_last_stmt_necessary (cd_bb);
    }

  if (!skipped)
    bitmap_set_bit (visited_control_parents, bb->index);
}
/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary; otherwise control dependence analysis decides which of them
   are needed.  */

static void
find_obviously_necessary_stmts (bool aggressive)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple *phi, *stmt;
  int flags;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          gimple_set_plf (phi, STMT_NECESSARY, false);
        }

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          stmt = gsi_stmt (gsi);
          gimple_set_plf (stmt, STMT_NECESSARY, false);
          mark_stmt_if_obviously_necessary (stmt, aggressive);
        }
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent the empty possibly infinite loops from being removed.  This is
     needed to make the logic in remove_dead_stmt work to identify the
     correct edge to keep when removing a controlling condition.  */
  if (aggressive)
    {
      if (mark_irreducible_loops ())
        FOR_EACH_BB_FN (bb, cfun)
          {
            edge_iterator ei;
            FOR_EACH_EDGE (e, ei, bb->succs)
              if ((e->flags & EDGE_DFS_BACK)
                  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
                {
                  if (dump_file)
                    fprintf (dump_file, "Marking back edge of irreducible "
                             "loop %i->%i\n", e->src->index, e->dest->index);
                  mark_control_dependent_edges_necessary (e->dest, false);
                }
          }

      for (auto loop : loops_list (cfun, 0))
        /* For loops without an exit do not mark any condition.  */
        if (loop->exits->next->e && !finite_loop_p (loop))
          {
            if (dump_file)
              fprintf (dump_file, "cannot prove finiteness of loop %i\n",
                       loop->num);
            mark_control_dependent_edges_necessary (loop->latch, false);
          }
    }
}
/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  if ((TREE_CODE (ref) == MEM_REF || TREE_CODE (ref) == TARGET_MEM_REF)
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  return !(DECL_P (ref)
           && !may_be_aliased (ref));
}
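
/* Editorial example (not part of the pass): for

     int local;             // a non-address-taken automatic
     extern int *p;

   the reference 'local' strips down to a DECL for which may_be_aliased
   is false, so ref_may_be_aliased returns false and reaching stores can
   be tracked precisely, while '*p' is a MEM_REF whose base is not an
   ADDR_EXPR of a DECL and therefore counts as aliased.  */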
static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;
/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
         and we can catch it in the current function where we could inspect
         the previous value.
         ??? We only need to care about the RHS throwing.  For aggregate
         assignments or similar calls and non-call exceptions the LHS
         might throw as well.  */
      && !stmt_can_throw_internal (cfun, def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      poly_int64 size, offset, max_size;
      bool reverse;
      ao_ref_base (ref);
      base
        = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
         so base == ref->base does not always hold.  */
      if (base == ref->base)
        {
          /* For a must-alias check we need to be able to constrain
             the accesses properly.  */
          if (known_eq (size, max_size)
              && known_subrange_p (ref->offset, ref->max_size, offset, size))
            return true;
          /* Or they need to be exactly the same.  */
          else if (ref->ref
                   /* Make sure there is no induction variable involved
                      in the references (gcc.c-torture/execute/pr42142.c).
                      The simplest way is to check if the kill dominates
                      the use.  */
                   /* But when both are in the same block we cannot
                      easily tell whether we came from a backedge
                      unless we decide to compute stmt UIDs
                      (see PR58246).  */
                   && (basic_block) data != gimple_bb (def_stmt)
                   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
                                      gimple_bb (def_stmt))
                   && operand_equal_p (ref->ref, lhs, 0))
            return true;
        }
    }

  /* Otherwise keep walking.  */
  return false;
}
static void
mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
{
  /* Should have been caught before calling this function.  */
  gcc_checking_assert (!keep_all_vdefs_p ());

  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
                              mark_aliased_reaching_defs_necessary_1,
                              gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}
/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
                                    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
                  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
        return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (gcall *call = dyn_cast <gcall *> (def_stmt))
    {
      tree callee = gimple_call_fndecl (call);
      if (callee != NULL_TREE
          && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
        switch (DECL_FUNCTION_CODE (callee))
          {
          case BUILT_IN_MALLOC:
          case BUILT_IN_ALIGNED_ALLOC:
          case BUILT_IN_CALLOC:
          CASE_BUILT_IN_ALLOCA:
          case BUILT_IN_FREE:
          case BUILT_IN_GOMP_ALLOC:
          case BUILT_IN_GOMP_FREE:
            return false;

          default:;
          }

      if (callee != NULL_TREE
          && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
              || DECL_IS_OPERATOR_DELETE_P (callee))
          && gimple_call_from_new_or_delete (call))
        return false;
    }

  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  return false;
}
static void
mark_all_reaching_defs_necessary (gimple *stmt)
{
  /* Should have been caught before calling this function.  */
  gcc_checking_assert (!keep_all_vdefs_p ());
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
                      mark_all_reaching_defs_necessary_1, NULL, &visited);
}
/* Return true for PHI nodes with one or identical arguments; such
   PHIs can be removed.  */

static bool
degenerate_phi_p (gimple *phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}
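
/* Editorial example (not part of the pass): the PHI

     x_3 = PHI <a_1 (bb2), a_1 (bb3)>

   is degenerate because every argument is a_1; replacing x_3 by a_1
   loses nothing, so such PHIs never force their control dependences
   to become necessary in propagate_necessity.  */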
/* Return true if NEW_CALL and DELETE_CALL are a valid pair of new
   and delete operators.  */

static bool
valid_new_delete_pair_p (gimple *new_call, gimple *delete_call)
{
  tree new_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (new_call));
  tree delete_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (delete_call));
  return valid_new_delete_pair_p (new_asm, delete_asm);
}
/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode (AGGRESSIVE is false), control dependences
   are ignored.  */

static void
propagate_necessity (bool aggressive)
{
  gimple *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (worklist.length () > 0)
    {
      /* Take STMT from worklist.  */
      stmt = worklist.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "processing: ");
          print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
          fprintf (dump_file, "\n");
        }

      if (aggressive)
        {
          /* Mark the last statement of the basic blocks on which the block
             containing STMT is control dependent, but only if we haven't
             already done so.  */
          basic_block bb = gimple_bb (stmt);
          if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
              && !bitmap_bit_p (visited_control_parents, bb->index))
            mark_control_dependent_edges_necessary (bb, false);
        }

      if (gimple_code (stmt) == GIMPLE_PHI
          /* We do not process virtual PHI nodes nor do we track their
             necessity.  */
          && !virtual_operand_p (gimple_phi_result (stmt)))
        {
          /* PHI nodes are somewhat special in that each PHI alternative has
             data and control dependencies.  All the statements feeding the
             PHI node's arguments are always necessary.  In aggressive mode,
             we also consider the control dependent edges leading to the
             predecessor block associated with each PHI alternative as
             necessary.  */
          gphi *phi = as_a <gphi *> (stmt);
          size_t k;

          for (k = 0; k < gimple_phi_num_args (stmt); k++)
            {
              tree arg = PHI_ARG_DEF (stmt, k);
              if (TREE_CODE (arg) == SSA_NAME)
                mark_operand_necessary (arg);
            }

          /* For PHI operands it matters from where the control flow arrives
             to the BB.  Consider the following example:

             a=exp1;
             b=exp2;
             if (test)
                ;
             else
                ;
             c=PHI(a,b)

             We need to mark the control dependence of the empty basic blocks,
             since they contain the computation of the PHI operands.

             Doing so is too restrictive in the case the predecessor block is
             in a loop.  Consider:

             if (b)
               {
                 int i;
                 for (i = 0; i<1000; ++i)
                   ;
                 j = 0;
               }
             return j;

             There is a PHI for J in the BB containing the return statement.
             In this case the control dependence of the predecessor block
             (that is within the empty loop) also contains the block
             determining the number of iterations of the loop, which would
             prevent removing the empty loop in this case.

             This scenario can be avoided by splitting critical edges.
             To save the critical edge splitting pass we identify how the
             control dependence would look like if the edge were split.

             Consider the modified CFG created from the current CFG by
             splitting edge B->C.  In the postdominance tree of the modified
             CFG, C' is always a child of C.  There are two cases how the
             children of C' can look like:

             1) C' is a leaf

                In this case the only basic block C' is control dependent
                on is B.

             2) C' has a single child that is B

                In this case the control dependence of C' is the same as the
                control dependence of B in the original CFG except for block
                B itself (since C' postdominates B in the modified CFG).

             Now how to decide what case happens?  There are two basic
             options:

             a) C postdominates B.  Then C immediately postdominates B and
                case 2 happens iff there is no other way from B to C except
                the edge B->C.

                There is another way from B to C iff there is a successor of
                B that is not postdominated by B.  Testing this condition is
                somewhat expensive, because we need to iterate all successors
                of B.  We are safe to assume that this does not happen: we
                will mark B as needed when processing the other path from B
                to C that is control dependent on B and marking the control
                dependencies of B itself is harmless because they will be
                processed anyway after processing the control statement in B.

             b) C does not postdominate B.  Always case 1 happens since there
                is a path from C to exit that does not go through B and thus
                also not through C'.  */

          if (aggressive && !degenerate_phi_p (stmt))
            {
              for (k = 0; k < gimple_phi_num_args (stmt); k++)
                {
                  basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;

                  if (gimple_bb (stmt)
                      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
                    {
                      if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
                        mark_last_stmt_necessary (arg_bb);
                    }
                  else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
                           && !bitmap_bit_p (visited_control_parents,
                                             arg_bb->index))
                    mark_control_dependent_edges_necessary (arg_bb, true);
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* If this is a call to free which is directly fed by an
             allocation function do not mark that necessary through
             processing the argument.  */
          bool is_delete_operator
            = (is_gimple_call (stmt)
               && gimple_call_from_new_or_delete (as_a <gcall *> (stmt))
               && gimple_call_operator_delete_p (as_a <gcall *> (stmt)));
          if (is_delete_operator
              || gimple_call_builtin_p (stmt, BUILT_IN_FREE)
              || gimple_call_builtin_p (stmt, BUILT_IN_GOMP_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              gcall *def_stmt;
              tree def_callee;
              /* If the pointer we free is defined by an allocation
                 function do not add the call to the worklist.  */
              if (TREE_CODE (ptr) == SSA_NAME
                  && (def_stmt = dyn_cast <gcall *> (SSA_NAME_DEF_STMT (ptr)))
                  && (def_callee = gimple_call_fndecl (def_stmt))
                  && ((DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
                       && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
                           || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
                           || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC
                           || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_GOMP_ALLOC))
                      || (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (def_callee)
                          && gimple_call_from_new_or_delete (def_stmt))))
                {
                  if (is_delete_operator
                      && !valid_new_delete_pair_p (def_stmt, stmt))
                    mark_operand_necessary (gimple_call_arg (stmt, 0));

                  /* Delete operators can have alignment and (or) size
                     as next arguments.  When they are SSA_NAMEs, they
                     must be marked as necessary.  Similarly GOMP_free.  */
                  if (gimple_call_num_args (stmt) >= 2)
                    for (unsigned i = 1; i < gimple_call_num_args (stmt);
                         i++)
                      {
                        tree arg = gimple_call_arg (stmt, i);
                        if (TREE_CODE (arg) == SSA_NAME)
                          mark_operand_necessary (arg);
                      }

                  continue;
                }
            }

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
            mark_operand_necessary (use);

          use = gimple_vuse (stmt);
          if (!use)
            continue;

          /* No need to search for vdefs if we intrinsically keep them all.  */
          if (keep_all_vdefs_p ())
            continue;

          /* If we dropped to simple mode make all immediately
             reachable definitions necessary.  */
          if (chain_ovfl)
            {
              mark_all_reaching_defs_necessary (stmt);
              continue;
            }

          /* For statements that may load from memory (have a VUSE) we
             have to mark all reaching (may-)definitions as necessary.
             We partition this task into two cases:
              1) explicit loads based on decls that are not aliased
              2) implicit loads (like calls) and explicit loads not
                 based on decls that are not aliased (like indirect
                 references or loads from globals)
             For 1) we mark all reaching may-defs as necessary, stopping
             at dominating kills.  For 2) we want to mark all dominating
             references necessary, but non-aliased ones which we handle
             in 1).  By keeping a global visited bitmap for references
             we walk for 2) we avoid quadratic behavior for those.  */

          if (gcall *call = dyn_cast <gcall *> (stmt))
            {
              tree callee = gimple_call_fndecl (call);
              unsigned i;

              /* Calls to functions that are merely acting as barriers
                 or that only store to memory do not make any previous
                 stores necessary.  */
              if (callee != NULL_TREE
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
                      || ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callee))
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
                continue;

              if (callee != NULL_TREE
                  && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
                      || DECL_IS_OPERATOR_DELETE_P (callee))
                  && gimple_call_from_new_or_delete (call))
                continue;

              /* Calls implicitly load from memory, their arguments
                 in addition may explicitly perform memory loads.  */
              mark_all_reaching_defs_necessary (call);
              for (i = 0; i < gimple_call_num_args (call); ++i)
                {
                  tree arg = gimple_call_arg (call, i);
                  if (TREE_CODE (arg) == SSA_NAME
                      || is_gimple_min_invariant (arg))
                    continue;
                  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
                    arg = TREE_OPERAND (arg, 0);
                  if (!ref_may_be_aliased (arg))
                    mark_aliased_reaching_defs_necessary (call, arg);
                }
            }
          else if (gimple_assign_single_p (stmt))
            {
              tree rhs;
              /* If this is a load mark things necessary.  */
              rhs = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
            {
              tree rhs = gimple_return_retval (return_stmt);
              /* A return statement may perform a load.  */
              if (rhs
                  && TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
            {
              unsigned i;
              mark_all_reaching_defs_necessary (stmt);
              /* Inputs may perform loads.  */
              for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
                {
                  tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
                  if (TREE_CODE (op) != SSA_NAME
                      && !is_gimple_min_invariant (op)
                      && TREE_CODE (op) != CONSTRUCTOR
                      && !ref_may_be_aliased (op))
                    mark_aliased_reaching_defs_necessary (stmt, op);
                }
            }
          else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
            {
              /* The beginning of a transaction is a memory barrier.  */
              /* ??? If we were really cool, we'd only be a barrier
                 for the memories touched within the transaction.  */
              mark_all_reaching_defs_necessary (stmt);
            }
          else
            gcc_unreachable ();

          /* If we over-used our alias oracle budget drop to simple
             mode.  The cost metric allows quadratic behavior
             (number of uses times number of may-defs queries) up to
             a constant maximal number of queries and after that falls back to
             super-linear complexity.  */
          if (/* Constant but quadratic for small functions.  */
              total_chain > 128 * 128
              /* Linear in the number of may-defs.  */
              && total_chain > 32 * longest_chain
              /* Linear in the number of uses.  */
              && total_chain > nr_walks * 32)
            {
              chain_ovfl = true;
              if (visited)
                bitmap_clear (visited);
            }
        }
    }
}
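
/* Editorial example of the allocation/deallocation special case above
   (not part of the pass): in

     p_1 = malloc (16);
     free (p_1);

   the free call does not mark the defining malloc necessary through its
   argument, and malloc itself is exempt from the obviously-necessary
   marking, so if p_1 has no other uses the malloc stays dead and
   eliminate_unnecessary_stmts later unmarks and removes the free as
   well, deleting the whole pair.  */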
/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gphi *phi;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi.phi ();

      /* We do not track necessity of virtual PHI nodes.  Instead do
         very simple dead PHI removal here.  */
      if (virtual_operand_p (gimple_phi_result (phi)))
        {
          /* Virtual PHI nodes with one or identical arguments
             can be removed.  */
          if (degenerate_phi_p (phi))
            {
              tree vdef = gimple_phi_result (phi);
              tree vuse = gimple_phi_arg_def (phi, 0);

              use_operand_p use_p;
              imm_use_iterator iter;
              gimple *use_stmt;
              FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, vuse);
              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
                  && TREE_CODE (vuse) == SSA_NAME)
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
            }
          else
            gimple_set_plf (phi, STMT_NECESSARY, true);
        }

      if (!gimple_plf (phi, STMT_NECESSARY))
        {
          something_changed = true;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Deleting : ");
              print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          remove_phi_node (&gsi, true);
          stats.removed_phis++;
          continue;
        }

      gsi_next (&gsi);
    }
  return something_changed;
}
/* Remove dead statement pointed to by iterator I.  Receives the basic block BB
   containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb,
                  vec<edge> &to_remove_edges)
{
  gimple *stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we need to update
     the CFG.  We can choose any of the edges out of BB as long as we are sure
     to not close infinite loops.  This is done by always choosing the edge
     closer to exit in inverted_post_order_compute order.  */
  if (is_ctrl_stmt (stmt))
    {
      edge_iterator ei;
      edge e = NULL, e2;

      /* See if there is only one non-abnormal edge.  */
      if (single_succ_p (bb))
        e = single_succ_edge (bb);
      /* Otherwise choose one that is closer to a bb with a live statement in
         it.  To be able to choose one, we compute an inverted post order
         starting from all BBs with live statements.  */
      if (!e)
        {
          if (!bb_postorder)
            {
              auto_vec<int, 20> postorder;
              inverted_post_order_compute (&postorder,
                                           &bb_contains_live_stmts);
              bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
              for (unsigned int i = 0; i < postorder.length (); ++i)
                bb_postorder[postorder[i]] = i;
            }
          FOR_EACH_EDGE (e2, ei, bb->succs)
            if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
                || bb_postorder [e->dest->index]
                   < bb_postorder [e2->dest->index])
              e = e2;
        }
      gcc_assert (e);
      e->probability = profile_probability::always ();

      /* The edge is no longer associated with a conditional, so it does
         not have TRUE/FALSE flags.
         We are also safe to drop EH/ABNORMAL flags and turn them into
         normal control flow, because we know that all the destinations
         (including those odd edges) are equivalent for program execution.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE | EDGE_EH | EDGE_ABNORMAL);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      FOR_EACH_EDGE (e2, ei, bb->succs)
        if (e != e2)
          {
            /* If we made a BB unconditionally exit a loop or removed
               an entry into an irreducible region, then this transform
               alters the set of BBs in the loop.  Schedule a fixup.  */
            if (loop_exit_edge_p (bb->loop_father, e)
                || (e2->dest->flags & BB_IRREDUCIBLE_LOOP))
              loops_state_set (LOOPS_NEED_FIXUP);
            to_remove_edges.safe_push (e2);
          }
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_BIND_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if ((VAR_P (lhs) || TREE_CODE (lhs) == PARM_DECL)
          && !DECL_IGNORED_P (lhs)
          && is_gimple_reg_type (TREE_TYPE (lhs))
          && !is_global_var (lhs)
          && !DECL_HAS_VALUE_EXPR_P (lhs))
        {
          tree rhs = gimple_assign_rhs1 (stmt);
          gdebug *note
            = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
          gsi_insert_after (i, note, GSI_SAME_STMT);
        }
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}
/* Helper for maybe_optimize_arith_overflow.  Walk *TP looking for uses
   of DATA (an SSA_NAME) other than through a REALPART_EXPR referencing
   it.  */

static tree
find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
{
  if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
    *walk_subtrees = 0;
  if (*tp == (tree) data)
    return *tp;
  return NULL_TREE;
}
/* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
   but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
   into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
   uses.  */

static void
maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
                               enum tree_code subcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);

  if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
    return;

  imm_use_iterator imm_iter;
  use_operand_p use_p;
  bool has_debug_uses = false;
  bool has_realpart_uses = false;
  bool has_other_uses = false;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
    {
      gimple *use_stmt = USE_STMT (use_p);
      if (is_gimple_debug (use_stmt))
        has_debug_uses = true;
      else if (is_gimple_assign (use_stmt)
               && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
               && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
        has_realpart_uses = true;
      else
        {
          has_other_uses = true;
          break;
        }
    }

  if (!has_realpart_uses || has_other_uses)
    return;

  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  tree type = TREE_TYPE (TREE_TYPE (lhs));
  tree utype = type;
  if (!TYPE_UNSIGNED (type))
    utype = build_nonstandard_integer_type (TYPE_PRECISION (type), 1);
  tree result = fold_build2_loc (loc, subcode, utype,
                                 fold_convert_loc (loc, utype, arg0),
                                 fold_convert_loc (loc, utype, arg1));
  result = fold_convert_loc (loc, type, result);

  if (has_debug_uses)
    {
      gimple *use_stmt;
      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
        {
          if (!gimple_debug_bind_p (use_stmt))
            continue;
          tree v = gimple_debug_bind_get_value (use_stmt);
          if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
            {
              gimple_debug_bind_reset_value (use_stmt);
              update_stmt (use_stmt);
            }
        }
    }

  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
    result = drop_tree_overflow (result);
  tree overflow = build_zero_cst (type);
  tree ctype = build_complex_type (type);
  if (TREE_CODE (result) == INTEGER_CST)
    result = build_complex (ctype, result, overflow);
  else
    result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
                         ctype, result, overflow);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Transforming call: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "because the overflow result is never used into: ");
      print_generic_stmt (dump_file, result, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimplify_and_update_call_from_tree (gsi, result);
}
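
/* Editorial example (not part of the pass): for GIMPLE like

     c_2 = .ADD_OVERFLOW (a_1, b_1);
     s_3 = REALPART_EXPR <c_2>;       // the sum is used
     // IMAGPART_EXPR <c_2> never appears

   only the arithmetic half of the complex result is used, so the call
   is rewritten into an ordinary unsigned addition packaged as
   COMPLEX_EXPR <a_1 + b_1, 0>, which later folding simplifies.  */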
/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple *stmt;
  tree call;
  auto_vec<edge> to_remove_edges;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  auto_vec<basic_block> h;
  h = get_all_dominated_blocks (CDI_DOMINATORS,
                                single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

  while (h.length ())
    {
      bb = h.pop ();

      /* Remove dead statements.  */
      auto_bitmap debug_seen;
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
        {
          stmt = gsi_stmt (gsi);

          psi = gsi;
          gsi_prev (&psi);

          stats.total++;

          /* We can mark a call to free as not necessary if the
             defining statement of its argument is not necessary
             (and thus is getting removed).  */
          if (gimple_plf (stmt, STMT_NECESSARY)
              && (gimple_call_builtin_p (stmt, BUILT_IN_FREE)
                  || (is_gimple_call (stmt)
                      && gimple_call_from_new_or_delete (as_a <gcall *> (stmt))
                      && gimple_call_operator_delete_p (as_a <gcall *> (stmt)))))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              if (TREE_CODE (ptr) == SSA_NAME)
                {
                  gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
                  if (!gimple_nop_p (def_stmt)
                      && !gimple_plf (def_stmt, STMT_NECESSARY))
                    gimple_set_plf (stmt, STMT_NECESSARY, false);
                }
            }

          /* If the statement is not necessary then remove it.  */
          if (!gimple_plf (stmt, STMT_NECESSARY))
            {
              /* Keep clobbers that we can keep live, live.  */
              if (gimple_clobber_p (stmt))
                {
                  ssa_op_iter iter;
                  use_operand_p use_p;
                  bool dead = false;
                  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
                    {
                      tree name = USE_FROM_PTR (use_p);
                      if (!SSA_NAME_IS_DEFAULT_DEF (name)
                          && !bitmap_bit_p (processed, SSA_NAME_VERSION (name)))
                        {
                          dead = true;
                          break;
                        }
                    }
                  if (!dead)
                    {
                      bitmap_clear (debug_seen);
                      continue;
                    }
                }
              if (!is_gimple_debug (stmt))
                something_changed = true;
              remove_dead_stmt (&gsi, bb, to_remove_edges);
              continue;
            }
          else if (is_gimple_call (stmt))
            {
              tree name = gimple_call_lhs (stmt);

              notice_special_calls (as_a <gcall *> (stmt));

              /* When the LHS of var = call (); is dead, simplify it into
                 call (); saving one operand.  */
              if (name
                  && TREE_CODE (name) == SSA_NAME
                  && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
                  /* Avoid doing so for allocation calls which we
                     did not mark as necessary, it will confuse the
                     special logic we apply to malloc/free pair removal.  */
                  && (!(call = gimple_call_fndecl (stmt))
                      || ((DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
                           || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
                               && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
                               && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
                               && !ALLOCA_FUNCTION_CODE_P
                                      (DECL_FUNCTION_CODE (call))))
                          && !DECL_IS_REPLACEABLE_OPERATOR_NEW_P (call))))
                {
                  something_changed = true;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Deleting LHS of call: ");
                      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
                      fprintf (dump_file, "\n");
                    }

                  gimple_call_set_lhs (stmt, NULL_TREE);
                  maybe_clean_or_replace_eh_stmt (stmt, stmt);
                  update_stmt (stmt);
                  release_ssa_name (name);

                  /* GOMP_SIMD_LANE (unless in its three-argument form) or
                     ASAN_POISON without a lhs is not needed.  */
                  if (gimple_call_internal_p (stmt))
                    switch (gimple_call_internal_fn (stmt))
                      {
                      case IFN_GOMP_SIMD_LANE:
                        if (gimple_call_num_args (stmt) >= 3
                            && !integer_nonzerop (gimple_call_arg (stmt, 2)))
                          break;
                        /* FALLTHRU */
                      case IFN_ASAN_POISON:
                        remove_dead_stmt (&gsi, bb, to_remove_edges);
                        break;
                      default:
                        break;
                      }
                }
              else if (gimple_call_internal_p (stmt))
                switch (gimple_call_internal_fn (stmt))
                  {
                  case IFN_ADD_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
                    break;
                  case IFN_SUB_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
                    break;
                  case IFN_MUL_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
                    break;
                  default:
                    break;
                  }
            }
          else if (gimple_debug_bind_p (stmt))
            {
              /* We are only keeping the last debug-bind of a
                 non-DEBUG_EXPR_DECL variable in a series of
                 debug-bind stmts.  */
              tree var = gimple_debug_bind_get_var (stmt);
              if (TREE_CODE (var) != DEBUG_EXPR_DECL
                  && !bitmap_set_bit (debug_seen, DECL_UID (var)))
                remove_dead_stmt (&gsi, bb, to_remove_edges);
              continue;
            }
          bitmap_clear (debug_seen);
        }

      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  /* Since we don't track liveness of virtual PHI nodes, it is possible that we
     rendered some PHI nodes unreachable while they are still in use.
     Mark them for renaming.  */
  if (!to_remove_edges.is_empty ())
    {
      basic_block prev_bb;

      /* Remove edges.  We've delayed this to not get bogus debug stmts
         during PHI node removal.  */
      for (unsigned i = 0; i < to_remove_edges.length (); ++i)
        remove_edge (to_remove_edges[i]);
      cfg_altered = true;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
           bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
        {
          prev_bb = bb->prev_bb;

          if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
              || !(bb->flags & BB_REACHABLE))
            {
              for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
                   gsi_next (&gsi))
                if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
                  {
                    bool found = false;
                    imm_use_iterator iter;

                    FOR_EACH_IMM_USE_STMT (stmt, iter,
                                           gimple_phi_result (gsi.phi ()))
                      {
                        if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
                          continue;
                        if (gimple_code (stmt) == GIMPLE_PHI
                            || gimple_plf (stmt, STMT_NECESSARY))
                          {
                            found = true;
                            break;
                          }
                      }
                    if (found)
                      mark_virtual_phi_result_for_renaming (gsi.phi ());
                  }

              if (!(bb->flags & BB_REACHABLE))
                {
                  /* Speed up the removal of blocks that don't
                     dominate others.  Walking backwards, this should
                     be the common case.  ??? Do we need to recompute
                     dominators because of cfg_altered?  */
                  if (!first_dom_son (CDI_DOMINATORS, bb))
                    delete_basic_block (bb);
                  else
                    {
                      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

                      while (h.length ())
                        {
                          bb = h.pop ();
                          prev_bb = bb->prev_bb;
                          /* Rearrangements to the CFG may have failed
                             to update the dominators tree, so that
                             formerly-dominated blocks are now
                             otherwise reachable.  */
                          if (!!(bb->flags & BB_REACHABLE))
                            continue;
                          delete_basic_block (bb);
                        }

                      h.release ();
                    }
                }
            }
        }
    }

  if (bb_postorder)
    free (bb_postorder);
  bb_postorder = NULL;

  return something_changed;
}
/* Print out removed statement statistics.  */

static void
print_stats (void)
{
  float percg;

  percg = ((float) stats.removed / (float) stats.total) * 100;
  fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
           stats.removed, stats.total, (int) percg);

  if (stats.total_phis == 0)
    percg = 0;
  else
    percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;

  fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
           stats.removed_phis, stats.total_phis, (int) percg);
}
/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (last_stmt_necessary);
      bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (bb_contains_live_stmts);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  bitmap_clear (processed);

  worklist.create (64);
  cfg_altered = false;
}
/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      delete cd;
      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
      sbitmap_free (bb_contains_live_stmts);
      bb_contains_live_stmts = NULL;
    }

  sbitmap_free (processed);

  worklist.release ();
}
/* Sort PHI argument values for make_forwarders_with_degenerate_phis.  */

static int
sort_phi_args (const void *a_, const void *b_)
{
  auto *a = (const std::pair<edge, hashval_t> *) a_;
  auto *b = (const std::pair<edge, hashval_t> *) b_;
  hashval_t ha = a->second;
  hashval_t hb = b->second;
  if (ha < hb)
    return -1;
  else if (ha > hb)
    return 1;
  else if (a->first->dest_idx < b->first->dest_idx)
    return -1;
  else if (a->first->dest_idx > b->first->dest_idx)
    return 1;
  else
    return 0;
}
/* Look for non-virtual PHIs and make a forwarder block when all PHIs
   have the same argument on a set of edges.  This is to not consider
   control dependences of individual edges for same values but only for
   the common set.  */

static unsigned
make_forwarders_with_degenerate_phis (function *fn)
{
  unsigned todo = 0;

  basic_block bb;
  FOR_EACH_BB_FN (bb, fn)
    {
      /* Only PHIs with three or more arguments have opportunities.  */
      if (EDGE_COUNT (bb->preds) < 3)
        continue;
      /* Do not touch loop headers.  */
      if (bb->loop_father->header == bb)
        continue;

      /* Take one PHI node as template to look for identical
         arguments.  Build a vector of candidates forming sets
         of argument edges with equal values.  Note optimality
         depends on the particular choice of the template PHI
         since equal arguments are unordered leaving other PHIs
         with more than one set of equal arguments within this
         argument range unsorted.  We'd have to break ties by
         looking at other PHI nodes.  */
      gphi_iterator gsi = gsi_start_nonvirtual_phis (bb);
      if (gsi_end_p (gsi))
        continue;
      gphi *phi = gsi.phi ();
      auto_vec<std::pair<edge, hashval_t>, 8> args;
      for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
        {
          edge e = gimple_phi_arg_edge (phi, i);
          /* Skip abnormal edges since we cannot redirect them.  */
          if (e->flags & EDGE_ABNORMAL)
            continue;
          /* Skip loop exit edges when we are in loop-closed SSA form
             since the forwarder we'd create does not have a PHI node.  */
          if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
              && loop_exit_edge_p (e->src->loop_father, e))
            continue;
          args.safe_push (std::make_pair (e, iterative_hash_expr
                                          (gimple_phi_arg_def (phi, i), 0)));
        }
      if (args.length () < 2)
        continue;
      args.qsort (sort_phi_args);

      /* From the candidates vector now verify true candidates for
         forwarders and create them.  */
      gphi *vphi = get_virtual_phi (bb);
      unsigned start = 0;
      while (start < args.length () - 1)
        {
          unsigned i;
          for (i = start + 1; i < args.length (); ++i)
            if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, args[start].first),
                                  PHI_ARG_DEF_FROM_EDGE (phi, args[i].first)))
              break;
          /* args[start]..args[i-1] are equal.  */
          if (start != i - 1)
            {
              /* Check all PHI nodes for argument equality.  */
              bool equal = true;
              gphi_iterator gsi2 = gsi;
              gsi_next (&gsi2);
              for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
                {
                  gphi *phi2 = gsi2.phi ();
                  if (virtual_operand_p (gimple_phi_result (phi2)))
                    continue;
                  tree start_arg
                    = PHI_ARG_DEF_FROM_EDGE (phi2, args[start].first);
                  for (unsigned j = start + 1; j < i; ++j)
                    {
                      if (!operand_equal_p (start_arg,
                                            PHI_ARG_DEF_FROM_EDGE
                                              (phi2, args[j].first)))
                        {
                          /* Another PHI might have a shorter set of
                             equivalent args.  Go for that.  */
                          i = j;
                          if (j == start + 1)
                            equal = false;
                          break;
                        }
                    }
                  if (!equal)
                    break;
                }
              if (equal)
                {
                  /* If we are asked to forward all edges the block
                     has all degenerate PHIs.  Do nothing in that case.  */
                  if (start == 0
                      && i == args.length ()
                      && args.length () == gimple_phi_num_args (phi))
                    break;
                  /* Instead of using make_forwarder_block we are
                     rolling our own variant knowing that the forwarder
                     does not need PHI nodes apart from eventually
                     a virtual one.  */
                  auto_vec<tree, 8> vphi_args;
                  if (vphi)
                    {
                      vphi_args.reserve_exact (i - start);
                      for (unsigned j = start; j < i; ++j)
                        vphi_args.quick_push
                          (PHI_ARG_DEF_FROM_EDGE (vphi, args[j].first));
                    }
                  free_dominance_info (fn, CDI_DOMINATORS);
                  basic_block forwarder = split_edge (args[start].first);
                  for (unsigned j = start + 1; j < i; ++j)
                    {
                      edge e = args[j].first;
                      redirect_edge_and_branch_force (e, forwarder);
                      redirect_edge_var_map_clear (e);
                    }
                  if (vphi)
                    {
                      tree def = copy_ssa_name (vphi_args[0]);
                      gphi *vphi_copy = create_phi_node (def, forwarder);
                      for (unsigned j = start; j < i; ++j)
                        add_phi_arg (vphi_copy, vphi_args[j - start],
                                     args[j].first, UNKNOWN_LOCATION);
                      SET_PHI_ARG_DEF
                        (vphi, single_succ_edge (forwarder)->dest_idx, def);
                    }
                  todo |= TODO_cleanup_cfg;
                }
            }
          /* Continue searching for more opportunities.  */
          start = i;
        }
    }
  return todo;
}
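
/* Editorial example (not part of the pass): with four predecessors and

     x_4 = PHI <a_1 (bb2), a_1 (bb3), b_2 (bb4), a_1 (bb5)>

   the edges from bb2, bb3 and bb5 carry the same value, so they get
   redirected to a new forwarder block and the PHI shrinks to
   x_4 = PHI <a_1 (forwarder), b_2 (bb4)>, leaving a single set of
   control dependences to consider for the value a_1.  */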
/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
          the dominance info is not invalidated after DCE1.  This is
          not an issue right now because we only run aggressive DCE
          as the last tree SSA pass, but keep this in mind when you
          start experimenting with pass ordering.  */

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  bool something_changed = 0;
  unsigned todo = 0;

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve the chances that a loop
     will be proved finite in testcases such as loop-15.c and loop-24.c.  */
  bool in_loop_pipeline = scev_initialized_p ();
  if (aggressive && ! in_loop_pipeline)
    {
      scev_initialize ();
      loop_optimizer_init (LOOPS_NORMAL
                           | LOOPS_HAVE_RECORDED_EXITS);
    }

  if (aggressive)
    todo |= make_forwarders_with_degenerate_phis (cfun);

  calculate_dominance_info (CDI_DOMINATORS);

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      calculate_dominance_info (CDI_POST_DOMINATORS);
      cd = new control_dependences ();

      visited_control_parents =
        sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (aggressive);

  if (aggressive && ! in_loop_pipeline)
    {
      loop_optimizer_finalize ();
      scev_finalize ();
    }

  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (aggressive);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  if (something_changed)
    {
      free_numbers_of_iterations_estimates (cfun);
      if (in_loop_pipeline)
        scev_reset ();
      todo |= TODO_update_ssa | TODO_cleanup_cfg;
    }
  return todo;
}
/* Pass entry points.  */
static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}

namespace {

const pass_data pass_data_dce =
{
  GIMPLE_PASS, /* type */
  "dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_dce : public gimple_opt_pass
{
public:
  pass_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_dce (); }

}; // class pass_dce

} // anon namespace

gimple_opt_pass *
make_pass_dce (gcc::context *ctxt)
{
  return new pass_dce (ctxt);
}

namespace {

const pass_data pass_data_cd_dce =
{
  GIMPLE_PASS, /* type */
  "cddce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CD_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cd_dce : public gimple_opt_pass
{
public:
  pass_cd_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cd_dce, ctxt), update_address_taken_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
  void set_pass_param (unsigned n, bool param)
    {
      gcc_assert (n == 0);
      update_address_taken_p = param;
    }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *)
    {
      return (tree_ssa_cd_dce ()
              | (update_address_taken_p ? TODO_update_address_taken : 0));
    }

private:
  bool update_address_taken_p;
}; // class pass_cd_dce

} // anon namespace

gimple_opt_pass *
make_pass_cd_dce (gcc::context *ctxt)
{
  return new pass_cd_dce (ctxt);
}
/* A cheap DCE interface.  WORKLIST is a list of possibly dead stmts and
   is consumed by this function.  The function has linear complexity in
   the number of dead stmts, with a constant factor proportional to the
   average number of SSA use operands per statement.  */

void
simple_dce_from_worklist (bitmap worklist)
{
  while (! bitmap_empty_p (worklist))
    {
      /* Pop item.  */
      unsigned i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);

      tree def = ssa_name (i);
      /* Removed by somebody else or still in use.  */
      if (! def || ! has_zero_uses (def))
        continue;

      gimple *t = SSA_NAME_DEF_STMT (def);
      if (gimple_has_side_effects (t))
        continue;

      /* Don't remove statements that are needed for non-call
         eh to work.  */
      if (stmt_unremovable_because_of_non_call_eh_p (cfun, t))
        continue;

      /* Add uses to the worklist.  */
      ssa_op_iter iter;
      use_operand_p use_p;
      FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
        {
          tree use = USE_FROM_PTR (use_p);
          if (TREE_CODE (use) == SSA_NAME
              && ! SSA_NAME_IS_DEFAULT_DEF (use))
            bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
        }

      /* Remove stmt.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Removing dead stmt:");
          print_gimple_stmt (dump_file, t, 0);
        }
      gimple_stmt_iterator gsi = gsi_for_stmt (t);
      if (gimple_code (t) == GIMPLE_PHI)
        remove_phi_node (&gsi, true);
      else
        {
          gsi_remove (&gsi, true);
          release_defs (t);
        }
    }
}
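
/* Editorial usage sketch (hypothetical caller, not part of this file):
   a pass that folded away the uses of some SSA names can queue their
   versions and let this function clean up transitively, e.g.

     auto_bitmap maybe_dead;
     bitmap_set_bit (maybe_dead, SSA_NAME_VERSION (old_lhs));  // old_lhs
                                     // is a hypothetical now-unused name
     simple_dce_from_worklist (maybe_dead);

   Names that turn out to still have uses are simply skipped.  */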