gcc/tree-ssa-propagate.cc
1 /* Generic SSA value propagation engine.
2 Copyright (C) 2004-2023 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "ssa.h"
28 #include "gimple-pretty-print.h"
29 #include "dumpfile.h"
30 #include "gimple-iterator.h"
31 #include "gimple-fold.h"
32 #include "tree-eh.h"
33 #include "gimplify.h"
34 #include "tree-cfg.h"
35 #include "tree-ssa.h"
36 #include "tree-ssa-propagate.h"
37 #include "domwalk.h"
38 #include "cfgloop.h"
39 #include "tree-cfgcleanup.h"
40 #include "cfganal.h"
41 #include "tree-ssa-dce.h"
43 /* This file implements a generic value propagation engine based on
44 the same propagation used by the SSA-CCP algorithm [1].
46 Propagation is performed by simulating the execution of every
47 statement that produces the value being propagated. Simulation
48 proceeds as follows:
50 1- Initially, all edges of the CFG are marked not executable and
51 the CFG worklist is seeded with all the statements in the entry
52 basic block (block 0).
54 2- Every statement S is simulated with a call to the call-back
55 function SSA_PROP_VISIT_STMT. This evaluation may produce 3
56 results:
58 SSA_PROP_NOT_INTERESTING: Statement S produces nothing of
59 interest and does not affect any of the work lists.
60 The statement may be simulated again if any of its input
61 operands change in future iterations of the simulator.
63 SSA_PROP_VARYING: The value produced by S cannot be determined
64 at compile time. Further simulation of S is not required.
65 If S is a conditional jump, all the outgoing edges for the
66 block are considered executable and added to the work
67 list.
69 SSA_PROP_INTERESTING: S produces a value that can be computed
70 at compile time. Its result can be propagated into the
71 statements that feed from S. Furthermore, if S is a
72 conditional jump, only the edge known to be taken is added
73 to the work list. Edges that are known not to execute are
74 never simulated.
76 3- PHI nodes are simulated with a call to SSA_PROP_VISIT_PHI. The
77 return value from SSA_PROP_VISIT_PHI has the same semantics as
78 described in #2.
80 4- Two work lists are kept. Statements are only added to these
81 lists if they produce one of SSA_PROP_INTERESTING or
82 SSA_PROP_VARYING.
84 CFG_BLOCKS contains the list of blocks to be simulated.
85 Blocks are added to this list if their incoming edges are
86 found executable.
88 SSA_EDGE_WORKLIST contains the list of statements that we
89 need to revisit.
91 5- Simulation terminates when both work lists are drained.
93 Before calling ssa_propagate, it is important to clear
94 prop_simulate_again_p for all the statements in the program that
95 should be simulated. This initialization allows an implementation
96 to specify which statements should never be simulated.
98 It is also important to compute def-use information before calling
99 ssa_propagate.
101 References:
103 [1] Constant propagation with conditional branches,
104 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
106 [2] Building an Optimizing Compiler,
107 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
109 [3] Advanced Compiler Design and Implementation,
110 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
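/* An illustrative sketch of a hypothetical client of this engine.  The
   class name and the trivial "everything varies" lattice are made up;
   the callback signatures are inferred from the calls that simulate_stmt
   below makes (visit_stmt (stmt, &taken_edge, &output_name) and
   visit_phi (phi)), so treat this as a sketch rather than the canonical
   interface:

     class all_varying_engine : public ssa_propagation_engine
     {
      public:
       enum ssa_prop_result visit_stmt (gimple *, edge *, tree *) final override
	 { return SSA_PROP_VARYING; }
       enum ssa_prop_result visit_phi (gphi *) final override
	 { return SSA_PROP_VARYING; }
     };

   A real client would evaluate the statement in its lattice, set the
   output SSA name and return SSA_PROP_INTERESTING when a useful value
   was found, or SSA_PROP_NOT_INTERESTING to leave the work lists
   untouched.  */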
112 /* Worklist of control flow edge destinations. This contains
113 the CFG order number of the blocks so we can iterate in CFG
114 order by visiting in bit-order. */
116 static bitmap cfg_blocks;
117 static int *bb_to_cfg_order;
118 static int *cfg_order_to_bb;
120 /* Worklist of SSA edges which will need reexamination as their
121 definition has changed. SSA edges are def-use edges in the SSA
122 web. For each D-U edge, we store the target statement or PHI node
123 UID in a bitmap. UIDs order stmts in execution order. */
125 static bitmap ssa_edge_worklist;
126 static vec<gimple *> uid_to_stmt;
128 /* Current RPO index in the iteration. */
129 static int curr_order;
132 /* We have just defined a new value for VAR. Add all immediate uses
133 of VAR that the propagator may need to simulate again to the SSA
134 edge worklist. */
136 static void
137 add_ssa_edge (tree var)
139 imm_use_iterator iter;
140 use_operand_p use_p;
142 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
144 gimple *use_stmt = USE_STMT (use_p);
145 if (!prop_simulate_again_p (use_stmt))
146 continue;
148 /* If we did not yet simulate the block, wait for this to happen
149 and do not add the stmt to the SSA edge worklist. */
150 basic_block use_bb = gimple_bb (use_stmt);
151 if (! (use_bb->flags & BB_VISITED))
152 continue;
154 /* If this is a use on a not yet executable edge do not bother to
155 queue it. */
156 if (gimple_code (use_stmt) == GIMPLE_PHI
157 && !(EDGE_PRED (use_bb, PHI_ARG_INDEX_FROM_USE (use_p))->flags
158 & EDGE_EXECUTABLE))
159 continue;
161 if (bitmap_set_bit (ssa_edge_worklist, gimple_uid (use_stmt)))
163 uid_to_stmt[gimple_uid (use_stmt)] = use_stmt;
164 if (dump_file && (dump_flags & TDF_DETAILS))
166 fprintf (dump_file, "ssa_edge_worklist: adding SSA use in ");
167 print_gimple_stmt (dump_file, use_stmt, 0, TDF_SLIM);
174 /* Add edge E to the control flow worklist. */
176 static void
177 add_control_edge (edge e)
179 basic_block bb = e->dest;
180 if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
181 return;
183 /* If the edge has already been marked executable, skip it. */
184 if (e->flags & EDGE_EXECUTABLE)
185 return;
187 e->flags |= EDGE_EXECUTABLE;
189 int bb_order = bb_to_cfg_order[bb->index];
190 bitmap_set_bit (cfg_blocks, bb_order);
192 if (dump_file && (dump_flags & TDF_DETAILS))
193 fprintf (dump_file, "Adding destination of edge (%d -> %d) to worklist\n",
194 e->src->index, e->dest->index);
198 /* Simulate the execution of STMT and update the work lists accordingly. */
200 void
201 ssa_propagation_engine::simulate_stmt (gimple *stmt)
203 enum ssa_prop_result val = SSA_PROP_NOT_INTERESTING;
204 edge taken_edge = NULL;
205 tree output_name = NULL_TREE;
207 /* Pull the stmt off the SSA edge worklist. */
208 bitmap_clear_bit (ssa_edge_worklist, gimple_uid (stmt));
210 /* Don't bother visiting statements that are already
211 considered varying by the propagator. */
212 if (!prop_simulate_again_p (stmt))
213 return;
215 if (gimple_code (stmt) == GIMPLE_PHI)
217 val = visit_phi (as_a <gphi *> (stmt));
218 output_name = gimple_phi_result (stmt);
220 else
221 val = visit_stmt (stmt, &taken_edge, &output_name);
223 if (val == SSA_PROP_VARYING)
225 prop_set_simulate_again (stmt, false);
227 /* If the statement produced a new varying value, add the SSA
228 edges coming out of OUTPUT_NAME. */
229 if (output_name)
230 add_ssa_edge (output_name);
232 /* If STMT transfers control out of its basic block, add
233 all outgoing edges to the work list. */
234 if (stmt_ends_bb_p (stmt))
236 edge e;
237 edge_iterator ei;
238 basic_block bb = gimple_bb (stmt);
239 FOR_EACH_EDGE (e, ei, bb->succs)
240 add_control_edge (e);
242 return;
244 else if (val == SSA_PROP_INTERESTING)
246 /* If the statement produced a new value, add the SSA edges coming
247 out of OUTPUT_NAME. */
248 if (output_name)
249 add_ssa_edge (output_name);
251 /* If we know which edge is going to be taken out of this block,
252 add it to the CFG work list. */
253 if (taken_edge)
254 add_control_edge (taken_edge);
257 /* If there are no SSA uses on the stmt whose defs are simulated
258 again, then this stmt will never be visited again. */
259 bool has_simulate_again_uses = false;
260 use_operand_p use_p;
261 ssa_op_iter iter;
262 if (gimple_code (stmt) == GIMPLE_PHI)
264 edge_iterator ei;
265 edge e;
266 tree arg;
267 FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->preds)
268 if (!(e->flags & EDGE_EXECUTABLE)
269 || ((arg = PHI_ARG_DEF_FROM_EDGE (stmt, e))
270 && TREE_CODE (arg) == SSA_NAME
271 && !SSA_NAME_IS_DEFAULT_DEF (arg)
272 && prop_simulate_again_p (SSA_NAME_DEF_STMT (arg))))
274 has_simulate_again_uses = true;
275 break;
278 else
279 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
281 gimple *def_stmt = SSA_NAME_DEF_STMT (USE_FROM_PTR (use_p));
282 if (!gimple_nop_p (def_stmt)
283 && prop_simulate_again_p (def_stmt))
285 has_simulate_again_uses = true;
286 break;
289 if (!has_simulate_again_uses)
291 if (dump_file && (dump_flags & TDF_DETAILS))
292 fprintf (dump_file, "marking stmt to be not simulated again\n");
293 prop_set_simulate_again (stmt, false);
298 /* Simulate the execution of BLOCK. Evaluate the statement associated
299 with each variable reference inside the block. */
301 void
302 ssa_propagation_engine::simulate_block (basic_block block)
304 gimple_stmt_iterator gsi;
306 /* There is nothing to do for the exit block. */
307 if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
308 return;
310 if (dump_file && (dump_flags & TDF_DETAILS))
311 fprintf (dump_file, "\nSimulating block %d\n", block->index);
313 /* Always simulate PHI nodes, even if we have simulated this block
314 before. */
315 for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
316 simulate_stmt (gsi_stmt (gsi));
318 /* If this is the first time we've simulated this block, then we
319 must simulate each of its statements. */
320 if (! (block->flags & BB_VISITED))
322 gimple_stmt_iterator j;
323 unsigned int normal_edge_count;
324 edge e, normal_edge;
325 edge_iterator ei;
327 for (j = gsi_start_bb (block); !gsi_end_p (j); gsi_next (&j))
328 simulate_stmt (gsi_stmt (j));
330 /* Note that we have simulated this block. */
331 block->flags |= BB_VISITED;
333 /* We cannot predict when abnormal and EH edges will be executed, so
334 once a block is considered executable, we consider any
335 outgoing abnormal edges as executable.
337 TODO: This is not exactly true. Simplifying a statement might
338 prove it non-throwing, and a computed goto can be handled
339 when its destination is known.
341 At the same time, if this block has only one successor that is
342 reached by non-abnormal edges, then add that successor to the
343 worklist. */
344 normal_edge_count = 0;
345 normal_edge = NULL;
346 FOR_EACH_EDGE (e, ei, block->succs)
348 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
349 add_control_edge (e);
350 else
352 normal_edge_count++;
353 normal_edge = e;
357 if (normal_edge_count == 1)
358 add_control_edge (normal_edge);
363 /* Initialize local data structures and work lists. */
365 static void
366 ssa_prop_init (void)
368 edge e;
369 edge_iterator ei;
370 basic_block bb;
372 /* Worklist of SSA edges. */
373 ssa_edge_worklist = BITMAP_ALLOC (NULL);
374 bitmap_tree_view (ssa_edge_worklist);
376 /* Worklist of basic-blocks. */
377 bb_to_cfg_order = XNEWVEC (int, last_basic_block_for_fn (cfun) + 1);
378 cfg_order_to_bb = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
379 int n = pre_and_rev_post_order_compute_fn (cfun, NULL,
380 cfg_order_to_bb, false);
381 for (int i = 0; i < n; ++i)
382 bb_to_cfg_order[cfg_order_to_bb[i]] = i;
383 cfg_blocks = BITMAP_ALLOC (NULL);
385 /* Initially assume that every edge in the CFG is not executable
386 (including the edges coming out of the entry block). Mark blocks
387 as not visited; blocks not yet visited will have all their statements
388 simulated once an incoming edge becomes executable. */
389 set_gimple_stmt_max_uid (cfun, 0);
390 for (int i = 0; i < n; ++i)
392 gimple_stmt_iterator si;
393 bb = BASIC_BLOCK_FOR_FN (cfun, cfg_order_to_bb[i]);
395 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
397 gimple *stmt = gsi_stmt (si);
398 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
401 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
403 gimple *stmt = gsi_stmt (si);
404 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
407 bb->flags &= ~BB_VISITED;
408 FOR_EACH_EDGE (e, ei, bb->succs)
409 e->flags &= ~EDGE_EXECUTABLE;
411 uid_to_stmt.safe_grow (gimple_stmt_max_uid (cfun), true);
415 /* Free allocated storage. */
417 static void
418 ssa_prop_fini (void)
420 BITMAP_FREE (cfg_blocks);
421 free (bb_to_cfg_order);
422 free (cfg_order_to_bb);
423 BITMAP_FREE (ssa_edge_worklist);
424 uid_to_stmt.release ();
428 /* Entry point to the propagation engine.
430 The VISIT_STMT virtual function is called for every statement
431 visited and the VISIT_PHI virtual function is called for every PHI
432 node visited. */
434 void
435 ssa_propagation_engine::ssa_propagate (void)
437 ssa_prop_init ();
439 curr_order = 0;
441 /* Iterate until the worklists are empty. We iterate both blocks
442 and stmts in RPO order, prioritizing backedge processing.
443 Seed the algorithm by adding the successors of the entry block to the
444 edge worklist. */
445 edge e;
446 edge_iterator ei;
447 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
449 e->flags &= ~EDGE_EXECUTABLE;
450 add_control_edge (e);
452 while (1)
454 int next_block_order = (bitmap_empty_p (cfg_blocks)
455 ? -1 : bitmap_first_set_bit (cfg_blocks));
456 int next_stmt_uid = (bitmap_empty_p (ssa_edge_worklist)
457 ? -1 : bitmap_first_set_bit (ssa_edge_worklist));
458 if (next_block_order == -1 && next_stmt_uid == -1)
459 break;
461 int next_stmt_bb_order = -1;
462 gimple *next_stmt = NULL;
463 if (next_stmt_uid != -1)
465 next_stmt = uid_to_stmt[next_stmt_uid];
466 next_stmt_bb_order = bb_to_cfg_order[gimple_bb (next_stmt)->index];
469 /* Pull the next block to simulate off the worklist if it comes first. */
470 if (next_block_order != -1
471 && (next_stmt_bb_order == -1
472 || next_block_order <= next_stmt_bb_order))
474 curr_order = next_block_order;
475 bitmap_clear_bit (cfg_blocks, next_block_order);
476 basic_block bb
477 = BASIC_BLOCK_FOR_FN (cfun, cfg_order_to_bb [next_block_order]);
478 simulate_block (bb);
480 /* Else simulate from the SSA edge worklist. */
481 else
483 curr_order = next_stmt_bb_order;
484 if (dump_file && (dump_flags & TDF_DETAILS))
486 fprintf (dump_file, "\nSimulating statement: ");
487 print_gimple_stmt (dump_file, next_stmt, 0, dump_flags);
489 simulate_stmt (next_stmt);
493 ssa_prop_fini ();
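/* An illustrative driver sketch for a hypothetical pass using the engine.
   It reuses the made-up all_varying_engine from the sketch near the top
   of the file; the marking loop simply enables simulation for every
   statement and PHI, whereas a real pass would clear
   prop_simulate_again_p for statements it never wants simulated, as the
   file header comment requires:

     basic_block bb;
     FOR_EACH_BB_FN (bb, cfun)
       {
	 for (gphi_iterator gpi = gsi_start_phis (bb); !gsi_end_p (gpi);
	      gsi_next (&gpi))
	   prop_set_simulate_again (gpi.phi (), true);
	 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	      gsi_next (&gsi))
	   prop_set_simulate_again (gsi_stmt (gsi), true);
       }

     all_varying_engine engine;
     engine.ssa_propagate ();

   After the simulation finishes, the pass would typically hand the
   discovered values to a substitute_and_fold_engine (see below).  */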
496 /* Return true if STMT is of the form 'mem_ref = RHS', where 'mem_ref'
497 is a non-volatile pointer dereference, a structure reference or a
498 reference to a single _DECL. Ignore volatile memory references
499 because they are not interesting for the optimizers. */
501 bool
502 stmt_makes_single_store (gimple *stmt)
504 tree lhs;
506 if (gimple_code (stmt) != GIMPLE_ASSIGN
507 && gimple_code (stmt) != GIMPLE_CALL)
508 return false;
510 if (!gimple_vdef (stmt))
511 return false;
513 lhs = gimple_get_lhs (stmt);
515 /* A call statement may have a null LHS. */
516 if (!lhs)
517 return false;
519 return (!TREE_THIS_VOLATILE (lhs)
520 && (DECL_P (lhs)
521 || REFERENCE_CLASS_P (lhs)));
525 /* Propagation statistics. */
526 struct prop_stats_d
528 long num_const_prop;
529 long num_copy_prop;
530 long num_stmts_folded;
533 static struct prop_stats_d prop_stats;
535 // range_query default methods to drive from a value_of_expr() rather than
536 // range_of_expr.
538 tree
539 substitute_and_fold_engine::value_on_edge (edge, tree expr)
541 return value_of_expr (expr);
544 tree
545 substitute_and_fold_engine::value_of_stmt (gimple *stmt, tree name)
547 if (!name)
548 name = gimple_get_lhs (stmt);
550 gcc_checking_assert (!name || name == gimple_get_lhs (stmt));
552 if (name)
553 return value_of_expr (name);
554 return NULL_TREE;
557 bool
558 substitute_and_fold_engine::range_of_expr (vrange &, tree, gimple *)
560 return false;
563 /* Replace USE references in statement STMT with the values returned
564 by value_of_expr. Return true if at least one reference was replaced. */
566 bool
567 substitute_and_fold_engine::replace_uses_in (gimple *stmt)
569 bool replaced = false;
570 use_operand_p use;
571 ssa_op_iter iter;
573 FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
575 tree tuse = USE_FROM_PTR (use);
576 tree val = value_of_expr (tuse, stmt);
578 if (val == tuse || val == NULL_TREE)
579 continue;
581 if (gimple_code (stmt) == GIMPLE_ASM
582 && !may_propagate_copy_into_asm (tuse))
583 continue;
585 if (!may_propagate_copy (tuse, val))
586 continue;
588 if (TREE_CODE (val) != SSA_NAME)
589 prop_stats.num_const_prop++;
590 else
591 prop_stats.num_copy_prop++;
593 propagate_value (use, val);
595 replaced = true;
598 return replaced;
602 /* Replace propagated values into all the arguments for PHI using the
603 values returned by value_on_edge. */
605 bool
606 substitute_and_fold_engine::replace_phi_args_in (gphi *phi)
608 size_t i;
609 bool replaced = false;
611 for (i = 0; i < gimple_phi_num_args (phi); i++)
613 tree arg = gimple_phi_arg_def (phi, i);
615 if (TREE_CODE (arg) == SSA_NAME)
617 edge e = gimple_phi_arg_edge (phi, i);
618 tree val = value_on_edge (e, arg);
620 if (val && val != arg && may_propagate_copy (arg, val))
622 if (TREE_CODE (val) != SSA_NAME)
623 prop_stats.num_const_prop++;
624 else
625 prop_stats.num_copy_prop++;
627 propagate_value (PHI_ARG_DEF_PTR (phi, i), val);
628 replaced = true;
630 /* If we propagated a copy and this argument flows
631 through an abnormal edge, update the replacement
632 accordingly. */
633 if (TREE_CODE (val) == SSA_NAME
634 && e->flags & EDGE_ABNORMAL
635 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
637 /* This can only occur for virtual operands, since
638 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)
639 would prevent replacement. */
640 gcc_checking_assert (virtual_operand_p (val));
641 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
647 if (dump_file && (dump_flags & TDF_DETAILS))
649 if (!replaced)
650 fprintf (dump_file, "No folding possible\n");
651 else
653 fprintf (dump_file, "Folded into: ");
654 print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
655 fprintf (dump_file, "\n");
659 return replaced;
663 class substitute_and_fold_dom_walker : public dom_walker
665 public:
666 substitute_and_fold_dom_walker (cdi_direction direction,
667 class substitute_and_fold_engine *engine)
668 : dom_walker (direction),
669 something_changed (false),
670 substitute_and_fold_engine (engine)
672 dceworklist = BITMAP_ALLOC (NULL);
673 stmts_to_fixup.create (0);
674 need_eh_cleanup = BITMAP_ALLOC (NULL);
675 need_ab_cleanup = BITMAP_ALLOC (NULL);
677 ~substitute_and_fold_dom_walker ()
679 BITMAP_FREE (dceworklist);
680 stmts_to_fixup.release ();
681 BITMAP_FREE (need_eh_cleanup);
682 BITMAP_FREE (need_ab_cleanup);
685 edge before_dom_children (basic_block) final override;
686 void after_dom_children (basic_block bb) final override
688 substitute_and_fold_engine->post_fold_bb (bb);
691 bool something_changed;
692 bitmap dceworklist;
693 vec<gimple *> stmts_to_fixup;
694 bitmap need_eh_cleanup;
695 bitmap need_ab_cleanup;
697 class substitute_and_fold_engine *substitute_and_fold_engine;
699 private:
700 void foreach_new_stmt_in_bb (gimple_stmt_iterator old_gsi,
701 gimple_stmt_iterator new_gsi);
704 /* Call post_new_stmt for each new statement that has been added
705 to the current BB. OLD_GSI is the statement iterator before the BB
706 changes occurred. NEW_GSI is the iterator which may contain new
707 statements. */
709 void
710 substitute_and_fold_dom_walker::foreach_new_stmt_in_bb
711 (gimple_stmt_iterator old_gsi,
712 gimple_stmt_iterator new_gsi)
714 basic_block bb = gsi_bb (new_gsi);
715 if (gsi_end_p (old_gsi))
716 old_gsi = gsi_start_bb (bb);
717 else
718 gsi_next (&old_gsi);
719 while (gsi_stmt (old_gsi) != gsi_stmt (new_gsi))
721 gimple *stmt = gsi_stmt (old_gsi);
722 substitute_and_fold_engine->post_new_stmt (stmt);
723 gsi_next (&old_gsi);
727 bool
728 substitute_and_fold_engine::propagate_into_phi_args (basic_block bb)
730 edge e;
731 edge_iterator ei;
732 bool propagated = false;
734 /* Visit BB successor PHI nodes and replace PHI args. */
735 FOR_EACH_EDGE (e, ei, bb->succs)
737 for (gphi_iterator gpi = gsi_start_phis (e->dest);
738 !gsi_end_p (gpi); gsi_next (&gpi))
740 gphi *phi = gpi.phi ();
741 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
742 tree arg = USE_FROM_PTR (use_p);
743 if (TREE_CODE (arg) != SSA_NAME
744 || virtual_operand_p (arg))
745 continue;
746 tree val = value_on_edge (e, arg);
747 if (val
748 && is_gimple_min_invariant (val)
749 && may_propagate_copy (arg, val))
751 propagate_value (use_p, val);
752 propagated = true;
756 return propagated;
759 edge
760 substitute_and_fold_dom_walker::before_dom_children (basic_block bb)
762 substitute_and_fold_engine->pre_fold_bb (bb);
764 /* Propagate known values into PHI nodes. */
765 for (gphi_iterator i = gsi_start_phis (bb);
766 !gsi_end_p (i);
767 gsi_next (&i))
769 gphi *phi = i.phi ();
770 tree res = gimple_phi_result (phi);
771 if (virtual_operand_p (res))
772 continue;
773 if (dump_file && (dump_flags & TDF_DETAILS))
775 fprintf (dump_file, "Folding PHI node: ");
776 print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
778 if (res && TREE_CODE (res) == SSA_NAME)
780 tree sprime = substitute_and_fold_engine->value_of_expr (res, phi);
781 if (sprime
782 && sprime != res
783 && may_propagate_copy (res, sprime))
785 if (dump_file && (dump_flags & TDF_DETAILS))
787 fprintf (dump_file, "Queued PHI for removal. Folds to: ");
788 print_generic_expr (dump_file, sprime);
789 fprintf (dump_file, "\n");
791 bitmap_set_bit (dceworklist, SSA_NAME_VERSION (res));
792 continue;
795 something_changed |= substitute_and_fold_engine->replace_phi_args_in (phi);
798 /* Propagate known values into stmts. In some cases this exposes
799 more trivially deletable stmts for the DCE worklist walk to remove. */
800 for (gimple_stmt_iterator i = gsi_start_bb (bb);
801 !gsi_end_p (i);
802 gsi_next (&i))
804 bool did_replace;
805 gimple *stmt = gsi_stmt (i);
807 substitute_and_fold_engine->pre_fold_stmt (stmt);
809 if (dump_file && (dump_flags & TDF_DETAILS))
811 fprintf (dump_file, "Folding statement: ");
812 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
815 /* There is no point propagating into a stmt for which we have a value
816 we can propagate into all of its uses. Mark it for removal instead. */
817 tree lhs = gimple_get_lhs (stmt);
818 if (lhs && TREE_CODE (lhs) == SSA_NAME)
820 tree sprime = substitute_and_fold_engine->value_of_stmt (stmt, lhs);
821 if (sprime
822 && sprime != lhs
823 && may_propagate_copy (lhs, sprime)
824 && !stmt_could_throw_p (cfun, stmt)
825 && !gimple_has_side_effects (stmt))
827 if (dump_file && (dump_flags & TDF_DETAILS))
829 fprintf (dump_file, "Queued stmt for removal. Folds to: ");
830 print_generic_expr (dump_file, sprime);
831 fprintf (dump_file, "\n");
833 bitmap_set_bit (dceworklist, SSA_NAME_VERSION (lhs));
834 continue;
838 /* Replace the statement with its folded version and mark it
839 folded. */
840 did_replace = false;
841 gimple *old_stmt = stmt;
842 bool was_noreturn = false;
843 bool can_make_abnormal_goto = false;
844 if (is_gimple_call (stmt))
846 was_noreturn = gimple_call_noreturn_p (stmt);
847 can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
850 /* Replace real uses in the statement. */
851 did_replace |= substitute_and_fold_engine->replace_uses_in (stmt);
853 gimple_stmt_iterator prev_gsi = i;
854 gsi_prev (&prev_gsi);
856 /* If we made a replacement, fold the statement. */
857 if (did_replace)
859 fold_stmt (&i, follow_single_use_edges);
860 stmt = gsi_stmt (i);
861 gimple_set_modified (stmt, true);
863 /* Also fold if we want to fold all statements. */
864 else if (substitute_and_fold_engine->fold_all_stmts
865 && fold_stmt (&i, follow_single_use_edges))
867 did_replace = true;
868 stmt = gsi_stmt (i);
869 gimple_set_modified (stmt, true);
872 /* Some statements may be simplified using propagator
873 specific information. Do this before propagating
874 into the stmt to not disturb pass specific information. */
875 update_stmt_if_modified (stmt);
876 if (substitute_and_fold_engine->fold_stmt (&i))
878 did_replace = true;
879 prop_stats.num_stmts_folded++;
880 stmt = gsi_stmt (i);
881 gimple_set_modified (stmt, true);
884 /* If this is a control statement on which the propagator left edges
885 unexecuted, force the condition in a way consistent with
886 that. See PR66945 for cases where the propagator can end
887 up with a different idea of a taken edge than folding
888 (once undefined behavior is involved). */
889 if (gimple_code (stmt) == GIMPLE_COND)
891 if ((EDGE_SUCC (bb, 0)->flags & EDGE_EXECUTABLE)
892 ^ (EDGE_SUCC (bb, 1)->flags & EDGE_EXECUTABLE))
894 if (((EDGE_SUCC (bb, 0)->flags & EDGE_TRUE_VALUE) != 0)
895 == ((EDGE_SUCC (bb, 0)->flags & EDGE_EXECUTABLE) != 0))
896 gimple_cond_make_true (as_a <gcond *> (stmt));
897 else
898 gimple_cond_make_false (as_a <gcond *> (stmt));
899 gimple_set_modified (stmt, true);
900 did_replace = true;
904 /* Now cleanup. */
905 if (did_replace)
907 foreach_new_stmt_in_bb (prev_gsi, i);
909 /* If we cleaned up EH information from the statement,
910 remove EH edges. */
911 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
912 bitmap_set_bit (need_eh_cleanup, bb->index);
914 /* If we turned a call with possible abnormal control transfer
915 into one that doesn't, remove abnormal edges. */
916 if (can_make_abnormal_goto
917 && !stmt_can_make_abnormal_goto (stmt))
918 bitmap_set_bit (need_ab_cleanup, bb->index);
920 /* If we turned a not noreturn call into a noreturn one
921 schedule it for fixup. */
922 if (!was_noreturn
923 && is_gimple_call (stmt)
924 && gimple_call_noreturn_p (stmt))
925 stmts_to_fixup.safe_push (stmt);
927 if (gimple_assign_single_p (stmt))
929 tree rhs = gimple_assign_rhs1 (stmt);
931 if (TREE_CODE (rhs) == ADDR_EXPR)
932 recompute_tree_invariant_for_addr_expr (rhs);
935 /* Determine what needs to be done to update the SSA form. */
936 update_stmt_if_modified (stmt);
937 if (!is_gimple_debug (stmt))
938 something_changed = true;
941 if (dump_file && (dump_flags & TDF_DETAILS))
943 if (did_replace)
945 fprintf (dump_file, "Folded into: ");
946 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
947 fprintf (dump_file, "\n");
949 else
950 fprintf (dump_file, "Not folded\n");
954 something_changed |= substitute_and_fold_engine->propagate_into_phi_args (bb);
956 return NULL;
961 /* Perform final substitution and folding of propagated values.
962 Process the whole function if BLOCK is null, otherwise only
963 process the blocks that BLOCK dominates. In the latter case,
964 it is the caller's responsibility to ensure that dominator
965 information is available and up-to-date.
967 The values to substitute are obtained from the engine's value_of_expr,
968 value_on_edge and value_of_stmt hooks. Statements whose result is
969 fully covered by such a value are queued for removal by a final
970 trivial dead code elimination pass, and the engine's fold_stmt hook
971 is invoked for pass specific simplification.
979 Return TRUE when something changed. */
981 bool
982 substitute_and_fold_engine::substitute_and_fold (basic_block block)
984 if (dump_file && (dump_flags & TDF_DETAILS))
985 fprintf (dump_file, "\nSubstituting values and folding statements\n\n");
987 memset (&prop_stats, 0, sizeof (prop_stats));
989 /* Don't call calculate_dominance_info when iterating over a subgraph.
990 Callers that are using the interface this way are likely to want to
991 iterate over several disjoint subgraphs, and it would be expensive
992 in enable-checking builds to revalidate the whole dominance tree
993 each time. */
994 if (block)
995 gcc_assert (dom_info_state (CDI_DOMINATORS));
996 else
997 calculate_dominance_info (CDI_DOMINATORS);
998 substitute_and_fold_dom_walker walker (CDI_DOMINATORS, this);
999 walker.walk (block ? block : ENTRY_BLOCK_PTR_FOR_FN (cfun));
1001 simple_dce_from_worklist (walker.dceworklist, walker.need_eh_cleanup);
1002 if (!bitmap_empty_p (walker.need_eh_cleanup))
1003 gimple_purge_all_dead_eh_edges (walker.need_eh_cleanup);
1004 if (!bitmap_empty_p (walker.need_ab_cleanup))
1005 gimple_purge_all_dead_abnormal_call_edges (walker.need_ab_cleanup);
1007 /* Fixup stmts that became noreturn calls. This may require splitting
1008 blocks and thus isn't possible during the dominator walk. Do this
1009 in reverse order so we don't inadvertently remove a stmt we want to
1010 fix up by visiting a dominating now-noreturn call first. */
1011 while (!walker.stmts_to_fixup.is_empty ())
1013 gimple *stmt = walker.stmts_to_fixup.pop ();
1014 if (dump_file && dump_flags & TDF_DETAILS)
1016 fprintf (dump_file, "Fixing up noreturn call ");
1017 print_gimple_stmt (dump_file, stmt, 0);
1018 fprintf (dump_file, "\n");
1020 fixup_noreturn_call (stmt);
1023 statistics_counter_event (cfun, "Constants propagated",
1024 prop_stats.num_const_prop);
1025 statistics_counter_event (cfun, "Copies propagated",
1026 prop_stats.num_copy_prop);
1027 statistics_counter_event (cfun, "Statements folded",
1028 prop_stats.num_stmts_folded);
1030 return walker.something_changed;
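/* An illustrative sketch of a hypothetical substitute_and_fold_engine
   client.  As the default value_on_edge and value_of_stmt implementations
   above suggest, a client usually only needs to say what value an SSA
   name is known to have; the dominator walk does the rest.  The class
   name and the lookup_known_value helper below are made up stand-ins
   for whatever table the propagation pass filled in:

     class example_subst_engine : public substitute_and_fold_engine
     {
      public:
       tree value_of_expr (tree name, gimple *stmt = NULL) override
       {
	 /* lookup_known_value is a hypothetical helper: return the
	    recorded constant or copy for NAME, or NULL_TREE if nothing
	    is known.  */
	 return lookup_known_value (name, stmt);
       }
     };

   After propagation has run, the pass would process the whole function
   with something like

     example_subst_engine subst;
     bool changed = subst.substitute_and_fold (NULL);

   which replaces uses, folds the touched statements and removes
   statements that became trivially dead.  */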
1034 /* Return true if we may propagate ORIG into DEST, false otherwise.
1035 If DEST_NOT_PHI_ARG_P is true then assume the propagation does
1036 not happen into a PHI argument which relaxes some constraints. */
1038 bool
1039 may_propagate_copy (tree dest, tree orig, bool dest_not_phi_arg_p)
1041 tree type_d = TREE_TYPE (dest);
1042 tree type_o = TREE_TYPE (orig);
1044 /* If ORIG is a default definition which flows in from an abnormal edge
1045 then the copy can be propagated. It is important that we do so to avoid
1046 uninitialized copies. */
1047 if (TREE_CODE (orig) == SSA_NAME
1048 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig)
1049 && SSA_NAME_IS_DEFAULT_DEF (orig)
1050 && (SSA_NAME_VAR (orig) == NULL_TREE
1051 || VAR_P (SSA_NAME_VAR (orig))))
1053 /* Otherwise if ORIG just flows in from an abnormal edge then the copy cannot
1054 be propagated. */
1055 else if (TREE_CODE (orig) == SSA_NAME
1056 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
1057 return false;
1058 /* Similarly if DEST flows in from an abnormal edge then the copy cannot be
1059 propagated. If we know we do not propagate into a PHI argument this
1060 does not apply. */
1061 else if (!dest_not_phi_arg_p
1062 && TREE_CODE (dest) == SSA_NAME
1063 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (dest))
1064 return false;
1066 /* Do not copy between types for which we *do* need a conversion. */
1067 if (!useless_type_conversion_p (type_d, type_o))
1068 return false;
1070 /* Generally propagating virtual operands is not ok as that may
1071 create overlapping life-ranges. */
1072 if (TREE_CODE (dest) == SSA_NAME && virtual_operand_p (dest))
1073 return false;
1075 /* Anything else is OK. */
1076 return true;
1079 /* Like may_propagate_copy, but use as the destination expression
1080 the principal expression (typically, the RHS) contained in
1081 statement DEST. This is more efficient when working with the
1082 gimple tuples representation. */
1084 bool
1085 may_propagate_copy_into_stmt (gimple *dest, tree orig)
1087 tree type_d;
1088 tree type_o;
1090 /* If the statement is a switch or a single-rhs assignment,
1091 then the expression to be replaced by the propagation may
1092 be an SSA_NAME. Fortunately, there is an explicit tree
1093 for the expression, so we delegate to may_propagate_copy. */
1095 if (gimple_assign_single_p (dest))
1096 return may_propagate_copy (gimple_assign_rhs1 (dest), orig, true);
1097 else if (gswitch *dest_swtch = dyn_cast <gswitch *> (dest))
1098 return may_propagate_copy (gimple_switch_index (dest_swtch), orig, true);
1100 /* In other cases, the expression is not materialized, so there
1101 is no destination to pass to may_propagate_copy. On the other
1102 hand, the expression cannot be an SSA_NAME, so the analysis
1103 is much simpler. */
1105 if (TREE_CODE (orig) == SSA_NAME
1106 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
1107 return false;
1109 if (is_gimple_assign (dest))
1110 type_d = TREE_TYPE (gimple_assign_lhs (dest));
1111 else if (gimple_code (dest) == GIMPLE_COND)
1112 type_d = boolean_type_node;
1113 else if (is_gimple_call (dest)
1114 && gimple_call_lhs (dest) != NULL_TREE)
1115 type_d = TREE_TYPE (gimple_call_lhs (dest));
1116 else
1117 gcc_unreachable ();
1119 type_o = TREE_TYPE (orig);
1121 if (!useless_type_conversion_p (type_d, type_o))
1122 return false;
1124 return true;
1127 /* Similarly, but we know that we're propagating into an ASM_EXPR. */
1129 bool
1130 may_propagate_copy_into_asm (tree dest ATTRIBUTE_UNUSED)
1132 return true;
1136 /* Replace *OP_P with value VAL (assumed to be a constant or another SSA_NAME).
1138 Use this version when not const/copy propagating values. For example,
1139 PRE uses this version when building expressions as they would appear
1140 in specific blocks taking into account actions of PHI nodes.
1142 The statement in which an expression has been replaced should be
1143 folded using fold_stmt_inplace. */
1145 void
1146 replace_exp (use_operand_p op_p, tree val)
1148 if (TREE_CODE (val) == SSA_NAME || CONSTANT_CLASS_P (val))
1149 SET_USE (op_p, val);
1150 else
1151 SET_USE (op_p, unshare_expr (val));
1155 /* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
1156 into the operand pointed to by OP_P.
1158 Use this version for const/copy propagation as it will perform additional
1159 checks to ensure validity of the const/copy propagation. */
1161 void
1162 propagate_value (use_operand_p op_p, tree val)
1164 if (flag_checking)
1165 gcc_assert (may_propagate_copy (USE_FROM_PTR (op_p), val,
1166 !is_a <gphi *> (USE_STMT (op_p))));
1167 replace_exp (op_p, val);
1171 /* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
1172 into the tree pointed to by OP_P.
1174 Use this version for const/copy propagation when SSA operands are not
1175 available. It will perform the additional checks to ensure validity of
1176 the const/copy propagation, but will not update any operand information.
1177 Be sure to mark the stmt as modified. */
1179 void
1180 propagate_tree_value (tree *op_p, tree val)
1182 if (TREE_CODE (val) == SSA_NAME)
1183 *op_p = val;
1184 else
1185 *op_p = unshare_expr (val);
1189 /* Like propagate_tree_value, but use as the operand to replace
1190 the principal expression (typically, the RHS) contained in the
1191 statement referenced by iterator GSI. Note that it is not
1192 always possible to update the statement in-place, so a new
1193 statement may be created to replace the original. */
1195 void
1196 propagate_tree_value_into_stmt (gimple_stmt_iterator *gsi, tree val)
1198 gimple *stmt = gsi_stmt (*gsi);
1200 if (is_gimple_assign (stmt))
1202 tree expr = NULL_TREE;
1203 if (gimple_assign_single_p (stmt))
1204 expr = gimple_assign_rhs1 (stmt);
1205 propagate_tree_value (&expr, val);
1206 gimple_assign_set_rhs_from_tree (gsi, expr);
1208 else if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
1210 tree lhs = NULL_TREE;
1211 tree rhs = build_zero_cst (TREE_TYPE (val));
1212 propagate_tree_value (&lhs, val);
1213 gimple_cond_set_code (cond_stmt, NE_EXPR);
1214 gimple_cond_set_lhs (cond_stmt, lhs);
1215 gimple_cond_set_rhs (cond_stmt, rhs);
1217 else if (is_gimple_call (stmt)
1218 && gimple_call_lhs (stmt) != NULL_TREE)
1220 tree expr = NULL_TREE;
1221 propagate_tree_value (&expr, val);
1222 replace_call_with_value (gsi, expr);
1224 else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
1225 propagate_tree_value (gimple_switch_index_ptr (swtch_stmt), val);
1226 else
1227 gcc_unreachable ();
1230 /* Check exits of each loop in FUN, walk over loop closed PHIs in
1231 each exit basic block and propagate degenerate PHIs. */
1233 unsigned
1234 clean_up_loop_closed_phi (function *fun)
1236 gphi *phi;
1237 tree rhs;
1238 tree lhs;
1239 gphi_iterator gsi;
1241 /* Avoid possibly quadratic work when scanning for loop exits across
1242 all loops of a nest. */
1243 if (!loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
1244 return 0;
1246 /* replace_uses_by might purge dead EH edges and we want it to also
1247 remove dominated blocks. */
1248 calculate_dominance_info (CDI_DOMINATORS);
1250 /* Walk over each loop in the function. */
1251 for (auto loop : loops_list (fun, 0))
1253 /* Check each exit edge of the loop. */
1254 auto_vec<edge> exits = get_loop_exit_edges (loop);
1255 for (edge e : exits)
1256 if (single_pred_p (e->dest))
1257 /* Walk over loop-closed PHIs. */
1258 for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi);)
1260 phi = gsi.phi ();
1261 rhs = gimple_phi_arg_def (phi, 0);
1262 lhs = gimple_phi_result (phi);
1264 if (virtual_operand_p (rhs))
1266 imm_use_iterator iter;
1267 use_operand_p use_p;
1268 gimple *stmt;
1270 FOR_EACH_IMM_USE_STMT (stmt, iter, lhs)
1271 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1272 SET_USE (use_p, rhs);
1274 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
1275 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
1276 remove_phi_node (&gsi, true);
1278 else if (may_propagate_copy (lhs, rhs))
1280 /* Dump details. */
1281 if (dump_file && (dump_flags & TDF_DETAILS))
1283 fprintf (dump_file, " Replacing '");
1284 print_generic_expr (dump_file, lhs, dump_flags);
1285 fprintf (dump_file, "' with '");
1286 print_generic_expr (dump_file, rhs, dump_flags);
1287 fprintf (dump_file, "'\n");
1290 replace_uses_by (lhs, rhs);
1291 remove_phi_node (&gsi, true);
1293 else
1294 gsi_next (&gsi);
1298 return 0;