/* Generic SSA value propagation engine.
   Copyright (C) 2004-2016 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by the
   Free Software Foundation; either version 3, or (at your option) any
   later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "dumpfile.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "domwalk.h"
#include "cfgloop.h"
#include "tree-cfgcleanup.h"
/* This file implements a generic value propagation engine based on
   the same propagation used by the SSA-CCP algorithm [1].

   Propagation is performed by simulating the execution of every
   statement that produces the value being propagated.  Simulation
   proceeds as follows:

   1- Initially, all edges of the CFG are marked not executable and
      the CFG worklist is seeded with all the statements in the entry
      basic block (block 0).

   2- Every statement S is simulated with a call to the call-back
      function SSA_PROP_VISIT_STMT.  This evaluation may produce 3
      results:

      SSA_PROP_NOT_INTERESTING: Statement S produces nothing of
	  interest and does not affect any of the work lists.
	  The statement may be simulated again if any of its input
	  operands change in future iterations of the simulator.

      SSA_PROP_VARYING: The value produced by S cannot be determined
	  at compile time.  Further simulation of S is not required.
	  If S is a conditional jump, all the outgoing edges for the
	  block are considered executable and added to the work
	  list.

      SSA_PROP_INTERESTING: S produces a value that can be computed
	  at compile time.  Its result can be propagated into the
	  statements that feed from S.  Furthermore, if S is a
	  conditional jump, only the edge known to be taken is added
	  to the work list.  Edges that are known not to execute are
	  never simulated.

   3- PHI nodes are simulated with a call to SSA_PROP_VISIT_PHI.  The
      return value from SSA_PROP_VISIT_PHI has the same semantics as
      described in #2.

   4- Two work lists are kept.  Statements are only added to these
      lists if they produce one of SSA_PROP_INTERESTING or
      SSA_PROP_VARYING.

	CFG_BLOCKS contains the list of blocks to be simulated.
	Blocks are added to this list if their incoming edges are
	found executable.

	SSA_EDGE_WORKLIST contains the list of statements that we
	need to revisit.

   5- Simulation terminates when both work lists are drained.

   Before calling ssa_propagate, it is important to clear
   prop_simulate_again_p for all the statements in the program that
   should be simulated.  This initialization allows an implementation
   to specify which statements should never be simulated.

   It is also important to compute def-use information before calling
   ssa_propagate.

   References:

     [1] Constant propagation with conditional branches,
	 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     [2] Building an Optimizing Compiler,
	 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     [3] Advanced Compiler Design and Implementation,
	 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
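
/* For illustration, a minimal (hypothetical) client of this engine
   might look like the sketch below.  The names example_visit_stmt,
   example_visit_phi and example_propagate are placeholders, not part
   of GCC; a real client such as the CCP pass supplies callbacks that
   evaluate statements against its own lattice.  A client that returns
   SSA_PROP_VARYING for everything simply tells the engine to give up
   on each statement after one visit:

     static enum ssa_prop_result
     example_visit_stmt (gimple *stmt ATTRIBUTE_UNUSED,
			 edge *taken_edge_p ATTRIBUTE_UNUSED,
			 tree *output_p ATTRIBUTE_UNUSED)
     {
       return SSA_PROP_VARYING;
     }

     static enum ssa_prop_result
     example_visit_phi (gphi *phi ATTRIBUTE_UNUSED)
     {
       return SSA_PROP_VARYING;
     }

     static void
     example_propagate (void)
     {
       ssa_propagate (example_visit_stmt, example_visit_phi);
     }

   Remember that prop_set_simulate_again must have been called for the
   statements the client wants simulated before ssa_propagate runs.  */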
/* Function pointers used to parameterize the propagation engine.  */
static ssa_prop_visit_stmt_fn ssa_prop_visit_stmt;
static ssa_prop_visit_phi_fn ssa_prop_visit_phi;

/* Worklist of control flow edge destinations.  This contains
   the CFG order number of the blocks so we can iterate in CFG
   order by visiting in bit-order.  */
static bitmap cfg_blocks;
static int *bb_to_cfg_order;
static int *cfg_order_to_bb;

/* Worklist of SSA edges which will need reexamination as their
   definition has changed.  SSA edges are def-use edges in the SSA
   web.  For each D-U edge, we store the target statement or PHI node
   UID in a bitmap.  UIDs order stmts in execution order.  */
static bitmap ssa_edge_worklist;
static vec<gimple *> uid_to_stmt;
/* Return true if the block worklist is empty.  */

static inline bool
cfg_blocks_empty_p (void)
{
  return bitmap_empty_p (cfg_blocks);
}
/* Add a basic block to the worklist.  The block must not be the ENTRY
   or EXIT block.  */

static void
cfg_blocks_add (basic_block bb)
{
  gcc_assert (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	      && bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
  bitmap_set_bit (cfg_blocks, bb_to_cfg_order[bb->index]);
}
/* Remove a block from the worklist.  */

static basic_block
cfg_blocks_get (void)
{
  gcc_assert (!cfg_blocks_empty_p ());
  int order_index = bitmap_first_set_bit (cfg_blocks);
  bitmap_clear_bit (cfg_blocks, order_index);
  return BASIC_BLOCK_FOR_FN (cfun, cfg_order_to_bb[order_index]);
}
/* We have just defined a new value for VAR.  Add all immediate uses
   of VAR to the SSA edge worklist.  */

static void
add_ssa_edge (tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      gimple *use_stmt = USE_STMT (use_p);

      /* If we did not yet simulate the block wait for this to happen
	 and do not add the stmt to the SSA edge worklist.  */
      if (! (gimple_bb (use_stmt)->flags & BB_VISITED))
	continue;

      if (prop_simulate_again_p (use_stmt)
	  && bitmap_set_bit (ssa_edge_worklist, gimple_uid (use_stmt)))
	{
	  uid_to_stmt[gimple_uid (use_stmt)] = use_stmt;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "ssa_edge_worklist: adding SSA use in ");
	      print_gimple_stmt (dump_file, use_stmt, 0, TDF_SLIM);
	    }
	}
    }
}
/* Add edge E to the control flow worklist.  */

static void
add_control_edge (edge e)
{
  basic_block bb = e->dest;
  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;

  /* If the edge had already been executed, skip it.  */
  if (e->flags & EDGE_EXECUTABLE)
    return;

  e->flags |= EDGE_EXECUTABLE;

  cfg_blocks_add (bb);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Adding destination of edge (%d -> %d) to worklist\n",
	     e->src->index, e->dest->index);
}
/* Simulate the execution of STMT and update the work lists accordingly.  */

static void
simulate_stmt (gimple *stmt)
{
  enum ssa_prop_result val = SSA_PROP_NOT_INTERESTING;
  edge taken_edge = NULL;
  tree output_name = NULL_TREE;

  /* Pull the stmt off the SSA edge worklist.  */
  bitmap_clear_bit (ssa_edge_worklist, gimple_uid (stmt));

  /* Don't bother visiting statements that are already
     considered varying by the propagator.  */
  if (!prop_simulate_again_p (stmt))
    return;

  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      val = ssa_prop_visit_phi (as_a <gphi *> (stmt));
      output_name = gimple_phi_result (stmt);
    }
  else
    val = ssa_prop_visit_stmt (stmt, &taken_edge, &output_name);

  if (val == SSA_PROP_VARYING)
    {
      prop_set_simulate_again (stmt, false);

      /* If the statement produced a new varying value, add the SSA
	 edges coming out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name);

      /* If STMT transfers control out of its basic block, add
	 all outgoing edges to the work list.  */
      if (stmt_ends_bb_p (stmt))
	{
	  edge e;
	  edge_iterator ei;
	  basic_block bb = gimple_bb (stmt);
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    add_control_edge (e);
	}
      return;
    }
  else if (val == SSA_PROP_INTERESTING)
    {
      /* If the statement produced new value, add the SSA edges coming
	 out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name);

      /* If we know which edge is going to be taken out of this block,
	 add it to the CFG work list.  */
      if (taken_edge)
	add_control_edge (taken_edge);
    }

  /* If there are no SSA uses on the stmt whose defs are simulated
     again then this stmt will be never visited again.  */
  bool has_simulate_again_uses = false;
  use_operand_p use_p;
  ssa_op_iter iter;
  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      edge_iterator ei;
      edge e;
      tree arg;
      FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->preds)
	if (!(e->flags & EDGE_EXECUTABLE)
	    || ((arg = PHI_ARG_DEF_FROM_EDGE (stmt, e))
		&& TREE_CODE (arg) == SSA_NAME
		&& !SSA_NAME_IS_DEFAULT_DEF (arg)
		&& prop_simulate_again_p (SSA_NAME_DEF_STMT (arg))))
	  {
	    has_simulate_again_uses = true;
	    break;
	  }
    }
  else
    FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
      {
	gimple *def_stmt = SSA_NAME_DEF_STMT (USE_FROM_PTR (use_p));
	if (!gimple_nop_p (def_stmt)
	    && prop_simulate_again_p (def_stmt))
	  {
	    has_simulate_again_uses = true;
	    break;
	  }
      }

  if (!has_simulate_again_uses)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "marking stmt to be not simulated again\n");
      prop_set_simulate_again (stmt, false);
    }
}
/* Process the next statement on the SSA edge worklist.  The worklist
   holds statement UIDs in a bitmap; pop the first set bit, map the UID
   back to its statement via uid_to_stmt and simulate that statement.  */

static void
process_ssa_edge_worklist ()
{
  /* Process the next entry from the worklist.  */
  unsigned stmt_uid = bitmap_first_set_bit (ssa_edge_worklist);
  bitmap_clear_bit (ssa_edge_worklist, stmt_uid);
  gimple *stmt = uid_to_stmt[stmt_uid];

  /* We should not have stmts in not yet simulated BBs on the worklist.  */
  gcc_assert (gimple_bb (stmt)->flags & BB_VISITED);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nSimulating statement: ");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  simulate_stmt (stmt);
}
/* Simulate the execution of BLOCK.  Evaluate the statement associated
   with each variable reference inside the block.  */

static void
simulate_block (basic_block block)
{
  gimple_stmt_iterator gsi;

  /* There is nothing to do for the exit block.  */
  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSimulating block %d\n", block->index);

  /* Always simulate PHI nodes, even if we have simulated this block
     before.  */
  for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
    simulate_stmt (gsi_stmt (gsi));

  /* If this is the first time we've simulated this block, then we
     must simulate each of its statements.  */
  if (! (block->flags & BB_VISITED))
    {
      gimple_stmt_iterator j;
      unsigned int normal_edge_count;
      edge e, normal_edge;
      edge_iterator ei;

      for (j = gsi_start_bb (block); !gsi_end_p (j); gsi_next (&j))
	simulate_stmt (gsi_stmt (j));

      /* Note that we have simulated this block.  */
      block->flags |= BB_VISITED;

      /* We cannot predict when abnormal and EH edges will be executed, so
	 once a block is considered executable, we consider any
	 outgoing abnormal edges as executable.

	 TODO: This is not exactly true.  Simplifying statement might
	 prove it non-throwing and also computed goto can be handled
	 when destination is known.

	 At the same time, if this block has only one successor that is
	 reached by non-abnormal edges, then add that successor to the
	 worklist.  */
      normal_edge_count = 0;
      normal_edge = NULL;
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
	    add_control_edge (e);
	  else
	    {
	      normal_edge_count++;
	      normal_edge = e;
	    }
	}

      if (normal_edge_count == 1)
	add_control_edge (normal_edge);
    }
}
/* Initialize local data structures and work lists.  */

static void
ssa_prop_init (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb;

  /* Worklists of SSA edges.  */
  ssa_edge_worklist = BITMAP_ALLOC (NULL);

  /* Worklist of basic-blocks.  */
  bb_to_cfg_order = XNEWVEC (int, last_basic_block_for_fn (cfun) + 1);
  cfg_order_to_bb = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  int n = pre_and_rev_post_order_compute_fn (cfun, NULL,
					     cfg_order_to_bb, false);
  for (int i = 0; i < n; ++i)
    bb_to_cfg_order[cfg_order_to_bb[i]] = i;
  cfg_blocks = BITMAP_ALLOC (NULL);

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_immediate_uses (dump_file);

  /* Initially assume that every edge in the CFG is not executable.
     (including the edges coming out of the entry block).  Mark blocks
     as not visited, blocks not yet visited will have all their statements
     simulated once an incoming edge gets executable.  */
  set_gimple_stmt_max_uid (cfun, 0);
  for (int i = 0; i < n; ++i)
    {
      gimple_stmt_iterator si;
      bb = BASIC_BLOCK_FOR_FN (cfun, cfg_order_to_bb[i]);

      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple *stmt = gsi_stmt (si);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}

      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple *stmt = gsi_stmt (si);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}

      bb->flags &= ~BB_VISITED;
      FOR_EACH_EDGE (e, ei, bb->succs)
	e->flags &= ~EDGE_EXECUTABLE;
    }
  uid_to_stmt.safe_grow (gimple_stmt_max_uid (cfun));

  /* Seed the algorithm by adding the successors of the entry block to the
     edge worklist.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    {
      e->flags &= ~EDGE_EXECUTABLE;
      add_control_edge (e);
    }
}
/* Free allocated storage.  */

static void
ssa_prop_fini (void)
{
  BITMAP_FREE (cfg_blocks);
  free (bb_to_cfg_order);
  free (cfg_order_to_bb);
  BITMAP_FREE (ssa_edge_worklist);
  uid_to_stmt.release ();

  basic_block bb;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    bb->flags &= ~BB_VISITED;
}
/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.c
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning any
   attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if (((code == VEC_COND_EXPR || code == COND_EXPR)
		   ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
		   : !is_gimple_val (TREE_OPERAND (expr, 0)))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
/* Return true if EXPR is a CALL_EXPR suitable for representation
   as a single GIMPLE_CALL statement.  If the arguments require
   further gimplification, return false.  */

static bool
valid_gimple_call_p (tree expr)
{
  unsigned i, nargs;

  if (TREE_CODE (expr) != CALL_EXPR)
    return false;

  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    {
      tree arg = CALL_EXPR_ARG (expr, i);
      if (is_gimple_reg_type (TREE_TYPE (arg)))
	{
	  if (!is_gimple_val (arg))
	    return false;
	}
      else
	if (!is_gimple_lvalue (arg))
	  return false;
    }

  return true;
}
/* Make SSA names defined by OLD_STMT point to NEW_STMT
   as their defining statement.  */

void
move_ssa_defining_stmt_for_defs (gimple *new_stmt, gimple *old_stmt)
{
  tree var;
  ssa_op_iter iter;

  if (gimple_in_ssa_p (cfun))
    {
      /* Make defined SSA_NAMEs point to the new
	 statement as their definition.  */
      FOR_EACH_SSA_TREE_OPERAND (var, old_stmt, iter, SSA_OP_ALL_DEFS)
	{
	  if (TREE_CODE (var) == SSA_NAME)
	    SSA_NAME_DEF_STMT (var) = new_stmt;
	}
    }
}
/* Helper function for update_gimple_call and update_call_from_tree.
   A GIMPLE_CALL STMT is being replaced with GIMPLE_CALL NEW_STMT.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
			   gimple *stmt)
{
  gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}
/* Update a GIMPLE_CALL statement at iterator *SI_P to call FN with
   NARGS arguments; the arguments, already in GIMPLE form, follow the
   NARGS argument.  */

void
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
}
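
/* As a usage illustration (hypothetical; none of the variables below
   appear in this file), a folder that has simplified some call at
   iterator GSI into a call to memset could rewrite it with:

     tree fndecl = builtin_decl_implicit (BUILT_IN_MEMSET);
     update_gimple_call (&gsi, fndecl, 3, dest, val, len);

   where DEST, VAL and LEN must already be valid GIMPLE values.  */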
/* Update a GIMPLE_CALL statement at iterator *SI_P to reflect the
   value of EXPR, which is expected to be the result of folding the
   call.  This can only be done if EXPR is a CALL_EXPR with valid
   GIMPLE operands as arguments, or if it is a suitable RHS expression
   for a GIMPLE_ASSIGN.  More complex expressions will require
   gimplification, which will introduce additional statements.  In this
   event, no update is performed, and the function returns false.
   Note that we cannot mutate a GIMPLE_CALL in-place, so we always
   replace the statement at *SI_P with an entirely new statement.
   The new statement need not be a call, e.g., if the original call
   folded to a constant.  */

bool
update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();

      return true;
    }
  else if (valid_gimple_rhs_p (expr))
    {
      tree lhs = gimple_call_lhs (stmt);
      gimple *new_stmt;

      /* The call has simplified to an expression
	 that cannot be represented as a GIMPLE_CALL.  */
      if (lhs)
	{
	  /* A value is expected.
	     Introduce a new GIMPLE_ASSIGN statement.  */
	  STRIP_USELESS_TYPE_CONVERSION (expr);
	  new_stmt = gimple_build_assign (lhs, expr);
	  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	}
      else if (!TREE_SIDE_EFFECTS (expr))
	{
	  /* No value is expected, and EXPR has no effect.
	     Replace it with an empty statement.  */
	  new_stmt = gimple_build_nop ();
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	}
      else
	{
	  /* No value is expected, but EXPR has an effect,
	     e.g., it could be a reference to a volatile
	     variable.  Create an assignment statement
	     with a dummy (unused) lhs variable.  */
	  STRIP_USELESS_TYPE_CONVERSION (expr);
	  if (gimple_in_ssa_p (cfun))
	    lhs = make_ssa_name (TREE_TYPE (expr));
	  else
	    lhs = create_tmp_var (TREE_TYPE (expr));
	  new_stmt = gimple_build_assign (lhs, expr);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
	}
      gimple_set_location (new_stmt, gimple_location (stmt));
      gsi_replace (si_p, new_stmt, false);
      return true;
    }
  else
    /* The call simplified to an expression that is
       not a valid GIMPLE RHS.  */
    return false;
}
/* Entry point to the propagation engine.

   VISIT_STMT is called for every statement visited.
   VISIT_PHI is called for every PHI node visited.  */

void
ssa_propagate (ssa_prop_visit_stmt_fn visit_stmt,
	       ssa_prop_visit_phi_fn visit_phi)
{
  ssa_prop_visit_stmt = visit_stmt;
  ssa_prop_visit_phi = visit_phi;
  ssa_prop_init ();

  /* Iterate until the worklists are empty.  */
  while (! cfg_blocks_empty_p ()
	 || ! bitmap_empty_p (ssa_edge_worklist))
    {
      /* First simulate whole blocks.  */
      if (! cfg_blocks_empty_p ())
	{
	  /* Pull the next block to simulate off the worklist.  */
	  basic_block dest_block = cfg_blocks_get ();
	  simulate_block (dest_block);
	  continue;
	}

      /* Then simulate from the SSA edge worklist.  */
      process_ssa_edge_worklist ();
    }

  ssa_prop_fini ();
}
/* Return true if STMT is of the form 'mem_ref = RHS', where 'mem_ref'
   is a non-volatile pointer dereference, a structure reference or a
   reference to a single _DECL.  Ignore volatile memory references
   because they are not interesting for the optimizers.  */

bool
stmt_makes_single_store (gimple *stmt)
{
  tree lhs;

  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_CALL)
    return false;

  if (!gimple_vdef (stmt))
    return false;

  lhs = gimple_get_lhs (stmt);

  /* A call statement may have a null LHS.  */
  if (!lhs)
    return false;

  return (!TREE_THIS_VOLATILE (lhs)
	  && (DECL_P (lhs)
	      || REFERENCE_CLASS_P (lhs)));
}
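
/* To illustrate the predicate above with (hypothetical) GIMPLE:
   '*p_1 = x_2' and 'a.f = 3' each make a single non-volatile store
   and satisfy it, while 'x_3 = y_4 + 1' has no virtual definition and
   a store to a volatile location fails the TREE_THIS_VOLATILE check,
   so both of those return false.  */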
/* Propagation statistics.  */
struct prop_stats_d
{
  long num_const_prop;
  long num_copy_prop;
  long num_stmts_folded;
  long num_dce;
};

static struct prop_stats_d prop_stats;
/* Replace USE references in statement STMT with the values returned
   by GET_VALUE.  Return true if at least one reference was replaced.  */

static bool
replace_uses_in (gimple *stmt, ssa_prop_get_value_fn get_value)
{
  bool replaced = false;
  use_operand_p use;
  ssa_op_iter iter;

  FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      tree tuse = USE_FROM_PTR (use);
      tree val = (*get_value) (tuse);

      if (val == tuse || val == NULL_TREE)
	continue;

      if (gimple_code (stmt) == GIMPLE_ASM
	  && !may_propagate_copy_into_asm (tuse))
	continue;

      if (!may_propagate_copy (tuse, val))
	continue;

      if (TREE_CODE (val) != SSA_NAME)
	prop_stats.num_const_prop++;
      else
	prop_stats.num_copy_prop++;

      propagate_value (use, val);

      replaced = true;
    }

  return replaced;
}
/* Replace the arguments of PHI with the values returned by GET_VALUE.
   Return true if at least one argument was replaced.  */

static bool
replace_phi_args_in (gphi *phi, ssa_prop_get_value_fn get_value)
{
  size_t i;
  bool replaced = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Folding PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
    }

  basic_block bb = gimple_bb (phi);
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree val = (*get_value) (arg);

	  if (val && val != arg && may_propagate_copy (arg, val))
	    {
	      edge e = gimple_phi_arg_edge (phi, i);

	      /* Avoid propagating constants into loop latch edge
		 PHI arguments as this makes coalescing the copy
		 across this edge impossible.  Only do this when the
		 argument is defined by an ASSERT_EXPR; otherwise the
		 defining stmt would get removed without replacing
		 its uses.  */
	      if (TREE_CODE (val) != SSA_NAME
		  && bb->loop_father->header == bb
		  && dominated_by_p (CDI_DOMINATORS, e->src, bb)
		  && is_gimple_assign (SSA_NAME_DEF_STMT (arg))
		  && (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (arg))
		      == ASSERT_EXPR))
		continue;

	      if (TREE_CODE (val) != SSA_NAME)
		prop_stats.num_const_prop++;
	      else
		prop_stats.num_copy_prop++;

	      propagate_value (PHI_ARG_DEF_PTR (phi, i), val);
	      replaced = true;

	      /* If we propagated a copy and this argument flows
		 through an abnormal edge, update the replacement
		 accordingly.  */
	      if (TREE_CODE (val) == SSA_NAME
		  && e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (val));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (!replaced)
	fprintf (dump_file, "No folding possible\n");
      else
	{
	  fprintf (dump_file, "Folded into: ");
	  print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }

  return replaced;
}
class substitute_and_fold_dom_walker : public dom_walker
{
public:
    substitute_and_fold_dom_walker (cdi_direction direction,
				    ssa_prop_get_value_fn get_value_fn_,
				    ssa_prop_fold_stmt_fn fold_fn_,
				    bool do_dce_)
	: dom_walker (direction), get_value_fn (get_value_fn_),
      fold_fn (fold_fn_), do_dce (do_dce_), something_changed (false)
    {
      stmts_to_remove.create (0);
      stmts_to_fixup.create (0);
      need_eh_cleanup = BITMAP_ALLOC (NULL);
    }
    ~substitute_and_fold_dom_walker ()
    {
      stmts_to_remove.release ();
      stmts_to_fixup.release ();
      BITMAP_FREE (need_eh_cleanup);
    }

    virtual edge before_dom_children (basic_block);
    virtual void after_dom_children (basic_block) {}

    ssa_prop_get_value_fn get_value_fn;
    ssa_prop_fold_stmt_fn fold_fn;
    bool do_dce;
    bool something_changed;
    vec<gimple *> stmts_to_remove;
    vec<gimple *> stmts_to_fixup;
    bitmap need_eh_cleanup;
};
edge
substitute_and_fold_dom_walker::before_dom_children (basic_block bb)
{
  /* Propagate known values into PHI nodes.  */
  for (gphi_iterator i = gsi_start_phis (bb);
       !gsi_end_p (i);
       gsi_next (&i))
    {
      gphi *phi = i.phi ();
      tree res = gimple_phi_result (phi);
      if (virtual_operand_p (res))
	continue;
      if (do_dce
	  && res && TREE_CODE (res) == SSA_NAME)
	{
	  tree sprime = get_value_fn (res);
	  if (sprime
	      && sprime != res
	      && may_propagate_copy (res, sprime))
	    {
	      stmts_to_remove.safe_push (phi);
	      continue;
	    }
	}
      something_changed |= replace_phi_args_in (phi, get_value_fn);
    }

  /* Propagate known values into stmts.  In some cases it exposes
     more trivially deletable stmts to walk backward.  */
  for (gimple_stmt_iterator i = gsi_start_bb (bb);
       !gsi_end_p (i);
       gsi_next (&i))
    {
      bool did_replace;
      gimple *stmt = gsi_stmt (i);
      enum gimple_code code = gimple_code (stmt);

      /* Ignore ASSERT_EXPRs.  They are used by VRP to generate
	 range information for names and they are discarded
	 afterwards.  */
      if (code == GIMPLE_ASSIGN
	  && TREE_CODE (gimple_assign_rhs1 (stmt)) == ASSERT_EXPR)
	continue;

      /* No point propagating into a stmt whose LHS we have a value
	 for: we can propagate that value into all its uses.  Mark
	 the stmt for removal instead.  */
      tree lhs = gimple_get_lhs (stmt);
      if (do_dce
	  && lhs && TREE_CODE (lhs) == SSA_NAME)
	{
	  tree sprime = get_value_fn (lhs);
	  if (sprime
	      && sprime != lhs
	      && may_propagate_copy (lhs, sprime)
	      && !stmt_could_throw_p (stmt)
	      && !gimple_has_side_effects (stmt))
	    {
	      stmts_to_remove.safe_push (stmt);
	      continue;
	    }
	}

      /* Replace the statement with its folded version and mark it
	 folded.  */
      did_replace = false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Folding statement: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	}

      gimple *old_stmt = stmt;
      bool was_noreturn = (is_gimple_call (stmt)
			   && gimple_call_noreturn_p (stmt));

      /* Some statements may be simplified using propagator
	 specific information.  Do this before propagating
	 into the stmt to not disturb pass specific information.  */
      if (fold_fn
	  && (*fold_fn) (&i))
	{
	  did_replace = true;
	  prop_stats.num_stmts_folded++;
	  stmt = gsi_stmt (i);
	  update_stmt (stmt);
	}

      /* Replace real uses in the statement.  */
      did_replace |= replace_uses_in (stmt, get_value_fn);

      /* If we made a replacement, fold the statement.  */
      if (did_replace)
	{
	  fold_stmt (&i, follow_single_use_edges);
	  stmt = gsi_stmt (i);
	}

      /* If this is a control statement on which the propagator left
	 some edges unexecuted, force the condition in a way consistent
	 with that.  See PR66945 for cases where the propagator can end
	 up with a different idea of a taken edge than folding
	 (once undefined behavior is involved).  */
      if (gimple_code (stmt) == GIMPLE_COND)
	{
	  if ((EDGE_SUCC (bb, 0)->flags & EDGE_EXECUTABLE)
	      ^ (EDGE_SUCC (bb, 1)->flags & EDGE_EXECUTABLE))
	    {
	      if (((EDGE_SUCC (bb, 0)->flags & EDGE_TRUE_VALUE) != 0)
		  == ((EDGE_SUCC (bb, 0)->flags & EDGE_EXECUTABLE) != 0))
		gimple_cond_make_true (as_a <gcond *> (stmt));
	      else
		gimple_cond_make_false (as_a <gcond *> (stmt));
	      did_replace = true;
	    }
	}

      /* Now cleanup.  */
      if (did_replace)
	{
	  /* If we cleaned up EH information from the statement,
	     remove EH edges.  */
	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
	    bitmap_set_bit (need_eh_cleanup, bb->index);

	  /* If we turned a not noreturn call into a noreturn one
	     schedule it for fixup.  */
	  if (!was_noreturn
	      && is_gimple_call (stmt)
	      && gimple_call_noreturn_p (stmt))
	    stmts_to_fixup.safe_push (stmt);

	  if (gimple_assign_single_p (stmt))
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);

	      if (TREE_CODE (rhs) == ADDR_EXPR)
		recompute_tree_invariant_for_addr_expr (rhs);
	    }

	  /* Determine what needs to be done to update the SSA form.  */
	  update_stmt (stmt);
	  if (!is_gimple_debug (stmt))
	    something_changed = true;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (did_replace)
	    {
	      fprintf (dump_file, "Folded into: ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }
	  else
	    fprintf (dump_file, "Not folded\n");
	}
    }
  return NULL;
}
/* Perform final substitution and folding of propagated values.

   GET_VALUE_FN is used to look up the single value that should be
   substituted at every use of an SSA name; if it returns NULL_TREE
   for a name, no value is substituted for that name.

   If FOLD_FN is non-NULL the function will be invoked on all statements
   before propagating values for pass specific simplification.

   DO_DCE is true if trivially dead stmts can be removed.

   If DO_DCE is true, the statements within a BB are walked from
   last to first element.  Otherwise we scan from first to last element.

   Return TRUE when something changed.  */

bool
substitute_and_fold (ssa_prop_get_value_fn get_value_fn,
		     ssa_prop_fold_stmt_fn fold_fn,
		     bool do_dce)
{
  gcc_assert (get_value_fn);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSubstituting values and folding statements\n\n");

  memset (&prop_stats, 0, sizeof (prop_stats));

  calculate_dominance_info (CDI_DOMINATORS);
  substitute_and_fold_dom_walker walker (CDI_DOMINATORS,
					 get_value_fn, fold_fn, do_dce);
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* We cannot remove stmts during the BB walk, especially not release
     SSA names there as that destroys the lattice of our callers.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!walker.stmts_to_remove.is_empty ())
    {
      gimple *stmt = walker.stmts_to_remove.pop ();
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Removing dead stmt ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "\n");
	}
      prop_stats.num_dce++;
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
	remove_phi_node (&gsi, true);
      else
	{
	  unlink_stmt_vdef (stmt);
	  gsi_remove (&gsi, true);
	  release_defs (stmt);
	}
    }

  if (!bitmap_empty_p (walker.need_eh_cleanup))
    gimple_purge_all_dead_eh_edges (walker.need_eh_cleanup);

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!walker.stmts_to_fixup.is_empty ())
    {
      gimple *stmt = walker.stmts_to_fixup.pop ();
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "\n");
	}
      fixup_noreturn_call (stmt);
    }

  statistics_counter_event (cfun, "Constants propagated",
			    prop_stats.num_const_prop);
  statistics_counter_event (cfun, "Copies propagated",
			    prop_stats.num_copy_prop);
  statistics_counter_event (cfun, "Statements folded",
			    prop_stats.num_stmts_folded);
  statistics_counter_event (cfun, "Statements deleted",
			    prop_stats.num_dce);

  return walker.something_changed;
}
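
/* A typical (hypothetical) use from a pass that has finished its
   propagation phase; example_get_value and example_lattice below are
   placeholders for the pass's own lattice lookup, not part of GCC:

     static tree
     example_get_value (tree name)
     {
       return example_lattice[SSA_NAME_VERSION (name)];
     }

     ...
     bool changed = substitute_and_fold (example_get_value, NULL, true);
*/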
/* Return true if we may propagate ORIG into DEST, false otherwise.  */

bool
may_propagate_copy (tree dest, tree orig)
{
  tree type_d = TREE_TYPE (dest);
  tree type_o = TREE_TYPE (orig);

  /* If ORIG is a default definition which flows in from an abnormal edge
     then the copy can be propagated.  It is important that we do so to avoid
     uninitialized copies.  */
  if (TREE_CODE (orig) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig)
      && SSA_NAME_IS_DEFAULT_DEF (orig)
      && (SSA_NAME_VAR (orig) == NULL_TREE
	  || TREE_CODE (SSA_NAME_VAR (orig)) == VAR_DECL))
    ;
  /* Otherwise if ORIG just flows in from an abnormal edge then the copy cannot
     be propagated.  */
  else if (TREE_CODE (orig) == SSA_NAME
	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
    return false;
  /* Similarly if DEST flows in from an abnormal edge then the copy cannot be
     propagated.  */
  else if (TREE_CODE (dest) == SSA_NAME
	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (dest))
    return false;

  /* Do not copy between types for which we *do* need a conversion.  */
  if (!useless_type_conversion_p (type_d, type_o))
    return false;

  /* Generally propagating virtual operands is not ok as that may
     create overlapping life-ranges.  */
  if (TREE_CODE (dest) == SSA_NAME && virtual_operand_p (dest))
    return false;

  /* Anything else is OK.  */
  return true;
}
/* Like may_propagate_copy, but use as the destination expression
   the principal expression (typically, the RHS) contained in
   statement DEST.  This is more efficient when working with the
   gimple tuples representation.  */

bool
may_propagate_copy_into_stmt (gimple *dest, tree orig)
{
  tree type_d;
  tree type_o;

  /* If the statement is a switch or a single-rhs assignment,
     then the expression to be replaced by the propagation may
     be an SSA_NAME.  Fortunately, there is an explicit tree
     for the expression, so we delegate to may_propagate_copy.  */

  if (gimple_assign_single_p (dest))
    return may_propagate_copy (gimple_assign_rhs1 (dest), orig);
  else if (gswitch *dest_swtch = dyn_cast <gswitch *> (dest))
    return may_propagate_copy (gimple_switch_index (dest_swtch), orig);

  /* In other cases, the expression is not materialized, so there
     is no destination to pass to may_propagate_copy.  On the other
     hand, the expression cannot be an SSA_NAME, so the analysis
     is much simpler.  */

  if (TREE_CODE (orig) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
    return false;

  if (is_gimple_assign (dest))
    type_d = TREE_TYPE (gimple_assign_lhs (dest));
  else if (gimple_code (dest) == GIMPLE_COND)
    type_d = boolean_type_node;
  else if (is_gimple_call (dest)
	   && gimple_call_lhs (dest) != NULL_TREE)
    type_d = TREE_TYPE (gimple_call_lhs (dest));
  else
    gcc_unreachable ();

  type_o = TREE_TYPE (orig);

  if (!useless_type_conversion_p (type_d, type_o))
    return false;

  return true;
}
/* Similarly, but we know that we're propagating into an ASM_EXPR.  */

bool
may_propagate_copy_into_asm (tree dest ATTRIBUTE_UNUSED)
{
  return true;
}
/* Common code for propagate_value and replace_exp.

   Replace use operand OP_P with VAL.  FOR_PROPAGATION indicates if the
   replacement is done to propagate a value or not.  */

static void
replace_exp_1 (use_operand_p op_p, tree val,
	       bool for_propagation ATTRIBUTE_UNUSED)
{
  if (flag_checking)
    {
      tree op = USE_FROM_PTR (op_p);
      gcc_assert (!(for_propagation
		    && TREE_CODE (op) == SSA_NAME
		    && TREE_CODE (val) == SSA_NAME
		    && !may_propagate_copy (op, val)));
    }

  if (TREE_CODE (val) == SSA_NAME)
    SET_USE (op_p, val);
  else
    SET_USE (op_p, unshare_expr (val));
}
/* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
   into the operand pointed to by OP_P.

   Use this version for const/copy propagation as it will perform additional
   checks to ensure validity of the const/copy propagation.  */

void
propagate_value (use_operand_p op_p, tree val)
{
  replace_exp_1 (op_p, val, true);
}
/* Replace *OP_P with value VAL (assumed to be a constant or another SSA_NAME).

   Use this version when not const/copy propagating values.  For example,
   PRE uses this version when building expressions as they would appear
   in specific blocks taking into account actions of PHI nodes.

   The statement in which an expression has been replaced should be
   folded using fold_stmt_inplace.  */

void
replace_exp (use_operand_p op_p, tree val)
{
  replace_exp_1 (op_p, val, false);
}
/* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
   into the tree pointed to by OP_P.

   Use this version for const/copy propagation when SSA operands are not
   available.  It will perform the additional checks to ensure validity of
   the const/copy propagation, but will not update any operand information.
   Be sure to mark the stmt as modified.  */

void
propagate_tree_value (tree *op_p, tree val)
{
  if (TREE_CODE (val) == SSA_NAME)
    *op_p = val;
  else
    *op_p = unshare_expr (val);
}
/* Like propagate_tree_value, but use as the operand to replace
   the principal expression (typically, the RHS) contained in the
   statement referenced by iterator GSI.  Note that it is not
   always possible to update the statement in-place, so a new
   statement may be created to replace the original.  */

void
propagate_tree_value_into_stmt (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);

  if (is_gimple_assign (stmt))
    {
      tree expr = NULL_TREE;
      if (gimple_assign_single_p (stmt))
	expr = gimple_assign_rhs1 (stmt);
      propagate_tree_value (&expr, val);
      gimple_assign_set_rhs_from_tree (gsi, expr);
    }
  else if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      tree lhs = NULL_TREE;
      tree rhs = build_zero_cst (TREE_TYPE (val));
      propagate_tree_value (&lhs, val);
      gimple_cond_set_code (cond_stmt, NE_EXPR);
      gimple_cond_set_lhs (cond_stmt, lhs);
      gimple_cond_set_rhs (cond_stmt, rhs);
    }
  else if (is_gimple_call (stmt)
	   && gimple_call_lhs (stmt) != NULL_TREE)
    {
      tree expr = NULL_TREE;
      bool res;
      propagate_tree_value (&expr, val);
      res = update_call_from_tree (gsi, expr);
      gcc_assert (res);
    }
  else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
    propagate_tree_value (gimple_switch_index_ptr (swtch_stmt), val);
  else
    gcc_unreachable ();
}
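
/* For instance (hypothetical SSA names), if VAL is t_5 and the
   statement at GSI is the condition 'if (x_1 != 0)', the GIMPLE_COND
   arm above rewrites it to 'if (t_5 != 0)' by building a zero constant
   of t_5's type and forcing the predicate to NE_EXPR.  */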