1 /* Generic SSA value propagation engine.
2 Copyright (C) 2004-2016 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "gimple-pretty-print.h"
30 #include "gimple-fold.h"
33 #include "gimple-iterator.h"
36 #include "tree-ssa-propagate.h"
39 #include "tree-cfgcleanup.h"
42 /* This file implements a generic value propagation engine based on
43 the same propagation used by the SSA-CCP algorithm [1].
45 Propagation is performed by simulating the execution of every
46 statement that produces the value being propagated. Simulation
49 1- Initially, all edges of the CFG are marked not executable and
50 the CFG worklist is seeded with all the statements in the entry
51 basic block (block 0).
53 2- Every statement S is simulated with a call to the call-back
54 function SSA_PROP_VISIT_STMT. This evaluation may produce 3
57 SSA_PROP_NOT_INTERESTING: Statement S produces nothing of
58 interest and does not affect any of the work lists.
59 The statement may be simulated again if any of its input
60 operands change in future iterations of the simulator.
62 SSA_PROP_VARYING: The value produced by S cannot be determined
63 at compile time. Further simulation of S is not required.
64 If S is a conditional jump, all the outgoing edges for the
65 block are considered executable and added to the work
68 SSA_PROP_INTERESTING: S produces a value that can be computed
69 at compile time. Its result can be propagated into the
70 statements that feed from S. Furthermore, if S is a
71 conditional jump, only the edge known to be taken is added
72 to the work list. Edges that are known not to execute are
75 3- PHI nodes are simulated with a call to SSA_PROP_VISIT_PHI. The
76 return value from SSA_PROP_VISIT_PHI has the same semantics as
79 4- Three work lists are kept. Statements are only added to these
80 lists if they produce one of SSA_PROP_INTERESTING or
83 CFG_BLOCKS contains the list of blocks to be simulated.
84 Blocks are added to this list if their incoming edges are
87 SSA_EDGE_WORKLIST contains the list of statements that we
90 5- Simulation terminates when all three work lists are drained.
92 Before calling ssa_propagate, it is important to clear
93 prop_simulate_again_p for all the statements in the program that
94 should be simulated. This initialization allows an implementation
95 to specify which statements should never be simulated.
97 It is also important to compute def-use information before calling
102 [1] Constant propagation with conditional branches,
103 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
105 [2] Building an Optimizing Compiler,
106 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
108 [3] Advanced Compiler Design and Implementation,
109 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
/* Function pointers used to parameterize the propagation engine.
   Installed by ssa_propagate from its VISIT_STMT/VISIT_PHI arguments.  */
static ssa_prop_visit_stmt_fn ssa_prop_visit_stmt;
static ssa_prop_visit_phi_fn ssa_prop_visit_phi;

/* Worklist of control flow edge destinations.  This contains
   the CFG order number of the blocks so we can iterate in CFG
   order by visiting in bit-order.  */
static bitmap cfg_blocks;
/* Maps between a basic-block index and its position in the CFG order
   computed by pre_and_rev_post_order_compute_fn, in both directions.  */
static int *bb_to_cfg_order;
static int *cfg_order_to_bb;

/* Worklist of SSA edges which will need reexamination as their
   definition has changed.  SSA edges are def-use edges in the SSA
   web.  For each D-U edge, we store the target statement or PHI node
   UID in a bitmap.  UIDs order stmts in execution order.  */
static bitmap ssa_edge_worklist;
/* Maps a statement UID back to the statement, so worklist members can
   be recovered from the bitmap.  */
static vec<gimple *> uid_to_stmt;
129 /* Return true if the block worklist empty. */
132 cfg_blocks_empty_p (void)
134 return bitmap_empty_p (cfg_blocks
);
138 /* Add a basic block to the worklist. The block must not be the ENTRY
142 cfg_blocks_add (basic_block bb
)
144 gcc_assert (bb
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
145 && bb
!= EXIT_BLOCK_PTR_FOR_FN (cfun
));
146 bitmap_set_bit (cfg_blocks
, bb_to_cfg_order
[bb
->index
]);
150 /* Remove a block from the worklist. */
153 cfg_blocks_get (void)
155 gcc_assert (!cfg_blocks_empty_p ());
156 int order_index
= bitmap_first_set_bit (cfg_blocks
);
157 bitmap_clear_bit (cfg_blocks
, order_index
);
158 return BASIC_BLOCK_FOR_FN (cfun
, cfg_order_to_bb
[order_index
]);
162 /* We have just defined a new value for VAR. If IS_VARYING is true,
163 add all immediate uses of VAR to VARYING_SSA_EDGES, otherwise add
164 them to INTERESTING_SSA_EDGES. */
167 add_ssa_edge (tree var
)
169 imm_use_iterator iter
;
172 FOR_EACH_IMM_USE_FAST (use_p
, iter
, var
)
174 gimple
*use_stmt
= USE_STMT (use_p
);
176 /* If we did not yet simulate the block wait for this to happen
177 and do not add the stmt to the SSA edge worklist. */
178 if (! (gimple_bb (use_stmt
)->flags
& BB_VISITED
))
181 if (prop_simulate_again_p (use_stmt
)
182 && bitmap_set_bit (ssa_edge_worklist
, gimple_uid (use_stmt
)))
184 uid_to_stmt
[gimple_uid (use_stmt
)] = use_stmt
;
185 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
187 fprintf (dump_file
, "ssa_edge_worklist: adding SSA use in ");
188 print_gimple_stmt (dump_file
, use_stmt
, 0, TDF_SLIM
);
195 /* Add edge E to the control flow worklist. */
198 add_control_edge (edge e
)
200 basic_block bb
= e
->dest
;
201 if (bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
204 /* If the edge had already been executed, skip it. */
205 if (e
->flags
& EDGE_EXECUTABLE
)
208 e
->flags
|= EDGE_EXECUTABLE
;
212 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
213 fprintf (dump_file
, "Adding destination of edge (%d -> %d) to worklist\n",
214 e
->src
->index
, e
->dest
->index
);
218 /* Simulate the execution of STMT and update the work lists accordingly. */
221 simulate_stmt (gimple
*stmt
)
223 enum ssa_prop_result val
= SSA_PROP_NOT_INTERESTING
;
224 edge taken_edge
= NULL
;
225 tree output_name
= NULL_TREE
;
227 /* Pull the stmt off the SSA edge worklist. */
228 bitmap_clear_bit (ssa_edge_worklist
, gimple_uid (stmt
));
230 /* Don't bother visiting statements that are already
231 considered varying by the propagator. */
232 if (!prop_simulate_again_p (stmt
))
235 if (gimple_code (stmt
) == GIMPLE_PHI
)
237 val
= ssa_prop_visit_phi (as_a
<gphi
*> (stmt
));
238 output_name
= gimple_phi_result (stmt
);
241 val
= ssa_prop_visit_stmt (stmt
, &taken_edge
, &output_name
);
243 if (val
== SSA_PROP_VARYING
)
245 prop_set_simulate_again (stmt
, false);
247 /* If the statement produced a new varying value, add the SSA
248 edges coming out of OUTPUT_NAME. */
250 add_ssa_edge (output_name
);
252 /* If STMT transfers control out of its basic block, add
253 all outgoing edges to the work list. */
254 if (stmt_ends_bb_p (stmt
))
258 basic_block bb
= gimple_bb (stmt
);
259 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
260 add_control_edge (e
);
264 else if (val
== SSA_PROP_INTERESTING
)
266 /* If the statement produced new value, add the SSA edges coming
267 out of OUTPUT_NAME. */
269 add_ssa_edge (output_name
);
271 /* If we know which edge is going to be taken out of this block,
272 add it to the CFG work list. */
274 add_control_edge (taken_edge
);
277 /* If there are no SSA uses on the stmt whose defs are simulated
278 again then this stmt will be never visited again. */
279 bool has_simulate_again_uses
= false;
282 if (gimple_code (stmt
) == GIMPLE_PHI
)
287 FOR_EACH_EDGE (e
, ei
, gimple_bb (stmt
)->preds
)
288 if (!(e
->flags
& EDGE_EXECUTABLE
)
289 || ((arg
= PHI_ARG_DEF_FROM_EDGE (stmt
, e
))
290 && TREE_CODE (arg
) == SSA_NAME
291 && !SSA_NAME_IS_DEFAULT_DEF (arg
)
292 && prop_simulate_again_p (SSA_NAME_DEF_STMT (arg
))))
294 has_simulate_again_uses
= true;
299 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, iter
, SSA_OP_USE
)
301 gimple
*def_stmt
= SSA_NAME_DEF_STMT (USE_FROM_PTR (use_p
));
302 if (!gimple_nop_p (def_stmt
)
303 && prop_simulate_again_p (def_stmt
))
305 has_simulate_again_uses
= true;
309 if (!has_simulate_again_uses
)
311 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
312 fprintf (dump_file
, "marking stmt to be not simulated again\n");
313 prop_set_simulate_again (stmt
, false);
317 /* Process an SSA edge worklist. WORKLIST is the SSA edge worklist to
318 drain. This pops statements off the given WORKLIST and processes
319 them until one statement was simulated or there are no more statements
320 on WORKLIST. We take a pointer to WORKLIST because it may be reallocated
321 when an SSA edge is added to it in simulate_stmt. Return true if a stmt
325 process_ssa_edge_worklist ()
327 /* Process the next entry from the worklist. */
328 unsigned stmt_uid
= bitmap_first_set_bit (ssa_edge_worklist
);
329 bitmap_clear_bit (ssa_edge_worklist
, stmt_uid
);
330 gimple
*stmt
= uid_to_stmt
[stmt_uid
];
332 /* We should not have stmts in not yet simulated BBs on the worklist. */
333 gcc_assert (gimple_bb (stmt
)->flags
& BB_VISITED
);
335 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
337 fprintf (dump_file
, "\nSimulating statement: ");
338 print_gimple_stmt (dump_file
, stmt
, 0, dump_flags
);
341 simulate_stmt (stmt
);
345 /* Simulate the execution of BLOCK. Evaluate the statement associated
346 with each variable reference inside the block. */
349 simulate_block (basic_block block
)
351 gimple_stmt_iterator gsi
;
353 /* There is nothing to do for the exit block. */
354 if (block
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
357 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
358 fprintf (dump_file
, "\nSimulating block %d\n", block
->index
);
360 /* Always simulate PHI nodes, even if we have simulated this block
362 for (gsi
= gsi_start_phis (block
); !gsi_end_p (gsi
); gsi_next (&gsi
))
363 simulate_stmt (gsi_stmt (gsi
));
365 /* If this is the first time we've simulated this block, then we
366 must simulate each of its statements. */
367 if (! (block
->flags
& BB_VISITED
))
369 gimple_stmt_iterator j
;
370 unsigned int normal_edge_count
;
374 for (j
= gsi_start_bb (block
); !gsi_end_p (j
); gsi_next (&j
))
375 simulate_stmt (gsi_stmt (j
));
377 /* Note that we have simulated this block. */
378 block
->flags
|= BB_VISITED
;
380 /* We can not predict when abnormal and EH edges will be executed, so
381 once a block is considered executable, we consider any
382 outgoing abnormal edges as executable.
384 TODO: This is not exactly true. Simplifying statement might
385 prove it non-throwing and also computed goto can be handled
386 when destination is known.
388 At the same time, if this block has only one successor that is
389 reached by non-abnormal edges, then add that successor to the
391 normal_edge_count
= 0;
393 FOR_EACH_EDGE (e
, ei
, block
->succs
)
395 if (e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
))
396 add_control_edge (e
);
404 if (normal_edge_count
== 1)
405 add_control_edge (normal_edge
);
410 /* Initialize local data structures and work lists. */
419 /* Worklists of SSA edges. */
420 ssa_edge_worklist
= BITMAP_ALLOC (NULL
);
422 /* Worklist of basic-blocks. */
423 bb_to_cfg_order
= XNEWVEC (int, last_basic_block_for_fn (cfun
) + 1);
424 cfg_order_to_bb
= XNEWVEC (int, n_basic_blocks_for_fn (cfun
));
425 int n
= pre_and_rev_post_order_compute_fn (cfun
, NULL
,
426 cfg_order_to_bb
, false);
427 for (int i
= 0; i
< n
; ++i
)
428 bb_to_cfg_order
[cfg_order_to_bb
[i
]] = i
;
429 cfg_blocks
= BITMAP_ALLOC (NULL
);
431 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
432 dump_immediate_uses (dump_file
);
434 /* Initially assume that every edge in the CFG is not executable.
435 (including the edges coming out of the entry block). Mark blocks
436 as not visited, blocks not yet visited will have all their statements
437 simulated once an incoming edge gets executable. */
438 set_gimple_stmt_max_uid (cfun
, 0);
439 for (int i
= 0; i
< n
; ++i
)
441 gimple_stmt_iterator si
;
442 bb
= BASIC_BLOCK_FOR_FN (cfun
, cfg_order_to_bb
[i
]);
444 for (si
= gsi_start_phis (bb
); !gsi_end_p (si
); gsi_next (&si
))
446 gimple
*stmt
= gsi_stmt (si
);
447 gimple_set_uid (stmt
, inc_gimple_stmt_max_uid (cfun
));
450 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
452 gimple
*stmt
= gsi_stmt (si
);
453 gimple_set_uid (stmt
, inc_gimple_stmt_max_uid (cfun
));
456 bb
->flags
&= ~BB_VISITED
;
457 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
458 e
->flags
&= ~EDGE_EXECUTABLE
;
460 uid_to_stmt
.safe_grow (gimple_stmt_max_uid (cfun
));
462 /* Seed the algorithm by adding the successors of the entry block to the
464 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
)
466 e
->flags
&= ~EDGE_EXECUTABLE
;
467 add_control_edge (e
);
472 /* Free allocated storage. */
477 BITMAP_FREE (cfg_blocks
);
478 free (bb_to_cfg_order
);
479 free (cfg_order_to_bb
);
480 BITMAP_FREE (ssa_edge_worklist
);
481 uid_to_stmt
.release ();
483 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR_FOR_FN (cfun
), NULL
, next_bb
)
484 bb
->flags
&= ~BB_VISITED
;
488 /* Return true if EXPR is an acceptable right-hand-side for a
489 GIMPLE assignment. We validate the entire tree, not just
490 the root node, thus catching expressions that embed complex
491 operands that are not permitted in GIMPLE. This function
492 is needed because the folding routines in fold-const.c
493 may return such expressions in some cases, e.g., an array
494 access with an embedded index addition. It may make more
495 sense to have folding routines that are sensitive to the
496 constraints on GIMPLE operands, rather than abandoning any
497 any attempt to fold if the usual folding turns out to be too
501 valid_gimple_rhs_p (tree expr
)
503 enum tree_code code
= TREE_CODE (expr
);
505 switch (TREE_CODE_CLASS (code
))
507 case tcc_declaration
:
508 if (!is_gimple_variable (expr
))
513 /* All constants are ok. */
517 /* GENERIC allows comparisons with non-boolean types, reject
518 those for GIMPLE. Let vector-typed comparisons pass - rules
519 for GENERIC and GIMPLE are the same here. */
520 if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr
))
521 && (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
522 || TYPE_PRECISION (TREE_TYPE (expr
)) == 1))
523 && ! VECTOR_TYPE_P (TREE_TYPE (expr
)))
528 if (!is_gimple_val (TREE_OPERAND (expr
, 0))
529 || !is_gimple_val (TREE_OPERAND (expr
, 1)))
534 if (!is_gimple_val (TREE_OPERAND (expr
, 0)))
544 if (is_gimple_min_invariant (expr
))
546 t
= TREE_OPERAND (expr
, 0);
547 while (handled_component_p (t
))
549 /* ??? More checks needed, see the GIMPLE verifier. */
550 if ((TREE_CODE (t
) == ARRAY_REF
551 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
552 && !is_gimple_val (TREE_OPERAND (t
, 1)))
554 t
= TREE_OPERAND (t
, 0);
556 if (!is_gimple_id (t
))
562 if (get_gimple_rhs_class (code
) == GIMPLE_TERNARY_RHS
)
564 if (((code
== VEC_COND_EXPR
|| code
== COND_EXPR
)
565 ? !is_gimple_condexpr (TREE_OPERAND (expr
, 0))
566 : !is_gimple_val (TREE_OPERAND (expr
, 0)))
567 || !is_gimple_val (TREE_OPERAND (expr
, 1))
568 || !is_gimple_val (TREE_OPERAND (expr
, 2)))
579 case tcc_exceptional
:
580 if (code
== CONSTRUCTOR
)
584 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr
), i
, elt
)
585 if (!is_gimple_val (elt
))
589 if (code
!= SSA_NAME
)
594 if (code
== BIT_FIELD_REF
)
595 return is_gimple_val (TREE_OPERAND (expr
, 0));
606 /* Return true if EXPR is a CALL_EXPR suitable for representation
607 as a single GIMPLE_CALL statement. If the arguments require
608 further gimplification, return false. */
611 valid_gimple_call_p (tree expr
)
615 if (TREE_CODE (expr
) != CALL_EXPR
)
618 nargs
= call_expr_nargs (expr
);
619 for (i
= 0; i
< nargs
; i
++)
621 tree arg
= CALL_EXPR_ARG (expr
, i
);
622 if (is_gimple_reg_type (TREE_TYPE (arg
)))
624 if (!is_gimple_val (arg
))
628 if (!is_gimple_lvalue (arg
))
636 /* Make SSA names defined by OLD_STMT point to NEW_STMT
637 as their defining statement. */
640 move_ssa_defining_stmt_for_defs (gimple
*new_stmt
, gimple
*old_stmt
)
645 if (gimple_in_ssa_p (cfun
))
647 /* Make defined SSA_NAMEs point to the new
648 statement as their definition. */
649 FOR_EACH_SSA_TREE_OPERAND (var
, old_stmt
, iter
, SSA_OP_ALL_DEFS
)
651 if (TREE_CODE (var
) == SSA_NAME
)
652 SSA_NAME_DEF_STMT (var
) = new_stmt
;
657 /* Helper function for update_gimple_call and update_call_from_tree.
658 A GIMPLE_CALL STMT is being replaced with GIMPLE_CALL NEW_STMT. */
661 finish_update_gimple_call (gimple_stmt_iterator
*si_p
, gimple
*new_stmt
,
664 gimple_call_set_lhs (new_stmt
, gimple_call_lhs (stmt
));
665 move_ssa_defining_stmt_for_defs (new_stmt
, stmt
);
666 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
667 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
668 gimple_set_location (new_stmt
, gimple_location (stmt
));
669 if (gimple_block (new_stmt
) == NULL_TREE
)
670 gimple_set_block (new_stmt
, gimple_block (stmt
));
671 gsi_replace (si_p
, new_stmt
, false);
674 /* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
675 with number of arguments NARGS, where the arguments in GIMPLE form
676 follow NARGS argument. */
679 update_gimple_call (gimple_stmt_iterator
*si_p
, tree fn
, int nargs
, ...)
682 gcall
*new_stmt
, *stmt
= as_a
<gcall
*> (gsi_stmt (*si_p
));
684 gcc_assert (is_gimple_call (stmt
));
685 va_start (ap
, nargs
);
686 new_stmt
= gimple_build_call_valist (fn
, nargs
, ap
);
687 finish_update_gimple_call (si_p
, new_stmt
, stmt
);
692 /* Update a GIMPLE_CALL statement at iterator *SI_P to reflect the
693 value of EXPR, which is expected to be the result of folding the
694 call. This can only be done if EXPR is a CALL_EXPR with valid
695 GIMPLE operands as arguments, or if it is a suitable RHS expression
696 for a GIMPLE_ASSIGN. More complex expressions will require
697 gimplification, which will introduce additional statements. In this
698 event, no update is performed, and the function returns false.
699 Note that we cannot mutate a GIMPLE_CALL in-place, so we always
700 replace the statement at *SI_P with an entirely new statement.
701 The new statement need not be a call, e.g., if the original call
702 folded to a constant. */
705 update_call_from_tree (gimple_stmt_iterator
*si_p
, tree expr
)
707 gimple
*stmt
= gsi_stmt (*si_p
);
709 if (valid_gimple_call_p (expr
))
711 /* The call has simplified to another call. */
712 tree fn
= CALL_EXPR_FN (expr
);
714 unsigned nargs
= call_expr_nargs (expr
);
715 vec
<tree
> args
= vNULL
;
721 args
.safe_grow_cleared (nargs
);
723 for (i
= 0; i
< nargs
; i
++)
724 args
[i
] = CALL_EXPR_ARG (expr
, i
);
727 new_stmt
= gimple_build_call_vec (fn
, args
);
728 finish_update_gimple_call (si_p
, new_stmt
, stmt
);
733 else if (valid_gimple_rhs_p (expr
))
735 tree lhs
= gimple_call_lhs (stmt
);
738 /* The call has simplified to an expression
739 that cannot be represented as a GIMPLE_CALL. */
742 /* A value is expected.
743 Introduce a new GIMPLE_ASSIGN statement. */
744 STRIP_USELESS_TYPE_CONVERSION (expr
);
745 new_stmt
= gimple_build_assign (lhs
, expr
);
746 move_ssa_defining_stmt_for_defs (new_stmt
, stmt
);
747 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
748 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
750 else if (!TREE_SIDE_EFFECTS (expr
))
752 /* No value is expected, and EXPR has no effect.
753 Replace it with an empty statement. */
754 new_stmt
= gimple_build_nop ();
755 if (gimple_in_ssa_p (cfun
))
757 unlink_stmt_vdef (stmt
);
763 /* No value is expected, but EXPR has an effect,
764 e.g., it could be a reference to a volatile
765 variable. Create an assignment statement
766 with a dummy (unused) lhs variable. */
767 STRIP_USELESS_TYPE_CONVERSION (expr
);
768 if (gimple_in_ssa_p (cfun
))
769 lhs
= make_ssa_name (TREE_TYPE (expr
));
771 lhs
= create_tmp_var (TREE_TYPE (expr
));
772 new_stmt
= gimple_build_assign (lhs
, expr
);
773 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
774 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
775 move_ssa_defining_stmt_for_defs (new_stmt
, stmt
);
777 gimple_set_location (new_stmt
, gimple_location (stmt
));
778 gsi_replace (si_p
, new_stmt
, false);
782 /* The call simplified to an expression that is
783 not a valid GIMPLE RHS. */
788 /* Entry point to the propagation engine.
790 VISIT_STMT is called for every statement visited.
791 VISIT_PHI is called for every PHI node visited. */
794 ssa_propagate (ssa_prop_visit_stmt_fn visit_stmt
,
795 ssa_prop_visit_phi_fn visit_phi
)
797 ssa_prop_visit_stmt
= visit_stmt
;
798 ssa_prop_visit_phi
= visit_phi
;
802 /* Iterate until the worklists are empty. */
803 while (! cfg_blocks_empty_p ()
804 || ! bitmap_empty_p (ssa_edge_worklist
))
806 /* First simulate whole blocks. */
807 if (! cfg_blocks_empty_p ())
809 /* Pull the next block to simulate off the worklist. */
810 basic_block dest_block
= cfg_blocks_get ();
811 simulate_block (dest_block
);
815 /* Then simulate from the SSA edge worklist. */
816 process_ssa_edge_worklist ();
823 /* Return true if STMT is of the form 'mem_ref = RHS', where 'mem_ref'
824 is a non-volatile pointer dereference, a structure reference or a
825 reference to a single _DECL. Ignore volatile memory references
826 because they are not interesting for the optimizers. */
829 stmt_makes_single_store (gimple
*stmt
)
833 if (gimple_code (stmt
) != GIMPLE_ASSIGN
834 && gimple_code (stmt
) != GIMPLE_CALL
)
837 if (!gimple_vdef (stmt
))
840 lhs
= gimple_get_lhs (stmt
);
842 /* A call statement may have a null LHS. */
846 return (!TREE_THIS_VOLATILE (lhs
)
848 || REFERENCE_CLASS_P (lhs
)));
/* Propagation statistics, accumulated during substitute_and_fold and
   reported via statistics_counter_event.  */
struct prop_stats_d
{
  long num_const_prop;		/* Constants propagated into uses.  */
  long num_copy_prop;		/* Copies (SSA names) propagated.  */
  long num_stmts_folded;	/* Statements simplified by FOLD_FN.  */
  long num_dce;			/* Trivially dead statements removed.  */
};

static struct prop_stats_d prop_stats;
863 /* Replace USE references in statement STMT with the values stored in
864 PROP_VALUE. Return true if at least one reference was replaced. */
867 replace_uses_in (gimple
*stmt
, ssa_prop_get_value_fn get_value
)
869 bool replaced
= false;
873 FOR_EACH_SSA_USE_OPERAND (use
, stmt
, iter
, SSA_OP_USE
)
875 tree tuse
= USE_FROM_PTR (use
);
876 tree val
= (*get_value
) (tuse
);
878 if (val
== tuse
|| val
== NULL_TREE
)
881 if (gimple_code (stmt
) == GIMPLE_ASM
882 && !may_propagate_copy_into_asm (tuse
))
885 if (!may_propagate_copy (tuse
, val
))
888 if (TREE_CODE (val
) != SSA_NAME
)
889 prop_stats
.num_const_prop
++;
891 prop_stats
.num_copy_prop
++;
893 propagate_value (use
, val
);
902 /* Replace propagated values into all the arguments for PHI using the
903 values from PROP_VALUE. */
906 replace_phi_args_in (gphi
*phi
, ssa_prop_get_value_fn get_value
)
909 bool replaced
= false;
911 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
913 fprintf (dump_file
, "Folding PHI node: ");
914 print_gimple_stmt (dump_file
, phi
, 0, TDF_SLIM
);
917 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
919 tree arg
= gimple_phi_arg_def (phi
, i
);
921 if (TREE_CODE (arg
) == SSA_NAME
)
923 tree val
= (*get_value
) (arg
);
925 if (val
&& val
!= arg
&& may_propagate_copy (arg
, val
))
927 edge e
= gimple_phi_arg_edge (phi
, i
);
929 if (TREE_CODE (val
) != SSA_NAME
)
930 prop_stats
.num_const_prop
++;
932 prop_stats
.num_copy_prop
++;
934 propagate_value (PHI_ARG_DEF_PTR (phi
, i
), val
);
937 /* If we propagated a copy and this argument flows
938 through an abnormal edge, update the replacement
940 if (TREE_CODE (val
) == SSA_NAME
941 && e
->flags
& EDGE_ABNORMAL
942 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
))
944 /* This can only occur for virtual operands, since
945 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
946 would prevent replacement. */
947 gcc_checking_assert (virtual_operand_p (val
));
948 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
) = 1;
954 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
957 fprintf (dump_file
, "No folding possible\n");
960 fprintf (dump_file
, "Folded into: ");
961 print_gimple_stmt (dump_file
, phi
, 0, TDF_SLIM
);
962 fprintf (dump_file
, "\n");
970 class substitute_and_fold_dom_walker
: public dom_walker
973 substitute_and_fold_dom_walker (cdi_direction direction
,
974 ssa_prop_get_value_fn get_value_fn_
,
975 ssa_prop_fold_stmt_fn fold_fn_
,
977 : dom_walker (direction
), get_value_fn (get_value_fn_
),
978 fold_fn (fold_fn_
), do_dce (do_dce_
), something_changed (false)
980 stmts_to_remove
.create (0);
981 stmts_to_fixup
.create (0);
982 need_eh_cleanup
= BITMAP_ALLOC (NULL
);
984 ~substitute_and_fold_dom_walker ()
986 stmts_to_remove
.release ();
987 stmts_to_fixup
.release ();
988 BITMAP_FREE (need_eh_cleanup
);
991 virtual edge
before_dom_children (basic_block
);
992 virtual void after_dom_children (basic_block
) {}
994 ssa_prop_get_value_fn get_value_fn
;
995 ssa_prop_fold_stmt_fn fold_fn
;
997 bool something_changed
;
998 vec
<gimple
*> stmts_to_remove
;
999 vec
<gimple
*> stmts_to_fixup
;
1000 bitmap need_eh_cleanup
;
1004 substitute_and_fold_dom_walker::before_dom_children (basic_block bb
)
1006 /* Propagate known values into PHI nodes. */
1007 for (gphi_iterator i
= gsi_start_phis (bb
);
1011 gphi
*phi
= i
.phi ();
1012 tree res
= gimple_phi_result (phi
);
1013 if (virtual_operand_p (res
))
1016 && res
&& TREE_CODE (res
) == SSA_NAME
)
1018 tree sprime
= get_value_fn (res
);
1021 && may_propagate_copy (res
, sprime
))
1023 stmts_to_remove
.safe_push (phi
);
1027 something_changed
|= replace_phi_args_in (phi
, get_value_fn
);
1030 /* Propagate known values into stmts. In some case it exposes
1031 more trivially deletable stmts to walk backward. */
1032 for (gimple_stmt_iterator i
= gsi_start_bb (bb
);
1037 gimple
*stmt
= gsi_stmt (i
);
1039 /* No point propagating into a stmt we have a value for we
1040 can propagate into all uses. Mark it for removal instead. */
1041 tree lhs
= gimple_get_lhs (stmt
);
1043 && lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
1045 tree sprime
= get_value_fn (lhs
);
1048 && may_propagate_copy (lhs
, sprime
)
1049 && !stmt_could_throw_p (stmt
)
1050 && !gimple_has_side_effects (stmt
)
1051 /* We have to leave ASSERT_EXPRs around for jump-threading. */
1052 && (!is_gimple_assign (stmt
)
1053 || gimple_assign_rhs_code (stmt
) != ASSERT_EXPR
))
1055 stmts_to_remove
.safe_push (stmt
);
1060 /* Replace the statement with its folded version and mark it
1062 did_replace
= false;
1063 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1065 fprintf (dump_file
, "Folding statement: ");
1066 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
1069 gimple
*old_stmt
= stmt
;
1070 bool was_noreturn
= (is_gimple_call (stmt
)
1071 && gimple_call_noreturn_p (stmt
));
1073 /* Replace real uses in the statement. */
1074 did_replace
|= replace_uses_in (stmt
, get_value_fn
);
1076 /* If we made a replacement, fold the statement. */
1079 fold_stmt (&i
, follow_single_use_edges
);
1080 stmt
= gsi_stmt (i
);
1081 gimple_set_modified (stmt
, true);
1084 /* Some statements may be simplified using propagator
1085 specific information. Do this before propagating
1086 into the stmt to not disturb pass specific information. */
1089 update_stmt_if_modified (stmt
);
1093 prop_stats
.num_stmts_folded
++;
1094 stmt
= gsi_stmt (i
);
1095 gimple_set_modified (stmt
, true);
1099 /* If this is a control statement the propagator left edges
1100 unexecuted on force the condition in a way consistent with
1101 that. See PR66945 for cases where the propagator can end
1102 up with a different idea of a taken edge than folding
1103 (once undefined behavior is involved). */
1104 if (gimple_code (stmt
) == GIMPLE_COND
)
1106 if ((EDGE_SUCC (bb
, 0)->flags
& EDGE_EXECUTABLE
)
1107 ^ (EDGE_SUCC (bb
, 1)->flags
& EDGE_EXECUTABLE
))
1109 if (((EDGE_SUCC (bb
, 0)->flags
& EDGE_TRUE_VALUE
) != 0)
1110 == ((EDGE_SUCC (bb
, 0)->flags
& EDGE_EXECUTABLE
) != 0))
1111 gimple_cond_make_true (as_a
<gcond
*> (stmt
));
1113 gimple_cond_make_false (as_a
<gcond
*> (stmt
));
1114 gimple_set_modified (stmt
, true);
1122 /* If we cleaned up EH information from the statement,
1124 if (maybe_clean_or_replace_eh_stmt (old_stmt
, stmt
))
1125 bitmap_set_bit (need_eh_cleanup
, bb
->index
);
1127 /* If we turned a not noreturn call into a noreturn one
1128 schedule it for fixup. */
1130 && is_gimple_call (stmt
)
1131 && gimple_call_noreturn_p (stmt
))
1132 stmts_to_fixup
.safe_push (stmt
);
1134 if (gimple_assign_single_p (stmt
))
1136 tree rhs
= gimple_assign_rhs1 (stmt
);
1138 if (TREE_CODE (rhs
) == ADDR_EXPR
)
1139 recompute_tree_invariant_for_addr_expr (rhs
);
1142 /* Determine what needs to be done to update the SSA form. */
1143 update_stmt_if_modified (stmt
);
1144 if (!is_gimple_debug (stmt
))
1145 something_changed
= true;
1148 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1152 fprintf (dump_file
, "Folded into: ");
1153 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
1154 fprintf (dump_file
, "\n");
1157 fprintf (dump_file
, "Not folded\n");
1165 /* Perform final substitution and folding of propagated values.
1167 PROP_VALUE[I] contains the single value that should be substituted
1168 at every use of SSA name N_I. If PROP_VALUE is NULL, no values are
1171 If FOLD_FN is non-NULL the function will be invoked on all statements
1172 before propagating values for pass specific simplification.
1174 DO_DCE is true if trivially dead stmts can be removed.
1176 If DO_DCE is true, the statements within a BB are walked from
1177 last to first element. Otherwise we scan from first to last element.
1179 Return TRUE when something changed. */
1182 substitute_and_fold (ssa_prop_get_value_fn get_value_fn
,
1183 ssa_prop_fold_stmt_fn fold_fn
,
1186 gcc_assert (get_value_fn
);
1188 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1189 fprintf (dump_file
, "\nSubstituting values and folding statements\n\n");
1191 memset (&prop_stats
, 0, sizeof (prop_stats
));
1193 calculate_dominance_info (CDI_DOMINATORS
);
1194 substitute_and_fold_dom_walker
walker(CDI_DOMINATORS
,
1195 get_value_fn
, fold_fn
, do_dce
);
1196 walker
.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
1198 /* We cannot remove stmts during the BB walk, especially not release
1199 SSA names there as that destroys the lattice of our callers.
1200 Remove stmts in reverse order to make debug stmt creation possible. */
1201 while (!walker
.stmts_to_remove
.is_empty ())
1203 gimple
*stmt
= walker
.stmts_to_remove
.pop ();
1204 if (dump_file
&& dump_flags
& TDF_DETAILS
)
1206 fprintf (dump_file
, "Removing dead stmt ");
1207 print_gimple_stmt (dump_file
, stmt
, 0, 0);
1208 fprintf (dump_file
, "\n");
1210 prop_stats
.num_dce
++;
1211 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
1212 if (gimple_code (stmt
) == GIMPLE_PHI
)
1213 remove_phi_node (&gsi
, true);
1216 unlink_stmt_vdef (stmt
);
1217 gsi_remove (&gsi
, true);
1218 release_defs (stmt
);
1222 if (!bitmap_empty_p (walker
.need_eh_cleanup
))
1223 gimple_purge_all_dead_eh_edges (walker
.need_eh_cleanup
);
1225 /* Fixup stmts that became noreturn calls. This may require splitting
1226 blocks and thus isn't possible during the dominator walk. Do this
1227 in reverse order so we don't inadvertedly remove a stmt we want to
1228 fixup by visiting a dominating now noreturn call first. */
1229 while (!walker
.stmts_to_fixup
.is_empty ())
1231 gimple
*stmt
= walker
.stmts_to_fixup
.pop ();
1232 if (dump_file
&& dump_flags
& TDF_DETAILS
)
1234 fprintf (dump_file
, "Fixing up noreturn call ");
1235 print_gimple_stmt (dump_file
, stmt
, 0, 0);
1236 fprintf (dump_file
, "\n");
1238 fixup_noreturn_call (stmt
);
1241 statistics_counter_event (cfun
, "Constants propagated",
1242 prop_stats
.num_const_prop
);
1243 statistics_counter_event (cfun
, "Copies propagated",
1244 prop_stats
.num_copy_prop
);
1245 statistics_counter_event (cfun
, "Statements folded",
1246 prop_stats
.num_stmts_folded
);
1247 statistics_counter_event (cfun
, "Statements deleted",
1248 prop_stats
.num_dce
);
1250 return walker
.something_changed
;
1254 /* Return true if we may propagate ORIG into DEST, false otherwise. */
1257 may_propagate_copy (tree dest
, tree orig
)
1259 tree type_d
= TREE_TYPE (dest
);
1260 tree type_o
= TREE_TYPE (orig
);
1262 /* If ORIG is a default definition which flows in from an abnormal edge
1263 then the copy can be propagated. It is important that we do so to avoid
1264 uninitialized copies. */
1265 if (TREE_CODE (orig
) == SSA_NAME
1266 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig
)
1267 && SSA_NAME_IS_DEFAULT_DEF (orig
)
1268 && (SSA_NAME_VAR (orig
) == NULL_TREE
1269 || TREE_CODE (SSA_NAME_VAR (orig
)) == VAR_DECL
))
1271 /* Otherwise if ORIG just flows in from an abnormal edge then the copy cannot
1273 else if (TREE_CODE (orig
) == SSA_NAME
1274 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig
))
1276 /* Similarly if DEST flows in from an abnormal edge then the copy cannot be
1278 else if (TREE_CODE (dest
) == SSA_NAME
1279 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (dest
))
1282 /* Do not copy between types for which we *do* need a conversion. */
1283 if (!useless_type_conversion_p (type_d
, type_o
))
1286 /* Generally propagating virtual operands is not ok as that may
1287 create overlapping life-ranges. */
1288 if (TREE_CODE (dest
) == SSA_NAME
&& virtual_operand_p (dest
))
1291 /* Anything else is OK. */
1295 /* Like may_propagate_copy, but use as the destination expression
1296 the principal expression (typically, the RHS) contained in
1297 statement DEST. This is more efficient when working with the
1298 gimple tuples representation. */
1301 may_propagate_copy_into_stmt (gimple
*dest
, tree orig
)
1306 /* If the statement is a switch or a single-rhs assignment,
1307 then the expression to be replaced by the propagation may
1308 be an SSA_NAME. Fortunately, there is an explicit tree
1309 for the expression, so we delegate to may_propagate_copy. */
1311 if (gimple_assign_single_p (dest
))
1312 return may_propagate_copy (gimple_assign_rhs1 (dest
), orig
);
1313 else if (gswitch
*dest_swtch
= dyn_cast
<gswitch
*> (dest
))
1314 return may_propagate_copy (gimple_switch_index (dest_swtch
), orig
);
1316 /* In other cases, the expression is not materialized, so there
1317 is no destination to pass to may_propagate_copy. On the other
1318 hand, the expression cannot be an SSA_NAME, so the analysis
1321 if (TREE_CODE (orig
) == SSA_NAME
1322 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig
))
1325 if (is_gimple_assign (dest
))
1326 type_d
= TREE_TYPE (gimple_assign_lhs (dest
));
1327 else if (gimple_code (dest
) == GIMPLE_COND
)
1328 type_d
= boolean_type_node
;
1329 else if (is_gimple_call (dest
)
1330 && gimple_call_lhs (dest
) != NULL_TREE
)
1331 type_d
= TREE_TYPE (gimple_call_lhs (dest
));
1335 type_o
= TREE_TYPE (orig
);
1337 if (!useless_type_conversion_p (type_d
, type_o
))
1343 /* Similarly, but we know that we're propagating into an ASM_EXPR. */
1346 may_propagate_copy_into_asm (tree dest ATTRIBUTE_UNUSED
)
1352 /* Common code for propagate_value and replace_exp.
1354 Replace use operand OP_P with VAL. FOR_PROPAGATION indicates if the
1355 replacement is done to propagate a value or not. */
1358 replace_exp_1 (use_operand_p op_p
, tree val
,
1359 bool for_propagation ATTRIBUTE_UNUSED
)
1363 tree op
= USE_FROM_PTR (op_p
);
1364 gcc_assert (!(for_propagation
1365 && TREE_CODE (op
) == SSA_NAME
1366 && TREE_CODE (val
) == SSA_NAME
1367 && !may_propagate_copy (op
, val
)));
1370 if (TREE_CODE (val
) == SSA_NAME
)
1371 SET_USE (op_p
, val
);
1373 SET_USE (op_p
, unshare_expr (val
));
1377 /* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
1378 into the operand pointed to by OP_P.
1380 Use this version for const/copy propagation as it will perform additional
1381 checks to ensure validity of the const/copy propagation. */
1384 propagate_value (use_operand_p op_p
, tree val
)
1386 replace_exp_1 (op_p
, val
, true);
1389 /* Replace *OP_P with value VAL (assumed to be a constant or another SSA_NAME).
1391 Use this version when not const/copy propagating values. For example,
1392 PRE uses this version when building expressions as they would appear
1393 in specific blocks taking into account actions of PHI nodes.
1395 The statement in which an expression has been replaced should be
1396 folded using fold_stmt_inplace. */
1399 replace_exp (use_operand_p op_p
, tree val
)
1401 replace_exp_1 (op_p
, val
, false);
1405 /* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
1406 into the tree pointed to by OP_P.
1408 Use this version for const/copy propagation when SSA operands are not
1409 available. It will perform the additional checks to ensure validity of
1410 the const/copy propagation, but will not update any operand information.
1411 Be sure to mark the stmt as modified. */
1414 propagate_tree_value (tree
*op_p
, tree val
)
1416 if (TREE_CODE (val
) == SSA_NAME
)
1419 *op_p
= unshare_expr (val
);
1423 /* Like propagate_tree_value, but use as the operand to replace
1424 the principal expression (typically, the RHS) contained in the
1425 statement referenced by iterator GSI. Note that it is not
1426 always possible to update the statement in-place, so a new
1427 statement may be created to replace the original. */
1430 propagate_tree_value_into_stmt (gimple_stmt_iterator
*gsi
, tree val
)
1432 gimple
*stmt
= gsi_stmt (*gsi
);
1434 if (is_gimple_assign (stmt
))
1436 tree expr
= NULL_TREE
;
1437 if (gimple_assign_single_p (stmt
))
1438 expr
= gimple_assign_rhs1 (stmt
);
1439 propagate_tree_value (&expr
, val
);
1440 gimple_assign_set_rhs_from_tree (gsi
, expr
);
1442 else if (gcond
*cond_stmt
= dyn_cast
<gcond
*> (stmt
))
1444 tree lhs
= NULL_TREE
;
1445 tree rhs
= build_zero_cst (TREE_TYPE (val
));
1446 propagate_tree_value (&lhs
, val
);
1447 gimple_cond_set_code (cond_stmt
, NE_EXPR
);
1448 gimple_cond_set_lhs (cond_stmt
, lhs
);
1449 gimple_cond_set_rhs (cond_stmt
, rhs
);
1451 else if (is_gimple_call (stmt
)
1452 && gimple_call_lhs (stmt
) != NULL_TREE
)
1454 tree expr
= NULL_TREE
;
1456 propagate_tree_value (&expr
, val
);
1457 res
= update_call_from_tree (gsi
, expr
);
1460 else if (gswitch
*swtch_stmt
= dyn_cast
<gswitch
*> (stmt
))
1461 propagate_tree_value (gimple_switch_index_ptr (swtch_stmt
), val
);