/* Generic SSA value propagation engine.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by the
   Free Software Foundation; either version 3, or (at your option) any
   later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "function.h"
#include "gimple-pretty-print.h"
#include "dumpfile.h"
#include "sbitmap.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-eh.h"
#include "gimple-fold.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "vec.h"
#include "value-prof.h"
/* This file implements a generic value propagation engine based on
   the same propagation used by the SSA-CCP algorithm [1].

   Propagation is performed by simulating the execution of every
   statement that produces the value being propagated.  Simulation
   proceeds as follows:

   1- Initially, all edges of the CFG are marked not executable and
      the CFG worklist is seeded with all the statements in the entry
      basic block (block 0).

   2- Every statement S is simulated with a call to the call-back
      function SSA_PROP_VISIT_STMT.  This evaluation may produce 3
      results:

      SSA_PROP_NOT_INTERESTING: Statement S produces nothing of
          interest and does not affect any of the work lists.

      SSA_PROP_VARYING: The value produced by S cannot be determined
          at compile time.  Further simulation of S is not required.
          If S is a conditional jump, all the outgoing edges for the
          block are considered executable and added to the work
          list.

      SSA_PROP_INTERESTING: S produces a value that can be computed
          at compile time.  Its result can be propagated into the
          statements that feed from S.  Furthermore, if S is a
          conditional jump, only the edge known to be taken is added
          to the work list.  Edges that are known not to execute are
          never simulated.

   3- PHI nodes are simulated with a call to SSA_PROP_VISIT_PHI.  The
      return value from SSA_PROP_VISIT_PHI has the same semantics as
      described in #2.

   4- Three work lists are kept.  Statements are only added to these
      lists if they produce one of SSA_PROP_INTERESTING or
      SSA_PROP_VARYING.

        CFG_BLOCKS contains the list of blocks to be simulated.
        Blocks are added to this list if their incoming edges are
        found executable.

        VARYING_SSA_EDGES contains the list of statements that feed
        from statements that produce an SSA_PROP_VARYING result.
        These are simulated first to speed up processing.

        INTERESTING_SSA_EDGES contains the list of statements that
        feed from statements that produce an SSA_PROP_INTERESTING
        result.

   5- Simulation terminates when all three work lists are drained.

   Before calling ssa_propagate, it is important to set
   prop_simulate_again_p for every statement in the program that
   should be simulated.  This initialization allows an implementation
   to specify which statements should never be simulated.

   It is also important to compute def-use information before calling
   ssa_propagate.

   References:

     [1] Constant propagation with conditional branches,
         Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     [2] Building an Optimizing Compiler,
         Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     [3] Advanced Compiler Design and Implementation,
         Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
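/* For illustration only (this sketch is not part of the original file):
   the shape of the two call-backs a client pass hands to ssa_propagate.
   EXAMPLE_VISIT_STMT and EXAMPLE_VISIT_PHI are assumed names; a real
   pass would consult and update its own lattice where noted.  */

static enum ssa_prop_result
example_visit_stmt (gimple stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
		    tree *output_p)
{
  tree lhs;

  /* Statements the pass cannot analyze move to VARYING, which tells the
     engine to stop simulating them and to add all outgoing edges of
     their block to the work list (see #2 above).  */
  if (!is_gimple_assign (stmt))
    return SSA_PROP_VARYING;

  lhs = gimple_assign_lhs (stmt);
  if (TREE_CODE (lhs) != SSA_NAME)
    return SSA_PROP_VARYING;

  /* A real pass would evaluate the RHS against its lattice here.
     Reporting the defined name in *OUTPUT_P makes the engine queue the
     statements that feed from it.  */
  *output_p = lhs;
  return SSA_PROP_INTERESTING;
}

static enum ssa_prop_result
example_visit_phi (gimple phi ATTRIBUTE_UNUSED)
{
  /* A real pass would merge the lattice values of the PHI arguments
     here and return SSA_PROP_INTERESTING or SSA_PROP_VARYING when the
     result changed.  */
  return SSA_PROP_NOT_INTERESTING;
}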
/* Function pointers used to parameterize the propagation engine.  */
static ssa_prop_visit_stmt_fn ssa_prop_visit_stmt;
static ssa_prop_visit_phi_fn ssa_prop_visit_phi;
/* Keep track of statements that have been added to one of the SSA
   edges worklists.  This flag is used to avoid visiting statements
   unnecessarily when draining an SSA edge worklist.  If while
   simulating a basic block, we find a statement with
   STMT_IN_SSA_EDGE_WORKLIST set, we clear it to prevent SSA edge
   processing from visiting it again.

   NOTE: users of the propagation engine are not allowed to use
   the GF_PLF_1 flag.  */
#define STMT_IN_SSA_EDGE_WORKLIST	GF_PLF_1
/* A bitmap to keep track of executable blocks in the CFG.  */
static sbitmap executable_blocks;

/* Array of basic blocks on the worklist, used as a circular queue.  */
static vec<basic_block> cfg_blocks;

static unsigned int cfg_blocks_num = 0;
static int cfg_blocks_tail;
static int cfg_blocks_head;

static sbitmap bb_in_list;

/* Worklist of SSA edges which will need reexamination as their
   definition has changed.  SSA edges are def-use edges in the SSA
   web.  For each D-U edge, we store the target statement or PHI node
   U.  */
static GTY(()) vec<gimple, va_gc> *interesting_ssa_edges;
/* Identical to INTERESTING_SSA_EDGES.  For performance reasons, the
   list of SSA edges is split into two.  One contains all SSA edges
   that need to be reexamined because their lattice value changed to
   varying (this worklist), and the other contains all other SSA edges
   to be reexamined (INTERESTING_SSA_EDGES).

   Since most values in the program are VARYING, the ideal situation
   is to move them to that lattice value as quickly as possible.
   Thus, it doesn't make sense to process any other type of lattice
   value until all VARYING values are propagated fully, which is one
   thing using the VARYING worklist achieves.  In addition, if we
   don't use a separate worklist for VARYING edges, we end up with
   situations where lattice values move from
   UNDEFINED->INTERESTING->VARYING instead of UNDEFINED->VARYING.  */
static GTY(()) vec<gimple, va_gc> *varying_ssa_edges;
/* Return true if the block worklist is empty.  */

static bool
cfg_blocks_empty_p (void)
{
  return (cfg_blocks_num == 0);
}
/* Add a basic block to the worklist.  The block must not be already
   in the worklist, and it must not be the ENTRY or EXIT block.  */

static void
cfg_blocks_add (basic_block bb)
{
  bool head = false;

  gcc_assert (bb != ENTRY_BLOCK_PTR && bb != EXIT_BLOCK_PTR);
  gcc_assert (!bitmap_bit_p (bb_in_list, bb->index));

  if (cfg_blocks_empty_p ())
    {
      cfg_blocks_tail = cfg_blocks_head = 0;
      cfg_blocks_num = 1;
    }
  else
    {
      cfg_blocks_num++;
      if (cfg_blocks_num > cfg_blocks.length ())
	{
	  /* We have to grow the array now.  Adjust the queue to occupy
	     the full space of the original array.  We do not need to
	     initialize the newly allocated portion of the array
	     because we keep track of CFG_BLOCKS_HEAD and
	     CFG_BLOCKS_TAIL.  */
	  cfg_blocks_tail = cfg_blocks.length ();
	  cfg_blocks_head = 0;
	  cfg_blocks.safe_grow (2 * cfg_blocks_tail);
	}
      /* Minor optimization: we prefer to see blocks with more
	 predecessors later, because there is more of a chance that
	 the incoming edges will be executable.  */
      else if (EDGE_COUNT (bb->preds)
	       >= EDGE_COUNT (cfg_blocks[cfg_blocks_head]->preds))
	cfg_blocks_tail = ((cfg_blocks_tail + 1) % cfg_blocks.length ());
      else
	{
	  if (cfg_blocks_head == 0)
	    cfg_blocks_head = cfg_blocks.length ();
	  --cfg_blocks_head;
	  head = true;
	}
    }

  cfg_blocks[head ? cfg_blocks_head : cfg_blocks_tail] = bb;
  bitmap_set_bit (bb_in_list, bb->index);
}
/* Remove a block from the worklist.  */

static basic_block
cfg_blocks_get (void)
{
  basic_block bb;

  bb = cfg_blocks[cfg_blocks_head];

  gcc_assert (!cfg_blocks_empty_p ());
  gcc_assert (bb);

  cfg_blocks_head = ((cfg_blocks_head + 1) % cfg_blocks.length ());
  --cfg_blocks_num;
  bitmap_clear_bit (bb_in_list, bb->index);

  return bb;
}
/* We have just defined a new value for VAR.  If IS_VARYING is true,
   add all immediate uses of VAR to VARYING_SSA_EDGES, otherwise add
   them to INTERESTING_SSA_EDGES.  */

static void
add_ssa_edge (tree var, bool is_varying)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      gimple use_stmt = USE_STMT (use_p);

      if (prop_simulate_again_p (use_stmt)
	  && !gimple_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST))
	{
	  gimple_set_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST, true);
	  if (is_varying)
	    vec_safe_push (varying_ssa_edges, use_stmt);
	  else
	    vec_safe_push (interesting_ssa_edges, use_stmt);
	}
    }
}
/* Add edge E to the control flow worklist.  */

static void
add_control_edge (edge e)
{
  basic_block bb = e->dest;
  if (bb == EXIT_BLOCK_PTR)
    return;

  /* If the edge had already been executed, skip it.  */
  if (e->flags & EDGE_EXECUTABLE)
    return;

  e->flags |= EDGE_EXECUTABLE;

  /* If the block is already in the list, we're done.  */
  if (bitmap_bit_p (bb_in_list, bb->index))
    return;

  cfg_blocks_add (bb);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Adding destination of edge (%d -> %d) to worklist\n\n",
	     e->src->index, e->dest->index);
}
/* Simulate the execution of STMT and update the work lists accordingly.  */

static void
simulate_stmt (gimple stmt)
{
  enum ssa_prop_result val = SSA_PROP_NOT_INTERESTING;
  edge taken_edge = NULL;
  tree output_name = NULL_TREE;

  /* Don't bother visiting statements that are already
     considered varying by the propagator.  */
  if (!prop_simulate_again_p (stmt))
    return;

  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      val = ssa_prop_visit_phi (stmt);
      output_name = gimple_phi_result (stmt);
    }
  else
    val = ssa_prop_visit_stmt (stmt, &taken_edge, &output_name);

  if (val == SSA_PROP_VARYING)
    {
      prop_set_simulate_again (stmt, false);

      /* If the statement produced a new varying value, add the SSA
	 edges coming out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name, true);

      /* If STMT transfers control out of its basic block, add
	 all outgoing edges to the work list.  */
      if (stmt_ends_bb_p (stmt))
	{
	  edge e;
	  edge_iterator ei;
	  basic_block bb = gimple_bb (stmt);
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    add_control_edge (e);
	}
    }
  else if (val == SSA_PROP_INTERESTING)
    {
      /* If the statement produced a new value, add the SSA edges coming
	 out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name, false);

      /* If we know which edge is going to be taken out of this block,
	 add it to the CFG work list.  */
      if (taken_edge)
	add_control_edge (taken_edge);
    }
}
/* Process an SSA edge worklist.  WORKLIST is the SSA edge worklist to
   drain.  This pops statements off the given WORKLIST and processes
   them until there are no more statements on WORKLIST.
   We take a pointer to WORKLIST because it may be reallocated when an
   SSA edge is added to it in simulate_stmt.  */

static void
process_ssa_edge_worklist (vec<gimple, va_gc> **worklist)
{
  /* Drain the entire worklist.  */
  while ((*worklist)->length () > 0)
    {
      basic_block bb;

      /* Pull the statement to simulate off the worklist.  */
      gimple stmt = (*worklist)->pop ();

      /* If this statement was already visited by simulate_block, then
	 we don't need to visit it again here.  */
      if (!gimple_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST))
	continue;

      /* STMT is no longer in a worklist.  */
      gimple_set_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST, false);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "\nSimulating statement (from ssa_edges): ");
	  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	}

      bb = gimple_bb (stmt);

      /* PHI nodes are always visited, regardless of whether or not
	 the destination block is executable.  Otherwise, visit the
	 statement only if its block is marked executable.  */
      if (gimple_code (stmt) == GIMPLE_PHI
	  || bitmap_bit_p (executable_blocks, bb->index))
	simulate_stmt (stmt);
    }
}
/* Simulate the execution of BLOCK.  Evaluate the statement associated
   with each variable reference inside the block.  */

static void
simulate_block (basic_block block)
{
  gimple_stmt_iterator gsi;

  /* There is nothing to do for the exit block.  */
  if (block == EXIT_BLOCK_PTR)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSimulating block %d\n", block->index);

  /* Always simulate PHI nodes, even if we have simulated this block
     before.  */
  for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
    simulate_stmt (gsi_stmt (gsi));

  /* If this is the first time we've simulated this block, then we
     must simulate each of its statements.  */
  if (!bitmap_bit_p (executable_blocks, block->index))
    {
      gimple_stmt_iterator j;
      unsigned int normal_edge_count;
      edge e, normal_edge;
      edge_iterator ei;

      /* Note that we have simulated this block.  */
      bitmap_set_bit (executable_blocks, block->index);

      for (j = gsi_start_bb (block); !gsi_end_p (j); gsi_next (&j))
	{
	  gimple stmt = gsi_stmt (j);

	  /* If this statement is already in the worklist then
	     "cancel" it.  The reevaluation implied by the worklist
	     entry will produce the same value we generate here and
	     thus reevaluating it again from the worklist is
	     pointless.  */
	  if (gimple_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST))
	    gimple_set_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST, false);

	  simulate_stmt (stmt);
	}

      /* We cannot predict when abnormal and EH edges will be executed, so
	 once a block is considered executable, we consider any
	 outgoing abnormal edges as executable.

	 TODO: This is not exactly true.  Simplifying a statement might
	 prove it non-throwing, and a computed goto can be handled
	 when its destination is known.

	 At the same time, if this block has only one successor that is
	 reached by non-abnormal edges, then add that successor to the
	 worklist.  */
      normal_edge_count = 0;
      normal_edge = NULL;
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
	    add_control_edge (e);
	  else
	    {
	      normal_edge_count++;
	      normal_edge = e;
	    }
	}

      if (normal_edge_count == 1)
	add_control_edge (normal_edge);
    }
}
/* Initialize local data structures and work lists.  */

static void
ssa_prop_init (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb;

  /* Worklists of SSA edges.  */
  vec_alloc (interesting_ssa_edges, 20);
  vec_alloc (varying_ssa_edges, 20);

  executable_blocks = sbitmap_alloc (last_basic_block);
  bitmap_clear (executable_blocks);

  bb_in_list = sbitmap_alloc (last_basic_block);
  bitmap_clear (bb_in_list);

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_immediate_uses (dump_file);

  cfg_blocks.create (20);
  cfg_blocks.safe_grow_cleared (20);

  /* Initially assume that every edge in the CFG is not executable
     (including the edges coming out of ENTRY_BLOCK_PTR).  */
  FOR_ALL_BB (bb)
    {
      gimple_stmt_iterator si;

      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	gimple_set_plf (gsi_stmt (si), STMT_IN_SSA_EDGE_WORKLIST, false);

      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
	gimple_set_plf (gsi_stmt (si), STMT_IN_SSA_EDGE_WORKLIST, false);

      FOR_EACH_EDGE (e, ei, bb->succs)
	e->flags &= ~EDGE_EXECUTABLE;
    }

  /* Seed the algorithm by adding the successors of the entry block to the
     edge worklist.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    add_control_edge (e);
}
/* Free allocated storage.  */

static void
ssa_prop_fini (void)
{
  vec_free (interesting_ssa_edges);
  vec_free (varying_ssa_edges);
  cfg_blocks.release ();
  sbitmap_free (bb_in_list);
  sbitmap_free (executable_blocks);
}
/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.c
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning any
   attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_binary:
    case tcc_comparison:
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if (((code == VEC_COND_EXPR || code == COND_EXPR)
		   ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
		   : !is_gimple_val (TREE_OPERAND (expr, 0)))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
/* Return true if EXPR is a CALL_EXPR suitable for representation
   as a single GIMPLE_CALL statement.  If the arguments require
   further gimplification, return false.  */

static bool
valid_gimple_call_p (tree expr)
{
  unsigned i, nargs;

  if (TREE_CODE (expr) != CALL_EXPR)
    return false;

  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    {
      tree arg = CALL_EXPR_ARG (expr, i);
      if (is_gimple_reg_type (TREE_TYPE (arg)))
	{
	  if (!is_gimple_val (arg))
	    return false;
	}
      else
	if (!is_gimple_lvalue (arg))
	  return false;
    }

  return true;
}
/* Make SSA names defined by OLD_STMT point to NEW_STMT
   as their defining statement.  */

static void
move_ssa_defining_stmt_for_defs (gimple new_stmt, gimple old_stmt)
{
  tree var;
  ssa_op_iter iter;

  if (gimple_in_ssa_p (cfun))
    {
      /* Make defined SSA_NAMEs point to the new
	 statement as their definition.  */
      FOR_EACH_SSA_TREE_OPERAND (var, old_stmt, iter, SSA_OP_ALL_DEFS)
	{
	  if (TREE_CODE (var) == SSA_NAME)
	    SSA_NAME_DEF_STMT (var) = new_stmt;
	}
    }
}
/* Helper function for update_gimple_call and update_call_from_tree.
   A GIMPLE_CALL STMT is being replaced with GIMPLE_CALL NEW_STMT.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple new_stmt,
			   gimple stmt)
{
  gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}
/* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
   with number of arguments NARGS, where the arguments in GIMPLE form
   follow the NARGS argument.  */

void
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gimple new_stmt, stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
}
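/* For illustration only (not part of the original file): a typical use
   of update_gimple_call.  The builtin choice and the DEST/SRC/LEN
   operands are assumed placeholders; the trailing arguments must
   already be valid GIMPLE values.  */

static void
example_retarget_call (gimple_stmt_iterator *gsi, tree dest, tree src,
		       tree len)
{
  /* Rewrite the call at *GSI into "memcpy (dest, src, len)".  */
  update_gimple_call (gsi, builtin_decl_implicit (BUILT_IN_MEMCPY),
		      3, dest, src, len);
}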
/* Update a GIMPLE_CALL statement at iterator *SI_P to reflect the
   value of EXPR, which is expected to be the result of folding the
   call.  This can only be done if EXPR is a CALL_EXPR with valid
   GIMPLE operands as arguments, or if it is a suitable RHS expression
   for a GIMPLE_ASSIGN.  More complex expressions will require
   gimplification, which will introduce additional statements.  In this
   event, no update is performed, and the function returns false.
   Note that we cannot mutate a GIMPLE_CALL in-place, so we always
   replace the statement at *SI_P with an entirely new statement.
   The new statement need not be a call, e.g., if the original call
   folded to a constant.  */

bool
update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  gimple stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gimple new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();

      return true;
    }
  else if (valid_gimple_rhs_p (expr))
    {
      tree lhs = gimple_call_lhs (stmt);
      gimple new_stmt;

      /* The call has simplified to an expression
	 that cannot be represented as a GIMPLE_CALL.  */
      if (lhs)
	{
	  /* A value is expected.
	     Introduce a new GIMPLE_ASSIGN statement.  */
	  STRIP_USELESS_TYPE_CONVERSION (expr);
	  new_stmt = gimple_build_assign (lhs, expr);
	  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	}
      else if (!TREE_SIDE_EFFECTS (expr))
	{
	  /* No value is expected, and EXPR has no effect.
	     Replace it with an empty statement.  */
	  new_stmt = gimple_build_nop ();
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	}
      else
	{
	  /* No value is expected, but EXPR has an effect,
	     e.g., it could be a reference to a volatile
	     variable.  Create an assignment statement
	     with a dummy (unused) lhs variable.  */
	  STRIP_USELESS_TYPE_CONVERSION (expr);
	  if (gimple_in_ssa_p (cfun))
	    lhs = make_ssa_name (TREE_TYPE (expr), NULL);
	  else
	    lhs = create_tmp_var (TREE_TYPE (expr), NULL);
	  new_stmt = gimple_build_assign (lhs, expr);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
	}
      gimple_set_location (new_stmt, gimple_location (stmt));
      gsi_replace (si_p, new_stmt, false);
      return true;
    }
  else
    /* The call simplified to an expression that is
       not a valid GIMPLE RHS.  */
    return false;
}
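/* For illustration only (not part of the original file): how a folding
   pass typically consumes update_call_from_tree.  EXAMPLE_FOLD_RESULT
   stands for whatever tree the folder produced for the call at *GSI.  */

static bool
example_replace_folded_call (gimple_stmt_iterator *gsi,
			     tree example_fold_result)
{
  /* When the folded form would need further gimplification, nothing is
     replaced and false comes back; the caller keeps the original call
     or gimplifies the tree itself.  */
  return update_call_from_tree (gsi, example_fold_result);
}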
/* Entry point to the propagation engine.

   VISIT_STMT is called for every statement visited.
   VISIT_PHI is called for every PHI node visited.  */

void
ssa_propagate (ssa_prop_visit_stmt_fn visit_stmt,
	       ssa_prop_visit_phi_fn visit_phi)
{
  ssa_prop_visit_stmt = visit_stmt;
  ssa_prop_visit_phi = visit_phi;

  ssa_prop_init ();

  /* Iterate until the worklists are empty.  */
  while (!cfg_blocks_empty_p ()
	 || interesting_ssa_edges->length () > 0
	 || varying_ssa_edges->length () > 0)
    {
      if (!cfg_blocks_empty_p ())
	{
	  /* Pull the next block to simulate off the worklist.  */
	  basic_block dest_block = cfg_blocks_get ();
	  simulate_block (dest_block);
	}

      /* In order to move things to varying as quickly as
	 possible, process the VARYING_SSA_EDGES worklist first.  */
      process_ssa_edge_worklist (&varying_ssa_edges);

      /* Now process the INTERESTING_SSA_EDGES worklist.  */
      process_ssa_edge_worklist (&interesting_ssa_edges);
    }

  ssa_prop_fini ();
}
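/* For illustration only (not part of the original file): a client pass
   drives the engine by first marking which statements may be simulated
   and then handing over its call-backs (the example_visit_* sketches
   near the top of this file).  */

static void
example_run_propagation (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  /* ssa_propagate only simulates statements whose
     prop_simulate_again_p flag was set up front.  */
  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	prop_set_simulate_again (gsi_stmt (gsi), true);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	prop_set_simulate_again (gsi_stmt (gsi), true);
    }

  ssa_propagate (example_visit_stmt, example_visit_phi);
}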
/* Return true if STMT is of the form 'mem_ref = RHS', where 'mem_ref'
   is a non-volatile pointer dereference, a structure reference or a
   reference to a single _DECL.  Ignore volatile memory references
   because they are not interesting for the optimizers.  */

bool
stmt_makes_single_store (gimple stmt)
{
  tree lhs;

  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_CALL)
    return false;

  if (!gimple_vdef (stmt))
    return false;

  lhs = gimple_get_lhs (stmt);

  /* A call statement may have a null LHS.  */
  if (!lhs)
    return false;

  return (!TREE_THIS_VOLATILE (lhs)
	  && (DECL_P (lhs)
	      || REFERENCE_CLASS_P (lhs)));
}
/* Propagation statistics.  */
struct prop_stats_d
{
  long num_const_prop;
  long num_copy_prop;
  long num_stmts_folded;
  long num_dce;
};

static struct prop_stats_d prop_stats;
/* Replace USE references in statement STMT with the values stored in
   PROP_VALUE.  Return true if at least one reference was replaced.  */

static bool
replace_uses_in (gimple stmt, ssa_prop_get_value_fn get_value)
{
  bool replaced = false;
  use_operand_p use;
  ssa_op_iter iter;

  FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      tree tuse = USE_FROM_PTR (use);
      tree val = (*get_value) (tuse);

      if (val == tuse || val == NULL_TREE)
	continue;

      if (gimple_code (stmt) == GIMPLE_ASM
	  && !may_propagate_copy_into_asm (tuse))
	continue;

      if (!may_propagate_copy (tuse, val))
	continue;

      if (TREE_CODE (val) != SSA_NAME)
	prop_stats.num_const_prop++;
      else
	prop_stats.num_copy_prop++;

      propagate_value (use, val);

      replaced = true;
    }

  return replaced;
}
/* Replace propagated values into all the arguments for PHI using the
   values from PROP_VALUE.  */

static void
replace_phi_args_in (gimple phi, ssa_prop_get_value_fn get_value)
{
  size_t i;
  bool replaced = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Folding PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree val = (*get_value) (arg);

	  if (val && val != arg && may_propagate_copy (arg, val))
	    {
	      if (TREE_CODE (val) != SSA_NAME)
		prop_stats.num_const_prop++;
	      else
		prop_stats.num_copy_prop++;

	      propagate_value (PHI_ARG_DEF_PTR (phi, i), val);
	      replaced = true;

	      /* If we propagated a copy and this argument flows
		 through an abnormal edge, update the replacement
		 accordingly.  */
	      if (TREE_CODE (val) == SSA_NAME
		  && gimple_phi_arg_edge (phi, i)->flags & EDGE_ABNORMAL)
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
	    }
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (!replaced)
	fprintf (dump_file, "No folding possible\n");
      else
	{
	  fprintf (dump_file, "Folded into: ");
	  print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }
}
/* Perform final substitution and folding of propagated values.

   PROP_VALUE[I] contains the single value that should be substituted
   at every use of SSA name N_I.  If PROP_VALUE is NULL, no values are
   substituted.

   If FOLD_FN is non-NULL the function will be invoked on all statements
   before propagating values for pass specific simplification.

   DO_DCE is true if trivially dead stmts can be removed.

   If DO_DCE is true, the statements within a BB are walked from
   last to first element.  Otherwise we scan from first to last element.

   Return TRUE when something changed.  */

bool
substitute_and_fold (ssa_prop_get_value_fn get_value_fn,
		     ssa_prop_fold_stmt_fn fold_fn,
		     bool do_dce)
{
  basic_block bb;
  bool something_changed = false;
  unsigned i;

  if (!get_value_fn && !fold_fn)
    return false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSubstituting values and folding statements\n\n");

  memset (&prop_stats, 0, sizeof (prop_stats));

  /* Substitute lattice values at definition sites.  */
  if (get_value_fn)
    for (i = 1; i < num_ssa_names; ++i)
      {
	tree name = ssa_name (i);
	tree val;
	gimple def_stmt;
	gimple_stmt_iterator gsi;

	if (!name
	    || virtual_operand_p (name))
	  continue;

	def_stmt = SSA_NAME_DEF_STMT (name);
	if (gimple_nop_p (def_stmt)
	    /* Do not substitute ASSERT_EXPR rhs, this will confuse VRP.  */
	    || (gimple_assign_single_p (def_stmt)
		&& gimple_assign_rhs_code (def_stmt) == ASSERT_EXPR)
	    || !(val = (*get_value_fn) (name))
	    || !may_propagate_copy (name, val))
	  continue;

	gsi = gsi_for_stmt (def_stmt);
	if (is_gimple_assign (def_stmt))
	  {
	    gimple_assign_set_rhs_with_ops (&gsi, TREE_CODE (val),
					    val, NULL_TREE);
	    gcc_assert (gsi_stmt (gsi) == def_stmt);
	    if (maybe_clean_eh_stmt (def_stmt))
	      gimple_purge_dead_eh_edges (gimple_bb (def_stmt));
	    update_stmt (def_stmt);
	  }
	else if (is_gimple_call (def_stmt))
	  {
	    int flags = gimple_call_flags (def_stmt);

	    /* Don't optimize away calls that have side-effects.  */
	    if ((flags & (ECF_CONST|ECF_PURE)) == 0
		|| (flags & ECF_LOOPING_CONST_OR_PURE))
	      continue;
	    if (update_call_from_tree (&gsi, val)
		&& maybe_clean_or_replace_eh_stmt (def_stmt, gsi_stmt (gsi)))
	      gimple_purge_dead_eh_edges (gimple_bb (gsi_stmt (gsi)));
	  }
	else if (gimple_code (def_stmt) == GIMPLE_PHI)
	  {
	    gimple new_stmt = gimple_build_assign (name, val);
	    gimple_stmt_iterator gsi2;
	    gsi2 = gsi_after_labels (gimple_bb (def_stmt));
	    gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
	    remove_phi_node (&gsi, false);
	  }

	something_changed = true;
      }

  /* Propagate into all uses and fold.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      /* Propagate known values into PHI nodes.  */
      if (get_value_fn)
	for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	  replace_phi_args_in (gsi_stmt (i), get_value_fn);

      /* Propagate known values into stmts.  Do a backward walk if
	 do_dce is true.  In some cases it exposes
	 more trivially deletable stmts to walk backward.  */
      for (i = (do_dce ? gsi_last_bb (bb) : gsi_start_bb (bb)); !gsi_end_p (i);)
	{
	  bool did_replace;
	  gimple stmt = gsi_stmt (i);
	  gimple old_stmt;
	  enum gimple_code code = gimple_code (stmt);
	  gimple_stmt_iterator oldi;

	  oldi = i;
	  if (do_dce)
	    gsi_prev (&i);
	  else
	    gsi_next (&i);

	  /* Ignore ASSERT_EXPRs.  They are used by VRP to generate
	     range information for names and they are discarded
	     afterwards.  */
	  if (code == GIMPLE_ASSIGN
	      && TREE_CODE (gimple_assign_rhs1 (stmt)) == ASSERT_EXPR)
	    continue;

	  /* No point propagating into a stmt whose result is not used,
	     but instead we might be able to remove a trivially dead stmt.
	     Don't do this when called from VRP, since the SSA_NAME which
	     is going to be released could still be referenced in VRP
	     ranges.  */
	  if (do_dce
	      && gimple_get_lhs (stmt)
	      && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME
	      && has_zero_uses (gimple_get_lhs (stmt))
	      && !stmt_could_throw_p (stmt)
	      && !gimple_has_side_effects (stmt))
	    {
	      gimple_stmt_iterator i2;

	      if (dump_file && dump_flags & TDF_DETAILS)
		{
		  fprintf (dump_file, "Removing dead stmt ");
		  print_gimple_stmt (dump_file, stmt, 0, 0);
		  fprintf (dump_file, "\n");
		}
	      prop_stats.num_dce++;
	      i2 = gsi_for_stmt (stmt);
	      gsi_remove (&i2, true);
	      release_defs (stmt);
	      continue;
	    }

	  /* Replace the statement with its folded version and mark it
	     folded.  */
	  did_replace = false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Folding statement: ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	    }

	  old_stmt = stmt;

	  /* Some statements may be simplified using propagator
	     specific information.  Do this before propagating
	     into the stmt to not disturb pass specific information.  */
	  if (fold_fn
	      && (*fold_fn) (&oldi))
	    {
	      did_replace = true;
	      prop_stats.num_stmts_folded++;
	      stmt = gsi_stmt (oldi);
	      update_stmt (stmt);
	    }

	  /* Replace real uses in the statement.  */
	  if (get_value_fn)
	    did_replace |= replace_uses_in (stmt, get_value_fn);

	  /* If we made a replacement, fold the statement.  */
	  if (did_replace)
	    fold_stmt (&oldi);

	  /* Now cleanup.  */
	  if (did_replace)
	    {
	      stmt = gsi_stmt (oldi);

	      /* If we cleaned up EH information from the statement,
		 remove EH edges.  */
	      if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
		gimple_purge_dead_eh_edges (bb);

	      if (is_gimple_assign (stmt)
		  && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);

		  if (TREE_CODE (rhs) == ADDR_EXPR)
		    recompute_tree_invariant_for_addr_expr (rhs);
		}

	      /* Determine what needs to be done to update the SSA form.  */
	      update_stmt (stmt);
	      if (!is_gimple_debug (stmt))
		something_changed = true;
	    }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      if (did_replace)
		{
		  fprintf (dump_file, "Folded into: ");
		  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
		  fprintf (dump_file, "\n");
		}
	      else
		fprintf (dump_file, "Not folded\n");
	    }
	}
    }

  statistics_counter_event (cfun, "Constants propagated",
			    prop_stats.num_const_prop);
  statistics_counter_event (cfun, "Copies propagated",
			    prop_stats.num_copy_prop);
  statistics_counter_event (cfun, "Statements folded",
			    prop_stats.num_stmts_folded);
  statistics_counter_event (cfun, "Statements deleted",
			    prop_stats.num_dce);
  return something_changed;
}
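/* For illustration only (not part of the original file): after
   simulation, a pass finalizes by substituting its lattice values and
   folding.  EXAMPLE_GET_VALUE is an assumed placeholder that would map
   an SSA name to its known constant or copy, returning NULL_TREE when
   nothing is known.  */

static tree
example_get_value (tree name ATTRIBUTE_UNUSED)
{
  /* A real pass would look NAME up in its lattice here.  */
  return NULL_TREE;
}

static bool
example_finalize (void)
{
  /* NULL fold_fn: no pass-specific folding; true: trivially dead
     statements may be removed (and blocks are walked backwards).  */
  return substitute_and_fold (example_get_value, NULL, true);
}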
/* Return true if we may propagate ORIG into DEST, false otherwise.  */

bool
may_propagate_copy (tree dest, tree orig)
{
  tree type_d = TREE_TYPE (dest);
  tree type_o = TREE_TYPE (orig);

  /* If ORIG flows in from an abnormal edge, it cannot be propagated.  */
  if (TREE_CODE (orig) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig)
      /* If it is the default definition of an automatic variable then
	 we can propagate it anyway, and it is important that we do, to
	 avoid uninitialized regular copies.  */
      && !(SSA_NAME_IS_DEFAULT_DEF (orig)
	   && (SSA_NAME_VAR (orig) == NULL_TREE
	       || TREE_CODE (SSA_NAME_VAR (orig)) == VAR_DECL)))
    return false;

  /* If DEST is an SSA_NAME that flows from an abnormal edge, then it
     cannot be replaced.  */
  if (TREE_CODE (dest) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (dest))
    return false;

  /* Do not copy between types for which we *do* need a conversion.  */
  if (!useless_type_conversion_p (type_d, type_o))
    return false;

  /* Generally propagating virtual operands is not ok as that may
     create overlapping life-ranges.  */
  if (TREE_CODE (dest) == SSA_NAME && virtual_operand_p (dest))
    return false;

  /* Anything else is OK.  */
  return true;
}
/* Like may_propagate_copy, but use as the destination expression
   the principal expression (typically, the RHS) contained in
   statement DEST.  This is more efficient when working with the
   gimple tuples representation.  */

bool
may_propagate_copy_into_stmt (gimple dest, tree orig)
{
  tree type_d;
  tree type_o;

  /* If the statement is a switch or a single-rhs assignment,
     then the expression to be replaced by the propagation may
     be an SSA_NAME.  Fortunately, there is an explicit tree
     for the expression, so we delegate to may_propagate_copy.  */

  if (gimple_assign_single_p (dest))
    return may_propagate_copy (gimple_assign_rhs1 (dest), orig);
  else if (gimple_code (dest) == GIMPLE_SWITCH)
    return may_propagate_copy (gimple_switch_index (dest), orig);

  /* In other cases, the expression is not materialized, so there
     is no destination to pass to may_propagate_copy.  On the other
     hand, the expression cannot be an SSA_NAME, so the analysis
     is much simpler.  */

  if (TREE_CODE (orig) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
    return false;

  if (is_gimple_assign (dest))
    type_d = TREE_TYPE (gimple_assign_lhs (dest));
  else if (gimple_code (dest) == GIMPLE_COND)
    type_d = boolean_type_node;
  else if (is_gimple_call (dest)
	   && gimple_call_lhs (dest) != NULL_TREE)
    type_d = TREE_TYPE (gimple_call_lhs (dest));
  else
    gcc_unreachable ();

  type_o = TREE_TYPE (orig);

  if (!useless_type_conversion_p (type_d, type_o))
    return false;

  return true;
}
/* Similarly, but we know that we're propagating into an ASM_EXPR.  */

bool
may_propagate_copy_into_asm (tree dest ATTRIBUTE_UNUSED)
{
  return true;
}
/* Common code for propagate_value and replace_exp.

   Replace use operand OP_P with VAL.  FOR_PROPAGATION indicates if the
   replacement is done to propagate a value or not.  */

static void
replace_exp_1 (use_operand_p op_p, tree val,
	       bool for_propagation ATTRIBUTE_UNUSED)
{
#if defined ENABLE_CHECKING
  tree op = USE_FROM_PTR (op_p);

  gcc_assert (!(for_propagation
		&& TREE_CODE (op) == SSA_NAME
		&& TREE_CODE (val) == SSA_NAME
		&& !may_propagate_copy (op, val)));
#endif

  if (TREE_CODE (val) == SSA_NAME)
    SET_USE (op_p, val);
  else
    SET_USE (op_p, unshare_expr (val));
}
/* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
   into the operand pointed to by OP_P.

   Use this version for const/copy propagation as it will perform additional
   checks to ensure validity of the const/copy propagation.  */

void
propagate_value (use_operand_p op_p, tree val)
{
  replace_exp_1 (op_p, val, true);
}
/* Replace *OP_P with value VAL (assumed to be a constant or another SSA_NAME).

   Use this version when not const/copy propagating values.  For example,
   PRE uses this version when building expressions as they would appear
   in specific blocks taking into account actions of PHI nodes.

   The statement in which an expression has been replaced should be
   folded using fold_stmt_inplace.  */

void
replace_exp (use_operand_p op_p, tree val)
{
  replace_exp_1 (op_p, val, false);
}
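/* For illustration only (not part of the original file): per the comment
   above, a statement whose operand was changed with replace_exp should
   be folded in place afterwards.  */

static void
example_replace_and_fold (gimple stmt, use_operand_p use, tree val)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);

  replace_exp (use, val);
  fold_stmt_inplace (&gsi);
  update_stmt (stmt);
}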
/* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
   into the tree pointed to by OP_P.

   Use this version for const/copy propagation when SSA operands are not
   available.  It will perform the additional checks to ensure validity of
   the const/copy propagation, but will not update any operand information.
   Be sure to mark the stmt as modified.  */

void
propagate_tree_value (tree *op_p, tree val)
{
  gcc_checking_assert (!(TREE_CODE (val) == SSA_NAME
			 && *op_p
			 && TREE_CODE (*op_p) == SSA_NAME
			 && !may_propagate_copy (*op_p, val)));

  if (TREE_CODE (val) == SSA_NAME)
    *op_p = val;
  else
    *op_p = unshare_expr (val);
}
/* Like propagate_tree_value, but use as the operand to replace
   the principal expression (typically, the RHS) contained in the
   statement referenced by iterator GSI.  Note that it is not
   always possible to update the statement in-place, so a new
   statement may be created to replace the original.  */

void
propagate_tree_value_into_stmt (gimple_stmt_iterator *gsi, tree val)
{
  gimple stmt = gsi_stmt (*gsi);

  if (is_gimple_assign (stmt))
    {
      tree expr = NULL_TREE;
      if (gimple_assign_single_p (stmt))
	expr = gimple_assign_rhs1 (stmt);
      propagate_tree_value (&expr, val);
      gimple_assign_set_rhs_from_tree (gsi, expr);
    }
  else if (gimple_code (stmt) == GIMPLE_COND)
    {
      tree lhs = NULL_TREE;
      tree rhs = build_zero_cst (TREE_TYPE (val));
      propagate_tree_value (&lhs, val);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_lhs (stmt, lhs);
      gimple_cond_set_rhs (stmt, rhs);
    }
  else if (is_gimple_call (stmt)
	   && gimple_call_lhs (stmt) != NULL_TREE)
    {
      tree expr = NULL_TREE;
      bool res;
      propagate_tree_value (&expr, val);
      res = update_call_from_tree (gsi, expr);
      gcc_assert (res);
    }
  else if (gimple_code (stmt) == GIMPLE_SWITCH)
    propagate_tree_value (gimple_switch_index_ptr (stmt), val);
  else
    gcc_unreachable ();
}
#include "gt-tree-ssa-propagate.h"