/* Imported from official-gcc.git, gcc/tree-ssa-propagate.c
   (snapshot dated 2013-11-12, blob 00780b9a74513f0bf03b0c4b249ffe4a89ca18e9).  */
/* Generic SSA value propagation engine.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by the
   Free Software Foundation; either version 3, or (at your option) any
   later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "gimple-pretty-print.h"
#include "dumpfile.h"
#include "sbitmap.h"
#include "gimplify.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "vec.h"
#include "value-prof.h"
/* This file implements a generic value propagation engine based on
   the same propagation used by the SSA-CCP algorithm [1].

   Propagation is performed by simulating the execution of every
   statement that produces the value being propagated.  Simulation
   proceeds as follows:

   1- Initially, all edges of the CFG are marked not executable and
      the CFG worklist is seeded with all the statements in the entry
      basic block (block 0).

   2- Every statement S is simulated with a call to the call-back
      function SSA_PROP_VISIT_STMT.  This evaluation may produce 3
      results:

      	SSA_PROP_NOT_INTERESTING: Statement S produces nothing of
	    interest and does not affect any of the work lists.

	SSA_PROP_VARYING: The value produced by S cannot be determined
	    at compile time.  Further simulation of S is not required.
	    If S is a conditional jump, all the outgoing edges for the
	    block are considered executable and added to the work
	    list.

	SSA_PROP_INTERESTING: S produces a value that can be computed
	    at compile time.  Its result can be propagated into the
	    statements that feed from S.  Furthermore, if S is a
	    conditional jump, only the edge known to be taken is added
	    to the work list.  Edges that are known not to execute are
	    never simulated.

   3- PHI nodes are simulated with a call to SSA_PROP_VISIT_PHI.  The
      return value from SSA_PROP_VISIT_PHI has the same semantics as
      described in #2.

   4- Three work lists are kept.  Statements are only added to these
      lists if they produce one of SSA_PROP_INTERESTING or
      SSA_PROP_VARYING.

	CFG_BLOCKS contains the list of blocks to be simulated.
	    Blocks are added to this list if their incoming edges are
	    found executable.

	VARYING_SSA_EDGES contains the list of statements that feed
	    from statements that produce an SSA_PROP_VARYING result.
	    These are simulated first to speed up processing.

	INTERESTING_SSA_EDGES contains the list of statements that
	    feed from statements that produce an SSA_PROP_INTERESTING
	    result.

   5- Simulation terminates when all three work lists are drained.

   Before calling ssa_propagate, it is important to clear
   prop_simulate_again_p for all the statements in the program that
   should be simulated.  This initialization allows an implementation
   to specify which statements should never be simulated.

   It is also important to compute def-use information before calling
   ssa_propagate.

   References:

     [1] Constant propagation with conditional branches,
         Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     [2] Building an Optimizing Compiler,
         Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     [3] Advanced Compiler Design and Implementation,
         Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
117 /* Function pointers used to parameterize the propagation engine. */
118 static ssa_prop_visit_stmt_fn ssa_prop_visit_stmt;
119 static ssa_prop_visit_phi_fn ssa_prop_visit_phi;
121 /* Keep track of statements that have been added to one of the SSA
122 edges worklists. This flag is used to avoid visiting statements
123 unnecessarily when draining an SSA edge worklist. If while
124 simulating a basic block, we find a statement with
125 STMT_IN_SSA_EDGE_WORKLIST set, we clear it to prevent SSA edge
126 processing from visiting it again.
128 NOTE: users of the propagation engine are not allowed to use
129 the GF_PLF_1 flag. */
130 #define STMT_IN_SSA_EDGE_WORKLIST GF_PLF_1
132 /* A bitmap to keep track of executable blocks in the CFG. */
133 static sbitmap executable_blocks;
135 /* Array of control flow edges on the worklist. */
136 static vec<basic_block> cfg_blocks;
138 static unsigned int cfg_blocks_num = 0;
139 static int cfg_blocks_tail;
140 static int cfg_blocks_head;
142 static sbitmap bb_in_list;
144 /* Worklist of SSA edges which will need reexamination as their
145 definition has changed. SSA edges are def-use edges in the SSA
146 web. For each D-U edge, we store the target statement or PHI node
147 U. */
148 static GTY(()) vec<gimple, va_gc> *interesting_ssa_edges;
150 /* Identical to INTERESTING_SSA_EDGES. For performance reasons, the
151 list of SSA edges is split into two. One contains all SSA edges
152 who need to be reexamined because their lattice value changed to
153 varying (this worklist), and the other contains all other SSA edges
154 to be reexamined (INTERESTING_SSA_EDGES).
156 Since most values in the program are VARYING, the ideal situation
157 is to move them to that lattice value as quickly as possible.
158 Thus, it doesn't make sense to process any other type of lattice
159 value until all VARYING values are propagated fully, which is one
160 thing using the VARYING worklist achieves. In addition, if we
161 don't use a separate worklist for VARYING edges, we end up with
162 situations where lattice values move from
163 UNDEFINED->INTERESTING->VARYING instead of UNDEFINED->VARYING. */
164 static GTY(()) vec<gimple, va_gc> *varying_ssa_edges;
167 /* Return true if the block worklist empty. */
169 static inline bool
170 cfg_blocks_empty_p (void)
172 return (cfg_blocks_num == 0);
176 /* Add a basic block to the worklist. The block must not be already
177 in the worklist, and it must not be the ENTRY or EXIT block. */
179 static void
180 cfg_blocks_add (basic_block bb)
182 bool head = false;
184 gcc_assert (bb != ENTRY_BLOCK_PTR && bb != EXIT_BLOCK_PTR);
185 gcc_assert (!bitmap_bit_p (bb_in_list, bb->index));
187 if (cfg_blocks_empty_p ())
189 cfg_blocks_tail = cfg_blocks_head = 0;
190 cfg_blocks_num = 1;
192 else
194 cfg_blocks_num++;
195 if (cfg_blocks_num > cfg_blocks.length ())
197 /* We have to grow the array now. Adjust to queue to occupy
198 the full space of the original array. We do not need to
199 initialize the newly allocated portion of the array
200 because we keep track of CFG_BLOCKS_HEAD and
201 CFG_BLOCKS_HEAD. */
202 cfg_blocks_tail = cfg_blocks.length ();
203 cfg_blocks_head = 0;
204 cfg_blocks.safe_grow (2 * cfg_blocks_tail);
206 /* Minor optimization: we prefer to see blocks with more
207 predecessors later, because there is more of a chance that
208 the incoming edges will be executable. */
209 else if (EDGE_COUNT (bb->preds)
210 >= EDGE_COUNT (cfg_blocks[cfg_blocks_head]->preds))
211 cfg_blocks_tail = ((cfg_blocks_tail + 1) % cfg_blocks.length ());
212 else
214 if (cfg_blocks_head == 0)
215 cfg_blocks_head = cfg_blocks.length ();
216 --cfg_blocks_head;
217 head = true;
221 cfg_blocks[head ? cfg_blocks_head : cfg_blocks_tail] = bb;
222 bitmap_set_bit (bb_in_list, bb->index);
226 /* Remove a block from the worklist. */
228 static basic_block
229 cfg_blocks_get (void)
231 basic_block bb;
233 bb = cfg_blocks[cfg_blocks_head];
235 gcc_assert (!cfg_blocks_empty_p ());
236 gcc_assert (bb);
238 cfg_blocks_head = ((cfg_blocks_head + 1) % cfg_blocks.length ());
239 --cfg_blocks_num;
240 bitmap_clear_bit (bb_in_list, bb->index);
242 return bb;
246 /* We have just defined a new value for VAR. If IS_VARYING is true,
247 add all immediate uses of VAR to VARYING_SSA_EDGES, otherwise add
248 them to INTERESTING_SSA_EDGES. */
250 static void
251 add_ssa_edge (tree var, bool is_varying)
253 imm_use_iterator iter;
254 use_operand_p use_p;
256 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
258 gimple use_stmt = USE_STMT (use_p);
260 if (prop_simulate_again_p (use_stmt)
261 && !gimple_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST))
263 gimple_set_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST, true);
264 if (is_varying)
265 vec_safe_push (varying_ssa_edges, use_stmt);
266 else
267 vec_safe_push (interesting_ssa_edges, use_stmt);
273 /* Add edge E to the control flow worklist. */
275 static void
276 add_control_edge (edge e)
278 basic_block bb = e->dest;
279 if (bb == EXIT_BLOCK_PTR)
280 return;
282 /* If the edge had already been executed, skip it. */
283 if (e->flags & EDGE_EXECUTABLE)
284 return;
286 e->flags |= EDGE_EXECUTABLE;
288 /* If the block is already in the list, we're done. */
289 if (bitmap_bit_p (bb_in_list, bb->index))
290 return;
292 cfg_blocks_add (bb);
294 if (dump_file && (dump_flags & TDF_DETAILS))
295 fprintf (dump_file, "Adding Destination of edge (%d -> %d) to worklist\n\n",
296 e->src->index, e->dest->index);
300 /* Simulate the execution of STMT and update the work lists accordingly. */
302 static void
303 simulate_stmt (gimple stmt)
305 enum ssa_prop_result val = SSA_PROP_NOT_INTERESTING;
306 edge taken_edge = NULL;
307 tree output_name = NULL_TREE;
309 /* Don't bother visiting statements that are already
310 considered varying by the propagator. */
311 if (!prop_simulate_again_p (stmt))
312 return;
314 if (gimple_code (stmt) == GIMPLE_PHI)
316 val = ssa_prop_visit_phi (stmt);
317 output_name = gimple_phi_result (stmt);
319 else
320 val = ssa_prop_visit_stmt (stmt, &taken_edge, &output_name);
322 if (val == SSA_PROP_VARYING)
324 prop_set_simulate_again (stmt, false);
326 /* If the statement produced a new varying value, add the SSA
327 edges coming out of OUTPUT_NAME. */
328 if (output_name)
329 add_ssa_edge (output_name, true);
331 /* If STMT transfers control out of its basic block, add
332 all outgoing edges to the work list. */
333 if (stmt_ends_bb_p (stmt))
335 edge e;
336 edge_iterator ei;
337 basic_block bb = gimple_bb (stmt);
338 FOR_EACH_EDGE (e, ei, bb->succs)
339 add_control_edge (e);
342 else if (val == SSA_PROP_INTERESTING)
344 /* If the statement produced new value, add the SSA edges coming
345 out of OUTPUT_NAME. */
346 if (output_name)
347 add_ssa_edge (output_name, false);
349 /* If we know which edge is going to be taken out of this block,
350 add it to the CFG work list. */
351 if (taken_edge)
352 add_control_edge (taken_edge);
356 /* Process an SSA edge worklist. WORKLIST is the SSA edge worklist to
357 drain. This pops statements off the given WORKLIST and processes
358 them until there are no more statements on WORKLIST.
359 We take a pointer to WORKLIST because it may be reallocated when an
360 SSA edge is added to it in simulate_stmt. */
362 static void
363 process_ssa_edge_worklist (vec<gimple, va_gc> **worklist)
365 /* Drain the entire worklist. */
366 while ((*worklist)->length () > 0)
368 basic_block bb;
370 /* Pull the statement to simulate off the worklist. */
371 gimple stmt = (*worklist)->pop ();
373 /* If this statement was already visited by simulate_block, then
374 we don't need to visit it again here. */
375 if (!gimple_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST))
376 continue;
378 /* STMT is no longer in a worklist. */
379 gimple_set_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST, false);
381 if (dump_file && (dump_flags & TDF_DETAILS))
383 fprintf (dump_file, "\nSimulating statement (from ssa_edges): ");
384 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
387 bb = gimple_bb (stmt);
389 /* PHI nodes are always visited, regardless of whether or not
390 the destination block is executable. Otherwise, visit the
391 statement only if its block is marked executable. */
392 if (gimple_code (stmt) == GIMPLE_PHI
393 || bitmap_bit_p (executable_blocks, bb->index))
394 simulate_stmt (stmt);
399 /* Simulate the execution of BLOCK. Evaluate the statement associated
400 with each variable reference inside the block. */
402 static void
403 simulate_block (basic_block block)
405 gimple_stmt_iterator gsi;
407 /* There is nothing to do for the exit block. */
408 if (block == EXIT_BLOCK_PTR)
409 return;
411 if (dump_file && (dump_flags & TDF_DETAILS))
412 fprintf (dump_file, "\nSimulating block %d\n", block->index);
414 /* Always simulate PHI nodes, even if we have simulated this block
415 before. */
416 for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
417 simulate_stmt (gsi_stmt (gsi));
419 /* If this is the first time we've simulated this block, then we
420 must simulate each of its statements. */
421 if (!bitmap_bit_p (executable_blocks, block->index))
423 gimple_stmt_iterator j;
424 unsigned int normal_edge_count;
425 edge e, normal_edge;
426 edge_iterator ei;
428 /* Note that we have simulated this block. */
429 bitmap_set_bit (executable_blocks, block->index);
431 for (j = gsi_start_bb (block); !gsi_end_p (j); gsi_next (&j))
433 gimple stmt = gsi_stmt (j);
435 /* If this statement is already in the worklist then
436 "cancel" it. The reevaluation implied by the worklist
437 entry will produce the same value we generate here and
438 thus reevaluating it again from the worklist is
439 pointless. */
440 if (gimple_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST))
441 gimple_set_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST, false);
443 simulate_stmt (stmt);
446 /* We can not predict when abnormal and EH edges will be executed, so
447 once a block is considered executable, we consider any
448 outgoing abnormal edges as executable.
450 TODO: This is not exactly true. Simplifying statement might
451 prove it non-throwing and also computed goto can be handled
452 when destination is known.
454 At the same time, if this block has only one successor that is
455 reached by non-abnormal edges, then add that successor to the
456 worklist. */
457 normal_edge_count = 0;
458 normal_edge = NULL;
459 FOR_EACH_EDGE (e, ei, block->succs)
461 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
462 add_control_edge (e);
463 else
465 normal_edge_count++;
466 normal_edge = e;
470 if (normal_edge_count == 1)
471 add_control_edge (normal_edge);
476 /* Initialize local data structures and work lists. */
478 static void
479 ssa_prop_init (void)
481 edge e;
482 edge_iterator ei;
483 basic_block bb;
485 /* Worklists of SSA edges. */
486 vec_alloc (interesting_ssa_edges, 20);
487 vec_alloc (varying_ssa_edges, 20);
489 executable_blocks = sbitmap_alloc (last_basic_block);
490 bitmap_clear (executable_blocks);
492 bb_in_list = sbitmap_alloc (last_basic_block);
493 bitmap_clear (bb_in_list);
495 if (dump_file && (dump_flags & TDF_DETAILS))
496 dump_immediate_uses (dump_file);
498 cfg_blocks.create (20);
499 cfg_blocks.safe_grow_cleared (20);
501 /* Initially assume that every edge in the CFG is not executable.
502 (including the edges coming out of ENTRY_BLOCK_PTR). */
503 FOR_ALL_BB (bb)
505 gimple_stmt_iterator si;
507 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
508 gimple_set_plf (gsi_stmt (si), STMT_IN_SSA_EDGE_WORKLIST, false);
510 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
511 gimple_set_plf (gsi_stmt (si), STMT_IN_SSA_EDGE_WORKLIST, false);
513 FOR_EACH_EDGE (e, ei, bb->succs)
514 e->flags &= ~EDGE_EXECUTABLE;
517 /* Seed the algorithm by adding the successors of the entry block to the
518 edge worklist. */
519 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
520 add_control_edge (e);
524 /* Free allocated storage. */
526 static void
527 ssa_prop_fini (void)
529 vec_free (interesting_ssa_edges);
530 vec_free (varying_ssa_edges);
531 cfg_blocks.release ();
532 sbitmap_free (bb_in_list);
533 sbitmap_free (executable_blocks);
537 /* Return true if EXPR is an acceptable right-hand-side for a
538 GIMPLE assignment. We validate the entire tree, not just
539 the root node, thus catching expressions that embed complex
540 operands that are not permitted in GIMPLE. This function
541 is needed because the folding routines in fold-const.c
542 may return such expressions in some cases, e.g., an array
543 access with an embedded index addition. It may make more
544 sense to have folding routines that are sensitive to the
545 constraints on GIMPLE operands, rather than abandoning any
546 any attempt to fold if the usual folding turns out to be too
547 aggressive. */
549 bool
550 valid_gimple_rhs_p (tree expr)
552 enum tree_code code = TREE_CODE (expr);
554 switch (TREE_CODE_CLASS (code))
556 case tcc_declaration:
557 if (!is_gimple_variable (expr))
558 return false;
559 break;
561 case tcc_constant:
562 /* All constants are ok. */
563 break;
565 case tcc_binary:
566 case tcc_comparison:
567 if (!is_gimple_val (TREE_OPERAND (expr, 0))
568 || !is_gimple_val (TREE_OPERAND (expr, 1)))
569 return false;
570 break;
572 case tcc_unary:
573 if (!is_gimple_val (TREE_OPERAND (expr, 0)))
574 return false;
575 break;
577 case tcc_expression:
578 switch (code)
580 case ADDR_EXPR:
582 tree t;
583 if (is_gimple_min_invariant (expr))
584 return true;
585 t = TREE_OPERAND (expr, 0);
586 while (handled_component_p (t))
588 /* ??? More checks needed, see the GIMPLE verifier. */
589 if ((TREE_CODE (t) == ARRAY_REF
590 || TREE_CODE (t) == ARRAY_RANGE_REF)
591 && !is_gimple_val (TREE_OPERAND (t, 1)))
592 return false;
593 t = TREE_OPERAND (t, 0);
595 if (!is_gimple_id (t))
596 return false;
598 break;
600 default:
601 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
603 if (((code == VEC_COND_EXPR || code == COND_EXPR)
604 ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
605 : !is_gimple_val (TREE_OPERAND (expr, 0)))
606 || !is_gimple_val (TREE_OPERAND (expr, 1))
607 || !is_gimple_val (TREE_OPERAND (expr, 2)))
608 return false;
609 break;
611 return false;
613 break;
615 case tcc_vl_exp:
616 return false;
618 case tcc_exceptional:
619 if (code == CONSTRUCTOR)
621 unsigned i;
622 tree elt;
623 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
624 if (!is_gimple_val (elt))
625 return false;
626 return true;
628 if (code != SSA_NAME)
629 return false;
630 break;
632 case tcc_reference:
633 if (code == BIT_FIELD_REF)
634 return is_gimple_val (TREE_OPERAND (expr, 0));
635 return false;
637 default:
638 return false;
641 return true;
645 /* Return true if EXPR is a CALL_EXPR suitable for representation
646 as a single GIMPLE_CALL statement. If the arguments require
647 further gimplification, return false. */
649 static bool
650 valid_gimple_call_p (tree expr)
652 unsigned i, nargs;
654 if (TREE_CODE (expr) != CALL_EXPR)
655 return false;
657 nargs = call_expr_nargs (expr);
658 for (i = 0; i < nargs; i++)
660 tree arg = CALL_EXPR_ARG (expr, i);
661 if (is_gimple_reg_type (arg))
663 if (!is_gimple_val (arg))
664 return false;
666 else
667 if (!is_gimple_lvalue (arg))
668 return false;
671 return true;
675 /* Make SSA names defined by OLD_STMT point to NEW_STMT
676 as their defining statement. */
678 void
679 move_ssa_defining_stmt_for_defs (gimple new_stmt, gimple old_stmt)
681 tree var;
682 ssa_op_iter iter;
684 if (gimple_in_ssa_p (cfun))
686 /* Make defined SSA_NAMEs point to the new
687 statement as their definition. */
688 FOR_EACH_SSA_TREE_OPERAND (var, old_stmt, iter, SSA_OP_ALL_DEFS)
690 if (TREE_CODE (var) == SSA_NAME)
691 SSA_NAME_DEF_STMT (var) = new_stmt;
696 /* Helper function for update_gimple_call and update_call_from_tree.
697 A GIMPLE_CALL STMT is being replaced with GIMPLE_CALL NEW_STMT. */
699 static void
700 finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple new_stmt,
701 gimple stmt)
703 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
704 move_ssa_defining_stmt_for_defs (new_stmt, stmt);
705 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
706 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
707 gimple_set_location (new_stmt, gimple_location (stmt));
708 if (gimple_block (new_stmt) == NULL_TREE)
709 gimple_set_block (new_stmt, gimple_block (stmt));
710 gsi_replace (si_p, new_stmt, false);
713 /* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
714 with number of arguments NARGS, where the arguments in GIMPLE form
715 follow NARGS argument. */
717 bool
718 update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
720 va_list ap;
721 gimple new_stmt, stmt = gsi_stmt (*si_p);
723 gcc_assert (is_gimple_call (stmt));
724 va_start (ap, nargs);
725 new_stmt = gimple_build_call_valist (fn, nargs, ap);
726 finish_update_gimple_call (si_p, new_stmt, stmt);
727 va_end (ap);
728 return true;
731 /* Update a GIMPLE_CALL statement at iterator *SI_P to reflect the
732 value of EXPR, which is expected to be the result of folding the
733 call. This can only be done if EXPR is a CALL_EXPR with valid
734 GIMPLE operands as arguments, or if it is a suitable RHS expression
735 for a GIMPLE_ASSIGN. More complex expressions will require
736 gimplification, which will introduce additional statements. In this
737 event, no update is performed, and the function returns false.
738 Note that we cannot mutate a GIMPLE_CALL in-place, so we always
739 replace the statement at *SI_P with an entirely new statement.
740 The new statement need not be a call, e.g., if the original call
741 folded to a constant. */
743 bool
744 update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
746 gimple stmt = gsi_stmt (*si_p);
748 if (valid_gimple_call_p (expr))
750 /* The call has simplified to another call. */
751 tree fn = CALL_EXPR_FN (expr);
752 unsigned i;
753 unsigned nargs = call_expr_nargs (expr);
754 vec<tree> args = vNULL;
755 gimple new_stmt;
757 if (nargs > 0)
759 args.create (nargs);
760 args.safe_grow_cleared (nargs);
762 for (i = 0; i < nargs; i++)
763 args[i] = CALL_EXPR_ARG (expr, i);
766 new_stmt = gimple_build_call_vec (fn, args);
767 finish_update_gimple_call (si_p, new_stmt, stmt);
768 args.release ();
770 return true;
772 else if (valid_gimple_rhs_p (expr))
774 tree lhs = gimple_call_lhs (stmt);
775 gimple new_stmt;
777 /* The call has simplified to an expression
778 that cannot be represented as a GIMPLE_CALL. */
779 if (lhs)
781 /* A value is expected.
782 Introduce a new GIMPLE_ASSIGN statement. */
783 STRIP_USELESS_TYPE_CONVERSION (expr);
784 new_stmt = gimple_build_assign (lhs, expr);
785 move_ssa_defining_stmt_for_defs (new_stmt, stmt);
786 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
787 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
789 else if (!TREE_SIDE_EFFECTS (expr))
791 /* No value is expected, and EXPR has no effect.
792 Replace it with an empty statement. */
793 new_stmt = gimple_build_nop ();
794 if (gimple_in_ssa_p (cfun))
796 unlink_stmt_vdef (stmt);
797 release_defs (stmt);
800 else
802 /* No value is expected, but EXPR has an effect,
803 e.g., it could be a reference to a volatile
804 variable. Create an assignment statement
805 with a dummy (unused) lhs variable. */
806 STRIP_USELESS_TYPE_CONVERSION (expr);
807 if (gimple_in_ssa_p (cfun))
808 lhs = make_ssa_name (TREE_TYPE (expr), NULL);
809 else
810 lhs = create_tmp_var (TREE_TYPE (expr), NULL);
811 new_stmt = gimple_build_assign (lhs, expr);
812 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
813 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
814 move_ssa_defining_stmt_for_defs (new_stmt, stmt);
816 gimple_set_location (new_stmt, gimple_location (stmt));
817 gsi_replace (si_p, new_stmt, false);
818 return true;
820 else
821 /* The call simplified to an expression that is
822 not a valid GIMPLE RHS. */
823 return false;
827 /* Entry point to the propagation engine.
829 VISIT_STMT is called for every statement visited.
830 VISIT_PHI is called for every PHI node visited. */
832 void
833 ssa_propagate (ssa_prop_visit_stmt_fn visit_stmt,
834 ssa_prop_visit_phi_fn visit_phi)
836 ssa_prop_visit_stmt = visit_stmt;
837 ssa_prop_visit_phi = visit_phi;
839 ssa_prop_init ();
841 /* Iterate until the worklists are empty. */
842 while (!cfg_blocks_empty_p ()
843 || interesting_ssa_edges->length () > 0
844 || varying_ssa_edges->length () > 0)
846 if (!cfg_blocks_empty_p ())
848 /* Pull the next block to simulate off the worklist. */
849 basic_block dest_block = cfg_blocks_get ();
850 simulate_block (dest_block);
853 /* In order to move things to varying as quickly as
854 possible,process the VARYING_SSA_EDGES worklist first. */
855 process_ssa_edge_worklist (&varying_ssa_edges);
857 /* Now process the INTERESTING_SSA_EDGES worklist. */
858 process_ssa_edge_worklist (&interesting_ssa_edges);
861 ssa_prop_fini ();
865 /* Return true if STMT is of the form 'mem_ref = RHS', where 'mem_ref'
866 is a non-volatile pointer dereference, a structure reference or a
867 reference to a single _DECL. Ignore volatile memory references
868 because they are not interesting for the optimizers. */
870 bool
871 stmt_makes_single_store (gimple stmt)
873 tree lhs;
875 if (gimple_code (stmt) != GIMPLE_ASSIGN
876 && gimple_code (stmt) != GIMPLE_CALL)
877 return false;
879 if (!gimple_vdef (stmt))
880 return false;
882 lhs = gimple_get_lhs (stmt);
884 /* A call statement may have a null LHS. */
885 if (!lhs)
886 return false;
888 return (!TREE_THIS_VOLATILE (lhs)
889 && (DECL_P (lhs)
890 || REFERENCE_CLASS_P (lhs)));
/* Propagation statistics.  */
struct prop_stats_d
{
  long num_const_prop;		/* Constants propagated into uses.  */
  long num_copy_prop;		/* Copies (SSA names) propagated.  */
  long num_stmts_folded;	/* Statements simplified by fold_fn.  */
  long num_dce;			/* Trivially dead statements removed.  */
};

static struct prop_stats_d prop_stats;
905 /* Replace USE references in statement STMT with the values stored in
906 PROP_VALUE. Return true if at least one reference was replaced. */
908 static bool
909 replace_uses_in (gimple stmt, ssa_prop_get_value_fn get_value)
911 bool replaced = false;
912 use_operand_p use;
913 ssa_op_iter iter;
915 FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
917 tree tuse = USE_FROM_PTR (use);
918 tree val = (*get_value) (tuse);
920 if (val == tuse || val == NULL_TREE)
921 continue;
923 if (gimple_code (stmt) == GIMPLE_ASM
924 && !may_propagate_copy_into_asm (tuse))
925 continue;
927 if (!may_propagate_copy (tuse, val))
928 continue;
930 if (TREE_CODE (val) != SSA_NAME)
931 prop_stats.num_const_prop++;
932 else
933 prop_stats.num_copy_prop++;
935 propagate_value (use, val);
937 replaced = true;
940 return replaced;
944 /* Replace propagated values into all the arguments for PHI using the
945 values from PROP_VALUE. */
947 static void
948 replace_phi_args_in (gimple phi, ssa_prop_get_value_fn get_value)
950 size_t i;
951 bool replaced = false;
953 if (dump_file && (dump_flags & TDF_DETAILS))
955 fprintf (dump_file, "Folding PHI node: ");
956 print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
959 for (i = 0; i < gimple_phi_num_args (phi); i++)
961 tree arg = gimple_phi_arg_def (phi, i);
963 if (TREE_CODE (arg) == SSA_NAME)
965 tree val = (*get_value) (arg);
967 if (val && val != arg && may_propagate_copy (arg, val))
969 if (TREE_CODE (val) != SSA_NAME)
970 prop_stats.num_const_prop++;
971 else
972 prop_stats.num_copy_prop++;
974 propagate_value (PHI_ARG_DEF_PTR (phi, i), val);
975 replaced = true;
977 /* If we propagated a copy and this argument flows
978 through an abnormal edge, update the replacement
979 accordingly. */
980 if (TREE_CODE (val) == SSA_NAME
981 && gimple_phi_arg_edge (phi, i)->flags & EDGE_ABNORMAL)
982 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
987 if (dump_file && (dump_flags & TDF_DETAILS))
989 if (!replaced)
990 fprintf (dump_file, "No folding possible\n");
991 else
993 fprintf (dump_file, "Folded into: ");
994 print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
995 fprintf (dump_file, "\n");
1001 /* Perform final substitution and folding of propagated values.
1003 PROP_VALUE[I] contains the single value that should be substituted
1004 at every use of SSA name N_I. If PROP_VALUE is NULL, no values are
1005 substituted.
1007 If FOLD_FN is non-NULL the function will be invoked on all statements
1008 before propagating values for pass specific simplification.
1010 DO_DCE is true if trivially dead stmts can be removed.
1012 If DO_DCE is true, the statements within a BB are walked from
1013 last to first element. Otherwise we scan from first to last element.
1015 Return TRUE when something changed. */
1017 bool
1018 substitute_and_fold (ssa_prop_get_value_fn get_value_fn,
1019 ssa_prop_fold_stmt_fn fold_fn,
1020 bool do_dce)
1022 basic_block bb;
1023 bool something_changed = false;
1024 unsigned i;
1026 if (!get_value_fn && !fold_fn)
1027 return false;
1029 if (dump_file && (dump_flags & TDF_DETAILS))
1030 fprintf (dump_file, "\nSubstituting values and folding statements\n\n");
1032 memset (&prop_stats, 0, sizeof (prop_stats));
1034 /* Substitute lattice values at definition sites. */
1035 if (get_value_fn)
1036 for (i = 1; i < num_ssa_names; ++i)
1038 tree name = ssa_name (i);
1039 tree val;
1040 gimple def_stmt;
1041 gimple_stmt_iterator gsi;
1043 if (!name
1044 || virtual_operand_p (name))
1045 continue;
1047 def_stmt = SSA_NAME_DEF_STMT (name);
1048 if (gimple_nop_p (def_stmt)
1049 /* Do not substitute ASSERT_EXPR rhs, this will confuse VRP. */
1050 || (gimple_assign_single_p (def_stmt)
1051 && gimple_assign_rhs_code (def_stmt) == ASSERT_EXPR)
1052 || !(val = (*get_value_fn) (name))
1053 || !may_propagate_copy (name, val))
1054 continue;
1056 gsi = gsi_for_stmt (def_stmt);
1057 if (is_gimple_assign (def_stmt))
1059 gimple_assign_set_rhs_with_ops (&gsi, TREE_CODE (val),
1060 val, NULL_TREE);
1061 gcc_assert (gsi_stmt (gsi) == def_stmt);
1062 if (maybe_clean_eh_stmt (def_stmt))
1063 gimple_purge_dead_eh_edges (gimple_bb (def_stmt));
1064 update_stmt (def_stmt);
1066 else if (is_gimple_call (def_stmt))
1068 int flags = gimple_call_flags (def_stmt);
1070 /* Don't optimize away calls that have side-effects. */
1071 if ((flags & (ECF_CONST|ECF_PURE)) == 0
1072 || (flags & ECF_LOOPING_CONST_OR_PURE))
1073 continue;
1074 if (update_call_from_tree (&gsi, val)
1075 && maybe_clean_or_replace_eh_stmt (def_stmt, gsi_stmt (gsi)))
1076 gimple_purge_dead_eh_edges (gimple_bb (gsi_stmt (gsi)));
1078 else if (gimple_code (def_stmt) == GIMPLE_PHI)
1080 gimple new_stmt = gimple_build_assign (name, val);
1081 gimple_stmt_iterator gsi2;
1082 gsi2 = gsi_after_labels (gimple_bb (def_stmt));
1083 gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
1084 remove_phi_node (&gsi, false);
1087 something_changed = true;
1090 /* Propagate into all uses and fold. */
1091 FOR_EACH_BB (bb)
1093 gimple_stmt_iterator i;
1095 /* Propagate known values into PHI nodes. */
1096 if (get_value_fn)
1097 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
1098 replace_phi_args_in (gsi_stmt (i), get_value_fn);
1100 /* Propagate known values into stmts. Do a backward walk if
1101 do_dce is true. In some case it exposes
1102 more trivially deletable stmts to walk backward. */
1103 for (i = (do_dce ? gsi_last_bb (bb) : gsi_start_bb (bb)); !gsi_end_p (i);)
1105 bool did_replace;
1106 gimple stmt = gsi_stmt (i);
1107 gimple old_stmt;
1108 enum gimple_code code = gimple_code (stmt);
1109 gimple_stmt_iterator oldi;
1111 oldi = i;
1112 if (do_dce)
1113 gsi_prev (&i);
1114 else
1115 gsi_next (&i);
1117 /* Ignore ASSERT_EXPRs. They are used by VRP to generate
1118 range information for names and they are discarded
1119 afterwards. */
1121 if (code == GIMPLE_ASSIGN
1122 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ASSERT_EXPR)
1123 continue;
1125 /* No point propagating into a stmt whose result is not used,
1126 but instead we might be able to remove a trivially dead stmt.
1127 Don't do this when called from VRP, since the SSA_NAME which
1128 is going to be released could be still referenced in VRP
1129 ranges. */
1130 if (do_dce
1131 && gimple_get_lhs (stmt)
1132 && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME
1133 && has_zero_uses (gimple_get_lhs (stmt))
1134 && !stmt_could_throw_p (stmt)
1135 && !gimple_has_side_effects (stmt))
1137 gimple_stmt_iterator i2;
1139 if (dump_file && dump_flags & TDF_DETAILS)
1141 fprintf (dump_file, "Removing dead stmt ");
1142 print_gimple_stmt (dump_file, stmt, 0, 0);
1143 fprintf (dump_file, "\n");
1145 prop_stats.num_dce++;
1146 i2 = gsi_for_stmt (stmt);
1147 gsi_remove (&i2, true);
1148 release_defs (stmt);
1149 continue;
1152 /* Replace the statement with its folded version and mark it
1153 folded. */
1154 did_replace = false;
1155 if (dump_file && (dump_flags & TDF_DETAILS))
1157 fprintf (dump_file, "Folding statement: ");
1158 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1161 old_stmt = stmt;
1163 /* Some statements may be simplified using propagator
1164 specific information. Do this before propagating
1165 into the stmt to not disturb pass specific information. */
1166 if (fold_fn
1167 && (*fold_fn)(&oldi))
1169 did_replace = true;
1170 prop_stats.num_stmts_folded++;
1171 stmt = gsi_stmt (oldi);
1172 update_stmt (stmt);
1175 /* Replace real uses in the statement. */
1176 if (get_value_fn)
1177 did_replace |= replace_uses_in (stmt, get_value_fn);
1179 /* If we made a replacement, fold the statement. */
1180 if (did_replace)
1181 fold_stmt (&oldi);
1183 /* Now cleanup. */
1184 if (did_replace)
1186 stmt = gsi_stmt (oldi);
1188 /* If we cleaned up EH information from the statement,
1189 remove EH edges. */
1190 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
1191 gimple_purge_dead_eh_edges (bb);
1193 if (is_gimple_assign (stmt)
1194 && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
1195 == GIMPLE_SINGLE_RHS))
1197 tree rhs = gimple_assign_rhs1 (stmt);
1199 if (TREE_CODE (rhs) == ADDR_EXPR)
1200 recompute_tree_invariant_for_addr_expr (rhs);
1203 /* Determine what needs to be done to update the SSA form. */
1204 update_stmt (stmt);
1205 if (!is_gimple_debug (stmt))
1206 something_changed = true;
1209 if (dump_file && (dump_flags & TDF_DETAILS))
1211 if (did_replace)
1213 fprintf (dump_file, "Folded into: ");
1214 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1215 fprintf (dump_file, "\n");
1217 else
1218 fprintf (dump_file, "Not folded\n");
1223 statistics_counter_event (cfun, "Constants propagated",
1224 prop_stats.num_const_prop);
1225 statistics_counter_event (cfun, "Copies propagated",
1226 prop_stats.num_copy_prop);
1227 statistics_counter_event (cfun, "Statements folded",
1228 prop_stats.num_stmts_folded);
1229 statistics_counter_event (cfun, "Statements deleted",
1230 prop_stats.num_dce);
1231 return something_changed;
1235 /* Return true if we may propagate ORIG into DEST, false otherwise. */
1237 bool
1238 may_propagate_copy (tree dest, tree orig)
1240 tree type_d = TREE_TYPE (dest);
1241 tree type_o = TREE_TYPE (orig);
1243 /* If ORIG flows in from an abnormal edge, it cannot be propagated. */
1244 if (TREE_CODE (orig) == SSA_NAME
1245 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig)
1246 /* If it is the default definition and an automatic variable then
1247 we can though and it is important that we do to avoid
1248 uninitialized regular copies. */
1249 && !(SSA_NAME_IS_DEFAULT_DEF (orig)
1250 && (SSA_NAME_VAR (orig) == NULL_TREE
1251 || TREE_CODE (SSA_NAME_VAR (orig)) == VAR_DECL)))
1252 return false;
1254 /* If DEST is an SSA_NAME that flows from an abnormal edge, then it
1255 cannot be replaced. */
1256 if (TREE_CODE (dest) == SSA_NAME
1257 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (dest))
1258 return false;
1260 /* Do not copy between types for which we *do* need a conversion. */
1261 if (!useless_type_conversion_p (type_d, type_o))
1262 return false;
1264 /* Generally propagating virtual operands is not ok as that may
1265 create overlapping life-ranges. */
1266 if (TREE_CODE (dest) == SSA_NAME && virtual_operand_p (dest))
1267 return false;
1269 /* Anything else is OK. */
1270 return true;
1273 /* Like may_propagate_copy, but use as the destination expression
1274 the principal expression (typically, the RHS) contained in
1275 statement DEST. This is more efficient when working with the
1276 gimple tuples representation. */
1278 bool
1279 may_propagate_copy_into_stmt (gimple dest, tree orig)
1281 tree type_d;
1282 tree type_o;
1284 /* If the statement is a switch or a single-rhs assignment,
1285 then the expression to be replaced by the propagation may
1286 be an SSA_NAME. Fortunately, there is an explicit tree
1287 for the expression, so we delegate to may_propagate_copy. */
1289 if (gimple_assign_single_p (dest))
1290 return may_propagate_copy (gimple_assign_rhs1 (dest), orig);
1291 else if (gimple_code (dest) == GIMPLE_SWITCH)
1292 return may_propagate_copy (gimple_switch_index (dest), orig);
1294 /* In other cases, the expression is not materialized, so there
1295 is no destination to pass to may_propagate_copy. On the other
1296 hand, the expression cannot be an SSA_NAME, so the analysis
1297 is much simpler. */
1299 if (TREE_CODE (orig) == SSA_NAME
1300 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
1301 return false;
1303 if (is_gimple_assign (dest))
1304 type_d = TREE_TYPE (gimple_assign_lhs (dest));
1305 else if (gimple_code (dest) == GIMPLE_COND)
1306 type_d = boolean_type_node;
1307 else if (is_gimple_call (dest)
1308 && gimple_call_lhs (dest) != NULL_TREE)
1309 type_d = TREE_TYPE (gimple_call_lhs (dest));
1310 else
1311 gcc_unreachable ();
1313 type_o = TREE_TYPE (orig);
1315 if (!useless_type_conversion_p (type_d, type_o))
1316 return false;
1318 return true;
/* Similarly, but we know that we're propagating into an ASM_EXPR.
   DEST is the asm operand being replaced; no restriction applies
   here, so this always returns true.  */

bool
may_propagate_copy_into_asm (tree dest ATTRIBUTE_UNUSED)
{
  return true;
}
1330 /* Common code for propagate_value and replace_exp.
1332 Replace use operand OP_P with VAL. FOR_PROPAGATION indicates if the
1333 replacement is done to propagate a value or not. */
1335 static void
1336 replace_exp_1 (use_operand_p op_p, tree val,
1337 bool for_propagation ATTRIBUTE_UNUSED)
1339 #if defined ENABLE_CHECKING
1340 tree op = USE_FROM_PTR (op_p);
1342 gcc_assert (!(for_propagation
1343 && TREE_CODE (op) == SSA_NAME
1344 && TREE_CODE (val) == SSA_NAME
1345 && !may_propagate_copy (op, val)));
1346 #endif
1348 if (TREE_CODE (val) == SSA_NAME)
1349 SET_USE (op_p, val);
1350 else
1351 SET_USE (op_p, unshare_expr (val));
/* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
   into the operand pointed to by OP_P.

   Use this version for const/copy propagation as it will perform additional
   checks (via replace_exp_1 under ENABLE_CHECKING) to ensure validity of
   the const/copy propagation.  */

void
propagate_value (use_operand_p op_p, tree val)
{
  replace_exp_1 (op_p, val, true);
}
/* Replace *OP_P with value VAL (assumed to be a constant or another SSA_NAME).

   Use this version when not const/copy propagating values.  For example,
   PRE uses this version when building expressions as they would appear
   in specific blocks taking into account actions of PHI nodes.

   Unlike propagate_value, this performs no propagation-validity checking.

   The statement in which an expression has been replaced should be
   folded using fold_stmt_inplace.  */

void
replace_exp (use_operand_p op_p, tree val)
{
  replace_exp_1 (op_p, val, false);
}
1383 /* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
1384 into the tree pointed to by OP_P.
1386 Use this version for const/copy propagation when SSA operands are not
1387 available. It will perform the additional checks to ensure validity of
1388 the const/copy propagation, but will not update any operand information.
1389 Be sure to mark the stmt as modified. */
1391 void
1392 propagate_tree_value (tree *op_p, tree val)
1394 gcc_checking_assert (!(TREE_CODE (val) == SSA_NAME
1395 && *op_p
1396 && TREE_CODE (*op_p) == SSA_NAME
1397 && !may_propagate_copy (*op_p, val)));
1399 if (TREE_CODE (val) == SSA_NAME)
1400 *op_p = val;
1401 else
1402 *op_p = unshare_expr (val);
1406 /* Like propagate_tree_value, but use as the operand to replace
1407 the principal expression (typically, the RHS) contained in the
1408 statement referenced by iterator GSI. Note that it is not
1409 always possible to update the statement in-place, so a new
1410 statement may be created to replace the original. */
1412 void
1413 propagate_tree_value_into_stmt (gimple_stmt_iterator *gsi, tree val)
1415 gimple stmt = gsi_stmt (*gsi);
1417 if (is_gimple_assign (stmt))
1419 tree expr = NULL_TREE;
1420 if (gimple_assign_single_p (stmt))
1421 expr = gimple_assign_rhs1 (stmt);
1422 propagate_tree_value (&expr, val);
1423 gimple_assign_set_rhs_from_tree (gsi, expr);
1425 else if (gimple_code (stmt) == GIMPLE_COND)
1427 tree lhs = NULL_TREE;
1428 tree rhs = build_zero_cst (TREE_TYPE (val));
1429 propagate_tree_value (&lhs, val);
1430 gimple_cond_set_code (stmt, NE_EXPR);
1431 gimple_cond_set_lhs (stmt, lhs);
1432 gimple_cond_set_rhs (stmt, rhs);
1434 else if (is_gimple_call (stmt)
1435 && gimple_call_lhs (stmt) != NULL_TREE)
1437 tree expr = NULL_TREE;
1438 bool res;
1439 propagate_tree_value (&expr, val);
1440 res = update_call_from_tree (gsi, expr);
1441 gcc_assert (res);
1443 else if (gimple_code (stmt) == GIMPLE_SWITCH)
1444 propagate_tree_value (gimple_switch_index_ptr (stmt), val);
1445 else
1446 gcc_unreachable ();
1449 #include "gt-tree-ssa-propagate.h"