Fix dot dump bug
[official-gcc.git] / gcc / tree-ssa-propagate.c
blobcdc9f6ad39b65a9fe65d08ffa3836e00021d16a9
1 /* Generic SSA value propagation engine.
2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "function.h"
30 #include "gimple-pretty-print.h"
31 #include "dumpfile.h"
32 #include "sbitmap.h"
33 #include "tree-ssa-alias.h"
34 #include "internal-fn.h"
35 #include "gimple-fold.h"
36 #include "tree-eh.h"
37 #include "gimple-expr.h"
38 #include "is-a.h"
39 #include "gimple.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "gimple-ssa.h"
43 #include "tree-cfg.h"
44 #include "tree-phinodes.h"
45 #include "ssa-iterators.h"
46 #include "stringpool.h"
47 #include "tree-ssanames.h"
48 #include "tree-ssa.h"
49 #include "tree-ssa-propagate.h"
50 #include "langhooks.h"
51 #include "value-prof.h"
52 #include "domwalk.h"
54 /* This file implements a generic value propagation engine based on
55 the same propagation used by the SSA-CCP algorithm [1].
57 Propagation is performed by simulating the execution of every
58 statement that produces the value being propagated. Simulation
59 proceeds as follows:
61 1- Initially, all edges of the CFG are marked not executable and
62 the CFG worklist is seeded with all the statements in the entry
63 basic block (block 0).
65 2- Every statement S is simulated with a call to the call-back
66 function SSA_PROP_VISIT_STMT. This evaluation may produce 3
67 results:
69 SSA_PROP_NOT_INTERESTING: Statement S produces nothing of
70 interest and does not affect any of the work lists.
72 SSA_PROP_VARYING: The value produced by S cannot be determined
73 at compile time. Further simulation of S is not required.
74 If S is a conditional jump, all the outgoing edges for the
75 block are considered executable and added to the work
76 list.
78 SSA_PROP_INTERESTING: S produces a value that can be computed
79 at compile time. Its result can be propagated into the
80 statements that feed from S. Furthermore, if S is a
81 conditional jump, only the edge known to be taken is added
82 to the work list. Edges that are known not to execute are
83 never simulated.
85 3- PHI nodes are simulated with a call to SSA_PROP_VISIT_PHI. The
86 return value from SSA_PROP_VISIT_PHI has the same semantics as
87 described in #2.
89 4- Three work lists are kept. Statements are only added to these
90 lists if they produce one of SSA_PROP_INTERESTING or
91 SSA_PROP_VARYING.
93 CFG_BLOCKS contains the list of blocks to be simulated.
94 Blocks are added to this list if their incoming edges are
95 found executable.
97 VARYING_SSA_EDGES contains the list of statements that feed
98 from statements that produce an SSA_PROP_VARYING result.
99 These are simulated first to speed up processing.
101 INTERESTING_SSA_EDGES contains the list of statements that
102 feed from statements that produce an SSA_PROP_INTERESTING
103 result.
105 5- Simulation terminates when all three work lists are drained.
107 Before calling ssa_propagate, it is important to clear
108 prop_simulate_again_p for all the statements in the program that
109 should be simulated. This initialization allows an implementation
110 to specify which statements should never be simulated.
112 It is also important to compute def-use information before calling
113 ssa_propagate.
115 References:
117 [1] Constant propagation with conditional branches,
118 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
120 [2] Building an Optimizing Compiler,
121 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
123 [3] Advanced Compiler Design and Implementation,
124 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
/* Function pointers used to parameterize the propagation engine.
   They are supplied by the caller of ssa_propagate and are invoked
   for every simulated statement and PHI node, respectively.  */
static ssa_prop_visit_stmt_fn ssa_prop_visit_stmt;
static ssa_prop_visit_phi_fn ssa_prop_visit_phi;

/* Keep track of statements that have been added to one of the SSA
   edges worklists.  This flag is used to avoid visiting statements
   unnecessarily when draining an SSA edge worklist.  If while
   simulating a basic block, we find a statement with
   STMT_IN_SSA_EDGE_WORKLIST set, we clear it to prevent SSA edge
   processing from visiting it again.

   NOTE: users of the propagation engine are not allowed to use
   the GF_PLF_1 flag.  */
#define STMT_IN_SSA_EDGE_WORKLIST GF_PLF_1

/* A bitmap to keep track of executable blocks in the CFG.  */
static sbitmap executable_blocks;

/* Circular queue of basic blocks on the CFG worklist.  (Despite the
   old comment, this holds basic blocks, not edges.)  */
static vec<basic_block> cfg_blocks;

/* Number of blocks currently queued in CFG_BLOCKS.  */
static unsigned int cfg_blocks_num = 0;

/* Tail and head indices of the CFG_BLOCKS circular queue.  */
static int cfg_blocks_tail;
static int cfg_blocks_head;

/* Bitmap of block indices currently present in the CFG_BLOCKS queue,
   used for O(1) membership tests.  */
static sbitmap bb_in_list;

/* Worklist of SSA edges which will need reexamination as their
   definition has changed.  SSA edges are def-use edges in the SSA
   web.  For each D-U edge, we store the target statement or PHI node
   U.  */
static vec<gimple> interesting_ssa_edges;

/* Identical to INTERESTING_SSA_EDGES.  For performance reasons, the
   list of SSA edges is split into two.  One contains all SSA edges
   who need to be reexamined because their lattice value changed to
   varying (this worklist), and the other contains all other SSA edges
   to be reexamined (INTERESTING_SSA_EDGES).

   Since most values in the program are VARYING, the ideal situation
   is to move them to that lattice value as quickly as possible.
   Thus, it doesn't make sense to process any other type of lattice
   value until all VARYING values are propagated fully, which is one
   thing using the VARYING worklist achieves.  In addition, if we
   don't use a separate worklist for VARYING edges, we end up with
   situations where lattice values move from
   UNDEFINED->INTERESTING->VARYING instead of UNDEFINED->VARYING.  */
static vec<gimple> varying_ssa_edges;
176 /* Return true if the block worklist empty. */
178 static inline bool
179 cfg_blocks_empty_p (void)
181 return (cfg_blocks_num == 0);
/* Add a basic block to the worklist.  The block must not be already
   in the worklist, and it must not be the ENTRY or EXIT block.  */

static void
cfg_blocks_add (basic_block bb)
{
  bool head = false;

  gcc_assert (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	      && bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
  gcc_assert (!bitmap_bit_p (bb_in_list, bb->index));

  if (cfg_blocks_empty_p ())
    {
      /* First element: reset the circular queue.  */
      cfg_blocks_tail = cfg_blocks_head = 0;
      cfg_blocks_num = 1;
    }
  else
    {
      cfg_blocks_num++;
      if (cfg_blocks_num > cfg_blocks.length ())
	{
	  /* We have to grow the array now.  Adjust the queue to occupy
	     the full space of the original array.  We do not need to
	     initialize the newly allocated portion of the array
	     because we keep track of CFG_BLOCKS_HEAD and
	     CFG_BLOCKS_TAIL.  */
	  cfg_blocks_tail = cfg_blocks.length ();
	  cfg_blocks_head = 0;
	  cfg_blocks.safe_grow (2 * cfg_blocks_tail);
	}
      /* Minor optimization: we prefer to see blocks with more
	 predecessors later, because there is more of a chance that
	 the incoming edges will be executable.  */
      else if (EDGE_COUNT (bb->preds)
	       >= EDGE_COUNT (cfg_blocks[cfg_blocks_head]->preds))
	cfg_blocks_tail = ((cfg_blocks_tail + 1) % cfg_blocks.length ());
      else
	{
	  /* Enqueue at the head: step the head index back one slot,
	     wrapping around the circular buffer if necessary.  */
	  if (cfg_blocks_head == 0)
	    cfg_blocks_head = cfg_blocks.length ();
	  --cfg_blocks_head;
	  head = true;
	}
    }

  cfg_blocks[head ? cfg_blocks_head : cfg_blocks_tail] = bb;
  bitmap_set_bit (bb_in_list, bb->index);
}
236 /* Remove a block from the worklist. */
238 static basic_block
239 cfg_blocks_get (void)
241 basic_block bb;
243 bb = cfg_blocks[cfg_blocks_head];
245 gcc_assert (!cfg_blocks_empty_p ());
246 gcc_assert (bb);
248 cfg_blocks_head = ((cfg_blocks_head + 1) % cfg_blocks.length ());
249 --cfg_blocks_num;
250 bitmap_clear_bit (bb_in_list, bb->index);
252 return bb;
256 /* We have just defined a new value for VAR. If IS_VARYING is true,
257 add all immediate uses of VAR to VARYING_SSA_EDGES, otherwise add
258 them to INTERESTING_SSA_EDGES. */
260 static void
261 add_ssa_edge (tree var, bool is_varying)
263 imm_use_iterator iter;
264 use_operand_p use_p;
266 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
268 gimple use_stmt = USE_STMT (use_p);
270 if (prop_simulate_again_p (use_stmt)
271 && !gimple_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST))
273 gimple_set_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST, true);
274 if (is_varying)
275 varying_ssa_edges.safe_push (use_stmt);
276 else
277 interesting_ssa_edges.safe_push (use_stmt);
283 /* Add edge E to the control flow worklist. */
285 static void
286 add_control_edge (edge e)
288 basic_block bb = e->dest;
289 if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
290 return;
292 /* If the edge had already been executed, skip it. */
293 if (e->flags & EDGE_EXECUTABLE)
294 return;
296 e->flags |= EDGE_EXECUTABLE;
298 /* If the block is already in the list, we're done. */
299 if (bitmap_bit_p (bb_in_list, bb->index))
300 return;
302 cfg_blocks_add (bb);
304 if (dump_file && (dump_flags & TDF_DETAILS))
305 fprintf (dump_file, "\nAdding Destination of edge (%d -> %d) to worklist\n",
306 e->src->index, e->dest->index);
/* Simulate the execution of STMT and update the work lists accordingly.  */

static void
simulate_stmt (gimple stmt)
{
  enum ssa_prop_result val = SSA_PROP_NOT_INTERESTING;
  edge taken_edge = NULL;
  tree output_name = NULL_TREE;

  /* Don't bother visiting statements that are already
     considered varying by the propagator.  */
  if (!prop_simulate_again_p (stmt))
    return;

  /* Dispatch to the caller-supplied visit callback; PHI nodes and
     regular statements use different callbacks.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      val = ssa_prop_visit_phi (stmt);
      output_name = gimple_phi_result (stmt);
    }
  else
    val = ssa_prop_visit_stmt (stmt, &taken_edge, &output_name);

  if (val == SSA_PROP_VARYING)
    {
      /* VARYING is final: never simulate this statement again.  */
      prop_set_simulate_again (stmt, false);

      /* If the statement produced a new varying value, add the SSA
	 edges coming out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name, true);

      /* If STMT transfers control out of its basic block, add
	 all outgoing edges to the work list.  */
      if (stmt_ends_bb_p (stmt))
	{
	  edge e;
	  edge_iterator ei;
	  basic_block bb = gimple_bb (stmt);
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    add_control_edge (e);
	}
    }
  else if (val == SSA_PROP_INTERESTING)
    {
      /* If the statement produced new value, add the SSA edges coming
	 out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name, false);

      /* If we know which edge is going to be taken out of this block,
	 add it to the CFG work list.  */
      if (taken_edge)
	add_control_edge (taken_edge);
    }
}
366 /* Process an SSA edge worklist. WORKLIST is the SSA edge worklist to
367 drain. This pops statements off the given WORKLIST and processes
368 them until there are no more statements on WORKLIST.
369 We take a pointer to WORKLIST because it may be reallocated when an
370 SSA edge is added to it in simulate_stmt. */
372 static void
373 process_ssa_edge_worklist (vec<gimple> *worklist)
375 /* Drain the entire worklist. */
376 while (worklist->length () > 0)
378 basic_block bb;
380 /* Pull the statement to simulate off the worklist. */
381 gimple stmt = worklist->pop ();
383 /* If this statement was already visited by simulate_block, then
384 we don't need to visit it again here. */
385 if (!gimple_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST))
386 continue;
388 /* STMT is no longer in a worklist. */
389 gimple_set_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST, false);
391 if (dump_file && (dump_flags & TDF_DETAILS))
393 fprintf (dump_file, "\nSimulating statement (from ssa_edges): ");
394 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
397 bb = gimple_bb (stmt);
399 /* PHI nodes are always visited, regardless of whether or not
400 the destination block is executable. Otherwise, visit the
401 statement only if its block is marked executable. */
402 if (gimple_code (stmt) == GIMPLE_PHI
403 || bitmap_bit_p (executable_blocks, bb->index))
404 simulate_stmt (stmt);
/* Simulate the execution of BLOCK.  Evaluate the statement associated
   with each variable reference inside the block.  */

static void
simulate_block (basic_block block)
{
  gimple_stmt_iterator gsi;

  /* There is nothing to do for the exit block.  */
  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSimulating block %d\n", block->index);

  /* Always simulate PHI nodes, even if we have simulated this block
     before.  */
  for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
    simulate_stmt (gsi_stmt (gsi));

  /* If this is the first time we've simulated this block, then we
     must simulate each of its statements.  */
  if (!bitmap_bit_p (executable_blocks, block->index))
    {
      gimple_stmt_iterator j;
      unsigned int normal_edge_count;
      edge e, normal_edge;
      edge_iterator ei;

      /* Note that we have simulated this block.  */
      bitmap_set_bit (executable_blocks, block->index);

      for (j = gsi_start_bb (block); !gsi_end_p (j); gsi_next (&j))
	{
	  gimple stmt = gsi_stmt (j);

	  /* If this statement is already in the worklist then
	     "cancel" it.  The reevaluation implied by the worklist
	     entry will produce the same value we generate here and
	     thus reevaluating it again from the worklist is
	     pointless.  */
	  if (gimple_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST))
	    gimple_set_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST, false);

	  simulate_stmt (stmt);
	}

      /* We can not predict when abnormal and EH edges will be executed, so
	 once a block is considered executable, we consider any
	 outgoing abnormal edges as executable.

	 TODO: This is not exactly true.  Simplifying statement might
	 prove it non-throwing and also computed goto can be handled
	 when destination is known.

	 At the same time, if this block has only one successor that is
	 reached by non-abnormal edges, then add that successor to the
	 worklist.  */
      normal_edge_count = 0;
      normal_edge = NULL;
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
	    add_control_edge (e);
	  else
	    {
	      normal_edge_count++;
	      normal_edge = e;
	    }
	}

      if (normal_edge_count == 1)
	add_control_edge (normal_edge);
    }
}
/* Initialize local data structures and work lists.  */

static void
ssa_prop_init (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb;

  /* Worklists of SSA edges.  */
  interesting_ssa_edges.create (20);
  varying_ssa_edges.create (20);

  /* No block has been marked executable yet.  */
  executable_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (executable_blocks);

  /* No block is on the CFG worklist yet.  */
  bb_in_list = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (bb_in_list);

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_immediate_uses (dump_file);

  cfg_blocks.create (20);
  cfg_blocks.safe_grow_cleared (20);

  /* Initially assume that every edge in the CFG is not executable
     (including the edges coming out of the entry block), and clear
     the engine-owned per-statement worklist flag.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator si;

      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	gimple_set_plf (gsi_stmt (si), STMT_IN_SSA_EDGE_WORKLIST, false);

      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
	gimple_set_plf (gsi_stmt (si), STMT_IN_SSA_EDGE_WORKLIST, false);

      FOR_EACH_EDGE (e, ei, bb->succs)
	e->flags &= ~EDGE_EXECUTABLE;
    }

  /* Seed the algorithm by adding the successors of the entry block to the
     edge worklist.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    add_control_edge (e);
}
534 /* Free allocated storage. */
536 static void
537 ssa_prop_fini (void)
539 interesting_ssa_edges.release ();
540 varying_ssa_edges.release ();
541 cfg_blocks.release ();
542 sbitmap_free (bb_in_list);
543 sbitmap_free (executable_blocks);
/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.c
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning
   any attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      /* Both operands of a binary (or comparison) RHS must already be
	 gimple values.  */
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    /* Walk down the component-reference chain checking that
	       any array indices are gimple values.  */
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      /* The condition operand of (VEC_)COND_EXPR gets the
		 laxer is_gimple_condexpr check; all other operands
		 must be gimple values.  */
	      if (((code == VEC_COND_EXPR || code == COND_EXPR)
		   ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
		   : !is_gimple_val (TREE_OPERAND (expr, 0)))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  /* A CONSTRUCTOR is valid only when every element is a
	     gimple value.  */
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
665 /* Return true if EXPR is a CALL_EXPR suitable for representation
666 as a single GIMPLE_CALL statement. If the arguments require
667 further gimplification, return false. */
669 static bool
670 valid_gimple_call_p (tree expr)
672 unsigned i, nargs;
674 if (TREE_CODE (expr) != CALL_EXPR)
675 return false;
677 nargs = call_expr_nargs (expr);
678 for (i = 0; i < nargs; i++)
680 tree arg = CALL_EXPR_ARG (expr, i);
681 if (is_gimple_reg_type (TREE_TYPE (arg)))
683 if (!is_gimple_val (arg))
684 return false;
686 else
687 if (!is_gimple_lvalue (arg))
688 return false;
691 return true;
695 /* Make SSA names defined by OLD_STMT point to NEW_STMT
696 as their defining statement. */
698 void
699 move_ssa_defining_stmt_for_defs (gimple new_stmt, gimple old_stmt)
701 tree var;
702 ssa_op_iter iter;
704 if (gimple_in_ssa_p (cfun))
706 /* Make defined SSA_NAMEs point to the new
707 statement as their definition. */
708 FOR_EACH_SSA_TREE_OPERAND (var, old_stmt, iter, SSA_OP_ALL_DEFS)
710 if (TREE_CODE (var) == SSA_NAME)
711 SSA_NAME_DEF_STMT (var) = new_stmt;
/* Helper function for update_gimple_call and update_call_from_tree.
   A GIMPLE_CALL STMT is being replaced with GIMPLE_CALL NEW_STMT:
   transfer the LHS, SSA defs, virtual operands, location and block
   from STMT to NEW_STMT, then splice NEW_STMT in at *SI_P.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple new_stmt,
			   gimple stmt)
{
  gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
  /* Carry over the virtual memory operands.  */
  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
  gimple_set_location (new_stmt, gimple_location (stmt));
  /* Keep an explicitly set block on NEW_STMT, otherwise inherit
     STMT's block.  */
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}
/* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
   with number of arguments NARGS, where the arguments in GIMPLE form
   follow NARGS argument.  Always returns true.  */

bool
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gimple new_stmt, stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
  return true;
}
/* Update a GIMPLE_CALL statement at iterator *SI_P to reflect the
   value of EXPR, which is expected to be the result of folding the
   call.  This can only be done if EXPR is a CALL_EXPR with valid
   GIMPLE operands as arguments, or if it is a suitable RHS expression
   for a GIMPLE_ASSIGN.  More complex expressions will require
   gimplification, which will introduce additional statements.  In this
   event, no update is performed, and the function returns false.
   Note that we cannot mutate a GIMPLE_CALL in-place, so we always
   replace the statement at *SI_P with an entirely new statement.
   The new statement need not be a call, e.g., if the original call
   folded to a constant.  */

bool
update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  gimple stmt = gsi_stmt (*si_p);

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gimple new_stmt;

      if (nargs > 0)
	{
	  /* Collect the folded arguments into a vector for
	     gimple_build_call_vec.  */
	  args.create (nargs);
	  args.safe_grow_cleared (nargs);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();

      return true;
    }
  else if (valid_gimple_rhs_p (expr))
    {
      tree lhs = gimple_call_lhs (stmt);
      gimple new_stmt;

      /* The call has simplified to an expression
	 that cannot be represented as a GIMPLE_CALL.  */
      if (lhs)
	{
	  /* A value is expected.
	     Introduce a new GIMPLE_ASSIGN statement.  */
	  STRIP_USELESS_TYPE_CONVERSION (expr);
	  new_stmt = gimple_build_assign (lhs, expr);
	  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	}
      else if (!TREE_SIDE_EFFECTS (expr))
	{
	  /* No value is expected, and EXPR has no effect.
	     Replace it with an empty statement.  */
	  new_stmt = gimple_build_nop ();
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	}
      else
	{
	  /* No value is expected, but EXPR has an effect,
	     e.g., it could be a reference to a volatile
	     variable.  Create an assignment statement
	     with a dummy (unused) lhs variable.  */
	  STRIP_USELESS_TYPE_CONVERSION (expr);
	  if (gimple_in_ssa_p (cfun))
	    lhs = make_ssa_name (TREE_TYPE (expr), NULL);
	  else
	    lhs = create_tmp_var (TREE_TYPE (expr), NULL);
	  new_stmt = gimple_build_assign (lhs, expr);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
	}
      gimple_set_location (new_stmt, gimple_location (stmt));
      gsi_replace (si_p, new_stmt, false);
      return true;
    }
  else
    /* The call simplified to an expression that is
       not a valid GIMPLE RHS.  */
    return false;
}
847 /* Entry point to the propagation engine.
849 VISIT_STMT is called for every statement visited.
850 VISIT_PHI is called for every PHI node visited. */
852 void
853 ssa_propagate (ssa_prop_visit_stmt_fn visit_stmt,
854 ssa_prop_visit_phi_fn visit_phi)
856 ssa_prop_visit_stmt = visit_stmt;
857 ssa_prop_visit_phi = visit_phi;
859 ssa_prop_init ();
861 /* Iterate until the worklists are empty. */
862 while (!cfg_blocks_empty_p ()
863 || interesting_ssa_edges.length () > 0
864 || varying_ssa_edges.length () > 0)
866 if (!cfg_blocks_empty_p ())
868 /* Pull the next block to simulate off the worklist. */
869 basic_block dest_block = cfg_blocks_get ();
870 simulate_block (dest_block);
873 /* In order to move things to varying as quickly as
874 possible,process the VARYING_SSA_EDGES worklist first. */
875 process_ssa_edge_worklist (&varying_ssa_edges);
877 /* Now process the INTERESTING_SSA_EDGES worklist. */
878 process_ssa_edge_worklist (&interesting_ssa_edges);
881 ssa_prop_fini ();
885 /* Return true if STMT is of the form 'mem_ref = RHS', where 'mem_ref'
886 is a non-volatile pointer dereference, a structure reference or a
887 reference to a single _DECL. Ignore volatile memory references
888 because they are not interesting for the optimizers. */
890 bool
891 stmt_makes_single_store (gimple stmt)
893 tree lhs;
895 if (gimple_code (stmt) != GIMPLE_ASSIGN
896 && gimple_code (stmt) != GIMPLE_CALL)
897 return false;
899 if (!gimple_vdef (stmt))
900 return false;
902 lhs = gimple_get_lhs (stmt);
904 /* A call statement may have a null LHS. */
905 if (!lhs)
906 return false;
908 return (!TREE_THIS_VOLATILE (lhs)
909 && (DECL_P (lhs)
910 || REFERENCE_CLASS_P (lhs)));
/* Propagation statistics.  */
struct prop_stats_d
{
  /* Constants propagated into uses (see replace_uses_in).  */
  long num_const_prop;
  /* SSA-name copies propagated into uses.  */
  long num_copy_prop;
  /* Statements simplified by the pass-specific fold hook.  */
  long num_stmts_folded;
  /* Trivially dead statements removed — presumably incremented by code
     outside this view; confirm against substitute_and_fold.  */
  long num_dce;
};

static struct prop_stats_d prop_stats;
925 /* Replace USE references in statement STMT with the values stored in
926 PROP_VALUE. Return true if at least one reference was replaced. */
928 static bool
929 replace_uses_in (gimple stmt, ssa_prop_get_value_fn get_value)
931 bool replaced = false;
932 use_operand_p use;
933 ssa_op_iter iter;
935 FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
937 tree tuse = USE_FROM_PTR (use);
938 tree val = (*get_value) (tuse);
940 if (val == tuse || val == NULL_TREE)
941 continue;
943 if (gimple_code (stmt) == GIMPLE_ASM
944 && !may_propagate_copy_into_asm (tuse))
945 continue;
947 if (!may_propagate_copy (tuse, val))
948 continue;
950 if (TREE_CODE (val) != SSA_NAME)
951 prop_stats.num_const_prop++;
952 else
953 prop_stats.num_copy_prop++;
955 propagate_value (use, val);
957 replaced = true;
960 return replaced;
/* Replace propagated values into all the arguments for PHI using the
   values obtained from GET_VALUE.  */

static void
replace_phi_args_in (gimple phi, ssa_prop_get_value_fn get_value)
{
  size_t i;
  bool replaced = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Folding PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      /* Only SSA-name arguments can have a known lattice value.  */
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree val = (*get_value) (arg);

	  if (val && val != arg && may_propagate_copy (arg, val))
	    {
	      if (TREE_CODE (val) != SSA_NAME)
		prop_stats.num_const_prop++;
	      else
		prop_stats.num_copy_prop++;

	      propagate_value (PHI_ARG_DEF_PTR (phi, i), val);
	      replaced = true;

	      /* If we propagated a copy and this argument flows
		 through an abnormal edge, update the replacement
		 accordingly.  */
	      if (TREE_CODE (val) == SSA_NAME
		  && gimple_phi_arg_edge (phi, i)->flags & EDGE_ABNORMAL)
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
	    }
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (!replaced)
	fprintf (dump_file, "No folding possible\n");
      else
	{
	  fprintf (dump_file, "Folded into: ");
	  print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }
}
/* Dominator walker performing the final substitution and folding of
   propagated values into the IL; driven by substitute_and_fold.  */

class substitute_and_fold_dom_walker : public dom_walker
{
public:
    substitute_and_fold_dom_walker (cdi_direction direction,
				    ssa_prop_get_value_fn get_value_fn_,
				    ssa_prop_fold_stmt_fn fold_fn_,
				    bool do_dce_)
	: dom_walker (direction), get_value_fn (get_value_fn_),
	  fold_fn (fold_fn_), do_dce (do_dce_), something_changed (false)
    {
      stmts_to_remove.create (0);
    }
    ~substitute_and_fold_dom_walker () { stmts_to_remove.release (); }

    virtual void before_dom_children (basic_block);
    virtual void after_dom_children (basic_block) {}

    /* Callback returning the propagated value for an SSA name.  */
    ssa_prop_get_value_fn get_value_fn;
    /* Optional pass-specific statement folding callback.  */
    ssa_prop_fold_stmt_fn fold_fn;
    /* True if fully-replaceable statements may be queued for DCE.  */
    bool do_dce;
    /* Set when the walk changed any non-debug statement.  */
    bool something_changed;
    /* Statements whose result is fully known; removed by the caller.  */
    vec<gimple> stmts_to_remove;
};
/* Visit basic block BB during the dominator walk: propagate known
   values into its PHI nodes and statements, folding and queueing
   trivially dead statements for removal as we go.  */

void
substitute_and_fold_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator i;

  /* Propagate known values into PHI nodes.  */
  for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
    {
      gimple phi = gsi_stmt (i);
      tree res = gimple_phi_result (phi);
      if (virtual_operand_p (res))
	continue;
      /* If the whole PHI result has a known propagatable value, the
	 PHI itself is dead; queue it for removal instead of touching
	 its arguments.  */
      if (do_dce
	  && res && TREE_CODE (res) == SSA_NAME)
	{
	  tree sprime = get_value_fn (res);
	  if (sprime
	      && sprime != res
	      && may_propagate_copy (res, sprime))
	    {
	      stmts_to_remove.safe_push (phi);
	      continue;
	    }
	}
      replace_phi_args_in (phi, get_value_fn);
    }

  /* Propagate known values into stmts.  In some case it exposes
     more trivially deletable stmts to walk backward.  */
  for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
    {
      bool did_replace;
      gimple stmt = gsi_stmt (i);
      gimple old_stmt;
      enum gimple_code code = gimple_code (stmt);

      /* Ignore ASSERT_EXPRs.  They are used by VRP to generate
	 range information for names and they are discarded
	 afterwards.  */
      if (code == GIMPLE_ASSIGN
	  && TREE_CODE (gimple_assign_rhs1 (stmt)) == ASSERT_EXPR)
	continue;

      /* No point propagating into a stmt we have a value for we
	 can propagate into all uses.  Mark it for removal instead.  */
      tree lhs = gimple_get_lhs (stmt);
      if (do_dce
	  && lhs && TREE_CODE (lhs) == SSA_NAME)
	{
	  tree sprime = get_value_fn (lhs);
	  if (sprime
	      && sprime != lhs
	      && may_propagate_copy (lhs, sprime)
	      && !stmt_could_throw_p (stmt)
	      && !gimple_has_side_effects (stmt))
	    {
	      stmts_to_remove.safe_push (stmt);
	      continue;
	    }
	}

      /* Replace the statement with its folded version and mark it
	 folded.  */
      did_replace = false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Folding statement: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	}

      old_stmt = stmt;

      /* Some statements may be simplified using propagator
	 specific information.  Do this before propagating
	 into the stmt to not disturb pass specific information.  */
      if (fold_fn
	  && (*fold_fn)(&i))
	{
	  did_replace = true;
	  prop_stats.num_stmts_folded++;
	  stmt = gsi_stmt (i);
	  update_stmt (stmt);
	}

      /* Replace real uses in the statement.  */
      did_replace |= replace_uses_in (stmt, get_value_fn);

      /* If we made a replacement, fold the statement.  */
      if (did_replace)
	fold_stmt (&i);

      /* Now cleanup.  */
      if (did_replace)
	{
	  stmt = gsi_stmt (i);

	  /* If we cleaned up EH information from the statement,
	     remove EH edges.  */
	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
	    gimple_purge_dead_eh_edges (bb);

	  /* An ADDR_EXPR on the RHS may have become invariant (or
	     lost invariance) after propagation; recompute its flag.  */
	  if (is_gimple_assign (stmt)
	      && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		  == GIMPLE_SINGLE_RHS))
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);

	      if (TREE_CODE (rhs) == ADDR_EXPR)
		recompute_tree_invariant_for_addr_expr (rhs);
	    }

	  /* Determine what needs to be done to update the SSA form.  */
	  update_stmt (stmt);
	  if (!is_gimple_debug (stmt))
	    something_changed = true;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (did_replace)
	    {
	      fprintf (dump_file, "Folded into: ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }
	  else
	    fprintf (dump_file, "Not folded\n");
	}
    }
}
1179 /* Perform final substitution and folding of propagated values.
1181 PROP_VALUE[I] contains the single value that should be substituted
1182 at every use of SSA name N_I. If PROP_VALUE is NULL, no values are
1183 substituted.
1185 If FOLD_FN is non-NULL the function will be invoked on all statements
1186 before propagating values for pass specific simplification.
1188 DO_DCE is true if trivially dead stmts can be removed.
1190 If DO_DCE is true, the statements within a BB are walked from
1191 last to first element. Otherwise we scan from first to last element.
1193 Return TRUE when something changed. */
1195 bool
1196 substitute_and_fold (ssa_prop_get_value_fn get_value_fn,
1197 ssa_prop_fold_stmt_fn fold_fn,
1198 bool do_dce)
1200 gcc_assert (get_value_fn);
1202 if (dump_file && (dump_flags & TDF_DETAILS))
1203 fprintf (dump_file, "\nSubstituting values and folding statements\n\n");
1205 memset (&prop_stats, 0, sizeof (prop_stats));
1207 calculate_dominance_info (CDI_DOMINATORS);
1208 substitute_and_fold_dom_walker walker(CDI_DOMINATORS,
1209 get_value_fn, fold_fn, do_dce);
1210 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
1212 /* We cannot remove stmts during the BB walk, especially not release
1213 SSA names there as that destroys the lattice of our callers.
1214 Remove stmts in reverse order to make debug stmt creation possible. */
1215 while (!walker.stmts_to_remove.is_empty ())
1217 gimple stmt = walker.stmts_to_remove.pop ();
1218 if (dump_file && dump_flags & TDF_DETAILS)
1220 fprintf (dump_file, "Removing dead stmt ");
1221 print_gimple_stmt (dump_file, stmt, 0, 0);
1222 fprintf (dump_file, "\n");
1224 prop_stats.num_dce++;
1225 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1226 if (gimple_code (stmt) == GIMPLE_PHI)
1227 remove_phi_node (&gsi, true);
1228 else
1230 unlink_stmt_vdef (stmt);
1231 gsi_remove (&gsi, true);
1232 release_defs (stmt);
1236 statistics_counter_event (cfun, "Constants propagated",
1237 prop_stats.num_const_prop);
1238 statistics_counter_event (cfun, "Copies propagated",
1239 prop_stats.num_copy_prop);
1240 statistics_counter_event (cfun, "Statements folded",
1241 prop_stats.num_stmts_folded);
1242 statistics_counter_event (cfun, "Statements deleted",
1243 prop_stats.num_dce);
1245 return walker.something_changed;
1249 /* Return true if we may propagate ORIG into DEST, false otherwise. */
1251 bool
1252 may_propagate_copy (tree dest, tree orig)
1254 tree type_d = TREE_TYPE (dest);
1255 tree type_o = TREE_TYPE (orig);
1257 /* If ORIG flows in from an abnormal edge, it cannot be propagated. */
1258 if (TREE_CODE (orig) == SSA_NAME
1259 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig)
1260 /* If it is the default definition and an automatic variable then
1261 we can though and it is important that we do to avoid
1262 uninitialized regular copies. */
1263 && !(SSA_NAME_IS_DEFAULT_DEF (orig)
1264 && (SSA_NAME_VAR (orig) == NULL_TREE
1265 || TREE_CODE (SSA_NAME_VAR (orig)) == VAR_DECL)))
1266 return false;
1268 /* If DEST is an SSA_NAME that flows from an abnormal edge, then it
1269 cannot be replaced. */
1270 if (TREE_CODE (dest) == SSA_NAME
1271 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (dest))
1272 return false;
1274 /* Do not copy between types for which we *do* need a conversion. */
1275 if (!useless_type_conversion_p (type_d, type_o))
1276 return false;
1278 /* Generally propagating virtual operands is not ok as that may
1279 create overlapping life-ranges. */
1280 if (TREE_CODE (dest) == SSA_NAME && virtual_operand_p (dest))
1281 return false;
1283 /* Anything else is OK. */
1284 return true;
1287 /* Like may_propagate_copy, but use as the destination expression
1288 the principal expression (typically, the RHS) contained in
1289 statement DEST. This is more efficient when working with the
1290 gimple tuples representation. */
1292 bool
1293 may_propagate_copy_into_stmt (gimple dest, tree orig)
1295 tree type_d;
1296 tree type_o;
1298 /* If the statement is a switch or a single-rhs assignment,
1299 then the expression to be replaced by the propagation may
1300 be an SSA_NAME. Fortunately, there is an explicit tree
1301 for the expression, so we delegate to may_propagate_copy. */
1303 if (gimple_assign_single_p (dest))
1304 return may_propagate_copy (gimple_assign_rhs1 (dest), orig);
1305 else if (gimple_code (dest) == GIMPLE_SWITCH)
1306 return may_propagate_copy (gimple_switch_index (dest), orig);
1308 /* In other cases, the expression is not materialized, so there
1309 is no destination to pass to may_propagate_copy. On the other
1310 hand, the expression cannot be an SSA_NAME, so the analysis
1311 is much simpler. */
1313 if (TREE_CODE (orig) == SSA_NAME
1314 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
1315 return false;
1317 if (is_gimple_assign (dest))
1318 type_d = TREE_TYPE (gimple_assign_lhs (dest));
1319 else if (gimple_code (dest) == GIMPLE_COND)
1320 type_d = boolean_type_node;
1321 else if (is_gimple_call (dest)
1322 && gimple_call_lhs (dest) != NULL_TREE)
1323 type_d = TREE_TYPE (gimple_call_lhs (dest));
1324 else
1325 gcc_unreachable ();
1327 type_o = TREE_TYPE (orig);
1329 if (!useless_type_conversion_p (type_d, type_o))
1330 return false;
1332 return true;
1335 /* Similarly, but we know that we're propagating into an ASM_EXPR. */
1337 bool
1338 may_propagate_copy_into_asm (tree dest ATTRIBUTE_UNUSED)
1340 return true;
1344 /* Common code for propagate_value and replace_exp.
1346 Replace use operand OP_P with VAL. FOR_PROPAGATION indicates if the
1347 replacement is done to propagate a value or not. */
1349 static void
1350 replace_exp_1 (use_operand_p op_p, tree val,
1351 bool for_propagation ATTRIBUTE_UNUSED)
1353 #if defined ENABLE_CHECKING
1354 tree op = USE_FROM_PTR (op_p);
1356 gcc_assert (!(for_propagation
1357 && TREE_CODE (op) == SSA_NAME
1358 && TREE_CODE (val) == SSA_NAME
1359 && !may_propagate_copy (op, val)));
1360 #endif
1362 if (TREE_CODE (val) == SSA_NAME)
1363 SET_USE (op_p, val);
1364 else
1365 SET_USE (op_p, unshare_expr (val));
1369 /* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
1370 into the operand pointed to by OP_P.
1372 Use this version for const/copy propagation as it will perform additional
1373 checks to ensure validity of the const/copy propagation. */
1375 void
1376 propagate_value (use_operand_p op_p, tree val)
1378 replace_exp_1 (op_p, val, true);
1381 /* Replace *OP_P with value VAL (assumed to be a constant or another SSA_NAME).
1383 Use this version when not const/copy propagating values. For example,
1384 PRE uses this version when building expressions as they would appear
1385 in specific blocks taking into account actions of PHI nodes.
1387 The statement in which an expression has been replaced should be
1388 folded using fold_stmt_inplace. */
1390 void
1391 replace_exp (use_operand_p op_p, tree val)
1393 replace_exp_1 (op_p, val, false);
1397 /* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
1398 into the tree pointed to by OP_P.
1400 Use this version for const/copy propagation when SSA operands are not
1401 available. It will perform the additional checks to ensure validity of
1402 the const/copy propagation, but will not update any operand information.
1403 Be sure to mark the stmt as modified. */
1405 void
1406 propagate_tree_value (tree *op_p, tree val)
1408 if (TREE_CODE (val) == SSA_NAME)
1409 *op_p = val;
1410 else
1411 *op_p = unshare_expr (val);
1415 /* Like propagate_tree_value, but use as the operand to replace
1416 the principal expression (typically, the RHS) contained in the
1417 statement referenced by iterator GSI. Note that it is not
1418 always possible to update the statement in-place, so a new
1419 statement may be created to replace the original. */
1421 void
1422 propagate_tree_value_into_stmt (gimple_stmt_iterator *gsi, tree val)
1424 gimple stmt = gsi_stmt (*gsi);
1426 if (is_gimple_assign (stmt))
1428 tree expr = NULL_TREE;
1429 if (gimple_assign_single_p (stmt))
1430 expr = gimple_assign_rhs1 (stmt);
1431 propagate_tree_value (&expr, val);
1432 gimple_assign_set_rhs_from_tree (gsi, expr);
1434 else if (gimple_code (stmt) == GIMPLE_COND)
1436 tree lhs = NULL_TREE;
1437 tree rhs = build_zero_cst (TREE_TYPE (val));
1438 propagate_tree_value (&lhs, val);
1439 gimple_cond_set_code (stmt, NE_EXPR);
1440 gimple_cond_set_lhs (stmt, lhs);
1441 gimple_cond_set_rhs (stmt, rhs);
1443 else if (is_gimple_call (stmt)
1444 && gimple_call_lhs (stmt) != NULL_TREE)
1446 tree expr = NULL_TREE;
1447 bool res;
1448 propagate_tree_value (&expr, val);
1449 res = update_call_from_tree (gsi, expr);
1450 gcc_assert (res);
1452 else if (gimple_code (stmt) == GIMPLE_SWITCH)
1453 propagate_tree_value (gimple_switch_index_ptr (stmt), val);
1454 else
1455 gcc_unreachable ();