/* Source: gcc/tree-ssa-propagate.c (official-gcc.git,
   blob c38bb281c6893ce6e41760bfaea93a6d04d3a1c0).  */
1 /* Generic SSA value propagation engine.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "alias.h"
26 #include "symtab.h"
27 #include "tree.h"
28 #include "fold-const.h"
29 #include "flags.h"
30 #include "tm_p.h"
31 #include "predict.h"
32 #include "hard-reg-set.h"
33 #include "function.h"
34 #include "dominance.h"
35 #include "cfg.h"
36 #include "basic-block.h"
37 #include "gimple-pretty-print.h"
38 #include "dumpfile.h"
39 #include "bitmap.h"
40 #include "sbitmap.h"
41 #include "tree-ssa-alias.h"
42 #include "internal-fn.h"
43 #include "gimple-fold.h"
44 #include "tree-eh.h"
45 #include "gimple-expr.h"
46 #include "gimple.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "gimple-ssa.h"
50 #include "tree-cfg.h"
51 #include "tree-phinodes.h"
52 #include "ssa-iterators.h"
53 #include "stringpool.h"
54 #include "tree-ssanames.h"
55 #include "tree-ssa.h"
56 #include "tree-ssa-propagate.h"
57 #include "langhooks.h"
58 #include "value-prof.h"
59 #include "domwalk.h"
60 #include "cfgloop.h"
61 #include "tree-cfgcleanup.h"
63 /* This file implements a generic value propagation engine based on
64 the same propagation used by the SSA-CCP algorithm [1].
66 Propagation is performed by simulating the execution of every
67 statement that produces the value being propagated. Simulation
68 proceeds as follows:
70 1- Initially, all edges of the CFG are marked not executable and
71 the CFG worklist is seeded with all the statements in the entry
72 basic block (block 0).
74 2- Every statement S is simulated with a call to the call-back
75 function SSA_PROP_VISIT_STMT. This evaluation may produce 3
76 results:
78 SSA_PROP_NOT_INTERESTING: Statement S produces nothing of
79 interest and does not affect any of the work lists.
81 SSA_PROP_VARYING: The value produced by S cannot be determined
82 at compile time. Further simulation of S is not required.
83 If S is a conditional jump, all the outgoing edges for the
84 block are considered executable and added to the work
85 list.
87 SSA_PROP_INTERESTING: S produces a value that can be computed
88 at compile time. Its result can be propagated into the
89 statements that feed from S. Furthermore, if S is a
90 conditional jump, only the edge known to be taken is added
91 to the work list. Edges that are known not to execute are
92 never simulated.
94 3- PHI nodes are simulated with a call to SSA_PROP_VISIT_PHI. The
95 return value from SSA_PROP_VISIT_PHI has the same semantics as
96 described in #2.
98 4- Three work lists are kept. Statements are only added to these
99 lists if they produce one of SSA_PROP_INTERESTING or
100 SSA_PROP_VARYING.
102 CFG_BLOCKS contains the list of blocks to be simulated.
103 Blocks are added to this list if their incoming edges are
104 found executable.
106 VARYING_SSA_EDGES contains the list of statements that feed
107 from statements that produce an SSA_PROP_VARYING result.
108 These are simulated first to speed up processing.
110 INTERESTING_SSA_EDGES contains the list of statements that
111 feed from statements that produce an SSA_PROP_INTERESTING
112 result.
114 5- Simulation terminates when all three work lists are drained.
116 Before calling ssa_propagate, it is important to clear
117 prop_simulate_again_p for all the statements in the program that
118 should be simulated. This initialization allows an implementation
119 to specify which statements should never be simulated.
121 It is also important to compute def-use information before calling
122 ssa_propagate.
124 References:
126 [1] Constant propagation with conditional branches,
127 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
129 [2] Building an Optimizing Compiler,
130 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
132 [3] Advanced Compiler Design and Implementation,
133 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
135 /* Function pointers used to parameterize the propagation engine. */
136 static ssa_prop_visit_stmt_fn ssa_prop_visit_stmt;
137 static ssa_prop_visit_phi_fn ssa_prop_visit_phi;
139 /* Keep track of statements that have been added to one of the SSA
140 edges worklists. This flag is used to avoid visiting statements
141 unnecessarily when draining an SSA edge worklist. If while
142 simulating a basic block, we find a statement with
143 STMT_IN_SSA_EDGE_WORKLIST set, we clear it to prevent SSA edge
144 processing from visiting it again.
146 NOTE: users of the propagation engine are not allowed to use
147 the GF_PLF_1 flag. */
148 #define STMT_IN_SSA_EDGE_WORKLIST GF_PLF_1
150 /* A bitmap to keep track of executable blocks in the CFG. */
151 static sbitmap executable_blocks;
153 /* Array of control flow edges on the worklist. */
154 static vec<basic_block> cfg_blocks;
156 static unsigned int cfg_blocks_num = 0;
157 static int cfg_blocks_tail;
158 static int cfg_blocks_head;
160 static sbitmap bb_in_list;
162 /* Worklist of SSA edges which will need reexamination as their
163 definition has changed. SSA edges are def-use edges in the SSA
164 web. For each D-U edge, we store the target statement or PHI node
165 U. */
166 static vec<gimple> interesting_ssa_edges;
168 /* Identical to INTERESTING_SSA_EDGES. For performance reasons, the
169 list of SSA edges is split into two. One contains all SSA edges
170 who need to be reexamined because their lattice value changed to
171 varying (this worklist), and the other contains all other SSA edges
172 to be reexamined (INTERESTING_SSA_EDGES).
174 Since most values in the program are VARYING, the ideal situation
175 is to move them to that lattice value as quickly as possible.
176 Thus, it doesn't make sense to process any other type of lattice
177 value until all VARYING values are propagated fully, which is one
178 thing using the VARYING worklist achieves. In addition, if we
179 don't use a separate worklist for VARYING edges, we end up with
180 situations where lattice values move from
181 UNDEFINED->INTERESTING->VARYING instead of UNDEFINED->VARYING. */
182 static vec<gimple> varying_ssa_edges;
185 /* Return true if the block worklist empty. */
187 static inline bool
188 cfg_blocks_empty_p (void)
190 return (cfg_blocks_num == 0);
194 /* Add a basic block to the worklist. The block must not be already
195 in the worklist, and it must not be the ENTRY or EXIT block. */
197 static void
198 cfg_blocks_add (basic_block bb)
200 bool head = false;
202 gcc_assert (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
203 && bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
204 gcc_assert (!bitmap_bit_p (bb_in_list, bb->index));
206 if (cfg_blocks_empty_p ())
208 cfg_blocks_tail = cfg_blocks_head = 0;
209 cfg_blocks_num = 1;
211 else
213 cfg_blocks_num++;
214 if (cfg_blocks_num > cfg_blocks.length ())
216 /* We have to grow the array now. Adjust to queue to occupy
217 the full space of the original array. We do not need to
218 initialize the newly allocated portion of the array
219 because we keep track of CFG_BLOCKS_HEAD and
220 CFG_BLOCKS_HEAD. */
221 cfg_blocks_tail = cfg_blocks.length ();
222 cfg_blocks_head = 0;
223 cfg_blocks.safe_grow (2 * cfg_blocks_tail);
225 /* Minor optimization: we prefer to see blocks with more
226 predecessors later, because there is more of a chance that
227 the incoming edges will be executable. */
228 else if (EDGE_COUNT (bb->preds)
229 >= EDGE_COUNT (cfg_blocks[cfg_blocks_head]->preds))
230 cfg_blocks_tail = ((cfg_blocks_tail + 1) % cfg_blocks.length ());
231 else
233 if (cfg_blocks_head == 0)
234 cfg_blocks_head = cfg_blocks.length ();
235 --cfg_blocks_head;
236 head = true;
240 cfg_blocks[head ? cfg_blocks_head : cfg_blocks_tail] = bb;
241 bitmap_set_bit (bb_in_list, bb->index);
245 /* Remove a block from the worklist. */
247 static basic_block
248 cfg_blocks_get (void)
250 basic_block bb;
252 bb = cfg_blocks[cfg_blocks_head];
254 gcc_assert (!cfg_blocks_empty_p ());
255 gcc_assert (bb);
257 cfg_blocks_head = ((cfg_blocks_head + 1) % cfg_blocks.length ());
258 --cfg_blocks_num;
259 bitmap_clear_bit (bb_in_list, bb->index);
261 return bb;
265 /* We have just defined a new value for VAR. If IS_VARYING is true,
266 add all immediate uses of VAR to VARYING_SSA_EDGES, otherwise add
267 them to INTERESTING_SSA_EDGES. */
269 static void
270 add_ssa_edge (tree var, bool is_varying)
272 imm_use_iterator iter;
273 use_operand_p use_p;
275 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
277 gimple use_stmt = USE_STMT (use_p);
279 if (prop_simulate_again_p (use_stmt)
280 && !gimple_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST))
282 gimple_set_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST, true);
283 if (is_varying)
284 varying_ssa_edges.safe_push (use_stmt);
285 else
286 interesting_ssa_edges.safe_push (use_stmt);
292 /* Add edge E to the control flow worklist. */
294 static void
295 add_control_edge (edge e)
297 basic_block bb = e->dest;
298 if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
299 return;
301 /* If the edge had already been executed, skip it. */
302 if (e->flags & EDGE_EXECUTABLE)
303 return;
305 e->flags |= EDGE_EXECUTABLE;
307 /* If the block is already in the list, we're done. */
308 if (bitmap_bit_p (bb_in_list, bb->index))
309 return;
311 cfg_blocks_add (bb);
313 if (dump_file && (dump_flags & TDF_DETAILS))
314 fprintf (dump_file, "\nAdding Destination of edge (%d -> %d) to worklist\n",
315 e->src->index, e->dest->index);
319 /* Simulate the execution of STMT and update the work lists accordingly. */
321 static void
322 simulate_stmt (gimple stmt)
324 enum ssa_prop_result val = SSA_PROP_NOT_INTERESTING;
325 edge taken_edge = NULL;
326 tree output_name = NULL_TREE;
328 /* Don't bother visiting statements that are already
329 considered varying by the propagator. */
330 if (!prop_simulate_again_p (stmt))
331 return;
333 if (gimple_code (stmt) == GIMPLE_PHI)
335 val = ssa_prop_visit_phi (as_a <gphi *> (stmt));
336 output_name = gimple_phi_result (stmt);
338 else
339 val = ssa_prop_visit_stmt (stmt, &taken_edge, &output_name);
341 if (val == SSA_PROP_VARYING)
343 prop_set_simulate_again (stmt, false);
345 /* If the statement produced a new varying value, add the SSA
346 edges coming out of OUTPUT_NAME. */
347 if (output_name)
348 add_ssa_edge (output_name, true);
350 /* If STMT transfers control out of its basic block, add
351 all outgoing edges to the work list. */
352 if (stmt_ends_bb_p (stmt))
354 edge e;
355 edge_iterator ei;
356 basic_block bb = gimple_bb (stmt);
357 FOR_EACH_EDGE (e, ei, bb->succs)
358 add_control_edge (e);
360 return;
362 else if (val == SSA_PROP_INTERESTING)
364 /* If the statement produced new value, add the SSA edges coming
365 out of OUTPUT_NAME. */
366 if (output_name)
367 add_ssa_edge (output_name, false);
369 /* If we know which edge is going to be taken out of this block,
370 add it to the CFG work list. */
371 if (taken_edge)
372 add_control_edge (taken_edge);
375 /* If there are no SSA uses on the stmt whose defs are simulated
376 again then this stmt will be never visited again. */
377 bool has_simulate_again_uses = false;
378 use_operand_p use_p;
379 ssa_op_iter iter;
380 if (gimple_code (stmt) == GIMPLE_PHI)
382 edge_iterator ei;
383 edge e;
384 tree arg;
385 FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->preds)
386 if (!(e->flags & EDGE_EXECUTABLE)
387 || ((arg = PHI_ARG_DEF_FROM_EDGE (stmt, e))
388 && TREE_CODE (arg) == SSA_NAME
389 && !SSA_NAME_IS_DEFAULT_DEF (arg)
390 && prop_simulate_again_p (SSA_NAME_DEF_STMT (arg))))
392 has_simulate_again_uses = true;
393 break;
396 else
397 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
399 gimple def_stmt = SSA_NAME_DEF_STMT (USE_FROM_PTR (use_p));
400 if (!gimple_nop_p (def_stmt)
401 && prop_simulate_again_p (def_stmt))
403 has_simulate_again_uses = true;
404 break;
407 if (!has_simulate_again_uses)
409 if (dump_file && (dump_flags & TDF_DETAILS))
410 fprintf (dump_file, "marking stmt to be not simulated again\n");
411 prop_set_simulate_again (stmt, false);
415 /* Process an SSA edge worklist. WORKLIST is the SSA edge worklist to
416 drain. This pops statements off the given WORKLIST and processes
417 them until there are no more statements on WORKLIST.
418 We take a pointer to WORKLIST because it may be reallocated when an
419 SSA edge is added to it in simulate_stmt. */
421 static void
422 process_ssa_edge_worklist (vec<gimple> *worklist)
424 /* Drain the entire worklist. */
425 while (worklist->length () > 0)
427 basic_block bb;
429 /* Pull the statement to simulate off the worklist. */
430 gimple stmt = worklist->pop ();
432 /* If this statement was already visited by simulate_block, then
433 we don't need to visit it again here. */
434 if (!gimple_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST))
435 continue;
437 /* STMT is no longer in a worklist. */
438 gimple_set_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST, false);
440 if (dump_file && (dump_flags & TDF_DETAILS))
442 fprintf (dump_file, "\nSimulating statement (from ssa_edges): ");
443 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
446 bb = gimple_bb (stmt);
448 /* PHI nodes are always visited, regardless of whether or not
449 the destination block is executable. Otherwise, visit the
450 statement only if its block is marked executable. */
451 if (gimple_code (stmt) == GIMPLE_PHI
452 || bitmap_bit_p (executable_blocks, bb->index))
453 simulate_stmt (stmt);
458 /* Simulate the execution of BLOCK. Evaluate the statement associated
459 with each variable reference inside the block. */
461 static void
462 simulate_block (basic_block block)
464 gimple_stmt_iterator gsi;
466 /* There is nothing to do for the exit block. */
467 if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
468 return;
470 if (dump_file && (dump_flags & TDF_DETAILS))
471 fprintf (dump_file, "\nSimulating block %d\n", block->index);
473 /* Always simulate PHI nodes, even if we have simulated this block
474 before. */
475 for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
476 simulate_stmt (gsi_stmt (gsi));
478 /* If this is the first time we've simulated this block, then we
479 must simulate each of its statements. */
480 if (!bitmap_bit_p (executable_blocks, block->index))
482 gimple_stmt_iterator j;
483 unsigned int normal_edge_count;
484 edge e, normal_edge;
485 edge_iterator ei;
487 /* Note that we have simulated this block. */
488 bitmap_set_bit (executable_blocks, block->index);
490 for (j = gsi_start_bb (block); !gsi_end_p (j); gsi_next (&j))
492 gimple stmt = gsi_stmt (j);
494 /* If this statement is already in the worklist then
495 "cancel" it. The reevaluation implied by the worklist
496 entry will produce the same value we generate here and
497 thus reevaluating it again from the worklist is
498 pointless. */
499 if (gimple_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST))
500 gimple_set_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST, false);
502 simulate_stmt (stmt);
505 /* We can not predict when abnormal and EH edges will be executed, so
506 once a block is considered executable, we consider any
507 outgoing abnormal edges as executable.
509 TODO: This is not exactly true. Simplifying statement might
510 prove it non-throwing and also computed goto can be handled
511 when destination is known.
513 At the same time, if this block has only one successor that is
514 reached by non-abnormal edges, then add that successor to the
515 worklist. */
516 normal_edge_count = 0;
517 normal_edge = NULL;
518 FOR_EACH_EDGE (e, ei, block->succs)
520 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
521 add_control_edge (e);
522 else
524 normal_edge_count++;
525 normal_edge = e;
529 if (normal_edge_count == 1)
530 add_control_edge (normal_edge);
535 /* Initialize local data structures and work lists. */
537 static void
538 ssa_prop_init (void)
540 edge e;
541 edge_iterator ei;
542 basic_block bb;
544 /* Worklists of SSA edges. */
545 interesting_ssa_edges.create (20);
546 varying_ssa_edges.create (20);
548 executable_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
549 bitmap_clear (executable_blocks);
551 bb_in_list = sbitmap_alloc (last_basic_block_for_fn (cfun));
552 bitmap_clear (bb_in_list);
554 if (dump_file && (dump_flags & TDF_DETAILS))
555 dump_immediate_uses (dump_file);
557 cfg_blocks.create (20);
558 cfg_blocks.safe_grow_cleared (20);
560 /* Initially assume that every edge in the CFG is not executable.
561 (including the edges coming out of the entry block). */
562 FOR_ALL_BB_FN (bb, cfun)
564 gimple_stmt_iterator si;
566 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
567 gimple_set_plf (gsi_stmt (si), STMT_IN_SSA_EDGE_WORKLIST, false);
569 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
570 gimple_set_plf (gsi_stmt (si), STMT_IN_SSA_EDGE_WORKLIST, false);
572 FOR_EACH_EDGE (e, ei, bb->succs)
573 e->flags &= ~EDGE_EXECUTABLE;
576 /* Seed the algorithm by adding the successors of the entry block to the
577 edge worklist. */
578 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
579 add_control_edge (e);
583 /* Free allocated storage. */
585 static void
586 ssa_prop_fini (void)
588 interesting_ssa_edges.release ();
589 varying_ssa_edges.release ();
590 cfg_blocks.release ();
591 sbitmap_free (bb_in_list);
592 sbitmap_free (executable_blocks);
596 /* Return true if EXPR is an acceptable right-hand-side for a
597 GIMPLE assignment. We validate the entire tree, not just
598 the root node, thus catching expressions that embed complex
599 operands that are not permitted in GIMPLE. This function
600 is needed because the folding routines in fold-const.c
601 may return such expressions in some cases, e.g., an array
602 access with an embedded index addition. It may make more
603 sense to have folding routines that are sensitive to the
604 constraints on GIMPLE operands, rather than abandoning any
605 any attempt to fold if the usual folding turns out to be too
606 aggressive. */
608 bool
609 valid_gimple_rhs_p (tree expr)
611 enum tree_code code = TREE_CODE (expr);
613 switch (TREE_CODE_CLASS (code))
615 case tcc_declaration:
616 if (!is_gimple_variable (expr))
617 return false;
618 break;
620 case tcc_constant:
621 /* All constants are ok. */
622 break;
624 case tcc_comparison:
625 /* GENERIC allows comparisons with non-boolean types, reject
626 those for GIMPLE. Let vector-typed comparisons pass - rules
627 for GENERIC and GIMPLE are the same here. */
628 if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
629 && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
630 || TYPE_PRECISION (TREE_TYPE (expr)) == 1))
631 && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
632 return false;
634 /* Fallthru. */
635 case tcc_binary:
636 if (!is_gimple_val (TREE_OPERAND (expr, 0))
637 || !is_gimple_val (TREE_OPERAND (expr, 1)))
638 return false;
639 break;
641 case tcc_unary:
642 if (!is_gimple_val (TREE_OPERAND (expr, 0)))
643 return false;
644 break;
646 case tcc_expression:
647 switch (code)
649 case ADDR_EXPR:
651 tree t;
652 if (is_gimple_min_invariant (expr))
653 return true;
654 t = TREE_OPERAND (expr, 0);
655 while (handled_component_p (t))
657 /* ??? More checks needed, see the GIMPLE verifier. */
658 if ((TREE_CODE (t) == ARRAY_REF
659 || TREE_CODE (t) == ARRAY_RANGE_REF)
660 && !is_gimple_val (TREE_OPERAND (t, 1)))
661 return false;
662 t = TREE_OPERAND (t, 0);
664 if (!is_gimple_id (t))
665 return false;
667 break;
669 default:
670 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
672 if (((code == VEC_COND_EXPR || code == COND_EXPR)
673 ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
674 : !is_gimple_val (TREE_OPERAND (expr, 0)))
675 || !is_gimple_val (TREE_OPERAND (expr, 1))
676 || !is_gimple_val (TREE_OPERAND (expr, 2)))
677 return false;
678 break;
680 return false;
682 break;
684 case tcc_vl_exp:
685 return false;
687 case tcc_exceptional:
688 if (code == CONSTRUCTOR)
690 unsigned i;
691 tree elt;
692 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
693 if (!is_gimple_val (elt))
694 return false;
695 return true;
697 if (code != SSA_NAME)
698 return false;
699 break;
701 case tcc_reference:
702 if (code == BIT_FIELD_REF)
703 return is_gimple_val (TREE_OPERAND (expr, 0));
704 return false;
706 default:
707 return false;
710 return true;
714 /* Return true if EXPR is a CALL_EXPR suitable for representation
715 as a single GIMPLE_CALL statement. If the arguments require
716 further gimplification, return false. */
718 static bool
719 valid_gimple_call_p (tree expr)
721 unsigned i, nargs;
723 if (TREE_CODE (expr) != CALL_EXPR)
724 return false;
726 nargs = call_expr_nargs (expr);
727 for (i = 0; i < nargs; i++)
729 tree arg = CALL_EXPR_ARG (expr, i);
730 if (is_gimple_reg_type (TREE_TYPE (arg)))
732 if (!is_gimple_val (arg))
733 return false;
735 else
736 if (!is_gimple_lvalue (arg))
737 return false;
740 return true;
744 /* Make SSA names defined by OLD_STMT point to NEW_STMT
745 as their defining statement. */
747 void
748 move_ssa_defining_stmt_for_defs (gimple new_stmt, gimple old_stmt)
750 tree var;
751 ssa_op_iter iter;
753 if (gimple_in_ssa_p (cfun))
755 /* Make defined SSA_NAMEs point to the new
756 statement as their definition. */
757 FOR_EACH_SSA_TREE_OPERAND (var, old_stmt, iter, SSA_OP_ALL_DEFS)
759 if (TREE_CODE (var) == SSA_NAME)
760 SSA_NAME_DEF_STMT (var) = new_stmt;
765 /* Helper function for update_gimple_call and update_call_from_tree.
766 A GIMPLE_CALL STMT is being replaced with GIMPLE_CALL NEW_STMT. */
768 static void
769 finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple new_stmt,
770 gimple stmt)
772 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
773 move_ssa_defining_stmt_for_defs (new_stmt, stmt);
774 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
775 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
776 gimple_set_location (new_stmt, gimple_location (stmt));
777 if (gimple_block (new_stmt) == NULL_TREE)
778 gimple_set_block (new_stmt, gimple_block (stmt));
779 gsi_replace (si_p, new_stmt, false);
782 /* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
783 with number of arguments NARGS, where the arguments in GIMPLE form
784 follow NARGS argument. */
786 bool
787 update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
789 va_list ap;
790 gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));
792 gcc_assert (is_gimple_call (stmt));
793 va_start (ap, nargs);
794 new_stmt = gimple_build_call_valist (fn, nargs, ap);
795 finish_update_gimple_call (si_p, new_stmt, stmt);
796 va_end (ap);
797 return true;
800 /* Update a GIMPLE_CALL statement at iterator *SI_P to reflect the
801 value of EXPR, which is expected to be the result of folding the
802 call. This can only be done if EXPR is a CALL_EXPR with valid
803 GIMPLE operands as arguments, or if it is a suitable RHS expression
804 for a GIMPLE_ASSIGN. More complex expressions will require
805 gimplification, which will introduce additional statements. In this
806 event, no update is performed, and the function returns false.
807 Note that we cannot mutate a GIMPLE_CALL in-place, so we always
808 replace the statement at *SI_P with an entirely new statement.
809 The new statement need not be a call, e.g., if the original call
810 folded to a constant. */
812 bool
813 update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
815 gimple stmt = gsi_stmt (*si_p);
817 if (valid_gimple_call_p (expr))
819 /* The call has simplified to another call. */
820 tree fn = CALL_EXPR_FN (expr);
821 unsigned i;
822 unsigned nargs = call_expr_nargs (expr);
823 vec<tree> args = vNULL;
824 gcall *new_stmt;
826 if (nargs > 0)
828 args.create (nargs);
829 args.safe_grow_cleared (nargs);
831 for (i = 0; i < nargs; i++)
832 args[i] = CALL_EXPR_ARG (expr, i);
835 new_stmt = gimple_build_call_vec (fn, args);
836 finish_update_gimple_call (si_p, new_stmt, stmt);
837 args.release ();
839 return true;
841 else if (valid_gimple_rhs_p (expr))
843 tree lhs = gimple_call_lhs (stmt);
844 gimple new_stmt;
846 /* The call has simplified to an expression
847 that cannot be represented as a GIMPLE_CALL. */
848 if (lhs)
850 /* A value is expected.
851 Introduce a new GIMPLE_ASSIGN statement. */
852 STRIP_USELESS_TYPE_CONVERSION (expr);
853 new_stmt = gimple_build_assign (lhs, expr);
854 move_ssa_defining_stmt_for_defs (new_stmt, stmt);
855 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
856 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
858 else if (!TREE_SIDE_EFFECTS (expr))
860 /* No value is expected, and EXPR has no effect.
861 Replace it with an empty statement. */
862 new_stmt = gimple_build_nop ();
863 if (gimple_in_ssa_p (cfun))
865 unlink_stmt_vdef (stmt);
866 release_defs (stmt);
869 else
871 /* No value is expected, but EXPR has an effect,
872 e.g., it could be a reference to a volatile
873 variable. Create an assignment statement
874 with a dummy (unused) lhs variable. */
875 STRIP_USELESS_TYPE_CONVERSION (expr);
876 if (gimple_in_ssa_p (cfun))
877 lhs = make_ssa_name (TREE_TYPE (expr));
878 else
879 lhs = create_tmp_var (TREE_TYPE (expr));
880 new_stmt = gimple_build_assign (lhs, expr);
881 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
882 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
883 move_ssa_defining_stmt_for_defs (new_stmt, stmt);
885 gimple_set_location (new_stmt, gimple_location (stmt));
886 gsi_replace (si_p, new_stmt, false);
887 return true;
889 else
890 /* The call simplified to an expression that is
891 not a valid GIMPLE RHS. */
892 return false;
896 /* Entry point to the propagation engine.
898 VISIT_STMT is called for every statement visited.
899 VISIT_PHI is called for every PHI node visited. */
901 void
902 ssa_propagate (ssa_prop_visit_stmt_fn visit_stmt,
903 ssa_prop_visit_phi_fn visit_phi)
905 ssa_prop_visit_stmt = visit_stmt;
906 ssa_prop_visit_phi = visit_phi;
908 ssa_prop_init ();
910 /* Iterate until the worklists are empty. */
911 while (!cfg_blocks_empty_p ()
912 || interesting_ssa_edges.length () > 0
913 || varying_ssa_edges.length () > 0)
915 if (!cfg_blocks_empty_p ())
917 /* Pull the next block to simulate off the worklist. */
918 basic_block dest_block = cfg_blocks_get ();
919 simulate_block (dest_block);
922 /* In order to move things to varying as quickly as
923 possible,process the VARYING_SSA_EDGES worklist first. */
924 process_ssa_edge_worklist (&varying_ssa_edges);
926 /* Now process the INTERESTING_SSA_EDGES worklist. */
927 process_ssa_edge_worklist (&interesting_ssa_edges);
930 ssa_prop_fini ();
934 /* Return true if STMT is of the form 'mem_ref = RHS', where 'mem_ref'
935 is a non-volatile pointer dereference, a structure reference or a
936 reference to a single _DECL. Ignore volatile memory references
937 because they are not interesting for the optimizers. */
939 bool
940 stmt_makes_single_store (gimple stmt)
942 tree lhs;
944 if (gimple_code (stmt) != GIMPLE_ASSIGN
945 && gimple_code (stmt) != GIMPLE_CALL)
946 return false;
948 if (!gimple_vdef (stmt))
949 return false;
951 lhs = gimple_get_lhs (stmt);
953 /* A call statement may have a null LHS. */
954 if (!lhs)
955 return false;
957 return (!TREE_THIS_VOLATILE (lhs)
958 && (DECL_P (lhs)
959 || REFERENCE_CLASS_P (lhs)));
/* Propagation statistics gathered while substituting values and
   folding statements.  */
struct prop_stats_d
{
  long num_const_prop;		/* Constants propagated into uses.  */
  long num_copy_prop;		/* SSA-name copies propagated.  */
  long num_stmts_folded;	/* Statements folded in place.  */
  long num_dce;			/* Statements removed as dead.  */
};

static struct prop_stats_d prop_stats;
974 /* Replace USE references in statement STMT with the values stored in
975 PROP_VALUE. Return true if at least one reference was replaced. */
977 static bool
978 replace_uses_in (gimple stmt, ssa_prop_get_value_fn get_value)
980 bool replaced = false;
981 use_operand_p use;
982 ssa_op_iter iter;
984 FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
986 tree tuse = USE_FROM_PTR (use);
987 tree val = (*get_value) (tuse);
989 if (val == tuse || val == NULL_TREE)
990 continue;
992 if (gimple_code (stmt) == GIMPLE_ASM
993 && !may_propagate_copy_into_asm (tuse))
994 continue;
996 if (!may_propagate_copy (tuse, val))
997 continue;
999 if (TREE_CODE (val) != SSA_NAME)
1000 prop_stats.num_const_prop++;
1001 else
1002 prop_stats.num_copy_prop++;
1004 propagate_value (use, val);
1006 replaced = true;
1009 return replaced;
1013 /* Replace propagated values into all the arguments for PHI using the
1014 values from PROP_VALUE. */
1016 static bool
1017 replace_phi_args_in (gphi *phi, ssa_prop_get_value_fn get_value)
1019 size_t i;
1020 bool replaced = false;
1022 if (dump_file && (dump_flags & TDF_DETAILS))
1024 fprintf (dump_file, "Folding PHI node: ");
1025 print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
1028 basic_block bb = gimple_bb (phi);
1029 for (i = 0; i < gimple_phi_num_args (phi); i++)
1031 tree arg = gimple_phi_arg_def (phi, i);
1033 if (TREE_CODE (arg) == SSA_NAME)
1035 tree val = (*get_value) (arg);
1037 if (val && val != arg && may_propagate_copy (arg, val))
1039 edge e = gimple_phi_arg_edge (phi, i);
1041 /* Avoid propagating constants into loop latch edge
1042 PHI arguments as this makes coalescing the copy
1043 across this edge impossible. If the argument is
1044 defined by an assert - otherwise the stmt will
1045 get removed without replacing its uses. */
1046 if (TREE_CODE (val) != SSA_NAME
1047 && bb->loop_father->header == bb
1048 && dominated_by_p (CDI_DOMINATORS, e->src, bb)
1049 && is_gimple_assign (SSA_NAME_DEF_STMT (arg))
1050 && (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (arg))
1051 == ASSERT_EXPR))
1052 continue;
1054 if (TREE_CODE (val) != SSA_NAME)
1055 prop_stats.num_const_prop++;
1056 else
1057 prop_stats.num_copy_prop++;
1059 propagate_value (PHI_ARG_DEF_PTR (phi, i), val);
1060 replaced = true;
1062 /* If we propagated a copy and this argument flows
1063 through an abnormal edge, update the replacement
1064 accordingly. */
1065 if (TREE_CODE (val) == SSA_NAME
1066 && e->flags & EDGE_ABNORMAL
1067 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1069 /* This can only occur for virtual operands, since
1070 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1071 would prevent replacement. */
1072 gcc_checking_assert (virtual_operand_p (val));
1073 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1079 if (dump_file && (dump_flags & TDF_DETAILS))
1081 if (!replaced)
1082 fprintf (dump_file, "No folding possible\n");
1083 else
1085 fprintf (dump_file, "Folded into: ");
1086 print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
1087 fprintf (dump_file, "\n");
1091 return replaced;
/* Dominator walk performing the final substitution and folding for
   substitute_and_fold.  Statements that become trivially dead, and calls
   that turn into noreturn calls, cannot safely be removed or fixed up
   during the walk itself (that would invalidate the caller's lattice or
   require splitting blocks), so they are queued in the vectors below and
   processed after the walk completes.  */

class substitute_and_fold_dom_walker : public dom_walker
{
public:
    substitute_and_fold_dom_walker (cdi_direction direction,
				    ssa_prop_get_value_fn get_value_fn_,
				    ssa_prop_fold_stmt_fn fold_fn_,
				    bool do_dce_)
	: dom_walker (direction), get_value_fn (get_value_fn_),
	  fold_fn (fold_fn_), do_dce (do_dce_), something_changed (false)
    {
      stmts_to_remove.create (0);
      stmts_to_fixup.create (0);
      need_eh_cleanup = BITMAP_ALLOC (NULL);
    }
    ~substitute_and_fold_dom_walker ()
    {
      stmts_to_remove.release ();
      stmts_to_fixup.release ();
      BITMAP_FREE (need_eh_cleanup);
    }

    virtual void before_dom_children (basic_block);
    virtual void after_dom_children (basic_block) {}

    /* Callback returning the propagated lattice value for an SSA name,
       or NULL_TREE if none is known.  */
    ssa_prop_get_value_fn get_value_fn;
    /* Optional pass-specific statement folding callback, invoked before
       generic use replacement.  */
    ssa_prop_fold_stmt_fn fold_fn;
    /* True if statements whose value is fully known may be queued for
       removal as trivially dead.  */
    bool do_dce;
    /* Set when any PHI argument or statement use was replaced.  */
    bool something_changed;
    /* Statements/PHIs whose result has a known replacement; removed
       after the walk, in reverse order.  */
    vec<gimple> stmts_to_remove;
    /* Calls that became noreturn during folding; fixed up after the
       walk (may require splitting blocks).  */
    vec<gimple> stmts_to_fixup;
    /* Indices of blocks that may need dead EH edges purged because
       folding cleaned EH information from a statement.  */
    bitmap need_eh_cleanup;
};
/* Visit BB during the dominator walk: propagate known lattice values
   into BB's PHI nodes and statements, fold statements that changed, and
   queue fully-known statements for later removal and new noreturn calls
   for later fixup.  Sets something_changed when anything was altered.  */

void
substitute_and_fold_dom_walker::before_dom_children (basic_block bb)
{
  /* Propagate known values into PHI nodes.  */
  for (gphi_iterator i = gsi_start_phis (bb);
       !gsi_end_p (i);
       gsi_next (&i))
    {
      gphi *phi = i.phi ();
      tree res = gimple_phi_result (phi);
      /* Virtual PHIs are left alone; propagating virtual operands could
	 create overlapping life-ranges (see may_propagate_copy).  */
      if (virtual_operand_p (res))
	continue;
      if (do_dce
	  && res && TREE_CODE (res) == SSA_NAME)
	{
	  tree sprime = get_value_fn (res);
	  if (sprime
	      && sprime != res
	      && may_propagate_copy (res, sprime))
	    {
	      /* The whole PHI result has a known replacement, so the PHI
		 is trivially dead; queue it instead of propagating into
		 its arguments.  */
	      stmts_to_remove.safe_push (phi);
	      continue;
	    }
	}
      something_changed |= replace_phi_args_in (phi, get_value_fn);
    }

  /* Propagate known values into stmts.  In some case it exposes
     more trivially deletable stmts to walk backward.  */
  for (gimple_stmt_iterator i = gsi_start_bb (bb);
       !gsi_end_p (i);
       gsi_next (&i))
    {
      bool did_replace;
      gimple stmt = gsi_stmt (i);
      enum gimple_code code = gimple_code (stmt);

      /* Ignore ASSERT_EXPRs.  They are used by VRP to generate
	 range information for names and they are discarded
	 afterwards.  */
      if (code == GIMPLE_ASSIGN
	  && TREE_CODE (gimple_assign_rhs1 (stmt)) == ASSERT_EXPR)
	continue;

      /* No point propagating into a stmt we have a value for we
	 can propagate into all uses.  Mark it for removal instead.  */
      tree lhs = gimple_get_lhs (stmt);
      if (do_dce
	  && lhs && TREE_CODE (lhs) == SSA_NAME)
	{
	  tree sprime = get_value_fn (lhs);
	  if (sprime
	      && sprime != lhs
	      && may_propagate_copy (lhs, sprime)
	      && !stmt_could_throw_p (stmt)
	      && !gimple_has_side_effects (stmt))
	    {
	      stmts_to_remove.safe_push (stmt);
	      continue;
	    }
	}

      /* Replace the statement with its folded version and mark it
	 folded.  */
      did_replace = false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Folding statement: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	}

      /* Remember the pre-folding statement so EH info changes can be
	 detected, and whether it was already a noreturn call.  */
      gimple old_stmt = stmt;
      bool was_noreturn = (is_gimple_call (stmt)
			   && gimple_call_noreturn_p (stmt));

      /* Some statements may be simplified using propagator
	 specific information.  Do this before propagating
	 into the stmt to not disturb pass specific information.  */
      if (fold_fn
	  && (*fold_fn)(&i))
	{
	  did_replace = true;
	  prop_stats.num_stmts_folded++;
	  /* FOLD_FN may have replaced the statement; re-fetch it from
	     the iterator before touching it again.  */
	  stmt = gsi_stmt (i);
	  update_stmt (stmt);
	}

      /* Replace real uses in the statement.  */
      did_replace |= replace_uses_in (stmt, get_value_fn);

      /* If we made a replacement, fold the statement.  */
      if (did_replace)
	fold_stmt (&i, follow_single_use_edges);

      /* Now cleanup.  */
      if (did_replace)
	{
	  /* fold_stmt may also have replaced the statement.  */
	  stmt = gsi_stmt (i);

	  /* If we cleaned up EH information from the statement,
	     remove EH edges.  */
	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
	    bitmap_set_bit (need_eh_cleanup, bb->index);

	  /* If we turned a not noreturn call into a noreturn one
	     schedule it for fixup.  */
	  if (!was_noreturn
	      && is_gimple_call (stmt)
	      && gimple_call_noreturn_p (stmt))
	    stmts_to_fixup.safe_push (stmt);

	  if (gimple_assign_single_p (stmt))
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);

	      /* Substitution may have exposed a now-invariant (or no
		 longer invariant) address; recompute the flag.  */
	      if (TREE_CODE (rhs) == ADDR_EXPR)
		recompute_tree_invariant_for_addr_expr (rhs);
	    }

	  /* Determine what needs to be done to update the SSA form.  */
	  update_stmt (stmt);
	  /* Changes to debug stmts alone do not count as real changes.  */
	  if (!is_gimple_debug (stmt))
	    something_changed = true;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (did_replace)
	    {
	      fprintf (dump_file, "Folded into: ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }
	  else
	    fprintf (dump_file, "Not folded\n");
	}
    }
}
1270 /* Perform final substitution and folding of propagated values.
1272 PROP_VALUE[I] contains the single value that should be substituted
1273 at every use of SSA name N_I. If PROP_VALUE is NULL, no values are
1274 substituted.
1276 If FOLD_FN is non-NULL the function will be invoked on all statements
1277 before propagating values for pass specific simplification.
1279 DO_DCE is true if trivially dead stmts can be removed.
1281 If DO_DCE is true, the statements within a BB are walked from
1282 last to first element. Otherwise we scan from first to last element.
1284 Return TRUE when something changed. */
1286 bool
1287 substitute_and_fold (ssa_prop_get_value_fn get_value_fn,
1288 ssa_prop_fold_stmt_fn fold_fn,
1289 bool do_dce)
1291 gcc_assert (get_value_fn);
1293 if (dump_file && (dump_flags & TDF_DETAILS))
1294 fprintf (dump_file, "\nSubstituting values and folding statements\n\n");
1296 memset (&prop_stats, 0, sizeof (prop_stats));
1298 calculate_dominance_info (CDI_DOMINATORS);
1299 substitute_and_fold_dom_walker walker(CDI_DOMINATORS,
1300 get_value_fn, fold_fn, do_dce);
1301 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
1303 /* We cannot remove stmts during the BB walk, especially not release
1304 SSA names there as that destroys the lattice of our callers.
1305 Remove stmts in reverse order to make debug stmt creation possible. */
1306 while (!walker.stmts_to_remove.is_empty ())
1308 gimple stmt = walker.stmts_to_remove.pop ();
1309 if (dump_file && dump_flags & TDF_DETAILS)
1311 fprintf (dump_file, "Removing dead stmt ");
1312 print_gimple_stmt (dump_file, stmt, 0, 0);
1313 fprintf (dump_file, "\n");
1315 prop_stats.num_dce++;
1316 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1317 if (gimple_code (stmt) == GIMPLE_PHI)
1318 remove_phi_node (&gsi, true);
1319 else
1321 unlink_stmt_vdef (stmt);
1322 gsi_remove (&gsi, true);
1323 release_defs (stmt);
1327 if (!bitmap_empty_p (walker.need_eh_cleanup))
1328 gimple_purge_all_dead_eh_edges (walker.need_eh_cleanup);
1330 /* Fixup stmts that became noreturn calls. This may require splitting
1331 blocks and thus isn't possible during the dominator walk. Do this
1332 in reverse order so we don't inadvertedly remove a stmt we want to
1333 fixup by visiting a dominating now noreturn call first. */
1334 while (!walker.stmts_to_fixup.is_empty ())
1336 gimple stmt = walker.stmts_to_fixup.pop ();
1337 if (dump_file && dump_flags & TDF_DETAILS)
1339 fprintf (dump_file, "Fixing up noreturn call ");
1340 print_gimple_stmt (dump_file, stmt, 0, 0);
1341 fprintf (dump_file, "\n");
1343 fixup_noreturn_call (stmt);
1346 statistics_counter_event (cfun, "Constants propagated",
1347 prop_stats.num_const_prop);
1348 statistics_counter_event (cfun, "Copies propagated",
1349 prop_stats.num_copy_prop);
1350 statistics_counter_event (cfun, "Statements folded",
1351 prop_stats.num_stmts_folded);
1352 statistics_counter_event (cfun, "Statements deleted",
1353 prop_stats.num_dce);
1355 return walker.something_changed;
1359 /* Return true if we may propagate ORIG into DEST, false otherwise. */
1361 bool
1362 may_propagate_copy (tree dest, tree orig)
1364 tree type_d = TREE_TYPE (dest);
1365 tree type_o = TREE_TYPE (orig);
1367 /* If ORIG is a default definition which flows in from an abnormal edge
1368 then the copy can be propagated. It is important that we do so to avoid
1369 uninitialized copies. */
1370 if (TREE_CODE (orig) == SSA_NAME
1371 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig)
1372 && SSA_NAME_IS_DEFAULT_DEF (orig)
1373 && (SSA_NAME_VAR (orig) == NULL_TREE
1374 || TREE_CODE (SSA_NAME_VAR (orig)) == VAR_DECL))
1376 /* Otherwise if ORIG just flows in from an abnormal edge then the copy cannot
1377 be propagated. */
1378 else if (TREE_CODE (orig) == SSA_NAME
1379 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
1380 return false;
1381 /* Similarly if DEST flows in from an abnormal edge then the copy cannot be
1382 propagated. */
1383 else if (TREE_CODE (dest) == SSA_NAME
1384 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (dest))
1385 return false;
1387 /* Do not copy between types for which we *do* need a conversion. */
1388 if (!useless_type_conversion_p (type_d, type_o))
1389 return false;
1391 /* Generally propagating virtual operands is not ok as that may
1392 create overlapping life-ranges. */
1393 if (TREE_CODE (dest) == SSA_NAME && virtual_operand_p (dest))
1394 return false;
1396 /* Anything else is OK. */
1397 return true;
1400 /* Like may_propagate_copy, but use as the destination expression
1401 the principal expression (typically, the RHS) contained in
1402 statement DEST. This is more efficient when working with the
1403 gimple tuples representation. */
1405 bool
1406 may_propagate_copy_into_stmt (gimple dest, tree orig)
1408 tree type_d;
1409 tree type_o;
1411 /* If the statement is a switch or a single-rhs assignment,
1412 then the expression to be replaced by the propagation may
1413 be an SSA_NAME. Fortunately, there is an explicit tree
1414 for the expression, so we delegate to may_propagate_copy. */
1416 if (gimple_assign_single_p (dest))
1417 return may_propagate_copy (gimple_assign_rhs1 (dest), orig);
1418 else if (gswitch *dest_swtch = dyn_cast <gswitch *> (dest))
1419 return may_propagate_copy (gimple_switch_index (dest_swtch), orig);
1421 /* In other cases, the expression is not materialized, so there
1422 is no destination to pass to may_propagate_copy. On the other
1423 hand, the expression cannot be an SSA_NAME, so the analysis
1424 is much simpler. */
1426 if (TREE_CODE (orig) == SSA_NAME
1427 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
1428 return false;
1430 if (is_gimple_assign (dest))
1431 type_d = TREE_TYPE (gimple_assign_lhs (dest));
1432 else if (gimple_code (dest) == GIMPLE_COND)
1433 type_d = boolean_type_node;
1434 else if (is_gimple_call (dest)
1435 && gimple_call_lhs (dest) != NULL_TREE)
1436 type_d = TREE_TYPE (gimple_call_lhs (dest));
1437 else
1438 gcc_unreachable ();
1440 type_o = TREE_TYPE (orig);
1442 if (!useless_type_conversion_p (type_d, type_o))
1443 return false;
1445 return true;
/* Similarly, but we know that we're propagating into an ASM_EXPR.  */

bool
may_propagate_copy_into_asm (tree dest ATTRIBUTE_UNUSED)
{
  /* No additional restrictions apply for ASM operands; the copy is
     always considered propagatable.  */
  return true;
}
1457 /* Common code for propagate_value and replace_exp.
1459 Replace use operand OP_P with VAL. FOR_PROPAGATION indicates if the
1460 replacement is done to propagate a value or not. */
1462 static void
1463 replace_exp_1 (use_operand_p op_p, tree val,
1464 bool for_propagation ATTRIBUTE_UNUSED)
1466 #if defined ENABLE_CHECKING
1467 tree op = USE_FROM_PTR (op_p);
1469 gcc_assert (!(for_propagation
1470 && TREE_CODE (op) == SSA_NAME
1471 && TREE_CODE (val) == SSA_NAME
1472 && !may_propagate_copy (op, val)));
1473 #endif
1475 if (TREE_CODE (val) == SSA_NAME)
1476 SET_USE (op_p, val);
1477 else
1478 SET_USE (op_p, unshare_expr (val));
/* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
   into the operand pointed to by OP_P.

   Use this version for const/copy propagation as it will perform additional
   checks to ensure validity of the const/copy propagation.  */

void
propagate_value (use_operand_p op_p, tree val)
{
  /* FOR_PROPAGATION is true: with ENABLE_CHECKING this validates the
     replacement via may_propagate_copy.  */
  replace_exp_1 (op_p, val, true);
}
/* Replace *OP_P with value VAL (assumed to be a constant or another SSA_NAME).

   Use this version when not const/copy propagating values.  For example,
   PRE uses this version when building expressions as they would appear
   in specific blocks taking into account actions of PHI nodes.

   The statement in which an expression has been replaced should be
   folded using fold_stmt_inplace.  */

void
replace_exp (use_operand_p op_p, tree val)
{
  /* FOR_PROPAGATION is false: no propagation-validity checking.  */
  replace_exp_1 (op_p, val, false);
}
1510 /* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
1511 into the tree pointed to by OP_P.
1513 Use this version for const/copy propagation when SSA operands are not
1514 available. It will perform the additional checks to ensure validity of
1515 the const/copy propagation, but will not update any operand information.
1516 Be sure to mark the stmt as modified. */
1518 void
1519 propagate_tree_value (tree *op_p, tree val)
1521 if (TREE_CODE (val) == SSA_NAME)
1522 *op_p = val;
1523 else
1524 *op_p = unshare_expr (val);
1528 /* Like propagate_tree_value, but use as the operand to replace
1529 the principal expression (typically, the RHS) contained in the
1530 statement referenced by iterator GSI. Note that it is not
1531 always possible to update the statement in-place, so a new
1532 statement may be created to replace the original. */
1534 void
1535 propagate_tree_value_into_stmt (gimple_stmt_iterator *gsi, tree val)
1537 gimple stmt = gsi_stmt (*gsi);
1539 if (is_gimple_assign (stmt))
1541 tree expr = NULL_TREE;
1542 if (gimple_assign_single_p (stmt))
1543 expr = gimple_assign_rhs1 (stmt);
1544 propagate_tree_value (&expr, val);
1545 gimple_assign_set_rhs_from_tree (gsi, expr);
1547 else if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
1549 tree lhs = NULL_TREE;
1550 tree rhs = build_zero_cst (TREE_TYPE (val));
1551 propagate_tree_value (&lhs, val);
1552 gimple_cond_set_code (cond_stmt, NE_EXPR);
1553 gimple_cond_set_lhs (cond_stmt, lhs);
1554 gimple_cond_set_rhs (cond_stmt, rhs);
1556 else if (is_gimple_call (stmt)
1557 && gimple_call_lhs (stmt) != NULL_TREE)
1559 tree expr = NULL_TREE;
1560 bool res;
1561 propagate_tree_value (&expr, val);
1562 res = update_call_from_tree (gsi, expr);
1563 gcc_assert (res);
1565 else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
1566 propagate_tree_value (gimple_switch_index_ptr (swtch_stmt), val);
1567 else
1568 gcc_unreachable ();