/* Generic SSA value propagation engine.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by the
   Free Software Foundation; either version 3, or (at your option) any
   later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "fold-const.h"
#include "flags.h"
#include "tm_p.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "dumpfile.h"
#include "bitmap.h"
#include "sbitmap.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "value-prof.h"
#include "domwalk.h"
#include "cfgloop.h"
#include "tree-cfgcleanup.h"
/* This file implements a generic value propagation engine based on
   the same propagation used by the SSA-CCP algorithm [1].

   Propagation is performed by simulating the execution of every
   statement that produces the value being propagated.  Simulation
   proceeds as follows:

   1- Initially, all edges of the CFG are marked not executable and
      the CFG worklist is seeded with all the statements in the entry
      basic block (block 0).

   2- Every statement S is simulated with a call to the call-back
      function SSA_PROP_VISIT_STMT.  This evaluation may produce 3
      results:

	SSA_PROP_NOT_INTERESTING: Statement S produces nothing of
	    interest and does not affect any of the work lists.

	SSA_PROP_VARYING: The value produced by S cannot be determined
	    at compile time.  Further simulation of S is not required.
	    If S is a conditional jump, all the outgoing edges for the
	    block are considered executable and added to the work
	    list.

	SSA_PROP_INTERESTING: S produces a value that can be computed
	    at compile time.  Its result can be propagated into the
	    statements that feed from S.  Furthermore, if S is a
	    conditional jump, only the edge known to be taken is added
	    to the work list.  Edges that are known not to execute are
	    never simulated.

   3- PHI nodes are simulated with a call to SSA_PROP_VISIT_PHI.  The
      return value from SSA_PROP_VISIT_PHI has the same semantics as
      described in #2.

   4- Three work lists are kept.  Statements are only added to these
      lists if they produce one of SSA_PROP_INTERESTING or
      SSA_PROP_VARYING.

	CFG_BLOCKS contains the list of blocks to be simulated.
	    Blocks are added to this list if their incoming edges are
	    found executable.

	VARYING_SSA_EDGES contains the list of statements that feed
	    from statements that produce an SSA_PROP_VARYING result.
	    These are simulated first to speed up processing.

	INTERESTING_SSA_EDGES contains the list of statements that
	    feed from statements that produce an SSA_PROP_INTERESTING
	    result.

   5- Simulation terminates when all three work lists are drained.

   Before calling ssa_propagate, it is important to set
   prop_simulate_again_p for all the statements in the program that
   should be simulated.  This initialization allows an implementation
   to specify which statements should never be simulated.

   It is also important to compute def-use information before calling
   ssa_propagate.

   References:

     [1] Constant propagation with conditional branches,
	 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     [2] Building an Optimizing Compiler,
	 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     [3] Advanced Compiler Design and Implementation,
	 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
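
/* As an illustration, a minimal sketch of how a client pass might drive
   the engine (hypothetical code; the callback names and the lattice
   lookup my_lattice_value are assumptions, not part of this file):

     static enum ssa_prop_result
     my_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
     {
       tree lhs = gimple_get_lhs (stmt);
       if (lhs == NULL_TREE || TREE_CODE (lhs) != SSA_NAME)
	 return SSA_PROP_VARYING;
       if (!my_lattice_value (lhs))
	 return SSA_PROP_VARYING;
       *output_p = lhs;
       return SSA_PROP_INTERESTING;
     }

     static enum ssa_prop_result
     my_visit_phi (gphi *phi)
     {
       return SSA_PROP_NOT_INTERESTING;
     }

   After marking the statements to be simulated with
   prop_set_simulate_again (stmt, true), the pass simply calls

     ssa_propagate (my_visit_stmt, my_visit_phi);  */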
/* Function pointers used to parameterize the propagation engine.  */
static ssa_prop_visit_stmt_fn ssa_prop_visit_stmt;
static ssa_prop_visit_phi_fn ssa_prop_visit_phi;

/* Keep track of statements that have been added to one of the SSA
   edges worklists.  This flag is used to avoid visiting statements
   unnecessarily when draining an SSA edge worklist.  If while
   simulating a basic block, we find a statement with
   STMT_IN_SSA_EDGE_WORKLIST set, we clear it to prevent SSA edge
   processing from visiting it again.

   NOTE: users of the propagation engine are not allowed to use
   the GF_PLF_1 flag.  */
#define STMT_IN_SSA_EDGE_WORKLIST	GF_PLF_1

/* A bitmap to keep track of executable blocks in the CFG.  */
static sbitmap executable_blocks;

/* Array of basic blocks on the CFG worklist.  */
static vec<basic_block> cfg_blocks;

static unsigned int cfg_blocks_num = 0;
static int cfg_blocks_tail;
static int cfg_blocks_head;

static sbitmap bb_in_list;

/* Worklist of SSA edges which will need reexamination as their
   definition has changed.  SSA edges are def-use edges in the SSA
   web.  For each D-U edge, we store the target statement or PHI node
   U.  */
static vec<gimple> interesting_ssa_edges;

/* Identical to INTERESTING_SSA_EDGES.  For performance reasons, the
   list of SSA edges is split into two.  One contains all SSA edges
   that need to be reexamined because their lattice value changed to
   varying (this worklist), and the other contains all other SSA edges
   to be reexamined (INTERESTING_SSA_EDGES).

   Since most values in the program are VARYING, the ideal situation
   is to move them to that lattice value as quickly as possible.
   Thus, it doesn't make sense to process any other type of lattice
   value until all VARYING values are propagated fully, which is one
   thing using the VARYING worklist achieves.  In addition, if we
   don't use a separate worklist for VARYING edges, we end up with
   situations where lattice values move from
   UNDEFINED->INTERESTING->VARYING instead of UNDEFINED->VARYING.  */
static vec<gimple> varying_ssa_edges;

/* Return true if the block worklist is empty.  */

static inline bool
cfg_blocks_empty_p (void)
{
  return (cfg_blocks_num == 0);
}

/* Add a basic block to the worklist.  The block must not be already
   in the worklist, and it must not be the ENTRY or EXIT block.  */

static void
cfg_blocks_add (basic_block bb)
{
  bool head = false;

  gcc_assert (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	      && bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
  gcc_assert (!bitmap_bit_p (bb_in_list, bb->index));

  if (cfg_blocks_empty_p ())
    {
      cfg_blocks_tail = cfg_blocks_head = 0;
      cfg_blocks_num = 1;
    }
  else
    {
      cfg_blocks_num++;
      if (cfg_blocks_num > cfg_blocks.length ())
	{
	  /* We have to grow the array now.  Adjust the queue to occupy
	     the full space of the original array.  We do not need to
	     initialize the newly allocated portion of the array
	     because we keep track of CFG_BLOCKS_HEAD and
	     CFG_BLOCKS_TAIL.  */
	  cfg_blocks_tail = cfg_blocks.length ();
	  cfg_blocks_head = 0;
	  cfg_blocks.safe_grow (2 * cfg_blocks_tail);
	}
      /* Minor optimization: we prefer to see blocks with more
	 predecessors later, because there is more of a chance that
	 the incoming edges will be executable.  */
      else if (EDGE_COUNT (bb->preds)
	       >= EDGE_COUNT (cfg_blocks[cfg_blocks_head]->preds))
	cfg_blocks_tail = ((cfg_blocks_tail + 1) % cfg_blocks.length ());
      else
	{
	  if (cfg_blocks_head == 0)
	    cfg_blocks_head = cfg_blocks.length ();
	  --cfg_blocks_head;
	  head = true;
	}
    }

  cfg_blocks[head ? cfg_blocks_head : cfg_blocks_tail] = bb;
  bitmap_set_bit (bb_in_list, bb->index);
}

/* Remove a block from the worklist.  */

static basic_block
cfg_blocks_get (void)
{
  basic_block bb;

  bb = cfg_blocks[cfg_blocks_head];

  gcc_assert (!cfg_blocks_empty_p ());
  gcc_assert (bb);

  cfg_blocks_head = ((cfg_blocks_head + 1) % cfg_blocks.length ());
  --cfg_blocks_num;
  bitmap_clear_bit (bb_in_list, bb->index);

  return bb;
}


/* We have just defined a new value for VAR.  If IS_VARYING is true,
   add all immediate uses of VAR to VARYING_SSA_EDGES, otherwise add
   them to INTERESTING_SSA_EDGES.  */

static void
add_ssa_edge (tree var, bool is_varying)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      gimple use_stmt = USE_STMT (use_p);

      if (prop_simulate_again_p (use_stmt)
	  && !gimple_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST))
	{
	  gimple_set_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST, true);
	  if (is_varying)
	    varying_ssa_edges.safe_push (use_stmt);
	  else
	    interesting_ssa_edges.safe_push (use_stmt);
	}
    }
}


/* Add edge E to the control flow worklist.  */

static void
add_control_edge (edge e)
{
  basic_block bb = e->dest;
  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;

  /* If the edge had already been executed, skip it.  */
  if (e->flags & EDGE_EXECUTABLE)
    return;

  e->flags |= EDGE_EXECUTABLE;

  /* If the block is already in the list, we're done.  */
  if (bitmap_bit_p (bb_in_list, bb->index))
    return;

  cfg_blocks_add (bb);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nAdding Destination of edge (%d -> %d) to worklist\n",
	     e->src->index, e->dest->index);
}

/* Simulate the execution of STMT and update the work lists accordingly.  */

static void
simulate_stmt (gimple stmt)
{
  enum ssa_prop_result val = SSA_PROP_NOT_INTERESTING;
  edge taken_edge = NULL;
  tree output_name = NULL_TREE;

  /* Don't bother visiting statements that are already
     considered varying by the propagator.  */
  if (!prop_simulate_again_p (stmt))
    return;

  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      val = ssa_prop_visit_phi (as_a <gphi *> (stmt));
      output_name = gimple_phi_result (stmt);
    }
  else
    val = ssa_prop_visit_stmt (stmt, &taken_edge, &output_name);

  if (val == SSA_PROP_VARYING)
    {
      prop_set_simulate_again (stmt, false);

      /* If the statement produced a new varying value, add the SSA
	 edges coming out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name, true);

      /* If STMT transfers control out of its basic block, add
	 all outgoing edges to the work list.  */
      if (stmt_ends_bb_p (stmt))
	{
	  edge e;
	  edge_iterator ei;
	  basic_block bb = gimple_bb (stmt);
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    add_control_edge (e);
	}
      return;
    }
  else if (val == SSA_PROP_INTERESTING)
    {
      /* If the statement produced a new value, add the SSA edges coming
	 out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name, false);

      /* If we know which edge is going to be taken out of this block,
	 add it to the CFG work list.  */
      if (taken_edge)
	add_control_edge (taken_edge);
    }

  /* If there are no SSA uses on the stmt whose defs are simulated
     again then this stmt will never be visited again.  */
  bool has_simulate_again_uses = false;
  use_operand_p use_p;
  ssa_op_iter iter;
  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      edge_iterator ei;
      edge e;
      tree arg;
      FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->preds)
	if (!(e->flags & EDGE_EXECUTABLE)
	    || ((arg = PHI_ARG_DEF_FROM_EDGE (stmt, e))
		&& TREE_CODE (arg) == SSA_NAME
		&& !SSA_NAME_IS_DEFAULT_DEF (arg)
		&& prop_simulate_again_p (SSA_NAME_DEF_STMT (arg))))
	  {
	    has_simulate_again_uses = true;
	    break;
	  }
    }
  else
    FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
      {
	gimple def_stmt = SSA_NAME_DEF_STMT (USE_FROM_PTR (use_p));
	if (!gimple_nop_p (def_stmt)
	    && prop_simulate_again_p (def_stmt))
	  {
	    has_simulate_again_uses = true;
	    break;
	  }
      }

  if (!has_simulate_again_uses)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "marking stmt to be not simulated again\n");
      prop_set_simulate_again (stmt, false);
    }
}

/* Process an SSA edge worklist.  WORKLIST is the SSA edge worklist to
   drain.  This pops statements off the given WORKLIST and processes
   them until there are no more statements on WORKLIST.
   We take a pointer to WORKLIST because it may be reallocated when an
   SSA edge is added to it in simulate_stmt.  */

static void
process_ssa_edge_worklist (vec<gimple> *worklist)
{
  /* Drain the entire worklist.  */
  while (worklist->length () > 0)
    {
      basic_block bb;

      /* Pull the statement to simulate off the worklist.  */
      gimple stmt = worklist->pop ();

      /* If this statement was already visited by simulate_block, then
	 we don't need to visit it again here.  */
      if (!gimple_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST))
	continue;

      /* STMT is no longer in a worklist.  */
      gimple_set_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST, false);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "\nSimulating statement (from ssa_edges): ");
	  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	}

      bb = gimple_bb (stmt);

      /* PHI nodes are always visited, regardless of whether or not
	 the destination block is executable.  Otherwise, visit the
	 statement only if its block is marked executable.  */
      if (gimple_code (stmt) == GIMPLE_PHI
	  || bitmap_bit_p (executable_blocks, bb->index))
	simulate_stmt (stmt);
    }
}

/* Simulate the execution of BLOCK.  Evaluate the statement associated
   with each variable reference inside the block.  */

static void
simulate_block (basic_block block)
{
  gimple_stmt_iterator gsi;

  /* There is nothing to do for the exit block.  */
  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSimulating block %d\n", block->index);

  /* Always simulate PHI nodes, even if we have simulated this block
     before.  */
  for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
    simulate_stmt (gsi_stmt (gsi));

  /* If this is the first time we've simulated this block, then we
     must simulate each of its statements.  */
  if (!bitmap_bit_p (executable_blocks, block->index))
    {
      gimple_stmt_iterator j;
      unsigned int normal_edge_count;
      edge e, normal_edge;
      edge_iterator ei;

      /* Note that we have simulated this block.  */
      bitmap_set_bit (executable_blocks, block->index);

      for (j = gsi_start_bb (block); !gsi_end_p (j); gsi_next (&j))
	{
	  gimple stmt = gsi_stmt (j);

	  /* If this statement is already in the worklist then
	     "cancel" it.  The reevaluation implied by the worklist
	     entry will produce the same value we generate here and
	     thus reevaluating it again from the worklist is
	     pointless.  */
	  if (gimple_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST))
	    gimple_set_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST, false);

	  simulate_stmt (stmt);
	}

      /* We cannot predict when abnormal and EH edges will be executed, so
	 once a block is considered executable, we consider any
	 outgoing abnormal edges as executable.

	 TODO: This is not exactly true.  Simplifying a statement might
	 prove it non-throwing, and a computed goto can be handled when
	 its destination is known.

	 At the same time, if this block has only one successor that is
	 reached by non-abnormal edges, then add that successor to the
	 worklist.  */
      normal_edge_count = 0;
      normal_edge = NULL;
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
	    add_control_edge (e);
	  else
	    {
	      normal_edge_count++;
	      normal_edge = e;
	    }
	}

      if (normal_edge_count == 1)
	add_control_edge (normal_edge);
    }
}

/* Initialize local data structures and work lists.  */

static void
ssa_prop_init (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb;

  /* Worklists of SSA edges.  */
  interesting_ssa_edges.create (20);
  varying_ssa_edges.create (20);

  executable_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (executable_blocks);

  bb_in_list = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (bb_in_list);

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_immediate_uses (dump_file);

  cfg_blocks.create (20);
  cfg_blocks.safe_grow_cleared (20);

  /* Initially assume that every edge in the CFG is not executable
     (including the edges coming out of the entry block).  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator si;

      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	gimple_set_plf (gsi_stmt (si), STMT_IN_SSA_EDGE_WORKLIST, false);

      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
	gimple_set_plf (gsi_stmt (si), STMT_IN_SSA_EDGE_WORKLIST, false);

      FOR_EACH_EDGE (e, ei, bb->succs)
	e->flags &= ~EDGE_EXECUTABLE;
    }

  /* Seed the algorithm by adding the successors of the entry block to the
     edge worklist.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    add_control_edge (e);
}


/* Free allocated storage.  */

static void
ssa_prop_fini (void)
{
  interesting_ssa_edges.release ();
  varying_ssa_edges.release ();
  cfg_blocks.release ();
  sbitmap_free (bb_in_list);
  sbitmap_free (executable_blocks);
}

/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.c
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning any
   attempt to fold if the usual folding turns out to be too
   aggressive.  */
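
/* For instance, folding an array access may produce a tree such as

     a[i + 1]   i.e., ARRAY_REF <a, PLUS_EXPR <i, 1>>

   whose index operand is a full expression rather than a gimple value;
   this predicate rejects it, and the caller must gimplify instead.
   (The concrete tree shape here is illustrative, not exhaustive.)  */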

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if (((code == VEC_COND_EXPR || code == COND_EXPR)
		   ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
		   : !is_gimple_val (TREE_OPERAND (expr, 0)))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}

/* Return true if EXPR is a CALL_EXPR suitable for representation
   as a single GIMPLE_CALL statement.  If the arguments require
   further gimplification, return false.  */

static bool
valid_gimple_call_p (tree expr)
{
  unsigned i, nargs;

  if (TREE_CODE (expr) != CALL_EXPR)
    return false;

  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    {
      tree arg = CALL_EXPR_ARG (expr, i);
      if (is_gimple_reg_type (TREE_TYPE (arg)))
	{
	  if (!is_gimple_val (arg))
	    return false;
	}
      else
	if (!is_gimple_lvalue (arg))
	  return false;
    }

  return true;
}


/* Make SSA names defined by OLD_STMT point to NEW_STMT
   as their defining statement.  */

void
move_ssa_defining_stmt_for_defs (gimple new_stmt, gimple old_stmt)
{
  tree var;
  ssa_op_iter iter;

  if (gimple_in_ssa_p (cfun))
    {
      /* Make defined SSA_NAMEs point to the new
	 statement as their definition.  */
      FOR_EACH_SSA_TREE_OPERAND (var, old_stmt, iter, SSA_OP_ALL_DEFS)
	{
	  if (TREE_CODE (var) == SSA_NAME)
	    SSA_NAME_DEF_STMT (var) = new_stmt;
	}
    }
}


/* Helper function for update_gimple_call and update_call_from_tree.
   A GIMPLE_CALL STMT is being replaced with GIMPLE_CALL NEW_STMT.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple new_stmt,
			   gimple stmt)
{
  gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}

/* Update a GIMPLE_CALL statement at iterator *SI_P to a call to FN
   with NARGS arguments; the arguments, already in GIMPLE form, follow
   the NARGS parameter in the variadic argument list.  */

bool
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
  return true;
}
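
/* For example, a pass that has proved a call can be rewritten to a
   different callee might do (a sketch; DECL, ARG0 and ARG1 stand for
   whatever tree nodes the caller already has in hand):

     update_gimple_call (&gsi, decl, 2, arg0, arg1);

   which builds the new GIMPLE_CALL, transfers the LHS, virtual operands
   and location from the old statement, and replaces it in place.  */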

/* Update a GIMPLE_CALL statement at iterator *SI_P to reflect the
   value of EXPR, which is expected to be the result of folding the
   call.  This can only be done if EXPR is a CALL_EXPR with valid
   GIMPLE operands as arguments, or if it is a suitable RHS expression
   for a GIMPLE_ASSIGN.  More complex expressions will require
   gimplification, which will introduce additional statements.  In this
   event, no update is performed, and the function returns false.
   Note that we cannot mutate a GIMPLE_CALL in-place, so we always
   replace the statement at *SI_P with an entirely new statement.
   The new statement need not be a call, e.g., if the original call
   folded to a constant.  */

bool
update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  gimple stmt = gsi_stmt (*si_p);

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();

      return true;
    }
  else if (valid_gimple_rhs_p (expr))
    {
      tree lhs = gimple_call_lhs (stmt);
      gimple new_stmt;

      /* The call has simplified to an expression
	 that cannot be represented as a GIMPLE_CALL.  */
      if (lhs)
	{
	  /* A value is expected.
	     Introduce a new GIMPLE_ASSIGN statement.  */
	  STRIP_USELESS_TYPE_CONVERSION (expr);
	  new_stmt = gimple_build_assign (lhs, expr);
	  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	}
      else if (!TREE_SIDE_EFFECTS (expr))
	{
	  /* No value is expected, and EXPR has no effect.
	     Replace it with an empty statement.  */
	  new_stmt = gimple_build_nop ();
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	}
      else
	{
	  /* No value is expected, but EXPR has an effect,
	     e.g., it could be a reference to a volatile
	     variable.  Create an assignment statement
	     with a dummy (unused) lhs variable.  */
	  STRIP_USELESS_TYPE_CONVERSION (expr);
	  if (gimple_in_ssa_p (cfun))
	    lhs = make_ssa_name (TREE_TYPE (expr));
	  else
	    lhs = create_tmp_var (TREE_TYPE (expr));
	  new_stmt = gimple_build_assign (lhs, expr);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
	}
      gimple_set_location (new_stmt, gimple_location (stmt));
      gsi_replace (si_p, new_stmt, false);
      return true;
    }
  else
    /* The call simplified to an expression that is
       not a valid GIMPLE RHS.  */
    return false;
}
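
/* A typical use (a sketch, not taken from a particular caller): a pass
   folds a call statement and installs the result, leaving the statement
   untouched when the folded form would need further gimplification:

     tree folded = fold_call_stmt (as_a <gcall *> (stmt), false);
     if (folded && update_call_from_tree (&gsi, folded))
       update_stmt (gsi_stmt (gsi));

   If update_call_from_tree returns false, the original call remains
   in the IL unchanged.  */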

/* Entry point to the propagation engine.

   VISIT_STMT is called for every statement visited.
   VISIT_PHI is called for every PHI node visited.  */

void
ssa_propagate (ssa_prop_visit_stmt_fn visit_stmt,
	       ssa_prop_visit_phi_fn visit_phi)
{
  ssa_prop_visit_stmt = visit_stmt;
  ssa_prop_visit_phi = visit_phi;

  ssa_prop_init ();

  /* Iterate until the worklists are empty.  */
  while (!cfg_blocks_empty_p ()
	 || interesting_ssa_edges.length () > 0
	 || varying_ssa_edges.length () > 0)
    {
      if (!cfg_blocks_empty_p ())
	{
	  /* Pull the next block to simulate off the worklist.  */
	  basic_block dest_block = cfg_blocks_get ();
	  simulate_block (dest_block);
	}

      /* In order to move things to varying as quickly as
	 possible, process the VARYING_SSA_EDGES worklist first.  */
      process_ssa_edge_worklist (&varying_ssa_edges);

      /* Now process the INTERESTING_SSA_EDGES worklist.  */
      process_ssa_edge_worklist (&interesting_ssa_edges);
    }

  ssa_prop_fini ();
}

/* Return true if STMT is of the form 'mem_ref = RHS', where 'mem_ref'
   is a non-volatile pointer dereference, a structure reference or a
   reference to a single _DECL.  Ignore volatile memory references
   because they are not interesting for the optimizers.  */
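
/* For example (illustrative source-level statements, not GIMPLE dumps),
   stores such as '*p = x', 'a.f = y' or 'd = z' qualify, whereas a call
   with no LHS, a store through a volatile lvalue, or a statement without
   a VDEF do not.  */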

bool
stmt_makes_single_store (gimple stmt)
{
  tree lhs;

  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_CALL)
    return false;

  if (!gimple_vdef (stmt))
    return false;

  lhs = gimple_get_lhs (stmt);

  /* A call statement may have a null LHS.  */
  if (!lhs)
    return false;

  return (!TREE_THIS_VOLATILE (lhs)
	  && (DECL_P (lhs)
	      || REFERENCE_CLASS_P (lhs)));
}


/* Propagation statistics.  */
struct prop_stats_d
{
  long num_const_prop;
  long num_copy_prop;
  long num_stmts_folded;
  long num_dce;
};

static struct prop_stats_d prop_stats;

/* Replace USE references in statement STMT with the values returned
   by GET_VALUE.  Return true if at least one reference was replaced.  */

static bool
replace_uses_in (gimple stmt, ssa_prop_get_value_fn get_value)
{
  bool replaced = false;
  use_operand_p use;
  ssa_op_iter iter;

  FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      tree tuse = USE_FROM_PTR (use);
      tree val = (*get_value) (tuse);

      if (val == tuse || val == NULL_TREE)
	continue;

      if (gimple_code (stmt) == GIMPLE_ASM
	  && !may_propagate_copy_into_asm (tuse))
	continue;

      if (!may_propagate_copy (tuse, val))
	continue;

      if (TREE_CODE (val) != SSA_NAME)
	prop_stats.num_const_prop++;
      else
	prop_stats.num_copy_prop++;

      propagate_value (use, val);

      replaced = true;
    }

  return replaced;
}

/* Replace the arguments of PHI with the values returned by GET_VALUE.
   Return true if at least one argument was replaced.  */

static bool
replace_phi_args_in (gphi *phi, ssa_prop_get_value_fn get_value)
{
  size_t i;
  bool replaced = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Folding PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
    }

  basic_block bb = gimple_bb (phi);
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree val = (*get_value) (arg);

	  if (val && val != arg && may_propagate_copy (arg, val))
	    {
	      edge e = gimple_phi_arg_edge (phi, i);

	      /* Avoid propagating constants into loop latch edge
		 PHI arguments as this makes coalescing the copy
		 across this edge impossible.  We only do this for
		 arguments defined by an ASSERT_EXPR; otherwise the
		 defining stmt would get removed without its uses
		 being replaced.  */
	      if (TREE_CODE (val) != SSA_NAME
		  && bb->loop_father->header == bb
		  && dominated_by_p (CDI_DOMINATORS, e->src, bb)
		  && is_gimple_assign (SSA_NAME_DEF_STMT (arg))
		  && (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (arg))
		      == ASSERT_EXPR))
		continue;

	      if (TREE_CODE (val) != SSA_NAME)
		prop_stats.num_const_prop++;
	      else
		prop_stats.num_copy_prop++;

	      propagate_value (PHI_ARG_DEF_PTR (phi, i), val);
	      replaced = true;

	      /* If we propagated a copy and this argument flows
		 through an abnormal edge, update the replacement
		 accordingly.  */
	      if (TREE_CODE (val) == SSA_NAME
		  && e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (val));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (!replaced)
	fprintf (dump_file, "No folding possible\n");
      else
	{
	  fprintf (dump_file, "Folded into: ");
	  print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }

  return replaced;
}

class substitute_and_fold_dom_walker : public dom_walker
{
public:
    substitute_and_fold_dom_walker (cdi_direction direction,
				    ssa_prop_get_value_fn get_value_fn_,
				    ssa_prop_fold_stmt_fn fold_fn_,
				    bool do_dce_)
	: dom_walker (direction), get_value_fn (get_value_fn_),
	  fold_fn (fold_fn_), do_dce (do_dce_), something_changed (false)
    {
      stmts_to_remove.create (0);
      stmts_to_fixup.create (0);
      need_eh_cleanup = BITMAP_ALLOC (NULL);
    }
    ~substitute_and_fold_dom_walker ()
    {
      stmts_to_remove.release ();
      stmts_to_fixup.release ();
      BITMAP_FREE (need_eh_cleanup);
    }

    virtual void before_dom_children (basic_block);
    virtual void after_dom_children (basic_block) {}

    ssa_prop_get_value_fn get_value_fn;
    ssa_prop_fold_stmt_fn fold_fn;
    bool do_dce;
    bool something_changed;
    vec<gimple> stmts_to_remove;
    vec<gimple> stmts_to_fixup;
    bitmap need_eh_cleanup;
};

void
substitute_and_fold_dom_walker::before_dom_children (basic_block bb)
{
  /* Propagate known values into PHI nodes.  */
  for (gphi_iterator i = gsi_start_phis (bb);
       !gsi_end_p (i);
       gsi_next (&i))
    {
      gphi *phi = i.phi ();
      tree res = gimple_phi_result (phi);
      if (virtual_operand_p (res))
	continue;
      if (do_dce
	  && res && TREE_CODE (res) == SSA_NAME)
	{
	  tree sprime = get_value_fn (res);
	  if (sprime
	      && sprime != res
	      && may_propagate_copy (res, sprime))
	    {
	      stmts_to_remove.safe_push (phi);
	      continue;
	    }
	}
      something_changed |= replace_phi_args_in (phi, get_value_fn);
    }

  /* Propagate known values into stmts.  In some cases this exposes
     more trivially deletable stmts to the backward walk.  */
  for (gimple_stmt_iterator i = gsi_start_bb (bb);
       !gsi_end_p (i);
       gsi_next (&i))
    {
      bool did_replace;
      gimple stmt = gsi_stmt (i);
      enum gimple_code code = gimple_code (stmt);

      /* Ignore ASSERT_EXPRs.  They are used by VRP to generate
	 range information for names and they are discarded
	 afterwards.  */
      if (code == GIMPLE_ASSIGN
	  && TREE_CODE (gimple_assign_rhs1 (stmt)) == ASSERT_EXPR)
	continue;

      /* There is no point propagating into a stmt we already have a
	 value for: the value can be propagated into all of its uses,
	 so mark the stmt for removal instead.  */
      tree lhs = gimple_get_lhs (stmt);
      if (do_dce
	  && lhs && TREE_CODE (lhs) == SSA_NAME)
	{
	  tree sprime = get_value_fn (lhs);
	  if (sprime
	      && sprime != lhs
	      && may_propagate_copy (lhs, sprime)
	      && !stmt_could_throw_p (stmt)
	      && !gimple_has_side_effects (stmt))
	    {
	      stmts_to_remove.safe_push (stmt);
	      continue;
	    }
	}

      /* Replace the statement with its folded version and mark it
	 folded.  */
      did_replace = false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Folding statement: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	}

      gimple old_stmt = stmt;
      bool was_noreturn = (is_gimple_call (stmt)
			   && gimple_call_noreturn_p (stmt));

      /* Some statements may be simplified using propagator
	 specific information.  Do this before propagating
	 into the stmt to not disturb pass specific information.  */
      if (fold_fn
	  && (*fold_fn) (&i))
	{
	  did_replace = true;
	  prop_stats.num_stmts_folded++;
	  stmt = gsi_stmt (i);
	  update_stmt (stmt);
	}

      /* Replace real uses in the statement.  */
      did_replace |= replace_uses_in (stmt, get_value_fn);

      /* If we made a replacement, fold the statement.  */
      if (did_replace)
	fold_stmt (&i, follow_single_use_edges);

      /* Now cleanup.  */
      if (did_replace)
	{
	  stmt = gsi_stmt (i);

	  /* If we cleaned up EH information from the statement,
	     remove EH edges.  */
	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
	    bitmap_set_bit (need_eh_cleanup, bb->index);

	  /* If we turned a call that was not noreturn into one that
	     is, schedule it for fixup.  */
	  if (!was_noreturn
	      && is_gimple_call (stmt)
	      && gimple_call_noreturn_p (stmt))
	    stmts_to_fixup.safe_push (stmt);

	  if (gimple_assign_single_p (stmt))
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);

	      if (TREE_CODE (rhs) == ADDR_EXPR)
		recompute_tree_invariant_for_addr_expr (rhs);
	    }

	  /* Determine what needs to be done to update the SSA form.  */
	  update_stmt (stmt);
	  if (!is_gimple_debug (stmt))
	    something_changed = true;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (did_replace)
	    {
	      fprintf (dump_file, "Folded into: ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }
	  else
	    fprintf (dump_file, "Not folded\n");
	}
    }
}

/* Perform final substitution and folding of propagated values.

   GET_VALUE_FN returns the single value that should be substituted
   at every use of an SSA name.  If it returns NULL_TREE for a name,
   no substitution is performed for that name.

   If FOLD_FN is non-NULL the function will be invoked on all statements
   before propagating values for pass specific simplification.

   DO_DCE is true if trivially dead stmts can be removed.  Such stmts
   are collected during the dominator walk and removed afterwards, in
   reverse order.

   Return TRUE when something changed.  */
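
/* For instance, the CCP pass finishes roughly like this (a sketch along
   the lines of tree-ssa-ccp.c; get_constant_value and ccp_fold_stmt are
   that pass's lattice lookup and folder):

     something_changed = substitute_and_fold (get_constant_value,
					      ccp_fold_stmt, true);

   A pass with no extra folding to do can pass NULL for FOLD_FN, and
   false for DO_DCE to leave trivially dead stmts in place.  */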

bool
substitute_and_fold (ssa_prop_get_value_fn get_value_fn,
		     ssa_prop_fold_stmt_fn fold_fn,
		     bool do_dce)
{
  gcc_assert (get_value_fn);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSubstituting values and folding statements\n\n");

  memset (&prop_stats, 0, sizeof (prop_stats));

  calculate_dominance_info (CDI_DOMINATORS);
  substitute_and_fold_dom_walker walker (CDI_DOMINATORS,
					 get_value_fn, fold_fn, do_dce);
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* We cannot remove stmts during the BB walk, especially not release
     SSA names there as that destroys the lattice of our callers.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!walker.stmts_to_remove.is_empty ())
    {
      gimple stmt = walker.stmts_to_remove.pop ();
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Removing dead stmt ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "\n");
	}
      prop_stats.num_dce++;
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
	remove_phi_node (&gsi, true);
      else
	{
	  unlink_stmt_vdef (stmt);
	  gsi_remove (&gsi, true);
	  release_defs (stmt);
	}
    }

  if (!bitmap_empty_p (walker.need_eh_cleanup))
    gimple_purge_all_dead_eh_edges (walker.need_eh_cleanup);

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!walker.stmts_to_fixup.is_empty ())
    {
      gimple stmt = walker.stmts_to_fixup.pop ();
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "\n");
	}
      fixup_noreturn_call (stmt);
    }

  statistics_counter_event (cfun, "Constants propagated",
			    prop_stats.num_const_prop);
  statistics_counter_event (cfun, "Copies propagated",
			    prop_stats.num_copy_prop);
  statistics_counter_event (cfun, "Statements folded",
			    prop_stats.num_stmts_folded);
  statistics_counter_event (cfun, "Statements deleted",
			    prop_stats.num_dce);

  return walker.something_changed;
}

/* Return true if we may propagate ORIG into DEST, false otherwise.  */

bool
may_propagate_copy (tree dest, tree orig)
{
  tree type_d = TREE_TYPE (dest);
  tree type_o = TREE_TYPE (orig);

  /* If ORIG is a default definition which flows in from an abnormal edge
     then the copy can be propagated.  It is important that we do so to avoid
     uninitialized copies.  */
  if (TREE_CODE (orig) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig)
      && SSA_NAME_IS_DEFAULT_DEF (orig)
      && (SSA_NAME_VAR (orig) == NULL_TREE
	  || TREE_CODE (SSA_NAME_VAR (orig)) == VAR_DECL))
    ;
  /* Otherwise if ORIG just flows in from an abnormal edge then the copy cannot
     be propagated.  */
  else if (TREE_CODE (orig) == SSA_NAME
	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
    return false;
  /* Similarly if DEST flows in from an abnormal edge then the copy cannot be
     propagated.  */
  else if (TREE_CODE (dest) == SSA_NAME
	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (dest))
    return false;

  /* Do not copy between types for which we *do* need a conversion.  */
  if (!useless_type_conversion_p (type_d, type_o))
    return false;

  /* Generally propagating virtual operands is not ok as that may
     create overlapping life-ranges.  */
  if (TREE_CODE (dest) == SSA_NAME && virtual_operand_p (dest))
    return false;

  /* Anything else is OK.  */
  return true;
}


/* Like may_propagate_copy, but use as the destination expression
   the principal expression (typically, the RHS) contained in
   statement DEST.  This is more efficient when working with the
   gimple tuples representation.  */

bool
may_propagate_copy_into_stmt (gimple dest, tree orig)
{
  tree type_d;
  tree type_o;

  /* If the statement is a switch or a single-rhs assignment,
     then the expression to be replaced by the propagation may
     be an SSA_NAME.  Fortunately, there is an explicit tree
     for the expression, so we delegate to may_propagate_copy.  */

  if (gimple_assign_single_p (dest))
    return may_propagate_copy (gimple_assign_rhs1 (dest), orig);
  else if (gswitch *dest_swtch = dyn_cast <gswitch *> (dest))
    return may_propagate_copy (gimple_switch_index (dest_swtch), orig);

  /* In other cases, the expression is not materialized, so there
     is no destination to pass to may_propagate_copy.  On the other
     hand, the expression cannot be an SSA_NAME, so the analysis
     is much simpler.  */

  if (TREE_CODE (orig) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
    return false;

  if (is_gimple_assign (dest))
    type_d = TREE_TYPE (gimple_assign_lhs (dest));
  else if (gimple_code (dest) == GIMPLE_COND)
    type_d = boolean_type_node;
  else if (is_gimple_call (dest)
	   && gimple_call_lhs (dest) != NULL_TREE)
    type_d = TREE_TYPE (gimple_call_lhs (dest));
  else
    gcc_unreachable ();

  type_o = TREE_TYPE (orig);

  if (!useless_type_conversion_p (type_d, type_o))
    return false;

  return true;
}

/* Similarly, but we know that we're propagating into an ASM_EXPR.  */

bool
may_propagate_copy_into_asm (tree dest ATTRIBUTE_UNUSED)
{
  return true;
}


/* Common code for propagate_value and replace_exp.

   Replace use operand OP_P with VAL.  FOR_PROPAGATION indicates if the
   replacement is done to propagate a value or not.  */

static void
replace_exp_1 (use_operand_p op_p, tree val,
	       bool for_propagation ATTRIBUTE_UNUSED)
{
#if defined ENABLE_CHECKING
  tree op = USE_FROM_PTR (op_p);

  gcc_assert (!(for_propagation
		&& TREE_CODE (op) == SSA_NAME
		&& TREE_CODE (val) == SSA_NAME
		&& !may_propagate_copy (op, val)));
#endif

  if (TREE_CODE (val) == SSA_NAME)
    SET_USE (op_p, val);
  else
    SET_USE (op_p, unshare_expr (val));
}


/* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
   into the operand pointed to by OP_P.

   Use this version for const/copy propagation as it will perform additional
   checks to ensure validity of the const/copy propagation.  */

void
propagate_value (use_operand_p op_p, tree val)
{
  replace_exp_1 (op_p, val, true);
}


/* Replace *OP_P with value VAL (assumed to be a constant or another SSA_NAME).

   Use this version when not const/copy propagating values.  For example,
   PRE uses this version when building expressions as they would appear
   in specific blocks taking into account actions of PHI nodes.

   The statement in which an expression has been replaced should be
   folded using fold_stmt_inplace.  */

void
replace_exp (use_operand_p op_p, tree val)
{
  replace_exp_1 (op_p, val, false);
}


/* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
   into the tree pointed to by OP_P.

   Use this version for const/copy propagation when SSA operands are not
   available.  It will perform the additional checks to ensure validity of
   the const/copy propagation, but will not update any operand information.
   Be sure to mark the stmt as modified.  */

void
propagate_tree_value (tree *op_p, tree val)
{
  if (TREE_CODE (val) == SSA_NAME)
    *op_p = val;
  else
    *op_p = unshare_expr (val);
}

/* Like propagate_tree_value, but use as the operand to replace
   the principal expression (typically, the RHS) contained in the
   statement referenced by iterator GSI.  Note that it is not
   always possible to update the statement in-place, so a new
   statement may be created to replace the original.  */

void
propagate_tree_value_into_stmt (gimple_stmt_iterator *gsi, tree val)
{
  gimple stmt = gsi_stmt (*gsi);

  if (is_gimple_assign (stmt))
    {
      tree expr = NULL_TREE;
      if (gimple_assign_single_p (stmt))
	expr = gimple_assign_rhs1 (stmt);
      propagate_tree_value (&expr, val);
      gimple_assign_set_rhs_from_tree (gsi, expr);
    }
  else if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      tree lhs = NULL_TREE;
      tree rhs = build_zero_cst (TREE_TYPE (val));
      propagate_tree_value (&lhs, val);
      gimple_cond_set_code (cond_stmt, NE_EXPR);
      gimple_cond_set_lhs (cond_stmt, lhs);
      gimple_cond_set_rhs (cond_stmt, rhs);
    }
  else if (is_gimple_call (stmt)
	   && gimple_call_lhs (stmt) != NULL_TREE)
    {
      tree expr = NULL_TREE;
      bool res;
      propagate_tree_value (&expr, val);
      res = update_call_from_tree (gsi, expr);
      gcc_assert (res);
    }
  else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
    propagate_tree_value (gimple_switch_index_ptr (swtch_stmt), val);
  else
    gcc_unreachable ();
}