/* Generic SSA value propagation engine.
   Copyright (C) 2004-2016 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by the
   Free Software Foundation; either version 3, or (at your option) any
   later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "dumpfile.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "domwalk.h"
#include "cfgloop.h"
#include "tree-cfgcleanup.h"
/* This file implements a generic value propagation engine based on
   the same propagation used by the SSA-CCP algorithm [1].

   Propagation is performed by simulating the execution of every
   statement that produces the value being propagated.  Simulation
   proceeds as follows:

   1- Initially, all edges of the CFG are marked not executable and
      the CFG worklist is seeded with all the statements in the entry
      basic block (block 0).

   2- Every statement S is simulated with a call to the call-back
      function SSA_PROP_VISIT_STMT.  This evaluation may produce 3
      results:

      SSA_PROP_NOT_INTERESTING: Statement S produces nothing of
	  interest and does not affect any of the work lists.

      SSA_PROP_VARYING: The value produced by S cannot be determined
	  at compile time.  Further simulation of S is not required.
	  If S is a conditional jump, all the outgoing edges for the
	  block are considered executable and added to the work
	  list.

      SSA_PROP_INTERESTING: S produces a value that can be computed
	  at compile time.  Its result can be propagated into the
	  statements that feed from S.  Furthermore, if S is a
	  conditional jump, only the edge known to be taken is added
	  to the work list.  Edges that are known not to execute are
	  never simulated.

   3- PHI nodes are simulated with a call to SSA_PROP_VISIT_PHI.  The
      return value from SSA_PROP_VISIT_PHI has the same semantics as
      described in #2.

   4- Three work lists are kept.  Statements are only added to these
      lists if they produce one of SSA_PROP_INTERESTING or
      SSA_PROP_VARYING.

      CFG_BLOCKS contains the list of blocks to be simulated.
	  Blocks are added to this list if their incoming edges are
	  found executable.

      VARYING_SSA_EDGES contains the list of statements that feed
	  from statements that produce an SSA_PROP_VARYING result.
	  These are simulated first to speed up processing.

      INTERESTING_SSA_EDGES contains the list of statements that
	  feed from statements that produce an SSA_PROP_INTERESTING
	  result.

   5- Simulation terminates when all three work lists are drained.

   Before calling ssa_propagate, it is important to clear
   prop_simulate_again_p for all the statements in the program that
   should be simulated.  This initialization allows an implementation
   to specify which statements should never be simulated.

   It is also important to compute def-use information before calling
   ssa_propagate.

   References:

     [1] Constant propagation with conditional branches,
	 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     [2] Building an Optimizing Compiler,
	 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     [3] Advanced Compiler Design and Implementation,
	 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
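
/* As a concrete illustration (a hypothetical client, not part of this
   file), a pass defines the two callbacks and hands them to
   ssa_propagate.  The signatures below match ssa_prop_visit_stmt_fn
   and ssa_prop_visit_phi_fn; the lattice logic is only sketched:

     static enum ssa_prop_result
     example_visit_stmt (gimple *stmt, edge *taken_edge_p, tree *output_p)
     {
       ...evaluate STMT against the pass's lattice; if it yields a
       useful value, record the defined SSA name in *OUTPUT_P, set
       *TAKEN_EDGE_P for a statically known branch, and return
       SSA_PROP_INTERESTING; otherwise...
       return SSA_PROP_VARYING;
     }

     static enum ssa_prop_result
     example_visit_phi (gphi *phi)
     {
       ...merge the lattice values flowing in on executable edges...
       return SSA_PROP_NOT_INTERESTING;
     }

     ssa_propagate (example_visit_stmt, example_visit_phi);  */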
/* Function pointers used to parameterize the propagation engine.  */
static ssa_prop_visit_stmt_fn ssa_prop_visit_stmt;
static ssa_prop_visit_phi_fn ssa_prop_visit_phi;

/* Keep track of statements that have been added to one of the SSA
   edges worklists.  This flag is used to avoid visiting statements
   unnecessarily when draining an SSA edge worklist.  If while
   simulating a basic block, we find a statement with
   STMT_IN_SSA_EDGE_WORKLIST set, we clear it to prevent SSA edge
   processing from visiting it again.

   NOTE: users of the propagation engine are not allowed to use
   the GF_PLF_1 flag.  */
#define STMT_IN_SSA_EDGE_WORKLIST	GF_PLF_1

/* A bitmap to keep track of executable blocks in the CFG.  */
static sbitmap executable_blocks;

/* Array of basic blocks on the control flow worklist.  */
static vec<basic_block> cfg_blocks;
static unsigned int cfg_blocks_num = 0;
static int cfg_blocks_tail;
static int cfg_blocks_head;

static sbitmap bb_in_list;

/* Worklist of SSA edges which will need reexamination as their
   definition has changed.  SSA edges are def-use edges in the SSA
   web.  For each D-U edge, we store the target statement or PHI node
   U.  */
static vec<gimple *> interesting_ssa_edges;

/* Identical to INTERESTING_SSA_EDGES.  For performance reasons, the
   list of SSA edges is split into two.  One contains all SSA edges
   that need to be reexamined because their lattice value changed to
   varying (this worklist), and the other contains all other SSA edges
   to be reexamined (INTERESTING_SSA_EDGES).

   Since most values in the program are VARYING, the ideal situation
   is to move them to that lattice value as quickly as possible.
   Thus, it doesn't make sense to process any other type of lattice
   value until all VARYING values are propagated fully, which is one
   thing using the VARYING worklist achieves.  In addition, if we
   don't use a separate worklist for VARYING edges, we end up with
   situations where lattice values move from
   UNDEFINED->INTERESTING->VARYING instead of UNDEFINED->VARYING.  */
static vec<gimple *> varying_ssa_edges;

/* Return true if the block worklist is empty.  */

static inline bool
cfg_blocks_empty_p (void)
{
  return (cfg_blocks_num == 0);
}

/* Add a basic block to the worklist.  The block must not be already
   in the worklist, and it must not be the ENTRY or EXIT block.  */

static void
cfg_blocks_add (basic_block bb)
{
  bool head = false;

  gcc_assert (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	      && bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
  gcc_assert (!bitmap_bit_p (bb_in_list, bb->index));

  if (cfg_blocks_empty_p ())
    {
      cfg_blocks_tail = cfg_blocks_head = 0;
      cfg_blocks_num = 1;
    }
  else
    {
      cfg_blocks_num++;
      if (cfg_blocks_num > cfg_blocks.length ())
	{
	  /* We have to grow the array now.  Adjust the queue to occupy
	     the full space of the original array.  We do not need to
	     initialize the newly allocated portion of the array
	     because we keep track of CFG_BLOCKS_HEAD and
	     CFG_BLOCKS_TAIL.  */
	  cfg_blocks_tail = cfg_blocks.length ();
	  cfg_blocks_head = 0;
	  cfg_blocks.safe_grow (2 * cfg_blocks_tail);
	}
      /* Minor optimization: we prefer to see blocks with more
	 predecessors later, because there is more of a chance that
	 the incoming edges will be executable.  */
      else if (EDGE_COUNT (bb->preds)
	       >= EDGE_COUNT (cfg_blocks[cfg_blocks_head]->preds))
	cfg_blocks_tail = ((cfg_blocks_tail + 1) % cfg_blocks.length ());
      else
	{
	  if (cfg_blocks_head == 0)
	    cfg_blocks_head = cfg_blocks.length ();
	  --cfg_blocks_head;
	  head = true;
	}
    }

  cfg_blocks[head ? cfg_blocks_head : cfg_blocks_tail] = bb;
  bitmap_set_bit (bb_in_list, bb->index);
}

/* Remove a block from the worklist.  */

static basic_block
cfg_blocks_get (void)
{
  basic_block bb;

  bb = cfg_blocks[cfg_blocks_head];

  gcc_assert (!cfg_blocks_empty_p ());
  gcc_assert (bb);

  cfg_blocks_head = ((cfg_blocks_head + 1) % cfg_blocks.length ());
  --cfg_blocks_num;
  bitmap_clear_bit (bb_in_list, bb->index);

  return bb;
}
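
/* To illustrate the circular buffer (a hypothetical trace): with a
   4-entry CFG_BLOCKS holding just B5 at index 0 (head == tail == 0),
   adding a block with at least as many predecessors as B5 advances the
   tail and appends it at index 1, while adding a block with fewer
   predecessors wraps the head back to index 3, so that cfg_blocks_get,
   which always dequeues at CFG_BLOCKS_HEAD, returns it before B5.  */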

/* We have just defined a new value for VAR.  If IS_VARYING is true,
   add all immediate uses of VAR to VARYING_SSA_EDGES, otherwise add
   them to INTERESTING_SSA_EDGES.  */

static void
add_ssa_edge (tree var, bool is_varying)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      gimple *use_stmt = USE_STMT (use_p);

      if (prop_simulate_again_p (use_stmt)
	  && !gimple_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST))
	{
	  gimple_set_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST, true);
	  if (is_varying)
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "varying_ssa_edges: adding SSA use in ");
		  print_gimple_stmt (dump_file, use_stmt, 0, TDF_SLIM);
		}
	      varying_ssa_edges.safe_push (use_stmt);
	    }
	  else
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file,
			   "interesting_ssa_edges: adding SSA use in ");
		  print_gimple_stmt (dump_file, use_stmt, 0, TDF_SLIM);
		}
	      interesting_ssa_edges.safe_push (use_stmt);
	    }
	}
    }
}

/* Add edge E to the control flow worklist.  */

static void
add_control_edge (edge e)
{
  basic_block bb = e->dest;
  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;

  /* If the edge had already been executed, skip it.  */
  if (e->flags & EDGE_EXECUTABLE)
    return;

  e->flags |= EDGE_EXECUTABLE;

  /* If the block is already in the list, we're done.  */
  if (bitmap_bit_p (bb_in_list, bb->index))
    return;

  cfg_blocks_add (bb);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Adding destination of edge (%d -> %d) to worklist\n",
	     e->src->index, e->dest->index);
}

/* Simulate the execution of STMT and update the work lists accordingly.  */

static void
simulate_stmt (gimple *stmt)
{
  enum ssa_prop_result val = SSA_PROP_NOT_INTERESTING;
  edge taken_edge = NULL;
  tree output_name = NULL_TREE;

  /* Don't bother visiting statements that are already
     considered varying by the propagator.  */
  if (!prop_simulate_again_p (stmt))
    return;

  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      val = ssa_prop_visit_phi (as_a <gphi *> (stmt));
      output_name = gimple_phi_result (stmt);
    }
  else
    val = ssa_prop_visit_stmt (stmt, &taken_edge, &output_name);

  if (val == SSA_PROP_VARYING)
    {
      prop_set_simulate_again (stmt, false);

      /* If the statement produced a new varying value, add the SSA
	 edges coming out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name, true);

      /* If STMT transfers control out of its basic block, add
	 all outgoing edges to the work list.  */
      if (stmt_ends_bb_p (stmt))
	{
	  edge e;
	  edge_iterator ei;
	  basic_block bb = gimple_bb (stmt);
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    add_control_edge (e);
	}
      return;
    }
  else if (val == SSA_PROP_INTERESTING)
    {
      /* If the statement produced a new value, add the SSA edges coming
	 out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name, false);

      /* If we know which edge is going to be taken out of this block,
	 add it to the CFG work list.  */
      if (taken_edge)
	add_control_edge (taken_edge);
    }

  /* If there are no SSA uses on the stmt whose defs are simulated
     again then this stmt will never be visited again.  */
  bool has_simulate_again_uses = false;
  use_operand_p use_p;
  ssa_op_iter iter;
  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      edge_iterator ei;
      edge e;
      tree arg;
      FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->preds)
	if (!(e->flags & EDGE_EXECUTABLE)
	    || ((arg = PHI_ARG_DEF_FROM_EDGE (stmt, e))
		&& TREE_CODE (arg) == SSA_NAME
		&& !SSA_NAME_IS_DEFAULT_DEF (arg)
		&& prop_simulate_again_p (SSA_NAME_DEF_STMT (arg))))
	  {
	    has_simulate_again_uses = true;
	    break;
	  }
    }
  else
    FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
      {
	gimple *def_stmt = SSA_NAME_DEF_STMT (USE_FROM_PTR (use_p));
	if (!gimple_nop_p (def_stmt)
	    && prop_simulate_again_p (def_stmt))
	  {
	    has_simulate_again_uses = true;
	    break;
	  }
      }

  if (!has_simulate_again_uses)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "marking stmt to be not simulated again\n");
      prop_set_simulate_again (stmt, false);
    }
}

/* Process an SSA edge worklist.  WORKLIST is the SSA edge worklist to
   drain.  This pops statements off the given WORKLIST and processes
   them until one statement is simulated or there are no more statements
   on WORKLIST.  We take a pointer to WORKLIST because it may be reallocated
   when an SSA edge is added to it in simulate_stmt.  Return true if a stmt
   was simulated.  */

static bool
process_ssa_edge_worklist (vec<gimple *> *worklist, const char *edge_list_name)
{
  /* Process the next entry from the worklist.  */
  while (worklist->length () > 0)
    {
      basic_block bb;

      /* Pull the statement to simulate off the worklist.  */
      gimple *stmt = worklist->pop ();

      /* If this statement was already visited by simulate_block, then
	 we don't need to visit it again here.  */
      if (!gimple_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST))
	continue;

      /* STMT is no longer in a worklist.  */
      gimple_set_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST, false);

      bb = gimple_bb (stmt);

      /* Visit the statement only if its block is marked executable.
	 If it is not executable then it will be visited when we simulate
	 all statements in the block as soon as an incoming edge gets
	 marked executable.  */
      if (!bitmap_bit_p (executable_blocks, bb->index))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\nDropping statement from SSA worklist: ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }
	  continue;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "\nSimulating statement (from %s): ",
		   edge_list_name);
	  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	}

      simulate_stmt (stmt);

      return true;
    }

  return false;
}

/* Simulate the execution of BLOCK.  Evaluate the statement associated
   with each variable reference inside the block.  */

static void
simulate_block (basic_block block)
{
  gimple_stmt_iterator gsi;

  /* There is nothing to do for the exit block.  */
  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSimulating block %d\n", block->index);

  /* Always simulate PHI nodes, even if we have simulated this block
     before.  */
  for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
    simulate_stmt (gsi_stmt (gsi));

  /* If this is the first time we've simulated this block, then we
     must simulate each of its statements.  */
  if (!bitmap_bit_p (executable_blocks, block->index))
    {
      gimple_stmt_iterator j;
      unsigned int normal_edge_count;
      edge e, normal_edge;
      edge_iterator ei;

      /* Note that we have simulated this block.  */
      bitmap_set_bit (executable_blocks, block->index);

      for (j = gsi_start_bb (block); !gsi_end_p (j); gsi_next (&j))
	{
	  gimple *stmt = gsi_stmt (j);

	  /* If this statement is already in the worklist then
	     "cancel" it.  The reevaluation implied by the worklist
	     entry will produce the same value we generate here and
	     thus reevaluating it again from the worklist is
	     pointless.  */
	  if (gimple_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST))
	    gimple_set_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST, false);

	  simulate_stmt (stmt);
	}

      /* We cannot predict when abnormal and EH edges will be executed, so
	 once a block is considered executable, we consider any
	 outgoing abnormal edges as executable.

	 TODO: This is not exactly true.  Simplifying a statement might
	 prove it non-throwing, and a computed goto can be handled
	 when the destination is known.

	 At the same time, if this block has only one successor that is
	 reached by non-abnormal edges, then add that successor to the
	 worklist.  */
      normal_edge_count = 0;
      normal_edge = NULL;
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
	    add_control_edge (e);
	  else
	    {
	      normal_edge_count++;
	      normal_edge = e;
	    }
	}

      if (normal_edge_count == 1)
	add_control_edge (normal_edge);
    }
}

/* Initialize local data structures and work lists.  */

static void
ssa_prop_init (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb;

  /* Worklists of SSA edges.  */
  interesting_ssa_edges.create (20);
  varying_ssa_edges.create (20);

  executable_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (executable_blocks);

  bb_in_list = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (bb_in_list);

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_immediate_uses (dump_file);

  cfg_blocks.create (20);
  cfg_blocks.safe_grow_cleared (20);

  /* Initially assume that every edge in the CFG is not executable
     (including the edges coming out of the entry block).  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator si;

      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	gimple_set_plf (gsi_stmt (si), STMT_IN_SSA_EDGE_WORKLIST, false);

      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
	gimple_set_plf (gsi_stmt (si), STMT_IN_SSA_EDGE_WORKLIST, false);

      FOR_EACH_EDGE (e, ei, bb->succs)
	e->flags &= ~EDGE_EXECUTABLE;
    }

  /* Seed the algorithm by adding the successors of the entry block to the
     edge worklist.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    add_control_edge (e);
}

/* Free allocated storage.  */

static void
ssa_prop_fini (void)
{
  interesting_ssa_edges.release ();
  varying_ssa_edges.release ();
  cfg_blocks.release ();
  sbitmap_free (bb_in_list);
  sbitmap_free (executable_blocks);
}

/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.c
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning any
   attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if (((code == VEC_COND_EXPR || code == COND_EXPR)
		   ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
		   : !is_gimple_val (TREE_OPERAND (expr, 0)))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
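
/* For instance (hypothetical trees): 'a_1 + b_2' is a valid GIMPLE RHS
   because both tcc_binary operands are GIMPLE values, whereas
   'a_1 + b_2 * c_3' is rejected since one operand is itself an
   expression, and '&p_1->x[i_2 + 1]' is rejected because the array
   index embedded under the ADDR_EXPR is not a GIMPLE value.  */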

/* Return true if EXPR is a CALL_EXPR suitable for representation
   as a single GIMPLE_CALL statement.  If the arguments require
   further gimplification, return false.  */

static bool
valid_gimple_call_p (tree expr)
{
  unsigned i, nargs;

  if (TREE_CODE (expr) != CALL_EXPR)
    return false;

  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    {
      tree arg = CALL_EXPR_ARG (expr, i);
      if (is_gimple_reg_type (TREE_TYPE (arg)))
	{
	  if (!is_gimple_val (arg))
	    return false;
	}
      else
	if (!is_gimple_lvalue (arg))
	  return false;
    }

  return true;
}
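
/* For example (hypothetical trees): the CALL_EXPR 'foo (a_1, 4)' is
   acceptable because every register-typed argument is a GIMPLE value,
   while 'foo (a_1 + 1, 4)' is not, since 'a_1 + 1' would first have to
   be gimplified into a separate statement.  */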

/* Make SSA names defined by OLD_STMT point to NEW_STMT
   as their defining statement.  */

void
move_ssa_defining_stmt_for_defs (gimple *new_stmt, gimple *old_stmt)
{
  tree var;
  ssa_op_iter iter;

  if (gimple_in_ssa_p (cfun))
    {
      /* Make defined SSA_NAMEs point to the new
	 statement as their definition.  */
      FOR_EACH_SSA_TREE_OPERAND (var, old_stmt, iter, SSA_OP_ALL_DEFS)
	{
	  if (TREE_CODE (var) == SSA_NAME)
	    SSA_NAME_DEF_STMT (var) = new_stmt;
	}
    }
}

/* Helper function for update_gimple_call and update_call_from_tree.
   A GIMPLE_CALL STMT is being replaced with GIMPLE_CALL NEW_STMT.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
			   gimple *stmt)
{
  gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}

/* Update a GIMPLE_CALL statement at iterator *SI_P to call FN with
   NARGS arguments; the arguments, already in GIMPLE form, follow the
   NARGS argument.  */

bool
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
  return true;
}

/* Update a GIMPLE_CALL statement at iterator *SI_P to reflect the
   value of EXPR, which is expected to be the result of folding the
   call.  This can only be done if EXPR is a CALL_EXPR with valid
   GIMPLE operands as arguments, or if it is a suitable RHS expression
   for a GIMPLE_ASSIGN.  More complex expressions will require
   gimplification, which will introduce additional statements.  In this
   event, no update is performed, and the function returns false.
   Note that we cannot mutate a GIMPLE_CALL in-place, so we always
   replace the statement at *SI_P with an entirely new statement.
   The new statement need not be a call, e.g., if the original call
   folded to a constant.  */

bool
update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();

      return true;
    }
  else if (valid_gimple_rhs_p (expr))
    {
      tree lhs = gimple_call_lhs (stmt);
      gimple *new_stmt;

      /* The call has simplified to an expression
	 that cannot be represented as a GIMPLE_CALL.  */
      if (lhs)
	{
	  /* A value is expected.
	     Introduce a new GIMPLE_ASSIGN statement.  */
	  STRIP_USELESS_TYPE_CONVERSION (expr);
	  new_stmt = gimple_build_assign (lhs, expr);
	  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	}
      else if (!TREE_SIDE_EFFECTS (expr))
	{
	  /* No value is expected, and EXPR has no effect.
	     Replace it with an empty statement.  */
	  new_stmt = gimple_build_nop ();
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	}
      else
	{
	  /* No value is expected, but EXPR has an effect,
	     e.g., it could be a reference to a volatile
	     variable.  Create an assignment statement
	     with a dummy (unused) lhs variable.  */
	  STRIP_USELESS_TYPE_CONVERSION (expr);
	  if (gimple_in_ssa_p (cfun))
	    lhs = make_ssa_name (TREE_TYPE (expr));
	  else
	    lhs = create_tmp_var (TREE_TYPE (expr));
	  new_stmt = gimple_build_assign (lhs, expr);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
	}
      gimple_set_location (new_stmt, gimple_location (stmt));
      gsi_replace (si_p, new_stmt, false);
      return true;
    }
  else
    /* The call simplified to an expression that is
       not a valid GIMPLE RHS.  */
    return false;
}
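
/* For example (a hypothetical folding): if 'tmp_1 = strlen ("abc")'
   folds to the constant 3, EXPR is a valid GIMPLE RHS and the call is
   replaced by the assignment 'tmp_1 = 3', with the location and SSA
   definitions carried over from the original statement.  */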

/* Entry point to the propagation engine.

   VISIT_STMT is called for every statement visited.
   VISIT_PHI is called for every PHI node visited.  */

void
ssa_propagate (ssa_prop_visit_stmt_fn visit_stmt,
	       ssa_prop_visit_phi_fn visit_phi)
{
  ssa_prop_visit_stmt = visit_stmt;
  ssa_prop_visit_phi = visit_phi;

  ssa_prop_init ();

  /* Iterate until the worklists are empty.  */
  while (!cfg_blocks_empty_p ()
	 || interesting_ssa_edges.length () > 0
	 || varying_ssa_edges.length () > 0)
    {
      if (!cfg_blocks_empty_p ())
	{
	  /* Pull the next block to simulate off the worklist.  */
	  basic_block dest_block = cfg_blocks_get ();
	  simulate_block (dest_block);
	  continue;
	}

      /* In order to move things to varying as quickly as
	 possible, process the VARYING_SSA_EDGES worklist first.  */
      if (process_ssa_edge_worklist (&varying_ssa_edges, "varying_ssa_edges"))
	continue;

      /* Now process the INTERESTING_SSA_EDGES worklist.  */
      process_ssa_edge_worklist (&interesting_ssa_edges,
				 "interesting_ssa_edges");
    }

  ssa_prop_fini ();
}

/* Return true if STMT is of the form 'mem_ref = RHS', where 'mem_ref'
   is a non-volatile pointer dereference, a structure reference or a
   reference to a single _DECL.  Ignore volatile memory references
   because they are not interesting for the optimizers.  */

bool
stmt_makes_single_store (gimple *stmt)
{
  tree lhs;

  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_CALL)
    return false;

  if (!gimple_vdef (stmt))
    return false;

  lhs = gimple_get_lhs (stmt);

  /* A call statement may have a null LHS.  */
  if (!lhs)
    return false;

  return (!TREE_THIS_VOLATILE (lhs)
	  && (DECL_P (lhs)
	      || REFERENCE_CLASS_P (lhs)));
}
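
/* For example (hypothetical statements): '*p_1 = x_2' and 'a.f = 3'
   each make a single store, while 'x_3 = y_4' does not (it has no
   virtual definition) and a store through a volatile-qualified LHS is
   deliberately ignored.  */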

/* Propagation statistics.  */
struct prop_stats_d
{
  long num_const_prop;
  long num_copy_prop;
  long num_stmts_folded;
  long num_dce;
};

static struct prop_stats_d prop_stats;

/* Replace USE references in statement STMT with the values returned
   by GET_VALUE.  Return true if at least one reference was replaced.  */

static bool
replace_uses_in (gimple *stmt, ssa_prop_get_value_fn get_value)
{
  bool replaced = false;
  use_operand_p use;
  ssa_op_iter iter;

  FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      tree tuse = USE_FROM_PTR (use);
      tree val = (*get_value) (tuse);

      if (val == tuse || val == NULL_TREE)
	continue;

      if (gimple_code (stmt) == GIMPLE_ASM
	  && !may_propagate_copy_into_asm (tuse))
	continue;

      if (!may_propagate_copy (tuse, val))
	continue;

      if (TREE_CODE (val) != SSA_NAME)
	prop_stats.num_const_prop++;
      else
	prop_stats.num_copy_prop++;

      propagate_value (use, val);

      replaced = true;
    }

  return replaced;
}

/* Replace propagated values into all the arguments for PHI using the
   values returned by GET_VALUE.  */

static bool
replace_phi_args_in (gphi *phi, ssa_prop_get_value_fn get_value)
{
  size_t i;
  bool replaced = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Folding PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
    }

  basic_block bb = gimple_bb (phi);
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree val = (*get_value) (arg);

	  if (val && val != arg && may_propagate_copy (arg, val))
	    {
	      edge e = gimple_phi_arg_edge (phi, i);

	      /* Avoid propagating constants into loop latch edge
	         PHI arguments as this makes coalescing the copy
		 across this edge impossible.  Do this only if the
		 argument is defined by an ASSERT_EXPR - otherwise
		 that stmt will get removed without replacing its
		 uses.  */
	      if (TREE_CODE (val) != SSA_NAME
		  && bb->loop_father->header == bb
		  && dominated_by_p (CDI_DOMINATORS, e->src, bb)
		  && is_gimple_assign (SSA_NAME_DEF_STMT (arg))
		  && (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (arg))
		      == ASSERT_EXPR))
		continue;

	      if (TREE_CODE (val) != SSA_NAME)
		prop_stats.num_const_prop++;
	      else
		prop_stats.num_copy_prop++;

	      propagate_value (PHI_ARG_DEF_PTR (phi, i), val);
	      replaced = true;

	      /* If we propagated a copy and this argument flows
		 through an abnormal edge, update the replacement
		 accordingly.  */
	      if (TREE_CODE (val) == SSA_NAME
		  && e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (val));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (!replaced)
	fprintf (dump_file, "No folding possible\n");
      else
	{
	  fprintf (dump_file, "Folded into: ");
	  print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }

  return replaced;
}

class substitute_and_fold_dom_walker : public dom_walker
{
public:
  substitute_and_fold_dom_walker (cdi_direction direction,
				  ssa_prop_get_value_fn get_value_fn_,
				  ssa_prop_fold_stmt_fn fold_fn_,
				  bool do_dce_)
    : dom_walker (direction), get_value_fn (get_value_fn_),
      fold_fn (fold_fn_), do_dce (do_dce_), something_changed (false)
  {
    stmts_to_remove.create (0);
    stmts_to_fixup.create (0);
    need_eh_cleanup = BITMAP_ALLOC (NULL);
  }
  ~substitute_and_fold_dom_walker ()
  {
    stmts_to_remove.release ();
    stmts_to_fixup.release ();
    BITMAP_FREE (need_eh_cleanup);
  }

  virtual edge before_dom_children (basic_block);
  virtual void after_dom_children (basic_block) {}

  ssa_prop_get_value_fn get_value_fn;
  ssa_prop_fold_stmt_fn fold_fn;
  bool do_dce;
  bool something_changed;
  vec<gimple *> stmts_to_remove;
  vec<gimple *> stmts_to_fixup;
  bitmap need_eh_cleanup;
};

edge
substitute_and_fold_dom_walker::before_dom_children (basic_block bb)
{
  /* Propagate known values into PHI nodes.  */
  for (gphi_iterator i = gsi_start_phis (bb);
       !gsi_end_p (i);
       gsi_next (&i))
    {
      gphi *phi = i.phi ();
      tree res = gimple_phi_result (phi);
      if (virtual_operand_p (res))
	continue;
      if (do_dce
	  && res && TREE_CODE (res) == SSA_NAME)
	{
	  tree sprime = get_value_fn (res);
	  if (sprime
	      && sprime != res
	      && may_propagate_copy (res, sprime))
	    {
	      stmts_to_remove.safe_push (phi);
	      continue;
	    }
	}
      something_changed |= replace_phi_args_in (phi, get_value_fn);
    }

  /* Propagate known values into stmts.  In some cases this exposes
     more trivially deletable stmts to walk backward.  */
  for (gimple_stmt_iterator i = gsi_start_bb (bb);
       !gsi_end_p (i);
       gsi_next (&i))
    {
      bool did_replace;
      gimple *stmt = gsi_stmt (i);
      enum gimple_code code = gimple_code (stmt);

      /* Ignore ASSERT_EXPRs.  They are used by VRP to generate
	 range information for names and they are discarded
	 afterwards.  */
      if (code == GIMPLE_ASSIGN
	  && TREE_CODE (gimple_assign_rhs1 (stmt)) == ASSERT_EXPR)
	continue;

      /* No point propagating into a stmt we have a value for, since
	 we can propagate it into all uses.  Mark it for removal
	 instead.  */
      tree lhs = gimple_get_lhs (stmt);
      if (do_dce
	  && lhs && TREE_CODE (lhs) == SSA_NAME)
	{
	  tree sprime = get_value_fn (lhs);
	  if (sprime
	      && sprime != lhs
	      && may_propagate_copy (lhs, sprime)
	      && !stmt_could_throw_p (stmt)
	      && !gimple_has_side_effects (stmt))
	    {
	      stmts_to_remove.safe_push (stmt);
	      continue;
	    }
	}

      /* Replace the statement with its folded version and mark it
	 folded.  */
      did_replace = false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Folding statement: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	}

      gimple *old_stmt = stmt;
      bool was_noreturn = (is_gimple_call (stmt)
			   && gimple_call_noreturn_p (stmt));

      /* Some statements may be simplified using propagator
	 specific information.  Do this before propagating
	 into the stmt to not disturb pass specific information.  */
      if (fold_fn
	  && (*fold_fn) (&i))
	{
	  did_replace = true;
	  prop_stats.num_stmts_folded++;
	  stmt = gsi_stmt (i);
	  update_stmt (stmt);
	}

      /* Replace real uses in the statement.  */
      did_replace |= replace_uses_in (stmt, get_value_fn);

      /* If we made a replacement, fold the statement.  */
      if (did_replace)
	{
	  fold_stmt (&i, follow_single_use_edges);
	  stmt = gsi_stmt (i);
	}

      /* If this is a control statement the propagator left edges
	 unexecuted on, force the condition in a way consistent with
	 that.  See PR66945 for cases where the propagator can end
	 up with a different idea of a taken edge than folding
	 (once undefined behavior is involved).  */
      if (gimple_code (stmt) == GIMPLE_COND)
	{
	  if ((EDGE_SUCC (bb, 0)->flags & EDGE_EXECUTABLE)
	      ^ (EDGE_SUCC (bb, 1)->flags & EDGE_EXECUTABLE))
	    {
	      if (((EDGE_SUCC (bb, 0)->flags & EDGE_TRUE_VALUE) != 0)
		  == ((EDGE_SUCC (bb, 0)->flags & EDGE_EXECUTABLE) != 0))
		gimple_cond_make_true (as_a <gcond *> (stmt));
	      else
		gimple_cond_make_false (as_a <gcond *> (stmt));
	      did_replace = true;
	    }
	}

      /* Now cleanup.  */
      if (did_replace)
	{
	  /* If we cleaned up EH information from the statement,
	     remove EH edges.  */
	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
	    bitmap_set_bit (need_eh_cleanup, bb->index);

	  /* If we turned a call that was not noreturn into a noreturn
	     one, schedule it for fixup.  */
	  if (!was_noreturn
	      && is_gimple_call (stmt)
	      && gimple_call_noreturn_p (stmt))
	    stmts_to_fixup.safe_push (stmt);

	  if (gimple_assign_single_p (stmt))
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);

	      if (TREE_CODE (rhs) == ADDR_EXPR)
		recompute_tree_invariant_for_addr_expr (rhs);
	    }

	  /* Determine what needs to be done to update the SSA form.  */
	  update_stmt (stmt);
	  if (!is_gimple_debug (stmt))
	    something_changed = true;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (did_replace)
	    {
	      fprintf (dump_file, "Folded into: ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }
	  else
	    fprintf (dump_file, "Not folded\n");
	}
    }
  return NULL;
}

/* Perform final substitution and folding of propagated values.

   GET_VALUE_FN is called to obtain the single value that should be
   substituted at every use of an SSA name; if it returns NULL_TREE,
   no value is substituted for that name.

   If FOLD_FN is non-NULL the function will be invoked on all statements
   before propagating values for pass specific simplification.

   DO_DCE is true if trivially dead stmts can be removed.  Statements
   queued for removal are deleted after the dominator walk, in reverse
   order.

   Return TRUE when something changed.  */

bool
substitute_and_fold (ssa_prop_get_value_fn get_value_fn,
		     ssa_prop_fold_stmt_fn fold_fn,
		     bool do_dce)
{
  gcc_assert (get_value_fn);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSubstituting values and folding statements\n\n");

  memset (&prop_stats, 0, sizeof (prop_stats));

  calculate_dominance_info (CDI_DOMINATORS);
  substitute_and_fold_dom_walker walker (CDI_DOMINATORS,
					 get_value_fn, fold_fn, do_dce);
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* We cannot remove stmts during the BB walk, especially not release
     SSA names there as that destroys the lattice of our callers.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!walker.stmts_to_remove.is_empty ())
    {
      gimple *stmt = walker.stmts_to_remove.pop ();
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Removing dead stmt ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "\n");
	}
      prop_stats.num_dce++;
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
	remove_phi_node (&gsi, true);
      else
	{
	  unlink_stmt_vdef (stmt);
	  gsi_remove (&gsi, true);
	  release_defs (stmt);
	}
    }

  if (!bitmap_empty_p (walker.need_eh_cleanup))
    gimple_purge_all_dead_eh_edges (walker.need_eh_cleanup);

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!walker.stmts_to_fixup.is_empty ())
    {
      gimple *stmt = walker.stmts_to_fixup.pop ();
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "\n");
	}
      fixup_noreturn_call (stmt);
    }

  statistics_counter_event (cfun, "Constants propagated",
			    prop_stats.num_const_prop);
  statistics_counter_event (cfun, "Copies propagated",
			    prop_stats.num_copy_prop);
  statistics_counter_event (cfun, "Statements folded",
			    prop_stats.num_stmts_folded);
  statistics_counter_event (cfun, "Statements deleted",
			    prop_stats.num_dce);

  return walker.something_changed;
}
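
/* A typical use (a hypothetical sketch, mirroring how passes such as
   CCP drive this): after ssa_propagate has run the lattice to a fixed
   point, the pass calls

     substitute_and_fold (get_constant_value, ccp_fold_stmt, true);

   where the value callback maps an SSA name to its final lattice value
   (or NULL_TREE) and the fold callback applies pass-specific
   simplifications before the generic folding done here.  */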

/* Return true if we may propagate ORIG into DEST, false otherwise.  */

bool
may_propagate_copy (tree dest, tree orig)
{
  tree type_d = TREE_TYPE (dest);
  tree type_o = TREE_TYPE (orig);

  /* If ORIG is a default definition which flows in from an abnormal edge
     then the copy can be propagated.  It is important that we do so to avoid
     uninitialized copies.  */
  if (TREE_CODE (orig) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig)
      && SSA_NAME_IS_DEFAULT_DEF (orig)
      && (SSA_NAME_VAR (orig) == NULL_TREE
	  || TREE_CODE (SSA_NAME_VAR (orig)) == VAR_DECL))
    ;
  /* Otherwise if ORIG just flows in from an abnormal edge then the copy cannot
     be propagated.  */
  else if (TREE_CODE (orig) == SSA_NAME
	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
    return false;
  /* Similarly if DEST flows in from an abnormal edge then the copy cannot be
     propagated.  */
  else if (TREE_CODE (dest) == SSA_NAME
	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (dest))
    return false;

  /* Do not copy between types for which we *do* need a conversion.  */
  if (!useless_type_conversion_p (type_d, type_o))
    return false;

  /* Generally propagating virtual operands is not ok as that may
     create overlapping life-ranges.  */
  if (TREE_CODE (dest) == SSA_NAME && virtual_operand_p (dest))
    return false;

  /* Anything else is OK.  */
  return true;
}

/* Like may_propagate_copy, but use as the destination expression
   the principal expression (typically, the RHS) contained in
   statement DEST.  This is more efficient when working with the
   gimple tuples representation.  */

bool
may_propagate_copy_into_stmt (gimple *dest, tree orig)
{
  tree type_d;
  tree type_o;

  /* If the statement is a switch or a single-rhs assignment,
     then the expression to be replaced by the propagation may
     be an SSA_NAME.  Fortunately, there is an explicit tree
     for the expression, so we delegate to may_propagate_copy.  */

  if (gimple_assign_single_p (dest))
    return may_propagate_copy (gimple_assign_rhs1 (dest), orig);
  else if (gswitch *dest_swtch = dyn_cast <gswitch *> (dest))
    return may_propagate_copy (gimple_switch_index (dest_swtch), orig);

  /* In other cases, the expression is not materialized, so there
     is no destination to pass to may_propagate_copy.  On the other
     hand, the expression cannot be an SSA_NAME, so the analysis
     is much simpler.  */

  if (TREE_CODE (orig) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
    return false;

  if (is_gimple_assign (dest))
    type_d = TREE_TYPE (gimple_assign_lhs (dest));
  else if (gimple_code (dest) == GIMPLE_COND)
    type_d = boolean_type_node;
  else if (is_gimple_call (dest)
	   && gimple_call_lhs (dest) != NULL_TREE)
    type_d = TREE_TYPE (gimple_call_lhs (dest));
  else
    gcc_unreachable ();

  type_o = TREE_TYPE (orig);

  if (!useless_type_conversion_p (type_d, type_o))
    return false;

  return true;
}

/* Similarly, but we know that we're propagating into an ASM_EXPR.  */

bool
may_propagate_copy_into_asm (tree dest ATTRIBUTE_UNUSED)
{
  return true;
}

/* Common code for propagate_value and replace_exp.

   Replace use operand OP_P with VAL.  FOR_PROPAGATION indicates if the
   replacement is done to propagate a value or not.  */

static void
replace_exp_1 (use_operand_p op_p, tree val,
	       bool for_propagation ATTRIBUTE_UNUSED)
{
  if (flag_checking)
    {
      tree op = USE_FROM_PTR (op_p);
      gcc_assert (!(for_propagation
		    && TREE_CODE (op) == SSA_NAME
		    && TREE_CODE (val) == SSA_NAME
		    && !may_propagate_copy (op, val)));
    }

  if (TREE_CODE (val) == SSA_NAME)
    SET_USE (op_p, val);
  else
    SET_USE (op_p, unshare_expr (val));
}

/* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
   into the operand pointed to by OP_P.

   Use this version for const/copy propagation as it will perform additional
   checks to ensure validity of the const/copy propagation.  */

void
propagate_value (use_operand_p op_p, tree val)
{
  replace_exp_1 (op_p, val, true);
}

/* Replace *OP_P with value VAL (assumed to be a constant or another
   SSA_NAME).

   Use this version when not const/copy propagating values.  For example,
   PRE uses this version when building expressions as they would appear
   in specific blocks taking into account actions of PHI nodes.

   The statement in which an expression has been replaced should be
   folded using fold_stmt_inplace.  */

void
replace_exp (use_operand_p op_p, tree val)
{
  replace_exp_1 (op_p, val, false);
}

/* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
   into the tree pointed to by OP_P.

   Use this version for const/copy propagation when SSA operands are not
   available.  It will perform the additional checks to ensure validity of
   the const/copy propagation, but will not update any operand information.
   Be sure to mark the stmt as modified.  */

void
propagate_tree_value (tree *op_p, tree val)
{
  if (TREE_CODE (val) == SSA_NAME)
    *op_p = val;
  else
    *op_p = unshare_expr (val);
}

/* Like propagate_tree_value, but use as the operand to replace
   the principal expression (typically, the RHS) contained in the
   statement referenced by iterator GSI.  Note that it is not
   always possible to update the statement in-place, so a new
   statement may be created to replace the original.  */

void
propagate_tree_value_into_stmt (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);

  if (is_gimple_assign (stmt))
    {
      tree expr = NULL_TREE;
      if (gimple_assign_single_p (stmt))
	expr = gimple_assign_rhs1 (stmt);
      propagate_tree_value (&expr, val);
      gimple_assign_set_rhs_from_tree (gsi, expr);
    }
  else if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      tree lhs = NULL_TREE;
      tree rhs = build_zero_cst (TREE_TYPE (val));
      propagate_tree_value (&lhs, val);
      gimple_cond_set_code (cond_stmt, NE_EXPR);
      gimple_cond_set_lhs (cond_stmt, lhs);
      gimple_cond_set_rhs (cond_stmt, rhs);
    }
  else if (is_gimple_call (stmt)
	   && gimple_call_lhs (stmt) != NULL_TREE)
    {
      tree expr = NULL_TREE;
      bool res;
      propagate_tree_value (&expr, val);
      res = update_call_from_tree (gsi, expr);
      gcc_assert (res);
    }
  else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
    propagate_tree_value (gimple_switch_index_ptr (swtch_stmt), val);
  else
    gcc_unreachable ();
}
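
/* For example (hypothetical): propagating the value 'a_1' into the
   GIMPLE_COND 'if (b_2 != 0)' rewrites it as 'if (a_1 != 0)'; the
   propagated value becomes the LHS of an NE_EXPR against zero of its
   own type, as built above.  */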