gcc/tree-ssa-forwprop.c

/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004, 2005, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "timevar.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-dump.h"
#include "langhooks.h"
#include "flags.h"

/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   Note carefully that after propagation the resulting statement
   must still be a proper gimple statement.  Right now we simply
   only perform propagations we know will result in valid gimple
   code.  One day we'll want to generalize this code.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */

static bool forward_propagate_addr_expr (tree name, tree rhs);

/* Set to true if we delete EH edges during the optimization.  */
static bool cfg_changed;

/* Get the next statement we can propagate NAME's value into skipping
   trivial copies.  Returns the statement that is suitable as a
   propagation destination or NULL_TREE if there is no such one.
   This only returns destinations in a single-use chain.  If
   FINAL_NAME_P is non-NULL, the ssa name that represents the use
   is written to it.  */

static tree
get_prop_dest_stmt (tree name, tree *final_name_p)
{
  use_operand_p use;
  tree use_stmt;

  do {
    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))
      return NULL_TREE;

    /* If this is not a trivial copy, we found it.  */
    if (TREE_CODE (use_stmt) != GIMPLE_MODIFY_STMT
        || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) != SSA_NAME
        || GIMPLE_STMT_OPERAND (use_stmt, 1) != name)
      break;

    /* Continue searching uses of the copy destination.  */
    name = GIMPLE_STMT_OPERAND (use_stmt, 0);
  } while (1);

  if (final_name_p)
    *final_name_p = name;

  return use_stmt;
}

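/* For example (SSA names are illustrative), given

     a_2 = b_1;
     c_3 = a_2;
     if (c_3 != 0) goto L1; else goto L2;

   where each name has a single use, get_prop_dest_stmt (b_1, &final)
   skips the two trivial copies, returns the COND_EXPR statement and
   sets final to c_3.  */
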
/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL_TREE if there is no such one.
   If SINGLE_USE_ONLY is set considers only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static tree
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    tree def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
        single_use = false;
        if (single_use_only)
          return NULL_TREE;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (TREE_CODE (def_stmt) != GIMPLE_MODIFY_STMT)
      return NULL_TREE;

    /* If name is not a simple copy destination, we found it.  */
    if (TREE_CODE (GIMPLE_STMT_OPERAND (def_stmt, 1)) != SSA_NAME)
      {
        if (!single_use_only && single_use_p)
          *single_use_p = single_use;

        return def_stmt;
      }

    /* Continue searching the def of the copy source name.  */
    name = GIMPLE_STMT_OPERAND (def_stmt, 1);
  } while (1);
}

/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (tree def_stmt)
{
  tree rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  switch (TREE_CODE_LENGTH (TREE_CODE (rhs)))
    {
    case 3:
      if (TREE_OPERAND (rhs, 2) != NULL_TREE
          && TREE_CODE (TREE_OPERAND (rhs, 2)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (rhs, 2)))
        return false;
    case 2:
      if (TREE_OPERAND (rhs, 1) != NULL_TREE
          && TREE_CODE (TREE_OPERAND (rhs, 1)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (rhs, 1)))
        return false;
    case 1:
      if (TREE_OPERAND (rhs, 0) != NULL_TREE
          && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (rhs, 0)))
        return false;
      break;

    default:
      return false;
    }

  /* If the definition is a conversion of a pointer to a function type,
     then we can not apply optimizations as some targets require function
     pointers to be canonicalized and in this case this optimization could
     eliminate a necessary canonicalization.  */
  if ((TREE_CODE (rhs) == NOP_EXPR
       || TREE_CODE (rhs) == CONVERT_EXPR)
      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0)))
      && TREE_CODE (TREE_TYPE (TREE_TYPE
                               (TREE_OPERAND (rhs, 0)))) == FUNCTION_TYPE)
    return false;

  return true;
}

/* Remove a copy chain ending in NAME along the defs but not
   further or including UP_TO_STMT.  If NAME was replaced in
   its only use then this function can be used to clean up
   dead stmts.  Returns true if UP_TO_STMT can be removed
   as well, otherwise false.  */

static bool
remove_prop_source_from_use (tree name, tree up_to_stmt)
{
  block_stmt_iterator bsi;
  tree stmt;

  do {
    if (!has_zero_uses (name))
      return false;

    stmt = SSA_NAME_DEF_STMT (name);
    if (stmt == up_to_stmt)
      return true;

    bsi = bsi_for_stmt (stmt);
    release_defs (stmt);
    bsi_remove (&bsi, true);

    name = GIMPLE_STMT_OPERAND (stmt, 1);
  } while (TREE_CODE (name) == SSA_NAME);

  return false;
}

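/* For example (names are illustrative), once the use of c_3 in a
   condition has been replaced, the now-dead copy chain

     a_2 = b_1 < 0;
     c_3 = a_2;

   is cleaned up from the bottom: the definition of c_3 is removed as
   soon as it has zero uses, then that of a_2, stopping at UP_TO_STMT
   if that statement is reached.  */
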
/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  t = fold_binary (code, type, op0, op1);
  if (!t)
    return NULL_TREE;

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* For (bool)x use x != 0.  */
  if (TREE_CODE (t) == NOP_EXPR
      && TREE_TYPE (t) == boolean_type_node)
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (NE_EXPR, type,
                  top0, build_int_cst (TREE_TYPE (top0), 0));
    }
  /* For !x use x == 0.  */
  else if (TREE_CODE (t) == TRUTH_NOT_EXPR)
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (EQ_EXPR, type,
                  top0, build_int_cst (TREE_TYPE (top0), 0));
    }
  /* For cmp ? 1 : 0 use cmp.  */
  else if (TREE_CODE (t) == COND_EXPR
           && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
           && integer_onep (TREE_OPERAND (t, 1))
           && integer_zerop (TREE_OPERAND (t, 2)))
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (TREE_CODE (top0), type,
                  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
    }

  /* Bail out if we required an invariant but didn't get one.  */
  if (invariant_only
      && !is_gimple_min_invariant (t))
    return NULL_TREE;

  /* A valid conditional for a COND_EXPR is either a gimple value
     or a comparison with two gimple value operands.  */
  if (is_gimple_val (t)
      || (COMPARISON_CLASS_P (t)
          && is_gimple_val (TREE_OPERAND (t, 0))
          && is_gimple_val (TREE_OPERAND (t, 1))))
    return t;

  return NULL_TREE;
}

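/* For example (illustrative values), when the definition x_1 = a_2 + 1
   is propagated into the test x_1 == 5, the call
   combine_cond_expr_cond (EQ_EXPR, boolean_type_node, a_2 + 1, 5, ...)
   lets fold_binary produce a_2 == 4, which is a valid COND_EXPR
   condition and is returned.  */
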
/* Propagate from the ssa name definition statements of COND_EXPR
   in statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.  */

static int
forward_propagate_into_cond (tree cond_expr, tree stmt)
{
  int did_something = 0;

  do {
    tree tmp = NULL_TREE;
    tree cond = COND_EXPR_COND (cond_expr);
    tree name, def_stmt, rhs;
    bool single_use_p;

    /* We can do tree combining on SSA_NAME and comparison expressions.  */
    if (COMPARISON_CLASS_P (cond)
        && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME)
      {
        /* For comparisons use the first operand, that is likely to
           simplify comparisons against constants.  */
        name = TREE_OPERAND (cond, 0);
        def_stmt = get_prop_source_stmt (name, false, &single_use_p);
        if (def_stmt != NULL_TREE
            && can_propagate_from (def_stmt))
          {
            tree op1 = TREE_OPERAND (cond, 1);
            rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
            tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
                                          fold_convert (TREE_TYPE (op1), rhs),
                                          op1, !single_use_p);
          }
        /* If that wasn't successful, try the second operand.  */
        if (tmp == NULL_TREE
            && TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME)
          {
            tree op0 = TREE_OPERAND (cond, 0);
            name = TREE_OPERAND (cond, 1);
            def_stmt = get_prop_source_stmt (name, false, &single_use_p);
            if (def_stmt == NULL_TREE
                || !can_propagate_from (def_stmt))
              return did_something;

            rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
            tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
                                          op0,
                                          fold_convert (TREE_TYPE (op0), rhs),
                                          !single_use_p);
          }
      }
    else if (TREE_CODE (cond) == SSA_NAME)
      {
        name = cond;
        def_stmt = get_prop_source_stmt (name, true, NULL);
        if (def_stmt == NULL_TREE
            || !can_propagate_from (def_stmt))
          return did_something;

        rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
        tmp = combine_cond_expr_cond (NE_EXPR, boolean_type_node, rhs,
                                      build_int_cst (TREE_TYPE (rhs), 0),
                                      false);
      }

    if (tmp)
      {
        if (dump_file && tmp)
          {
            fprintf (dump_file, "  Replaced '");
            print_generic_expr (dump_file, cond, 0);
            fprintf (dump_file, "' with '");
            print_generic_expr (dump_file, tmp, 0);
            fprintf (dump_file, "'\n");
          }

        COND_EXPR_COND (cond_expr) = unshare_expr (tmp);
        update_stmt (stmt);

        /* Remove defining statements.  */
        remove_prop_source_from_use (name, NULL);

        if (is_gimple_min_invariant (tmp))
          did_something = 2;
        else if (did_something == 0)
          did_something = 1;

        /* Continue combining.  */
        continue;
      }

    break;
  } while (1);

  return did_something;
}

/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (tree stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && tree_purge_dead_eh_edges (bb_for_stmt (stmt)))
    cfg_changed = true;

  if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (GIMPLE_STMT_OPERAND (stmt, 1));

  mark_symbols_for_renaming (stmt);
}

/* DEF_RHS contains the address of the 0th element in an array.
   USE_STMT uses type of DEF_RHS to compute the address of an
   arbitrary element within the array.  The (variable) byte offset
   of the element is contained in OFFSET.

   We walk back through the use-def chains of OFFSET to verify that
   it is indeed computing the offset of an element within the array
   and extract the index corresponding to the given byte offset.

   We then try to fold the entire address expression into a form
   &array[index].

   If we are successful, we replace the right hand side of USE_STMT
   with the new address computation.  */

static bool
forward_propagate_addr_into_variable_array_index (tree offset,
                                                  tree def_rhs, tree use_stmt)
{
  tree index;

  /* Try to find an expression for a proper index.  This is either
     a multiplication expression by the element size or just the
     ssa name we came along in case the element size is one.  */
  if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
    index = offset;
  else
    {
      /* Get the offset's defining statement.  */
      offset = SSA_NAME_DEF_STMT (offset);

      /* The statement which defines OFFSET before type conversion
         must be a simple GIMPLE_MODIFY_STMT.  */
      if (TREE_CODE (offset) != GIMPLE_MODIFY_STMT)
        return false;

      /* The RHS of the statement which defines OFFSET must be a
         multiplication of an object by the size of the array elements.
         This implicitly verifies that the size of the array elements
         is constant.  */
      offset = GIMPLE_STMT_OPERAND (offset, 1);
      if (TREE_CODE (offset) != MULT_EXPR
          || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
          || !simple_cst_equal (TREE_OPERAND (offset, 1),
                                TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
        return false;

      /* The first operand to the MULT_EXPR is the desired index.  */
      index = TREE_OPERAND (offset, 0);
    }

  /* Replace the pointer addition with array indexing.  */
  GIMPLE_STMT_OPERAND (use_stmt, 1) = unshare_expr (def_rhs);
  TREE_OPERAND (TREE_OPERAND (GIMPLE_STMT_OPERAND (use_stmt, 1), 0), 1)
    = index;

  /* That should have created gimple, so there is no need to
     record information to undo the propagation.  */
  fold_stmt_inplace (use_stmt);
  tidy_after_forward_propagate_addr (use_stmt);
  return true;
}

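/* For example (illustrative SSA names), with an int array x whose
   element size recorded in TYPE_SIZE_UNIT is 4, the sequence

     D.1_2 = &x[0];
     offset_3 = i_1 * 4;
     p_4 = D.1_2 + offset_3;

   has its last statement rewritten to p_4 = &x[i_1].  */
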
/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation may
   be only partially successful, yet things may have been changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs, tree use_stmt,
                               bool single_use_p)
{
  tree lhs, rhs, array_ref;

  /* Strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  lhs = GIMPLE_STMT_OPERAND (use_stmt, 0);
  while (handled_component_p (lhs))
    lhs = TREE_OPERAND (lhs, 0);

  rhs = GIMPLE_STMT_OPERAND (use_stmt, 1);

  /* Now see if the LHS node is an INDIRECT_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == INDIRECT_REF && TREE_OPERAND (lhs, 0) == name)
    {
      /* This should always succeed in creating gimple, so there is
         no need to save enough state to undo this propagation.  */
      TREE_OPERAND (lhs, 0) = unshare_expr (def_rhs);
      fold_stmt_inplace (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);

      /* Continue propagating into the RHS.  */
    }

  /* Trivial cases.  The use statement could be a trivial copy or a
     useless conversion.  Recurse to the uses of the lhs as copyprop does
     not copy through different variant pointers and FRE does not catch
     all useless conversions.  Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  else if (TREE_CODE (lhs) == SSA_NAME
           && (TREE_CODE (rhs) == NOP_EXPR
               || TREE_CODE (rhs) == CONVERT_EXPR)
           && TREE_TYPE (rhs) == TREE_TYPE (def_rhs)
           && single_use_p)
    {
      GIMPLE_STMT_OPERAND (use_stmt, 1) = unshare_expr (def_rhs);
      return true;
    }
  else if ((TREE_CODE (lhs) == SSA_NAME
            && rhs == name)
           || ((TREE_CODE (rhs) == NOP_EXPR
                || TREE_CODE (rhs) == CONVERT_EXPR)
               && useless_type_conversion_p (TREE_TYPE (rhs),
                                             TREE_TYPE (def_rhs))))
    return forward_propagate_addr_expr (lhs, def_rhs);

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  while (handled_component_p (rhs)
         || TREE_CODE (rhs) == ADDR_EXPR)
    rhs = TREE_OPERAND (rhs, 0);

  /* Now see if the RHS node is an INDIRECT_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == INDIRECT_REF && TREE_OPERAND (rhs, 0) == name)
    {
      /* This should always succeed in creating gimple, so there is
         no need to save enough state to undo this propagation.  */
      TREE_OPERAND (rhs, 0) = unshare_expr (def_rhs);
      fold_stmt_inplace (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if (TREE_CODE (array_ref) != ARRAY_REF
      || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
      || !integer_zerop (TREE_OPERAND (array_ref, 1)))
    return false;

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (TREE_CODE (rhs) != POINTER_PLUS_EXPR)
    return false;

  /* Try to optimize &x[0] p+ C where C is a multiple of the size
     of the elements in X into &x[C/element size].  */
  if (TREE_OPERAND (rhs, 0) == name
      && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
    {
      tree orig = unshare_expr (rhs);
      TREE_OPERAND (rhs, 0) = unshare_expr (def_rhs);

      /* If folding succeeds, then we have just exposed new variables
         in USE_STMT which will need to be renamed.  If folding fails,
         then we need to put everything back the way it was.  */
      if (fold_stmt_inplace (use_stmt))
        {
          tidy_after_forward_propagate_addr (use_stmt);
          return true;
        }
      else
        {
          GIMPLE_STMT_OPERAND (use_stmt, 1) = orig;
          update_stmt (use_stmt);
          return false;
        }
    }

  /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
     converting a multiplication of an index by the size of the
     array elements, then the result is converted into the proper
     type for the arithmetic.  */
  if (TREE_OPERAND (rhs, 0) == name
      && TREE_CODE (TREE_OPERAND (rhs, 1)) == SSA_NAME
      /* Avoid problems with IVopts creating PLUS_EXPRs with a
         different type than their operands.  */
      && useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (name)))
    {
      bool res;

      res = forward_propagate_addr_into_variable_array_index (TREE_OPERAND (rhs, 1),
                                                               def_rhs, use_stmt);
      return res;
    }

  return false;
}

/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.
   Returns true, if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs)
{
  int stmt_loop_depth = bb_for_stmt (SSA_NAME_DEF_STMT (name))->loop_depth;
  imm_use_iterator iter;
  tree use_stmt;
  bool all = true;
  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (TREE_CODE (use_stmt) != GIMPLE_MODIFY_STMT)
        {
          all = false;
          continue;
        }

      /* If the use is in a deeper loop nest, then we do not want
         to propagate the ADDR_EXPR into the loop as that is likely
         adding expression evaluations into the loop.  */
      if (bb_for_stmt (use_stmt)->loop_depth > stmt_loop_depth)
        {
          all = false;
          continue;
        }

      /* If the use_stmt has side-effects, don't propagate into it.  */
      if (stmt_ann (use_stmt)->has_volatile_ops)
        {
          all = false;
          continue;
        }

      push_stmt_changes (&use_stmt);

      result = forward_propagate_addr_expr_1 (name, rhs, use_stmt,
                                              single_use_p);
      all &= result;

      pop_stmt_changes (&use_stmt);

      /* Remove intermediate now unused copy and conversion chains.  */
      if (result
          && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) == SSA_NAME
          && (TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == SSA_NAME
              || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == NOP_EXPR
              || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == CONVERT_EXPR))
        {
          block_stmt_iterator bsi = bsi_for_stmt (use_stmt);
          release_defs (use_stmt);
          bsi_remove (&bsi, true);
        }
    }

  return all;
}

/* Forward propagate the comparison COND defined in STMT like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  */

static bool
forward_propagate_comparison (tree cond, tree stmt)
{
  tree name = GIMPLE_STMT_OPERAND (stmt, 0);
  tree use_stmt, tmp = NULL_TREE;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (cond, 0)))
      || (TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (cond, 1))))
    return false;

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (use_stmt == NULL_TREE)
    return false;

  /* Conversion of the condition result to another integral type.  */
  if (TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
      && (TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == CONVERT_EXPR
          || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == NOP_EXPR
          || COMPARISON_CLASS_P (GIMPLE_STMT_OPERAND (use_stmt, 1))
          || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == TRUTH_NOT_EXPR)
      && INTEGRAL_TYPE_P (TREE_TYPE (GIMPLE_STMT_OPERAND (use_stmt, 0))))
    {
      tree lhs = GIMPLE_STMT_OPERAND (use_stmt, 0);
      tree rhs = GIMPLE_STMT_OPERAND (use_stmt, 1);

      /* We can propagate the condition into a conversion.  */
      if (TREE_CODE (rhs) == CONVERT_EXPR
          || TREE_CODE (rhs) == NOP_EXPR)
        {
          /* Avoid using fold here as that may create a COND_EXPR with
             non-boolean condition as canonical form.  */
          tmp = build2 (TREE_CODE (cond), TREE_TYPE (lhs),
                        TREE_OPERAND (cond, 0), TREE_OPERAND (cond, 1));
        }
      /* We can propagate the condition into X op CST where op
         is EQ_EXPR or NE_EXPR and CST is either one or zero.  */
      else if (COMPARISON_CLASS_P (rhs)
               && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
               && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
        {
          enum tree_code code = TREE_CODE (rhs);
          tree cst = TREE_OPERAND (rhs, 1);

          tmp = combine_cond_expr_cond (code, TREE_TYPE (lhs),
                                        fold_convert (TREE_TYPE (cst), cond),
                                        cst, false);
          if (tmp == NULL_TREE)
            return false;
        }
      /* We can propagate the condition into a statement that
         computes the logical negation of the comparison result.  */
      else if (TREE_CODE (rhs) == TRUTH_NOT_EXPR)
        {
          tree type = TREE_TYPE (TREE_OPERAND (cond, 0));
          bool nans = HONOR_NANS (TYPE_MODE (type));
          enum tree_code code;
          code = invert_tree_comparison (TREE_CODE (cond), nans);
          if (code == ERROR_MARK)
            return false;

          tmp = build2 (code, TREE_TYPE (lhs), TREE_OPERAND (cond, 0),
                        TREE_OPERAND (cond, 1));
        }
      else
        return false;

      GIMPLE_STMT_OPERAND (use_stmt, 1) = unshare_expr (tmp);
      update_stmt (use_stmt);

      /* Remove defining statements.  */
      remove_prop_source_from_use (name, stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, rhs, dump_flags);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, dump_flags);
          fprintf (dump_file, "'\n");
        }

      return true;
    }

  return false;
}

/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
   If so, we can change STMT into lhs = y which can later be copy
   propagated.  Similarly for negation.

   This could trivially be formulated as a forward propagation
   to immediate uses.  However, we already had an implementation
   from DOM which used backward propagation via the use-def links.

   It turns out that backward propagation is actually faster as
   there's less work to do for each NOT/NEG expression we find.
   Backwards propagation needs to look at the statement in a single
   backlink.  Forward propagation needs to look at potentially more
   than one forward link.  */

static void
simplify_not_neg_expr (tree stmt)
{
  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
  tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));

  /* See if the RHS_DEF_STMT has the same form as our statement.  */
  if (TREE_CODE (rhs_def_stmt) == GIMPLE_MODIFY_STMT
      && TREE_CODE (GIMPLE_STMT_OPERAND (rhs_def_stmt, 1)) == TREE_CODE (rhs))
    {
      tree rhs_def_operand =
        TREE_OPERAND (GIMPLE_STMT_OPERAND (rhs_def_stmt, 1), 0);

      /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
      if (TREE_CODE (rhs_def_operand) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
        {
          GIMPLE_STMT_OPERAND (stmt, 1) = rhs_def_operand;
          update_stmt (stmt);
        }
    }
}

/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static void
simplify_switch_expr (tree stmt)
{
  tree cond = SWITCH_COND (stmt);
  tree def, to, ti;

  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      def = SSA_NAME_DEF_STMT (cond);
      if (TREE_CODE (def) == GIMPLE_MODIFY_STMT)
        {
          def = GIMPLE_STMT_OPERAND (def, 1);
          if (TREE_CODE (def) == NOP_EXPR)
            {
              int need_precision;
              bool fail;

              def = TREE_OPERAND (def, 0);

#ifdef ENABLE_CHECKING
              /* ??? Why was Jeff testing this?  We are gimple...  */
              gcc_assert (is_gimple_val (def));
#endif

              to = TREE_TYPE (cond);
              ti = TREE_TYPE (def);

              /* If we have an extension that preserves value, then we
                 can copy the source value into the switch.  */

              need_precision = TYPE_PRECISION (ti);
              fail = false;
              if (! INTEGRAL_TYPE_P (ti))
                fail = true;
              else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
                fail = true;
              else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
                need_precision += 1;
              if (TYPE_PRECISION (to) < need_precision)
                fail = true;

              if (!fail)
                {
                  SWITCH_COND (stmt) = def;
                  update_stmt (stmt);
                }
            }
        }
    }
}

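/* For example (types are illustrative), for C like

     unsigned char c = ...;
     switch ((int) c) { ... }

   the widening NOP_EXPR is dropped and the switch tests c directly:
   the required precision is 8 plus 1 because the source is unsigned
   and the destination signed, which still fits in the 32-bit int,
   so the conversion preserves every value.  */
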
/* Main entry point for the forward propagation optimizer.  */

static unsigned int
tree_ssa_forward_propagate_single_use_vars (void)
{
  basic_block bb;
  unsigned int todoflags = 0;

  cfg_changed = false;

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi;

      /* Note we update BSI within the loop as necessary.  */
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
        {
          tree stmt = bsi_stmt (bsi);

          /* If this statement sets an SSA_NAME to an address,
             try to propagate the address into the uses of the SSA_NAME.  */
          if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
            {
              tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
              tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);

              if (TREE_CODE (lhs) != SSA_NAME)
                {
                  bsi_next (&bsi);
                  continue;
                }

              if (TREE_CODE (rhs) == ADDR_EXPR)
                {
                  if (forward_propagate_addr_expr (lhs, rhs))
                    {
                      release_defs (stmt);
                      todoflags |= TODO_remove_unused_locals;
                      bsi_remove (&bsi, true);
                    }
                  else
                    bsi_next (&bsi);
                }
              else if ((TREE_CODE (rhs) == BIT_NOT_EXPR
                        || TREE_CODE (rhs) == NEGATE_EXPR)
                       && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                {
                  simplify_not_neg_expr (stmt);
                  bsi_next (&bsi);
                }
              else if (TREE_CODE (rhs) == COND_EXPR)
                {
                  int did_something;
                  fold_defer_overflow_warnings ();
                  did_something = forward_propagate_into_cond (rhs, stmt);
                  if (did_something == 2)
                    cfg_changed = true;
                  fold_undefer_overflow_warnings (!TREE_NO_WARNING (rhs)
                    && did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
                  bsi_next (&bsi);
                }
              else if (COMPARISON_CLASS_P (rhs))
                {
                  if (forward_propagate_comparison (rhs, stmt))
                    {
                      release_defs (stmt);
                      todoflags |= TODO_remove_unused_locals;
                      bsi_remove (&bsi, true);
                    }
                  else
                    bsi_next (&bsi);
                }
              else
                bsi_next (&bsi);
            }
          else if (TREE_CODE (stmt) == SWITCH_EXPR)
            {
              simplify_switch_expr (stmt);
              bsi_next (&bsi);
            }
          else if (TREE_CODE (stmt) == COND_EXPR)
            {
              int did_something;
              fold_defer_overflow_warnings ();
              did_something = forward_propagate_into_cond (stmt, stmt);
              if (did_something == 2)
                cfg_changed = true;
              fold_undefer_overflow_warnings (!TREE_NO_WARNING (stmt)
                                              && did_something, stmt,
                                              WARN_STRICT_OVERFLOW_CONDITIONAL);
              bsi_next (&bsi);
            }
          else
            bsi_next (&bsi);
        }
    }

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;
  return todoflags;
}

static bool
gate_forwprop (void)
{
  return 1;
}

struct tree_opt_pass pass_forwprop = {
  "forwprop",                                   /* name */
  gate_forwprop,                                /* gate */
  tree_ssa_forward_propagate_single_use_vars,   /* execute */
  NULL,                                         /* sub */
  NULL,                                         /* next */
  0,                                            /* static_pass_number */
  TV_TREE_FORWPROP,                             /* tv_id */
  PROP_cfg | PROP_ssa,                          /* properties_required */
  0,                                            /* properties_provided */
  0,                                            /* properties_destroyed */
  0,                                            /* todo_flags_start */
  TODO_dump_func
  | TODO_ggc_collect
  | TODO_update_ssa
  | TODO_verify_ssa,                            /* todo_flags_finish */
  0                                             /* letter */
};

/* Structure to keep track of the value of a dereferenced PHI result
   and the set of virtual operands used for that dereference.  */

struct phiprop_d
{
  tree value;
  tree vop_stmt;
};

/* Verify if the value recorded for NAME in PHIVN is still valid at
   the start of basic block BB.  */

static bool
phivn_valid_p (struct phiprop_d *phivn, tree name, basic_block bb)
{
  tree vop_stmt = phivn[SSA_NAME_VERSION (name)].vop_stmt;
  ssa_op_iter ui;
  tree vuse;

  /* The def stmts of all virtual uses need to be post-dominated
     by bb.  */
  FOR_EACH_SSA_TREE_OPERAND (vuse, vop_stmt, ui, SSA_OP_VUSE)
    {
      tree use_stmt;
      imm_use_iterator ui2;
      bool ok = true;

      FOR_EACH_IMM_USE_STMT (use_stmt, ui2, vuse)
        {
          /* If BB does not dominate a VDEF, the value is invalid.  */
          if (((TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
                && !ZERO_SSA_OPERANDS (use_stmt, SSA_OP_VDEF))
               || TREE_CODE (use_stmt) == PHI_NODE)
              && !dominated_by_p (CDI_DOMINATORS, bb_for_stmt (use_stmt), bb))
            {
              ok = false;
              BREAK_FROM_IMM_USE_STMT (ui2);
            }
        }
      if (!ok)
        return false;
    }

  return true;
}

/* Insert a new phi node for the dereference of PHI at basic_block
   BB with the virtual operands from USE_STMT.  */

static tree
phiprop_insert_phi (basic_block bb, tree phi, tree use_stmt,
                    struct phiprop_d *phivn, size_t n)
{
  tree res, new_phi;
  edge_iterator ei;
  edge e;

  /* Build a new PHI node to replace the definition of
     the indirect reference lhs.  */
  res = GIMPLE_STMT_OPERAND (use_stmt, 0);
  SSA_NAME_DEF_STMT (res) = new_phi = create_phi_node (res, bb);

  /* Add PHI arguments for each edge inserting loads of the
     addressable operands.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      tree old_arg, new_var, tmp;

      old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
      while (TREE_CODE (old_arg) == SSA_NAME
             && (SSA_NAME_VERSION (old_arg) >= n
                 || phivn[SSA_NAME_VERSION (old_arg)].value == NULL_TREE))
        {
          tree def_stmt = SSA_NAME_DEF_STMT (old_arg);
          old_arg = GIMPLE_STMT_OPERAND (def_stmt, 1);
        }

      if (TREE_CODE (old_arg) == SSA_NAME)
        /* Reuse a formerly created dereference.  */
        new_var = phivn[SSA_NAME_VERSION (old_arg)].value;
      else
        {
          old_arg = TREE_OPERAND (old_arg, 0);
          new_var = create_tmp_var (TREE_TYPE (old_arg), NULL);
          tmp = build2 (GIMPLE_MODIFY_STMT, void_type_node,
                        NULL_TREE, unshare_expr (old_arg));
          if (TREE_CODE (TREE_TYPE (old_arg)) == COMPLEX_TYPE
              || TREE_CODE (TREE_TYPE (old_arg)) == VECTOR_TYPE)
            DECL_GIMPLE_REG_P (new_var) = 1;
          add_referenced_var (new_var);
          new_var = make_ssa_name (new_var, tmp);
          GIMPLE_STMT_OPERAND (tmp, 0) = new_var;

          bsi_insert_on_edge (e, tmp);

          update_stmt (tmp);
          mark_symbols_for_renaming (tmp);
        }

      add_phi_arg (new_phi, new_var, e);
    }

  update_stmt (new_phi);

  return res;
}

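/* For example (names are illustrative), for a PHI p_2 = PHI <&x(E1), &y(E2)>
   whose dereference z_4 = *p_2 is being replaced, the loads

     tem_5 = x;   (inserted on edge E1)
     tem_6 = y;   (inserted on edge E2)

   are created and the dereference's result is redefined by the new
   PHI z_4 = PHI <tem_5(E1), tem_6(E2)> in the PHI's block.  */
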
/* Propagate between the phi node arguments of PHI in BB and phi result
   users.  For now this matches
        # p_2 = PHI <&x, &y>
      <Lx>:;
        p_3 = p_2;
        z_2 = *p_3;
   and converts it to
        # z_2 = PHI <x, y>
      <Lx>:;
   Returns true if a transformation was done and edge insertions
   need to be committed.  Global data PHIVN and N are used to track
   past transformation results.  We need to be especially careful here
   with aliasing issues as we are moving memory reads.  */

static bool
propagate_with_phi (basic_block bb, tree phi, struct phiprop_d *phivn, size_t n)
{
  tree ptr = PHI_RESULT (phi);
  tree use_stmt, res = NULL_TREE;
  block_stmt_iterator bsi;
  imm_use_iterator ui;
  use_operand_p arg_p, use;
  ssa_op_iter i;
  bool phi_inserted;

  if (MTAG_P (SSA_NAME_VAR (ptr))
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || !is_gimple_reg_type (TREE_TYPE (TREE_TYPE (ptr))))
    return false;

  /* Check if we can "cheaply" dereference all phi arguments.  */
  FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_USE)
    {
      tree arg = USE_FROM_PTR (arg_p);
      /* Walk the ssa chain until we reach a ssa name we already
         created a value for or we reach a definition of the form
         ssa_name_n = &var;  */
      while (TREE_CODE (arg) == SSA_NAME
             && !SSA_NAME_IS_DEFAULT_DEF (arg)
             && (SSA_NAME_VERSION (arg) >= n
                 || phivn[SSA_NAME_VERSION (arg)].value == NULL_TREE))
        {
          tree def_stmt = SSA_NAME_DEF_STMT (arg);
          if (TREE_CODE (def_stmt) != GIMPLE_MODIFY_STMT)
            return false;
          arg = GIMPLE_STMT_OPERAND (def_stmt, 1);
        }
      if ((TREE_CODE (arg) != ADDR_EXPR
           /* Avoid having to decay *&a to a[0] later.  */
           || !is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (arg, 0))))
          && !(TREE_CODE (arg) == SSA_NAME
               && phivn[SSA_NAME_VERSION (arg)].value != NULL_TREE
               && phivn_valid_p (phivn, arg, bb)))
        return false;
    }

  /* Find a dereferencing use.  First follow (single use) ssa
     copy chains for ptr.  */
  while (single_imm_use (ptr, &use, &use_stmt)
         && TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
         && GIMPLE_STMT_OPERAND (use_stmt, 1) == ptr
         && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) == SSA_NAME)
    ptr = GIMPLE_STMT_OPERAND (use_stmt, 0);

  /* Replace the first dereference of *ptr if there is one and if we
     can move the loads to the place of the ptr phi node.  */
  phi_inserted = false;
  FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
    {
      ssa_op_iter ui2;
      tree vuse;

      /* Check whether this is a load of *ptr.  */
      if (!(TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
            && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) == SSA_NAME
            && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == INDIRECT_REF
            && TREE_OPERAND (GIMPLE_STMT_OPERAND (use_stmt, 1), 0) == ptr
            /* We cannot replace a load that may throw or is volatile.  */
            && !tree_can_throw_internal (use_stmt)))
        continue;

      /* Check if we can move the loads.  The def stmts of all virtual uses
         need to be post-dominated by bb.  */
      FOR_EACH_SSA_TREE_OPERAND (vuse, use_stmt, ui2, SSA_OP_VUSE)
        {
          tree def_stmt = SSA_NAME_DEF_STMT (vuse);
          if (!SSA_NAME_IS_DEFAULT_DEF (vuse)
              && (bb_for_stmt (def_stmt) == bb
                  || !dominated_by_p (CDI_DOMINATORS,
                                      bb, bb_for_stmt (def_stmt))))
            goto next;
        }

      /* Found a proper dereference.  Insert a phi node if this
         is the first load transformation.  */
      if (!phi_inserted)
        {
          res = phiprop_insert_phi (bb, phi, use_stmt, phivn, n);

          /* Remember the value we created for *ptr.  */
          phivn[SSA_NAME_VERSION (ptr)].value = res;
          phivn[SSA_NAME_VERSION (ptr)].vop_stmt = use_stmt;

          /* Remove old stmt.  The phi is taken care of by DCE, if we
             want to delete it here we also have to delete all intermediate
             copies.  */
          bsi = bsi_for_stmt (use_stmt);
          bsi_remove (&bsi, 0);

          phi_inserted = true;
        }
      else
        {
          /* Further replacements are easy, just make a copy out of the
             load.  */
          GIMPLE_STMT_OPERAND (use_stmt, 1) = res;
          update_stmt (use_stmt);
        }

next:;
      /* Continue searching for a proper dereference.  */
    }

  return phi_inserted;
}

/* Helper walking the dominator tree starting from BB and processing
   phi nodes with global data PHIVN and N.  */

static bool
tree_ssa_phiprop_1 (basic_block bb, struct phiprop_d *phivn, size_t n)
{
  bool did_something = false;
  basic_block son;
  tree phi;

  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
    did_something |= propagate_with_phi (bb, phi, phivn, n);

  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    did_something |= tree_ssa_phiprop_1 (son, phivn, n);

  return did_something;
}

/* Main entry for phiprop pass.  */

static unsigned int
tree_ssa_phiprop (void)
{
  struct phiprop_d *phivn;

  calculate_dominance_info (CDI_DOMINATORS);

  phivn = XCNEWVEC (struct phiprop_d, num_ssa_names);

  if (tree_ssa_phiprop_1 (ENTRY_BLOCK_PTR, phivn, num_ssa_names))
    bsi_commit_edge_inserts ();

  free (phivn);

  return 0;
}

static bool
gate_phiprop (void)
{
  return 1;
}

struct tree_opt_pass pass_phiprop = {
  "phiprop",                            /* name */
  gate_phiprop,                         /* gate */
  tree_ssa_phiprop,                     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_FORWPROP,                     /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func
  | TODO_ggc_collect
  | TODO_update_ssa
  | TODO_verify_ssa,                    /* todo_flags_finish */
  0                                     /* letter */
};