gcc/tree-ssa-forwprop.c
/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004, 2005, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "timevar.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-dump.h"
#include "langhooks.h"
#include "flags.h"

/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   Note carefully that after propagation the resulting statement
   must still be a proper gimple statement.  Right now we simply
   only perform propagations we know will result in valid gimple
   code.  One day we'll want to generalize this code.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */
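
/* Illustrative example (not part of the original sources): C code such as

     int f (int a, int b)
     {
       int x = (a == b);
       if (x)
         return 1;
       return 0;
     }

   gimplifies to roughly

     x_1 = a_2 == b_3;
     if (x_1 != 0) goto <L0>; else goto <L1>;

   and the first transformation above rewrites the test into

     if (a_2 == b_3) goto <L0>; else goto <L1>;

   The SSA names and labels shown here are hypothetical.  */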
static bool forward_propagate_addr_expr (tree name, tree rhs);

/* Set to true if we delete EH edges during the optimization.  */
static bool cfg_changed;


/* Get the next statement we can propagate NAME's value into skipping
   trivial copies.  Returns the statement that is suitable as a
   propagation destination or NULL_TREE if there is none.
   This only returns destinations in a single-use chain.  If
   FINAL_NAME_P is non-NULL, it is set to the ssa name that
   represents the use.  */
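
/* Illustrative sketch (not from the original sources): given a
   single-use copy chain such as

     a_1 = x_2 < y_3;
     b_4 = a_1;
     c_5 = b_4;
     d_6 = (int) c_5;

   get_prop_dest_stmt (a_1, &final) is expected to skip the trivial
   copies defining b_4 and c_5 and return the conversion statement,
   with FINAL set to c_5.  All names here are hypothetical.  */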
static tree
get_prop_dest_stmt (tree name, tree *final_name_p)
{
  use_operand_p use;
  tree use_stmt;

  do {
    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))
      return NULL_TREE;

    /* If this is not a trivial copy, we found it.  */
    if (TREE_CODE (use_stmt) != GIMPLE_MODIFY_STMT
        || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) != SSA_NAME
        || GIMPLE_STMT_OPERAND (use_stmt, 1) != name)
      break;

    /* Continue searching uses of the copy destination.  */
    name = GIMPLE_STMT_OPERAND (use_stmt, 0);
  } while (1);

  if (final_name_p)
    *final_name_p = name;

  return use_stmt;
}
/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL_TREE if there is none.
   If SINGLE_USE_ONLY is set, only sources which have a single-use
   chain up to NAME are considered.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static tree
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    tree def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
        single_use = false;
        if (single_use_only)
          return NULL_TREE;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (TREE_CODE (def_stmt) != GIMPLE_MODIFY_STMT)
      return NULL_TREE;

    /* If name is not a simple copy destination, we found it.  */
    if (TREE_CODE (GIMPLE_STMT_OPERAND (def_stmt, 1)) != SSA_NAME)
      {
        if (!single_use_only && single_use_p)
          *single_use_p = single_use;

        return def_stmt;
      }

    /* Continue searching the def of the copy source name.  */
    name = GIMPLE_STMT_OPERAND (def_stmt, 1);
  } while (1);
}
/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (tree def_stmt)
{
  tree rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  switch (TREE_CODE_LENGTH (TREE_CODE (rhs)))
    {
    case 3:
      if (TREE_OPERAND (rhs, 2) != NULL_TREE
          && TREE_CODE (TREE_OPERAND (rhs, 2)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (rhs, 2)))
        return false;
    case 2:
      if (TREE_OPERAND (rhs, 1) != NULL_TREE
          && TREE_CODE (TREE_OPERAND (rhs, 1)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (rhs, 1)))
        return false;
    case 1:
      if (TREE_OPERAND (rhs, 0) != NULL_TREE
          && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (rhs, 0)))
        return false;
      break;

    default:
      return false;
    }

  /* If the definition is a conversion of a pointer to a function type,
     then we can not apply optimizations as some targets require function
     pointers to be canonicalized and in this case this optimization could
     eliminate a necessary canonicalization.  */
  if ((TREE_CODE (rhs) == NOP_EXPR
       || TREE_CODE (rhs) == CONVERT_EXPR)
      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0)))
      && TREE_CODE (TREE_TYPE (TREE_TYPE
                               (TREE_OPERAND (rhs, 0)))) == FUNCTION_TYPE)
    return false;

  return true;
}
/* Remove a copy chain ending in NAME along the defs, but not beyond
   or including UP_TO_STMT.  If NAME was replaced in
   its only use then this function can be used to clean up
   dead stmts.  Returns true if UP_TO_STMT can be removed
   as well, otherwise false.  */

static bool
remove_prop_source_from_use (tree name, tree up_to_stmt)
{
  block_stmt_iterator bsi;
  tree stmt;

  do {
    if (!has_zero_uses (name))
      return false;

    stmt = SSA_NAME_DEF_STMT (name);
    if (stmt == up_to_stmt)
      return true;

    bsi = bsi_for_stmt (stmt);
    release_defs (stmt);
    bsi_remove (&bsi, true);

    name = GIMPLE_STMT_OPERAND (stmt, 1);
  } while (TREE_CODE (name) == SSA_NAME);

  return false;
}
/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */
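
/* Worked example (a sketch, not from the original sources): with
   CODE = EQ_EXPR, TYPE = boolean_type_node, OP0 = a_1 + 1 and
   OP1 = 3, fold_binary yields a_1 == 2, which is a valid COND_EXPR
   condition and is returned.  If INVARIANT_ONLY were true, that
   non-invariant comparison would be rejected and NULL_TREE returned
   instead.  The names are hypothetical.  */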
static tree
combine_cond_expr_cond (enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  t = fold_binary (code, type, op0, op1);
  if (!t)
    return NULL_TREE;

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* For (bool)x use x != 0.  */
  if (TREE_CODE (t) == NOP_EXPR
      && TREE_TYPE (t) == boolean_type_node)
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (NE_EXPR, type,
                  top0, build_int_cst (TREE_TYPE (top0), 0));
    }
  /* For !x use x == 0.  */
  else if (TREE_CODE (t) == TRUTH_NOT_EXPR)
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (EQ_EXPR, type,
                  top0, build_int_cst (TREE_TYPE (top0), 0));
    }
  /* For cmp ? 1 : 0 use cmp.  */
  else if (TREE_CODE (t) == COND_EXPR
           && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
           && integer_onep (TREE_OPERAND (t, 1))
           && integer_zerop (TREE_OPERAND (t, 2)))
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (TREE_CODE (top0), type,
                  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
    }

  /* Bail out if we required an invariant but didn't get one.  */
  if (invariant_only
      && !is_gimple_min_invariant (t))
    return NULL_TREE;

  /* A valid conditional for a COND_EXPR is either a gimple value
     or a comparison with two gimple value operands.  */
  if (is_gimple_val (t)
      || (COMPARISON_CLASS_P (t)
          && is_gimple_val (TREE_OPERAND (t, 0))
          && is_gimple_val (TREE_OPERAND (t, 1))))
    return t;

  return NULL_TREE;
}
/* Propagate from the ssa name definition statements of COND_EXPR
   in statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.  */
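
/* Illustrative sketch (not from the original sources):

     x_1 = a_2 != 0;
     if (x_1) goto <L0>; else goto <L1>;

   becomes

     if (a_2 != 0) goto <L0>; else goto <L1>;

   and, cascading through the retry loop below,

     x_1 = a_2 + 1;
     if (x_1 == 2) goto <L0>; else goto <L1>;

   becomes

     if (a_2 == 1) goto <L0>; else goto <L1>;

   The SSA names and labels shown are hypothetical.  */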
static int
forward_propagate_into_cond (tree cond_expr, tree stmt)
{
  int did_something = 0;

  do {
    tree tmp = NULL_TREE;
    tree cond = COND_EXPR_COND (cond_expr);
    tree name, def_stmt, rhs;
    bool single_use_p;

    /* We can do tree combining on SSA_NAME and comparison expressions.  */
    if (COMPARISON_CLASS_P (cond)
        && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME)
      {
        /* For comparisons use the first operand, that is likely to
           simplify comparisons against constants.  */
        name = TREE_OPERAND (cond, 0);
        def_stmt = get_prop_source_stmt (name, false, &single_use_p);
        if (def_stmt != NULL_TREE
            && can_propagate_from (def_stmt))
          {
            tree op1 = TREE_OPERAND (cond, 1);
            rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
            tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
                                          fold_convert (TREE_TYPE (op1), rhs),
                                          op1, !single_use_p);
          }
        /* If that wasn't successful, try the second operand.  */
        if (tmp == NULL_TREE
            && TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME)
          {
            tree op0 = TREE_OPERAND (cond, 0);
            name = TREE_OPERAND (cond, 1);
            def_stmt = get_prop_source_stmt (name, false, &single_use_p);
            if (def_stmt == NULL_TREE
                || !can_propagate_from (def_stmt))
              return did_something;

            rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
            tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
                                          op0,
                                          fold_convert (TREE_TYPE (op0), rhs),
                                          !single_use_p);
          }
      }
    else if (TREE_CODE (cond) == SSA_NAME)
      {
        name = cond;
        def_stmt = get_prop_source_stmt (name, true, NULL);
        if (def_stmt == NULL_TREE
            || !can_propagate_from (def_stmt))
          return did_something;

        rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
        tmp = combine_cond_expr_cond (NE_EXPR, boolean_type_node, rhs,
                                      build_int_cst (TREE_TYPE (rhs), 0),
                                      false);
      }

    if (tmp)
      {
        if (dump_file && tmp)
          {
            fprintf (dump_file, "  Replaced '");
            print_generic_expr (dump_file, cond, 0);
            fprintf (dump_file, "' with '");
            print_generic_expr (dump_file, tmp, 0);
            fprintf (dump_file, "'\n");
          }

        COND_EXPR_COND (cond_expr) = unshare_expr (tmp);
        update_stmt (stmt);

        /* Remove defining statements.  */
        remove_prop_source_from_use (name, NULL);

        if (is_gimple_min_invariant (tmp))
          did_something = 2;
        else if (did_something == 0)
          did_something = 1;

        /* Continue combining.  */
        continue;
      }

    break;
  } while (1);

  return did_something;
}
/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (tree stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && tree_purge_dead_eh_edges (bb_for_stmt (stmt)))
    cfg_changed = true;

  if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (GIMPLE_STMT_OPERAND (stmt, 1));

  mark_symbols_for_renaming (stmt);
}
/* DEF_RHS contains the address of the 0th element in an array.
   USE_STMT uses type of DEF_RHS to compute the address of an
   arbitrary element within the array.  The (variable) byte offset
   of the element is contained in OFFSET.

   We walk back through the use-def chains of OFFSET to verify that
   it is indeed computing the offset of an element within the array
   and extract the index corresponding to the given byte offset.

   We then try to fold the entire address expression into a form
   &array[index].

   If we are successful, we replace the right hand side of USE_STMT
   with the new address computation.  */
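
/* Illustrative sketch (not from the original sources): for an array
   "int x[16]" with 4-byte elements, the GIMPLE

     ptr_1 = &x[0];
     offset_2 = i_3 * 4;
     ptr2_4 = ptr_1 p+ offset_2;

   is rewritten by this function (called for the last statement) into

     ptr2_4 = &x[i_3];

   assuming the MULT_EXPR constant matches the element size.  The SSA
   names and the element size are hypothetical.  */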
static bool
forward_propagate_addr_into_variable_array_index (tree offset,
                                                  tree def_rhs, tree use_stmt)
{
  tree index;

  /* Try to find an expression for a proper index.  This is either
     a multiplication expression by the element size or just the
     ssa name we came along in case the element size is one.  */
  if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
    index = offset;
  else
    {
      /* Get the offset's defining statement.  */
      offset = SSA_NAME_DEF_STMT (offset);

      /* The statement which defines OFFSET before type conversion
         must be a simple GIMPLE_MODIFY_STMT.  */
      if (TREE_CODE (offset) != GIMPLE_MODIFY_STMT)
        return false;

      /* The RHS of the statement which defines OFFSET must be a
         multiplication of an object by the size of the array elements.
         This implicitly verifies that the size of the array elements
         is constant.  */
      offset = GIMPLE_STMT_OPERAND (offset, 1);
      if (TREE_CODE (offset) != MULT_EXPR
          || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
          || !simple_cst_equal (TREE_OPERAND (offset, 1),
                                TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
        return false;

      /* The first operand to the MULT_EXPR is the desired index.  */
      index = TREE_OPERAND (offset, 0);
    }

  /* Replace the pointer addition with array indexing.  */
  GIMPLE_STMT_OPERAND (use_stmt, 1) = unshare_expr (def_rhs);
  TREE_OPERAND (TREE_OPERAND (GIMPLE_STMT_OPERAND (use_stmt, 1), 0), 1)
    = index;

  /* That should have created gimple, so there is no need to
     record information to undo the propagation.  */
  fold_stmt_inplace (use_stmt);
  tidy_after_forward_propagate_addr (use_stmt);
  return true;
}
/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful.  (Even when the
   propagation is not totally successful, things may still have been
   changed.)  */
static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs, tree use_stmt,
                               bool single_use_p)
{
  tree lhs, rhs, array_ref;

  /* Strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  lhs = GIMPLE_STMT_OPERAND (use_stmt, 0);
  while (handled_component_p (lhs))
    lhs = TREE_OPERAND (lhs, 0);

  rhs = GIMPLE_STMT_OPERAND (use_stmt, 1);

  /* Now see if the LHS node is an INDIRECT_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == INDIRECT_REF && TREE_OPERAND (lhs, 0) == name)
    {
      /* This should always succeed in creating gimple, so there is
         no need to save enough state to undo this propagation.  */
      TREE_OPERAND (lhs, 0) = unshare_expr (def_rhs);
      fold_stmt_inplace (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);

      /* Continue propagating into the RHS.  */
    }

  /* Trivial cases.  The use statement could be a trivial copy or a
     useless conversion.  Recurse to the uses of the lhs as copyprop does
     not copy through different variant pointers and FRE does not catch
     all useless conversions.  Treat the case of a single-use name and
     a conversion to def_rhs type separately, though.  */
  else if (TREE_CODE (lhs) == SSA_NAME
           && (TREE_CODE (rhs) == NOP_EXPR
               || TREE_CODE (rhs) == CONVERT_EXPR)
           && TREE_TYPE (rhs) == TREE_TYPE (def_rhs)
           && single_use_p)
    {
      GIMPLE_STMT_OPERAND (use_stmt, 1) = unshare_expr (def_rhs);
      return true;
    }
  else if ((TREE_CODE (lhs) == SSA_NAME
            && rhs == name)
           || ((TREE_CODE (rhs) == NOP_EXPR
                || TREE_CODE (rhs) == CONVERT_EXPR)
               && useless_type_conversion_p (TREE_TYPE (rhs),
                                             TREE_TYPE (def_rhs))))
    return forward_propagate_addr_expr (lhs, def_rhs);

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  while (handled_component_p (rhs)
         || TREE_CODE (rhs) == ADDR_EXPR)
    rhs = TREE_OPERAND (rhs, 0);

  /* Now see if the RHS node is an INDIRECT_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == INDIRECT_REF && TREE_OPERAND (rhs, 0) == name)
    {
      /* This should always succeed in creating gimple, so there is
         no need to save enough state to undo this propagation.  */
      TREE_OPERAND (rhs, 0) = unshare_expr (def_rhs);
      fold_stmt_inplace (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if (TREE_CODE (array_ref) != ARRAY_REF
      || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
      || !integer_zerop (TREE_OPERAND (array_ref, 1)))
    return false;

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (TREE_CODE (rhs) != POINTER_PLUS_EXPR)
    return false;

  /* Try to optimize &x[0] p+ C where C is a multiple of the size
     of the elements in X into &x[C/element size].  */
  if (TREE_OPERAND (rhs, 0) == name
      && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
    {
      tree orig = unshare_expr (rhs);
      TREE_OPERAND (rhs, 0) = unshare_expr (def_rhs);

      /* If folding succeeds, then we have just exposed new variables
         in USE_STMT which will need to be renamed.  If folding fails,
         then we need to put everything back the way it was.  */
      if (fold_stmt_inplace (use_stmt))
        {
          tidy_after_forward_propagate_addr (use_stmt);
          return true;
        }
      else
        {
          GIMPLE_STMT_OPERAND (use_stmt, 1) = orig;
          update_stmt (use_stmt);
          return false;
        }
    }

  /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
     converting a multiplication of an index by the size of the
     array elements, then the result is converted into the proper
     type for the arithmetic.  */
  if (TREE_OPERAND (rhs, 0) == name
      && TREE_CODE (TREE_OPERAND (rhs, 1)) == SSA_NAME
      /* Avoid problems with IVopts creating PLUS_EXPRs with a
         different type than their operands.  */
      && useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (name)))
    {
      bool res;

      res = forward_propagate_addr_into_variable_array_index (TREE_OPERAND (rhs, 1),
                                                               def_rhs, use_stmt);
      return res;
    }

  return false;
}
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.
   Returns true if all uses have been propagated into.  */
static bool
forward_propagate_addr_expr (tree name, tree rhs)
{
  int stmt_loop_depth = bb_for_stmt (SSA_NAME_DEF_STMT (name))->loop_depth;
  imm_use_iterator iter;
  tree use_stmt;
  bool all = true;
  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (TREE_CODE (use_stmt) != GIMPLE_MODIFY_STMT)
        {
          all = false;
          continue;
        }

      /* If the use is in a deeper loop nest, then we do not want
         to propagate the ADDR_EXPR into the loop as that is likely
         adding expression evaluations into the loop.  */
      if (bb_for_stmt (use_stmt)->loop_depth > stmt_loop_depth)
        {
          all = false;
          continue;
        }

      push_stmt_changes (&use_stmt);

      result = forward_propagate_addr_expr_1 (name, rhs, use_stmt,
                                              single_use_p);
      all &= result;

      pop_stmt_changes (&use_stmt);

      /* Remove intermediate now unused copy and conversion chains.  */
      if (result
          && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) == SSA_NAME
          && (TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == SSA_NAME
              || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == NOP_EXPR
              || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == CONVERT_EXPR))
        {
          block_stmt_iterator bsi = bsi_for_stmt (use_stmt);
          release_defs (use_stmt);
          bsi_remove (&bsi, true);
        }
    }

  return all;
}
/* Forward propagate the comparison COND defined in STMT like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  */
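
/* Illustrative sketch (not from the original sources):

     cond_1 = x_2 < y_3;
     a_4 = !cond_1;

   becomes

     cond_1 = x_2 < y_3;
     a_4 = x_2 >= y_3;

   assuming the comparison can be validly inverted (e.g. no NaNs are
   involved).  Afterwards STMT may be removed by the caller if cond_1
   has no remaining uses.  The SSA names shown are hypothetical.  */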
static bool
forward_propagate_comparison (tree cond, tree stmt)
{
  tree name = GIMPLE_STMT_OPERAND (stmt, 0);
  tree use_stmt, tmp = NULL_TREE;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (cond, 0)))
      || (TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (cond, 1))))
    return false;

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (use_stmt == NULL_TREE)
    return false;

  /* Conversion of the condition result to another integral type.  */
  if (TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
      && (TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == CONVERT_EXPR
          || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == NOP_EXPR
          || COMPARISON_CLASS_P (GIMPLE_STMT_OPERAND (use_stmt, 1))
          || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == TRUTH_NOT_EXPR)
      && INTEGRAL_TYPE_P (TREE_TYPE (GIMPLE_STMT_OPERAND (use_stmt, 0))))
    {
      tree lhs = GIMPLE_STMT_OPERAND (use_stmt, 0);
      tree rhs = GIMPLE_STMT_OPERAND (use_stmt, 1);

      /* We can propagate the condition into a conversion.  */
      if (TREE_CODE (rhs) == CONVERT_EXPR
          || TREE_CODE (rhs) == NOP_EXPR)
        {
          /* Avoid using fold here as that may create a COND_EXPR with
             non-boolean condition as canonical form.  */
          tmp = build2 (TREE_CODE (cond), TREE_TYPE (lhs),
                        TREE_OPERAND (cond, 0), TREE_OPERAND (cond, 1));
        }
      /* We can propagate the condition into X op CST where op
         is EQ_EXPR or NE_EXPR and CST is either one or zero.  */
      else if (COMPARISON_CLASS_P (rhs)
               && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
               && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
        {
          enum tree_code code = TREE_CODE (rhs);
          tree cst = TREE_OPERAND (rhs, 1);

          tmp = combine_cond_expr_cond (code, TREE_TYPE (lhs),
                                        fold_convert (TREE_TYPE (cst), cond),
                                        cst, false);
          if (tmp == NULL_TREE)
            return false;
        }
      /* We can propagate the condition into a statement that
         computes the logical negation of the comparison result.  */
      else if (TREE_CODE (rhs) == TRUTH_NOT_EXPR)
        {
          tree type = TREE_TYPE (TREE_OPERAND (cond, 0));
          bool nans = HONOR_NANS (TYPE_MODE (type));
          enum tree_code code;
          code = invert_tree_comparison (TREE_CODE (cond), nans);
          if (code == ERROR_MARK)
            return false;

          tmp = build2 (code, TREE_TYPE (lhs), TREE_OPERAND (cond, 0),
                        TREE_OPERAND (cond, 1));
        }
      else
        return false;

      GIMPLE_STMT_OPERAND (use_stmt, 1) = unshare_expr (tmp);
      update_stmt (use_stmt);

      /* Remove defining statements.  */
      remove_prop_source_from_use (name, stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, rhs, dump_flags);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, dump_flags);
          fprintf (dump_file, "'\n");
        }

      return true;
    }

  return false;
}
/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
   If so, we can change STMT into lhs = y which can later be copy
   propagated.  Similarly for negation.

   This could trivially be formulated as a forward propagation
   to immediate uses.  However, we already had an implementation
   from DOM which used backward propagation via the use-def links.

   It turns out that backward propagation is actually faster as
   there's less work to do for each NOT/NEG expression we find.
   Backwards propagation needs to look at the statement in a single
   backlink.  Forward propagation needs to look at potentially more
   than one forward link.  */
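
/* Illustrative sketch (not from the original sources):

     x_1 = ~y_2;
     ...
     lhs_3 = ~x_1;

   is rewritten into

     lhs_3 = y_2;

   and similarly for NEGATE_EXPR.  The SSA names are hypothetical.  */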
static void
simplify_not_neg_expr (tree stmt)
{
  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
  tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));

  /* See if the RHS_DEF_STMT has the same form as our statement.  */
  if (TREE_CODE (rhs_def_stmt) == GIMPLE_MODIFY_STMT
      && TREE_CODE (GIMPLE_STMT_OPERAND (rhs_def_stmt, 1)) == TREE_CODE (rhs))
    {
      tree rhs_def_operand =
        TREE_OPERAND (GIMPLE_STMT_OPERAND (rhs_def_stmt, 1), 0);

      /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
      if (TREE_CODE (rhs_def_operand) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
        {
          GIMPLE_STMT_OPERAND (stmt, 1) = rhs_def_operand;
          update_stmt (stmt);
        }
    }
}
/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */
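
/* Illustrative sketch (not from the original sources): with
   "unsigned char c" the GIMPLE

     c.0_1 = (int) c_2;
     switch (c.0_1) { ... }

   is rewritten into

     switch (c_2) { ... }

   because widening from unsigned char to int preserves every value
   (the precision check below).  A narrowing or sign-losing cast is
   left alone.  The names shown are hypothetical.  */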
static void
simplify_switch_expr (tree stmt)
{
  tree cond = SWITCH_COND (stmt);
  tree def, to, ti;

  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      def = SSA_NAME_DEF_STMT (cond);
      if (TREE_CODE (def) == GIMPLE_MODIFY_STMT)
        {
          def = GIMPLE_STMT_OPERAND (def, 1);
          if (TREE_CODE (def) == NOP_EXPR)
            {
              int need_precision;
              bool fail;

              def = TREE_OPERAND (def, 0);

#ifdef ENABLE_CHECKING
              /* ??? Why was Jeff testing this?  We are gimple...  */
              gcc_assert (is_gimple_val (def));
#endif

              to = TREE_TYPE (cond);
              ti = TREE_TYPE (def);

              /* If we have an extension that preserves value, then we
                 can copy the source value into the switch.  */

              need_precision = TYPE_PRECISION (ti);
              fail = false;
              if (! INTEGRAL_TYPE_P (ti))
                fail = true;
              else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
                fail = true;
              else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
                need_precision += 1;
              if (TYPE_PRECISION (to) < need_precision)
                fail = true;

              if (!fail)
                {
                  SWITCH_COND (stmt) = def;
                  update_stmt (stmt);
                }
            }
        }
    }
}
/* Main entry point for the forward propagation optimizer.  */

static unsigned int
tree_ssa_forward_propagate_single_use_vars (void)
{
  basic_block bb;
  unsigned int todoflags = 0;

  cfg_changed = false;

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi;

      /* Note we update BSI within the loop as necessary.  */
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
        {
          tree stmt = bsi_stmt (bsi);

          /* If this statement sets an SSA_NAME to an address,
             try to propagate the address into the uses of the SSA_NAME.  */
          if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
            {
              tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
              tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);

              if (TREE_CODE (lhs) != SSA_NAME)
                {
                  bsi_next (&bsi);
                  continue;
                }

              if (TREE_CODE (rhs) == ADDR_EXPR)
                {
                  if (forward_propagate_addr_expr (lhs, rhs))
                    {
                      release_defs (stmt);
                      todoflags |= TODO_remove_unused_locals;
                      bsi_remove (&bsi, true);
                    }
                  else
                    bsi_next (&bsi);
                }
              else if ((TREE_CODE (rhs) == BIT_NOT_EXPR
                        || TREE_CODE (rhs) == NEGATE_EXPR)
                       && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                {
                  simplify_not_neg_expr (stmt);
                  bsi_next (&bsi);
                }
              else if (TREE_CODE (rhs) == COND_EXPR)
                {
                  int did_something;
                  fold_defer_overflow_warnings ();
                  did_something = forward_propagate_into_cond (rhs, stmt);
                  if (did_something == 2)
                    cfg_changed = true;
                  fold_undefer_overflow_warnings (!TREE_NO_WARNING (rhs)
                    && did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
                  bsi_next (&bsi);
                }
              else if (COMPARISON_CLASS_P (rhs))
                {
                  if (forward_propagate_comparison (rhs, stmt))
                    {
                      release_defs (stmt);
                      todoflags |= TODO_remove_unused_locals;
                      bsi_remove (&bsi, true);
                    }
                  else
                    bsi_next (&bsi);
                }
              else
                bsi_next (&bsi);
            }
          else if (TREE_CODE (stmt) == SWITCH_EXPR)
            {
              simplify_switch_expr (stmt);
              bsi_next (&bsi);
            }
          else if (TREE_CODE (stmt) == COND_EXPR)
            {
              int did_something;
              fold_defer_overflow_warnings ();
              did_something = forward_propagate_into_cond (stmt, stmt);
              if (did_something == 2)
                cfg_changed = true;
              fold_undefer_overflow_warnings (did_something, stmt,
                                              WARN_STRICT_OVERFLOW_CONDITIONAL);
              bsi_next (&bsi);
            }
          else
            bsi_next (&bsi);
        }
    }

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;
  return todoflags;
}
static bool
gate_forwprop (void)
{
  return 1;
}

struct tree_opt_pass pass_forwprop = {
  "forwprop",				/* name */
  gate_forwprop,			/* gate */
  tree_ssa_forward_propagate_single_use_vars,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_FORWPROP,			/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
  | TODO_ggc_collect
  | TODO_update_ssa
  | TODO_verify_ssa,			/* todo_flags_finish */
  0					/* letter */
};
/* Structure to keep track of the value of a dereferenced PHI result
   and the set of virtual operands used for that dereference.  */

struct phiprop_d
{
  tree value;
  tree vop_stmt;
};

/* Verify if the value recorded for NAME in PHIVN is still valid at
   the start of basic block BB.  */

static bool
phivn_valid_p (struct phiprop_d *phivn, tree name, basic_block bb)
{
  tree vop_stmt = phivn[SSA_NAME_VERSION (name)].vop_stmt;
  ssa_op_iter ui;
  tree vuse;

  /* The def stmts of all virtual uses need to be post-dominated
     by bb.  */
  FOR_EACH_SSA_TREE_OPERAND (vuse, vop_stmt, ui, SSA_OP_VUSE)
    {
      tree use_stmt;
      imm_use_iterator ui2;
      bool ok = true;

      FOR_EACH_IMM_USE_STMT (use_stmt, ui2, vuse)
        {
          /* If BB does not dominate a VDEF, the value is invalid.  */
          if (((TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
                && !ZERO_SSA_OPERANDS (use_stmt, SSA_OP_VDEF))
               || TREE_CODE (use_stmt) == PHI_NODE)
              && !dominated_by_p (CDI_DOMINATORS, bb_for_stmt (use_stmt), bb))
            {
              ok = false;
              BREAK_FROM_IMM_USE_STMT (ui2);
            }
        }
      if (!ok)
        return false;
    }

  return true;
}
/* Insert a new phi node for the dereference of PHI at basic_block
   BB with the virtual operands from USE_STMT.  */

static tree
phiprop_insert_phi (basic_block bb, tree phi, tree use_stmt,
                    struct phiprop_d *phivn, size_t n)
{
  tree res, new_phi;
  edge_iterator ei;
  edge e;

  /* Build a new PHI node to replace the definition of
     the indirect reference lhs.  */
  res = GIMPLE_STMT_OPERAND (use_stmt, 0);
  SSA_NAME_DEF_STMT (res) = new_phi = create_phi_node (res, bb);

  /* Add PHI arguments for each edge inserting loads of the
     addressable operands.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      tree old_arg, new_var, tmp;

      old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
      while (TREE_CODE (old_arg) == SSA_NAME
             && (SSA_NAME_VERSION (old_arg) >= n
                 || phivn[SSA_NAME_VERSION (old_arg)].value == NULL_TREE))
        {
          tree def_stmt = SSA_NAME_DEF_STMT (old_arg);
          old_arg = GIMPLE_STMT_OPERAND (def_stmt, 1);
        }

      if (TREE_CODE (old_arg) == SSA_NAME)
        /* Reuse a formerly created dereference.  */
        new_var = phivn[SSA_NAME_VERSION (old_arg)].value;
      else
        {
          old_arg = TREE_OPERAND (old_arg, 0);
          new_var = create_tmp_var (TREE_TYPE (old_arg), NULL);
          tmp = build2 (GIMPLE_MODIFY_STMT, void_type_node,
                        NULL_TREE, unshare_expr (old_arg));
          if (TREE_CODE (TREE_TYPE (old_arg)) == COMPLEX_TYPE
              || TREE_CODE (TREE_TYPE (old_arg)) == VECTOR_TYPE)
            DECL_GIMPLE_REG_P (new_var) = 1;
          add_referenced_var (new_var);
          new_var = make_ssa_name (new_var, tmp);
          GIMPLE_STMT_OPERAND (tmp, 0) = new_var;

          bsi_insert_on_edge (e, tmp);

          update_stmt (tmp);
          mark_symbols_for_renaming (tmp);
        }

      add_phi_arg (new_phi, new_var, e);
    }

  update_stmt (new_phi);

  return res;
}
/* Propagate between the phi node arguments of PHI in BB and phi result
   users.  For now this matches
        # p_2 = PHI <&x, &y>
      <Lx>:;
        p_3 = p_2;
        z_2 = *p_3;
   and converts it to
        # z_2 = PHI <x, y>
      <Lx>:;
   Returns true if a transformation was done and edge insertions
   need to be committed.  Global data PHIVN and N is used to track
   past transformation results.  We need to be especially careful here
   with aliasing issues as we are moving memory reads.  */
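
/* Illustrative sketch (not from the original sources): source code like

     int f (int c, int x, int y)
     {
       int *p = c ? &x : &y;
       return *p;
     }

   yields a PHI of addresses roughly like

       # p_1 = PHI <&x, &y>
       z_2 = *p_1;

   which this function turns into a PHI of the loaded values

       # z_2 = PHI <x, y>

   with the loads of x and y inserted on the incoming edges.  The
   names shown are hypothetical.  */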
static bool
propagate_with_phi (basic_block bb, tree phi, struct phiprop_d *phivn, size_t n)
{
  tree ptr = PHI_RESULT (phi);
  tree use_stmt, res = NULL_TREE;
  block_stmt_iterator bsi;
  imm_use_iterator ui;
  use_operand_p arg_p, use;
  ssa_op_iter i;
  bool phi_inserted;

  if (MTAG_P (SSA_NAME_VAR (ptr))
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || !is_gimple_reg_type (TREE_TYPE (TREE_TYPE (ptr))))
    return false;

  /* Check if we can "cheaply" dereference all phi arguments.  */
  FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_USE)
    {
      tree arg = USE_FROM_PTR (arg_p);
      /* Walk the ssa chain until we reach a ssa name we already
         created a value for or we reach a definition of the form
         ssa_name_n = &var;  */
      while (TREE_CODE (arg) == SSA_NAME
             && !SSA_NAME_IS_DEFAULT_DEF (arg)
             && (SSA_NAME_VERSION (arg) >= n
                 || phivn[SSA_NAME_VERSION (arg)].value == NULL_TREE))
        {
          tree def_stmt = SSA_NAME_DEF_STMT (arg);
          if (TREE_CODE (def_stmt) != GIMPLE_MODIFY_STMT)
            return false;
          arg = GIMPLE_STMT_OPERAND (def_stmt, 1);
        }
      if ((TREE_CODE (arg) != ADDR_EXPR
           /* Avoid to have to decay *&a to a[0] later.  */
           || !is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (arg, 0))))
          && !(TREE_CODE (arg) == SSA_NAME
               && phivn[SSA_NAME_VERSION (arg)].value != NULL_TREE
               && phivn_valid_p (phivn, arg, bb)))
        return false;
    }

  /* Find a dereferencing use.  First follow (single use) ssa
     copy chains for ptr.  */
  while (single_imm_use (ptr, &use, &use_stmt)
         && TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
         && GIMPLE_STMT_OPERAND (use_stmt, 1) == ptr
         && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) == SSA_NAME)
    ptr = GIMPLE_STMT_OPERAND (use_stmt, 0);

  /* Replace the first dereference of *ptr if there is one and if we
     can move the loads to the place of the ptr phi node.  */
  phi_inserted = false;
  FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
    {
      ssa_op_iter ui2;
      tree vuse;

      /* Check whether this is a load of *ptr.  */
      if (!(TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
            && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) == SSA_NAME
            && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == INDIRECT_REF
            && TREE_OPERAND (GIMPLE_STMT_OPERAND (use_stmt, 1), 0) == ptr
            /* We cannot replace a load that may throw or is volatile.  */
            && !tree_can_throw_internal (use_stmt)))
        continue;

      /* Check if we can move the loads.  The def stmts of all virtual uses
         need to be post-dominated by bb.  */
      FOR_EACH_SSA_TREE_OPERAND (vuse, use_stmt, ui2, SSA_OP_VUSE)
        {
          tree def_stmt = SSA_NAME_DEF_STMT (vuse);
          if (!SSA_NAME_IS_DEFAULT_DEF (vuse)
              && (bb_for_stmt (def_stmt) == bb
                  || !dominated_by_p (CDI_DOMINATORS,
                                      bb, bb_for_stmt (def_stmt))))
            goto next;
        }

      /* Found a proper dereference.  Insert a phi node if this
         is the first load transformation.  */
      if (!phi_inserted)
        {
          res = phiprop_insert_phi (bb, phi, use_stmt, phivn, n);

          /* Remember the value we created for *ptr.  */
          phivn[SSA_NAME_VERSION (ptr)].value = res;
          phivn[SSA_NAME_VERSION (ptr)].vop_stmt = use_stmt;

          /* Remove old stmt.  The phi is taken care of by DCE, if we
             want to delete it here we also have to delete all intermediate
             copies.  */
          bsi = bsi_for_stmt (use_stmt);
          bsi_remove (&bsi, 0);

          phi_inserted = true;
        }
      else
        {
          /* Further replacements are easy, just make a copy out of the
             load.  */
          GIMPLE_STMT_OPERAND (use_stmt, 1) = res;
          update_stmt (use_stmt);
        }

next:;
      /* Continue searching for a proper dereference.  */
    }

  return phi_inserted;
}
/* Helper walking the dominator tree starting from BB and processing
   phi nodes with global data PHIVN and N.  */

static bool
tree_ssa_phiprop_1 (basic_block bb, struct phiprop_d *phivn, size_t n)
{
  bool did_something = false;
  basic_block son;
  tree phi;

  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
    did_something |= propagate_with_phi (bb, phi, phivn, n);

  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    did_something |= tree_ssa_phiprop_1 (son, phivn, n);

  return did_something;
}
/* Main entry for phiprop pass.  */

static unsigned int
tree_ssa_phiprop (void)
{
  struct phiprop_d *phivn;

  calculate_dominance_info (CDI_DOMINATORS);

  phivn = XCNEWVEC (struct phiprop_d, num_ssa_names);

  if (tree_ssa_phiprop_1 (ENTRY_BLOCK_PTR, phivn, num_ssa_names))
    bsi_commit_edge_inserts ();

  free (phivn);

  return 0;
}

static bool
gate_phiprop (void)
{
  return 1;
}
struct tree_opt_pass pass_phiprop = {
  "phiprop",				/* name */
  gate_phiprop,				/* gate */
  tree_ssa_phiprop,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_FORWPROP,			/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
  | TODO_ggc_collect
  | TODO_update_ssa
  | TODO_verify_ssa,			/* todo_flags_finish */
  0					/* letter */
};