gcc/tree-ssa-forwprop.c
/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "timevar.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-dump.h"
#include "langhooks.h"
#include "flags.h"

/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   Note carefully that after propagation the resulting statement
   must still be a proper gimple statement.  Right now we only
   perform propagations we know will result in valid gimple
   code.  One day we'll want to generalize this code.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions
   (which lets us remove the cast), and {NOT_EXPR,NEG_EXPR} into a
   subsequent {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */

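/* As an illustration of the last two transformations (a hypothetical
   example, not taken from the original sources):

     signed char c;
     switch ((int) c) ...        is simplified to     switch (c) ...

   and

     x = ~a;
     y = ~x;                     is simplified to     y = a;

   The switch case is handled by simplify_switch_expr and the
   double-negation case by simplify_not_neg_expr below.  */
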
static bool forward_propagate_addr_expr (tree name, tree rhs);

/* Set to true if we delete EH edges during the optimization.  */
static bool cfg_changed;

/* Get the next statement we can propagate NAME's value into, skipping
   trivial copies.  Returns the statement that is suitable as a
   propagation destination or NULL_TREE if there is no such statement.
   This only returns destinations in a single-use chain.  If FINAL_NAME_P
   is non-NULL, the ssa name that represents the use is written to it.  */

static tree
get_prop_dest_stmt (tree name, tree *final_name_p)
{
  use_operand_p use;
  tree use_stmt;

  do {
    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))
      return NULL_TREE;

    /* If this is not a trivial copy, we found it.  */
    if (TREE_CODE (use_stmt) != GIMPLE_MODIFY_STMT
        || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) != SSA_NAME
        || GIMPLE_STMT_OPERAND (use_stmt, 1) != name)
      break;

    /* Continue searching uses of the copy destination.  */
    name = GIMPLE_STMT_OPERAND (use_stmt, 0);
  } while (1);

  if (final_name_p)
    *final_name_p = name;

  return use_stmt;
}

/* Get the statement we can propagate from into NAME, skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL_TREE if there is no such statement.
   If SINGLE_USE_ONLY is set, consider only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static tree
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    tree def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
        single_use = false;
        if (single_use_only)
          return NULL_TREE;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (TREE_CODE (def_stmt) != GIMPLE_MODIFY_STMT)
      return NULL_TREE;

    /* If name is not a simple copy destination, we found it.  */
    if (TREE_CODE (GIMPLE_STMT_OPERAND (def_stmt, 1)) != SSA_NAME)
      {
        if (!single_use_only && single_use_p)
          *single_use_p = single_use;

        return def_stmt;
      }

    /* Continue searching the def of the copy source name.  */
    name = GIMPLE_STMT_OPERAND (def_stmt, 1);
  } while (1);
}

/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (tree def_stmt)
{
  tree rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  switch (TREE_CODE_LENGTH (TREE_CODE (rhs)))
    {
    case 3:
      if (TREE_OPERAND (rhs, 2) != NULL_TREE
          && TREE_CODE (TREE_OPERAND (rhs, 2)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (rhs, 2)))
        return false;
    case 2:
      if (TREE_OPERAND (rhs, 1) != NULL_TREE
          && TREE_CODE (TREE_OPERAND (rhs, 1)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (rhs, 1)))
        return false;
    case 1:
      if (TREE_OPERAND (rhs, 0) != NULL_TREE
          && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (rhs, 0)))
        return false;
      break;

    default:
      return false;
    }

  /* If the definition is a conversion of a pointer to a function type,
     then we can not apply optimizations as some targets require function
     pointers to be canonicalized and in this case this optimization could
     eliminate a necessary canonicalization.  */
  if ((TREE_CODE (rhs) == NOP_EXPR
       || TREE_CODE (rhs) == CONVERT_EXPR)
      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0)))
      && TREE_CODE (TREE_TYPE (TREE_TYPE
                               (TREE_OPERAND (rhs, 0)))) == FUNCTION_TYPE)
    return false;

  return true;
}

/* Remove a copy chain ending in NAME, walking along the defs but not
   removing UP_TO_STMT or anything past it.  If NAME was replaced in
   its only use then this function can be used to clean up
   dead stmts.  Returns true if UP_TO_STMT can be removed
   as well, otherwise false.  */

static bool
remove_prop_source_from_use (tree name, tree up_to_stmt)
{
  block_stmt_iterator bsi;
  tree stmt;

  do {
    if (!has_zero_uses (name))
      return false;

    stmt = SSA_NAME_DEF_STMT (name);
    if (stmt == up_to_stmt)
      return true;

    bsi = bsi_for_stmt (stmt);
    release_defs (stmt);
    bsi_remove (&bsi, true);

    name = GIMPLE_STMT_OPERAND (stmt, 1);
  } while (TREE_CODE (name) == SSA_NAME);

  return false;
}

/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  t = fold_binary (code, type, op0, op1);
  if (!t)
    return NULL_TREE;

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* For (bool)x use x != 0.  */
  if (TREE_CODE (t) == NOP_EXPR
      && TREE_TYPE (t) == boolean_type_node)
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (NE_EXPR, type,
                  top0, build_int_cst (TREE_TYPE (top0), 0));
    }
  /* For !x use x == 0.  */
  else if (TREE_CODE (t) == TRUTH_NOT_EXPR)
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (EQ_EXPR, type,
                  top0, build_int_cst (TREE_TYPE (top0), 0));
    }
  /* For cmp ? 1 : 0 use cmp.  */
  else if (TREE_CODE (t) == COND_EXPR
           && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
           && integer_onep (TREE_OPERAND (t, 1))
           && integer_zerop (TREE_OPERAND (t, 2)))
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (TREE_CODE (top0), type,
                  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
    }

  /* Bail out if we required an invariant but didn't get one.  */
  if (invariant_only
      && !is_gimple_min_invariant (t))
    return NULL_TREE;

  /* A valid conditional for a COND_EXPR is either a gimple value
     or a comparison with two gimple value operands.  */
  if (is_gimple_val (t)
      || (COMPARISON_CLASS_P (t)
          && is_gimple_val (TREE_OPERAND (t, 0))
          && is_gimple_val (TREE_OPERAND (t, 1))))
    return t;

  return NULL_TREE;
}

/* Propagate from the ssa name definition statements of COND_EXPR
   in statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.  */

static int
forward_propagate_into_cond (tree cond_expr, tree stmt)
{
  int did_something = 0;

  do {
    tree tmp = NULL_TREE;
    tree cond = COND_EXPR_COND (cond_expr);
    tree name, def_stmt, rhs;
    bool single_use_p;

    /* We can do tree combining on SSA_NAME and comparison expressions.  */
    if (COMPARISON_CLASS_P (cond)
        && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME)
      {
        /* For comparisons use the first operand, that is likely to
           simplify comparisons against constants.  */
        name = TREE_OPERAND (cond, 0);
        def_stmt = get_prop_source_stmt (name, false, &single_use_p);
        if (def_stmt != NULL_TREE
            && can_propagate_from (def_stmt))
          {
            tree op1 = TREE_OPERAND (cond, 1);
            rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
            tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
                                          fold_convert (TREE_TYPE (op1), rhs),
                                          op1, !single_use_p);
          }
        /* If that wasn't successful, try the second operand.  */
        if (tmp == NULL_TREE
            && TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME)
          {
            tree op0 = TREE_OPERAND (cond, 0);
            name = TREE_OPERAND (cond, 1);
            def_stmt = get_prop_source_stmt (name, false, &single_use_p);
            if (def_stmt == NULL_TREE
                || !can_propagate_from (def_stmt))
              return did_something;

            rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
            tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
                                          op0,
                                          fold_convert (TREE_TYPE (op0), rhs),
                                          !single_use_p);
          }
      }
    else if (TREE_CODE (cond) == SSA_NAME)
      {
        name = cond;
        def_stmt = get_prop_source_stmt (name, true, NULL);
        if (def_stmt == NULL_TREE
            || !can_propagate_from (def_stmt))
          return did_something;

        rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
        tmp = combine_cond_expr_cond (NE_EXPR, boolean_type_node, rhs,
                                      build_int_cst (TREE_TYPE (rhs), 0),
                                      false);
      }

    if (tmp)
      {
        if (dump_file && tmp)
          {
            fprintf (dump_file, "  Replaced '");
            print_generic_expr (dump_file, cond, 0);
            fprintf (dump_file, "' with '");
            print_generic_expr (dump_file, tmp, 0);
            fprintf (dump_file, "'\n");
          }

        COND_EXPR_COND (cond_expr) = unshare_expr (tmp);
        update_stmt (stmt);

        /* Remove defining statements.  */
        remove_prop_source_from_use (name, NULL);

        if (is_gimple_min_invariant (tmp))
          did_something = 2;
        else if (did_something == 0)
          did_something = 1;

        /* Continue combining.  */
        continue;
      }

    break;
  } while (1);

  return did_something;
}

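/* As a rough illustration of the combining above (a hypothetical
   example, not taken from the original sources), for C source like

     int f (int a)
     {
       int x = a + 1;
       if (x == 2)
         return 1;
       return 0;
     }

   the definition of x is propagated into the condition and folded by
   combine_cond_expr_cond, so the test becomes "if (a == 1)" and the
   addition becomes dead.  */
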
/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (tree stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && tree_purge_dead_eh_edges (bb_for_stmt (stmt)))
    cfg_changed = true;

  if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR)
     recompute_tree_invariant_for_addr_expr (GIMPLE_STMT_OPERAND (stmt, 1));

  mark_symbols_for_renaming (stmt);
}

/* DEF_RHS contains the address of the 0th element in an array.
   USE_STMT uses the type of DEF_RHS to compute the address of an
   arbitrary element within the array.  The (variable) byte offset
   of the element is contained in OFFSET.

   We walk back through the use-def chains of OFFSET to verify that
   it is indeed computing the offset of an element within the array
   and extract the index corresponding to the given byte offset.

   We then try to fold the entire address expression into a form
   &array[index].

   If we are successful, we replace the right hand side of USE_STMT
   with the new address computation.  */

static bool
forward_propagate_addr_into_variable_array_index (tree offset,
                                                  tree def_rhs, tree use_stmt)
{
  tree index;

  /* Try to find an expression for a proper index.  This is either
     a multiplication expression by the element size or just the
     ssa name we came along in case the element size is one.  */
  if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
    index = offset;
  else
    {
      /* Get the offset's defining statement.  */
      offset = SSA_NAME_DEF_STMT (offset);

      /* The statement which defines OFFSET before type conversion
         must be a simple GIMPLE_MODIFY_STMT.  */
      if (TREE_CODE (offset) != GIMPLE_MODIFY_STMT)
        return false;

      /* The RHS of the statement which defines OFFSET must be a
         multiplication of an object by the size of the array elements.
         This implicitly verifies that the size of the array elements
         is constant.  */
      offset = GIMPLE_STMT_OPERAND (offset, 1);
      if (TREE_CODE (offset) != MULT_EXPR
          || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
          || !simple_cst_equal (TREE_OPERAND (offset, 1),
                                TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
        return false;

      /* The first operand to the MULT_EXPR is the desired index.  */
      index = TREE_OPERAND (offset, 0);
    }

  /* Replace the pointer addition with array indexing.  */
  GIMPLE_STMT_OPERAND (use_stmt, 1) = unshare_expr (def_rhs);
  TREE_OPERAND (TREE_OPERAND (GIMPLE_STMT_OPERAND (use_stmt, 1), 0), 1)
    = index;

  /* That should have created gimple, so there is no need to
     record information to undo the propagation.  */
  fold_stmt_inplace (use_stmt);
  tidy_after_forward_propagate_addr (use_stmt);
  return true;
}

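/* For illustration (a hypothetical example, not taken from the original
   sources), assuming a target with 4-byte int:

     int a[16];
     int g (int i)
     {
       int *p = &a[0];
       return *(p + i);
     }

   After gimplification the byte offset is computed as i * 4 and added
   to &a[0]; the code above recognizes that multiplication by the
   element size and rewrites the pointer arithmetic as &a[i].  */
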
/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful.  (The propagation may
   be only partially successful; even then, things may have been
   changed.)  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs, tree use_stmt,
                               bool single_use_p)
{
  tree lhs, rhs, array_ref;

  /* Strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  lhs = GIMPLE_STMT_OPERAND (use_stmt, 0);
  while (handled_component_p (lhs))
    lhs = TREE_OPERAND (lhs, 0);

  rhs = GIMPLE_STMT_OPERAND (use_stmt, 1);

  /* Now see if the LHS node is an INDIRECT_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == INDIRECT_REF && TREE_OPERAND (lhs, 0) == name)
    {
      /* This should always succeed in creating gimple, so there is
         no need to save enough state to undo this propagation.  */
      TREE_OPERAND (lhs, 0) = unshare_expr (def_rhs);
      fold_stmt_inplace (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);

      /* Continue propagating into the RHS.  */
    }
  /* Trivial cases.  The use statement could be a trivial copy or a
     useless conversion.  Recurse to the uses of the lhs as copyprop does
     not copy through different variant pointers and FRE does not catch
     all useless conversions.  Treat the case of a single-use name and
     a conversion to def_rhs type separately, though.  */
  else if (TREE_CODE (lhs) == SSA_NAME
           && (TREE_CODE (rhs) == NOP_EXPR
               || TREE_CODE (rhs) == CONVERT_EXPR)
           && TREE_TYPE (rhs) == TREE_TYPE (def_rhs)
           && single_use_p)
    {
      GIMPLE_STMT_OPERAND (use_stmt, 1) = unshare_expr (def_rhs);
      return true;
    }
  else if ((TREE_CODE (lhs) == SSA_NAME
            && rhs == name)
           || ((TREE_CODE (rhs) == NOP_EXPR
                || TREE_CODE (rhs) == CONVERT_EXPR)
               && tree_ssa_useless_type_conversion_1 (TREE_TYPE (rhs),
                                                      TREE_TYPE (def_rhs))))
    return forward_propagate_addr_expr (lhs, def_rhs);

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  while (handled_component_p (rhs)
         || TREE_CODE (rhs) == ADDR_EXPR)
    rhs = TREE_OPERAND (rhs, 0);

  /* Now see if the RHS node is an INDIRECT_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == INDIRECT_REF && TREE_OPERAND (rhs, 0) == name)
    {
      /* This should always succeed in creating gimple, so there is
         no need to save enough state to undo this propagation.  */
      TREE_OPERAND (rhs, 0) = unshare_expr (def_rhs);
      fold_stmt_inplace (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if (TREE_CODE (array_ref) != ARRAY_REF
      || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
      || !integer_zerop (TREE_OPERAND (array_ref, 1)))
    return false;

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (TREE_CODE (rhs) != POINTER_PLUS_EXPR)
    return false;

  /* Try to optimize &x[0] p+ C where C is a multiple of the size
     of the elements in X into &x[C/element size].  */
  if (TREE_OPERAND (rhs, 0) == name
      && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
    {
      tree orig = unshare_expr (rhs);
      TREE_OPERAND (rhs, 0) = unshare_expr (def_rhs);

      /* If folding succeeds, then we have just exposed new variables
         in USE_STMT which will need to be renamed.  If folding fails,
         then we need to put everything back the way it was.  */
      if (fold_stmt_inplace (use_stmt))
        {
          tidy_after_forward_propagate_addr (use_stmt);
          return true;
        }
      else
        {
          GIMPLE_STMT_OPERAND (use_stmt, 1) = orig;
          update_stmt (use_stmt);
          return false;
        }
    }

  /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
     converting a multiplication of an index by the size of the
     array elements into the proper type for the pointer arithmetic.  */
  if (TREE_OPERAND (rhs, 0) == name
      && TREE_CODE (TREE_OPERAND (rhs, 1)) == SSA_NAME
      /* Avoid problems with IVopts creating PLUS_EXPRs with a
         different type than their operands.  */
      && lang_hooks.types_compatible_p (TREE_TYPE (name), TREE_TYPE (rhs)))
    {
      bool res;

      res = forward_propagate_addr_into_variable_array_index (TREE_OPERAND (rhs, 1),
                                                               def_rhs, use_stmt);
      return res;
    }
  return false;
}

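/* For illustration of the constant-offset case above (a hypothetical
   example, not taken from the original sources), with "int a[4];" on a
   target with 4-byte int:

     p_1 = &a[0];
     q_2 = p_1 p+ 8;

   is folded into

     q_2 = &a[2];

   since the constant offset is exactly two element sizes.  */
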
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.
   Returns true, if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs)
{
  int stmt_loop_depth = bb_for_stmt (SSA_NAME_DEF_STMT (name))->loop_depth;
  imm_use_iterator iter;
  tree use_stmt;
  bool all = true;
  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (TREE_CODE (use_stmt) != GIMPLE_MODIFY_STMT)
        {
          all = false;
          continue;
        }

      /* If the use is in a deeper loop nest, then we do not want
         to propagate the ADDR_EXPR into the loop as that is likely
         adding expression evaluations into the loop.  */
      if (bb_for_stmt (use_stmt)->loop_depth > stmt_loop_depth)
        {
          all = false;
          continue;
        }

      /* If the use_stmt has side-effects, don't propagate into it.  */
      if (stmt_ann (use_stmt)->has_volatile_ops)
        {
          all = false;
          continue;
        }

      push_stmt_changes (&use_stmt);

      result = forward_propagate_addr_expr_1 (name, rhs, use_stmt,
                                              single_use_p);
      all &= result;

      pop_stmt_changes (&use_stmt);

      /* Remove intermediate now unused copy and conversion chains.  */
      if (result
          && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) == SSA_NAME
          && (TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == SSA_NAME
              || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == NOP_EXPR
              || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == CONVERT_EXPR))
        {
          block_stmt_iterator bsi = bsi_for_stmt (use_stmt);
          release_defs (use_stmt);
          bsi_remove (&bsi, true);
        }
    }

  return all;
}

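/* For illustration (a hypothetical example, not taken from the original
   sources):

     struct s { int v; } g;
     int h (void)
     {
       int *p = &g.v;
       return *p;
     }

   The address &g.v is propagated into the dereference and folded, so
   the load becomes a direct read of g.v and p becomes unused.  */
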
/* Forward propagate the comparison COND defined in STMT like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  */

static bool
forward_propagate_comparison (tree cond, tree stmt)
{
  tree name = GIMPLE_STMT_OPERAND (stmt, 0);
  tree use_stmt, tmp = NULL_TREE;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (cond, 0)))
      || (TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (cond, 1))))
    return false;

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (use_stmt == NULL_TREE)
    return false;

  /* Conversion of the condition result to another integral type.  */
  if (TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
      && (TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == CONVERT_EXPR
          || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == NOP_EXPR
          || COMPARISON_CLASS_P (GIMPLE_STMT_OPERAND (use_stmt, 1))
          || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == TRUTH_NOT_EXPR)
      && INTEGRAL_TYPE_P (TREE_TYPE (GIMPLE_STMT_OPERAND (use_stmt, 0))))
    {
      tree lhs = GIMPLE_STMT_OPERAND (use_stmt, 0);
      tree rhs = GIMPLE_STMT_OPERAND (use_stmt, 1);

      /* We can propagate the condition into a conversion.  */
      if (TREE_CODE (rhs) == CONVERT_EXPR
          || TREE_CODE (rhs) == NOP_EXPR)
        {
          /* Avoid using fold here as that may create a COND_EXPR with
             non-boolean condition as canonical form.  */
          tmp = build2 (TREE_CODE (cond), TREE_TYPE (lhs),
                        TREE_OPERAND (cond, 0), TREE_OPERAND (cond, 1));
        }
      /* We can propagate the condition into X op CST where op
         is EQ_EXPR or NE_EXPR and CST is either one or zero.  */
      else if (COMPARISON_CLASS_P (rhs)
               && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
               && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
        {
          enum tree_code code = TREE_CODE (rhs);
          tree cst = TREE_OPERAND (rhs, 1);

          tmp = combine_cond_expr_cond (code, TREE_TYPE (lhs),
                                        fold_convert (TREE_TYPE (cst), cond),
                                        cst, false);
          if (tmp == NULL_TREE)
            return false;
        }
      /* We can propagate the condition into a statement that
         computes the logical negation of the comparison result.  */
      else if (TREE_CODE (rhs) == TRUTH_NOT_EXPR)
        {
          tree type = TREE_TYPE (TREE_OPERAND (cond, 0));
          bool nans = HONOR_NANS (TYPE_MODE (type));
          enum tree_code code;
          code = invert_tree_comparison (TREE_CODE (cond), nans);
          if (code == ERROR_MARK)
            return false;

          tmp = build2 (code, TREE_TYPE (lhs), TREE_OPERAND (cond, 0),
                        TREE_OPERAND (cond, 1));
        }
      else
        return false;

      GIMPLE_STMT_OPERAND (use_stmt, 1) = unshare_expr (tmp);
      update_stmt (use_stmt);

      /* Remove defining statements.  */
      remove_prop_source_from_use (name, stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, rhs, dump_flags);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, dump_flags);
          fprintf (dump_file, "'\n");
        }

      return true;
    }

  return false;
}

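/* For illustration (a hypothetical example, not taken from the original
   sources):

     _Bool tem = a < b;
     int flag = (int) tem;

   The comparison is propagated into the conversion, giving
   "flag = a < b" computed directly in type int, and the statement
   defining tem becomes unused.  */
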
/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
   If so, we can change STMT into lhs = y which can later be copy
   propagated.  Similarly for negation.

   This could trivially be formulated as a forward propagation
   to immediate uses.  However, we already had an implementation
   from DOM which used backward propagation via the use-def links.

   It turns out that backward propagation is actually faster as
   there's less work to do for each NOT/NEG expression we find.
   Backwards propagation needs to look at the statement in a single
   backlink.  Forward propagation needs to look at potentially more
   than one forward link.  */

static void
simplify_not_neg_expr (tree stmt)
{
  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
  tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));

  /* See if the RHS_DEF_STMT has the same form as our statement.  */
  if (TREE_CODE (rhs_def_stmt) == GIMPLE_MODIFY_STMT
      && TREE_CODE (GIMPLE_STMT_OPERAND (rhs_def_stmt, 1)) == TREE_CODE (rhs))
    {
      tree rhs_def_operand =
        TREE_OPERAND (GIMPLE_STMT_OPERAND (rhs_def_stmt, 1), 0);

      /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
      if (TREE_CODE (rhs_def_operand) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
        {
          GIMPLE_STMT_OPERAND (stmt, 1) = rhs_def_operand;
          update_stmt (stmt);
        }
    }
}

/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static void
simplify_switch_expr (tree stmt)
{
  tree cond = SWITCH_COND (stmt);
  tree def, to, ti;

  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      def = SSA_NAME_DEF_STMT (cond);
      if (TREE_CODE (def) == GIMPLE_MODIFY_STMT)
        {
          def = GIMPLE_STMT_OPERAND (def, 1);
          if (TREE_CODE (def) == NOP_EXPR)
            {
              int need_precision;
              bool fail;

              def = TREE_OPERAND (def, 0);

#ifdef ENABLE_CHECKING
              /* ??? Why was Jeff testing this?  We are gimple...  */
              gcc_assert (is_gimple_val (def));
#endif

              to = TREE_TYPE (cond);
              ti = TREE_TYPE (def);

              /* If we have an extension that preserves value, then we
                 can copy the source value into the switch.  */

              need_precision = TYPE_PRECISION (ti);
              fail = false;
              if (! INTEGRAL_TYPE_P (ti))
                fail = true;
              else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
                fail = true;
              else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
                need_precision += 1;
              if (TYPE_PRECISION (to) < need_precision)
                fail = true;

              if (!fail)
                {
                  SWITCH_COND (stmt) = def;
                  update_stmt (stmt);
                }
            }
        }
    }
}

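/* A worked example of the precision check above (hypothetical, for
   illustration only): for

     unsigned char c;
     switch ((int) c) ...

   TI is unsigned with precision 8 and TO is signed with precision 32,
   so need_precision becomes 9, 32 >= 9 holds and the cast is removed.
   For

     int i;
     switch ((unsigned char) i) ...

   the narrowing cast does not preserve the value, so it is kept.  */
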
/* Main entry point for the forward propagation optimizer.  */

static unsigned int
tree_ssa_forward_propagate_single_use_vars (void)
{
  basic_block bb;
  unsigned int todoflags = 0;

  cfg_changed = false;

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi;

      /* Note we update BSI within the loop as necessary.  */
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
        {
          tree stmt = bsi_stmt (bsi);

          /* If this statement sets an SSA_NAME to an address,
             try to propagate the address into the uses of the SSA_NAME.  */
          if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
            {
              tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
              tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);

              if (TREE_CODE (lhs) != SSA_NAME)
                {
                  bsi_next (&bsi);
                  continue;
                }

              if (TREE_CODE (rhs) == ADDR_EXPR)
                {
                  if (forward_propagate_addr_expr (lhs, rhs))
                    {
                      release_defs (stmt);
                      todoflags |= TODO_remove_unused_locals;
                      bsi_remove (&bsi, true);
                    }
                  else
                    bsi_next (&bsi);
                }
              else if ((TREE_CODE (rhs) == BIT_NOT_EXPR
                        || TREE_CODE (rhs) == NEGATE_EXPR)
                       && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                {
                  simplify_not_neg_expr (stmt);
                  bsi_next (&bsi);
                }
              else if (TREE_CODE (rhs) == COND_EXPR)
                {
                  int did_something;
                  fold_defer_overflow_warnings ();
                  did_something = forward_propagate_into_cond (rhs, stmt);
                  if (did_something == 2)
                    cfg_changed = true;
                  fold_undefer_overflow_warnings (!TREE_NO_WARNING (rhs)
                    && did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
                  bsi_next (&bsi);
                }
              else if (COMPARISON_CLASS_P (rhs))
                {
                  if (forward_propagate_comparison (rhs, stmt))
                    {
                      release_defs (stmt);
                      todoflags |= TODO_remove_unused_locals;
                      bsi_remove (&bsi, true);
                    }
                  else
                    bsi_next (&bsi);
                }
              else
                bsi_next (&bsi);
            }
          else if (TREE_CODE (stmt) == SWITCH_EXPR)
            {
              simplify_switch_expr (stmt);
              bsi_next (&bsi);
            }
          else if (TREE_CODE (stmt) == COND_EXPR)
            {
              int did_something;
              fold_defer_overflow_warnings ();
              did_something = forward_propagate_into_cond (stmt, stmt);
              if (did_something == 2)
                cfg_changed = true;
              fold_undefer_overflow_warnings (!TREE_NO_WARNING (stmt)
                                              && did_something, stmt,
                                              WARN_STRICT_OVERFLOW_CONDITIONAL);
              bsi_next (&bsi);
            }
          else
            bsi_next (&bsi);
        }
    }

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;
  return todoflags;
}

static bool
gate_forwprop (void)
{
  return 1;
}

struct tree_opt_pass pass_forwprop =
{
  "forwprop",				/* name */
  gate_forwprop,			/* gate */
  tree_ssa_forward_propagate_single_use_vars,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_FORWPROP,			/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
  | TODO_ggc_collect
  | TODO_update_ssa
  | TODO_verify_ssa,			/* todo_flags_finish */
  0					/* letter */
};

/* Structure to keep track of the value of a dereferenced PHI result
   and the set of virtual operands used for that dereference.  */

struct phiprop_d
{
  tree value;
  tree vop_stmt;
};

/* Verify if the value recorded for NAME in PHIVN is still valid at
   the start of basic block BB.  */

static bool
phivn_valid_p (struct phiprop_d *phivn, tree name, basic_block bb)
{
  tree vop_stmt = phivn[SSA_NAME_VERSION (name)].vop_stmt;
  ssa_op_iter ui;
  tree vuse;

  /* The def stmts of all virtual uses need to be post-dominated
     by bb.  */
  FOR_EACH_SSA_TREE_OPERAND (vuse, vop_stmt, ui, SSA_OP_VUSE)
    {
      tree use_stmt;
      imm_use_iterator ui2;
      bool ok = true;

      FOR_EACH_IMM_USE_STMT (use_stmt, ui2, vuse)
        {
          /* If BB does not dominate a VDEF, the value is invalid.  */
          if (((TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
                && !ZERO_SSA_OPERANDS (use_stmt, SSA_OP_VDEF))
               || TREE_CODE (use_stmt) == PHI_NODE)
              && !dominated_by_p (CDI_DOMINATORS, bb_for_stmt (use_stmt), bb))
            {
              ok = false;
              BREAK_FROM_IMM_USE_STMT (ui2);
            }
        }
      if (!ok)
        return false;
    }

  return true;
}

/* Insert a new phi node for the dereference of PHI at basic_block
   BB with the virtual operands from USE_STMT.  */

static tree
phiprop_insert_phi (basic_block bb, tree phi, tree use_stmt,
                    struct phiprop_d *phivn, size_t n)
{
  tree res, new_phi;
  edge_iterator ei;
  edge e;

  /* Build a new PHI node to replace the definition of
     the indirect reference lhs.  */
  res = GIMPLE_STMT_OPERAND (use_stmt, 0);
  SSA_NAME_DEF_STMT (res) = new_phi = create_phi_node (res, bb);

  /* Add PHI arguments for each edge inserting loads of the
     addressable operands.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      tree old_arg, new_var, tmp;

      old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
      while (TREE_CODE (old_arg) == SSA_NAME
             && (SSA_NAME_VERSION (old_arg) >= n
                 || phivn[SSA_NAME_VERSION (old_arg)].value == NULL_TREE))
        {
          tree def_stmt = SSA_NAME_DEF_STMT (old_arg);
          old_arg = GIMPLE_STMT_OPERAND (def_stmt, 1);
        }

      if (TREE_CODE (old_arg) == SSA_NAME)
        /* Reuse a formerly created dereference.  */
        new_var = phivn[SSA_NAME_VERSION (old_arg)].value;
      else
        {
          old_arg = TREE_OPERAND (old_arg, 0);
          new_var = create_tmp_var (TREE_TYPE (old_arg), NULL);
          tmp = build2 (GIMPLE_MODIFY_STMT, void_type_node,
                        NULL_TREE, unshare_expr (old_arg));
          if (TREE_CODE (TREE_TYPE (old_arg)) == COMPLEX_TYPE
              || TREE_CODE (TREE_TYPE (old_arg)) == VECTOR_TYPE)
            DECL_GIMPLE_REG_P (new_var) = 1;
          add_referenced_var (new_var);
          new_var = make_ssa_name (new_var, tmp);
          GIMPLE_STMT_OPERAND (tmp, 0) = new_var;

          bsi_insert_on_edge (e, tmp);

          update_stmt (tmp);
          mark_symbols_for_renaming (tmp);
        }

      add_phi_arg (new_phi, new_var, e);
    }

  update_stmt (new_phi);

  return res;
}

/* Propagate between the phi node arguments of PHI in BB and phi result
   users.  For now this matches
        # p_2 = PHI <&x, &y>
      <Lx>:;
        p_3 = p_2;
        z_2 = *p_3;
   and converts it to
        # z_2 = PHI <x, y>
      <Lx>:;
   Returns true if a transformation was done and edge insertions
   need to be committed.  Global data PHIVN and N is used to track
   past transformation results.  We need to be especially careful here
   with aliasing issues as we are moving memory reads.  */

static bool
propagate_with_phi (basic_block bb, tree phi, struct phiprop_d *phivn, size_t n)
{
  tree ptr = PHI_RESULT (phi);
  tree use_stmt, res = NULL_TREE;
  block_stmt_iterator bsi;
  imm_use_iterator ui;
  use_operand_p arg_p, use;
  ssa_op_iter i;
  bool phi_inserted;

  if (MTAG_P (SSA_NAME_VAR (ptr))
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || !is_gimple_reg_type (TREE_TYPE (TREE_TYPE (ptr))))
    return false;

  /* Check if we can "cheaply" dereference all phi arguments.  */
  FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_USE)
    {
      tree arg = USE_FROM_PTR (arg_p);
      /* Walk the ssa chain until we reach a ssa name we already
         created a value for or we reach a definition of the form
         ssa_name_n = &var;  */
      while (TREE_CODE (arg) == SSA_NAME
             && !SSA_NAME_IS_DEFAULT_DEF (arg)
             && (SSA_NAME_VERSION (arg) >= n
                 || phivn[SSA_NAME_VERSION (arg)].value == NULL_TREE))
        {
          tree def_stmt = SSA_NAME_DEF_STMT (arg);
          if (TREE_CODE (def_stmt) != GIMPLE_MODIFY_STMT)
            return false;
          arg = GIMPLE_STMT_OPERAND (def_stmt, 1);
        }
      if ((TREE_CODE (arg) != ADDR_EXPR
           /* Avoid having to decay *&a to a[0] later.  */
           || !is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (arg, 0))))
          && !(TREE_CODE (arg) == SSA_NAME
               && phivn[SSA_NAME_VERSION (arg)].value != NULL_TREE
               && phivn_valid_p (phivn, arg, bb)))
        return false;
    }

  /* Find a dereferencing use.  First follow (single use) ssa
     copy chains for ptr.  */
  while (single_imm_use (ptr, &use, &use_stmt)
         && TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
         && GIMPLE_STMT_OPERAND (use_stmt, 1) == ptr
         && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) == SSA_NAME)
    ptr = GIMPLE_STMT_OPERAND (use_stmt, 0);

  /* Replace the first dereference of *ptr if there is one and if we
     can move the loads to the place of the ptr phi node.  */
  phi_inserted = false;
  FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
    {
      ssa_op_iter ui2;
      tree vuse;

      /* Check whether this is a load of *ptr.  */
      if (!(TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
            && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) == SSA_NAME
            && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == INDIRECT_REF
            && TREE_OPERAND (GIMPLE_STMT_OPERAND (use_stmt, 1), 0) == ptr
            /* We cannot replace a load that may throw or is volatile.  */
            && !tree_can_throw_internal (use_stmt)))
        continue;

      /* Check if we can move the loads.  The def stmts of all virtual uses
         need to be post-dominated by bb.  */
      FOR_EACH_SSA_TREE_OPERAND (vuse, use_stmt, ui2, SSA_OP_VUSE)
        {
          tree def_stmt = SSA_NAME_DEF_STMT (vuse);
          if (!SSA_NAME_IS_DEFAULT_DEF (vuse)
              && (bb_for_stmt (def_stmt) == bb
                  || !dominated_by_p (CDI_DOMINATORS,
                                      bb, bb_for_stmt (def_stmt))))
            goto next;
        }

      /* Found a proper dereference.  Insert a phi node if this
         is the first load transformation.  */
      if (!phi_inserted)
        {
          res = phiprop_insert_phi (bb, phi, use_stmt, phivn, n);

          /* Remember the value we created for *ptr.  */
          phivn[SSA_NAME_VERSION (ptr)].value = res;
          phivn[SSA_NAME_VERSION (ptr)].vop_stmt = use_stmt;

          /* Remove old stmt.  The phi is taken care of by DCE, if we
             want to delete it here we also have to delete all intermediate
             copies.  */
          bsi = bsi_for_stmt (use_stmt);
          bsi_remove (&bsi, 0);

          phi_inserted = true;
        }
      else
        {
          /* Further replacements are easy, just make a copy out of the
             load.  */
          GIMPLE_STMT_OPERAND (use_stmt, 1) = res;
          update_stmt (use_stmt);
        }

next:;
      /* Continue searching for a proper dereference.  */
    }

  return phi_inserted;
}

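/* For illustration (a hypothetical example, not taken from the original
   sources):

     int x, y;
     int f (int c)
     {
       int *p = c ? &x : &y;
       return *p;
     }

   The PHI node for p has the arguments &x and &y, so the load *p is
   replaced by a PHI of loads of x and y inserted on the incoming
   edges, matching the pattern described above.  */
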
/* Helper walking the dominator tree starting from BB and processing
   phi nodes with global data PHIVN and N.  */

static bool
tree_ssa_phiprop_1 (basic_block bb, struct phiprop_d *phivn, size_t n)
{
  bool did_something = false;
  basic_block son;
  tree phi;

  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
    did_something |= propagate_with_phi (bb, phi, phivn, n);

  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    did_something |= tree_ssa_phiprop_1 (son, phivn, n);

  return did_something;
}

/* Main entry for phiprop pass.  */

static unsigned int
tree_ssa_phiprop (void)
{
  struct phiprop_d *phivn;

  calculate_dominance_info (CDI_DOMINATORS);

  phivn = XCNEWVEC (struct phiprop_d, num_ssa_names);

  if (tree_ssa_phiprop_1 (ENTRY_BLOCK_PTR, phivn, num_ssa_names))
    bsi_commit_edge_inserts ();

  free (phivn);

  return 0;
}

static bool
gate_phiprop (void)
{
  return 1;
}

struct tree_opt_pass pass_phiprop =
{
  "phiprop",				/* name */
  gate_phiprop,				/* gate */
  tree_ssa_phiprop,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_FORWPROP,			/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
  | TODO_ggc_collect
  | TODO_update_ssa
  | TODO_verify_ssa,			/* todo_flags_finish */
  0					/* letter */
};