/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004, 2005, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-dump.h"
#include "langhooks.h"
/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   Note carefully that after propagation the resulting statement
   must still be a proper gimple statement.  Right now we simply
   only perform propagations we know will result in valid gimple
   code.  One day we'll want to generalize this code.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into:

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p;

   Will get turned into:

     ptr2 = &x[index];

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast, and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */
static bool forward_propagate_addr_expr (tree name, tree rhs);

/* Set to true if we delete EH edges during the optimization.  */
static bool cfg_changed;
/* Get the next statement we can propagate NAME's value into skipping
   trivial copies.  Returns the statement that is suitable as a
   propagation destination or NULL_TREE if there is no such one.
   This only returns destinations in a single-use chain.  If FINAL_NAME_P
   is non-NULL, it is set to the ssa name that represents the use.  */
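/* For example (an illustrative sketch, not from the original sources):
   given the single-use chain

       b_2 = a_1;
       c_3 = b_2;
       d_4 = c_3 != 0;

   calling this function on a_1 skips the two trivial copies, returns
   the statement computing d_4 and, if FINAL_NAME_P is non-NULL,
   stores c_3 in it.  */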
static tree
get_prop_dest_stmt (tree name, tree *final_name_p)

    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))

    /* If this is not a trivial copy, we found it.  */
    if (TREE_CODE (use_stmt) != GIMPLE_MODIFY_STMT
        || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) != SSA_NAME
        || GIMPLE_STMT_OPERAND (use_stmt, 1) != name)

    /* Continue searching uses of the copy destination.  */
    name = GIMPLE_STMT_OPERAND (use_stmt, 0);

  *final_name_p = name;
/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL_TREE if there is no such one.
   If SINGLE_USE_ONLY is set, consider only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */
static tree
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)

  bool single_use = true;

  tree def_stmt = SSA_NAME_DEF_STMT (name);

  if (!has_single_use (name))

  /* If name is defined by a PHI node or is the default def, bail out.  */
  if (TREE_CODE (def_stmt) != GIMPLE_MODIFY_STMT)

  /* If name is not a simple copy destination, we found it.  */
  if (TREE_CODE (GIMPLE_STMT_OPERAND (def_stmt, 1)) != SSA_NAME)

      if (!single_use_only && single_use_p)
        *single_use_p = single_use;

  /* Continue searching the def of the copy source name.  */
  name = GIMPLE_STMT_OPERAND (def_stmt, 1);
/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (tree def_stmt)

  tree rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (TREE_SIDE_EFFECTS (rhs))

  /* If the rhs is a load we cannot propagate from it.  */
  if (REFERENCE_CLASS_P (rhs))

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  switch (TREE_CODE_LENGTH (TREE_CODE (rhs)))

      if (TREE_OPERAND (rhs, 2) != NULL_TREE
          && TREE_CODE (TREE_OPERAND (rhs, 2)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (rhs, 2)))

      if (TREE_OPERAND (rhs, 1) != NULL_TREE
          && TREE_CODE (TREE_OPERAND (rhs, 1)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (rhs, 1)))

      if (TREE_OPERAND (rhs, 0) != NULL_TREE
          && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (rhs, 0)))

  /* If the definition is a conversion of a pointer to a function type,
     then we can not apply optimizations as some targets require function
     pointers to be canonicalized and in this case this optimization could
     eliminate a necessary canonicalization.  */
  if ((TREE_CODE (rhs) == NOP_EXPR
       || TREE_CODE (rhs) == CONVERT_EXPR)
      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0)))
      && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (rhs, 0)))) == FUNCTION_TYPE)
/* Remove a copy chain ending in NAME along the defs but not
   further than, or including, UP_TO_STMT.  If NAME was replaced in
   its only use then this function can be used to clean up
   dead stmts.  Returns true if UP_TO_STMT can be removed
   as well, otherwise false.  */

static bool
remove_prop_source_from_use (tree name, tree up_to_stmt)

  block_stmt_iterator bsi;

  do {
    if (!has_zero_uses (name))

    stmt = SSA_NAME_DEF_STMT (name);
    if (stmt == up_to_stmt)

    bsi = bsi_for_stmt (stmt);
    bsi_remove (&bsi, true);

    name = GIMPLE_STMT_OPERAND (stmt, 1);
  } while (TREE_CODE (name) == SSA_NAME);
/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */
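/* For instance (an illustrative sketch, not from the original sources):
   combining EQ_EXPR on "x_1 + 1" and "1" folds to the canonical
   condition "x_1 == 0".  With INVARIANT_ONLY set, only results that
   fold all the way to a constant are returned, e.g. NE_EXPR on "x_1"
   and "x_1" folding to 0 for integral x_1.  */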
static tree
combine_cond_expr_cond (enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  t = fold_binary (code, type, op0, op1);

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t
      || (invariant_only
          && !is_gimple_min_invariant (t)))
/* Propagate from the ssa name definition statements of COND_EXPR
   in statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.  */
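/* For example (an illustrative sketch, not from the original sources):

       x_1 = a_2 < b_3;
       if (x_1 != 0) goto <L1>; else goto <L2>;

   becomes

       if (a_2 < b_3) goto <L1>; else goto <L2>;

   and, if x_1 had no other uses, its defining statement is removed.  */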
static int
forward_propagate_into_cond (tree cond_expr, tree stmt)

  int did_something = 0;

  tree tmp = NULL_TREE;
  tree cond = COND_EXPR_COND (cond_expr);
  tree name, def_stmt, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond)
      && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME)

      /* For comparisons use the first operand, that is likely to
         simplify comparisons against constants.  */
      name = TREE_OPERAND (cond, 0);
      def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
      if (def_stmt != NULL_TREE
          && can_propagate_from (def_stmt))

          tree op1 = TREE_OPERAND (cond, 1);
          rhs0 = GIMPLE_STMT_OPERAND (def_stmt, 1);
          tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
                                        fold_convert (TREE_TYPE (op1), rhs0),
                                        op1, !single_use0_p);

      /* If that wasn't successful, try the second operand.  */
      if (tmp == NULL_TREE
          && TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME)

          tree op0 = TREE_OPERAND (cond, 0);
          name = TREE_OPERAND (cond, 1);
          def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
          if (def_stmt == NULL_TREE
              || !can_propagate_from (def_stmt))
            return did_something;

          rhs1 = GIMPLE_STMT_OPERAND (def_stmt, 1);
          tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
                                        op0,
                                        fold_convert (TREE_TYPE (op0), rhs1),
                                        !single_use1_p);

      /* If that wasn't successful either, try both operands.  */
      if (tmp == NULL_TREE
          && rhs0 != NULL_TREE
          && rhs1 != NULL_TREE)
        tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
                                      rhs0,
                                      fold_convert (TREE_TYPE (rhs0), rhs1),
                                      !(single_use0_p && single_use1_p));

  else if (TREE_CODE (cond) == SSA_NAME)

      name = cond;
      def_stmt = get_prop_source_stmt (name, true, NULL);
      if (def_stmt == NULL_TREE
          || !can_propagate_from (def_stmt))
        return did_something;

      rhs0 = GIMPLE_STMT_OPERAND (def_stmt, 1);
      tmp = combine_cond_expr_cond (NE_EXPR, boolean_type_node, rhs0,
                                    build_int_cst (TREE_TYPE (rhs0), 0),

      if (dump_file && tmp)

          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, cond, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");

      COND_EXPR_COND (cond_expr) = unshare_expr (tmp);

      /* Remove defining statements.  */
      remove_prop_source_from_use (name, NULL);

      if (is_gimple_min_invariant (tmp))
        did_something = 2;
      else if (did_something == 0)
        did_something = 1;

      /* Continue combining.  */

  return did_something;
/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (tree stmt)

  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && tree_purge_dead_eh_edges (bb_for_stmt (stmt)))
    cfg_changed = true;

  if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (GIMPLE_STMT_OPERAND (stmt, 1));

  mark_symbols_for_renaming (stmt);
/* DEF_RHS contains the address of the 0th element in an array.
   USE_STMT uses the type of DEF_RHS to compute the address of an
   arbitrary element within the array.  The (variable) byte offset
   of the element is contained in OFFSET.

   We walk back through the use-def chains of OFFSET to verify that
   it is indeed computing the offset of an element within the array
   and extract the index corresponding to the given byte offset.

   We then try to fold the entire address expression into a form
   &array[index].

   If we are successful, we replace the right hand side of USE_STMT
   with the new address computation.  */
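/* As an illustration (a sketch, not from the original sources), for

       ptr_1 = &a[0];
       offset_2 = i_3 * 8;     (8 being the element size of a)
       ptr2_4 = ptr_1 + offset_2;

   this function rewrites the last statement into

       ptr2_4 = &a[i_3];  */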
static bool
forward_propagate_addr_into_variable_array_index (tree offset,
                                                  tree def_rhs, tree use_stmt)

  /* Try to find an expression for a proper index.  This is either
     a multiplication expression by the element size or just the
     ssa name we came along in case the element size is one.  */
  if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
    index = offset;
  else

      /* Get the offset's defining statement.  */
      offset = SSA_NAME_DEF_STMT (offset);

      /* The statement which defines OFFSET before type conversion
         must be a simple GIMPLE_MODIFY_STMT.  */
      if (TREE_CODE (offset) != GIMPLE_MODIFY_STMT)

      /* The RHS of the statement which defines OFFSET must be a
         multiplication of an object by the size of the array elements.
         This implicitly verifies that the size of the array elements
         is constant.  */
      offset = GIMPLE_STMT_OPERAND (offset, 1);
      if (TREE_CODE (offset) != MULT_EXPR
          || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
          || !simple_cst_equal (TREE_OPERAND (offset, 1),
                                TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))

      /* The first operand to the MULT_EXPR is the desired index.  */
      index = TREE_OPERAND (offset, 0);

  /* Replace the pointer addition with array indexing.  */
  GIMPLE_STMT_OPERAND (use_stmt, 1) = unshare_expr (def_rhs);
  TREE_OPERAND (TREE_OPERAND (GIMPLE_STMT_OPERAND (use_stmt, 1), 0), 1)
    = index;

  /* That should have created gimple, so there is no need to
     record information to undo the propagation.  */
  fold_stmt_inplace (use_stmt);
  tidy_after_forward_propagate_addr (use_stmt);
/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation can
   be not totally successful, yet things may have been changed).  */
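/* A small illustration (a sketch, not from the original sources):
   with DEF_RHS being "&x.f" and USE_STMT being "tmp_2 = *ptr_1"
   where ptr_1 is NAME, propagating gives "tmp_2 = x.f", removing
   both the ADDR_EXPR and the INDIRECT_REF.  */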
static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs, tree use_stmt,
                               bool single_use_p)

  tree lhs, rhs, array_ref;

  /* Strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  lhs = GIMPLE_STMT_OPERAND (use_stmt, 0);
  while (handled_component_p (lhs))
    lhs = TREE_OPERAND (lhs, 0);

  rhs = GIMPLE_STMT_OPERAND (use_stmt, 1);

  /* Now see if the LHS node is an INDIRECT_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == INDIRECT_REF && TREE_OPERAND (lhs, 0) == name)

      /* This should always succeed in creating gimple, so there is
         no need to save enough state to undo this propagation.  */
      TREE_OPERAND (lhs, 0) = unshare_expr (def_rhs);
      fold_stmt_inplace (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);

      /* Continue propagating into the RHS.  */

  /* Trivial cases.  The use statement could be a trivial copy or a
     useless conversion.  Recurse to the uses of the lhs as copyprop does
     not copy through different variant pointers and FRE does not catch
     all useless conversions.  Treat the case of a single-use name and
     a conversion to def_rhs type separately, though.  */
  else if (TREE_CODE (lhs) == SSA_NAME
           && (TREE_CODE (rhs) == NOP_EXPR
               || TREE_CODE (rhs) == CONVERT_EXPR)
           && TREE_TYPE (rhs) == TREE_TYPE (def_rhs)

      GIMPLE_STMT_OPERAND (use_stmt, 1) = unshare_expr (def_rhs);

  else if ((TREE_CODE (lhs) == SSA_NAME
           || ((TREE_CODE (rhs) == NOP_EXPR
                || TREE_CODE (rhs) == CONVERT_EXPR)
               && useless_type_conversion_p (TREE_TYPE (rhs),
                                             TREE_TYPE (def_rhs))))
    return forward_propagate_addr_expr (lhs, def_rhs);

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  while (handled_component_p (rhs)
         || TREE_CODE (rhs) == ADDR_EXPR)
    rhs = TREE_OPERAND (rhs, 0);

  /* Now see if the RHS node is an INDIRECT_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == INDIRECT_REF && TREE_OPERAND (rhs, 0) == name)

      /* This should always succeed in creating gimple, so there is
         no need to save enough state to undo this propagation.  */
      TREE_OPERAND (rhs, 0) = unshare_expr (def_rhs);
      fold_stmt_inplace (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if (TREE_CODE (array_ref) != ARRAY_REF
      || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
      || !integer_zerop (TREE_OPERAND (array_ref, 1)))

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing we can do.  */
  if (TREE_CODE (rhs) != POINTER_PLUS_EXPR)

  /* Try to optimize &x[0] p+ C where C is a multiple of the size
     of the elements in X into &x[C/element size].  */
  if (TREE_OPERAND (rhs, 0) == name
      && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)

      tree orig = unshare_expr (rhs);
      TREE_OPERAND (rhs, 0) = unshare_expr (def_rhs);

      /* If folding succeeds, then we have just exposed new variables
         in USE_STMT which will need to be renamed.  If folding fails,
         then we need to put everything back the way it was.  */
      if (fold_stmt_inplace (use_stmt))

          tidy_after_forward_propagate_addr (use_stmt);

          GIMPLE_STMT_OPERAND (use_stmt, 1) = orig;
          update_stmt (use_stmt);

  /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
     converting a multiplication of an index by the size of the
     array elements, then the result is converted into the proper
     type for the arithmetic.  */
  if (TREE_OPERAND (rhs, 0) == name
      && TREE_CODE (TREE_OPERAND (rhs, 1)) == SSA_NAME
      /* Avoid problems with IVopts creating PLUS_EXPRs with a
         different type than their operands.  */
      && useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (name)))

      res = forward_propagate_addr_into_variable_array_index (TREE_OPERAND (rhs, 1),
                                                               def_rhs, use_stmt);
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.
   Returns true if all uses have been propagated into.  */
static bool
forward_propagate_addr_expr (tree name, tree rhs)

  int stmt_loop_depth = bb_for_stmt (SSA_NAME_DEF_STMT (name))->loop_depth;
  imm_use_iterator iter;

  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (TREE_CODE (use_stmt) != GIMPLE_MODIFY_STMT)

      /* If the use is in a deeper loop nest, then we do not want
         to propagate the ADDR_EXPR into the loop as that is likely
         adding expression evaluations into the loop.  */
      if (bb_for_stmt (use_stmt)->loop_depth > stmt_loop_depth)

      push_stmt_changes (&use_stmt);

      result = forward_propagate_addr_expr_1 (name, rhs, use_stmt,
                                              single_use_p);

      pop_stmt_changes (&use_stmt);

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = GIMPLE_STMT_OPERAND (use_stmt, 1);
      if (result
          && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) == SSA_NAME
          && (TREE_CODE (use_rhs) == SSA_NAME
              || ((TREE_CODE (use_rhs) == NOP_EXPR
                   || TREE_CODE (use_rhs) == CONVERT_EXPR)
                  && TREE_CODE (TREE_OPERAND (use_rhs, 0)) == SSA_NAME)))

          block_stmt_iterator bsi = bsi_for_stmt (use_stmt);
          release_defs (use_stmt);
          bsi_remove (&bsi, true);
/* Forward propagate the comparison COND defined in STMT like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  */
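/* For instance (an illustrative sketch, not from the original sources):

       cond_1 = x_2 < y_3;
       a_4 = !cond_1;

   becomes

       a_4 = x_2 >= y_3;

   (for operands where the inversion is valid, e.g. no NaNs), after
   which cond_1's definition can be removed if it has no other uses.  */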
static bool
forward_propagate_comparison (tree cond, tree stmt)

  tree name = GIMPLE_STMT_OPERAND (stmt, 0);
  tree use_stmt, tmp = NULL_TREE;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (cond, 0)))
      || (TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (cond, 1))))

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (use_stmt == NULL_TREE)

  /* Conversion of the condition result to another integral type.  */
  if (TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
      && (TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == CONVERT_EXPR
          || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == NOP_EXPR
          || COMPARISON_CLASS_P (GIMPLE_STMT_OPERAND (use_stmt, 1))
          || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == TRUTH_NOT_EXPR)
      && INTEGRAL_TYPE_P (TREE_TYPE (GIMPLE_STMT_OPERAND (use_stmt, 0))))

      tree lhs = GIMPLE_STMT_OPERAND (use_stmt, 0);
      tree rhs = GIMPLE_STMT_OPERAND (use_stmt, 1);

      /* We can propagate the condition into a conversion.  */
      if (TREE_CODE (rhs) == CONVERT_EXPR
          || TREE_CODE (rhs) == NOP_EXPR)

          /* Avoid using fold here as that may create a COND_EXPR with
             non-boolean condition as canonical form.  */
          tmp = build2 (TREE_CODE (cond), TREE_TYPE (lhs),
                        TREE_OPERAND (cond, 0), TREE_OPERAND (cond, 1));

      /* We can propagate the condition into X op CST where op
         is EQ_EXPR or NE_EXPR and CST is either one or zero.  */
      else if (COMPARISON_CLASS_P (rhs)
               && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
               && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)

          enum tree_code code = TREE_CODE (rhs);
          tree cst = TREE_OPERAND (rhs, 1);

          tmp = combine_cond_expr_cond (code, TREE_TYPE (lhs),
                                        fold_convert (TREE_TYPE (cst), cond),
          if (tmp == NULL_TREE)

      /* We can propagate the condition into a statement that
         computes the logical negation of the comparison result.  */
      else if (TREE_CODE (rhs) == TRUTH_NOT_EXPR)

          tree type = TREE_TYPE (TREE_OPERAND (cond, 0));
          bool nans = HONOR_NANS (TYPE_MODE (type));

          code = invert_tree_comparison (TREE_CODE (cond), nans);
          if (code == ERROR_MARK)

          tmp = build2 (code, TREE_TYPE (lhs), TREE_OPERAND (cond, 0),
                        TREE_OPERAND (cond, 1));

      GIMPLE_STMT_OPERAND (use_stmt, 1) = unshare_expr (tmp);
      update_stmt (use_stmt);

      /* Remove defining statements.  */
      remove_prop_source_from_use (name, stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))

          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, rhs, dump_flags);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, dump_flags);
          fprintf (dump_file, "'\n");
/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
   If so, we can change STMT into lhs = y which can later be copy
   propagated.  Similarly for negation.

   This could trivially be formulated as a forward propagation
   to immediate uses.  However, we already had an implementation
   from DOM which used backward propagation via the use-def links.

   It turns out that backward propagation is actually faster as
   there's less work to do for each NOT/NEG expression we find.
   Backwards propagation needs to look at the statement in a single
   backlink.  Forward propagation needs to look at potentially more
   than one forward link.  */
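/* A small example of the transformation (a sketch, not from the
   original sources):

       x_2 = ~y_1;
       lhs_3 = ~x_2;

   is rewritten into

       lhs_3 = y_1;

   and likewise for NEGATE_EXPR.  */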
static void
simplify_not_neg_expr (tree stmt)

  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
  tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));

  /* See if the RHS_DEF_STMT has the same form as our statement.  */
  if (TREE_CODE (rhs_def_stmt) == GIMPLE_MODIFY_STMT
      && TREE_CODE (GIMPLE_STMT_OPERAND (rhs_def_stmt, 1)) == TREE_CODE (rhs))

      tree rhs_def_operand =
        TREE_OPERAND (GIMPLE_STMT_OPERAND (rhs_def_stmt, 1), 0);

      /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
      if (TREE_CODE (rhs_def_operand) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))

          GIMPLE_STMT_OPERAND (stmt, 1) = rhs_def_operand;
/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */
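/* Typically (an illustrative sketch, not from the original sources)
   this removes a widening cast of the switch condition:

       D.1_2 = (int) c_1;     (c_1 of type unsigned char)
       switch (D.1_2) ...

   becomes

       switch (c_1) ...

   since the conversion from unsigned char to int preserves the value.  */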
static void
simplify_switch_expr (tree stmt)

  tree cond = SWITCH_COND (stmt);

  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  if (TREE_CODE (cond) == SSA_NAME)

      def = SSA_NAME_DEF_STMT (cond);
      if (TREE_CODE (def) == GIMPLE_MODIFY_STMT)

          def = GIMPLE_STMT_OPERAND (def, 1);
          if (TREE_CODE (def) == NOP_EXPR)

              def = TREE_OPERAND (def, 0);

#ifdef ENABLE_CHECKING
              /* ??? Why was Jeff testing this?  We are gimple...  */
              gcc_assert (is_gimple_val (def));
#endif

              to = TREE_TYPE (cond);
              ti = TREE_TYPE (def);

              /* If we have an extension that preserves value, then we
                 can copy the source value into the switch.  */

              need_precision = TYPE_PRECISION (ti);

              if (! INTEGRAL_TYPE_P (ti))
              else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
              else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
              if (TYPE_PRECISION (to) < need_precision)

                  SWITCH_COND (stmt) = def;
/* Main entry point for the forward propagation optimizer.  */

static unsigned int
tree_ssa_forward_propagate_single_use_vars (void)

  unsigned int todoflags = 0;

      block_stmt_iterator bsi;

      /* Note we update BSI within the loop as necessary.  */
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )

          tree stmt = bsi_stmt (bsi);

          /* If this statement sets an SSA_NAME to an address,
             try to propagate the address into the uses of the SSA_NAME.  */
          if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)

              tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
              tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);

              if (TREE_CODE (lhs) != SSA_NAME)

              if (TREE_CODE (rhs) == ADDR_EXPR
                  /* We can also disregard changes in const qualifiers for
                     the dereferenced value.  */
                  || ((TREE_CODE (rhs) == NOP_EXPR
                       || TREE_CODE (rhs) == CONVERT_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR
                      && POINTER_TYPE_P (TREE_TYPE (rhs))
                      /* But do not propagate changes in volatileness.  */
                      && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (rhs)))
                          == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (rhs, 0)))))
                      && types_compatible_p (TREE_TYPE (TREE_TYPE (TREE_OPERAND (rhs, 0))),
                                             TREE_TYPE (TREE_TYPE (rhs)))))

                  if (forward_propagate_addr_expr (lhs, rhs))

                      todoflags |= TODO_remove_unused_locals;
                      bsi_remove (&bsi, true);

              else if ((TREE_CODE (rhs) == BIT_NOT_EXPR
                        || TREE_CODE (rhs) == NEGATE_EXPR)
                       && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)

                  simplify_not_neg_expr (stmt);

              else if (TREE_CODE (rhs) == COND_EXPR)

                  fold_defer_overflow_warnings ();
                  did_something = forward_propagate_into_cond (rhs, stmt);
                  if (did_something == 2)
                    cfg_changed = true;
                  fold_undefer_overflow_warnings (!TREE_NO_WARNING (rhs)
                                                  && did_something, stmt,
                                                  WARN_STRICT_OVERFLOW_CONDITIONAL);

              else if (COMPARISON_CLASS_P (rhs))

                  if (forward_propagate_comparison (rhs, stmt))

                      release_defs (stmt);
                      todoflags |= TODO_remove_unused_locals;
                      bsi_remove (&bsi, true);

          else if (TREE_CODE (stmt) == SWITCH_EXPR)

              simplify_switch_expr (stmt);

          else if (TREE_CODE (stmt) == COND_EXPR)

              fold_defer_overflow_warnings ();
              did_something = forward_propagate_into_cond (stmt, stmt);
              if (did_something == 2)
                cfg_changed = true;
              fold_undefer_overflow_warnings (did_something, stmt,
                                              WARN_STRICT_OVERFLOW_CONDITIONAL);

      todoflags |= TODO_cleanup_cfg;
static bool
gate_forwprop (void)
struct tree_opt_pass pass_forwprop =
{
  "forwprop",				/* name */
  gate_forwprop,			/* gate */
  tree_ssa_forward_propagate_single_use_vars,	/* execute */
  0,					/* static_pass_number */
  TV_TREE_FORWPROP,			/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  | TODO_verify_ssa,			/* todo_flags_finish */
/* Structure to keep track of the value of a dereferenced PHI result
   and the set of virtual operands used for that dereference.  */

struct phiprop_d
{
  tree value;
  tree vop_stmt;
};
/* Verify if the value recorded for NAME in PHIVN is still valid at
   the start of basic block BB.  */

static bool
phivn_valid_p (struct phiprop_d *phivn, tree name, basic_block bb)

  tree vop_stmt = phivn[SSA_NAME_VERSION (name)].vop_stmt;

  /* The def stmts of all virtual uses need to be post-dominated
     by bb.  */
  FOR_EACH_SSA_TREE_OPERAND (vuse, vop_stmt, ui, SSA_OP_VUSE)

      imm_use_iterator ui2;

      FOR_EACH_IMM_USE_STMT (use_stmt, ui2, vuse)

          /* If BB does not dominate a VDEF, the value is invalid.  */
          if (((TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
                && !ZERO_SSA_OPERANDS (use_stmt, SSA_OP_VDEF))
               || TREE_CODE (use_stmt) == PHI_NODE)
              && !dominated_by_p (CDI_DOMINATORS, bb_for_stmt (use_stmt), bb))

              BREAK_FROM_IMM_USE_STMT (ui2);
/* Insert a new phi node for the dereference of PHI at basic_block
   BB with the virtual operands from USE_STMT.  */

static tree
phiprop_insert_phi (basic_block bb, tree phi, tree use_stmt,
                    struct phiprop_d *phivn, size_t n)

  /* Build a new PHI node to replace the definition of
     the indirect reference lhs.  */
  res = GIMPLE_STMT_OPERAND (use_stmt, 0);
  SSA_NAME_DEF_STMT (res) = new_phi = create_phi_node (res, bb);

  /* Add PHI arguments for each edge inserting loads of the
     addressable operands.  */
  FOR_EACH_EDGE (e, ei, bb->preds)

      tree old_arg, new_var, tmp;

      old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
      while (TREE_CODE (old_arg) == SSA_NAME
             && (SSA_NAME_VERSION (old_arg) >= n
                 || phivn[SSA_NAME_VERSION (old_arg)].value == NULL_TREE))

          tree def_stmt = SSA_NAME_DEF_STMT (old_arg);
          old_arg = GIMPLE_STMT_OPERAND (def_stmt, 1);

      if (TREE_CODE (old_arg) == SSA_NAME)
        /* Reuse a formerly created dereference.  */
        new_var = phivn[SSA_NAME_VERSION (old_arg)].value;
      else

          old_arg = TREE_OPERAND (old_arg, 0);
          new_var = create_tmp_var (TREE_TYPE (old_arg), NULL);
          tmp = build2 (GIMPLE_MODIFY_STMT, void_type_node,
                        NULL_TREE, unshare_expr (old_arg));
          if (TREE_CODE (TREE_TYPE (old_arg)) == COMPLEX_TYPE
              || TREE_CODE (TREE_TYPE (old_arg)) == VECTOR_TYPE)
            DECL_GIMPLE_REG_P (new_var) = 1;
          add_referenced_var (new_var);
          new_var = make_ssa_name (new_var, tmp);
          GIMPLE_STMT_OPERAND (tmp, 0) = new_var;

          bsi_insert_on_edge (e, tmp);

          mark_symbols_for_renaming (tmp);

      add_phi_arg (new_phi, new_var, e);

  update_stmt (new_phi);
/* Propagate between the phi node arguments of PHI in BB and phi result
   users.  For now this matches
        # p_2 = PHI <&x, &y>
      <Lx>:;
        p_3 = p_2;
        z_2 = *p_3;
   and converts it to
        # z_2 = PHI <x, y>
      <Lx>:;
   Returns true if a transformation was done and edge insertions
   need to be committed.  Global data PHIVN and N are used to track
   past transformation results.  We need to be especially careful here
   with aliasing issues as we are moving memory reads.  */
static bool
propagate_with_phi (basic_block bb, tree phi, struct phiprop_d *phivn, size_t n)

  tree ptr = PHI_RESULT (phi);
  tree use_stmt, res = NULL_TREE;
  block_stmt_iterator bsi;
  imm_use_iterator ui;
  use_operand_p arg_p, use;

  if (MTAG_P (SSA_NAME_VAR (ptr))
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || !is_gimple_reg_type (TREE_TYPE (TREE_TYPE (ptr))))

  /* Check if we can "cheaply" dereference all phi arguments.  */
  FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_USE)

      tree arg = USE_FROM_PTR (arg_p);
      /* Walk the ssa chain until we reach a ssa name we already
         created a value for or we reach a definition of the form
         ssa_name_n = &var;  */
      while (TREE_CODE (arg) == SSA_NAME
             && !SSA_NAME_IS_DEFAULT_DEF (arg)
             && (SSA_NAME_VERSION (arg) >= n
                 || phivn[SSA_NAME_VERSION (arg)].value == NULL_TREE))

          tree def_stmt = SSA_NAME_DEF_STMT (arg);
          if (TREE_CODE (def_stmt) != GIMPLE_MODIFY_STMT)

          arg = GIMPLE_STMT_OPERAND (def_stmt, 1);

      if ((TREE_CODE (arg) != ADDR_EXPR
           /* Avoid having to decay *&a to a[0] later.  */
           || !is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (arg, 0))))
          && !(TREE_CODE (arg) == SSA_NAME
               && phivn[SSA_NAME_VERSION (arg)].value != NULL_TREE
               && phivn_valid_p (phivn, arg, bb)))

  /* Find a dereferencing use.  First follow (single use) ssa
     copy chains for ptr.  */
  while (single_imm_use (ptr, &use, &use_stmt)
         && TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
         && GIMPLE_STMT_OPERAND (use_stmt, 1) == ptr
         && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) == SSA_NAME)
    ptr = GIMPLE_STMT_OPERAND (use_stmt, 0);

  /* Replace the first dereference of *ptr if there is one and if we
     can move the loads to the place of the ptr phi node.  */
  phi_inserted = false;
  FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)

      /* Check whether this is a load of *ptr.  */
      if (!(TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
            && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) == SSA_NAME
            && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == INDIRECT_REF
            && TREE_OPERAND (GIMPLE_STMT_OPERAND (use_stmt, 1), 0) == ptr
            /* We cannot replace a load that may throw or is volatile.  */
            && !tree_can_throw_internal (use_stmt)))

      /* Check if we can move the loads.  The def stmts of all virtual uses
         need to be post-dominated by bb.  */
      FOR_EACH_SSA_TREE_OPERAND (vuse, use_stmt, ui2, SSA_OP_VUSE)

          tree def_stmt = SSA_NAME_DEF_STMT (vuse);
          if (!SSA_NAME_IS_DEFAULT_DEF (vuse)
              && (bb_for_stmt (def_stmt) == bb
                  || !dominated_by_p (CDI_DOMINATORS,
                                      bb, bb_for_stmt (def_stmt))))

      /* Found a proper dereference.  Insert a phi node if this
         is the first load transformation.  */
          res = phiprop_insert_phi (bb, phi, use_stmt, phivn, n);

          /* Remember the value we created for *ptr.  */
          phivn[SSA_NAME_VERSION (ptr)].value = res;
          phivn[SSA_NAME_VERSION (ptr)].vop_stmt = use_stmt;

          /* Remove old stmt.  The phi is taken care of by DCE, if we
             want to delete it here we also have to delete all intermediate
             copies.  */
          bsi = bsi_for_stmt (use_stmt);
          bsi_remove (&bsi, 0);

          phi_inserted = true;

      /* Further replacements are easy, just make a copy out of the
         load.  */
          GIMPLE_STMT_OPERAND (use_stmt, 1) = res;
          update_stmt (use_stmt);

      /* Continue searching for a proper dereference.  */

  return phi_inserted;
/* Helper walking the dominator tree starting from BB and processing
   phi nodes with global data PHIVN and N.  */

static bool
tree_ssa_phiprop_1 (basic_block bb, struct phiprop_d *phivn, size_t n)

  bool did_something = false;

  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
    did_something |= propagate_with_phi (bb, phi, phivn, n);

  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    did_something |= tree_ssa_phiprop_1 (son, phivn, n);

  return did_something;
/* Main entry for phiprop pass.  */

static unsigned int
tree_ssa_phiprop (void)

  struct phiprop_d *phivn;

  calculate_dominance_info (CDI_DOMINATORS);

  phivn = XCNEWVEC (struct phiprop_d, num_ssa_names);

  if (tree_ssa_phiprop_1 (ENTRY_BLOCK_PTR, phivn, num_ssa_names))
    bsi_commit_edge_inserts ();
struct tree_opt_pass pass_phiprop =
{
  "phiprop",				/* name */
  gate_phiprop,				/* gate */
  tree_ssa_phiprop,			/* execute */
  0,					/* static_pass_number */
  TV_TREE_FORWPROP,			/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  | TODO_verify_ssa,			/* todo_flags_finish */