/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004, 2005, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "timevar.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-dump.h"
#include "langhooks.h"
/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   Note carefully that after propagation the resulting statement
   must still be a proper gimple statement.  Right now we only
   perform propagations we know will result in valid gimple
   code.  One day we'll want to generalize this code.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     x = a COND b;
     if (x) goto ... else goto ...

   Will be transformed into:

     if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     x = a + c1;
     if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     x = !a;
     if (x) goto ... else goto ...

   Will be transformed into:

     if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     x = (typecast) a;
     if (x) goto ... else goto ...

   Will be transformed into:

     if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into:

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p;

   Will get turned into:

     ptr2 = &x[index];

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */
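
/* As a concrete source-level illustration of the first class above
   (a hypothetical example, not taken from any testcase): for

     int f (int a, int b)
     {
       int t = (a < b);
       if (t)
         return 1;
       return 0;
     }

   the single-use temporary t is forwarded into the branch, so the
   GIMPLE condition tests a < b directly and t's definition becomes
   dead code.  */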
static bool forward_propagate_addr_expr (tree name, tree rhs);

/* Set to true if we delete EH edges during the optimization.  */
static bool cfg_changed;
/* Get the next statement we can propagate NAME's value into, skipping
   trivial copies.  Returns the statement that is suitable as a
   propagation destination or NULL_TREE if there is no such one.
   This only returns destinations in a single-use chain.  If FINAL_NAME_P
   is non-NULL, the ssa name that represents the use is written to it.  */
get_prop_dest_stmt (tree name, tree *final_name_p)

    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))

    /* If this is not a trivial copy, we found it.  */
    if (TREE_CODE (use_stmt) != GIMPLE_MODIFY_STMT
        || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) != SSA_NAME
        || GIMPLE_STMT_OPERAND (use_stmt, 1) != name)

    /* Continue searching uses of the copy destination.  */
    name = GIMPLE_STMT_OPERAND (use_stmt, 0);

  *final_name_p = name;
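
/* For example, given a (hypothetical) single-use GIMPLE copy chain

     a_1 = ...;
     b_2 = a_1;
     c_3 = b_2;
     if (c_3 != 0) goto <L1>; else goto <L2>;

   get_prop_dest_stmt (a_1, &final) skips the two trivial copies and
   returns the COND_EXPR statement, writing c_3 to *final.  */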
/* Get the statement we can propagate from into NAME, skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL_TREE if there is no such one.
   If SINGLE_USE_ONLY is set, only sources with a single-use
   chain up to NAME are considered.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)

  bool single_use = true;

    tree def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (TREE_CODE (def_stmt) != GIMPLE_MODIFY_STMT)

    /* If name is not a simple copy destination, we found it.  */
    if (TREE_CODE (GIMPLE_STMT_OPERAND (def_stmt, 1)) != SSA_NAME)

        if (!single_use_only && single_use_p)
          *single_use_p = single_use;

    /* Continue searching the def of the copy source name.  */
    name = GIMPLE_STMT_OPERAND (def_stmt, 1);
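
/* Looking in the other direction through the same kind of
   (hypothetical) chain, get_prop_source_stmt (c_3, false, &single_use_p)
   walks the defs through the trivial copies b_2 and a_1 and returns the
   statement that defines a_1; single_use_p reports whether every link
   in that chain had exactly one use.  */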
/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

can_propagate_from (tree def_stmt)

  tree rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (TREE_SIDE_EFFECTS (rhs))

  /* If the rhs is a load we cannot propagate from it.  */
  if (REFERENCE_CLASS_P (rhs))

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  switch (TREE_CODE_LENGTH (TREE_CODE (rhs)))

      if (TREE_OPERAND (rhs, 2) != NULL_TREE
          && TREE_CODE (TREE_OPERAND (rhs, 2)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (rhs, 2)))

      if (TREE_OPERAND (rhs, 1) != NULL_TREE
          && TREE_CODE (TREE_OPERAND (rhs, 1)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (rhs, 1)))

      if (TREE_OPERAND (rhs, 0) != NULL_TREE
          && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (rhs, 0)))
  /* If the definition is a conversion of a pointer to a function type,
     then we cannot apply optimizations as some targets require function
     pointers to be canonicalized and in this case this optimization could
     eliminate a necessary canonicalization.  */
  if ((TREE_CODE (rhs) == NOP_EXPR
       || TREE_CODE (rhs) == CONVERT_EXPR)
      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0)))
      && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (rhs, 0)))) == FUNCTION_TYPE)
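
/* A (hypothetical) case the last check rejects:

     fn2_2 = (void (*) (void)) fn1_1;

   where fn1_1 has pointer-to-function type.  The conversion must be
   kept, since some targets canonicalize function pointers and
   propagating the unconverted value could drop that canonicalization.  */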
/* Remove a copy chain ending in NAME along the defs, stopping before
   (and not including) UP_TO_STMT.  If NAME was replaced in
   its only use then this function can be used to clean up
   dead stmts.  Returns true if UP_TO_STMT can be removed
   as well, otherwise false.  */
remove_prop_source_from_use (tree name, tree up_to_stmt)

  block_stmt_iterator bsi;

    if (!has_zero_uses (name))

    stmt = SSA_NAME_DEF_STMT (name);
    if (stmt == up_to_stmt)

    bsi = bsi_for_stmt (stmt);
    bsi_remove (&bsi, true);

    name = GIMPLE_STMT_OPERAND (stmt, 1);
  } while (TREE_CODE (name) == SSA_NAME);
/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */
combine_cond_expr_cond (enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  t = fold_binary (code, type, op0, op1);

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
      && !is_gimple_min_invariant (t)))
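
/* For instance (hypothetical operands), a call such as

     combine_cond_expr_cond (EQ_EXPR, boolean_type_node,
                             a_1 + 1, build_int_cst (integer_type_node, 3),
                             false)

   lets fold_binary rewrite the comparison to a_1 == 2 (which is why the
   callers defer overflow warnings around this), and the result is then
   canonicalized for use as a COND_EXPR condition.  With INVARIANT_ONLY
   set, the same call would return NULL_TREE because a_1 == 2 is not a
   gimple minimum invariant.  */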
/* Propagate from the ssa name definition statements of COND_EXPR
   in statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.  */

forward_propagate_into_cond (tree cond_expr, tree stmt)

  int did_something = 0;

  tree tmp = NULL_TREE;
  tree cond = COND_EXPR_COND (cond_expr);
  tree name, def_stmt, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond)
      && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME)

      /* For comparisons use the first operand, that is likely to
         simplify comparisons against constants.  */
      name = TREE_OPERAND (cond, 0);
      def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
      if (def_stmt != NULL_TREE
          && can_propagate_from (def_stmt))

          tree op1 = TREE_OPERAND (cond, 1);
          rhs0 = GIMPLE_STMT_OPERAND (def_stmt, 1);
          tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
                                        fold_convert (TREE_TYPE (op1), rhs0),
                                        op1, !single_use0_p);

      /* If that wasn't successful, try the second operand.  */
          && TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME)

          tree op0 = TREE_OPERAND (cond, 0);
          name = TREE_OPERAND (cond, 1);
          def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
          if (def_stmt == NULL_TREE
              || !can_propagate_from (def_stmt))
            return did_something;

          rhs1 = GIMPLE_STMT_OPERAND (def_stmt, 1);
          tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
                                        fold_convert (TREE_TYPE (op0), rhs1),

      /* If that wasn't successful either, try both operands.  */
          && rhs1 != NULL_TREE)
        tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
                                      fold_convert (TREE_TYPE (rhs0), rhs1),
                                      !(single_use0_p && single_use1_p));

  else if (TREE_CODE (cond) == SSA_NAME)

      def_stmt = get_prop_source_stmt (name, true, NULL);
      if (def_stmt == NULL_TREE
          || !can_propagate_from (def_stmt))
        return did_something;

      rhs0 = GIMPLE_STMT_OPERAND (def_stmt, 1);
      tmp = combine_cond_expr_cond (NE_EXPR, boolean_type_node, rhs0,
                                    build_int_cst (TREE_TYPE (rhs0), 0),

  if (dump_file && tmp)

      fprintf (dump_file, " Replaced '");
      print_generic_expr (dump_file, cond, 0);
      fprintf (dump_file, "' with '");
      print_generic_expr (dump_file, tmp, 0);
      fprintf (dump_file, "'\n");

  COND_EXPR_COND (cond_expr) = unshare_expr (tmp);

  /* Remove defining statements.  */
  remove_prop_source_from_use (name, NULL);

  if (is_gimple_min_invariant (tmp))
  else if (did_something == 0)

  /* Continue combining.  */

  return did_something;
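
/* A small GIMPLE example of the above (hypothetical SSA names):

     x_1 = a_2 + 1;
     if (x_1 == 3) goto <L1>; else goto <L2>;

   Propagating x_1's definition gives the condition a_2 + 1 == 3, which
   combine_cond_expr_cond may fold to a_2 == 2; the COND_EXPR is updated
   and the now dead definition of x_1 is cleaned up via
   remove_prop_source_from_use.  */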
/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

tidy_after_forward_propagate_addr (tree stmt)

  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && tree_purge_dead_eh_edges (bb_for_stmt (stmt)))

  if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (GIMPLE_STMT_OPERAND (stmt, 1));

  mark_symbols_for_renaming (stmt);
/* DEF_RHS contains the address of the 0th element in an array.
   USE_STMT uses the type of DEF_RHS to compute the address of an
   arbitrary element within the array.  The (variable) byte offset
   of the element is contained in OFFSET.

   We walk back through the use-def chains of OFFSET to verify that
   it is indeed computing the offset of an element within the array
   and extract the index corresponding to the given byte offset.

   We then try to fold the entire address expression into a form
   &x[index].

   If we are successful, we replace the right hand side of USE_STMT
   with the new address computation.  */
forward_propagate_addr_into_variable_array_index (tree offset,
                                                  tree def_rhs, tree use_stmt)

  /* Try to find an expression for a proper index.  This is either
     a multiplication expression by the element size or just the
     ssa name we came along in case the element size is one.  */
  if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))

      /* Get the offset's defining statement.  */
      offset = SSA_NAME_DEF_STMT (offset);

      /* The statement which defines OFFSET before type conversion
         must be a simple GIMPLE_MODIFY_STMT.  */
      if (TREE_CODE (offset) != GIMPLE_MODIFY_STMT)
      /* The RHS of the statement which defines OFFSET must be a
         multiplication of an object by the size of the array elements.
         This implicitly verifies that the size of the array elements
         is constant.  */
      offset = GIMPLE_STMT_OPERAND (offset, 1);
      if (TREE_CODE (offset) != MULT_EXPR
          || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
          || !simple_cst_equal (TREE_OPERAND (offset, 1),
                                TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))

      /* The first operand to the MULT_EXPR is the desired index.  */
      index = TREE_OPERAND (offset, 0);

  /* Replace the pointer addition with array indexing.  */
  GIMPLE_STMT_OPERAND (use_stmt, 1) = unshare_expr (def_rhs);
  TREE_OPERAND (TREE_OPERAND (GIMPLE_STMT_OPERAND (use_stmt, 1), 0), 1)

  /* That should have created gimple, so there is no need to
     record information to undo the propagation.  */
  fold_stmt_inplace (use_stmt);
  tidy_after_forward_propagate_addr (use_stmt);
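
/* For example (hypothetical GIMPLE, 4-byte int elements):

     offset_2 = i_1 * 4;
     ptr2_3 = ptr_0 p+ offset_2;        where ptr_0 is defined as &a[0]

   the walk above matches the multiplication by the element size,
   recovers the index i_1 and rewrites the use statement as
   ptr2_3 = &a[i_1].  */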
/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation may
   not be totally successful, yet things may still have changed).  */
forward_propagate_addr_expr_1 (tree name, tree def_rhs, tree use_stmt,

  tree lhs, rhs, array_ref;

  /* Strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  lhs = GIMPLE_STMT_OPERAND (use_stmt, 0);
  while (handled_component_p (lhs))
    lhs = TREE_OPERAND (lhs, 0);

  rhs = GIMPLE_STMT_OPERAND (use_stmt, 1);

  /* Now see if the LHS node is an INDIRECT_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == INDIRECT_REF && TREE_OPERAND (lhs, 0) == name)

      /* This should always succeed in creating gimple, so there is
         no need to save enough state to undo this propagation.  */
      TREE_OPERAND (lhs, 0) = unshare_expr (def_rhs);
      fold_stmt_inplace (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);

      /* Continue propagating into the RHS.  */
  /* Trivial cases.  The use statement could be a trivial copy or a
     useless conversion.  Recurse to the uses of the lhs as copyprop does
     not copy through different variant pointers and FRE does not catch
     all useless conversions.  Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  else if (TREE_CODE (lhs) == SSA_NAME
           && (TREE_CODE (rhs) == NOP_EXPR
               || TREE_CODE (rhs) == CONVERT_EXPR)
           && TREE_TYPE (rhs) == TREE_TYPE (def_rhs)

      GIMPLE_STMT_OPERAND (use_stmt, 1) = unshare_expr (def_rhs);

  else if ((TREE_CODE (lhs) == SSA_NAME
           || ((TREE_CODE (rhs) == NOP_EXPR
                || TREE_CODE (rhs) == CONVERT_EXPR)
               && useless_type_conversion_p (TREE_TYPE (rhs),
                                             TREE_TYPE (def_rhs))))
    return forward_propagate_addr_expr (lhs, def_rhs);

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  while (handled_component_p (rhs)
         || TREE_CODE (rhs) == ADDR_EXPR)
    rhs = TREE_OPERAND (rhs, 0);

  /* Now see if the RHS node is an INDIRECT_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == INDIRECT_REF && TREE_OPERAND (rhs, 0) == name)

      /* This should always succeed in creating gimple, so there is
         no need to save enough state to undo this propagation.  */
      TREE_OPERAND (rhs, 0) = unshare_expr (def_rhs);
      fold_stmt_inplace (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if (TREE_CODE (array_ref) != ARRAY_REF
      || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
      || !integer_zerop (TREE_OPERAND (array_ref, 1)))
  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (TREE_CODE (rhs) != POINTER_PLUS_EXPR)

  /* Try to optimize &x[0] p+ C where C is a multiple of the size
     of the elements in X into &x[C/element size].  */
  if (TREE_OPERAND (rhs, 0) == name
      && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)

      tree orig = unshare_expr (rhs);
      TREE_OPERAND (rhs, 0) = unshare_expr (def_rhs);

      /* If folding succeeds, then we have just exposed new variables
         in USE_STMT which will need to be renamed.  If folding fails,
         then we need to put everything back the way it was.  */
      if (fold_stmt_inplace (use_stmt))

          tidy_after_forward_propagate_addr (use_stmt);

          GIMPLE_STMT_OPERAND (use_stmt, 1) = orig;
          update_stmt (use_stmt);

  /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
     converting a multiplication of an index by the size of the
     array elements, then the result is converted into the proper
     type for the arithmetic.  */
  if (TREE_OPERAND (rhs, 0) == name
      && TREE_CODE (TREE_OPERAND (rhs, 1)) == SSA_NAME
      /* Avoid problems with IVopts creating PLUS_EXPRs with a
         different type than their operands.  */
      && useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (name)))

      res = forward_propagate_addr_into_variable_array_index (TREE_OPERAND (rhs, 1),
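
/* For instance (hypothetical GIMPLE), if ptr_1 is defined as
   ptr_1 = &a[0] for an array of 4-byte ints, then

     val_2 = *ptr_1;          becomes   val_2 = a[0];
     ptr2_3 = ptr_1 p+ 8;     becomes   ptr2_3 = &a[2];

   via the INDIRECT_REF and constant-offset cases handled above.  */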
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.
   Returns true if all uses have been propagated into.  */
forward_propagate_addr_expr (tree name, tree rhs)

  int stmt_loop_depth = bb_for_stmt (SSA_NAME_DEF_STMT (name))->loop_depth;
  imm_use_iterator iter;

  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (TREE_CODE (use_stmt) != GIMPLE_MODIFY_STMT)

      /* If the use is in a deeper loop nest, then we do not want
         to propagate the ADDR_EXPR into the loop as that is likely
         adding expression evaluations into the loop.  */
      if (bb_for_stmt (use_stmt)->loop_depth > stmt_loop_depth)

      push_stmt_changes (&use_stmt);

      result = forward_propagate_addr_expr_1 (name, rhs, use_stmt,

      pop_stmt_changes (&use_stmt);

      /* Remove intermediate now unused copy and conversion chains.  */
          && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) == SSA_NAME
          && (TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == SSA_NAME
              || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == NOP_EXPR
              || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == CONVERT_EXPR))

          block_stmt_iterator bsi = bsi_for_stmt (use_stmt);
          release_defs (use_stmt);
          bsi_remove (&bsi, true);
/* Forward propagate the comparison COND defined in STMT like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  */
forward_propagate_comparison (tree cond, tree stmt)

  tree name = GIMPLE_STMT_OPERAND (stmt, 0);
  tree use_stmt, tmp = NULL_TREE;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (cond, 0)))
      || (TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (cond, 1))))

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (use_stmt == NULL_TREE)

  /* Conversion of the condition result to another integral type.  */
  if (TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
      && (TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == CONVERT_EXPR
          || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == NOP_EXPR
          || COMPARISON_CLASS_P (GIMPLE_STMT_OPERAND (use_stmt, 1))
          || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == TRUTH_NOT_EXPR)
      && INTEGRAL_TYPE_P (TREE_TYPE (GIMPLE_STMT_OPERAND (use_stmt, 0))))

      tree lhs = GIMPLE_STMT_OPERAND (use_stmt, 0);
      tree rhs = GIMPLE_STMT_OPERAND (use_stmt, 1);

      /* We can propagate the condition into a conversion.  */
      if (TREE_CODE (rhs) == CONVERT_EXPR
          || TREE_CODE (rhs) == NOP_EXPR)

          /* Avoid using fold here as that may create a COND_EXPR with
             non-boolean condition as canonical form.  */
          tmp = build2 (TREE_CODE (cond), TREE_TYPE (lhs),
                        TREE_OPERAND (cond, 0), TREE_OPERAND (cond, 1));
      /* We can propagate the condition into X op CST where op
         is EQ_EXPR or NE_EXPR and CST is either one or zero.  */
      else if (COMPARISON_CLASS_P (rhs)
               && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
               && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)

          enum tree_code code = TREE_CODE (rhs);
          tree cst = TREE_OPERAND (rhs, 1);

          tmp = combine_cond_expr_cond (code, TREE_TYPE (lhs),
                                        fold_convert (TREE_TYPE (cst), cond),
          if (tmp == NULL_TREE)

      /* We can propagate the condition into a statement that
         computes the logical negation of the comparison result.  */
      else if (TREE_CODE (rhs) == TRUTH_NOT_EXPR)

          tree type = TREE_TYPE (TREE_OPERAND (cond, 0));
          bool nans = HONOR_NANS (TYPE_MODE (type));

          code = invert_tree_comparison (TREE_CODE (cond), nans);
          if (code == ERROR_MARK)

          tmp = build2 (code, TREE_TYPE (lhs), TREE_OPERAND (cond, 0),
                        TREE_OPERAND (cond, 1));

      GIMPLE_STMT_OPERAND (use_stmt, 1) = unshare_expr (tmp);
      update_stmt (use_stmt);

      /* Remove defining statements.  */
      remove_prop_source_from_use (name, stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))

          fprintf (dump_file, " Replaced '");
          print_generic_expr (dump_file, rhs, dump_flags);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, dump_flags);
          fprintf (dump_file, "'\n");
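
/* A (hypothetical) GIMPLE example: if the only use of

     cond_1 = a_2 < b_3;

   is the conversion d_4 = (int) cond_1, that use becomes
   d_4 = a_2 < b_3; if instead the only use is e_5 = !cond_1, it becomes
   e_5 = a_2 >= b_3 (when NaNs need not be honored).  In both cases
   cond_1's definition is then dead and is removed by
   remove_prop_source_from_use.  */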
/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
   If so, we can change STMT into lhs = y which can later be copy
   propagated.  Similarly for negation.

   This could trivially be formulated as a forward propagation
   to immediate uses.  However, we already had an implementation
   from DOM which used backward propagation via the use-def links.

   It turns out that backward propagation is actually faster as
   there's less work to do for each NOT/NEG expression we find.
   Backwards propagation needs to look at the statement in a single
   backlink.  Forward propagation needs to look at potentially more
   than one forward link.  */

simplify_not_neg_expr (tree stmt)

  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
  tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));

  /* See if the RHS_DEF_STMT has the same form as our statement.  */
  if (TREE_CODE (rhs_def_stmt) == GIMPLE_MODIFY_STMT
      && TREE_CODE (GIMPLE_STMT_OPERAND (rhs_def_stmt, 1)) == TREE_CODE (rhs))

      tree rhs_def_operand =
        TREE_OPERAND (GIMPLE_STMT_OPERAND (rhs_def_stmt, 1), 0);

      /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
      if (TREE_CODE (rhs_def_operand) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
        GIMPLE_STMT_OPERAND (stmt, 1) = rhs_def_operand;
/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

simplify_switch_expr (tree stmt)

  tree cond = SWITCH_COND (stmt);

  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  if (TREE_CODE (cond) == SSA_NAME)

      def = SSA_NAME_DEF_STMT (cond);
      if (TREE_CODE (def) == GIMPLE_MODIFY_STMT)

          def = GIMPLE_STMT_OPERAND (def, 1);
          if (TREE_CODE (def) == NOP_EXPR)

              def = TREE_OPERAND (def, 0);

#ifdef ENABLE_CHECKING
              /* ??? Why was Jeff testing this?  We are gimple...  */
              gcc_assert (is_gimple_val (def));
#endif
              to = TREE_TYPE (cond);
              ti = TREE_TYPE (def);

              /* If we have an extension that preserves value, then we
                 can copy the source value into the switch.  */

              need_precision = TYPE_PRECISION (ti);
              if (! INTEGRAL_TYPE_P (ti))
              else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
              else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
              if (TYPE_PRECISION (to) < need_precision)

              SWITCH_COND (stmt) = def
/* Main entry point for the forward propagation optimizer.  */

tree_ssa_forward_propagate_single_use_vars (void)

  unsigned int todoflags = 0;

      block_stmt_iterator bsi;

      /* Note we update BSI within the loop as necessary.  */
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )

          tree stmt = bsi_stmt (bsi);

          /* If this statement sets an SSA_NAME to an address,
             try to propagate the address into the uses of the SSA_NAME.  */
          if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)

              tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
              tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);

              if (TREE_CODE (lhs) != SSA_NAME)

              if (TREE_CODE (rhs) == ADDR_EXPR)

                  if (forward_propagate_addr_expr (lhs, rhs))

                      todoflags |= TODO_remove_unused_locals;
                      bsi_remove (&bsi, true);

              else if ((TREE_CODE (rhs) == BIT_NOT_EXPR
                        || TREE_CODE (rhs) == NEGATE_EXPR)
                       && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)

                  simplify_not_neg_expr (stmt);

              else if (TREE_CODE (rhs) == COND_EXPR)

                  fold_defer_overflow_warnings ();
                  did_something = forward_propagate_into_cond (rhs, stmt);
                  if (did_something == 2)
                  fold_undefer_overflow_warnings (!TREE_NO_WARNING (rhs)
                                                  && did_something, stmt,
                                                  WARN_STRICT_OVERFLOW_CONDITIONAL);

              else if (COMPARISON_CLASS_P (rhs))

                  if (forward_propagate_comparison (rhs, stmt))

                      todoflags |= TODO_remove_unused_locals;
                      bsi_remove (&bsi, true);

          else if (TREE_CODE (stmt) == SWITCH_EXPR)

              simplify_switch_expr (stmt);

          else if (TREE_CODE (stmt) == COND_EXPR)

              fold_defer_overflow_warnings ();
              did_something = forward_propagate_into_cond (stmt, stmt);
              if (did_something == 2)
              fold_undefer_overflow_warnings (did_something, stmt,
                                              WARN_STRICT_OVERFLOW_CONDITIONAL);

          todoflags |= TODO_cleanup_cfg;
gate_forwprop (void)

struct tree_opt_pass pass_forwprop =
{
  "forwprop",				/* name */
  gate_forwprop,			/* gate */
  tree_ssa_forward_propagate_single_use_vars,	/* execute */
  0,					/* static_pass_number */
  TV_TREE_FORWPROP,			/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  | TODO_verify_ssa,			/* todo_flags_finish */
/* Structure to keep track of the value of a dereferenced PHI result
   and the set of virtual operands used for that dereference.  */

struct phiprop_d
{
  tree value;
  tree vop_stmt;
};

/* Verify if the value recorded for NAME in PHIVN is still valid at
   the start of basic block BB.  */

phivn_valid_p (struct phiprop_d *phivn, tree name, basic_block bb)

  tree vop_stmt = phivn[SSA_NAME_VERSION (name)].vop_stmt;

  /* The def stmts of all virtual uses need to be post-dominated
     by bb.  */
  FOR_EACH_SSA_TREE_OPERAND (vuse, vop_stmt, ui, SSA_OP_VUSE)

      imm_use_iterator ui2;

      FOR_EACH_IMM_USE_STMT (use_stmt, ui2, vuse)

          /* If BB does not dominate a VDEF, the value is invalid.  */
          if (((TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
                && !ZERO_SSA_OPERANDS (use_stmt, SSA_OP_VDEF))
               || TREE_CODE (use_stmt) == PHI_NODE)
              && !dominated_by_p (CDI_DOMINATORS, bb_for_stmt (use_stmt), bb))

              BREAK_FROM_IMM_USE_STMT (ui2);
/* Insert a new phi node for the dereference of PHI at basic_block
   BB with the virtual operands from USE_STMT.  */

phiprop_insert_phi (basic_block bb, tree phi, tree use_stmt,
                    struct phiprop_d *phivn, size_t n)

  /* Build a new PHI node to replace the definition of
     the indirect reference lhs.  */
  res = GIMPLE_STMT_OPERAND (use_stmt, 0);
  SSA_NAME_DEF_STMT (res) = new_phi = create_phi_node (res, bb);

  /* Add PHI arguments for each edge inserting loads of the
     addressable operands.  */
  FOR_EACH_EDGE (e, ei, bb->preds)

      tree old_arg, new_var, tmp;

      old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
      while (TREE_CODE (old_arg) == SSA_NAME
             && (SSA_NAME_VERSION (old_arg) >= n
                 || phivn[SSA_NAME_VERSION (old_arg)].value == NULL_TREE))

          tree def_stmt = SSA_NAME_DEF_STMT (old_arg);
          old_arg = GIMPLE_STMT_OPERAND (def_stmt, 1);

      if (TREE_CODE (old_arg) == SSA_NAME)
        /* Reuse a formerly created dereference.  */
        new_var = phivn[SSA_NAME_VERSION (old_arg)].value;

          old_arg = TREE_OPERAND (old_arg, 0);
          new_var = create_tmp_var (TREE_TYPE (old_arg), NULL);
          tmp = build2 (GIMPLE_MODIFY_STMT, void_type_node,
                        NULL_TREE, unshare_expr (old_arg));
          if (TREE_CODE (TREE_TYPE (old_arg)) == COMPLEX_TYPE
              || TREE_CODE (TREE_TYPE (old_arg)) == VECTOR_TYPE)
            DECL_GIMPLE_REG_P (new_var) = 1;
          add_referenced_var (new_var);
          new_var = make_ssa_name (new_var, tmp);
          GIMPLE_STMT_OPERAND (tmp, 0) = new_var;

          bsi_insert_on_edge (e, tmp);

          mark_symbols_for_renaming (tmp);

      add_phi_arg (new_phi, new_var, e);

  update_stmt (new_phi);
/* Propagate between the phi node arguments of PHI in BB and phi result
   users.  For now this matches
        # p_2 = PHI <&x, &y>
        tmp_1 = *p_2;
   and converts it to
        # tmp_1 = PHI <x, y>
   Returns true if a transformation was done and edge insertions
   need to be committed.  Global data PHIVN and N are used to track
   past transformation results.  We need to be especially careful here
   with aliasing issues as we are moving memory reads.  */
propagate_with_phi (basic_block bb, tree phi, struct phiprop_d *phivn, size_t n)

  tree ptr = PHI_RESULT (phi);
  tree use_stmt, res = NULL_TREE;
  block_stmt_iterator bsi;
  imm_use_iterator ui;
  use_operand_p arg_p, use;

  if (MTAG_P (SSA_NAME_VAR (ptr))
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || !is_gimple_reg_type (TREE_TYPE (TREE_TYPE (ptr))))

  /* Check if we can "cheaply" dereference all phi arguments.  */
  FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_USE)

      tree arg = USE_FROM_PTR (arg_p);
      /* Walk the ssa chain until we reach a ssa name we already
         created a value for or we reach a definition of the form
         ssa_name_n = &var;  */
      while (TREE_CODE (arg) == SSA_NAME
             && !SSA_NAME_IS_DEFAULT_DEF (arg)
             && (SSA_NAME_VERSION (arg) >= n
                 || phivn[SSA_NAME_VERSION (arg)].value == NULL_TREE))

          tree def_stmt = SSA_NAME_DEF_STMT (arg);
          if (TREE_CODE (def_stmt) != GIMPLE_MODIFY_STMT)

          arg = GIMPLE_STMT_OPERAND (def_stmt, 1);
      if ((TREE_CODE (arg) != ADDR_EXPR
           /* Avoid having to decay *&a to a[0] later.  */
           || !is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (arg, 0))))
          && !(TREE_CODE (arg) == SSA_NAME
               && phivn[SSA_NAME_VERSION (arg)].value != NULL_TREE
               && phivn_valid_p (phivn, arg, bb)))
  /* Find a dereferencing use.  First follow (single use) ssa
     copy chains for ptr.  */
  while (single_imm_use (ptr, &use, &use_stmt)
         && TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
         && GIMPLE_STMT_OPERAND (use_stmt, 1) == ptr
         && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) == SSA_NAME)
    ptr = GIMPLE_STMT_OPERAND (use_stmt, 0);

  /* Replace the first dereference of *ptr if there is one and if we
     can move the loads to the place of the ptr phi node.  */
  phi_inserted = false;
  FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)

      /* Check whether this is a load of *ptr.  */
      if (!(TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
            && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) == SSA_NAME
            && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == INDIRECT_REF
            && TREE_OPERAND (GIMPLE_STMT_OPERAND (use_stmt, 1), 0) == ptr
            /* We cannot replace a load that may throw or is volatile.  */
            && !tree_can_throw_internal (use_stmt)))

      /* Check if we can move the loads.  The def stmts of all virtual uses
         need to be post-dominated by bb.  */
      FOR_EACH_SSA_TREE_OPERAND (vuse, use_stmt, ui2, SSA_OP_VUSE)

          tree def_stmt = SSA_NAME_DEF_STMT (vuse);
          if (!SSA_NAME_IS_DEFAULT_DEF (vuse)
              && (bb_for_stmt (def_stmt) == bb
                  || !dominated_by_p (CDI_DOMINATORS,
                                      bb, bb_for_stmt (def_stmt))))

      /* Found a proper dereference.  Insert a phi node if this
         is the first load transformation.  */

          res = phiprop_insert_phi (bb, phi, use_stmt, phivn, n);

          /* Remember the value we created for *ptr.  */
          phivn[SSA_NAME_VERSION (ptr)].value = res;
          phivn[SSA_NAME_VERSION (ptr)].vop_stmt = use_stmt;
          /* Remove old stmt.  The phi is taken care of by DCE, if we
             want to delete it here we also have to delete all intermediate
             copies.  */
          bsi = bsi_for_stmt (use_stmt);
          bsi_remove (&bsi, 0);

          phi_inserted = true;

          /* Further replacements are easy, just make a copy out of the
             load.  */
          GIMPLE_STMT_OPERAND (use_stmt, 1) = res;
          update_stmt (use_stmt);

      /* Continue searching for a proper dereference.  */

  return phi_inserted;
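
/* A small (hypothetical) example of the phiprop transformation:

     # p_2 = PHI <&x(3), &y(4)>
     tmp_3 = *p_2;

   becomes, after inserting loads on the incoming edges,

     x.0_4 = x;                        (on the edge from bb 3)
     y.1_5 = y;                        (on the edge from bb 4)
     # tmp_3 = PHI <x.0_4(3), y.1_5(4)>

   replacing a load through a PHI of addresses by a PHI of the loaded
   values, which the edge insertions committed by the caller make
   effective.  */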
/* Helper walking the dominator tree starting from BB and processing
   phi nodes with global data PHIVN and N.  */

tree_ssa_phiprop_1 (basic_block bb, struct phiprop_d *phivn, size_t n)

  bool did_something = false;

  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
    did_something |= propagate_with_phi (bb, phi, phivn, n);

  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son = next_dom_son (CDI_DOMINATORS, son))
    did_something |= tree_ssa_phiprop_1 (son, phivn, n);

  return did_something;
/* Main entry for phiprop pass.  */

tree_ssa_phiprop (void)

  struct phiprop_d *phivn;

  calculate_dominance_info (CDI_DOMINATORS);

  phivn = XCNEWVEC (struct phiprop_d, num_ssa_names);

  if (tree_ssa_phiprop_1 (ENTRY_BLOCK_PTR, phivn, num_ssa_names))
    bsi_commit_edge_inserts ();
struct tree_opt_pass pass_phiprop =
{
  "phiprop",				/* name */
  gate_phiprop,				/* gate */
  tree_ssa_phiprop,			/* execute */
  0,					/* static_pass_number */
  TV_TREE_FORWPROP,			/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  | TODO_verify_ssa,			/* todo_flags_finish */