gcc/tree-ssa-forwprop.c
1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "ggc.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "timevar.h"
30 #include "diagnostic.h"
31 #include "tree-flow.h"
32 #include "tree-pass.h"
33 #include "tree-dump.h"
34 #include "langhooks.h"
35 #include "flags.h"
37 /* This pass propagates the RHS of assignment statements into use
38 sites of the LHS of the assignment. It's basically a specialized
39 form of tree combination. It is hoped all of this can disappear
40 when we have a generalized tree combiner.
42 Note carefully that after propagation the resulting statement
43    must still be a proper gimple statement.  Right now we only
44    perform propagations we know will result in valid gimple
45 code. One day we'll want to generalize this code.
47 One class of common cases we handle is forward propagating a single use
48 variable into a COND_EXPR.
50 bb0:
51 x = a COND b;
52 if (x) goto ... else goto ...
54 Will be transformed into:
56 bb0:
57 if (a COND b) goto ... else goto ...
59 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
61 Or (assuming c1 and c2 are constants):
63 bb0:
64 x = a + c1;
65 if (x EQ/NEQ c2) goto ... else goto ...
67 Will be transformed into:
69 bb0:
70 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
72 Similarly for x = a - c1.
76 bb0:
77 x = !a
78 if (x) goto ... else goto ...
80 Will be transformed into:
82 bb0:
83 if (a == 0) goto ... else goto ...
85 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
86 For these cases, we propagate A into all, possibly more than one,
87 COND_EXPRs that use X.
91 bb0:
92 x = (typecast) a
93 if (x) goto ... else goto ...
95 Will be transformed into:
97 bb0:
98 if (a != 0) goto ... else goto ...
100    (Assuming a is of integral type and x is a boolean, or x is of
101    integral type and a is a boolean.)
103 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
104 For these cases, we propagate A into all, possibly more than one,
105 COND_EXPRs that use X.
107 In addition to eliminating the variable and the statement which assigns
108 a value to the variable, we may be able to later thread the jump without
109 adding insane complexity in the dominator optimizer.
111 Also note these transformations can cascade. We handle this by having
112 a worklist of COND_EXPR statements to examine. As we make a change to
113 a statement, we put it back on the worklist to examine on the next
114 iteration of the main loop.
116 A second class of propagation opportunities arises for ADDR_EXPR
117 nodes.
119 ptr = &x->y->z;
120 res = *ptr;
122 Will get turned into
124 res = x->y->z;
127 ptr = (type1*)&type2var;
128 res = *ptr
130    Will get turned into (if type1 and type2 are the same size
131    and neither of them is volatile-qualified):
132 res = VIEW_CONVERT_EXPR<type1>(type2var)
136 ptr = &x[0];
137 ptr2 = ptr + <constant>;
139 Will get turned into
141 ptr2 = &x[constant/elementsize];
145 ptr = &x[0];
146 offset = index * element_size;
147 offset_p = (pointer) offset;
148 ptr2 = ptr + offset_p
150 Will get turned into:
152 ptr2 = &x[index];
154    We also propagate casts into SWITCH_EXPR and COND_EXPR conditions so
155    that the cast can be removed, and we propagate a {NOT_EXPR,NEG_EXPR}
156    into a subsequent {NOT_EXPR,NEG_EXPR}.
158 This will (of course) be extended as other needs arise. */
160 static bool forward_propagate_addr_expr (tree name, tree rhs);
162 /* Set to true if we delete EH edges during the optimization. */
163 static bool cfg_changed;
166 /* Get the next statement we can propagate NAME's value into, skipping
167    trivial copies.  Returns the statement that is suitable as a
168    propagation destination, or NULL_TREE if there is none.
169    This only returns destinations in a single-use chain.  If FINAL_NAME_P
170    is non-NULL, it is set to the ssa name that represents the use.  */
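/* Illustrative example (editor's sketch, not part of the original
   sources; the SSA names are made up): given the single-use chain

     y_2 = x_1;
     z_3 = y_2;
     if (z_3) ...

   get_prop_dest_stmt (x_1, &final) skips both trivial copies, returns
   the COND_EXPR statement and sets final to z_3.  */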
172 static tree
173 get_prop_dest_stmt (tree name, tree *final_name_p)
175 use_operand_p use;
176 tree use_stmt;
178 do {
179 /* If name has multiple uses, bail out. */
180 if (!single_imm_use (name, &use, &use_stmt))
181 return NULL_TREE;
183 /* If this is not a trivial copy, we found it. */
184 if (TREE_CODE (use_stmt) != GIMPLE_MODIFY_STMT
185 || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) != SSA_NAME
186 || GIMPLE_STMT_OPERAND (use_stmt, 1) != name)
187 break;
189 /* Continue searching uses of the copy destination. */
190 name = GIMPLE_STMT_OPERAND (use_stmt, 0);
191 } while (1);
193 if (final_name_p)
194 *final_name_p = name;
196 return use_stmt;
199 /* Get the statement we can propagate from into NAME, skipping
200    trivial copies.  Returns the statement which defines the
201    propagation source, or NULL_TREE if there is none.
202    If SINGLE_USE_ONLY is set, only sources which have a single-use
203    chain up to NAME are considered.  If SINGLE_USE_P is non-null,
204    it is set to whether the chain to NAME is a single-use chain
205    or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */
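/* Illustrative example (editor's sketch, not part of the original
   sources; the SSA names are made up): starting from

     q_2 = (char *) p_1;
     r_3 = q_2;

   get_prop_source_stmt (r_3, false, &single_use_p) walks back through
   the copy and the pointer conversion and returns the statement that
   defines p_1, provided that statement is a GIMPLE_MODIFY_STMT.  */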
207 static tree
208 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
210 bool single_use = true;
212 do {
213 tree def_stmt = SSA_NAME_DEF_STMT (name);
215 if (!has_single_use (name))
217 single_use = false;
218 if (single_use_only)
219 return NULL_TREE;
222 /* If name is defined by a PHI node or is the default def, bail out. */
223 if (TREE_CODE (def_stmt) != GIMPLE_MODIFY_STMT)
224 return NULL_TREE;
226 /* If name is not a simple copy destination, we found it. */
227 if (TREE_CODE (GIMPLE_STMT_OPERAND (def_stmt, 1)) != SSA_NAME)
229 tree rhs;
231 if (!single_use_only && single_use_p)
232 *single_use_p = single_use;
234 /* We can look through pointer conversions in the search
235 for a useful stmt for the comparison folding. */
236 rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
237 if (CONVERT_EXPR_P (rhs)
238 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
239 && POINTER_TYPE_P (TREE_TYPE (rhs))
240 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0))))
241 name = TREE_OPERAND (rhs, 0);
242 else
243 return def_stmt;
245 else
247 /* Continue searching the def of the copy source name. */
248 name = GIMPLE_STMT_OPERAND (def_stmt, 1);
250 } while (1);
253 /* Checks if the destination ssa name in DEF_STMT can be used as
254 propagation source. Returns true if so, otherwise false. */
256 static bool
257 can_propagate_from (tree def_stmt)
259 tree rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
260 use_operand_p use_p;
261 ssa_op_iter iter;
263 /* If the rhs has side-effects we cannot propagate from it. */
264 if (TREE_SIDE_EFFECTS (rhs))
265 return false;
267 /* If the rhs is a load we cannot propagate from it. */
268 if (REFERENCE_CLASS_P (rhs)
269 || DECL_P (rhs))
270 return false;
272   /* Constants can always be propagated.  */
273 if (is_gimple_min_invariant (rhs))
274 return true;
276 /* If any of the SSA operands occurs in abnormal PHIs we cannot
277 propagate from this stmt. */
278 FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_USE)
279 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
280 return false;
282   /* If the definition is a conversion of a pointer to a function type,
283      then we cannot apply this optimization, as some targets require
284      function pointers to be canonicalized and in that case this
285      optimization could eliminate a necessary canonicalization.  */
286 if (CONVERT_EXPR_P (rhs)
287 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0)))
288 && TREE_CODE (TREE_TYPE (TREE_TYPE
289 (TREE_OPERAND (rhs, 0)))) == FUNCTION_TYPE)
290 return false;
292 return true;
295 /* Remove a copy chain ending in NAME along the defs, stopping
296    before (and not removing) UP_TO_STMT.  If NAME was replaced in
297    its only use, then this function can be used to clean up
298    dead stmts.  Returns true if UP_TO_STMT can be removed
299    as well, otherwise false.  */
301 static bool
302 remove_prop_source_from_use (tree name, tree up_to_stmt)
304 block_stmt_iterator bsi;
305 tree stmt;
307 do {
308 if (!has_zero_uses (name))
309 return false;
311 stmt = SSA_NAME_DEF_STMT (name);
312 if (stmt == up_to_stmt)
313 return true;
315 bsi = bsi_for_stmt (stmt);
316 release_defs (stmt);
317 bsi_remove (&bsi, true);
319 name = GIMPLE_STMT_OPERAND (stmt, 1);
320 } while (TREE_CODE (name) == SSA_NAME);
322 return false;
325 /* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
326    the folded result in a form suitable for COND_EXPR_COND, or
327    NULL_TREE if there is no suitable simplified form.  If
328    INVARIANT_ONLY is true, only gimple_min_invariant results are
329    considered simplified.  */
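/* Illustrative example (editor's sketch, not part of the original
   sources): for the header-comment case "x = a + 1; if (x == 5)" the
   caller passes the defining RHS, so

     combine_cond_expr_cond (EQ_EXPR, boolean_type_node, a_1 + 1, 5, ...)

   lets fold_binary rewrite the comparison to "a_1 == 4", which is then
   canonicalized for use as a COND_EXPR condition.  */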
331 static tree
332 combine_cond_expr_cond (enum tree_code code, tree type,
333 tree op0, tree op1, bool invariant_only)
335 tree t;
337 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
339 t = fold_binary (code, type, op0, op1);
340 if (!t)
341 return NULL_TREE;
343 /* Require that we got a boolean type out if we put one in. */
344 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
346 /* Canonicalize the combined condition for use in a COND_EXPR. */
347 t = canonicalize_cond_expr_cond (t);
349 /* Bail out if we required an invariant but didn't get one. */
350 if (!t
351 || (invariant_only
352 && !is_gimple_min_invariant (t)))
353 return NULL_TREE;
355 return t;
358 /* Propagate from the ssa name definition statements of COND_EXPR
359 in statement STMT into the conditional if that simplifies it.
360 Returns zero if no statement was changed, one if there were
361 changes and two if cfg_cleanup needs to run. */
363 static int
364 forward_propagate_into_cond (tree cond_expr, tree stmt)
366 int did_something = 0;
368 do {
369 tree tmp = NULL_TREE;
370 tree cond = COND_EXPR_COND (cond_expr);
371 tree name, def_stmt, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
372 bool single_use0_p = false, single_use1_p = false;
374 /* We can do tree combining on SSA_NAME and comparison expressions. */
375 if (COMPARISON_CLASS_P (cond)
376 && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME)
378 /* For comparisons use the first operand, that is likely to
379 simplify comparisons against constants. */
380 name = TREE_OPERAND (cond, 0);
381 def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
382 if (def_stmt != NULL_TREE
383 && can_propagate_from (def_stmt))
385 tree op1 = TREE_OPERAND (cond, 1);
386 rhs0 = GIMPLE_STMT_OPERAND (def_stmt, 1);
387 tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
388 fold_convert (TREE_TYPE (op1), rhs0),
389 op1, !single_use0_p);
391 /* If that wasn't successful, try the second operand. */
392 if (tmp == NULL_TREE
393 && TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME)
395 tree op0 = TREE_OPERAND (cond, 0);
396 name = TREE_OPERAND (cond, 1);
397 def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
398 if (def_stmt == NULL_TREE
399 || !can_propagate_from (def_stmt))
400 return did_something;
402 rhs1 = GIMPLE_STMT_OPERAND (def_stmt, 1);
403 tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
404 op0,
405 fold_convert (TREE_TYPE (op0), rhs1),
406 !single_use1_p);
408 /* If that wasn't successful either, try both operands. */
409 if (tmp == NULL_TREE
410 && rhs0 != NULL_TREE
411 && rhs1 != NULL_TREE)
412 tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
413 rhs0,
414 fold_convert (TREE_TYPE (rhs0), rhs1),
415 !(single_use0_p && single_use1_p));
417 else if (TREE_CODE (cond) == SSA_NAME)
419 name = cond;
420 def_stmt = get_prop_source_stmt (name, true, NULL);
421 if (def_stmt == NULL_TREE
422 || !can_propagate_from (def_stmt))
423 return did_something;
425 rhs0 = GIMPLE_STMT_OPERAND (def_stmt, 1);
426 tmp = combine_cond_expr_cond (NE_EXPR, boolean_type_node, rhs0,
427 build_int_cst (TREE_TYPE (rhs0), 0),
428 false);
431 if (tmp)
433 if (dump_file && tmp)
435 fprintf (dump_file, " Replaced '");
436 print_generic_expr (dump_file, cond, 0);
437 fprintf (dump_file, "' with '");
438 print_generic_expr (dump_file, tmp, 0);
439 fprintf (dump_file, "'\n");
442 COND_EXPR_COND (cond_expr) = unshare_expr (tmp);
443 update_stmt (stmt);
445 /* Remove defining statements. */
446 remove_prop_source_from_use (name, NULL);
448 if (is_gimple_min_invariant (tmp))
449 did_something = 2;
450 else if (did_something == 0)
451 did_something = 1;
453 /* Continue combining. */
454 continue;
457 break;
458 } while (1);
460 return did_something;
463 /* We've just substituted an ADDR_EXPR into stmt. Update all the
464 relevant data structures to match. */
466 static void
467 tidy_after_forward_propagate_addr (tree stmt)
469 /* We may have turned a trapping insn into a non-trapping insn. */
470 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
471 && tree_purge_dead_eh_edges (bb_for_stmt (stmt)))
472 cfg_changed = true;
474 if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR)
475 recompute_tree_invariant_for_addr_expr (GIMPLE_STMT_OPERAND (stmt, 1));
477 mark_symbols_for_renaming (stmt);
480 /* DEF_RHS contains the address of the 0th element in an array.
481 USE_STMT uses type of DEF_RHS to compute the address of an
482 arbitrary element within the array. The (variable) byte offset
483 of the element is contained in OFFSET.
485 We walk back through the use-def chains of OFFSET to verify that
486 it is indeed computing the offset of an element within the array
487 and extract the index corresponding to the given byte offset.
489 We then try to fold the entire address expression into a form
490 &array[index].
492 If we are successful, we replace the right hand side of USE_STMT
493 with the new address computation. */
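/* Illustrative example (editor's sketch, not part of the original
   sources; assumes a 4-byte int element type): for int a[10] and

     offset_4 = i_2 * 4;
     ptr2_5 = &a[0] p+ offset_4;

   the multiplier matches TYPE_SIZE_UNIT of the element type, so the
   use statement is rewritten to

     ptr2_5 = &a[i_2];  */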
495 static bool
496 forward_propagate_addr_into_variable_array_index (tree offset,
497 tree def_rhs, tree use_stmt)
499 tree index;
501   /* Try to find an expression for a proper index.  This is either
502      a multiplication expression by the element size or just the
503      ssa name we were handed, in case the element size is one.  */
504 if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
505 index = offset;
506 else
508 /* Get the offset's defining statement. */
509 offset = SSA_NAME_DEF_STMT (offset);
511 /* The statement which defines OFFSET before type conversion
512 must be a simple GIMPLE_MODIFY_STMT. */
513 if (TREE_CODE (offset) != GIMPLE_MODIFY_STMT)
514 return false;
516 /* The RHS of the statement which defines OFFSET must be a
517 multiplication of an object by the size of the array elements.
518 This implicitly verifies that the size of the array elements
519 is constant. */
520 offset = GIMPLE_STMT_OPERAND (offset, 1);
521 if (TREE_CODE (offset) != MULT_EXPR
522 || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
523 || !simple_cst_equal (TREE_OPERAND (offset, 1),
524 TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
525 return false;
527 /* The first operand to the MULT_EXPR is the desired index. */
528 index = TREE_OPERAND (offset, 0);
531 /* Replace the pointer addition with array indexing. */
532 GIMPLE_STMT_OPERAND (use_stmt, 1) = unshare_expr (def_rhs);
533 TREE_OPERAND (TREE_OPERAND (GIMPLE_STMT_OPERAND (use_stmt, 1), 0), 1)
534 = index;
536 /* That should have created gimple, so there is no need to
537 record information to undo the propagation. */
538 fold_stmt_inplace (use_stmt);
539 tidy_after_forward_propagate_addr (use_stmt);
540 return true;
543 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
544 ADDR_EXPR <whatever>.
546 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
547 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
548 node or for recovery of array indexing from pointer arithmetic.
550    Return true if the propagation was successful (the propagation may
551    not be totally successful, yet things may still have been changed).  */
553 static bool
554 forward_propagate_addr_expr_1 (tree name, tree def_rhs, tree use_stmt,
555 bool single_use_p)
557 tree lhs, rhs, array_ref;
558 tree *rhsp, *lhsp;
560 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
562 lhs = GIMPLE_STMT_OPERAND (use_stmt, 0);
563 rhs = GIMPLE_STMT_OPERAND (use_stmt, 1);
565 /* Trivial cases. The use statement could be a trivial copy or a
566 useless conversion. Recurse to the uses of the lhs as copyprop does
567 not copy through different variant pointers and FRE does not catch
568 all useless conversions. Treat the case of a single-use name and
569      a conversion to def_rhs type separately, though.  */
570 if (TREE_CODE (lhs) == SSA_NAME
571 && (rhs == name
572 || CONVERT_EXPR_P (rhs))
573 && useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (def_rhs)))
575 /* Only recurse if we don't deal with a single use. */
576 if (!single_use_p)
577 return forward_propagate_addr_expr (lhs, def_rhs);
579 GIMPLE_STMT_OPERAND (use_stmt, 1) = unshare_expr (def_rhs);
580 return true;
583 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
584 ADDR_EXPR will not appear on the LHS. */
585 lhsp = &GIMPLE_STMT_OPERAND (use_stmt, 0);
586 while (handled_component_p (*lhsp))
587 lhsp = &TREE_OPERAND (*lhsp, 0);
588 lhs = *lhsp;
590 /* Now see if the LHS node is an INDIRECT_REF using NAME. If so,
591 propagate the ADDR_EXPR into the use of NAME and fold the result. */
592 if (TREE_CODE (lhs) == INDIRECT_REF
593 && TREE_OPERAND (lhs, 0) == name
594 && useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (lhs, 0)),
595 TREE_TYPE (def_rhs))
596 /* ??? This looks redundant, but is required for bogus types
597 that can sometimes occur. */
598 && useless_type_conversion_p (TREE_TYPE (lhs),
599 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
601 *lhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
602 fold_stmt_inplace (use_stmt);
603 tidy_after_forward_propagate_addr (use_stmt);
605 /* Continue propagating into the RHS if this was not the only use. */
606 if (single_use_p)
607 return true;
610 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
611 nodes from the RHS. */
612 rhsp = &GIMPLE_STMT_OPERAND (use_stmt, 1);
613 while (handled_component_p (*rhsp)
614 || TREE_CODE (*rhsp) == ADDR_EXPR)
615 rhsp = &TREE_OPERAND (*rhsp, 0);
616 rhs = *rhsp;
618 /* Now see if the RHS node is an INDIRECT_REF using NAME. If so,
619 propagate the ADDR_EXPR into the use of NAME and fold the result. */
620 if (TREE_CODE (rhs) == INDIRECT_REF
621 && TREE_OPERAND (rhs, 0) == name
622 && useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (rhs, 0)),
623 TREE_TYPE (def_rhs))
624 /* ??? This looks redundant, but is required for bogus types
625 that can sometimes occur. */
626 && useless_type_conversion_p (TREE_TYPE (rhs),
627 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
629 *rhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
630 fold_stmt_inplace (use_stmt);
631 tidy_after_forward_propagate_addr (use_stmt);
632 return true;
635 /* Now see if the RHS node is an INDIRECT_REF using NAME. If so,
636 propagate the ADDR_EXPR into the use of NAME and try to
637 create a VCE and fold the result. */
638 if (TREE_CODE (rhs) == INDIRECT_REF
639 && TREE_OPERAND (rhs, 0) == name
640 && TYPE_SIZE (TREE_TYPE (rhs))
641 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
642       /* Function decls should not be used for VCE either, as the address
643          could be a function descriptor rather than the actual function code.  */
644       && TREE_CODE (TREE_OPERAND (def_rhs, 0)) != FUNCTION_DECL
645       /* We should not convert volatile loads to non-volatile loads.  */
646 && !TYPE_VOLATILE (TREE_TYPE (rhs))
647 && !TYPE_VOLATILE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
648 && operand_equal_p (TYPE_SIZE (TREE_TYPE (rhs)),
649 TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))), 0))
651 bool res = true;
652 tree new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
653 new_rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), new_rhs);
654 /* If we have folded the VCE, then we have to create a new statement. */
655 if (TREE_CODE (new_rhs) != VIEW_CONVERT_EXPR)
657 block_stmt_iterator bsi = bsi_for_stmt (use_stmt);
658 new_rhs = force_gimple_operand_bsi (&bsi, new_rhs, true, NULL, true, BSI_SAME_STMT);
659          /* As we change the dereference to an SSA_NAME, we need to return
660             false to make sure that the statement does not get removed.  */
661 res = false;
663 *rhsp = new_rhs;
664 fold_stmt_inplace (use_stmt);
665 tidy_after_forward_propagate_addr (use_stmt);
666 return res;
669 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
670 is nothing to do. */
671 if (TREE_CODE (rhs) != POINTER_PLUS_EXPR
672 || TREE_OPERAND (rhs, 0) != name)
673 return false;
675 /* The remaining cases are all for turning pointer arithmetic into
676 array indexing. They only apply when we have the address of
677 element zero in an array. If that is not the case then there
678 is nothing to do. */
679 array_ref = TREE_OPERAND (def_rhs, 0);
680 if (TREE_CODE (array_ref) != ARRAY_REF
681 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
682 || !integer_zerop (TREE_OPERAND (array_ref, 1)))
683 return false;
685 /* Try to optimize &x[0] p+ C where C is a multiple of the size
686 of the elements in X into &x[C/element size]. */
687 if (TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
689 tree orig = unshare_expr (rhs);
690 TREE_OPERAND (rhs, 0) = unshare_expr (def_rhs);
692 /* If folding succeeds, then we have just exposed new variables
693 in USE_STMT which will need to be renamed. If folding fails,
694 then we need to put everything back the way it was. */
695 if (fold_stmt_inplace (use_stmt))
697 tidy_after_forward_propagate_addr (use_stmt);
698 return true;
700 else
702 GIMPLE_STMT_OPERAND (use_stmt, 1) = orig;
703 update_stmt (use_stmt);
704 return false;
708   /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
709      a multiplication of an index by the size of the array elements,
710      with the result possibly converted into the proper type for
711      the pointer arithmetic.  */
712 if (TREE_CODE (TREE_OPERAND (rhs, 1)) == SSA_NAME
713 /* Avoid problems with IVopts creating PLUS_EXPRs with a
714 different type than their operands. */
715 && useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (name)))
717 bool res;
719 res = forward_propagate_addr_into_variable_array_index (TREE_OPERAND (rhs, 1),
720 def_rhs, use_stmt);
721 return res;
723 return false;
726 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
728 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
729 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
730 node or for recovery of array indexing from pointer arithmetic.
731    Returns true if all uses have been propagated into.  */
733 static bool
734 forward_propagate_addr_expr (tree name, tree rhs)
736 int stmt_loop_depth = bb_for_stmt (SSA_NAME_DEF_STMT (name))->loop_depth;
737 imm_use_iterator iter;
738 tree use_stmt;
739 bool all = true;
740 bool single_use_p = has_single_use (name);
742 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
744 bool result;
745 tree use_rhs;
747 /* If the use is not in a simple assignment statement, then
748 there is nothing we can do. */
749 if (TREE_CODE (use_stmt) != GIMPLE_MODIFY_STMT)
751 all = false;
752 continue;
755 /* If the use is in a deeper loop nest, then we do not want
756 to propagate the ADDR_EXPR into the loop as that is likely
757 adding expression evaluations into the loop. */
758 if (bb_for_stmt (use_stmt)->loop_depth > stmt_loop_depth)
760 all = false;
761 continue;
764 push_stmt_changes (&use_stmt);
766 result = forward_propagate_addr_expr_1 (name, rhs, use_stmt,
767 single_use_p);
768 all &= result;
770 pop_stmt_changes (&use_stmt);
772 /* Remove intermediate now unused copy and conversion chains. */
773 use_rhs = GIMPLE_STMT_OPERAND (use_stmt, 1);
774 if (result
775 && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) == SSA_NAME
776 && (TREE_CODE (use_rhs) == SSA_NAME
777 || (CONVERT_EXPR_P (use_rhs)
778 && TREE_CODE (TREE_OPERAND (use_rhs, 0)) == SSA_NAME)))
780 block_stmt_iterator bsi = bsi_for_stmt (use_stmt);
781 release_defs (use_stmt);
782 bsi_remove (&bsi, true);
786 return all;
789 /* Forward propagate the comparison COND defined in STMT like
790 cond_1 = x CMP y to uses of the form
791 a_1 = (T')cond_1
792 a_1 = !cond_1
793 a_1 = cond_1 != 0
794 Returns true if stmt is now unused. */
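/* Illustrative example (editor's sketch, not part of the original
   sources; the SSA names are made up): given

     cond_1 = x_2 < y_3;
     a_4 = !cond_1;

   the comparison is propagated and inverted, so the use becomes

     a_4 = x_2 >= y_3;

   assuming the inversion is valid (e.g. no NaNs need to be honored).  */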
796 static bool
797 forward_propagate_comparison (tree cond, tree stmt)
799 tree name = GIMPLE_STMT_OPERAND (stmt, 0);
800 tree use_stmt, tmp = NULL_TREE;
802 /* Don't propagate ssa names that occur in abnormal phis. */
803 if ((TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME
804 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (cond, 0)))
805 || (TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME
806 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (cond, 1))))
807 return false;
809 /* Do not un-cse comparisons. But propagate through copies. */
810 use_stmt = get_prop_dest_stmt (name, &name);
811 if (use_stmt == NULL_TREE)
812 return false;
814 /* Conversion of the condition result to another integral type. */
815 if (TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
816 && (CONVERT_EXPR_P (GIMPLE_STMT_OPERAND (use_stmt, 1))
817 || COMPARISON_CLASS_P (GIMPLE_STMT_OPERAND (use_stmt, 1))
818 || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == TRUTH_NOT_EXPR)
819 && INTEGRAL_TYPE_P (TREE_TYPE (GIMPLE_STMT_OPERAND (use_stmt, 0))))
821 tree lhs = GIMPLE_STMT_OPERAND (use_stmt, 0);
822 tree rhs = GIMPLE_STMT_OPERAND (use_stmt, 1);
824 /* We can propagate the condition into a conversion. */
825 if (CONVERT_EXPR_P (rhs))
827 /* Avoid using fold here as that may create a COND_EXPR with
828 non-boolean condition as canonical form. */
829 tmp = build2 (TREE_CODE (cond), TREE_TYPE (lhs),
830 TREE_OPERAND (cond, 0), TREE_OPERAND (cond, 1));
832 /* We can propagate the condition into X op CST where op
833 is EQ_EXPR or NE_EXPR and CST is either one or zero. */
834 else if (COMPARISON_CLASS_P (rhs)
835 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
836 && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
838 enum tree_code code = TREE_CODE (rhs);
839 tree cst = TREE_OPERAND (rhs, 1);
841 tmp = combine_cond_expr_cond (code, TREE_TYPE (lhs),
842 fold_convert (TREE_TYPE (cst), cond),
843 cst, false);
844 if (tmp == NULL_TREE)
845 return false;
847 /* We can propagate the condition into a statement that
848 computes the logical negation of the comparison result. */
849 else if (TREE_CODE (rhs) == TRUTH_NOT_EXPR)
851 tree type = TREE_TYPE (TREE_OPERAND (cond, 0));
852 bool nans = HONOR_NANS (TYPE_MODE (type));
853 enum tree_code code;
854 code = invert_tree_comparison (TREE_CODE (cond), nans);
855 if (code == ERROR_MARK)
856 return false;
858 tmp = build2 (code, TREE_TYPE (lhs), TREE_OPERAND (cond, 0),
859 TREE_OPERAND (cond, 1));
861 else
862 return false;
864 GIMPLE_STMT_OPERAND (use_stmt, 1) = unshare_expr (tmp);
865 update_stmt (use_stmt);
867 /* Remove defining statements. */
868 remove_prop_source_from_use (name, stmt);
870 if (dump_file && (dump_flags & TDF_DETAILS))
872 fprintf (dump_file, " Replaced '");
873 print_generic_expr (dump_file, rhs, dump_flags);
874 fprintf (dump_file, "' with '");
875 print_generic_expr (dump_file, tmp, dump_flags);
876 fprintf (dump_file, "'\n");
879 return true;
882 return false;
885 /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
886 If so, we can change STMT into lhs = y which can later be copy
887 propagated. Similarly for negation.
889 This could trivially be formulated as a forward propagation
890 to immediate uses. However, we already had an implementation
891 from DOM which used backward propagation via the use-def links.
893 It turns out that backward propagation is actually faster as
894 there's less work to do for each NOT/NEG expression we find.
895 Backwards propagation needs to look at the statement in a single
896 backlink. Forward propagation needs to look at potentially more
897 than one forward link. */
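/* Illustrative example (editor's sketch, not part of the original
   sources; the SSA names are made up):

     x_2 = ~y_1;
     lhs_3 = ~x_2;   <-- STMT

   becomes

     lhs_3 = y_1;

   which copy propagation can clean up later; NEGATE_EXPR is handled
   the same way.  */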
899 static void
900 simplify_not_neg_expr (tree stmt)
902 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
903 tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
905 /* See if the RHS_DEF_STMT has the same form as our statement. */
906 if (TREE_CODE (rhs_def_stmt) == GIMPLE_MODIFY_STMT
907 && TREE_CODE (GIMPLE_STMT_OPERAND (rhs_def_stmt, 1)) == TREE_CODE (rhs))
909 tree rhs_def_operand =
910 TREE_OPERAND (GIMPLE_STMT_OPERAND (rhs_def_stmt, 1), 0);
912 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
913 if (TREE_CODE (rhs_def_operand) == SSA_NAME
914 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
916 GIMPLE_STMT_OPERAND (stmt, 1) = rhs_def_operand;
917 update_stmt (stmt);
922 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
923 the condition which we may be able to optimize better. */
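/* Illustrative example (editor's sketch, not part of the original
   sources): for

     tmp_2 = (int) c_1;   (c_1 has type char)
     switch (tmp_2) ...

   the widening cast preserves the value, so the switch condition is
   replaced with c_1 directly; the sign/precision checks below reject
   conversions that could change which case is selected.  */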
925 static void
926 simplify_switch_expr (tree stmt)
928 tree cond = SWITCH_COND (stmt);
929 tree def, to, ti;
931 /* The optimization that we really care about is removing unnecessary
932 casts. That will let us do much better in propagating the inferred
933 constant at the switch target. */
934 if (TREE_CODE (cond) == SSA_NAME)
936 def = SSA_NAME_DEF_STMT (cond);
937 if (TREE_CODE (def) == GIMPLE_MODIFY_STMT)
939 def = GIMPLE_STMT_OPERAND (def, 1);
940 if (TREE_CODE (def) == NOP_EXPR)
942 int need_precision;
943 bool fail;
945 def = TREE_OPERAND (def, 0);
947 #ifdef ENABLE_CHECKING
948 /* ??? Why was Jeff testing this? We are gimple... */
949 gcc_assert (is_gimple_val (def));
950 #endif
952 to = TREE_TYPE (cond);
953 ti = TREE_TYPE (def);
955 /* If we have an extension that preserves value, then we
956 can copy the source value into the switch. */
958 need_precision = TYPE_PRECISION (ti);
959 fail = false;
960 if (! INTEGRAL_TYPE_P (ti))
961 fail = true;
962 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
963 fail = true;
964 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
965 need_precision += 1;
966 if (TYPE_PRECISION (to) < need_precision)
967 fail = true;
969 if (!fail)
971 SWITCH_COND (stmt) = def;
972 update_stmt (stmt);
979 /* Main entry point for the forward propagation optimizer. */
981 static unsigned int
982 tree_ssa_forward_propagate_single_use_vars (void)
984 basic_block bb;
985 unsigned int todoflags = 0;
987 cfg_changed = false;
989 FOR_EACH_BB (bb)
991 block_stmt_iterator bsi;
993 /* Note we update BSI within the loop as necessary. */
994 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
996 tree stmt = bsi_stmt (bsi);
998 /* If this statement sets an SSA_NAME to an address,
999 try to propagate the address into the uses of the SSA_NAME. */
1000 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
1002 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
1003 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
1006 if (TREE_CODE (lhs) != SSA_NAME)
1008 bsi_next (&bsi);
1009 continue;
1012 if (TREE_CODE (rhs) == ADDR_EXPR
1013 /* Handle pointer conversions on invariant addresses
1014 as well, as this is valid gimple. */
1015 || (CONVERT_EXPR_P (rhs)
1016 && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR
1017 && POINTER_TYPE_P (TREE_TYPE (rhs))))
1019 STRIP_NOPS (rhs);
1020 if (!stmt_references_abnormal_ssa_name (stmt)
1021 && forward_propagate_addr_expr (lhs, rhs))
1023 release_defs (stmt);
1024 todoflags |= TODO_remove_unused_locals;
1025 bsi_remove (&bsi, true);
1027 else
1028 bsi_next (&bsi);
1030 else if ((TREE_CODE (rhs) == BIT_NOT_EXPR
1031 || TREE_CODE (rhs) == NEGATE_EXPR)
1032 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
1034 simplify_not_neg_expr (stmt);
1035 bsi_next (&bsi);
1037 else if (TREE_CODE (rhs) == COND_EXPR)
1039 int did_something;
1040 fold_defer_overflow_warnings ();
1041 did_something = forward_propagate_into_cond (rhs, stmt);
1042 if (did_something == 2)
1043 cfg_changed = true;
1044 fold_undefer_overflow_warnings (!TREE_NO_WARNING (rhs)
1045 && did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
1046 bsi_next (&bsi);
1048 else if (COMPARISON_CLASS_P (rhs))
1050 if (forward_propagate_comparison (rhs, stmt))
1052 release_defs (stmt);
1053 todoflags |= TODO_remove_unused_locals;
1054 bsi_remove (&bsi, true);
1056 else
1057 bsi_next (&bsi);
1059 else
1060 bsi_next (&bsi);
1062 else if (TREE_CODE (stmt) == SWITCH_EXPR)
1064 simplify_switch_expr (stmt);
1065 bsi_next (&bsi);
1067 else if (TREE_CODE (stmt) == COND_EXPR)
1069 int did_something;
1070 fold_defer_overflow_warnings ();
1071 did_something = forward_propagate_into_cond (stmt, stmt);
1072 if (did_something == 2)
1073 cfg_changed = true;
1074 fold_undefer_overflow_warnings (did_something, stmt,
1075 WARN_STRICT_OVERFLOW_CONDITIONAL);
1076 bsi_next (&bsi);
1078 else
1079 bsi_next (&bsi);
1083 if (cfg_changed)
1084 todoflags |= TODO_cleanup_cfg;
1085 return todoflags;
1089 static bool
1090 gate_forwprop (void)
1092 return 1;
1095 struct gimple_opt_pass pass_forwprop =
1098 GIMPLE_PASS,
1099 "forwprop", /* name */
1100 gate_forwprop, /* gate */
1101 tree_ssa_forward_propagate_single_use_vars, /* execute */
1102 NULL, /* sub */
1103 NULL, /* next */
1104 0, /* static_pass_number */
1105 TV_TREE_FORWPROP, /* tv_id */
1106 PROP_cfg | PROP_ssa, /* properties_required */
1107 0, /* properties_provided */
1108 0, /* properties_destroyed */
1109 0, /* todo_flags_start */
1110 TODO_dump_func
1111 | TODO_ggc_collect
1112 | TODO_update_ssa
1113 | TODO_verify_ssa /* todo_flags_finish */