gcc/tree-ssa-forwprop.c  [official-gcc.git, blob 638224a803566e8d9d5a40c4c4150102e4d892f8]
/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "timevar.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-dump.h"
#include "langhooks.h"

/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   Note carefully that after propagation the resulting statement
   must still be a proper gimple statement.  Right now we only
   perform propagations we know will result in valid gimple code.
   One day we'll want to generalize this code.
   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];
   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions
   (which lets us remove the cast), and we propagate a {NOT_EXPR,NEG_EXPR}
   definition into a subsequent {NOT_EXPR,NEG_EXPR} so that the pair cancels.

   This will (of course) be extended as other needs arise.  */

/* Set to true if we delete EH edges during the optimization.  */
static bool cfg_changed;

/* Given an SSA_NAME VAR, return true if and only if VAR is defined by
   a comparison.  */

static bool
ssa_name_defined_by_comparison_p (tree var)
{
  tree def = SSA_NAME_DEF_STMT (var);

  if (TREE_CODE (def) == GIMPLE_MODIFY_STMT)
    {
      tree rhs = GIMPLE_STMT_OPERAND (def, 1);
      return COMPARISON_CLASS_P (rhs);
    }

  return 0;
}

/* Forward propagate a single-use variable into COND once.  Return a
   new condition if successful.  Return NULL_TREE otherwise.  */

static tree
forward_propagate_into_cond_1 (tree cond, tree *test_var_p)
{
  tree new_cond = NULL_TREE;
  enum tree_code cond_code = TREE_CODE (cond);
  tree test_var = NULL_TREE;
  tree def;
  tree def_rhs;

  /* If the condition is not a lone variable or an equality test of an
     SSA_NAME against an integral constant, then we do not have an
     optimizable case.

     Note these conditions also ensure the COND_EXPR has no
     virtual operands or other side effects.  */
  if (cond_code != SSA_NAME
      && !((cond_code == EQ_EXPR || cond_code == NE_EXPR)
           && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME
           && CONSTANT_CLASS_P (TREE_OPERAND (cond, 1))
           && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (cond, 1)))))
    return NULL_TREE;

  /* Extract the single variable used in the test into TEST_VAR.  */
  if (cond_code == SSA_NAME)
    test_var = cond;
  else
    test_var = TREE_OPERAND (cond, 0);

  /* Now get the defining statement for TEST_VAR.  Skip this case if
     it's not defined by some GIMPLE_MODIFY_STMT.  */
  def = SSA_NAME_DEF_STMT (test_var);
  if (TREE_CODE (def) != GIMPLE_MODIFY_STMT)
    return NULL_TREE;

  def_rhs = GIMPLE_STMT_OPERAND (def, 1);

  /* If TEST_VAR is set by adding or subtracting a constant
     from an SSA_NAME, then it is interesting to us as we
     can adjust the constant in the conditional and thus
     eliminate the arithmetic operation.  */
  if (TREE_CODE (def_rhs) == PLUS_EXPR
      || TREE_CODE (def_rhs) == MINUS_EXPR)
    {
      tree op0 = TREE_OPERAND (def_rhs, 0);
      tree op1 = TREE_OPERAND (def_rhs, 1);

      /* The first operand must be an SSA_NAME and the second
         operand must be a constant.  */
      if (TREE_CODE (op0) != SSA_NAME
          || !CONSTANT_CLASS_P (op1)
          || !INTEGRAL_TYPE_P (TREE_TYPE (op1)))
        return NULL_TREE;

      /* Don't propagate if the first operand occurs in
         an abnormal PHI.  */
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op0))
        return NULL_TREE;

      if (has_single_use (test_var))
        {
          enum tree_code new_code;
          tree t;

          /* If the variable was defined via X + C, then we must
             subtract C from the constant in the conditional.
             Otherwise we add C to the constant in the
             conditional.  The result must fold into a valid
             gimple operand to be optimizable.  */
          new_code = (TREE_CODE (def_rhs) == PLUS_EXPR
                      ? MINUS_EXPR : PLUS_EXPR);
          t = int_const_binop (new_code, TREE_OPERAND (cond, 1), op1, 0);
          if (!is_gimple_val (t))
            return NULL_TREE;

          new_cond = build2 (cond_code, boolean_type_node, op0, t);
        }
    }
  /* These cases require comparisons of a naked SSA_NAME or
     comparison of an SSA_NAME against zero or one.  */
  else if (TREE_CODE (cond) == SSA_NAME
           || integer_zerop (TREE_OPERAND (cond, 1))
           || integer_onep (TREE_OPERAND (cond, 1)))
    {
      /* If TEST_VAR is set from a relational operation
         between two SSA_NAMEs or a combination of an SSA_NAME
         and a constant, then it is interesting.  */
      if (COMPARISON_CLASS_P (def_rhs))
        {
          tree op0 = TREE_OPERAND (def_rhs, 0);
          tree op1 = TREE_OPERAND (def_rhs, 1);

          /* Both operands of DEF_RHS must be SSA_NAMEs or
             constants.  */
          if ((TREE_CODE (op0) != SSA_NAME
               && !is_gimple_min_invariant (op0))
              || (TREE_CODE (op1) != SSA_NAME
                  && !is_gimple_min_invariant (op1)))
            return NULL_TREE;

          /* Don't propagate if the first operand occurs in
             an abnormal PHI.  */
          if (TREE_CODE (op0) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op0))
            return NULL_TREE;

          /* Don't propagate if the second operand occurs in
             an abnormal PHI.  */
          if (TREE_CODE (op1) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op1))
            return NULL_TREE;

          if (has_single_use (test_var))
            {
              /* TEST_VAR was set from a relational operator.  */
              new_cond = build2 (TREE_CODE (def_rhs),
                                 boolean_type_node, op0, op1);

              /* Invert the conditional if necessary.  */
              if ((cond_code == EQ_EXPR
                   && integer_zerop (TREE_OPERAND (cond, 1)))
                  || (cond_code == NE_EXPR
                      && integer_onep (TREE_OPERAND (cond, 1))))
                {
                  new_cond = invert_truthvalue (new_cond);

                  /* If we did not get a simple relational
                     expression or bare SSA_NAME, then we can
                     not optimize this case.  */
                  if (!COMPARISON_CLASS_P (new_cond)
                      && TREE_CODE (new_cond) != SSA_NAME)
                    new_cond = NULL_TREE;
                }
            }
        }

      /* If TEST_VAR is set from a TRUTH_NOT_EXPR, then it
         is interesting.  */
      else if (TREE_CODE (def_rhs) == TRUTH_NOT_EXPR)
        {
          enum tree_code new_code;

          def_rhs = TREE_OPERAND (def_rhs, 0);

          /* DEF_RHS must be an SSA_NAME or constant.  */
          if (TREE_CODE (def_rhs) != SSA_NAME
              && !is_gimple_min_invariant (def_rhs))
            return NULL_TREE;

          /* Don't propagate if the operand occurs in
             an abnormal PHI.  */
          if (TREE_CODE (def_rhs) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def_rhs))
            return NULL_TREE;

          if (cond_code == SSA_NAME
              || (cond_code == NE_EXPR
                  && integer_zerop (TREE_OPERAND (cond, 1)))
              || (cond_code == EQ_EXPR
                  && integer_onep (TREE_OPERAND (cond, 1))))
            new_code = EQ_EXPR;
          else
            new_code = NE_EXPR;

          new_cond = build2 (new_code, boolean_type_node, def_rhs,
                             fold_convert (TREE_TYPE (def_rhs),
                                           integer_zero_node));
        }
      /* If TEST_VAR was set from a cast of an integer type
         to a boolean type or a cast of a boolean to an
         integral, then it is interesting.  */
      else if (TREE_CODE (def_rhs) == NOP_EXPR
               || TREE_CODE (def_rhs) == CONVERT_EXPR)
        {
          tree outer_type;
          tree inner_type;

          outer_type = TREE_TYPE (def_rhs);
          inner_type = TREE_TYPE (TREE_OPERAND (def_rhs, 0));

          if ((TREE_CODE (outer_type) == BOOLEAN_TYPE
               && INTEGRAL_TYPE_P (inner_type))
              || (TREE_CODE (inner_type) == BOOLEAN_TYPE
                  && INTEGRAL_TYPE_P (outer_type)))
            ;
          else if (INTEGRAL_TYPE_P (outer_type)
                   && INTEGRAL_TYPE_P (inner_type)
                   && TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME
                   && ssa_name_defined_by_comparison_p (TREE_OPERAND (def_rhs,
                                                                      0)))
            ;
          else
            return NULL_TREE;

          /* Don't propagate if the operand occurs in
             an abnormal PHI.  */
          if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND
                                                  (def_rhs, 0)))
            return NULL_TREE;

          if (has_single_use (test_var))
            {
              enum tree_code new_code;
              tree new_arg;

              if (cond_code == SSA_NAME
                  || (cond_code == NE_EXPR
                      && integer_zerop (TREE_OPERAND (cond, 1)))
                  || (cond_code == EQ_EXPR
                      && integer_onep (TREE_OPERAND (cond, 1))))
                new_code = NE_EXPR;
              else
                new_code = EQ_EXPR;

              new_arg = TREE_OPERAND (def_rhs, 0);
              new_cond = build2 (new_code, boolean_type_node, new_arg,
                                 fold_convert (TREE_TYPE (new_arg),
                                               integer_zero_node));
            }
        }
    }

  *test_var_p = test_var;
  return new_cond;
}

/* COND is a condition of the form:

     x == const or x != const

   Look back to x's defining statement and see if x is defined as

     x = (type) y;

   If const is unchanged when we convert it to type, then we can build
   the equivalent expression:

     y == const or y != const

   Which may allow further optimizations.

   Return the equivalent comparison or NULL if no such equivalent comparison
   was found.  */
static tree
find_equivalent_equality_comparison (tree cond)
{
  tree op0 = TREE_OPERAND (cond, 0);
  tree op1 = TREE_OPERAND (cond, 1);
  tree def_stmt = SSA_NAME_DEF_STMT (op0);

  while (def_stmt
         && TREE_CODE (def_stmt) == GIMPLE_MODIFY_STMT
         && TREE_CODE (GIMPLE_STMT_OPERAND (def_stmt, 1)) == SSA_NAME)
    def_stmt = SSA_NAME_DEF_STMT (GIMPLE_STMT_OPERAND (def_stmt, 1));

  /* OP0 might have been a parameter, so first make sure it
     was defined by a GIMPLE_MODIFY_STMT.  */
  if (def_stmt && TREE_CODE (def_stmt) == GIMPLE_MODIFY_STMT)
    {
      tree def_rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);

      /* If either operand to the comparison is a pointer to
         a function, then we can not apply this optimization
         as some targets require function pointers to be
         canonicalized and in this case this optimization would
         eliminate a necessary canonicalization.  */
      if ((POINTER_TYPE_P (TREE_TYPE (op0))
           && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) == FUNCTION_TYPE)
          || (POINTER_TYPE_P (TREE_TYPE (op1))
              && TREE_CODE (TREE_TYPE (TREE_TYPE (op1))) == FUNCTION_TYPE))
        return NULL;

      /* Now make sure the RHS of the GIMPLE_MODIFY_STMT is a typecast.  */
      if ((TREE_CODE (def_rhs) == NOP_EXPR
           || TREE_CODE (def_rhs) == CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME)
        {
          tree def_rhs_inner = TREE_OPERAND (def_rhs, 0);
          tree def_rhs_inner_type = TREE_TYPE (def_rhs_inner);
          tree new;

          if (TYPE_PRECISION (def_rhs_inner_type)
              > TYPE_PRECISION (TREE_TYPE (def_rhs)))
            return NULL;

          /* If the inner type of the conversion is a pointer to
             a function, then we can not apply this optimization
             as some targets require function pointers to be
             canonicalized.  This optimization would result in
             canonicalization of the pointer when it was not originally
             needed/intended.  */
          if (POINTER_TYPE_P (def_rhs_inner_type)
              && TREE_CODE (TREE_TYPE (def_rhs_inner_type)) == FUNCTION_TYPE)
            return NULL;
          /* What we want to prove is that if we convert OP1 to
             the type of the object inside the NOP_EXPR, the
             result is still equal to OP1.

             If that is true, then build and return a new equivalent
             condition which uses the source of the typecast and the
             new constant (which has only changed its type).  */
          new = fold_build1 (TREE_CODE (def_rhs), def_rhs_inner_type, op1);
          STRIP_USELESS_TYPE_CONVERSION (new);
          if (is_gimple_val (new) && tree_int_cst_equal (new, op1))
            return build2 (TREE_CODE (cond), TREE_TYPE (cond),
                           def_rhs_inner, new);
        }
    }
  return NULL;
}

/* STMT is a COND_EXPR.

   This routine attempts to find equivalent forms of the condition
   which we may be able to optimize better.  */

static void
simplify_cond (tree stmt)
{
  tree cond = COND_EXPR_COND (stmt);

  if (COMPARISON_CLASS_P (cond))
    {
      tree op0 = TREE_OPERAND (cond, 0);
      tree op1 = TREE_OPERAND (cond, 1);

      if (TREE_CODE (op0) == SSA_NAME && is_gimple_min_invariant (op1))
        {
          /* First see if we have a test of an SSA_NAME against a constant
             where the SSA_NAME is defined by an earlier typecast which
             is irrelevant when performing tests against the given
             constant.  */
          if (TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
            {
              tree new_cond = find_equivalent_equality_comparison (cond);

              if (new_cond)
                {
                  COND_EXPR_COND (stmt) = new_cond;
                  update_stmt (stmt);
                }
            }
        }
    }
}

/* Forward propagate a single-use variable into COND_EXPR as many
   times as possible.  */

static void
forward_propagate_into_cond (tree cond_expr)
{
  gcc_assert (TREE_CODE (cond_expr) == COND_EXPR);

  while (1)
    {
      tree test_var = NULL_TREE;
      tree cond = COND_EXPR_COND (cond_expr);
      tree new_cond = forward_propagate_into_cond_1 (cond, &test_var);

      /* Return if unsuccessful.  */
      if (new_cond == NULL_TREE)
        break;

      /* Dump details.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, cond, dump_flags);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, new_cond, dump_flags);
          fprintf (dump_file, "'\n");
        }

      COND_EXPR_COND (cond_expr) = new_cond;
      update_stmt (cond_expr);

      if (has_zero_uses (test_var))
        {
          tree def = SSA_NAME_DEF_STMT (test_var);
          block_stmt_iterator bsi = bsi_for_stmt (def);
          bsi_remove (&bsi, true);
        }
    }

  /* There are further simplifications that can be performed
     on COND_EXPRs.  Specifically, when comparing an SSA_NAME
     against a constant where the SSA_NAME is the result of a
     conversion.  Perhaps this should be folded into the rest
     of the COND_EXPR simplification code.  */
  simplify_cond (cond_expr);
}

/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (tree stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && tree_purge_dead_eh_edges (bb_for_stmt (stmt)))
    cfg_changed = true;

  if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (GIMPLE_STMT_OPERAND (stmt, 1));

  mark_symbols_for_renaming (stmt);
}

/* STMT defines LHS which contains the address of the 0th element
   in an array.  USE_STMT uses LHS to compute the address of an
   arbitrary element within the array.  The (variable) byte offset
   of the element is contained in OFFSET.

   We walk back through the use-def chains of OFFSET to verify that
   it is indeed computing the offset of an element within the array
   and extract the index corresponding to the given byte offset.

   We then try to fold the entire address expression into a form
   &array[index].

   If we are successful, we replace the right hand side of USE_STMT
   with the new address computation.  */
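/* For instance (an illustrative GIMPLE sketch; the names and the element
   size of 4 are assumed):

     ptr = &x[0];                        <-- STMT, defines LHS (ptr)
     off.1 = i * 4;
     off.2 = (long unsigned int) off.1;  <-- OFFSET, the converted byte offset
     ptr2 = ptr + off.2;                 <-- USE_STMT

   If the multiplier matches the element size of X, USE_STMT is rewritten as

     ptr2 = &x[i];  */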
static bool
forward_propagate_addr_into_variable_array_index (tree offset, tree lhs,
                                                  tree stmt, tree use_stmt)
{
  tree index;

  /* The offset must be defined by a simple GIMPLE_MODIFY_STMT statement.  */
  if (TREE_CODE (offset) != GIMPLE_MODIFY_STMT)
    return false;

  /* The RHS of the statement which defines OFFSET must be a gimple
     cast of another SSA_NAME.  */
  offset = GIMPLE_STMT_OPERAND (offset, 1);
  if (!is_gimple_cast (offset))
    return false;

  offset = TREE_OPERAND (offset, 0);
  if (TREE_CODE (offset) != SSA_NAME)
    return false;

  /* Get the defining statement of the offset before type
     conversion.  */
  offset = SSA_NAME_DEF_STMT (offset);

  /* The statement which defines OFFSET before type conversion
     must be a simple GIMPLE_MODIFY_STMT.  */
  if (TREE_CODE (offset) != GIMPLE_MODIFY_STMT)
    return false;

  /* The RHS of the statement which defines OFFSET must be a
     multiplication of an object by the size of the array elements.
     This implicitly verifies that the size of the array elements
     is constant.  */
  offset = GIMPLE_STMT_OPERAND (offset, 1);
  if (TREE_CODE (offset) != MULT_EXPR
      || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
      || !simple_cst_equal (TREE_OPERAND (offset, 1),
                            TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (lhs)))))
    return false;

  /* The first operand to the MULT_EXPR is the desired index.  */
  index = TREE_OPERAND (offset, 0);

  /* Replace the pointer addition with array indexing.  */
  GIMPLE_STMT_OPERAND (use_stmt, 1)
    = unshare_expr (GIMPLE_STMT_OPERAND (stmt, 1));
  TREE_OPERAND (TREE_OPERAND (GIMPLE_STMT_OPERAND (use_stmt, 1), 0), 1)
    = index;

  /* That should have created gimple, so there is no need to
     record information to undo the propagation.  */
  fold_stmt_inplace (use_stmt);
  tidy_after_forward_propagate_addr (use_stmt);
  return true;
}

/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   CHANGED is an optional pointer to a boolean variable set to true if
   either the LHS or RHS was changed in the USE_STMT.

   Return true if the propagation was successful (the propagation may
   be only partially successful, yet things may still have been changed).  */
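/* For instance (an illustrative sketch; the names are assumed):

     ptr = &x->y;    <-- STMT
     *ptr = 1;       <-- USE_STMT, INDIRECT_REF of NAME on the LHS

   After propagation and folding, USE_STMT becomes

     x->y = 1;

   The RHS case is analogous, e.g. 'res = *ptr' becoming 'res = x->y'.  */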
static bool
forward_propagate_addr_expr_1 (tree stmt, tree use_stmt, bool *changed)
{
  tree name = GIMPLE_STMT_OPERAND (stmt, 0);
  tree lhs, rhs, array_ref;

  /* Strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  lhs = GIMPLE_STMT_OPERAND (use_stmt, 0);
  while (TREE_CODE (lhs) == COMPONENT_REF || TREE_CODE (lhs) == ARRAY_REF)
    lhs = TREE_OPERAND (lhs, 0);

  /* Now see if the LHS node is an INDIRECT_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == INDIRECT_REF && TREE_OPERAND (lhs, 0) == name)
    {
      /* This should always succeed in creating gimple, so there is
         no need to save enough state to undo this propagation.  */
      TREE_OPERAND (lhs, 0) = unshare_expr (GIMPLE_STMT_OPERAND (stmt, 1));
      fold_stmt_inplace (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      if (changed)
        *changed = true;
    }

  /* Trivial case.  The use statement could be a trivial copy.  We
     go ahead and handle that case here since it's trivial and
     removes the need to run copy-prop before this pass to get
     the best results.  Also note that by handling this case here
     we can catch some cascading effects, ie the single use is
     in a copy, and the copy is used later by a single INDIRECT_REF
     for example.  */
  else if (TREE_CODE (lhs) == SSA_NAME
           && GIMPLE_STMT_OPERAND (use_stmt, 1) == name)
    {
      GIMPLE_STMT_OPERAND (use_stmt, 1)
        = unshare_expr (GIMPLE_STMT_OPERAND (stmt, 1));
      tidy_after_forward_propagate_addr (use_stmt);
      if (changed)
        *changed = true;
      return true;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  rhs = GIMPLE_STMT_OPERAND (use_stmt, 1);
  while (TREE_CODE (rhs) == COMPONENT_REF
         || TREE_CODE (rhs) == ARRAY_REF
         || TREE_CODE (rhs) == ADDR_EXPR)
    rhs = TREE_OPERAND (rhs, 0);

  /* Now see if the RHS node is an INDIRECT_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == INDIRECT_REF && TREE_OPERAND (rhs, 0) == name)
    {
      /* This should always succeed in creating gimple, so there is
         no need to save enough state to undo this propagation.  */
      TREE_OPERAND (rhs, 0) = unshare_expr (GIMPLE_STMT_OPERAND (stmt, 1));
      fold_stmt_inplace (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      if (changed)
        *changed = true;
      return true;
    }

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0);
  if (TREE_CODE (array_ref) != ARRAY_REF
      || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
      || !integer_zerop (TREE_OPERAND (array_ref, 1)))
    return false;
  /* The use of the ADDR_EXPR must be a PLUS_EXPR, or else there
     is nothing to do.  */
  if (TREE_CODE (rhs) != PLUS_EXPR)
    return false;
  /* Try to optimize &x[0] + C where C is a multiple of the size
     of the elements in X into &x[C/element size].  */
  if (TREE_OPERAND (rhs, 0) == name
      && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
    {
      tree orig = unshare_expr (rhs);
      TREE_OPERAND (rhs, 0) = unshare_expr (GIMPLE_STMT_OPERAND (stmt, 1));

      /* If folding succeeds, then we have just exposed new variables
         in USE_STMT which will need to be renamed.  If folding fails,
         then we need to put everything back the way it was.  */
      if (fold_stmt_inplace (use_stmt))
        {
          tidy_after_forward_propagate_addr (use_stmt);
          if (changed)
            *changed = true;
          return true;
        }
      else
        {
          GIMPLE_STMT_OPERAND (use_stmt, 1) = orig;
          update_stmt (use_stmt);
          return false;
        }
    }

  /* Try to optimize &x[0] + OFFSET where OFFSET is defined by
     converting a multiplication of an index by the size of the
     array elements, then the result is converted into the proper
     type for the arithmetic.  */
  if (TREE_OPERAND (rhs, 0) == name
      && TREE_CODE (TREE_OPERAND (rhs, 1)) == SSA_NAME
      /* Avoid problems with IVopts creating PLUS_EXPRs with a
         different type than their operands.  */
      && lang_hooks.types_compatible_p (TREE_TYPE (name), TREE_TYPE (rhs)))
    {
      bool res;
      tree offset_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 1));

      res = forward_propagate_addr_into_variable_array_index (offset_stmt, lhs,
                                                               stmt, use_stmt);
      if (res && changed)
        *changed = true;
      return res;
    }

  /* Same as the previous case, except the operands of the PLUS_EXPR
     were reversed.  */
  if (TREE_OPERAND (rhs, 1) == name
      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
      /* Avoid problems with IVopts creating PLUS_EXPRs with a
         different type than their operands.  */
      && lang_hooks.types_compatible_p (TREE_TYPE (name), TREE_TYPE (rhs)))
    {
      bool res;
      tree offset_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
      res = forward_propagate_addr_into_variable_array_index (offset_stmt, lhs,
                                                               stmt, use_stmt);
      if (res && changed)
        *changed = true;
      return res;
    }

  return false;
}

/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
   SOME is a pointer to a boolean value indicating whether we
   propagated the address expression anywhere.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.
   Returns true if all uses have been propagated into.  */
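/* For instance (illustrative):

     ptr = &a.b;     <-- STMT
     x = *ptr;
     *ptr = y;

   If both uses of ptr can be rewritten to reference a.b directly, this
   returns true and the caller may remove the now-dead definition of ptr.  */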
static bool
forward_propagate_addr_expr (tree stmt, bool *some)
{
  int stmt_loop_depth = bb_for_stmt (stmt)->loop_depth;
  tree name = GIMPLE_STMT_OPERAND (stmt, 0);
  imm_use_iterator iter;
  tree use_stmt;
  bool all = true;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (TREE_CODE (use_stmt) != GIMPLE_MODIFY_STMT)
        {
          all = false;
          continue;
        }

      /* If the use is in a deeper loop nest, then we do not want
         to propagate the ADDR_EXPR into the loop as that is likely
         adding expression evaluations into the loop.  */
      if (bb_for_stmt (use_stmt)->loop_depth > stmt_loop_depth)
        {
          all = false;
          continue;
        }

      push_stmt_changes (&use_stmt);

      result = forward_propagate_addr_expr_1 (stmt, use_stmt, some);
      *some |= result;
      all &= result;

      pop_stmt_changes (&use_stmt);
    }

  return all;
}

/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
   If so, we can change STMT into lhs = y which can later be copy
   propagated.  Similarly for negation.

   This could trivially be formulated as a forward propagation
   to immediate uses.  However, we already had an implementation
   from DOM which used backward propagation via the use-def links.

   It turns out that backward propagation is actually faster as
   there's less work to do for each NOT/NEG expression we find.
   Backwards propagation needs to look at the statement in a single
   backlink.  Forward propagation needs to look at potentially more
   than one forward link.  */
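/* For instance (illustrative), the negation case:

     x = -y;
     lhs = -x;    <-- STMT

   STMT is rewritten as

     lhs = y;

   and x's definition becomes dead if this was its only use, to be
   cleaned up by later passes.  */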
static void
simplify_not_neg_expr (tree stmt)
{
  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
  tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));

  /* See if the RHS_DEF_STMT has the same form as our statement.  */
  if (TREE_CODE (rhs_def_stmt) == GIMPLE_MODIFY_STMT
      && TREE_CODE (GIMPLE_STMT_OPERAND (rhs_def_stmt, 1)) == TREE_CODE (rhs))
    {
      tree rhs_def_operand =
        TREE_OPERAND (GIMPLE_STMT_OPERAND (rhs_def_stmt, 1), 0);

      /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
      if (TREE_CODE (rhs_def_operand) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
        {
          GIMPLE_STMT_OPERAND (stmt, 1) = rhs_def_operand;
          update_stmt (stmt);
        }
    }
}

/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */
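/* For instance (an illustrative sketch; the types are assumed):

     unsigned char c = ...;
     switch ((int) c) { ... }

   The widening cast from 'unsigned char' to 'int' preserves every value
   of c, so the switch can test c directly:

     switch (c) { ... }  */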
static void
simplify_switch_expr (tree stmt)
{
  tree cond = SWITCH_COND (stmt);
  tree def, to, ti;

  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      def = SSA_NAME_DEF_STMT (cond);
      if (TREE_CODE (def) == GIMPLE_MODIFY_STMT)
        {
          def = GIMPLE_STMT_OPERAND (def, 1);
          if (TREE_CODE (def) == NOP_EXPR)
            {
              int need_precision;
              bool fail;

              def = TREE_OPERAND (def, 0);

#ifdef ENABLE_CHECKING
              /* ??? Why was Jeff testing this?  We are gimple...  */
              gcc_assert (is_gimple_val (def));
#endif

              to = TREE_TYPE (cond);
              ti = TREE_TYPE (def);

              /* If we have an extension that preserves value, then we
                 can copy the source value into the switch.  */

              need_precision = TYPE_PRECISION (ti);
              fail = false;
              if (! INTEGRAL_TYPE_P (ti))
                fail = true;
              else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
                fail = true;
              else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
                need_precision += 1;
              if (TYPE_PRECISION (to) < need_precision)
                fail = true;

              if (!fail)
                {
                  SWITCH_COND (stmt) = def;
                  update_stmt (stmt);
                }
            }
        }
    }
}

/* Main entry point for the forward propagation optimizer.  */

static unsigned int
tree_ssa_forward_propagate_single_use_vars (void)
{
  basic_block bb;
  unsigned int todoflags = 0;

  cfg_changed = false;

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi;

      /* Note we update BSI within the loop as necessary.  */
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
        {
          tree stmt = bsi_stmt (bsi);

          /* If this statement sets an SSA_NAME to an address,
             try to propagate the address into the uses of the SSA_NAME.  */
          if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
            {
              tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
              tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);

              if (TREE_CODE (lhs) != SSA_NAME)
                {
                  bsi_next (&bsi);
                  continue;
                }

              if (TREE_CODE (rhs) == ADDR_EXPR)
                {
                  bool some = false;
                  if (forward_propagate_addr_expr (stmt, &some))
                    bsi_remove (&bsi, true);
                  else
                    bsi_next (&bsi);
                  if (some)
                    todoflags |= TODO_update_smt_usage;
                }
              else if ((TREE_CODE (rhs) == BIT_NOT_EXPR
                        || TREE_CODE (rhs) == NEGATE_EXPR)
                       && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                {
                  simplify_not_neg_expr (stmt);
                  bsi_next (&bsi);
                }
              else
                bsi_next (&bsi);
            }
          else if (TREE_CODE (stmt) == SWITCH_EXPR)
            {
              simplify_switch_expr (stmt);
              bsi_next (&bsi);
            }
          else if (TREE_CODE (stmt) == COND_EXPR)
            {
              forward_propagate_into_cond (stmt);
              bsi_next (&bsi);
            }
          else
            bsi_next (&bsi);
        }
    }

  if (cfg_changed)
    cleanup_tree_cfg ();
  return todoflags;
}

static bool
gate_forwprop (void)
{
  return 1;
}

struct tree_opt_pass pass_forwprop = {
  "forwprop",				/* name */
  gate_forwprop,			/* gate */
  tree_ssa_forward_propagate_single_use_vars,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_FORWPROP,			/* tv_id */
  PROP_cfg | PROP_ssa
    | PROP_alias,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
  | TODO_ggc_collect
  | TODO_update_ssa
  | TODO_verify_ssa,			/* todo_flags_finish */
  0					/* letter */
};