gcc/tree-ssa-forwprop.c
1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "ggc.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "timevar.h"
30 #include "diagnostic.h"
31 #include "tree-flow.h"
32 #include "tree-pass.h"
33 #include "tree-dump.h"
34 #include "langhooks.h"
35 #include "flags.h"
36 #include "gimple.h"
38 /* This pass propagates the RHS of assignment statements into use
39 sites of the LHS of the assignment. It's basically a specialized
40 form of tree combination. It is hoped all of this can disappear
41 when we have a generalized tree combiner.
43 One class of common cases we handle is forward propagating a single use
44 variable into a COND_EXPR.
46 bb0:
47 x = a COND b;
48 if (x) goto ... else goto ...
50 Will be transformed into:
52 bb0:
53 if (a COND b) goto ... else goto ...
55 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
57 Or (assuming c1 and c2 are constants):
59 bb0:
60 x = a + c1;
61 if (x EQ/NEQ c2) goto ... else goto ...
63 Will be transformed into:
65 bb0:
66 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
68 Similarly for x = a - c1.
72 bb0:
73 x = !a
74 if (x) goto ... else goto ...
76 Will be transformed into:
78 bb0:
79 if (a == 0) goto ... else goto ...
81 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
82 For these cases, we propagate A into all, possibly more than one,
83 COND_EXPRs that use X.
87 bb0:
88 x = (typecast) a
89 if (x) goto ... else goto ...
91 Will be transformed into:
93 bb0:
94 if (a != 0) goto ... else goto ...
   96    (Assuming a has an integral type and x is a boolean, or x has an
   97    integral type and a is a boolean.)
99 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
100 For these cases, we propagate A into all, possibly more than one,
101 COND_EXPRs that use X.
103 In addition to eliminating the variable and the statement which assigns
104 a value to the variable, we may be able to later thread the jump without
105 adding insane complexity in the dominator optimizer.
107 Also note these transformations can cascade. We handle this by having
108 a worklist of COND_EXPR statements to examine. As we make a change to
109 a statement, we put it back on the worklist to examine on the next
110 iteration of the main loop.
112 A second class of propagation opportunities arises for ADDR_EXPR
113 nodes.
115 ptr = &x->y->z;
116 res = *ptr;
118 Will get turned into
120 res = x->y->z;
123 ptr = (type1*)&type2var;
124 res = *ptr
126 Will get turned into (if type1 and type2 are the same size
  127    and neither has volatile on it):
128 res = VIEW_CONVERT_EXPR<type1>(type2var)
132 ptr = &x[0];
133 ptr2 = ptr + <constant>;
135 Will get turned into
137 ptr2 = &x[constant/elementsize];
141 ptr = &x[0];
142 offset = index * element_size;
143 offset_p = (pointer) offset;
144 ptr2 = ptr + offset_p
146 Will get turned into:
148 ptr2 = &x[index];
151 ssa = (int) decl
152 res = ssa & 1
154 Provided that decl has known alignment >= 2, will get turned into
156 res = 0
  158    We also propagate casts into SWITCH_EXPR and COND_EXPR conditions so
  159    that the cast can be removed, and we propagate a {NOT_EXPR,NEG_EXPR}
  160    into a subsequent {NOT_EXPR,NEG_EXPR}.
162 This will (of course) be extended as other needs arise. */
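/* As a concrete illustration of the COND_EXPR case above (a sketch only;
   the function and variable names are hypothetical), source such as

     int
     use_cond (int a, int b)
     {
       _Bool x = a < b;
       if (x)
         return 1;
       return 0;
     }

   has the single-use X forward propagated so that the condition becomes
   "if (a < b)", after which the definition of X is dead and can be
   removed.  */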
164 static bool forward_propagate_addr_expr (tree name, tree rhs);
166 /* Set to true if we delete EH edges during the optimization. */
167 static bool cfg_changed;
169 static tree rhs_to_tree (tree type, gimple stmt);
  171 /* Get the next statement we can propagate NAME's value into, skipping
  172    trivial copies.  Returns the statement that is suitable as a
  173    propagation destination, or NULL if there is no such statement.
  174    This only returns destinations in a single-use chain.  If FINAL_NAME_P
  175    is non-NULL, it is set to the ssa name that represents the use.  */
177 static gimple
178 get_prop_dest_stmt (tree name, tree *final_name_p)
180 use_operand_p use;
181 gimple use_stmt;
183 do {
184 /* If name has multiple uses, bail out. */
185 if (!single_imm_use (name, &use, &use_stmt))
186 return NULL;
188 /* If this is not a trivial copy, we found it. */
189 if (!gimple_assign_ssa_name_copy_p (use_stmt)
190 || gimple_assign_rhs1 (use_stmt) != name)
191 break;
193 /* Continue searching uses of the copy destination. */
194 name = gimple_assign_lhs (use_stmt);
195 } while (1);
197 if (final_name_p)
198 *final_name_p = name;
200 return use_stmt;
  203 /* Get the statement we can propagate from into NAME, skipping
  204    trivial copies.  Returns the statement which defines the
  205    propagation source, or NULL if there is no such statement.
206 If SINGLE_USE_ONLY is set considers only sources which have
207 a single use chain up to NAME. If SINGLE_USE_P is non-null,
208 it is set to whether the chain to NAME is a single use chain
209 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
211 static gimple
212 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
214 bool single_use = true;
216 do {
217 gimple def_stmt = SSA_NAME_DEF_STMT (name);
219 if (!has_single_use (name))
221 single_use = false;
222 if (single_use_only)
223 return NULL;
226 /* If name is defined by a PHI node or is the default def, bail out. */
227 if (!is_gimple_assign (def_stmt))
228 return NULL;
230 /* If def_stmt is not a simple copy, we possibly found it. */
231 if (!gimple_assign_ssa_name_copy_p (def_stmt))
233 tree rhs;
235 if (!single_use_only && single_use_p)
236 *single_use_p = single_use;
238 /* We can look through pointer conversions in the search
239 for a useful stmt for the comparison folding. */
240 rhs = gimple_assign_rhs1 (def_stmt);
241 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
242 && TREE_CODE (rhs) == SSA_NAME
243 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
244 && POINTER_TYPE_P (TREE_TYPE (rhs)))
245 name = rhs;
246 else
247 return def_stmt;
249 else
251 /* Continue searching the def of the copy source name. */
252 name = gimple_assign_rhs1 (def_stmt);
254 } while (1);
257 /* Checks if the destination ssa name in DEF_STMT can be used as
258 propagation source. Returns true if so, otherwise false. */
260 static bool
261 can_propagate_from (gimple def_stmt)
263 use_operand_p use_p;
264 ssa_op_iter iter;
266 gcc_assert (is_gimple_assign (def_stmt));
268 /* If the rhs has side-effects we cannot propagate from it. */
269 if (gimple_has_volatile_ops (def_stmt))
270 return false;
272 /* If the rhs is a load we cannot propagate from it. */
273 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
274 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
275 return false;
  277   /* Constants can always be propagated.  */
278 if (gimple_assign_single_p (def_stmt)
279 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
280 return true;
282 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
283 FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_USE)
284 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
285 return false;
  287   /* If the definition is a conversion of a pointer to a function type,
  288      then we cannot apply the optimization, as some targets require
  289      function pointers to be canonicalized and this optimization could
  290      eliminate a necessary canonicalization.  */
291 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
293 tree rhs = gimple_assign_rhs1 (def_stmt);
294 if (POINTER_TYPE_P (TREE_TYPE (rhs))
295 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
296 return false;
299 return true;
  302 /* Remove the chain of copies ending in NAME, walking along the defs
  303    but stopping at (and not removing) UP_TO_STMT.  If NAME was replaced
  304    in its only use then this function can be used to clean up
  305    dead stmts.  Returns true if UP_TO_STMT can be removed
  306    as well, otherwise false.  */
308 static bool
309 remove_prop_source_from_use (tree name, gimple up_to_stmt)
311 gimple_stmt_iterator gsi;
312 gimple stmt;
314 do {
315 if (!has_zero_uses (name))
316 return false;
318 stmt = SSA_NAME_DEF_STMT (name);
319 if (stmt == up_to_stmt)
320 return true;
322 gsi = gsi_for_stmt (stmt);
323 release_defs (stmt);
324 gsi_remove (&gsi, true);
326 name = (gimple_assign_copy_p (stmt)) ? gimple_assign_rhs1 (stmt) : NULL;
327 } while (name && TREE_CODE (name) == SSA_NAME);
329 return false;
332 /* Return the rhs of a gimple_assign STMT in a form of a single tree,
333 converted to type TYPE.
335 This should disappear, but is needed so we can combine expressions and use
336 the fold() interfaces. Long term, we need to develop folding and combine
  337    routines that deal exclusively with gimple.  */
339 static tree
340 rhs_to_tree (tree type, gimple stmt)
342 location_t loc = gimple_location (stmt);
343 enum tree_code code = gimple_assign_rhs_code (stmt);
344 if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
345 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
346 gimple_assign_rhs2 (stmt));
347 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
348 return build1 (code, type, gimple_assign_rhs1 (stmt));
349 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
350 return gimple_assign_rhs1 (stmt);
351 else
352 gcc_unreachable ();
355 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
356 the folded result in a form suitable for COND_EXPR_COND or
357 NULL_TREE, if there is no suitable simplified form. If
358 INVARIANT_ONLY is true only gimple_min_invariant results are
359 considered simplified. */
361 static tree
362 combine_cond_expr_cond (location_t loc, enum tree_code code, tree type,
363 tree op0, tree op1, bool invariant_only)
365 tree t;
367 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
369 t = fold_binary_loc (loc, code, type, op0, op1);
370 if (!t)
371 return NULL_TREE;
373 /* Require that we got a boolean type out if we put one in. */
374 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
376 /* Canonicalize the combined condition for use in a COND_EXPR. */
377 t = canonicalize_cond_expr_cond (t);
379 /* Bail out if we required an invariant but didn't get one. */
380 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
381 return NULL_TREE;
383 return t;
386 /* Propagate from the ssa name definition statements of COND_EXPR
387 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
388 Returns zero if no statement was changed, one if there were
389 changes and two if cfg_cleanup needs to run.
391 This must be kept in sync with forward_propagate_into_cond. */
393 static int
394 forward_propagate_into_gimple_cond (gimple stmt)
396 int did_something = 0;
397 location_t loc = gimple_location (stmt);
399 do {
400 tree tmp = NULL_TREE;
401 tree name, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
402 gimple def_stmt;
403 bool single_use0_p = false, single_use1_p = false;
404 enum tree_code code = gimple_cond_code (stmt);
406 /* We can do tree combining on SSA_NAME and comparison expressions. */
407 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) == tcc_comparison
408 && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME)
  410       /* For comparisons, use the first operand; that is likely to
  411          simplify comparisons against constants.  */
412 name = gimple_cond_lhs (stmt);
413 def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
414 if (def_stmt && can_propagate_from (def_stmt))
416 tree op1 = gimple_cond_rhs (stmt);
417 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
418 tmp = combine_cond_expr_cond (loc, code, boolean_type_node, rhs0,
419 op1, !single_use0_p);
421 /* If that wasn't successful, try the second operand. */
422 if (tmp == NULL_TREE
423 && TREE_CODE (gimple_cond_rhs (stmt)) == SSA_NAME)
425 tree op0 = gimple_cond_lhs (stmt);
426 name = gimple_cond_rhs (stmt);
427 def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
428 if (!def_stmt || !can_propagate_from (def_stmt))
429 return did_something;
431 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
432 tmp = combine_cond_expr_cond (loc, code, boolean_type_node, op0,
433 rhs1, !single_use1_p);
435 /* If that wasn't successful either, try both operands. */
436 if (tmp == NULL_TREE
437 && rhs0 != NULL_TREE
438 && rhs1 != NULL_TREE)
439 tmp = combine_cond_expr_cond (loc, code, boolean_type_node, rhs0,
440 fold_convert_loc (loc,
441 TREE_TYPE (rhs0),
442 rhs1),
443 !(single_use0_p && single_use1_p));
446 if (tmp)
448 if (dump_file && tmp)
450 tree cond = build2 (gimple_cond_code (stmt),
451 boolean_type_node,
452 gimple_cond_lhs (stmt),
453 gimple_cond_rhs (stmt));
454 fprintf (dump_file, " Replaced '");
455 print_generic_expr (dump_file, cond, 0);
456 fprintf (dump_file, "' with '");
457 print_generic_expr (dump_file, tmp, 0);
458 fprintf (dump_file, "'\n");
461 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
462 update_stmt (stmt);
464 /* Remove defining statements. */
465 remove_prop_source_from_use (name, NULL);
467 if (is_gimple_min_invariant (tmp))
468 did_something = 2;
469 else if (did_something == 0)
470 did_something = 1;
472 /* Continue combining. */
473 continue;
476 break;
477 } while (1);
479 return did_something;
483 /* Propagate from the ssa name definition statements of COND_EXPR
484 in the rhs of statement STMT into the conditional if that simplifies it.
485 Returns zero if no statement was changed, one if there were
486 changes and two if cfg_cleanup needs to run.
488 This must be kept in sync with forward_propagate_into_gimple_cond. */
490 static int
491 forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
493 gimple stmt = gsi_stmt (*gsi_p);
494 location_t loc = gimple_location (stmt);
495 int did_something = 0;
497 do {
498 tree tmp = NULL_TREE;
499 tree cond = gimple_assign_rhs1 (stmt);
500 tree name, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
501 gimple def_stmt;
502 bool single_use0_p = false, single_use1_p = false;
504 /* We can do tree combining on SSA_NAME and comparison expressions. */
505 if (COMPARISON_CLASS_P (cond)
506 && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME)
  508       /* For comparisons, use the first operand; that is likely to
  509          simplify comparisons against constants.  */
510 name = TREE_OPERAND (cond, 0);
511 def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
512 if (def_stmt && can_propagate_from (def_stmt))
514 tree op1 = TREE_OPERAND (cond, 1);
515 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
516 tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
517 boolean_type_node,
518 rhs0, op1, !single_use0_p);
520 /* If that wasn't successful, try the second operand. */
521 if (tmp == NULL_TREE
522 && TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME)
524 tree op0 = TREE_OPERAND (cond, 0);
525 name = TREE_OPERAND (cond, 1);
526 def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
527 if (!def_stmt || !can_propagate_from (def_stmt))
528 return did_something;
530 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
531 tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
532 boolean_type_node,
533 op0, rhs1, !single_use1_p);
535 /* If that wasn't successful either, try both operands. */
536 if (tmp == NULL_TREE
537 && rhs0 != NULL_TREE
538 && rhs1 != NULL_TREE)
539 tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
540 boolean_type_node,
541 rhs0,
542 fold_convert_loc (loc,
543 TREE_TYPE (rhs0),
544 rhs1),
545 !(single_use0_p && single_use1_p));
547 else if (TREE_CODE (cond) == SSA_NAME)
549 name = cond;
550 def_stmt = get_prop_source_stmt (name, true, NULL);
  551           if (!def_stmt || !can_propagate_from (def_stmt))
552 return did_something;
554 rhs0 = gimple_assign_rhs1 (def_stmt);
555 tmp = combine_cond_expr_cond (loc, NE_EXPR, boolean_type_node, rhs0,
556 build_int_cst (TREE_TYPE (rhs0), 0),
557 false);
560 if (tmp)
562 if (dump_file && tmp)
564 fprintf (dump_file, " Replaced '");
565 print_generic_expr (dump_file, cond, 0);
566 fprintf (dump_file, "' with '");
567 print_generic_expr (dump_file, tmp, 0);
568 fprintf (dump_file, "'\n");
571 gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp));
572 stmt = gsi_stmt (*gsi_p);
573 update_stmt (stmt);
575 /* Remove defining statements. */
576 remove_prop_source_from_use (name, NULL);
578 if (is_gimple_min_invariant (tmp))
579 did_something = 2;
580 else if (did_something == 0)
581 did_something = 1;
583 /* Continue combining. */
584 continue;
587 break;
588 } while (1);
590 return did_something;
593 /* We've just substituted an ADDR_EXPR into stmt. Update all the
594 relevant data structures to match. */
596 static void
597 tidy_after_forward_propagate_addr (gimple stmt)
599 /* We may have turned a trapping insn into a non-trapping insn. */
600 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
601 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
602 cfg_changed = true;
604 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
605 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
608 /* DEF_RHS contains the address of the 0th element in an array.
  609    USE_STMT uses the type of DEF_RHS to compute the address of an
610 arbitrary element within the array. The (variable) byte offset
611 of the element is contained in OFFSET.
613 We walk back through the use-def chains of OFFSET to verify that
614 it is indeed computing the offset of an element within the array
615 and extract the index corresponding to the given byte offset.
617 We then try to fold the entire address expression into a form
618 &array[index].
620 If we are successful, we replace the right hand side of USE_STMT
621 with the new address computation. */
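/* In addition to the basic case shown in the file header comment, the
   offset may need to be re-associated first.  A sketch with hypothetical
   SSA names, ptr_5 holding &a[0] and an element size of 4:

     offset_1 = i_2 * 4;
     offset_3 = offset_1 + 16;
     ptr2_4 = ptr_5 + offset_3;

   Because 16 is also a multiple of the element size, the recovered index
   is i_2 + 4 and the use statement effectively becomes
   ptr2_4 = &a[i_2 + 4];  */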
623 static bool
624 forward_propagate_addr_into_variable_array_index (tree offset,
625 tree def_rhs,
626 gimple_stmt_iterator *use_stmt_gsi)
628 tree index, tunit;
629 gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
630 tree tmp;
632 tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
633 if (!host_integerp (tunit, 1))
634 return false;
636 /* Get the offset's defining statement. */
637 offset_def = SSA_NAME_DEF_STMT (offset);
  639   /* Try to find an expression for a proper index.  This is either a
  640      multiplication by the element size or just the ssa name we arrived
  641      at, in case the element size is one.  In that case, however, we do not
  642      allow multiplications because they could be computing an index into a
  643      higher-level dimension (PR 37861).  */
644 if (integer_onep (tunit))
646 if (is_gimple_assign (offset_def)
647 && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
648 return false;
650 index = offset;
652 else
654 /* The statement which defines OFFSET before type conversion
655 must be a simple GIMPLE_ASSIGN. */
656 if (!is_gimple_assign (offset_def))
657 return false;
659 /* The RHS of the statement which defines OFFSET must be a
660 multiplication of an object by the size of the array elements.
661 This implicitly verifies that the size of the array elements
662 is constant. */
663 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
664 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
665 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
667 /* The first operand to the MULT_EXPR is the desired index. */
668 index = gimple_assign_rhs1 (offset_def);
670 /* If we have idx * tunit + CST * tunit re-associate that. */
671 else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
672 || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
673 && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
674 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
675 && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
676 gimple_assign_rhs2 (offset_def),
677 tunit)) != NULL_TREE)
679 gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
680 if (is_gimple_assign (offset_def2)
681 && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
682 && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
683 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
685 index = fold_build2 (gimple_assign_rhs_code (offset_def),
686 TREE_TYPE (offset),
687 gimple_assign_rhs1 (offset_def2), tmp);
689 else
690 return false;
692 else
693 return false;
696 /* Replace the pointer addition with array indexing. */
697 index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
698 true, GSI_SAME_STMT);
699 gimple_assign_set_rhs_from_tree (use_stmt_gsi, unshare_expr (def_rhs));
700 use_stmt = gsi_stmt (*use_stmt_gsi);
701 TREE_OPERAND (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0), 1)
702 = index;
704 /* That should have created gimple, so there is no need to
705 record information to undo the propagation. */
706 fold_stmt_inplace (use_stmt);
707 tidy_after_forward_propagate_addr (use_stmt);
708 return true;
711 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
712 ADDR_EXPR <whatever>.
714 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
715 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
716 node or for recovery of array indexing from pointer arithmetic.
  718    Return true if the propagation was successful (the propagation may
  719    not be totally successful, yet things may still have changed).  */
721 static bool
722 forward_propagate_addr_expr_1 (tree name, tree def_rhs,
723 gimple_stmt_iterator *use_stmt_gsi,
724 bool single_use_p)
726 tree lhs, rhs, rhs2, array_ref;
727 tree *rhsp, *lhsp;
728 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
729 enum tree_code rhs_code;
730 bool res = true;
731 bool addr_p = false;
733 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
735 lhs = gimple_assign_lhs (use_stmt);
736 rhs_code = gimple_assign_rhs_code (use_stmt);
737 rhs = gimple_assign_rhs1 (use_stmt);
739 /* Trivial cases. The use statement could be a trivial copy or a
740 useless conversion. Recurse to the uses of the lhs as copyprop does
741 not copy through different variant pointers and FRE does not catch
742 all useless conversions. Treat the case of a single-use name and
  743      a conversion to def_rhs type separately, though.  */
744 if (TREE_CODE (lhs) == SSA_NAME
745 && ((rhs_code == SSA_NAME && rhs == name)
746 || CONVERT_EXPR_CODE_P (rhs_code)))
748 /* Only recurse if we don't deal with a single use or we cannot
749 do the propagation to the current statement. In particular
750 we can end up with a conversion needed for a non-invariant
751 address which we cannot do in a single statement. */
752 if (!single_use_p
753 || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
754 && (!is_gimple_min_invariant (def_rhs)
755 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
756 && POINTER_TYPE_P (TREE_TYPE (def_rhs))
757 && (TYPE_PRECISION (TREE_TYPE (lhs))
758 > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
759 return forward_propagate_addr_expr (lhs, def_rhs);
761 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
762 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
763 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
764 else
765 gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
766 return true;
769 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
770 ADDR_EXPR will not appear on the LHS. */
771 lhsp = gimple_assign_lhs_ptr (use_stmt);
772 while (handled_component_p (*lhsp))
773 lhsp = &TREE_OPERAND (*lhsp, 0);
774 lhs = *lhsp;
776 /* Now see if the LHS node is an INDIRECT_REF using NAME. If so,
777 propagate the ADDR_EXPR into the use of NAME and fold the result. */
778 if (TREE_CODE (lhs) == INDIRECT_REF
779 && TREE_OPERAND (lhs, 0) == name)
781 if (may_propagate_address_into_dereference (def_rhs, lhs)
782 && (lhsp != gimple_assign_lhs_ptr (use_stmt)
783 || useless_type_conversion_p
784 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)), TREE_TYPE (rhs))))
786 *lhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
787 fold_stmt_inplace (use_stmt);
788 tidy_after_forward_propagate_addr (use_stmt);
790 /* Continue propagating into the RHS if this was not the only use. */
791 if (single_use_p)
792 return true;
794 else
795 /* We can have a struct assignment dereferencing our name twice.
796 Note that we didn't propagate into the lhs to not falsely
797 claim we did when propagating into the rhs. */
798 res = false;
801 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
802 nodes from the RHS. */
803 rhsp = gimple_assign_rhs1_ptr (use_stmt);
804 if (TREE_CODE (*rhsp) == ADDR_EXPR)
806 rhsp = &TREE_OPERAND (*rhsp, 0);
807 addr_p = true;
809 while (handled_component_p (*rhsp))
810 rhsp = &TREE_OPERAND (*rhsp, 0);
811 rhs = *rhsp;
813 /* Now see if the RHS node is an INDIRECT_REF using NAME. If so,
814 propagate the ADDR_EXPR into the use of NAME and fold the result. */
815 if (TREE_CODE (rhs) == INDIRECT_REF
816 && TREE_OPERAND (rhs, 0) == name
817 && may_propagate_address_into_dereference (def_rhs, rhs))
819 *rhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
820 fold_stmt_inplace (use_stmt);
821 tidy_after_forward_propagate_addr (use_stmt);
822 return res;
825 /* Now see if the RHS node is an INDIRECT_REF using NAME. If so,
826 propagate the ADDR_EXPR into the use of NAME and try to
827 create a VCE and fold the result. */
828 if (TREE_CODE (rhs) == INDIRECT_REF
829 && TREE_OPERAND (rhs, 0) == name
830 && TYPE_SIZE (TREE_TYPE (rhs))
831 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
832 /* Function decls should not be used for VCE either as it could be a
833 function descriptor that we want and not the actual function code. */
834 && TREE_CODE (TREE_OPERAND (def_rhs, 0)) != FUNCTION_DECL
  835       /* We should not convert volatile loads to non-volatile loads.  */
836 && !TYPE_VOLATILE (TREE_TYPE (rhs))
837 && !TYPE_VOLATILE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
838 && operand_equal_p (TYPE_SIZE (TREE_TYPE (rhs)),
839 TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))), 0)
840 /* Make sure we only do TBAA compatible replacements. */
841 && get_alias_set (TREE_OPERAND (def_rhs, 0)) == get_alias_set (rhs))
843 tree def_rhs_base, new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
844 new_rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), new_rhs);
845 if (TREE_CODE (new_rhs) != VIEW_CONVERT_EXPR)
847 /* If we have folded the VIEW_CONVERT_EXPR then the result is only
848 valid if we can replace the whole rhs of the use statement. */
849 if (rhs != gimple_assign_rhs1 (use_stmt))
850 return false;
851 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true, NULL,
852 true, GSI_NEW_STMT);
853 gimple_assign_set_rhs1 (use_stmt, new_rhs);
854 tidy_after_forward_propagate_addr (use_stmt);
855 return res;
857 /* If the defining rhs comes from an indirect reference, then do not
858 convert into a VIEW_CONVERT_EXPR. Likewise if we'll end up taking
859 the address of a V_C_E of a constant. */
860 def_rhs_base = TREE_OPERAND (def_rhs, 0);
861 while (handled_component_p (def_rhs_base))
862 def_rhs_base = TREE_OPERAND (def_rhs_base, 0);
863 if (!INDIRECT_REF_P (def_rhs_base)
864 && (!addr_p
865 || !is_gimple_min_invariant (def_rhs)))
867 /* We may have arbitrary VIEW_CONVERT_EXPRs in a nested component
868 reference. Place it there and fold the thing. */
869 *rhsp = new_rhs;
870 fold_stmt_inplace (use_stmt);
871 tidy_after_forward_propagate_addr (use_stmt);
872 return res;
876 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
877 is nothing to do. */
878 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
879 || gimple_assign_rhs1 (use_stmt) != name)
880 return false;
882 /* The remaining cases are all for turning pointer arithmetic into
883 array indexing. They only apply when we have the address of
884 element zero in an array. If that is not the case then there
885 is nothing to do. */
886 array_ref = TREE_OPERAND (def_rhs, 0);
887 if (TREE_CODE (array_ref) != ARRAY_REF
888 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
889 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
890 return false;
892 rhs2 = gimple_assign_rhs2 (use_stmt);
893 /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size
894 of the elements in X into &x[C1 + C2/element size]. */
895 if (TREE_CODE (rhs2) == INTEGER_CST)
897 tree new_rhs = maybe_fold_stmt_addition (gimple_location (use_stmt),
898 TREE_TYPE (def_rhs),
899 def_rhs, rhs2);
900 if (new_rhs)
902 tree type = TREE_TYPE (gimple_assign_lhs (use_stmt));
903 new_rhs = unshare_expr (new_rhs);
904 if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs)))
906 if (!is_gimple_min_invariant (new_rhs))
907 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs,
908 true, NULL_TREE,
909 true, GSI_SAME_STMT);
910 new_rhs = fold_convert (type, new_rhs);
912 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
913 use_stmt = gsi_stmt (*use_stmt_gsi);
914 update_stmt (use_stmt);
915 tidy_after_forward_propagate_addr (use_stmt);
916 return true;
  920   /* Try to optimize &x[0] p+ OFFSET, where OFFSET is defined by a
  921      multiplication of an index by the size of the array elements,
  922      possibly with the result converted into the proper type for the
  923      pointer arithmetic.  */
924 if (TREE_CODE (rhs2) == SSA_NAME
925 && integer_zerop (TREE_OPERAND (array_ref, 1))
926 && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
927 /* Avoid problems with IVopts creating PLUS_EXPRs with a
928 different type than their operands. */
929 && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
930 return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
931 use_stmt_gsi);
932 return false;
935 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
937 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
938 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
939 node or for recovery of array indexing from pointer arithmetic.
  940    Returns true if all uses have been propagated into.  */
942 static bool
943 forward_propagate_addr_expr (tree name, tree rhs)
945 int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
946 imm_use_iterator iter;
947 gimple use_stmt;
948 bool all = true;
949 bool single_use_p = has_single_use (name);
951 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
953 bool result;
954 tree use_rhs;
956 /* If the use is not in a simple assignment statement, then
957 there is nothing we can do. */
958 if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
960 if (!is_gimple_debug (use_stmt))
961 all = false;
962 continue;
965 /* If the use is in a deeper loop nest, then we do not want
966 to propagate the ADDR_EXPR into the loop as that is likely
967 adding expression evaluations into the loop. */
968 if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth)
970 all = false;
971 continue;
975 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
976 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
977 single_use_p);
978 /* If the use has moved to a different statement adjust
979 the update machinery for the old statement too. */
980 if (use_stmt != gsi_stmt (gsi))
982 update_stmt (use_stmt);
983 use_stmt = gsi_stmt (gsi);
986 update_stmt (use_stmt);
988 all &= result;
990 /* Remove intermediate now unused copy and conversion chains. */
991 use_rhs = gimple_assign_rhs1 (use_stmt);
992 if (result
993 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
994 && TREE_CODE (use_rhs) == SSA_NAME
995 && has_zero_uses (gimple_assign_lhs (use_stmt)))
997 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
998 release_defs (use_stmt);
999 gsi_remove (&gsi, true);
1003 return all;
1006 /* Forward propagate the comparison defined in STMT like
1007 cond_1 = x CMP y to uses of the form
1008 a_1 = (T')cond_1
1009 a_1 = !cond_1
1010 a_1 = cond_1 != 0
1011 Returns true if stmt is now unused. */
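/* For the logical negation form, a sketch with hypothetical SSA names and
   integer operands:

     cond_1 = x_2 < y_3;
     a_4 = !cond_1;

   is rewritten to  a_4 = x_2 >= y_3;  For floating-point operands the
   inverted comparison must account for NaNs, which invert_tree_comparison
   below takes care of (bailing out when no valid inversion exists).  */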
1013 static bool
1014 forward_propagate_comparison (gimple stmt)
1016 tree name = gimple_assign_lhs (stmt);
1017 gimple use_stmt;
1018 tree tmp = NULL_TREE;
1020 /* Don't propagate ssa names that occur in abnormal phis. */
1021 if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1022 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
1023 || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
1024 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
1025 return false;
1027 /* Do not un-cse comparisons. But propagate through copies. */
1028 use_stmt = get_prop_dest_stmt (name, &name);
1029 if (!use_stmt)
1030 return false;
1032 /* Conversion of the condition result to another integral type. */
1033 if (is_gimple_assign (use_stmt)
1034 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt))
1035 || TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
1036 == tcc_comparison
1037 || gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
1038 && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (use_stmt))))
1040 tree lhs = gimple_assign_lhs (use_stmt);
1042 /* We can propagate the condition into a conversion. */
1043 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt)))
1045 /* Avoid using fold here as that may create a COND_EXPR with
1046 non-boolean condition as canonical form. */
1047 tmp = build2 (gimple_assign_rhs_code (stmt), TREE_TYPE (lhs),
1048 gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
1050 /* We can propagate the condition into X op CST where op
1051 is EQ_EXPR or NE_EXPR and CST is either one or zero. */
1052 else if (TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
1053 == tcc_comparison
1054 && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
1055 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
1057 enum tree_code code = gimple_assign_rhs_code (use_stmt);
1058 tree cst = gimple_assign_rhs2 (use_stmt);
1059 tree cond;
1061 cond = build2 (gimple_assign_rhs_code (stmt),
1062 TREE_TYPE (cst),
1063 gimple_assign_rhs1 (stmt),
1064 gimple_assign_rhs2 (stmt));
1066 tmp = combine_cond_expr_cond (gimple_location (use_stmt),
1067 code, TREE_TYPE (lhs),
1068 cond, cst, false);
1069 if (tmp == NULL_TREE)
1070 return false;
1072 /* We can propagate the condition into a statement that
1073 computes the logical negation of the comparison result. */
1074 else if (gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
1076 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
1077 bool nans = HONOR_NANS (TYPE_MODE (type));
1078 enum tree_code code;
1079 code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
1080 if (code == ERROR_MARK)
1081 return false;
1083 tmp = build2 (code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
1084 gimple_assign_rhs2 (stmt));
1086 else
1087 return false;
1090 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1091 gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
1092 use_stmt = gsi_stmt (gsi);
1093 update_stmt (use_stmt);
1096 /* Remove defining statements. */
1097 remove_prop_source_from_use (name, stmt);
1099 if (dump_file && (dump_flags & TDF_DETAILS))
1101 tree old_rhs = rhs_to_tree (TREE_TYPE (gimple_assign_lhs (stmt)),
1102 stmt);
1103 fprintf (dump_file, " Replaced '");
1104 print_generic_expr (dump_file, old_rhs, dump_flags);
1105 fprintf (dump_file, "' with '");
1106 print_generic_expr (dump_file, tmp, dump_flags);
1107 fprintf (dump_file, "'\n");
1110 return true;
1113 return false;
1116 /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1117 If so, we can change STMT into lhs = y which can later be copy
1118 propagated. Similarly for negation.
1120 This could trivially be formulated as a forward propagation
1121 to immediate uses. However, we already had an implementation
1122 from DOM which used backward propagation via the use-def links.
1124 It turns out that backward propagation is actually faster as
1125 there's less work to do for each NOT/NEG expression we find.
1126 Backwards propagation needs to look at the statement in a single
1127 backlink. Forward propagation needs to look at potentially more
1128 than one forward link. */
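/* A sketch with hypothetical SSA names:

     x_1 = ~y_2;
     lhs_3 = ~x_1;

   turns the second statement into  lhs_3 = y_2;  which copy propagation
   can then eliminate.  Negation (lhs_3 = -x_1 with x_1 = -y_2) is handled
   the same way.  */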
1130 static void
1131 simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
1133 gimple stmt = gsi_stmt (*gsi_p);
1134 tree rhs = gimple_assign_rhs1 (stmt);
1135 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
1137 /* See if the RHS_DEF_STMT has the same form as our statement. */
1138 if (is_gimple_assign (rhs_def_stmt)
1139 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
1141 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
1143 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1144 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1145 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
1147 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
1148 stmt = gsi_stmt (*gsi_p);
1149 update_stmt (stmt);
1154 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1155 the condition which we may be able to optimize better. */
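/* A sketch with hypothetical SSA names, where c_1 has type unsigned char
   (the cast preserves the value because every unsigned char value is
   representable as an int):

     i_2 = (int) c_1;
     switch (i_2) <default: ..., case 1: ...>

   The switch index is replaced by c_1, after which the cast is dead and
   the inferred constant can be propagated better at each case target.  */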
1157 static void
1158 simplify_gimple_switch (gimple stmt)
1160 tree cond = gimple_switch_index (stmt);
1161 tree def, to, ti;
1162 gimple def_stmt;
1164 /* The optimization that we really care about is removing unnecessary
1165 casts. That will let us do much better in propagating the inferred
1166 constant at the switch target. */
1167 if (TREE_CODE (cond) == SSA_NAME)
1169 def_stmt = SSA_NAME_DEF_STMT (cond);
1170 if (is_gimple_assign (def_stmt))
1172 if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
1174 int need_precision;
1175 bool fail;
1177 def = gimple_assign_rhs1 (def_stmt);
1179 #ifdef ENABLE_CHECKING
1180 /* ??? Why was Jeff testing this? We are gimple... */
1181 gcc_assert (is_gimple_val (def));
1182 #endif
1184 to = TREE_TYPE (cond);
1185 ti = TREE_TYPE (def);
1187 /* If we have an extension that preserves value, then we
1188 can copy the source value into the switch. */
1190 need_precision = TYPE_PRECISION (ti);
1191 fail = false;
1192 if (! INTEGRAL_TYPE_P (ti))
1193 fail = true;
1194 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
1195 fail = true;
1196 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
1197 need_precision += 1;
1198 if (TYPE_PRECISION (to) < need_precision)
1199 fail = true;
1201 if (!fail)
1203 gimple_switch_set_index (stmt, def);
1204 update_stmt (stmt);
 1211 /* Run bitwise-and assignments through the folder.  If the first argument is
 1212    an ssa name that is itself the result of a typecast of an ADDR_EXPR to an
 1213    integer, feed the ADDR_EXPR to the folder rather than the ssa name.  */
1216 static void
1217 simplify_bitwise_and (gimple_stmt_iterator *gsi, gimple stmt)
1219 tree res;
1220 tree arg1 = gimple_assign_rhs1 (stmt);
1221 tree arg2 = gimple_assign_rhs2 (stmt);
1223 if (TREE_CODE (arg2) != INTEGER_CST)
1224 return;
1226 if (TREE_CODE (arg1) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (arg1))
1228 gimple def = SSA_NAME_DEF_STMT (arg1);
1230 if (gimple_assign_cast_p (def)
1231 && INTEGRAL_TYPE_P (gimple_expr_type (def)))
1233 tree op = gimple_assign_rhs1 (def);
1235 if (TREE_CODE (op) == ADDR_EXPR)
1236 arg1 = op;
1240 res = fold_binary_loc (gimple_location (stmt),
1241 BIT_AND_EXPR, TREE_TYPE (gimple_assign_lhs (stmt)),
1242 arg1, arg2);
1243 if (res && is_gimple_min_invariant (res))
1245 gimple_assign_set_rhs_from_tree (gsi, res);
1246 update_stmt (stmt);
1248 return;
1251 /* Main entry point for the forward propagation optimizer. */
1253 static unsigned int
1254 tree_ssa_forward_propagate_single_use_vars (void)
1256 basic_block bb;
1257 unsigned int todoflags = 0;
1259 cfg_changed = false;
1261 FOR_EACH_BB (bb)
1263 gimple_stmt_iterator gsi;
1265 /* Note we update GSI within the loop as necessary. */
1266 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
1268 gimple stmt = gsi_stmt (gsi);
1270 /* If this statement sets an SSA_NAME to an address,
1271 try to propagate the address into the uses of the SSA_NAME. */
1272 if (is_gimple_assign (stmt))
1274 tree lhs = gimple_assign_lhs (stmt);
1275 tree rhs = gimple_assign_rhs1 (stmt);
1277 if (TREE_CODE (lhs) != SSA_NAME)
1279 gsi_next (&gsi);
1280 continue;
1283 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR
1284 /* Handle pointer conversions on invariant addresses
1285 as well, as this is valid gimple. */
1286 || (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1287 && TREE_CODE (rhs) == ADDR_EXPR
1288 && POINTER_TYPE_P (TREE_TYPE (lhs))))
1290 STRIP_NOPS (rhs);
1291 if (!stmt_references_abnormal_ssa_name (stmt)
1292 && forward_propagate_addr_expr (lhs, rhs))
1294 release_defs (stmt);
1295 todoflags |= TODO_remove_unused_locals;
1296 gsi_remove (&gsi, true);
1298 else
1299 gsi_next (&gsi);
1301 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1302 && is_gimple_min_invariant (rhs))
1304 /* Make sure to fold &a[0] + off_1 here. */
1305 fold_stmt_inplace (stmt);
1306 update_stmt (stmt);
1307 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1308 gsi_next (&gsi);
1310 else if ((gimple_assign_rhs_code (stmt) == BIT_NOT_EXPR
1311 || gimple_assign_rhs_code (stmt) == NEGATE_EXPR)
1312 && TREE_CODE (rhs) == SSA_NAME)
1314 simplify_not_neg_expr (&gsi);
1315 gsi_next (&gsi);
1317 else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
1319 /* In this case the entire COND_EXPR is in rhs1. */
1320 int did_something;
1321 fold_defer_overflow_warnings ();
1322 did_something = forward_propagate_into_cond (&gsi);
1323 stmt = gsi_stmt (gsi);
1324 if (did_something == 2)
1325 cfg_changed = true;
1326 fold_undefer_overflow_warnings (!TREE_NO_WARNING (rhs)
1327 && did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
1328 gsi_next (&gsi);
1330 else if (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1331 == tcc_comparison)
1333 if (forward_propagate_comparison (stmt))
1335 release_defs (stmt);
1336 todoflags |= TODO_remove_unused_locals;
1337 gsi_remove (&gsi, true);
1339 else
1340 gsi_next (&gsi);
1342 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
1344 simplify_bitwise_and (&gsi, stmt);
1345 gsi_next (&gsi);
1347 else
1348 gsi_next (&gsi);
1350 else if (gimple_code (stmt) == GIMPLE_SWITCH)
1352 simplify_gimple_switch (stmt);
1353 gsi_next (&gsi);
1355 else if (gimple_code (stmt) == GIMPLE_COND)
1357 int did_something;
1358 fold_defer_overflow_warnings ();
1359 did_something = forward_propagate_into_gimple_cond (stmt);
1360 if (did_something == 2)
1361 cfg_changed = true;
1362 fold_undefer_overflow_warnings (did_something, stmt,
1363 WARN_STRICT_OVERFLOW_CONDITIONAL);
1364 gsi_next (&gsi);
1366 else
1367 gsi_next (&gsi);
1371 if (cfg_changed)
1372 todoflags |= TODO_cleanup_cfg;
1373 return todoflags;
1377 static bool
1378 gate_forwprop (void)
1380 return flag_tree_forwprop;
1383 struct gimple_opt_pass pass_forwprop =
1386 GIMPLE_PASS,
1387 "forwprop", /* name */
1388 gate_forwprop, /* gate */
1389 tree_ssa_forward_propagate_single_use_vars, /* execute */
1390 NULL, /* sub */
1391 NULL, /* next */
1392 0, /* static_pass_number */
1393 TV_TREE_FORWPROP, /* tv_id */
1394 PROP_cfg | PROP_ssa, /* properties_required */
1395 0, /* properties_provided */
1396 0, /* properties_destroyed */
1397 0, /* todo_flags_start */
1398 TODO_dump_func
1399 | TODO_ggc_collect
1400 | TODO_update_ssa
1401 | TODO_verify_ssa /* todo_flags_finish */