gcc/tree-ssa-forwprop.c
1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "tm_p.h"
27 #include "basic-block.h"
28 #include "timevar.h"
29 #include "tree-pretty-print.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-dump.h"
33 #include "langhooks.h"
34 #include "flags.h"
35 #include "gimple.h"
37 /* This pass propagates the RHS of assignment statements into use
38 sites of the LHS of the assignment. It's basically a specialized
39 form of tree combination. It is hoped all of this can disappear
40 when we have a generalized tree combiner.
42 One class of common cases we handle is forward propagating a single use
43 variable into a COND_EXPR.
45 bb0:
46 x = a COND b;
47 if (x) goto ... else goto ...
49 Will be transformed into:
51 bb0:
52 if (a COND b) goto ... else goto ...
54 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
56 Or (assuming c1 and c2 are constants):
58 bb0:
59 x = a + c1;
60 if (x EQ/NEQ c2) goto ... else goto ...
62 Will be transformed into:
64 bb0:
65 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
67 Similarly for x = a - c1.
71 bb0:
72 x = !a
73 if (x) goto ... else goto ...
75 Will be transformed into:
77 bb0:
78 if (a == 0) goto ... else goto ...
80 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
81 For these cases, we propagate A into all, possibly more than one,
82 COND_EXPRs that use X.
86 bb0:
87 x = (typecast) a
88 if (x) goto ... else goto ...
90 Will be transformed into:
92 bb0:
93 if (a != 0) goto ... else goto ...
95 (Assuming a is of integral type and x is a boolean, or x is of
96 integral type and a is a boolean.)
98 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
99 For these cases, we propagate A into all, possibly more than one,
100 COND_EXPRs that use X.
102 In addition to eliminating the variable and the statement which assigns
103 a value to the variable, we may be able to later thread the jump without
104 adding insane complexity in the dominator optimizer.
106 Also note these transformations can cascade. We handle this by having
107 a worklist of COND_EXPR statements to examine. As we make a change to
108 a statement, we put it back on the worklist to examine on the next
109 iteration of the main loop.
111 A second class of propagation opportunities arises for ADDR_EXPR
112 nodes.
114 ptr = &x->y->z;
115 res = *ptr;
117 Will get turned into
119 res = x->y->z;
122 ptr = (type1*)&type2var;
123 res = *ptr
125 Will get turned into (if type1 and type2 are the same size
126 and neither have volatile on them):
127 res = VIEW_CONVERT_EXPR<type1>(type2var)
131 ptr = &x[0];
132 ptr2 = ptr + <constant>;
134 Will get turned into
136 ptr2 = &x[constant/elementsize];
140 ptr = &x[0];
141 offset = index * element_size;
142 offset_p = (pointer) offset;
143 ptr2 = ptr + offset_p
145 Will get turned into:
147 ptr2 = &x[index];
150 ssa = (int) decl
151 res = ssa & 1
153 Provided that decl has known alignment >= 2, will get turned into
155 res = 0
157 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
158 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
159 {NOT_EXPR,NEG_EXPR}.
161 This will (of course) be extended as other needs arise. */
163 static bool forward_propagate_addr_expr (tree name, tree rhs);
165 /* Set to true if we delete EH edges during the optimization. */
166 static bool cfg_changed;
168 static tree rhs_to_tree (tree type, gimple stmt);
170 /* Get the next statement we can propagate NAME's value into, skipping
171 trivial copies. Returns the statement that is suitable as a
172 propagation destination or NULL if there is no such one.
173 This only returns destinations in a single-use chain. If FINAL_NAME_P
174 is non-NULL, the ssa name that represents the use is written to it. */
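/* As an illustration (the SSA names below are made up), given

     x_1 = a_2 < b_3;
     y_4 = x_1;
     if (y_4 != 0) ...

   get_prop_dest_stmt (x_1, &final) skips the trivial copy y_4 = x_1,
   returns the GIMPLE_COND as the propagation destination and writes
   y_4 to *final.  */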
176 static gimple
177 get_prop_dest_stmt (tree name, tree *final_name_p)
179 use_operand_p use;
180 gimple use_stmt;
182 do {
183 /* If name has multiple uses, bail out. */
184 if (!single_imm_use (name, &use, &use_stmt))
185 return NULL;
187 /* If this is not a trivial copy, we found it. */
188 if (!gimple_assign_ssa_name_copy_p (use_stmt)
189 || gimple_assign_rhs1 (use_stmt) != name)
190 break;
192 /* Continue searching uses of the copy destination. */
193 name = gimple_assign_lhs (use_stmt);
194 } while (1);
196 if (final_name_p)
197 *final_name_p = name;
199 return use_stmt;
202 /* Get the statement we can propagate from into NAME skipping
203 trivial copies. Returns the statement which defines the
204 propagation source or NULL if there is no such one.
205 If SINGLE_USE_ONLY is set considers only sources which have
206 a single use chain up to NAME. If SINGLE_USE_P is non-null,
207 it is set to whether the chain to NAME is a single use chain
208 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
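/* As an illustration (made-up SSA names): for

     t_1 = a_2 + 1;
     x_3 = t_1;

   get_prop_source_stmt (x_3, false, &single_p) walks back through the
   copy and returns the statement t_1 = a_2 + 1; single_p records
   whether every name on the chain had a single use.  */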
210 static gimple
211 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
213 bool single_use = true;
215 do {
216 gimple def_stmt = SSA_NAME_DEF_STMT (name);
218 if (!has_single_use (name))
220 single_use = false;
221 if (single_use_only)
222 return NULL;
225 /* If name is defined by a PHI node or is the default def, bail out. */
226 if (!is_gimple_assign (def_stmt))
227 return NULL;
229 /* If def_stmt is not a simple copy, we possibly found it. */
230 if (!gimple_assign_ssa_name_copy_p (def_stmt))
232 tree rhs;
234 if (!single_use_only && single_use_p)
235 *single_use_p = single_use;
237 /* We can look through pointer conversions in the search
238 for a useful stmt for the comparison folding. */
239 rhs = gimple_assign_rhs1 (def_stmt);
240 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
241 && TREE_CODE (rhs) == SSA_NAME
242 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
243 && POINTER_TYPE_P (TREE_TYPE (rhs)))
244 name = rhs;
245 else
246 return def_stmt;
248 else
250 /* Continue searching the def of the copy source name. */
251 name = gimple_assign_rhs1 (def_stmt);
253 } while (1);
256 /* Checks if the destination ssa name in DEF_STMT can be used as
257 propagation source. Returns true if so, otherwise false. */
259 static bool
260 can_propagate_from (gimple def_stmt)
262 use_operand_p use_p;
263 ssa_op_iter iter;
265 gcc_assert (is_gimple_assign (def_stmt));
267 /* If the rhs has side-effects we cannot propagate from it. */
268 if (gimple_has_volatile_ops (def_stmt))
269 return false;
271 /* If the rhs is a load we cannot propagate from it. */
272 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
273 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
274 return false;
276 /* Constants can be always propagated. */
277 if (gimple_assign_single_p (def_stmt)
278 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
279 return true;
281 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
282 FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_USE)
283 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
284 return false;
286 /* If the definition is a conversion of a pointer to a function type,
287 then we cannot apply optimizations as some targets require
288 function pointers to be canonicalized and in this case this
289 optimization could eliminate a necessary canonicalization. */
290 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
292 tree rhs = gimple_assign_rhs1 (def_stmt);
293 if (POINTER_TYPE_P (TREE_TYPE (rhs))
294 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
295 return false;
298 return true;
301 /* Remove a copy chain ending in NAME along the defs, stopping
302 before (and not including) UP_TO_STMT. If NAME was replaced in
303 its only use then this function can be used to clean up
304 dead stmts. Returns true if UP_TO_STMT can be removed
305 as well, otherwise false. */
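/* As an illustration (made-up SSA names): after the only use of y_4 in

     t_1 = a_2 + 1;
     x_3 = t_1;
     y_4 = x_3;

   has been replaced, remove_prop_source_from_use (y_4, <stmt of t_1>)
   deletes the dead copies y_4 = x_3 and x_3 = t_1 and returns true,
   telling the caller that UP_TO_STMT itself could be removed too.  */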
307 static bool
308 remove_prop_source_from_use (tree name, gimple up_to_stmt)
310 gimple_stmt_iterator gsi;
311 gimple stmt;
313 do {
314 if (!has_zero_uses (name))
315 return false;
317 stmt = SSA_NAME_DEF_STMT (name);
318 if (stmt == up_to_stmt)
319 return true;
321 gsi = gsi_for_stmt (stmt);
322 release_defs (stmt);
323 gsi_remove (&gsi, true);
325 name = (gimple_assign_copy_p (stmt)) ? gimple_assign_rhs1 (stmt) : NULL;
326 } while (name && TREE_CODE (name) == SSA_NAME);
328 return false;
331 /* Return the rhs of a gimple_assign STMT in the form of a single tree,
332 converted to type TYPE.
334 This should disappear, but is needed so we can combine expressions and use
335 the fold() interfaces. Long term, we need to develop folding and combine
336 routines that deal with gimple exclusively. */
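/* As an illustration: for the statement x_1 = a_2 + b_3, rhs_to_tree
   builds (and folds) the GENERIC tree a_2 + b_3 of the requested type;
   for a GIMPLE_SINGLE_RHS it simply returns rhs1 unchanged.  */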
338 static tree
339 rhs_to_tree (tree type, gimple stmt)
341 location_t loc = gimple_location (stmt);
342 enum tree_code code = gimple_assign_rhs_code (stmt);
343 if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
344 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
345 gimple_assign_rhs2 (stmt));
346 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
347 return build1 (code, type, gimple_assign_rhs1 (stmt));
348 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
349 return gimple_assign_rhs1 (stmt);
350 else
351 gcc_unreachable ();
354 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
355 the folded result in a form suitable for COND_EXPR_COND or
356 NULL_TREE, if there is no suitable simplified form. If
357 INVARIANT_ONLY is true only gimple_min_invariant results are
358 considered simplified. */
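/* As an illustration (made-up names): combine_cond_expr_cond (loc, EQ_EXPR,
   boolean_type_node, a_1 + 1, 0, false) lets fold simplify the comparison
   to a_1 == -1 and canonicalizes it for use as a condition; if
   INVARIANT_ONLY had been true, this non-constant result would be
   discarded and NULL_TREE returned.  */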
360 static tree
361 combine_cond_expr_cond (location_t loc, enum tree_code code, tree type,
362 tree op0, tree op1, bool invariant_only)
364 tree t;
366 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
368 t = fold_binary_loc (loc, code, type, op0, op1);
369 if (!t)
370 return NULL_TREE;
372 /* Require that we got a boolean type out if we put one in. */
373 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
375 /* Canonicalize the combined condition for use in a COND_EXPR. */
376 t = canonicalize_cond_expr_cond (t);
378 /* Bail out if we required an invariant but didn't get one. */
379 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
380 return NULL_TREE;
382 return t;
385 /* Propagate from the ssa name definition statements of COND_EXPR
386 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
387 Returns zero if no statement was changed, one if there were
388 changes and two if cfg_cleanup needs to run.
390 This must be kept in sync with forward_propagate_into_cond. */
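/* As an illustration (made-up SSA names): for

     x_1 = a_2 < b_3;
     if (x_1 != 0) goto ...; else goto ...;

   the definition of x_1 is combined into the GIMPLE_COND, which becomes
   if (a_2 < b_3), and the now-dead definition of x_1 is removed.  */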
392 static int
393 forward_propagate_into_gimple_cond (gimple stmt)
395 int did_something = 0;
396 location_t loc = gimple_location (stmt);
398 do {
399 tree tmp = NULL_TREE;
400 tree name = NULL_TREE, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
401 gimple def_stmt;
402 bool single_use0_p = false, single_use1_p = false;
403 enum tree_code code = gimple_cond_code (stmt);
405 /* We can do tree combining on SSA_NAME and comparison expressions. */
406 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) == tcc_comparison)
408 /* For comparisons use the first operand, that is likely to
409 simplify comparisons against constants. */
410 if (TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME)
412 name = gimple_cond_lhs (stmt);
413 def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
414 if (def_stmt && can_propagate_from (def_stmt))
416 tree op1 = gimple_cond_rhs (stmt);
417 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
418 tmp = combine_cond_expr_cond (loc, code, boolean_type_node,
419 rhs0, op1, !single_use0_p);
422 /* If that wasn't successful, try the second operand. */
423 if (tmp == NULL_TREE
424 && TREE_CODE (gimple_cond_rhs (stmt)) == SSA_NAME)
426 tree op0 = gimple_cond_lhs (stmt);
427 name = gimple_cond_rhs (stmt);
428 def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
429 if (!def_stmt || !can_propagate_from (def_stmt))
430 return did_something;
432 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
433 tmp = combine_cond_expr_cond (loc, code, boolean_type_node, op0,
434 rhs1, !single_use1_p);
436 /* If that wasn't successful either, try both operands. */
437 if (tmp == NULL_TREE
438 && rhs0 != NULL_TREE
439 && rhs1 != NULL_TREE)
440 tmp = combine_cond_expr_cond (loc, code, boolean_type_node, rhs0,
441 fold_convert_loc (loc,
442 TREE_TYPE (rhs0),
443 rhs1),
444 !(single_use0_p && single_use1_p));
447 if (tmp)
449 if (dump_file && tmp)
451 tree cond = build2 (gimple_cond_code (stmt),
452 boolean_type_node,
453 gimple_cond_lhs (stmt),
454 gimple_cond_rhs (stmt));
455 fprintf (dump_file, " Replaced '");
456 print_generic_expr (dump_file, cond, 0);
457 fprintf (dump_file, "' with '");
458 print_generic_expr (dump_file, tmp, 0);
459 fprintf (dump_file, "'\n");
462 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
463 update_stmt (stmt);
465 /* Remove defining statements. */
466 remove_prop_source_from_use (name, NULL);
468 if (is_gimple_min_invariant (tmp))
469 did_something = 2;
470 else if (did_something == 0)
471 did_something = 1;
473 /* Continue combining. */
474 continue;
477 break;
478 } while (1);
480 return did_something;
484 /* Propagate from the ssa name definition statements of COND_EXPR
485 in the rhs of statement STMT into the conditional if that simplifies it.
486 Returns zero if no statement was changed, one if there were
487 changes and two if cfg_cleanup needs to run.
489 This must be kept in sync with forward_propagate_into_gimple_cond. */
491 static int
492 forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
494 gimple stmt = gsi_stmt (*gsi_p);
495 location_t loc = gimple_location (stmt);
496 int did_something = 0;
498 do {
499 tree tmp = NULL_TREE;
500 tree cond = gimple_assign_rhs1 (stmt);
501 tree name, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
502 gimple def_stmt;
503 bool single_use0_p = false, single_use1_p = false;
505 /* We can do tree combining on SSA_NAME and comparison expressions. */
506 if (COMPARISON_CLASS_P (cond)
507 && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME)
509 /* For comparisons use the first operand, that is likely to
510 simplify comparisons against constants. */
511 name = TREE_OPERAND (cond, 0);
512 def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
513 if (def_stmt && can_propagate_from (def_stmt))
515 tree op1 = TREE_OPERAND (cond, 1);
516 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
517 tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
518 boolean_type_node,
519 rhs0, op1, !single_use0_p);
521 /* If that wasn't successful, try the second operand. */
522 if (tmp == NULL_TREE
523 && TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME)
525 tree op0 = TREE_OPERAND (cond, 0);
526 name = TREE_OPERAND (cond, 1);
527 def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
528 if (!def_stmt || !can_propagate_from (def_stmt))
529 return did_something;
531 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
532 tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
533 boolean_type_node,
534 op0, rhs1, !single_use1_p);
536 /* If that wasn't successful either, try both operands. */
537 if (tmp == NULL_TREE
538 && rhs0 != NULL_TREE
539 && rhs1 != NULL_TREE)
540 tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
541 boolean_type_node,
542 rhs0,
543 fold_convert_loc (loc,
544 TREE_TYPE (rhs0),
545 rhs1),
546 !(single_use0_p && single_use1_p));
548 else if (TREE_CODE (cond) == SSA_NAME)
550 name = cond;
551 def_stmt = get_prop_source_stmt (name, true, NULL);
552 if (!def_stmt || !can_propagate_from (def_stmt))
553 return did_something;
555 rhs0 = gimple_assign_rhs1 (def_stmt);
556 tmp = combine_cond_expr_cond (loc, NE_EXPR, boolean_type_node, rhs0,
557 build_int_cst (TREE_TYPE (rhs0), 0),
558 false);
561 if (tmp)
563 if (dump_file && tmp)
565 fprintf (dump_file, " Replaced '");
566 print_generic_expr (dump_file, cond, 0);
567 fprintf (dump_file, "' with '");
568 print_generic_expr (dump_file, tmp, 0);
569 fprintf (dump_file, "'\n");
572 gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp));
573 stmt = gsi_stmt (*gsi_p);
574 update_stmt (stmt);
576 /* Remove defining statements. */
577 remove_prop_source_from_use (name, NULL);
579 if (is_gimple_min_invariant (tmp))
580 did_something = 2;
581 else if (did_something == 0)
582 did_something = 1;
584 /* Continue combining. */
585 continue;
588 break;
589 } while (1);
591 return did_something;
594 /* We've just substituted an ADDR_EXPR into stmt. Update all the
595 relevant data structures to match. */
597 static void
598 tidy_after_forward_propagate_addr (gimple stmt)
600 /* We may have turned a trapping insn into a non-trapping insn. */
601 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
602 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
603 cfg_changed = true;
605 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
606 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
609 /* DEF_RHS contains the address of the 0th element in an array.
610 USE_STMT uses type of DEF_RHS to compute the address of an
611 arbitrary element within the array. The (variable) byte offset
612 of the element is contained in OFFSET.
614 We walk back through the use-def chains of OFFSET to verify that
615 it is indeed computing the offset of an element within the array
616 and extract the index corresponding to the given byte offset.
618 We then try to fold the entire address expression into a form
619 &array[index].
621 If we are successful, we replace the right hand side of USE_STMT
622 with the new address computation. */
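/* As an illustration (made-up SSA names, element size 4): for

     ptr_1 = &x[0];
     off_2 = i_3 * 4;
     ptr_4 = ptr_1 p+ off_2;

   the pointer addition is rewritten to ptr_4 = &x[i_3].  */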
624 static bool
625 forward_propagate_addr_into_variable_array_index (tree offset,
626 tree def_rhs,
627 gimple_stmt_iterator *use_stmt_gsi)
629 tree index, tunit;
630 gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
631 tree tmp;
633 tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
634 if (!host_integerp (tunit, 1))
635 return false;
637 /* Get the offset's defining statement. */
638 offset_def = SSA_NAME_DEF_STMT (offset);
640 /* Try to find an expression for a proper index. This is either a
641 multiplication expression by the element size or just the ssa name we came
642 along in case the element size is one. In that case, however, we do not
643 allow multiplications because they can be computing index to a higher
644 level dimension (PR 37861). */
645 if (integer_onep (tunit))
647 if (is_gimple_assign (offset_def)
648 && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
649 return false;
651 index = offset;
653 else
655 /* The statement which defines OFFSET before type conversion
656 must be a simple GIMPLE_ASSIGN. */
657 if (!is_gimple_assign (offset_def))
658 return false;
660 /* The RHS of the statement which defines OFFSET must be a
661 multiplication of an object by the size of the array elements.
662 This implicitly verifies that the size of the array elements
663 is constant. */
664 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
665 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
666 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
668 /* The first operand to the MULT_EXPR is the desired index. */
669 index = gimple_assign_rhs1 (offset_def);
671 /* If we have idx * tunit + CST * tunit re-associate that. */
672 else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
673 || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
674 && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
675 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
676 && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
677 gimple_assign_rhs2 (offset_def),
678 tunit)) != NULL_TREE)
680 gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
681 if (is_gimple_assign (offset_def2)
682 && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
683 && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
684 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
686 index = fold_build2 (gimple_assign_rhs_code (offset_def),
687 TREE_TYPE (offset),
688 gimple_assign_rhs1 (offset_def2), tmp);
690 else
691 return false;
693 else
694 return false;
697 /* Replace the pointer addition with array indexing. */
698 index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
699 true, GSI_SAME_STMT);
700 gimple_assign_set_rhs_from_tree (use_stmt_gsi, unshare_expr (def_rhs));
701 use_stmt = gsi_stmt (*use_stmt_gsi);
702 TREE_OPERAND (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0), 1)
703 = index;
705 /* That should have created gimple, so there is no need to
706 record information to undo the propagation. */
707 fold_stmt_inplace (use_stmt);
708 tidy_after_forward_propagate_addr (use_stmt);
709 return true;
712 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
713 ADDR_EXPR <whatever>.
715 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
716 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
717 node or for recovery of array indexing from pointer arithmetic.
719 Return true if the propagation was successful (the propagation may
720 not be totally successful, yet things may still have changed). */
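/* As an illustration (made-up SSA names): with

     ptr_1 = &a.b.c;
     x_2 = *ptr_1;

   propagating the ADDR_EXPR into the dereference turns the use into
   x_2 = a.b.c.  */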
722 static bool
723 forward_propagate_addr_expr_1 (tree name, tree def_rhs,
724 gimple_stmt_iterator *use_stmt_gsi,
725 bool single_use_p)
727 tree lhs, rhs, rhs2, array_ref;
728 tree *rhsp, *lhsp;
729 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
730 enum tree_code rhs_code;
731 bool res = true;
732 bool addr_p = false;
734 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
736 lhs = gimple_assign_lhs (use_stmt);
737 rhs_code = gimple_assign_rhs_code (use_stmt);
738 rhs = gimple_assign_rhs1 (use_stmt);
740 /* Trivial cases. The use statement could be a trivial copy or a
741 useless conversion. Recurse to the uses of the lhs as copyprop does
742 not copy through different variant pointers and FRE does not catch
743 all useless conversions. Treat the case of a single-use name and
744 a conversion to def_rhs type separately, though. */
745 if (TREE_CODE (lhs) == SSA_NAME
746 && ((rhs_code == SSA_NAME && rhs == name)
747 || CONVERT_EXPR_CODE_P (rhs_code)))
749 /* Only recurse if we don't deal with a single use or we cannot
750 do the propagation to the current statement. In particular
751 we can end up with a conversion needed for a non-invariant
752 address which we cannot do in a single statement. */
753 if (!single_use_p
754 || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
755 && (!is_gimple_min_invariant (def_rhs)
756 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
757 && POINTER_TYPE_P (TREE_TYPE (def_rhs))
758 && (TYPE_PRECISION (TREE_TYPE (lhs))
759 > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
760 return forward_propagate_addr_expr (lhs, def_rhs);
762 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
763 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
764 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
765 else
766 gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
767 return true;
770 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
771 ADDR_EXPR will not appear on the LHS. */
772 lhsp = gimple_assign_lhs_ptr (use_stmt);
773 while (handled_component_p (*lhsp))
774 lhsp = &TREE_OPERAND (*lhsp, 0);
775 lhs = *lhsp;
777 /* Now see if the LHS node is an INDIRECT_REF using NAME. If so,
778 propagate the ADDR_EXPR into the use of NAME and fold the result. */
779 if (TREE_CODE (lhs) == INDIRECT_REF
780 && TREE_OPERAND (lhs, 0) == name)
782 if (may_propagate_address_into_dereference (def_rhs, lhs)
783 && (lhsp != gimple_assign_lhs_ptr (use_stmt)
784 || useless_type_conversion_p
785 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)), TREE_TYPE (rhs))))
787 *lhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
788 fold_stmt_inplace (use_stmt);
789 tidy_after_forward_propagate_addr (use_stmt);
791 /* Continue propagating into the RHS if this was not the only use. */
792 if (single_use_p)
793 return true;
795 else
796 /* We can have a struct assignment dereferencing our name twice.
797 Note that we didn't propagate into the lhs to not falsely
798 claim we did when propagating into the rhs. */
799 res = false;
802 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
803 nodes from the RHS. */
804 rhsp = gimple_assign_rhs1_ptr (use_stmt);
805 if (TREE_CODE (*rhsp) == ADDR_EXPR)
807 rhsp = &TREE_OPERAND (*rhsp, 0);
808 addr_p = true;
810 while (handled_component_p (*rhsp))
811 rhsp = &TREE_OPERAND (*rhsp, 0);
812 rhs = *rhsp;
814 /* Now see if the RHS node is an INDIRECT_REF using NAME. If so,
815 propagate the ADDR_EXPR into the use of NAME and fold the result. */
816 if (TREE_CODE (rhs) == INDIRECT_REF
817 && TREE_OPERAND (rhs, 0) == name
818 && may_propagate_address_into_dereference (def_rhs, rhs))
820 *rhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
821 fold_stmt_inplace (use_stmt);
822 tidy_after_forward_propagate_addr (use_stmt);
823 return res;
826 /* Now see if the RHS node is an INDIRECT_REF using NAME. If so,
827 propagate the ADDR_EXPR into the use of NAME and try to
828 create a VCE and fold the result. */
829 if (TREE_CODE (rhs) == INDIRECT_REF
830 && TREE_OPERAND (rhs, 0) == name
831 && TYPE_SIZE (TREE_TYPE (rhs))
832 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
833 /* Function decls should not be used for VCE either as it could be a
834 function descriptor that we want and not the actual function code. */
835 && TREE_CODE (TREE_OPERAND (def_rhs, 0)) != FUNCTION_DECL
836 /* We should not convert volatile loads to non volatile loads. */
837 && !TYPE_VOLATILE (TREE_TYPE (rhs))
838 && !TYPE_VOLATILE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
839 && operand_equal_p (TYPE_SIZE (TREE_TYPE (rhs)),
840 TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))), 0)
841 /* Make sure we only do TBAA compatible replacements. */
842 && get_alias_set (TREE_OPERAND (def_rhs, 0)) == get_alias_set (rhs))
844 tree def_rhs_base, new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
845 new_rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), new_rhs);
846 if (TREE_CODE (new_rhs) != VIEW_CONVERT_EXPR)
848 /* If we have folded the VIEW_CONVERT_EXPR then the result is only
849 valid if we can replace the whole rhs of the use statement. */
850 if (rhs != gimple_assign_rhs1 (use_stmt))
851 return false;
852 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true, NULL,
853 true, GSI_NEW_STMT);
854 gimple_assign_set_rhs1 (use_stmt, new_rhs);
855 tidy_after_forward_propagate_addr (use_stmt);
856 return res;
858 /* If the defining rhs comes from an indirect reference, then do not
859 convert into a VIEW_CONVERT_EXPR. Likewise if we'll end up taking
860 the address of a V_C_E of a constant. */
861 def_rhs_base = TREE_OPERAND (def_rhs, 0);
862 while (handled_component_p (def_rhs_base))
863 def_rhs_base = TREE_OPERAND (def_rhs_base, 0);
864 if (!INDIRECT_REF_P (def_rhs_base)
865 && (!addr_p
866 || !is_gimple_min_invariant (def_rhs)))
868 /* We may have arbitrary VIEW_CONVERT_EXPRs in a nested component
869 reference. Place it there and fold the thing. */
870 *rhsp = new_rhs;
871 fold_stmt_inplace (use_stmt);
872 tidy_after_forward_propagate_addr (use_stmt);
873 return res;
877 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
878 is nothing to do. */
879 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
880 || gimple_assign_rhs1 (use_stmt) != name)
881 return false;
883 /* The remaining cases are all for turning pointer arithmetic into
884 array indexing. They only apply when we have the address of
885 element zero in an array. If that is not the case then there
886 is nothing to do. */
887 array_ref = TREE_OPERAND (def_rhs, 0);
888 if (TREE_CODE (array_ref) != ARRAY_REF
889 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
890 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
891 return false;
893 rhs2 = gimple_assign_rhs2 (use_stmt);
894 /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size
895 of the elements in X into &x[C1 + C2/element size]. */
896 if (TREE_CODE (rhs2) == INTEGER_CST)
898 tree new_rhs = maybe_fold_stmt_addition (gimple_location (use_stmt),
899 TREE_TYPE (def_rhs),
900 def_rhs, rhs2);
901 if (new_rhs)
903 tree type = TREE_TYPE (gimple_assign_lhs (use_stmt));
904 new_rhs = unshare_expr (new_rhs);
905 if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs)))
907 if (!is_gimple_min_invariant (new_rhs))
908 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs,
909 true, NULL_TREE,
910 true, GSI_SAME_STMT);
911 new_rhs = fold_convert (type, new_rhs);
913 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
914 use_stmt = gsi_stmt (*use_stmt_gsi);
915 update_stmt (use_stmt);
916 tidy_after_forward_propagate_addr (use_stmt);
917 return true;
921 /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
922 a multiplication of an index by the size of the array elements,
923 the result of which is then converted into the proper type for
924 the arithmetic. */
925 if (TREE_CODE (rhs2) == SSA_NAME
926 && integer_zerop (TREE_OPERAND (array_ref, 1))
927 && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
928 /* Avoid problems with IVopts creating PLUS_EXPRs with a
929 different type than their operands. */
930 && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
931 return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
932 use_stmt_gsi);
933 return false;
936 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
938 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
939 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
940 node or for recovery of array indexing from pointer arithmetic.
941 Returns true, if all uses have been propagated into. */
943 static bool
944 forward_propagate_addr_expr (tree name, tree rhs)
946 int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
947 imm_use_iterator iter;
948 gimple use_stmt;
949 bool all = true;
950 bool single_use_p = has_single_use (name);
952 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
954 bool result;
955 tree use_rhs;
957 /* If the use is not in a simple assignment statement, then
958 there is nothing we can do. */
959 if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
961 if (!is_gimple_debug (use_stmt))
962 all = false;
963 continue;
966 /* If the use is in a deeper loop nest, then we do not want
967 to propagate non-invariant ADDR_EXPRs into the loop as that
968 is likely adding expression evaluations into the loop. */
969 if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
970 && !is_gimple_min_invariant (rhs))
972 all = false;
973 continue;
977 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
978 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
979 single_use_p);
980 /* If the use has moved to a different statement adjust
981 the update machinery for the old statement too. */
982 if (use_stmt != gsi_stmt (gsi))
984 update_stmt (use_stmt);
985 use_stmt = gsi_stmt (gsi);
988 update_stmt (use_stmt);
990 all &= result;
992 /* Remove intermediate now unused copy and conversion chains. */
993 use_rhs = gimple_assign_rhs1 (use_stmt);
994 if (result
995 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
996 && TREE_CODE (use_rhs) == SSA_NAME
997 && has_zero_uses (gimple_assign_lhs (use_stmt)))
999 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1000 release_defs (use_stmt);
1001 gsi_remove (&gsi, true);
1005 return all;
1008 /* Forward propagate the comparison defined in STMT like
1009 cond_1 = x CMP y to uses of the form
1010 a_1 = (T')cond_1
1011 a_1 = !cond_1
1012 a_1 = cond_1 != 0
1013 Returns true if stmt is now unused. */
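/* As an illustration (made-up SSA names): given

     cond_1 = x_2 < y_3;
     a_4 = (int) cond_1;

   the use is rewritten to a_4 = x_2 < y_3; likewise a_4 = !cond_1
   becomes a_4 = x_2 >= y_3 when the comparison can be safely inverted
   (e.g. no NaNs involved).  */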
1015 static bool
1016 forward_propagate_comparison (gimple stmt)
1018 tree name = gimple_assign_lhs (stmt);
1019 gimple use_stmt;
1020 tree tmp = NULL_TREE;
1022 /* Don't propagate ssa names that occur in abnormal phis. */
1023 if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1024 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
1025 || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
1026 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
1027 return false;
1029 /* Do not un-cse comparisons. But propagate through copies. */
1030 use_stmt = get_prop_dest_stmt (name, &name);
1031 if (!use_stmt)
1032 return false;
1034 /* Conversion of the condition result to another integral type. */
1035 if (is_gimple_assign (use_stmt)
1036 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt))
1037 || TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
1038 == tcc_comparison
1039 || gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
1040 && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (use_stmt))))
1042 tree lhs = gimple_assign_lhs (use_stmt);
1044 /* We can propagate the condition into a conversion. */
1045 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt)))
1047 /* Avoid using fold here as that may create a COND_EXPR with
1048 non-boolean condition as canonical form. */
1049 tmp = build2 (gimple_assign_rhs_code (stmt), TREE_TYPE (lhs),
1050 gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
1052 /* We can propagate the condition into X op CST where op
1053 is EQ_EXPR or NE_EXPR and CST is either one or zero. */
1054 else if (TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
1055 == tcc_comparison
1056 && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
1057 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
1059 enum tree_code code = gimple_assign_rhs_code (use_stmt);
1060 tree cst = gimple_assign_rhs2 (use_stmt);
1061 tree cond;
1063 cond = build2 (gimple_assign_rhs_code (stmt),
1064 TREE_TYPE (cst),
1065 gimple_assign_rhs1 (stmt),
1066 gimple_assign_rhs2 (stmt));
1068 tmp = combine_cond_expr_cond (gimple_location (use_stmt),
1069 code, TREE_TYPE (lhs),
1070 cond, cst, false);
1071 if (tmp == NULL_TREE)
1072 return false;
1074 /* We can propagate the condition into a statement that
1075 computes the logical negation of the comparison result. */
1076 else if (gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
1078 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
1079 bool nans = HONOR_NANS (TYPE_MODE (type));
1080 enum tree_code code;
1081 code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
1082 if (code == ERROR_MARK)
1083 return false;
1085 tmp = build2 (code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
1086 gimple_assign_rhs2 (stmt));
1088 else
1089 return false;
1092 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1093 gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
1094 use_stmt = gsi_stmt (gsi);
1095 update_stmt (use_stmt);
1098 /* Remove defining statements. */
1099 remove_prop_source_from_use (name, stmt);
1101 if (dump_file && (dump_flags & TDF_DETAILS))
1103 tree old_rhs = rhs_to_tree (TREE_TYPE (gimple_assign_lhs (stmt)),
1104 stmt);
1105 fprintf (dump_file, " Replaced '");
1106 print_generic_expr (dump_file, old_rhs, dump_flags);
1107 fprintf (dump_file, "' with '");
1108 print_generic_expr (dump_file, tmp, dump_flags);
1109 fprintf (dump_file, "'\n");
1112 return true;
1115 return false;
1118 /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1119 If so, we can change STMT into lhs = y which can later be copy
1120 propagated. Similarly for negation.
1122 This could trivially be formulated as a forward propagation
1123 to immediate uses. However, we already had an implementation
1124 from DOM which used backward propagation via the use-def links.
1126 It turns out that backward propagation is actually faster as
1127 there's less work to do for each NOT/NEG expression we find.
1128 Backwards propagation needs to look at the statement in a single
1129 backlink. Forward propagation needs to look at potentially more
1130 than one forward link. */
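/* As an illustration (made-up SSA names): for

     x_1 = ~y_2;
     lhs_3 = ~x_1;

   the second statement is rewritten to lhs_3 = y_2, which later copy
   propagation can clean up; negation is handled the same way.  */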
1132 static void
1133 simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
1135 gimple stmt = gsi_stmt (*gsi_p);
1136 tree rhs = gimple_assign_rhs1 (stmt);
1137 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
1139 /* See if the RHS_DEF_STMT has the same form as our statement. */
1140 if (is_gimple_assign (rhs_def_stmt)
1141 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
1143 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
1145 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1146 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1147 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
1149 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
1150 stmt = gsi_stmt (*gsi_p);
1151 update_stmt (stmt);
1156 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1157 the condition which we may be able to optimize better. */
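/* As an illustration (made-up SSA names): for

     i_1 = (int) c_2;   where c_2 has a narrower unsigned type
     switch (i_1) ...

   the cast is dropped and c_2 becomes the switch index, provided the
   conversion preserves the value of every possible index.  */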
1159 static void
1160 simplify_gimple_switch (gimple stmt)
1162 tree cond = gimple_switch_index (stmt);
1163 tree def, to, ti;
1164 gimple def_stmt;
1166 /* The optimization that we really care about is removing unnecessary
1167 casts. That will let us do much better in propagating the inferred
1168 constant at the switch target. */
1169 if (TREE_CODE (cond) == SSA_NAME)
1171 def_stmt = SSA_NAME_DEF_STMT (cond);
1172 if (is_gimple_assign (def_stmt))
1174 if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
1176 int need_precision;
1177 bool fail;
1179 def = gimple_assign_rhs1 (def_stmt);
1181 #ifdef ENABLE_CHECKING
1182 /* ??? Why was Jeff testing this? We are gimple... */
1183 gcc_assert (is_gimple_val (def));
1184 #endif
1186 to = TREE_TYPE (cond);
1187 ti = TREE_TYPE (def);
1189 /* If we have an extension that preserves value, then we
1190 can copy the source value into the switch. */
1192 need_precision = TYPE_PRECISION (ti);
1193 fail = false;
1194 if (! INTEGRAL_TYPE_P (ti))
1195 fail = true;
1196 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
1197 fail = true;
1198 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
1199 need_precision += 1;
1200 if (TYPE_PRECISION (to) < need_precision)
1201 fail = true;
1203 if (!fail)
1205 gimple_switch_set_index (stmt, def);
1206 update_stmt (stmt);
1213 /* Run bitwise and assignments through the folder. If the first argument is an
1214 ssa name that is itself a result of a typecast of an ADDR_EXPR to an
1215 integer, feed the ADDR_EXPR to the folder rather than the ssa name. */
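/* As an illustration (made-up names): for

     ssa_1 = (int) &decl;
     res_2 = ssa_1 & 1;

   the ADDR_EXPR is handed to the folder, so if decl is known to be
   aligned to at least 2 bytes the statement folds to res_2 = 0.  */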
1218 static void
1219 simplify_bitwise_and (gimple_stmt_iterator *gsi, gimple stmt)
1221 tree res;
1222 tree arg1 = gimple_assign_rhs1 (stmt);
1223 tree arg2 = gimple_assign_rhs2 (stmt);
1225 if (TREE_CODE (arg2) != INTEGER_CST)
1226 return;
1228 if (TREE_CODE (arg1) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (arg1))
1230 gimple def = SSA_NAME_DEF_STMT (arg1);
1232 if (gimple_assign_cast_p (def)
1233 && INTEGRAL_TYPE_P (gimple_expr_type (def)))
1235 tree op = gimple_assign_rhs1 (def);
1237 if (TREE_CODE (op) == ADDR_EXPR)
1238 arg1 = op;
1242 res = fold_binary_loc (gimple_location (stmt),
1243 BIT_AND_EXPR, TREE_TYPE (gimple_assign_lhs (stmt)),
1244 arg1, arg2);
1245 if (res && is_gimple_min_invariant (res))
1247 gimple_assign_set_rhs_from_tree (gsi, res);
1248 update_stmt (stmt);
1250 return;
1253 /* Main entry point for the forward propagation optimizer. */
1255 static unsigned int
1256 tree_ssa_forward_propagate_single_use_vars (void)
1258 basic_block bb;
1259 unsigned int todoflags = 0;
1261 cfg_changed = false;
1263 FOR_EACH_BB (bb)
1265 gimple_stmt_iterator gsi;
1267 /* Note we update GSI within the loop as necessary. */
1268 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
1270 gimple stmt = gsi_stmt (gsi);
1272 /* If this statement sets an SSA_NAME to an address,
1273 try to propagate the address into the uses of the SSA_NAME. */
1274 if (is_gimple_assign (stmt))
1276 tree lhs = gimple_assign_lhs (stmt);
1277 tree rhs = gimple_assign_rhs1 (stmt);
1279 if (TREE_CODE (lhs) != SSA_NAME)
1281 gsi_next (&gsi);
1282 continue;
1285 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR
1286 /* Handle pointer conversions on invariant addresses
1287 as well, as this is valid gimple. */
1288 || (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1289 && TREE_CODE (rhs) == ADDR_EXPR
1290 && POINTER_TYPE_P (TREE_TYPE (lhs))))
1292 STRIP_NOPS (rhs);
1293 if (!stmt_references_abnormal_ssa_name (stmt)
1294 && forward_propagate_addr_expr (lhs, rhs))
1296 release_defs (stmt);
1297 todoflags |= TODO_remove_unused_locals;
1298 gsi_remove (&gsi, true);
1300 else
1301 gsi_next (&gsi);
1303 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1304 && is_gimple_min_invariant (rhs))
1306 /* Make sure to fold &a[0] + off_1 here. */
1307 fold_stmt_inplace (stmt);
1308 update_stmt (stmt);
1309 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1310 gsi_next (&gsi);
1312 else if ((gimple_assign_rhs_code (stmt) == BIT_NOT_EXPR
1313 || gimple_assign_rhs_code (stmt) == NEGATE_EXPR)
1314 && TREE_CODE (rhs) == SSA_NAME)
1316 simplify_not_neg_expr (&gsi);
1317 gsi_next (&gsi);
1319 else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
1321 /* In this case the entire COND_EXPR is in rhs1. */
1322 int did_something;
1323 fold_defer_overflow_warnings ();
1324 did_something = forward_propagate_into_cond (&gsi);
1325 stmt = gsi_stmt (gsi);
1326 if (did_something == 2)
1327 cfg_changed = true;
1328 fold_undefer_overflow_warnings (!TREE_NO_WARNING (rhs)
1329 && did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
1330 gsi_next (&gsi);
1332 else if (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1333 == tcc_comparison)
1335 if (forward_propagate_comparison (stmt))
1337 release_defs (stmt);
1338 todoflags |= TODO_remove_unused_locals;
1339 gsi_remove (&gsi, true);
1341 else
1342 gsi_next (&gsi);
1344 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
1346 simplify_bitwise_and (&gsi, stmt);
1347 gsi_next (&gsi);
1349 else
1350 gsi_next (&gsi);
1352 else if (gimple_code (stmt) == GIMPLE_SWITCH)
1354 simplify_gimple_switch (stmt);
1355 gsi_next (&gsi);
1357 else if (gimple_code (stmt) == GIMPLE_COND)
1359 int did_something;
1360 fold_defer_overflow_warnings ();
1361 did_something = forward_propagate_into_gimple_cond (stmt);
1362 if (did_something == 2)
1363 cfg_changed = true;
1364 fold_undefer_overflow_warnings (did_something, stmt,
1365 WARN_STRICT_OVERFLOW_CONDITIONAL);
1366 gsi_next (&gsi);
1368 else
1369 gsi_next (&gsi);
1373 if (cfg_changed)
1374 todoflags |= TODO_cleanup_cfg;
1375 return todoflags;
1379 static bool
1380 gate_forwprop (void)
1382 return flag_tree_forwprop;
1385 struct gimple_opt_pass pass_forwprop =
1388 GIMPLE_PASS,
1389 "forwprop", /* name */
1390 gate_forwprop, /* gate */
1391 tree_ssa_forward_propagate_single_use_vars, /* execute */
1392 NULL, /* sub */
1393 NULL, /* next */
1394 0, /* static_pass_number */
1395 TV_TREE_FORWPROP, /* tv_id */
1396 PROP_cfg | PROP_ssa, /* properties_required */
1397 0, /* properties_provided */
1398 0, /* properties_destroyed */
1399 0, /* todo_flags_start */
1400 TODO_dump_func
1401 | TODO_ggc_collect
1402 | TODO_update_ssa
1403 | TODO_verify_ssa /* todo_flags_finish */