2010-05-28 Segher Boessenkool <segher@kernel.crashing.org>
[official-gcc.git] / gcc / tree-ssa-forwprop.c
blobf7295067801992b77f96898272631ad98e751bda
1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "tm_p.h"
27 #include "basic-block.h"
28 #include "timevar.h"
29 #include "diagnostic.h"
30 #include "tree-pretty-print.h"
31 #include "tree-flow.h"
32 #include "tree-pass.h"
33 #include "tree-dump.h"
34 #include "langhooks.h"
35 #include "flags.h"
36 #include "gimple.h"
38 /* This pass propagates the RHS of assignment statements into use
39 sites of the LHS of the assignment. It's basically a specialized
40 form of tree combination. It is hoped all of this can disappear
41 when we have a generalized tree combiner.
43 One class of common cases we handle is forward propagating a single use
44 variable into a COND_EXPR.
46 bb0:
47 x = a COND b;
48 if (x) goto ... else goto ...
50 Will be transformed into:
52 bb0:
53 if (a COND b) goto ... else goto ...
55 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
57 Or (assuming c1 and c2 are constants):
59 bb0:
60 x = a + c1;
61 if (x EQ/NEQ c2) goto ... else goto ...
63 Will be transformed into:
65 bb0:
66 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
68 Similarly for x = a - c1.
72 bb0:
73 x = !a
74 if (x) goto ... else goto ...
76 Will be transformed into:
78 bb0:
79 if (a == 0) goto ... else goto ...
81 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
82 For these cases, we propagate A into all, possibly more than one,
83 COND_EXPRs that use X.
87 bb0:
88 x = (typecast) a
89 if (x) goto ... else goto ...
91 Will be transformed into:
93 bb0:
94 if (a != 0) goto ... else goto ...
96 (Assuming a is an integral type and x is a boolean or x is an
97 integral and a is a boolean.)
99 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
100 For these cases, we propagate A into all, possibly more than one,
101 COND_EXPRs that use X.
103 In addition to eliminating the variable and the statement which assigns
104 a value to the variable, we may be able to later thread the jump without
105 adding insane complexity in the dominator optimizer.
107 Also note these transformations can cascade. We handle this by having
108 a worklist of COND_EXPR statements to examine. As we make a change to
109 a statement, we put it back on the worklist to examine on the next
110 iteration of the main loop.
112 A second class of propagation opportunities arises for ADDR_EXPR
113 nodes.
115 ptr = &x->y->z;
116 res = *ptr;
118 Will get turned into
120 res = x->y->z;
123 ptr = (type1*)&type2var;
124 res = *ptr
126 Will get turned into (if type1 and type2 are the same size
127 and neither have volatile on them):
128 res = VIEW_CONVERT_EXPR<type1>(type2var)
132 ptr = &x[0];
133 ptr2 = ptr + <constant>;
135 Will get turned into
137 ptr2 = &x[constant/elementsize];
141 ptr = &x[0];
142 offset = index * element_size;
143 offset_p = (pointer) offset;
144 ptr2 = ptr + offset_p
146 Will get turned into:
148 ptr2 = &x[index];
151 ssa = (int) decl
152 res = ssa & 1
154 Provided that decl has known alignment >= 2, will get turned into
156 res = 0
158 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
159 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
160 {NOT_EXPR,NEG_EXPR}.
162 This will (of course) be extended as other needs arise. */
164 static bool forward_propagate_addr_expr (tree name, tree rhs);
166 /* Set to true if we delete EH edges during the optimization. */
167 static bool cfg_changed;
169 static tree rhs_to_tree (tree type, gimple stmt);
171 /* Get the next statement we can propagate NAME's value into skipping
172 trivial copies. Returns the statement that is suitable as a
173 propagation destination or NULL_TREE if there is no such one.
174 This only returns destinations in a single-use chain. FINAL_NAME_P
175 if non-NULL is written to the ssa name that represents the use. */
177 static gimple
178 get_prop_dest_stmt (tree name, tree *final_name_p)
180 use_operand_p use;
181 gimple use_stmt;
183 do {
184 /* If name has multiple uses, bail out. */
185 if (!single_imm_use (name, &use, &use_stmt))
186 return NULL;
188 /* If this is not a trivial copy, we found it. */
189 if (!gimple_assign_ssa_name_copy_p (use_stmt)
190 || gimple_assign_rhs1 (use_stmt) != name)
191 break;
193 /* Continue searching uses of the copy destination. */
194 name = gimple_assign_lhs (use_stmt);
195 } while (1);
197 if (final_name_p)
198 *final_name_p = name;
200 return use_stmt;
203 /* Get the statement we can propagate from into NAME skipping
204 trivial copies. Returns the statement which defines the
205 propagation source or NULL_TREE if there is no such one.
206 If SINGLE_USE_ONLY is set considers only sources which have
207 a single use chain up to NAME. If SINGLE_USE_P is non-null,
208 it is set to whether the chain to NAME is a single use chain
209 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
211 static gimple
212 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
214 bool single_use = true;
216 do {
217 gimple def_stmt = SSA_NAME_DEF_STMT (name);
219 if (!has_single_use (name))
221 single_use = false;
222 if (single_use_only)
223 return NULL;
226 /* If name is defined by a PHI node or is the default def, bail out. */
227 if (!is_gimple_assign (def_stmt))
228 return NULL;
230 /* If def_stmt is not a simple copy, we possibly found it. */
231 if (!gimple_assign_ssa_name_copy_p (def_stmt))
233 tree rhs;
235 if (!single_use_only && single_use_p)
236 *single_use_p = single_use;
238 /* We can look through pointer conversions in the search
239 for a useful stmt for the comparison folding. */
240 rhs = gimple_assign_rhs1 (def_stmt);
241 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
242 && TREE_CODE (rhs) == SSA_NAME
243 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
244 && POINTER_TYPE_P (TREE_TYPE (rhs)))
245 name = rhs;
246 else
247 return def_stmt;
249 else
251 /* Continue searching the def of the copy source name. */
252 name = gimple_assign_rhs1 (def_stmt);
254 } while (1);
257 /* Checks if the destination ssa name in DEF_STMT can be used as
258 propagation source. Returns true if so, otherwise false. */
260 static bool
261 can_propagate_from (gimple def_stmt)
263 use_operand_p use_p;
264 ssa_op_iter iter;
266 gcc_assert (is_gimple_assign (def_stmt));
268 /* If the rhs has side-effects we cannot propagate from it. */
269 if (gimple_has_volatile_ops (def_stmt))
270 return false;
272 /* If the rhs is a load we cannot propagate from it. */
273 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
274 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
275 return false;
277 /* Constants can be always propagated. */
278 if (gimple_assign_single_p (def_stmt)
279 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
280 return true;
282 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
283 FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_USE)
284 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
285 return false;
287 /* If the definition is a conversion of a pointer to a function type,
288 then we can not apply optimizations as some targets require
289 function pointers to be canonicalized and in this case this
290 optimization could eliminate a necessary canonicalization. */
291 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
293 tree rhs = gimple_assign_rhs1 (def_stmt);
294 if (POINTER_TYPE_P (TREE_TYPE (rhs))
295 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
296 return false;
299 return true;
302 /* Remove a copy chain ending in NAME along the defs but not
303 further or including UP_TO_STMT. If NAME was replaced in
304 its only use then this function can be used to clean up
305 dead stmts. Returns true if UP_TO_STMT can be removed
306 as well, otherwise false. */
308 static bool
309 remove_prop_source_from_use (tree name, gimple up_to_stmt)
311 gimple_stmt_iterator gsi;
312 gimple stmt;
314 do {
315 if (!has_zero_uses (name))
316 return false;
318 stmt = SSA_NAME_DEF_STMT (name);
319 if (stmt == up_to_stmt)
320 return true;
322 gsi = gsi_for_stmt (stmt);
323 release_defs (stmt);
324 gsi_remove (&gsi, true);
326 name = (gimple_assign_copy_p (stmt)) ? gimple_assign_rhs1 (stmt) : NULL;
327 } while (name && TREE_CODE (name) == SSA_NAME);
329 return false;
332 /* Return the rhs of a gimple_assign STMT in a form of a single tree,
333 converted to type TYPE.
335 This should disappear, but is needed so we can combine expressions and use
336 the fold() interfaces. Long term, we need to develop folding and combine
337 routines that deal with gimple exclusively . */
339 static tree
340 rhs_to_tree (tree type, gimple stmt)
342 location_t loc = gimple_location (stmt);
343 enum tree_code code = gimple_assign_rhs_code (stmt);
344 if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
345 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
346 gimple_assign_rhs2 (stmt));
347 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
348 return build1 (code, type, gimple_assign_rhs1 (stmt));
349 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
350 return gimple_assign_rhs1 (stmt);
351 else
352 gcc_unreachable ();
355 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
356 the folded result in a form suitable for COND_EXPR_COND or
357 NULL_TREE, if there is no suitable simplified form. If
358 INVARIANT_ONLY is true only gimple_min_invariant results are
359 considered simplified. */
361 static tree
362 combine_cond_expr_cond (location_t loc, enum tree_code code, tree type,
363 tree op0, tree op1, bool invariant_only)
365 tree t;
367 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
369 t = fold_binary_loc (loc, code, type, op0, op1);
370 if (!t)
371 return NULL_TREE;
373 /* Require that we got a boolean type out if we put one in. */
374 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
376 /* Canonicalize the combined condition for use in a COND_EXPR. */
377 t = canonicalize_cond_expr_cond (t);
379 /* Bail out if we required an invariant but didn't get one. */
380 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
381 return NULL_TREE;
383 return t;
386 /* Propagate from the ssa name definition statements of COND_EXPR
387 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
388 Returns zero if no statement was changed, one if there were
389 changes and two if cfg_cleanup needs to run.
391 This must be kept in sync with forward_propagate_into_cond. */
393 static int
394 forward_propagate_into_gimple_cond (gimple stmt)
/* Return value encoding: 0 = nothing changed, 1 = the condition was
   simplified, 2 = it simplified to an invariant so cfg_cleanup must run.  */
396 int did_something = 0;
397 location_t loc = gimple_location (stmt);
/* Loop: each successful combination restarts the analysis on the updated
   condition so simplifications can cascade.  */
399 do {
400 tree tmp = NULL_TREE;
401 tree name = NULL_TREE, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
402 gimple def_stmt;
403 bool single_use0_p = false, single_use1_p = false;
404 enum tree_code code = gimple_cond_code (stmt);
406 /* We can do tree combining on SSA_NAME and comparison expressions. */
407 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) == tcc_comparison)
409 /* For comparisons use the first operand, that is likely to
410 simplify comparisons against constants. */
411 if (TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME)
413 name = gimple_cond_lhs (stmt);
414 def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
415 if (def_stmt && can_propagate_from (def_stmt))
417 tree op1 = gimple_cond_rhs (stmt);
418 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
/* Require an invariant result if the def chain is not single-use, so we
   never duplicate non-trivial computations.  */
419 tmp = combine_cond_expr_cond (loc, code, boolean_type_node,
420 rhs0, op1, !single_use0_p);
423 /* If that wasn't successful, try the second operand. */
424 if (tmp == NULL_TREE
425 && TREE_CODE (gimple_cond_rhs (stmt)) == SSA_NAME)
427 tree op0 = gimple_cond_lhs (stmt);
428 name = gimple_cond_rhs (stmt);
429 def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
430 if (!def_stmt || !can_propagate_from (def_stmt))
431 return did_something;
433 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
434 tmp = combine_cond_expr_cond (loc, code, boolean_type_node, op0,
435 rhs1, !single_use1_p);
437 /* If that wasn't successful either, try both operands. */
438 if (tmp == NULL_TREE
439 && rhs0 != NULL_TREE
440 && rhs1 != NULL_TREE)
441 tmp = combine_cond_expr_cond (loc, code, boolean_type_node, rhs0,
442 fold_convert_loc (loc,
443 TREE_TYPE (rhs0),
444 rhs1),
445 !(single_use0_p && single_use1_p))
/* A simplification was found: install it, clean up dead defs, and loop.  */
448 if (tmp)
450 if (dump_file && tmp)
452 tree cond = build2 (gimple_cond_code (stmt),
453 boolean_type_node,
454 gimple_cond_lhs (stmt),
455 gimple_cond_rhs (stmt));
456 fprintf (dump_file, " Replaced '");
457 print_generic_expr (dump_file, cond, 0);
458 fprintf (dump_file, "' with '");
459 print_generic_expr (dump_file, tmp, 0);
460 fprintf (dump_file, "'\n");
463 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
464 update_stmt (stmt);
466 /* Remove defining statements. */
467 remove_prop_source_from_use (name, NULL);
469 if (is_gimple_min_invariant (tmp))
470 did_something = 2;
471 else if (did_something == 0)
472 did_something = 1;
474 /* Continue combining. */
475 continue;
478 break;
479 } while (1);
481 return did_something;
485 /* Propagate from the ssa name definition statements of COND_EXPR
486 in the rhs of statement STMT into the conditional if that simplifies it.
487 Returns zero if no statement was changed, one if there were
488 changes and two if cfg_cleanup needs to run.
490 This must be kept in sync with forward_propagate_into_gimple_cond. */
492 static int
493 forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
495 gimple stmt = gsi_stmt (*gsi_p);
496 location_t loc = gimple_location (stmt);
497 int did_something = 0;
499 do {
500 tree tmp = NULL_TREE;
501 tree cond = gimple_assign_rhs1 (stmt);
502 tree name, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
503 gimple def_stmt;
504 bool single_use0_p = false, single_use1_p = false;
506 /* We can do tree combining on SSA_NAME and comparison expressions. */
507 if (COMPARISON_CLASS_P (cond)
508 && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME)
510 /* For comparisons use the first operand, that is likely to
511 simplify comparisons against constants. */
512 name = TREE_OPERAND (cond, 0);
513 def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
514 if (def_stmt && can_propagate_from (def_stmt))
516 tree op1 = TREE_OPERAND (cond, 1);
517 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
518 tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
519 boolean_type_node,
520 rhs0, op1, !single_use0_p);
522 /* If that wasn't successful, try the second operand. */
523 if (tmp == NULL_TREE
524 && TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME)
526 tree op0 = TREE_OPERAND (cond, 0);
527 name = TREE_OPERAND (cond, 1);
528 def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
529 if (!def_stmt || !can_propagate_from (def_stmt))
530 return did_something;
532 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
533 tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
534 boolean_type_node,
535 op0, rhs1, !single_use1_p);
537 /* If that wasn't successful either, try both operands. */
538 if (tmp == NULL_TREE
539 && rhs0 != NULL_TREE
540 && rhs1 != NULL_TREE)
541 tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
542 boolean_type_node,
543 rhs0,
544 fold_convert_loc (loc,
545 TREE_TYPE (rhs0),
546 rhs1),
547 !(single_use0_p && single_use1_p));
549 else if (TREE_CODE (cond) == SSA_NAME)
551 name = cond;
552 def_stmt = get_prop_source_stmt (name, true, NULL);
553 if (def_stmt || !can_propagate_from (def_stmt))
554 return did_something;
556 rhs0 = gimple_assign_rhs1 (def_stmt);
557 tmp = combine_cond_expr_cond (loc, NE_EXPR, boolean_type_node, rhs0,
558 build_int_cst (TREE_TYPE (rhs0), 0),
559 false);
562 if (tmp)
564 if (dump_file && tmp)
566 fprintf (dump_file, " Replaced '");
567 print_generic_expr (dump_file, cond, 0);
568 fprintf (dump_file, "' with '");
569 print_generic_expr (dump_file, tmp, 0);
570 fprintf (dump_file, "'\n");
573 gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp));
574 stmt = gsi_stmt (*gsi_p);
575 update_stmt (stmt);
577 /* Remove defining statements. */
578 remove_prop_source_from_use (name, NULL);
580 if (is_gimple_min_invariant (tmp))
581 did_something = 2;
582 else if (did_something == 0)
583 did_something = 1;
585 /* Continue combining. */
586 continue;
589 break;
590 } while (1);
592 return did_something;
595 /* We've just substituted an ADDR_EXPR into stmt. Update all the
596 relevant data structures to match. */
598 static void
599 tidy_after_forward_propagate_addr (gimple stmt)
601 /* We may have turned a trapping insn into a non-trapping insn. */
602 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
603 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
604 cfg_changed = true;
606 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
607 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
610 /* DEF_RHS contains the address of the 0th element in an array.
611 USE_STMT uses type of DEF_RHS to compute the address of an
612 arbitrary element within the array. The (variable) byte offset
613 of the element is contained in OFFSET.
615 We walk back through the use-def chains of OFFSET to verify that
616 it is indeed computing the offset of an element within the array
617 and extract the index corresponding to the given byte offset.
619 We then try to fold the entire address expression into a form
620 &array[index].
622 If we are successful, we replace the right hand side of USE_STMT
623 with the new address computation. */
625 static bool
626 forward_propagate_addr_into_variable_array_index (tree offset,
627 tree def_rhs,
628 gimple_stmt_iterator *use_stmt_gsi)
630 tree index, tunit;
631 gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
632 tree tmp;
/* TUNIT is the element size of the array DEF_RHS points into; the whole
   transformation only makes sense for a constant element size.  */
634 tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
635 if (!host_integerp (tunit, 1))
636 return false;
638 /* Get the offset's defining statement. */
639 offset_def = SSA_NAME_DEF_STMT (offset);
641 /* Try to find an expression for a proper index. This is either a
642 multiplication expression by the element size or just the ssa name we came
643 along in case the element size is one. In that case, however, we do not
644 allow multiplications because they can be computing index to a higher
645 level dimension (PR 37861). */
646 if (integer_onep (tunit))
648 if (is_gimple_assign (offset_def)
649 && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
650 return false;
652 index = offset;
654 else
656 /* The statement which defines OFFSET before type conversion
657 must be a simple GIMPLE_ASSIGN. */
658 if (!is_gimple_assign (offset_def))
659 return false;
661 /* The RHS of the statement which defines OFFSET must be a
662 multiplication of an object by the size of the array elements.
663 This implicitly verifies that the size of the array elements
664 is constant. */
665 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
666 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
667 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
669 /* The first operand to the MULT_EXPR is the desired index. */
670 index = gimple_assign_rhs1 (offset_def);
672 /* If we have idx * tunit + CST * tunit re-associate that. */
673 else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
674 || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
675 && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
676 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
677 && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
678 gimple_assign_rhs2 (offset_def),
679 tunit)) != NULL_TREE)
/* TMP now holds CST / tunit; look through the first operand's def for
   the matching idx * tunit multiplication.  */
681 gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
682 if (is_gimple_assign (offset_def2)
683 && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
684 && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
685 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
687 index = fold_build2 (gimple_assign_rhs_code (offset_def),
688 TREE_TYPE (offset),
689 gimple_assign_rhs1 (offset_def2), tmp);
691 else
692 return false;
694 else
695 return false;
698 /* Replace the pointer addition with array indexing. */
699 index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
700 true, GSI_SAME_STMT);
701 gimple_assign_set_rhs_from_tree (use_stmt_gsi, unshare_expr (def_rhs));
702 use_stmt = gsi_stmt (*use_stmt_gsi);
/* The new rhs is an ADDR_EXPR of an ARRAY_REF of element 0 (DEF_RHS, per
   the contract above); overwrite the zero index with the computed INDEX.  */
703 TREE_OPERAND (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0), 1)
704 = index;
706 /* That should have created gimple, so there is no need to
707 record information to undo the propagation. */
708 fold_stmt_inplace (use_stmt);
709 tidy_after_forward_propagate_addr (use_stmt);
710 return true;
713 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
714 ADDR_EXPR <whatever>.
716 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
717 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
718 node or for recovery of array indexing from pointer arithmetic.
720 Return true if the propagation was successful (the propagation can
721 be not totally successful, yet things may have been changed). */
723 static bool
724 forward_propagate_addr_expr_1 (tree name, tree def_rhs,
725 gimple_stmt_iterator *use_stmt_gsi,
726 bool single_use_p)
728 tree lhs, rhs, rhs2, array_ref;
729 tree *rhsp, *lhsp;
730 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
731 enum tree_code rhs_code;
/* RES is the eventual return value for the rhs cases; it is cleared when
   we decline to claim an lhs-only propagation (see below).  */
732 bool res = true;
733 bool addr_p = false;
735 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
737 lhs = gimple_assign_lhs (use_stmt);
738 rhs_code = gimple_assign_rhs_code (use_stmt);
739 rhs = gimple_assign_rhs1 (use_stmt);
741 /* Trivial cases. The use statement could be a trivial copy or a
742 useless conversion. Recurse to the uses of the lhs as copyprop does
743 not copy through different variant pointers and FRE does not catch
744 all useless conversions. Treat the case of a single-use name and
745 a conversion to def_rhs type separate, though. */
746 if (TREE_CODE (lhs) == SSA_NAME
747 && ((rhs_code == SSA_NAME && rhs == name)
748 || CONVERT_EXPR_CODE_P (rhs_code)))
750 /* Only recurse if we don't deal with a single use or we cannot
751 do the propagation to the current statement. In particular
752 we can end up with a conversion needed for a non-invariant
753 address which we cannot do in a single statement. */
754 if (!single_use_p
755 || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
756 && (!is_gimple_min_invariant (def_rhs)
757 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
758 && POINTER_TYPE_P (TREE_TYPE (def_rhs))
759 && (TYPE_PRECISION (TREE_TYPE (lhs))
760 > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
761 return forward_propagate_addr_expr (lhs, def_rhs);
/* Single-use copy/conversion: substitute DEF_RHS directly into this
   statement, keeping a NOP_EXPR when the types differ.  */
763 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
764 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
765 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
766 else
767 gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
768 return true;
771 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
772 ADDR_EXPR will not appear on the LHS. */
773 lhsp = gimple_assign_lhs_ptr (use_stmt);
774 while (handled_component_p (*lhsp))
775 lhsp = &TREE_OPERAND (*lhsp, 0);
776 lhs = *lhsp;
778 /* Now see if the LHS node is an INDIRECT_REF using NAME. If so,
779 propagate the ADDR_EXPR into the use of NAME and fold the result. */
780 if (TREE_CODE (lhs) == INDIRECT_REF
781 && TREE_OPERAND (lhs, 0) == name
783 if (may_propagate_address_into_dereference (def_rhs, lhs)
784 && (lhsp != gimple_assign_lhs_ptr (use_stmt)
785 || useless_type_conversion_p
786 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)), TREE_TYPE (rhs))))
788 *lhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
789 fold_stmt_inplace (use_stmt);
790 tidy_after_forward_propagate_addr (use_stmt);
792 /* Continue propagating into the RHS if this was not the only use. */
793 if (single_use_p)
794 return true;
796 else
797 /* We can have a struct assignment dereferencing our name twice.
798 Note that we didn't propagate into the lhs to not falsely
799 claim we did when propagating into the rhs. */
800 res = false;
803 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
804 nodes from the RHS. */
805 rhsp = gimple_assign_rhs1_ptr (use_stmt);
806 if (TREE_CODE (*rhsp) == ADDR_EXPR)
808 rhsp = &TREE_OPERAND (*rhsp, 0);
/* Remember that the reference sits under an ADDR_EXPR -- used below to
   avoid taking the address of a V_C_E of a constant.  */
809 addr_p = true;
811 while (handled_component_p (*rhsp))
812 rhsp = &TREE_OPERAND (*rhsp, 0);
813 rhs = *rhsp;
815 /* Now see if the RHS node is an INDIRECT_REF using NAME. If so,
816 propagate the ADDR_EXPR into the use of NAME and fold the result. */
817 if (TREE_CODE (rhs) == INDIRECT_REF
818 && TREE_OPERAND (rhs, 0) == name
819 && may_propagate_address_into_dereference (def_rhs, rhs))
821 *rhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
822 fold_stmt_inplace (use_stmt);
823 tidy_after_forward_propagate_addr (use_stmt);
824 return res;
827 /* Now see if the RHS node is an INDIRECT_REF using NAME. If so,
828 propagate the ADDR_EXPR into the use of NAME and try to
829 create a VCE and fold the result. */
830 if (TREE_CODE (rhs) == INDIRECT_REF
831 && TREE_OPERAND (rhs, 0) == name
832 && TYPE_SIZE (TREE_TYPE (rhs))
833 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
834 /* Function decls should not be used for VCE either as it could be a
835 function descriptor that we want and not the actual function code. */
836 && TREE_CODE (TREE_OPERAND (def_rhs, 0)) != FUNCTION_DECL
837 /* We should not convert volatile loads to non volatile loads. */
838 && !TYPE_VOLATILE (TREE_TYPE (rhs))
839 && !TYPE_VOLATILE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
840 && operand_equal_p (TYPE_SIZE (TREE_TYPE (rhs)),
841 TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))), 0)
842 /* Make sure we only do TBAA compatible replacements. */
843 && get_alias_set (TREE_OPERAND (def_rhs, 0)) == get_alias_set (rhs))
845 tree def_rhs_base, new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
846 new_rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), new_rhs);
847 if (TREE_CODE (new_rhs) != VIEW_CONVERT_EXPR)
849 /* If we have folded the VIEW_CONVERT_EXPR then the result is only
850 valid if we can replace the whole rhs of the use statement. */
851 if (rhs != gimple_assign_rhs1 (use_stmt))
852 return false;
853 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true, NULL,
854 true, GSI_NEW_STMT);
855 gimple_assign_set_rhs1 (use_stmt, new_rhs);
856 tidy_after_forward_propagate_addr (use_stmt);
857 return res;
859 /* If the defining rhs comes from an indirect reference, then do not
860 convert into a VIEW_CONVERT_EXPR. Likewise if we'll end up taking
861 the address of a V_C_E of a constant. */
862 def_rhs_base = TREE_OPERAND (def_rhs, 0);
863 while (handled_component_p (def_rhs_base))
864 def_rhs_base = TREE_OPERAND (def_rhs_base, 0);
865 if (!INDIRECT_REF_P (def_rhs_base)
866 && (!addr_p
867 || !is_gimple_min_invariant (def_rhs)))
869 /* We may have arbitrary VIEW_CONVERT_EXPRs in a nested component
870 reference. Place it there and fold the thing. */
871 *rhsp = new_rhs;
872 fold_stmt_inplace (use_stmt);
873 tidy_after_forward_propagate_addr (use_stmt);
874 return res;
878 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
879 is nothing to do. */
880 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
881 || gimple_assign_rhs1 (use_stmt) != name)
882 return false;
884 /* The remaining cases are all for turning pointer arithmetic into
885 array indexing. They only apply when we have the address of
886 element zero in an array. If that is not the case then there
887 is nothing to do. */
888 array_ref = TREE_OPERAND (def_rhs, 0);
889 if (TREE_CODE (array_ref) != ARRAY_REF
890 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
891 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
892 return false;
894 rhs2 = gimple_assign_rhs2 (use_stmt);
895 /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size
896 of the elements in X into &x[C1 + C2/element size]. */
897 if (TREE_CODE (rhs2) == INTEGER_CST)
899 tree new_rhs = maybe_fold_stmt_addition (gimple_location (use_stmt),
900 TREE_TYPE (def_rhs),
901 def_rhs, rhs2);
902 if (new_rhs)
904 tree type = TREE_TYPE (gimple_assign_lhs (use_stmt));
905 new_rhs = unshare_expr (new_rhs);
906 if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs)))
908 if (!is_gimple_min_invariant (new_rhs))
909 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs,
910 true, NULL_TREE,
911 true, GSI_SAME_STMT);
912 new_rhs = fold_convert (type, new_rhs);
914 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
915 use_stmt = gsi_stmt (*use_stmt_gsi);
916 update_stmt (use_stmt);
917 tidy_after_forward_propagate_addr (use_stmt);
918 return true;
922 /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
923 converting a multiplication of an index by the size of the
924 array elements, then the result is converted into the proper
925 type for the arithmetic. */
926 if (TREE_CODE (rhs2) == SSA_NAME
927 && integer_zerop (TREE_OPERAND (array_ref, 1))
928 && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
929 /* Avoid problems with IVopts creating PLUS_EXPRs with a
930 different type than their operands. */
931 && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
932 return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
933 use_stmt_gsi);
934 return false;
937 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
939 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
940 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
941 node or for recovery of array indexing from pointer arithmetic.
942 Returns true, if all uses have been propagated into. */
944 static bool
945 forward_propagate_addr_expr (tree name, tree rhs)
947 int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
948 imm_use_iterator iter;
949 gimple use_stmt;
/* ALL stays true only if every (non-debug) use was propagated into;
   it is the return value, telling the caller the def is now dead.  */
950 bool all = true;
951 bool single_use_p = has_single_use (name);
953 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
955 bool result;
956 tree use_rhs;
958 /* If the use is not in a simple assignment statement, then
959 there is nothing we can do. */
960 if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
962 if (!is_gimple_debug (use_stmt))
963 all = false;
964 continue;
967 /* If the use is in a deeper loop nest, then we do not want
968 to propagate non-invariant ADDR_EXPRs into the loop as that
969 is likely adding expression evaluations into the loop. */
970 if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
971 && !is_gimple_min_invariant (rhs))
973 all = false;
974 continue;
978 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
979 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
980 single_use_p);
981 /* If the use has moved to a different statement adjust
982 the update machinery for the old statement too. */
983 if (use_stmt != gsi_stmt (gsi))
985 update_stmt (use_stmt);
986 use_stmt = gsi_stmt (gsi);
989 update_stmt (use_stmt);
991 all &= result;
993 /* Remove intermediate now unused copy and conversion chains. */
994 use_rhs = gimple_assign_rhs1 (use_stmt);
995 if (result
996 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
997 && TREE_CODE (use_rhs) == SSA_NAME
998 && has_zero_uses (gimple_assign_lhs (use_stmt)))
1000 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1001 release_defs (use_stmt);
1002 gsi_remove (&gsi, true);
1006 return all;
1009 /* Forward propagate the comparison defined in STMT like
1010 cond_1 = x CMP y to uses of the form
1011 a_1 = (T')cond_1
1012 a_1 = !cond_1
1013 a_1 = cond_1 != 0
1014 Returns true if stmt is now unused. */
1016 static bool
1017 forward_propagate_comparison (gimple stmt)
1019 tree name = gimple_assign_lhs (stmt);
1020 gimple use_stmt;
1021 tree tmp = NULL_TREE;
1023 /* Don't propagate ssa names that occur in abnormal phis. */
1024 if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1025 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
1026 || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
1027 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
1028 return false;
1030 /* Do not un-cse comparisons. But propagate through copies. */
1031 use_stmt = get_prop_dest_stmt (name, &name);
1032 if (!use_stmt)
1033 return false;
1035 /* Conversion of the condition result to another integral type. */
1036 if (is_gimple_assign (use_stmt)
1037 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt))
1038 || TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
1039 == tcc_comparison
1040 || gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
1041 && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (use_stmt))))
1043 tree lhs = gimple_assign_lhs (use_stmt);
1045 /* We can propagate the condition into a conversion. */
1046 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt)))
1048 /* Avoid using fold here as that may create a COND_EXPR with
1049 non-boolean condition as canonical form. */
1050 tmp = build2 (gimple_assign_rhs_code (stmt), TREE_TYPE (lhs),
1051 gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
1053 /* We can propagate the condition into X op CST where op
1054 is EQ_EXPR or NE_EXPR and CST is either one or zero. */
1055 else if (TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
1056 == tcc_comparison
1057 && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
1058 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
1060 enum tree_code code = gimple_assign_rhs_code (use_stmt);
1061 tree cst = gimple_assign_rhs2 (use_stmt);
1062 tree cond;
1064 cond = build2 (gimple_assign_rhs_code (stmt),
1065 TREE_TYPE (cst),
1066 gimple_assign_rhs1 (stmt),
1067 gimple_assign_rhs2 (stmt));
1069 tmp = combine_cond_expr_cond (gimple_location (use_stmt),
1070 code, TREE_TYPE (lhs),
1071 cond, cst, false);
1072 if (tmp == NULL_TREE)
1073 return false;
1075 /* We can propagate the condition into a statement that
1076 computes the logical negation of the comparison result. */
1077 else if (gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
1079 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
1080 bool nans = HONOR_NANS (TYPE_MODE (type));
1081 enum tree_code code;
1082 code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
1083 if (code == ERROR_MARK)
1084 return false;
1086 tmp = build2 (code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
1087 gimple_assign_rhs2 (stmt));
1089 else
1090 return false;
1093 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1094 gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
1095 use_stmt = gsi_stmt (gsi);
1096 update_stmt (use_stmt);
1099 /* Remove defining statements. */
1100 remove_prop_source_from_use (name, stmt);
1102 if (dump_file && (dump_flags & TDF_DETAILS))
1104 tree old_rhs = rhs_to_tree (TREE_TYPE (gimple_assign_lhs (stmt)),
1105 stmt);
1106 fprintf (dump_file, " Replaced '");
1107 print_generic_expr (dump_file, old_rhs, dump_flags);
1108 fprintf (dump_file, "' with '");
1109 print_generic_expr (dump_file, tmp, dump_flags);
1110 fprintf (dump_file, "'\n");
1113 return true;
1116 return false;
1119 /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1120 If so, we can change STMT into lhs = y which can later be copy
1121 propagated. Similarly for negation.
1123 This could trivially be formulated as a forward propagation
1124 to immediate uses. However, we already had an implementation
1125 from DOM which used backward propagation via the use-def links.
1127 It turns out that backward propagation is actually faster as
1128 there's less work to do for each NOT/NEG expression we find.
1129 Backwards propagation needs to look at the statement in a single
1130 backlink. Forward propagation needs to look at potentially more
1131 than one forward link. */
1133 static void
1134 simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
1136 gimple stmt = gsi_stmt (*gsi_p);
1137 tree rhs = gimple_assign_rhs1 (stmt);
1138 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
1140 /* See if the RHS_DEF_STMT has the same form as our statement. */
1141 if (is_gimple_assign (rhs_def_stmt)
1142 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
1144 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
1146 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1147 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1148 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
1150 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
1151 stmt = gsi_stmt (*gsi_p);
1152 update_stmt (stmt);
1157 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1158 the condition which we may be able to optimize better. */
1160 static void
1161 simplify_gimple_switch (gimple stmt)
1163 tree cond = gimple_switch_index (stmt);
1164 tree def, to, ti;
1165 gimple def_stmt;
1167 /* The optimization that we really care about is removing unnecessary
1168 casts. That will let us do much better in propagating the inferred
1169 constant at the switch target. */
1170 if (TREE_CODE (cond) == SSA_NAME)
1172 def_stmt = SSA_NAME_DEF_STMT (cond);
1173 if (is_gimple_assign (def_stmt))
1175 if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
1177 int need_precision;
1178 bool fail;
1180 def = gimple_assign_rhs1 (def_stmt);
1182 #ifdef ENABLE_CHECKING
1183 /* ??? Why was Jeff testing this? We are gimple... */
1184 gcc_assert (is_gimple_val (def));
1185 #endif
1187 to = TREE_TYPE (cond);
1188 ti = TREE_TYPE (def);
1190 /* If we have an extension that preserves value, then we
1191 can copy the source value into the switch. */
1193 need_precision = TYPE_PRECISION (ti);
1194 fail = false;
1195 if (! INTEGRAL_TYPE_P (ti))
1196 fail = true;
1197 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
1198 fail = true;
1199 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
1200 need_precision += 1;
1201 if (TYPE_PRECISION (to) < need_precision)
1202 fail = true;
1204 if (!fail)
1206 gimple_switch_set_index (stmt, def);
1207 update_stmt (stmt);
1214 /* Run bitwise and assignments throug the folder. If the first argument is an
1215 ssa name that is itself a result of a typecast of an ADDR_EXPR to an
1216 integer, feed the ADDR_EXPR to the folder rather than the ssa name.
1219 static void
1220 simplify_bitwise_and (gimple_stmt_iterator *gsi, gimple stmt)
1222 tree res;
1223 tree arg1 = gimple_assign_rhs1 (stmt);
1224 tree arg2 = gimple_assign_rhs2 (stmt);
1226 if (TREE_CODE (arg2) != INTEGER_CST)
1227 return;
1229 if (TREE_CODE (arg1) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (arg1))
1231 gimple def = SSA_NAME_DEF_STMT (arg1);
1233 if (gimple_assign_cast_p (def)
1234 && INTEGRAL_TYPE_P (gimple_expr_type (def)))
1236 tree op = gimple_assign_rhs1 (def);
1238 if (TREE_CODE (op) == ADDR_EXPR)
1239 arg1 = op;
1243 res = fold_binary_loc (gimple_location (stmt),
1244 BIT_AND_EXPR, TREE_TYPE (gimple_assign_lhs (stmt)),
1245 arg1, arg2);
1246 if (res && is_gimple_min_invariant (res))
1248 gimple_assign_set_rhs_from_tree (gsi, res);
1249 update_stmt (stmt);
1251 return;
1254 /* Main entry point for the forward propagation optimizer. */
1256 static unsigned int
1257 tree_ssa_forward_propagate_single_use_vars (void)
1259 basic_block bb;
1260 unsigned int todoflags = 0;
1262 cfg_changed = false;
1264 FOR_EACH_BB (bb)
1266 gimple_stmt_iterator gsi;
1268 /* Note we update GSI within the loop as necessary. */
1269 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
1271 gimple stmt = gsi_stmt (gsi);
1273 /* If this statement sets an SSA_NAME to an address,
1274 try to propagate the address into the uses of the SSA_NAME. */
1275 if (is_gimple_assign (stmt))
1277 tree lhs = gimple_assign_lhs (stmt);
1278 tree rhs = gimple_assign_rhs1 (stmt);
1280 if (TREE_CODE (lhs) != SSA_NAME)
1282 gsi_next (&gsi);
1283 continue;
1286 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR
1287 /* Handle pointer conversions on invariant addresses
1288 as well, as this is valid gimple. */
1289 || (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1290 && TREE_CODE (rhs) == ADDR_EXPR
1291 && POINTER_TYPE_P (TREE_TYPE (lhs))))
1293 STRIP_NOPS (rhs);
1294 if (!stmt_references_abnormal_ssa_name (stmt)
1295 && forward_propagate_addr_expr (lhs, rhs))
1297 release_defs (stmt);
1298 todoflags |= TODO_remove_unused_locals;
1299 gsi_remove (&gsi, true);
1301 else
1302 gsi_next (&gsi);
1304 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1305 && is_gimple_min_invariant (rhs))
1307 /* Make sure to fold &a[0] + off_1 here. */
1308 fold_stmt_inplace (stmt);
1309 update_stmt (stmt);
1310 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1311 gsi_next (&gsi);
1313 else if ((gimple_assign_rhs_code (stmt) == BIT_NOT_EXPR
1314 || gimple_assign_rhs_code (stmt) == NEGATE_EXPR)
1315 && TREE_CODE (rhs) == SSA_NAME)
1317 simplify_not_neg_expr (&gsi);
1318 gsi_next (&gsi);
1320 else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
1322 /* In this case the entire COND_EXPR is in rhs1. */
1323 int did_something;
1324 fold_defer_overflow_warnings ();
1325 did_something = forward_propagate_into_cond (&gsi);
1326 stmt = gsi_stmt (gsi);
1327 if (did_something == 2)
1328 cfg_changed = true;
1329 fold_undefer_overflow_warnings (!TREE_NO_WARNING (rhs)
1330 && did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
1331 gsi_next (&gsi);
1333 else if (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1334 == tcc_comparison)
1336 if (forward_propagate_comparison (stmt))
1338 release_defs (stmt);
1339 todoflags |= TODO_remove_unused_locals;
1340 gsi_remove (&gsi, true);
1342 else
1343 gsi_next (&gsi);
1345 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
1347 simplify_bitwise_and (&gsi, stmt);
1348 gsi_next (&gsi);
1350 else
1351 gsi_next (&gsi);
1353 else if (gimple_code (stmt) == GIMPLE_SWITCH)
1355 simplify_gimple_switch (stmt);
1356 gsi_next (&gsi);
1358 else if (gimple_code (stmt) == GIMPLE_COND)
1360 int did_something;
1361 fold_defer_overflow_warnings ();
1362 did_something = forward_propagate_into_gimple_cond (stmt);
1363 if (did_something == 2)
1364 cfg_changed = true;
1365 fold_undefer_overflow_warnings (did_something, stmt,
1366 WARN_STRICT_OVERFLOW_CONDITIONAL);
1367 gsi_next (&gsi);
1369 else
1370 gsi_next (&gsi);
1374 if (cfg_changed)
1375 todoflags |= TODO_cleanup_cfg;
1376 return todoflags;
1380 static bool
1381 gate_forwprop (void)
1383 return flag_tree_forwprop;
1386 struct gimple_opt_pass pass_forwprop =
1389 GIMPLE_PASS,
1390 "forwprop", /* name */
1391 gate_forwprop, /* gate */
1392 tree_ssa_forward_propagate_single_use_vars, /* execute */
1393 NULL, /* sub */
1394 NULL, /* next */
1395 0, /* static_pass_number */
1396 TV_TREE_FORWPROP, /* tv_id */
1397 PROP_cfg | PROP_ssa, /* properties_required */
1398 0, /* properties_provided */
1399 0, /* properties_destroyed */
1400 0, /* todo_flags_start */
1401 TODO_dump_func
1402 | TODO_ggc_collect
1403 | TODO_update_ssa
1404 | TODO_verify_ssa /* todo_flags_finish */