/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "timevar.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-dump.h"
#include "langhooks.h"
#include "flags.h"
#include "gimple.h"
#include "expr.h"
/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
   integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or

     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither have volatile on them):

     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or

     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions
   (allowing removal of the cast), and {NOT_EXPR,NEG_EXPR} into a
   subsequent {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */
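
/* As a concrete instance of the cascading described above (an
   illustrative sketch with invented SSA names, not from a testcase):
   after propagating x = a < b into "y = x; if (y) ..." to obtain
   "if (a < b) ...", the copy y = x and the comparison x = a < b become
   dead and are removed, and the rewritten condition goes back on the
   worklist in case its operands can be combined further.  */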
static bool forward_propagate_addr_expr (tree name, tree rhs);

/* Set to true if we delete EH edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple stmt);
/* Get the next statement we can propagate NAME's value into skipping
   trivial copies.  Returns the statement that is suitable as a
   propagation destination or NULL if there is no such one.
   This only returns destinations in a single-use chain.  If
   FINAL_NAME_P is non-NULL, it is written to with the ssa name
   that represents the use.  */

static gimple
get_prop_dest_stmt (tree name, tree *final_name_p)
{
  use_operand_p use;
  gimple use_stmt;

  do {
    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))
      return NULL;

    /* If this is not a trivial copy, we found it.  */
    if (!gimple_assign_ssa_name_copy_p (use_stmt)
        || gimple_assign_rhs1 (use_stmt) != name)
      break;

    /* Continue searching uses of the copy destination.  */
    name = gimple_assign_lhs (use_stmt);
  } while (1);

  if (final_name_p)
    *final_name_p = name;

  return use_stmt;
}
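
/* For instance, given the single-use copy chain (an illustrative
   sketch with invented SSA names, not from a testcase):

     a_1 = b_2 < c_3;
     d_4 = a_1;
     if (d_4 != 0) ...

   get_prop_dest_stmt (a_1, &final) skips the trivial copy and returns
   the GIMPLE_COND, with FINAL set to d_4.  */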
/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL if there is no such one.
   If SINGLE_USE_ONLY is set considers only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
        single_use = false;
        if (single_use_only)
          return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is not a simple copy, we possibly found it.  */
    if (!gimple_assign_ssa_name_copy_p (def_stmt))
      {
        tree rhs;

        if (!single_use_only && single_use_p)
          *single_use_p = single_use;

        /* We can look through pointer conversions in the search
           for a useful stmt for the comparison folding.  */
        rhs = gimple_assign_rhs1 (def_stmt);
        if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
            && TREE_CODE (rhs) == SSA_NAME
            && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
            && POINTER_TYPE_P (TREE_TYPE (rhs)))
          name = rhs;
        else
          return def_stmt;
      }
    else
      {
        /* Continue searching the def of the copy source name.  */
        name = gimple_assign_rhs1 (def_stmt);
      }
  } while (1);
}
/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can be always propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we can not apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}
/* Remove a copy chain ending in NAME along the defs.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (!has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    gsi = gsi_for_stmt (stmt);
    bb = gimple_bb (stmt);
    release_defs (stmt);
    gsi_remove (&gsi, true);
    cfg_changed |= gimple_purge_dead_eh_edges (bb);

    name = (gimple_assign_copy_p (stmt)) ? gimple_assign_rhs1 (stmt) : NULL;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}
/* Return the rhs of a gimple_assign STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt),
                            gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}
/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (location_t loc, enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  t = fold_binary_loc (loc, code, type, op0, op1);
  if (!t)
    return NULL_TREE;

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    return NULL_TREE;

  return t;
}
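
/* As an illustrative example (invented names, not from the sources):
   substituting the definition x_1 = a_2 + 2 into the comparison
   x_1 != 5 amounts to folding (a_2 + 2) != 5 through this function,
   which fold_binary_loc simplifies to the condition a_2 != 3.  */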
/* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
   of its operands.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (location_t loc,
                                     enum tree_code code, tree type,
                                     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
          tmp = combine_cond_expr_cond (loc, code, type,
                                        rhs0, op1, !single_use0_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
          tmp = combine_cond_expr_cond (loc, code, type,
                                        op0, rhs1, !single_use1_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (loc, code, type,
                                  rhs0, rhs1,
                                  !(single_use0_p && single_use1_p));

  return tmp;
}
/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns true if the stmt was modified, false if not.  */

static bool
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree tmp;

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (gimple_location (stmt),
                                             gimple_assign_rhs_code (stmt),
                                             TREE_TYPE (gimple_assign_lhs (stmt)),
                                             gimple_assign_rhs1 (stmt),
                                             gimple_assign_rhs2 (stmt));
  if (tmp)
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      update_stmt (stmt);
      return true;
    }

  return false;
}
/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gimple stmt)
{
  int did_something = 0;
  location_t loc = gimple_location (stmt);
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (loc, code,
                                             boolean_type_node,
                                             gimple_cond_lhs (stmt),
                                             gimple_cond_rhs (stmt));
  if (tmp)
    {
      if (dump_file && tmp)
        {
          tree cond = build2 (gimple_cond_code (stmt),
                              boolean_type_node,
                              gimple_cond_lhs (stmt),
                              gimple_cond_rhs (stmt));
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, cond, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      /* Remove defining statements.  */
      if (is_gimple_min_invariant (tmp))
        did_something = 2;
      else if (did_something == 0)
        did_something = 1;
    }

  return did_something;
}
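
/* For example (illustrative only, invented names): with

     x_1 = a_2 + 1;
     if (x_1 == 3) goto bb1; else goto bb2;

   the comparison folds to a_2 == 2, the GIMPLE_COND is rewritten to
   test that directly, and since the result is not an invariant the
   function returns 1.  Had it folded to a constant, 2 would be
   returned so that cfg_cleanup can prune the dead edge.  */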
/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_gimple_cond.  */

static int
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  location_t loc = gimple_location (stmt);
  int did_something = 0;
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (loc, TREE_CODE (cond),
                                               boolean_type_node,
                                               TREE_OPERAND (cond, 0),
                                               TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      tree name = cond, rhs0;
      gimple def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return did_something;

      rhs0 = gimple_assign_rhs1 (def_stmt);
      tmp = combine_cond_expr_cond (loc, NE_EXPR, boolean_type_node, rhs0,
                                    build_int_cst (TREE_TYPE (rhs0), 0),
                                    false);
    }

  if (tmp)
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, cond, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp));
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      /* Remove defining statements.  */
      if (is_gimple_min_invariant (tmp))
        did_something = 2;
      else if (did_something == 0)
        did_something = 1;
    }

  return did_something;
}
/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
    cfg_changed = true;

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}
/* DEF_RHS contains the address of the 0th element in an array.
   USE_STMT uses type of DEF_RHS to compute the address of an
   arbitrary element within the array.  The (variable) byte offset
   of the element is contained in OFFSET.

   We walk back through the use-def chains of OFFSET to verify that
   it is indeed computing the offset of an element within the array
   and extract the index corresponding to the given byte offset.

   We then try to fold the entire address expression into a form
   &array[index].

   If we are successful, we replace the right hand side of USE_STMT
   with the new address computation.  */

static bool
forward_propagate_addr_into_variable_array_index (tree offset,
                                                  tree def_rhs,
                                                  gimple_stmt_iterator *use_stmt_gsi)
{
  tree index, tunit;
  gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
  tree new_rhs, tmp;

  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
    tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
  else if (TREE_CODE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))) == ARRAY_TYPE)
    tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))));
  else
    return false;
  if (!host_integerp (tunit, 1))
    return false;

  /* Get the offset's defining statement.  */
  offset_def = SSA_NAME_DEF_STMT (offset);

  /* Try to find an expression for a proper index.  This is either a
     multiplication expression by the element size or just the ssa name
     we came along in case the element size is one.  In that case,
     however, we do not allow multiplications because they can be
     computing an index into a higher-level dimension (PR 37861).  */
  if (integer_onep (tunit))
    {
      if (is_gimple_assign (offset_def)
          && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
        return false;

      index = offset;
    }
  else
    {
      /* The statement which defines OFFSET before type conversion
         must be a simple GIMPLE_ASSIGN.  */
      if (!is_gimple_assign (offset_def))
        return false;

      /* The RHS of the statement which defines OFFSET must be a
         multiplication of an object by the size of the array elements.
         This implicitly verifies that the size of the array elements
         is constant.  */
      if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
          && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
          && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
        {
          /* The first operand to the MULT_EXPR is the desired index.  */
          index = gimple_assign_rhs1 (offset_def);
        }
      /* If we have idx * tunit + CST * tunit re-associate that.  */
      else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
                || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
               && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
               && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
               && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
                                                gimple_assign_rhs2 (offset_def),
                                                tunit)) != NULL_TREE)
        {
          gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
          if (is_gimple_assign (offset_def2)
              && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
              && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
              && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
            {
              index = fold_build2 (gimple_assign_rhs_code (offset_def),
                                   TREE_TYPE (offset),
                                   gimple_assign_rhs1 (offset_def2), tmp);
            }
          else
            return false;
        }
      else
        return false;
    }

  /* Replace the pointer addition with array indexing.  */
  index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
    {
      new_rhs = unshare_expr (def_rhs);
      TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 1) = index;
    }
  else
    {
      new_rhs = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))),
                        unshare_expr (TREE_OPERAND (def_rhs, 0)),
                        index, integer_zero_node, NULL_TREE);
      new_rhs = build_fold_addr_expr (new_rhs);
      if (!useless_type_conversion_p (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                                      TREE_TYPE (new_rhs)))
        {
          new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true,
                                              NULL_TREE, true, GSI_SAME_STMT);
          new_rhs = fold_convert (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                                  new_rhs);
        }
    }
  gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
  use_stmt = gsi_stmt (*use_stmt_gsi);

  /* That should have created gimple, so there is no need to
     record information to undo the propagation.  */
  fold_stmt_inplace (use_stmt);
  tidy_after_forward_propagate_addr (use_stmt);
  return true;
}
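
/* Illustrative sketch of the transformation (invented names, not from
   a testcase): assuming int x[10], i.e. tunit == 4,

     ptr_1 = &x[0];
     offset_2 = i_3 * 4;
     ptr2_4 = ptr_1 + offset_2;

   the walk above recovers i_3 as the index and the use is rewritten to

     ptr2_4 = &x[i_3];  */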
/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation may
   not be totally successful, yet things may have been changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Trivial cases.  The use statement could be a trivial copy or a
     useless conversion.  Recurse to the uses of the lhs as copyprop does
     not copy through different variant pointers and FRE does not catch
     all useless conversions.  Treat the case of a single-use name and
     a conversion to def_rhs type separately, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && ((rhs_code == SSA_NAME && rhs == name)
          || CONVERT_EXPR_CODE_P (rhs_code)))
    {
      /* Only recurse if we don't deal with a single use or we cannot
         do the propagation to the current statement.  In particular
         we can end up with a conversion needed for a non-invariant
         address which we cannot do in a single statement.  */
      if (!single_use_p
          || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
              && (!is_gimple_min_invariant (def_rhs)
                  || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                      && POINTER_TYPE_P (TREE_TYPE (def_rhs))
                      && (TYPE_PRECISION (TREE_TYPE (lhs))
                          > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
        return forward_propagate_addr_expr (lhs, def_rhs);

      gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
        gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
      else
        gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
      return true;
    }

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
         for further propagation.  Simply rely on fold building that
         and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
                                 def_rhs,
                                 fold_convert (ptr_type_node,
                                               gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
        return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
                                                    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
         bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
          && forward_propagate_addr_expr (lhs, new_def_rhs))
        return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
                                        new_def_rhs, NULL_TREE);
      else if (is_gimple_min_invariant (new_def_rhs))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
                                        new_def_rhs, NULL_TREE);
      else
        return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  lhs = gimple_assign_lhs (use_stmt);
  while (handled_component_p (lhs))
    lhs = TREE_OPERAND (lhs, 0);

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (lhs);
          tree new_ptr;
          off = double_int_add (off,
                                shwi_to_double_int (def_rhs_offset));
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off = double_int_add (off, mem_ref_offset (def_rhs_base));
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (lhs, 0) = new_ptr;
          TREE_OPERAND (lhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the only use.  */
          if (single_use_p)
            return true;
        }
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the LHS preserving the original alias-type.  */
      else if (gimple_assign_lhs (use_stmt) == lhs
               && useless_type_conversion_p
                    (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
                     TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset
                = int_const_binop (PLUS_EXPR, TREE_OPERAND (lhs, 1),
                                   TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (lhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          gimple_assign_set_lhs (use_stmt,
                                 unshare_expr (TREE_OPERAND (def_rhs, 0)));
          *def_rhs_basep = saved;
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the
             only use.  */
          if (single_use_p)
            return true;
        }
      else
        /* We can have a struct assignment dereferencing our name twice.
           Note that we didn't propagate into the lhs to not falsely
           claim we did when propagating into the rhs.  */
        res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  rhs = gimple_assign_rhs1 (use_stmt);
  if (TREE_CODE (rhs) == ADDR_EXPR)
    rhs = TREE_OPERAND (rhs, 0);
  while (handled_component_p (rhs))
    rhs = TREE_OPERAND (rhs, 0);

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (rhs);
          tree new_ptr;
          off = double_int_add (off,
                                shwi_to_double_int (def_rhs_offset));
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off = double_int_add (off, mem_ref_offset (def_rhs_base));
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (rhs, 0) = new_ptr;
          TREE_OPERAND (rhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
          fold_stmt_inplace (use_stmt);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the RHS preserving the original alias-type.  */
      else if (gimple_assign_rhs1 (use_stmt) == rhs
               && useless_type_conversion_p
                    (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                     TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset
                = int_const_binop (PLUS_EXPR, TREE_OPERAND (rhs, 1),
                                   TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (rhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          gimple_assign_set_rhs1 (use_stmt,
                                  unshare_expr (TREE_OPERAND (def_rhs, 0)));
          *def_rhs_basep = saved;
          fold_stmt_inplace (use_stmt);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size
     of the elements in X into &x[C1 + C2/element size].  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = maybe_fold_stmt_addition (gimple_location (use_stmt),
                                               TREE_TYPE (def_rhs),
                                               def_rhs, rhs2);
      if (new_rhs)
        {
          tree type = TREE_TYPE (gimple_assign_lhs (use_stmt));
          new_rhs = unshare_expr (new_rhs);
          if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs)))
            {
              if (!is_gimple_min_invariant (new_rhs))
                new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs,
                                                    true, NULL_TREE,
                                                    true, GSI_SAME_STMT);
              new_rhs = fold_convert (type, new_rhs);
            }
          gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
          use_stmt = gsi_stmt (*use_stmt_gsi);
          update_stmt (use_stmt);
          tidy_after_forward_propagate_addr (use_stmt);
          return true;
        }
    }

  /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
     converting a multiplication of an index by the size of the
     array elements, then the result is converted into the proper
     type for the arithmetic.  */
  if (TREE_CODE (rhs2) == SSA_NAME
      && (TREE_CODE (array_ref) != ARRAY_REF
          || integer_zerop (TREE_OPERAND (array_ref, 1)))
      && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
      /* Avoid problems with IVopts creating PLUS_EXPRs with a
         different type than their operands.  */
      && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
    return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
                                                             use_stmt_gsi);
  return false;
}
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.
   Returns true, if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs)
{
  int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
        {
          if (!is_gimple_debug (use_stmt))
            all = false;
          continue;
        }

      /* If the use is in a deeper loop nest, then we do not want
         to propagate non-invariant ADDR_EXPRs into the loop as that
         is likely adding expression evaluations into the loop.  */
      if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
          && !is_gimple_min_invariant (rhs))
        {
          all = false;
          continue;
        }

      {
        gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
        result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                                single_use_p);
        /* If the use has moved to a different statement adjust
           the update machinery for the old statement too.  */
        if (use_stmt != gsi_stmt (gsi))
          {
            update_stmt (use_stmt);
            use_stmt = gsi_stmt (gsi);
          }

        update_stmt (use_stmt);
      }
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  return all && has_zero_uses (name);
}
/* Forward propagate the comparison defined in STMT like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  */

static bool
forward_propagate_comparison (gimple stmt)
{
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  tree tmp = NULL_TREE;
  gimple_stmt_iterator gsi;
  enum tree_code code;
  tree lhs;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    return false;

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt
      || !is_gimple_assign (use_stmt))
    return false;

  code = gimple_assign_rhs_code (use_stmt);
  lhs = gimple_assign_lhs (use_stmt);
  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
    return false;

  /* We can propagate the condition into a conversion.  */
  if (CONVERT_EXPR_CODE_P (code))
    {
      /* Avoid using fold here as that may create a COND_EXPR with
         non-boolean condition as canonical form.  */
      tmp = build2 (gimple_assign_rhs_code (stmt), TREE_TYPE (lhs),
                    gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
    }
  /* We can propagate the condition into a statement that
     computes the logical negation of the comparison result.  */
  else if ((code == BIT_NOT_EXPR
            && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
           || (code == BIT_XOR_EXPR
               && integer_onep (gimple_assign_rhs2 (use_stmt))))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
      bool nans = HONOR_NANS (TYPE_MODE (type));
      enum tree_code inv_code;
      inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
      if (inv_code == ERROR_MARK)
        return false;

      tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
                    gimple_assign_rhs2 (stmt));
    }
  else
    return false;

  gsi = gsi_for_stmt (use_stmt);
  gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
  use_stmt = gsi_stmt (gsi);
  update_stmt (use_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Replaced '");
      print_gimple_expr (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "' with '");
      print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
      fprintf (dump_file, "'\n");
    }

  /* Remove defining statements.  */
  return remove_prop_source_from_use (name);
}
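
/* For example (an illustrative sketch with invented names): given

     cond_1 = x_2 < y_3;
     a_4 = cond_1 ^ 1;

   the inverted comparison is substituted directly, yielding

     a_4 = x_2 >= y_3;

   (for floating-point types only when the inversion is valid in the
   presence of NaNs), after which cond_1's definition is dead and gets
   removed.  */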
/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
   If so, we can change STMT into lhs = y which can later be copy
   propagated.  Similarly for negation.

   This could trivially be formulated as a forward propagation
   to immediate uses.  However, we already had an implementation
   from DOM which used backward propagation via the use-def links.

   It turns out that backward propagation is actually faster as
   there's less work to do for each NOT/NEG expression we find.
   Backwards propagation needs to look at the statement in a single
   backlink.  Forward propagation needs to look at potentially more
   than one forward link.

   Returns true when the statement was changed.  */

static bool
simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree rhs = gimple_assign_rhs1 (stmt);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);

  /* See if the RHS_DEF_STMT has the same form as our statement.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
    {
      tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);

      /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
      if (TREE_CODE (rhs_def_operand) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
        {
          gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
          stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);
          return true;
        }
    }

  return false;
}
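
/* Sketch of the double-negation case (illustrative only):

     x_1 = ~y_2;
     lhs_3 = ~x_1;

   becomes

     lhs_3 = y_2;

   and likewise -(-y) becomes y for NEGATE_EXPR.  */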
/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gimple stmt)
{
  tree cond = gimple_switch_index (stmt);
  tree def, to, ti;
  gimple def_stmt;

  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      def_stmt = SSA_NAME_DEF_STMT (cond);
      if (is_gimple_assign (def_stmt))
        {
          if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
            {
              int need_precision;
              bool fail;

              def = gimple_assign_rhs1 (def_stmt);

              /* ??? Why was Jeff testing this?  We are gimple...  */
              gcc_checking_assert (is_gimple_val (def));

              to = TREE_TYPE (cond);
              ti = TREE_TYPE (def);

              /* If we have an extension that preserves value, then we
                 can copy the source value into the switch.  */

              need_precision = TYPE_PRECISION (ti);
              fail = false;
              if (! INTEGRAL_TYPE_P (ti))
                fail = true;
              else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
                fail = true;
              else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
                need_precision += 1;
              if (TYPE_PRECISION (to) < need_precision)
                fail = true;

              if (!fail)
                {
                  gimple_switch_set_index (stmt, def);
                  update_stmt (stmt);
                  return true;
                }
            }
        }
    }

  return false;
}
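
/* Illustrative example of the precision check above (invented names,
   not from a testcase): for

     unsigned char c_1 = ...;
     int i_2 = (int) c_1;
     switch (i_2) ...

   the extension from an 8-bit unsigned type to a 32-bit signed type
   preserves the value (8 + 1 <= 32), so the switch can index on c_1
   directly.  A narrowing or unsigned-to-signed-losing cast would set
   FAIL and leave the switch alone.  */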
/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
         twice, to handle ADDR_EXPR directly in p1/p2,
         SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
         on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
        {
          if (!POINTER_TYPE_P (TREE_TYPE (p)))
            break;
          if (TREE_CODE (p) == ADDR_EXPR)
            {
              tree q = TREE_OPERAND (p, 0);
              HOST_WIDE_INT offset;
              tree base = get_addr_base_and_unit_offset (q, &offset);
              if (base)
                {
                  q = base;
                  if (offset)
                    off = size_binop (PLUS_EXPR, off, size_int (offset));
                }
              if (TREE_CODE (q) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
                {
                  p = TREE_OPERAND (q, 0);
                  off = size_binop (PLUS_EXPR, off,
                                    double_int_to_tree (sizetype,
                                                        mem_ref_offset (q)));
                }
              else
                {
                  exps[i][j] = q;
                  offs[i][j++] = off;
                  break;
                }
            }
          if (TREE_CODE (p) != SSA_NAME)
            break;
          exps[i][j] = p;
          offs[i][j++] = off;
          if (j == CPD_ITERATIONS)
            break;
          stmt = SSA_NAME_DEF_STMT (p);
          if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
            break;
          code = gimple_assign_rhs_code (stmt);
          if (code == POINTER_PLUS_EXPR)
            {
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
                break;
              off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
              p = gimple_assign_rhs1 (stmt);
            }
          else if (code == ADDR_EXPR || code == NOP_EXPR)
            p = gimple_assign_rhs1 (stmt);
          else
            break;
        }
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
        return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
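
/* Worked example (illustrative): for p1 = &buf[4] and p2 = &buf[16]
   both walks bottom out at the common base BUF with accumulated unit
   offsets 4 and 16, so constant_pointer_difference (p1, p2) returns
   the constant 12.  If no common base expression is found within
   CPD_ITERATIONS steps, the difference is unknown and NULL_TREE is
   returned.  */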
/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
     memcpy (p, "abcd", 4);
     memset (p + 4, ' ', 3);
   into a single
     memcpy (p, "abcd   ", 7);
   call, if the latter can be stored by pieces during expansion.  */

static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
          || gimple_call_lhs (stmt2)
          || CHAR_BIT != 8
          || BITS_PER_UNIT != 8)
        break;
      else
        {
          tree callee1;
          tree ptr1, src1, str1, off1, len1, lhs1;
          tree ptr2 = gimple_call_arg (stmt2, 0);
          tree val2 = gimple_call_arg (stmt2, 1);
          tree len2 = gimple_call_arg (stmt2, 2);
          tree diff, vdef, new_str_cst;
          gimple use_stmt;
          unsigned int ptr1_align;
          unsigned HOST_WIDE_INT src_len;
          char *src_buf;
          use_operand_p use_p;

          if (!host_integerp (val2, 0)
              || !host_integerp (len2, 1))
            break;
          if (is_gimple_call (stmt1))
            {
              /* If first stmt is a call, it needs to be memcpy
                 or mempcpy, with string literal as second argument and
                 constant length.  */
              callee1 = gimple_call_fndecl (stmt1);
              if (callee1 == NULL_TREE
                  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
                  || gimple_call_num_args (stmt1) != 3)
                break;
              if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
                  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
                break;
              ptr1 = gimple_call_arg (stmt1, 0);
              src1 = gimple_call_arg (stmt1, 1);
              len1 = gimple_call_arg (stmt1, 2);
              lhs1 = gimple_call_lhs (stmt1);
              if (!host_integerp (len1, 1))
                break;
              str1 = string_constant (src1, &off1);
              if (str1 == NULL_TREE)
                break;
              if (!host_integerp (off1, 1)
                  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
                  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
                                             - tree_low_cst (off1, 1)) > 0
                  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
                  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
                     != TYPE_MODE (char_type_node))
                break;
            }
          else if (gimple_assign_single_p (stmt1))
            {
              /* Otherwise look for length 1 memcpy optimized into
                 assignment.  */
              ptr1 = gimple_assign_lhs (stmt1);
              src1 = gimple_assign_rhs1 (stmt1);
              if (TREE_CODE (ptr1) != MEM_REF
                  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
                  || !host_integerp (src1, 0))
                break;
              ptr1 = build_fold_addr_expr (ptr1);
              callee1 = NULL_TREE;
              len1 = size_one_node;
              lhs1 = NULL_TREE;
              off1 = size_zero_node;
              str1 = NULL_TREE;
            }
          else
            break;

          diff = constant_pointer_difference (ptr1, ptr2);
          if (diff == NULL && lhs1 != NULL)
            {
              diff = constant_pointer_difference (lhs1, ptr2);
              if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
                  && diff != NULL)
                diff = size_binop (PLUS_EXPR, diff,
                                   fold_convert (sizetype, len1));
            }
          /* If the difference between the second and first destination pointer
             is not constant, or is bigger than memcpy length, bail out.  */
          if (diff == NULL
              || !host_integerp (diff, 1)
              || tree_int_cst_lt (len1, diff))
            break;

          /* Use maximum of difference plus memset length and memcpy length
             as the new memcpy length; if it is too big, bail out.  */
          src_len = tree_low_cst (diff, 1);
          src_len += tree_low_cst (len2, 1);
          if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
            src_len = tree_low_cst (len1, 1);
          if (src_len > 1024)
            break;

          /* If mempcpy value is used elsewhere, bail out, as mempcpy
             with bigger length will return different result.  */
          if (lhs1 != NULL_TREE
              && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
              && (TREE_CODE (lhs1) != SSA_NAME
                  || !single_imm_use (lhs1, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          /* If anything reads memory in between memcpy and memset
             call, the modified memcpy call might change it.  */
          vdef = gimple_vdef (stmt1);
          if (vdef != NULL
              && (!single_imm_use (vdef, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          ptr1_align = get_pointer_alignment (ptr1, BIGGEST_ALIGNMENT);
          /* Construct the new source string literal.  */
          src_buf = XALLOCAVEC (char, src_len + 1);
          if (callee1)
            memcpy (src_buf,
                    TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
                    tree_low_cst (len1, 1));
          else
            src_buf[0] = tree_low_cst (src1, 0);
          memset (src_buf + tree_low_cst (diff, 1),
                  tree_low_cst (val2, 1), tree_low_cst (len2, 1));
          src_buf[src_len] = '\0';
          /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
             handle embedded '\0's.  */
          if (strlen (src_buf) != src_len)
            break;
          rtl_profile_for_bb (gimple_bb (stmt2));
          /* If the new memcpy wouldn't be emitted by storing the literal
             by pieces, this optimization might enlarge .rodata too much,
             as commonly used string literals couldn't be shared any
             longer.  */
          if (!can_store_by_pieces (src_len,
                                    builtin_strncpy_read_str,
                                    src_buf, ptr1_align, false))
            break;

          new_str_cst = build_string_literal (src_len, src_buf);
          if (callee1)
            {
              /* If STMT1 is a mem{,p}cpy call, adjust it and remove
                 memset call.  */
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                gimple_call_set_lhs (stmt1, NULL_TREE);
              gimple_call_set_arg (stmt1, 1, new_str_cst);
              gimple_call_set_arg (stmt1, 2,
                                   build_int_cst (TREE_TYPE (len1), src_len));
              update_stmt (stmt1);
              unlink_stmt_vdef (stmt2);
              gsi_remove (gsi_p, true);
              release_defs (stmt2);
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                release_ssa_name (lhs1);
              return true;
            }
          else
            {
              /* Otherwise, if STMT1 is length 1 memcpy optimized into
                 assignment, remove STMT1 and change memset call into
                 memcpy call.  */
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

              if (!is_gimple_val (ptr1))
                ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
                                                 true, GSI_SAME_STMT);
              gimple_call_set_fndecl (stmt2, built_in_decls [BUILT_IN_MEMCPY]);
              gimple_call_set_arg (stmt2, 0, ptr1);
              gimple_call_set_arg (stmt2, 1, new_str_cst);
              gimple_call_set_arg (stmt2, 2,
                                   build_int_cst (TREE_TYPE (len2), src_len));
              unlink_stmt_vdef (stmt1);
              gsi_remove (&gsi, true);
              release_defs (stmt1);
              update_stmt (stmt2);
              return false;
            }
        }
      break;
    default:
      break;
    }
  return false;
}
/* Checks if the expression NAME has a type of one-bit precision, or is
   a known truth-valued expression.  */
static bool
truth_valued_ssa_name (tree name)
{
  gimple def;
  tree type = TREE_TYPE (name);

  if (!INTEGRAL_TYPE_P (type))
    return false;
  /* Don't check here for BOOLEAN_TYPE as the precision isn't
     necessarily one and so ~X is not equal to !X.  */
  if (TYPE_PRECISION (type) == 1)
    return true;
  def = SSA_NAME_DEF_STMT (name);
  if (is_gimple_assign (def))
    return truth_value_p (gimple_assign_rhs_code (def));
  return false;
}
/* Helper routine for the simplify_bitwise_binary_1 function.
   Return for the SSA name NAME the expression X if it meets the condition
   NAME = !X.  Otherwise return NULL_TREE.
   Detected patterns for NAME = !X are:
     !X and X == 0 for X with integral type.
     X ^ 1, X != 1, or ~X for X with integral type with precision of one.  */
static tree
lookup_logical_inverted_value (tree name)
{
  tree op1, op2;
  enum tree_code code;
  gimple def;

  /* If NAME has a non-integral type, or isn't an SSA_NAME, then
     return.  */
  if (TREE_CODE (name) != SSA_NAME
      || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
    return NULL_TREE;
  def = SSA_NAME_DEF_STMT (name);
  if (!is_gimple_assign (def))
    return NULL_TREE;

  code = gimple_assign_rhs_code (def);
  op1 = gimple_assign_rhs1 (def);
  op2 = NULL_TREE;

  /* For an EQ_EXPR, NE_EXPR or BIT_XOR_EXPR operation fetch the second
     operand.  If CODE isn't one of those or BIT_NOT_EXPR, then return.  */
  if (code == EQ_EXPR || code == NE_EXPR
      || code == BIT_XOR_EXPR)
    op2 = gimple_assign_rhs2 (def);

  switch (code)
    {
    case BIT_NOT_EXPR:
      if (truth_valued_ssa_name (name))
        return op1;
      break;
    case EQ_EXPR:
      /* Check if we have X == 0 and X has an integral type.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
        break;
      if (integer_zerop (op2))
        return op1;
      break;
    case NE_EXPR:
      /* Check if we have X != 1 and X is truth-valued.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
        break;
      if (integer_onep (op2) && truth_valued_ssa_name (op1))
        return op1;
      break;
    case BIT_XOR_EXPR:
      /* Check if we have X ^ 1 and X is truth-valued.  */
      if (integer_onep (op2) && truth_valued_ssa_name (op1))
        return op1;
      break;
    default:
      break;
    }

  return NULL_TREE;
}
/* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
   operations CODE, if one operand has the logically inverted
   value of the other.  */
static tree
simplify_bitwise_binary_1 (enum tree_code code, tree type,
                           tree arg1, tree arg2)
{
  tree anot;

  /* If CODE isn't a bitwise binary operation, return NULL_TREE.  */
  if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
      && code != BIT_XOR_EXPR)
    return NULL_TREE;

  /* First check if operands ARG1 and ARG2 are equal.  If so
     return NULL_TREE as this optimization is handled by fold_stmt.  */
  if (arg1 == arg2)
    return NULL_TREE;
  /* See if we have in arguments logical-not patterns.  */
  if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
       || anot != arg2)
      && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
          || anot != arg1))
    return NULL_TREE;

  /* X & !X -> 0.  */
  if (code == BIT_AND_EXPR)
    return fold_convert (type, integer_zero_node);
  /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued.  */
  if (truth_valued_ssa_name (anot))
    return fold_convert (type, integer_one_node);

  /* ??? Otherwise the result is (X != 0 ? X : 1).  Not handled.  */
  return NULL_TREE;
}
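
/* Illustrative example (invented names): with a truth-valued x_2,

     t_1 = x_2 ^ 1;
     r_3 = x_2 & t_1;

   lookup_logical_inverted_value (t_1) returns x_2, so the pair is
   recognized as X & !X and the constant 0 is returned; with
   BIT_IOR_EXPR or BIT_XOR_EXPR the result would be 1 since x_2 is
   truth-valued.  */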
1698 /* Simplify bitwise binary operations.
1699 Return true if a transformation applied, otherwise return false. */
1701 static bool
1702 simplify_bitwise_binary (gimple_stmt_iterator *gsi)
1704 gimple stmt = gsi_stmt (*gsi);
1705 tree arg1 = gimple_assign_rhs1 (stmt);
1706 tree arg2 = gimple_assign_rhs2 (stmt);
1707 enum tree_code code = gimple_assign_rhs_code (stmt);
1708 tree res;
1709 gimple def1 = NULL, def2 = NULL;
1710 tree def1_arg1, def2_arg1;
1711 enum tree_code def1_code, def2_code;
1713 /* If the first argument is an SSA name that is itself a result of a
1714 typecast of an ADDR_EXPR to an integer, feed the ADDR_EXPR to the
1715 folder rather than the ssa name. */
1716 if (code == BIT_AND_EXPR
1717 && TREE_CODE (arg2) == INTEGER_CST
1718 && TREE_CODE (arg1) == SSA_NAME)
1720 gimple def = SSA_NAME_DEF_STMT (arg1);
1721 tree op = arg1;
1723 /* ??? This looks bogus - the conversion could be truncating. */
1724 if (is_gimple_assign (def)
1725 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def))
1726 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
1728 tree opp = gimple_assign_rhs1 (def);
1729 if (TREE_CODE (opp) == ADDR_EXPR)
1730 op = opp;
1733 res = fold_binary_loc (gimple_location (stmt),
1734 BIT_AND_EXPR, TREE_TYPE (gimple_assign_lhs (stmt)),
1735 op, arg2);
1736 if (res && is_gimple_min_invariant (res))
1738 gimple_assign_set_rhs_from_tree (gsi, res);
1739 update_stmt (stmt);
1740 return true;
1744 def1_code = TREE_CODE (arg1);
1745 def1_arg1 = arg1;
1746 if (TREE_CODE (arg1) == SSA_NAME)
1748 def1 = SSA_NAME_DEF_STMT (arg1);
1749 if (is_gimple_assign (def1))
1751 def1_code = gimple_assign_rhs_code (def1);
1752 def1_arg1 = gimple_assign_rhs1 (def1);
1756 def2_code = TREE_CODE (arg2);
1757 def2_arg1 = arg2;
1758 if (TREE_CODE (arg2) == SSA_NAME)
1760 def2 = SSA_NAME_DEF_STMT (arg2);
1761 if (is_gimple_assign (def2))
1763 def2_code = gimple_assign_rhs_code (def2);
1764 def2_arg1 = gimple_assign_rhs1 (def2);
1768 /* Try to fold (type) X op CST -> (type) (X op ((type-x) CST)). */
1769 if (TREE_CODE (arg2) == INTEGER_CST
1770 && CONVERT_EXPR_CODE_P (def1_code)
1771 && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
1772 && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
1774 gimple newop;
1775 tree tem = create_tmp_reg (TREE_TYPE (def1_arg1), NULL);
1776 newop =
1777 gimple_build_assign_with_ops (code, tem, def1_arg1,
1778 fold_convert_loc (gimple_location (stmt),
1779 TREE_TYPE (def1_arg1),
1780 arg2));
1781 tem = make_ssa_name (tem, newop);
1782 gimple_assign_set_lhs (newop, tem);
1783 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1784 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1785 tem, NULL_TREE, NULL_TREE);
1786 update_stmt (gsi_stmt (*gsi));
1787 return true;

  /* For bitwise binary operations, apply operand conversions to the
     binary operation result instead of to the operands.  This allows
     combining successive conversions and bitwise binary operations.  */
  if (CONVERT_EXPR_CODE_P (def1_code)
      && CONVERT_EXPR_CODE_P (def2_code)
      && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
      /* Make sure that the conversion widens the operands, or has the
         same precision, or that it changes the operation to a bitfield
         precision.  */
      && ((TYPE_PRECISION (TREE_TYPE (def1_arg1))
           <= TYPE_PRECISION (TREE_TYPE (arg1)))
          || (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (arg1)))
              != MODE_INT)
          || (TYPE_PRECISION (TREE_TYPE (arg1))
              != GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg1))))))
    {
      gimple newop;
      tree tem = create_tmp_reg (TREE_TYPE (def1_arg1), NULL);
      newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
      tem = make_ssa_name (tem, newop);
      gimple_assign_set_lhs (newop, tem);
      gsi_insert_before (gsi, newop, GSI_SAME_STMT);
      gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
                                        tem, NULL_TREE, NULL_TREE);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }
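
  /* E.g. (illustrative, assuming 8-bit char and 32-bit int), with
     unsigned char operands a and b,

       i_1 = (int) a_2;
       i_3 = (int) b_4;
       r_5 = i_1 | i_3;

     becomes

       tem_6 = a_2 | b_4;
       r_5 = (int) tem_6;

     so that a later conversion of r_5 can be combined with the one
     remaining conversion.  */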

  /* (a | CST1) & CST2  ->  (a & CST2) | (CST1 & CST2).  */
  if (code == BIT_AND_EXPR
      && def1_code == BIT_IOR_EXPR
      && TREE_CODE (arg2) == INTEGER_CST
      && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
    {
      tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
                              arg2, gimple_assign_rhs2 (def1));
      tree tem;
      gimple newop;
      if (integer_zerop (cst))
        {
          gimple_assign_set_rhs1 (stmt, def1_arg1);
          update_stmt (stmt);
          return true;
        }
      tem = create_tmp_reg (TREE_TYPE (arg2), NULL);
      newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
                                            tem, def1_arg1, arg2);
      tem = make_ssa_name (tem, newop);
      gimple_assign_set_lhs (newop, tem);
      /* Make sure to re-process the new stmt as combining walks upwards.  */
      gsi_insert_before (gsi, newop, GSI_NEW_STMT);
      gimple_assign_set_rhs1 (stmt, tem);
      gimple_assign_set_rhs2 (stmt, cst);
      gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
      update_stmt (stmt);
      return true;
    }
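
  /* Concretely, (a | 0xf0) & 0x33 becomes (a & 0x33) | 0x30 since
     0xf0 & 0x33 == 0x30, and in the special case (a | 0xf0) & 0x0f
     the IOR drops out entirely because 0xf0 & 0x0f == 0, leaving
     just a & 0x0f.  */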

  /* Combine successive equal operations with constants.  */
  if ((code == BIT_AND_EXPR
       || code == BIT_IOR_EXPR
       || code == BIT_XOR_EXPR)
      && def1_code == code
      && TREE_CODE (arg2) == INTEGER_CST
      && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
    {
      tree cst = fold_build2 (code, TREE_TYPE (arg2),
                              arg2, gimple_assign_rhs2 (def1));
      gimple_assign_set_rhs1 (stmt, def1_arg1);
      gimple_assign_set_rhs2 (stmt, cst);
      update_stmt (stmt);
      return true;
    }
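
  /* That is, (a & 0xfc) & 0x3f -> a & 0x3c, (a | 1) | 4 -> a | 5,
     and (a ^ 5) ^ 3 -> a ^ 6, folding the two constants at compile
     time.  */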

  /* Try simple folding for X op !X, and X op X.  */
  res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
  if (res != NULL_TREE)
    {
      gimple_assign_set_rhs_from_tree (gsi, res);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  return false;
}

/* Perform re-associations of the plus or minus statement STMT that are
   always permitted.  Returns true if the CFG was changed.  */

static bool
associate_plusminus (gimple stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  gimple_stmt_iterator gsi;
  bool changed;

  /* We can't reassociate at all for saturating types.  */
  if (TYPE_SATURATING (TREE_TYPE (rhs1)))
    return false;

  /* First contract negates.  */
  do
    {
      changed = false;

      /* A +- (-B) -> A -+ B.  */
      if (TREE_CODE (rhs2) == SSA_NAME)
        {
          gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
          if (is_gimple_assign (def_stmt)
              && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
              && can_propagate_from (def_stmt))
            {
              code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
              gimple_assign_set_rhs_code (stmt, code);
              rhs2 = gimple_assign_rhs1 (def_stmt);
              gimple_assign_set_rhs2 (stmt, rhs2);
              gimple_set_modified (stmt, true);
              changed = true;
            }
        }

      /* (-A) + B -> B - A.  */
      if (TREE_CODE (rhs1) == SSA_NAME
          && code == PLUS_EXPR)
        {
          gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
          if (is_gimple_assign (def_stmt)
              && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
              && can_propagate_from (def_stmt))
            {
              code = MINUS_EXPR;
              gimple_assign_set_rhs_code (stmt, code);
              rhs1 = rhs2;
              gimple_assign_set_rhs1 (stmt, rhs1);
              rhs2 = gimple_assign_rhs1 (def_stmt);
              gimple_assign_set_rhs2 (stmt, rhs2);
              gimple_set_modified (stmt, true);
              changed = true;
            }
        }
    }
  while (changed);

  /* We can't reassociate floating-point or fixed-point plus or minus
     because of saturation to +-Inf.  */
  if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
      || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
    goto out;

  /* Second, match patterns that allow contracting a plus-minus pair
     irrespective of overflow issues.

        (A +- B) - A       ->  +- B
        (A +- B) -+ B      ->  A
        (CST +- A) +- CST  ->  CST +- A
        (A + CST) +- CST   ->  A + CST
        ~A + A             ->  -1
        ~A + 1             ->  -A
        A - (A +- B)       ->  -+ B
        A +- (B +- A)      ->  +- B
        CST +- (CST +- A)  ->  CST +- A
        CST +- (A +- CST)  ->  CST +- A
        A + ~A             ->  -1

     by commuting the addition and contracting paired operations to
     zero via reassociation (see the worked example below).  */
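
  /* These hold even for wrapping unsigned arithmetic.  E.g. modulo 256,
     with A == 250 and B == 10, (A + B) - A computes 260 mod 256 == 4,
     then 4 - 250 == -246 == 10 (mod 256), which is exactly B, so
     (A + B) - A -> B is safe regardless of the intermediate overflow.  */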

  gsi = gsi_for_stmt (stmt);
  if (TREE_CODE (rhs1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
        {
          enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
          if (def_code == PLUS_EXPR
              || def_code == MINUS_EXPR)
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
              if (operand_equal_p (def_rhs1, rhs2, 0)
                  && code == MINUS_EXPR)
                {
                  /* (A +- B) - A -> +- B.  */
                  code = ((def_code == PLUS_EXPR)
                          ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
                  rhs1 = def_rhs2;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (operand_equal_p (def_rhs2, rhs2, 0)
                       && code != def_code)
                {
                  /* (A +- B) -+ B -> A.  */
                  code = TREE_CODE (def_rhs1);
                  rhs1 = def_rhs1;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (TREE_CODE (rhs2) == INTEGER_CST
                       && TREE_CODE (def_rhs1) == INTEGER_CST)
                {
                  /* (CST +- A) +- CST -> CST +- A.  */
                  tree cst = fold_binary (code, TREE_TYPE (rhs1),
                                          def_rhs1, rhs2);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      code = def_code;
                      gimple_assign_set_rhs_code (stmt, code);
                      rhs1 = cst;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = def_rhs2;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
              else if (TREE_CODE (rhs2) == INTEGER_CST
                       && TREE_CODE (def_rhs2) == INTEGER_CST
                       && def_code == PLUS_EXPR)
                {
                  /* (A + CST) +- CST -> A + CST.  */
                  tree cst = fold_binary (code, TREE_TYPE (rhs1),
                                          def_rhs2, rhs2);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      code = PLUS_EXPR;
                      gimple_assign_set_rhs_code (stmt, code);
                      rhs1 = def_rhs1;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = cst;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
            }
          else if (def_code == BIT_NOT_EXPR
                   && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              if (code == PLUS_EXPR
                  && operand_equal_p (def_rhs1, rhs2, 0))
                {
                  /* ~A + A -> -1.  */
                  code = INTEGER_CST;
                  rhs1 = build_int_cst_type (TREE_TYPE (rhs2), -1);
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (code == PLUS_EXPR
                       && integer_onep (rhs2))
                {
                  /* ~A + 1 -> -A.  The constant 1 is RHS2 here; RHS1 is
                     the SSA name holding ~A.  */
                  code = NEGATE_EXPR;
                  rhs1 = def_rhs1;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
            }
        }
    }
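
  /* In two's complement, ~A == -A - 1, which is why ~A + A folds to
     -1 and ~A + 1 folds to -A; e.g. for A == 5, ~5 == -6, and
     -6 + 5 == -1 while -6 + 1 == -5.  */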

  if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
      if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
        {
          enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
          if (def_code == PLUS_EXPR
              || def_code == MINUS_EXPR)
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
              if (operand_equal_p (def_rhs1, rhs1, 0)
                  && code == MINUS_EXPR)
                {
                  /* A - (A +- B) -> -+ B.  */
                  code = ((def_code == PLUS_EXPR)
                          ? NEGATE_EXPR : TREE_CODE (def_rhs2));
                  rhs1 = def_rhs2;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (operand_equal_p (def_rhs2, rhs1, 0)
                       && code != def_code)
                {
                  /* A +- (B +- A) -> +- B.  */
                  code = ((code == PLUS_EXPR)
                          ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
                  rhs1 = def_rhs1;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (TREE_CODE (rhs1) == INTEGER_CST
                       && TREE_CODE (def_rhs1) == INTEGER_CST)
                {
                  /* CST +- (CST +- A) -> CST +- A.  */
                  tree cst = fold_binary (code, TREE_TYPE (rhs2),
                                          rhs1, def_rhs1);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
                      gimple_assign_set_rhs_code (stmt, code);
                      rhs1 = cst;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = def_rhs2;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
              else if (TREE_CODE (rhs1) == INTEGER_CST
                       && TREE_CODE (def_rhs2) == INTEGER_CST)
                {
                  /* CST +- (A +- CST) -> CST +- A.  */
                  tree cst = fold_binary (def_code == code
                                          ? PLUS_EXPR : MINUS_EXPR,
                                          TREE_TYPE (rhs2),
                                          rhs1, def_rhs2);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      rhs1 = cst;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = def_rhs1;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
            }
          else if (def_code == BIT_NOT_EXPR
                   && INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              if (code == PLUS_EXPR
                  && operand_equal_p (def_rhs1, rhs1, 0))
                {
                  /* A + ~A -> -1.  */
                  code = INTEGER_CST;
                  rhs1 = build_int_cst_type (TREE_TYPE (rhs1), -1);
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
            }
        }
    }

out:
  if (gimple_modified_p (stmt))
    {
      fold_stmt_inplace (stmt);
      update_stmt (stmt);
      if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
          && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
        return true;
    }

  return false;
}

/* Combine two conversions in a row, where the second conversion is
   the statement at *GSI.  Returns true if any changes were made.  */

static bool
combine_conversions (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op0, lhs;
  enum tree_code code = gimple_assign_rhs_code (stmt);

  gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
                       || code == FLOAT_EXPR
                       || code == FIX_TRUNC_EXPR);

  lhs = gimple_assign_lhs (stmt);
  op0 = gimple_assign_rhs1 (stmt);
  if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
    {
      gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
      return true;
    }

  if (TREE_CODE (op0) != SSA_NAME)
    return false;

  def_stmt = SSA_NAME_DEF_STMT (op0);
  if (!is_gimple_assign (def_stmt))
    return false;

  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree defop0 = gimple_assign_rhs1 (def_stmt);
      tree type = TREE_TYPE (lhs);
      tree inside_type = TREE_TYPE (defop0);
      tree inter_type = TREE_TYPE (op0);
      int inside_int = INTEGRAL_TYPE_P (inside_type);
      int inside_ptr = POINTER_TYPE_P (inside_type);
      int inside_float = FLOAT_TYPE_P (inside_type);
      int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
      unsigned int inside_prec = TYPE_PRECISION (inside_type);
      int inside_unsignedp = TYPE_UNSIGNED (inside_type);
      int inter_int = INTEGRAL_TYPE_P (inter_type);
      int inter_ptr = POINTER_TYPE_P (inter_type);
      int inter_float = FLOAT_TYPE_P (inter_type);
      int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
      unsigned int inter_prec = TYPE_PRECISION (inter_type);
      int inter_unsignedp = TYPE_UNSIGNED (inter_type);
      int final_int = INTEGRAL_TYPE_P (type);
      int final_ptr = POINTER_TYPE_P (type);
      int final_float = FLOAT_TYPE_P (type);
      int final_vec = TREE_CODE (type) == VECTOR_TYPE;
      unsigned int final_prec = TYPE_PRECISION (type);
      int final_unsignedp = TYPE_UNSIGNED (type);

      /* In addition to the cases of two conversions in a row
         handled below, if we are converting something to its own
         type via an object of identical or wider precision, neither
         conversion is needed.  */
      if (useless_type_conversion_p (type, inside_type)
          && (((inter_int || inter_ptr) && final_int)
              || (inter_float && final_float))
          && inter_prec >= final_prec)
        {
          gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
          gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
          update_stmt (stmt);
          return true;
        }
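
      /* E.g. (illustrative, assuming 32-bit int and 64-bit long),
         for int i the chain lhs = (int)(long) i_1 collapses to
         lhs = i_1: the intermediate long is wide enough to
         represent every int exactly.  */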

      /* Likewise, if the intermediate and initial types are either both
         float or both integer, we don't need the middle conversion if the
         former is wider than the latter and doesn't change the signedness
         (for integers).  Avoid this if the final type is a pointer since
         then we sometimes need the middle conversion.  Likewise if the
         final type has a precision not equal to the size of its mode.  */
      if (((inter_int && inside_int)
           || (inter_float && inside_float)
           || (inter_vec && inside_vec))
          && inter_prec >= inside_prec
          && (inter_float || inter_vec
              || inter_unsignedp == inside_unsignedp)
          && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                && TYPE_MODE (type) == TYPE_MODE (inter_type))
          && ! final_ptr
          && (! final_vec || inter_prec == inside_prec))
        {
          gimple_assign_set_rhs1 (stmt, defop0);
          update_stmt (stmt);
          return true;
        }
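
      /* E.g. (illustrative, with 16-bit short, 32-bit int and 64-bit
         long) lhs = (int)(long) s_1 for short s_1 becomes
         lhs = (int) s_1: widening to long first cannot change the
         value that the final conversion to int sees.  */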

      /* If we have a sign-extension of a zero-extended value, we can
         replace that by a single zero-extension.  */
      if (inside_int && inter_int && final_int
          && inside_prec < inter_prec && inter_prec < final_prec
          && inside_unsignedp && !inter_unsignedp)
        {
          gimple_assign_set_rhs1 (stmt, defop0);
          update_stmt (stmt);
          return true;
        }
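
      /* E.g. lhs = (int)(short)(unsigned char) x_1: the zero-extension
         to short always leaves the sign bit of the short clear, so the
         following sign-extension to int is equivalent to the direct
         zero-extension lhs = (int) x_1 (assuming the usual 8/16/32-bit
         precisions).  */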

      /* Two conversions in a row are not needed unless:
         - some conversion is floating-point (overstrict for now), or
         - some conversion is a vector (overstrict for now), or
         - the intermediate type is narrower than both initial and
           final, or
         - the intermediate type and innermost type differ in signedness,
           and the outermost type is wider than the intermediate, or
         - the initial type is a pointer type and the precisions of the
           intermediate and final types differ, or
         - the final type is a pointer type and the precisions of the
           initial and intermediate types differ.  */
      if (! inside_float && ! inter_float && ! final_float
          && ! inside_vec && ! inter_vec && ! final_vec
          && (inter_prec >= inside_prec || inter_prec >= final_prec)
          && ! (inside_int && inter_int
                && inter_unsignedp != inside_unsignedp
                && inter_prec < final_prec)
          && ((inter_unsignedp && inter_prec > inside_prec)
              == (final_unsignedp && final_prec > inter_prec))
          && ! (inside_ptr && inter_prec != final_prec)
          && ! (final_ptr && inside_prec != inter_prec)
          && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                && TYPE_MODE (type) == TYPE_MODE (inter_type)))
        {
          gimple_assign_set_rhs1 (stmt, defop0);
          update_stmt (stmt);
          return true;
        }
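
      /* Under those conditions the middle conversion is redundant;
         e.g. (illustrative, with 8-bit char, 16-bit short and 32-bit
         int) the two successive truncations in lhs = (char)(short) i_1
         for int i_1 collapse to the single truncation
         lhs = (char) i_1.  */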

      /* A truncation to an unsigned type should be canonicalized as
         bitwise and of a mask.  */
      if (final_int && inter_int && inside_int
          && final_prec == inside_prec
          && final_prec > inter_prec
          && inter_unsignedp)
        {
          tree tem;
          tem = fold_build2 (BIT_AND_EXPR, inside_type,
                             defop0,
                             double_int_to_tree
                               (inside_type, double_int_mask (inter_prec)));
          if (!useless_type_conversion_p (type, inside_type))
            {
              tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
                                              GSI_SAME_STMT);
              gimple_assign_set_rhs1 (stmt, tem);
            }
          else
            gimple_assign_set_rhs_from_tree (gsi, tem);
          update_stmt (gsi_stmt (*gsi));
          return true;
        }
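
      /* E.g. (illustrative, assuming 32-bit int and 16-bit unsigned
         short) lhs = (int)(unsigned short) i_1 for int i_1 is
         canonicalized to lhs = i_1 & 65535: truncating to the 16-bit
         unsigned intermediate and widening back just masks off the
         upper bits.  */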
    }

  return false;
}

/* Main entry point for the forward propagation and statement combine
   optimizer.  */

static unsigned int
ssa_forward_propagate_and_combine (void)
{
  basic_block bb;
  unsigned int todoflags = 0;

  cfg_changed = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi, prev;
      bool prev_initialized;

      /* Apply forward propagation to all stmts in the basic-block.
         Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
          gimple stmt = gsi_stmt (gsi);
          tree lhs, rhs;
          enum tree_code code;

          if (!is_gimple_assign (stmt))
            {
              gsi_next (&gsi);
              continue;
            }

          lhs = gimple_assign_lhs (stmt);
          rhs = gimple_assign_rhs1 (stmt);
          code = gimple_assign_rhs_code (stmt);
          if (TREE_CODE (lhs) != SSA_NAME
              || has_zero_uses (lhs))
            {
              gsi_next (&gsi);
              continue;
            }

          /* If this statement sets an SSA_NAME to an address,
             try to propagate the address into the uses of the SSA_NAME.  */
          if (code == ADDR_EXPR
              /* Handle pointer conversions on invariant addresses
                 as well, as this is valid gimple.  */
              || (CONVERT_EXPR_CODE_P (code)
                  && TREE_CODE (rhs) == ADDR_EXPR
                  && POINTER_TYPE_P (TREE_TYPE (lhs))))
            {
              tree base = get_base_address (TREE_OPERAND (rhs, 0));
              if ((!base
                   || !DECL_P (base)
                   || decl_address_invariant_p (base))
                  && !stmt_references_abnormal_ssa_name (stmt)
                  && forward_propagate_addr_expr (lhs, rhs))
                {
                  release_defs (stmt);
                  todoflags |= TODO_remove_unused_locals;
                  gsi_remove (&gsi, true);
                }
              else
                gsi_next (&gsi);
            }
          else if (code == POINTER_PLUS_EXPR && can_propagate_from (stmt))
            {
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
                  /* ??? Better adjust the interface to that function
                     instead of building new trees here.  */
                  && forward_propagate_addr_expr
                       (lhs,
                        build1 (ADDR_EXPR,
                                TREE_TYPE (rhs),
                                fold_build2 (MEM_REF,
                                             TREE_TYPE (TREE_TYPE (rhs)),
                                             rhs,
                                             fold_convert
                                               (ptr_type_node,
                                                gimple_assign_rhs2 (stmt))))))
                {
                  release_defs (stmt);
                  todoflags |= TODO_remove_unused_locals;
                  gsi_remove (&gsi, true);
                }
              else if (is_gimple_min_invariant (rhs))
                {
                  /* Make sure to fold &a[0] + off_1 here.  */
                  fold_stmt_inplace (stmt);
                  update_stmt (stmt);
                  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
                    gsi_next (&gsi);
                }
              else
                gsi_next (&gsi);
            }
          else if (TREE_CODE_CLASS (code) == tcc_comparison)
            {
              forward_propagate_comparison (stmt);
              gsi_next (&gsi);
            }
          else
            gsi_next (&gsi);
        }

      /* Combine stmts with the stmts defining their operands.
         Note we update GSI within the loop as necessary.  */
      prev_initialized = false;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
        {
          gimple stmt = gsi_stmt (gsi);
          bool changed = false;

          switch (gimple_code (stmt))
            {
            case GIMPLE_ASSIGN:
              {
                tree rhs1 = gimple_assign_rhs1 (stmt);
                enum tree_code code = gimple_assign_rhs_code (stmt);

                if ((code == BIT_NOT_EXPR
                     || code == NEGATE_EXPR)
                    && TREE_CODE (rhs1) == SSA_NAME)
                  changed = simplify_not_neg_expr (&gsi);
                else if (code == COND_EXPR)
                  {
                    /* In this case the entire COND_EXPR is in rhs1.  */
                    int did_something;
                    fold_defer_overflow_warnings ();
                    did_something = forward_propagate_into_cond (&gsi);
                    stmt = gsi_stmt (gsi);
                    if (did_something == 2)
                      cfg_changed = true;
                    fold_undefer_overflow_warnings
                      (!TREE_NO_WARNING (rhs1) && did_something, stmt,
                       WARN_STRICT_OVERFLOW_CONDITIONAL);
                    changed = did_something != 0;
                  }
                else if (TREE_CODE_CLASS (code) == tcc_comparison)
                  {
                    bool no_warning = gimple_no_warning_p (stmt);
                    fold_defer_overflow_warnings ();
                    changed = forward_propagate_into_comparison (&gsi);
                    fold_undefer_overflow_warnings
                      (!no_warning && changed,
                       stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
                  }
                else if (code == BIT_AND_EXPR
                         || code == BIT_IOR_EXPR
                         || code == BIT_XOR_EXPR)
                  changed = simplify_bitwise_binary (&gsi);
                else if (code == PLUS_EXPR
                         || code == MINUS_EXPR)
                  changed = associate_plusminus (stmt);
                else if (CONVERT_EXPR_CODE_P (code)
                         || code == FLOAT_EXPR
                         || code == FIX_TRUNC_EXPR)
                  changed = combine_conversions (&gsi);
                break;
              }

            case GIMPLE_SWITCH:
              changed = simplify_gimple_switch (stmt);
              break;

            case GIMPLE_COND:
              {
                int did_something;
                fold_defer_overflow_warnings ();
                did_something = forward_propagate_into_gimple_cond (stmt);
                if (did_something == 2)
                  cfg_changed = true;
                fold_undefer_overflow_warnings
                  (did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
                changed = did_something != 0;
                break;
              }

            case GIMPLE_CALL:
              {
                tree callee = gimple_call_fndecl (stmt);
                if (callee != NULL_TREE
                    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
                  changed = simplify_builtin_call (&gsi, callee);
                break;
              }

            default:;
            }

          if (changed)
            {
              /* If the stmt changed then re-visit it and the statements
                 inserted before it.  */
              if (!prev_initialized)
                gsi = gsi_start_bb (bb);
              else
                {
                  gsi = prev;
                  gsi_next (&gsi);
                }
            }
          else
            {
              prev = gsi;
              prev_initialized = true;
              gsi_next (&gsi);
            }
        }
    }

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}

static bool
gate_forwprop (void)
{
  return flag_tree_forwprop;
}

struct gimple_opt_pass pass_forwprop =
{
 {
  GIMPLE_PASS,
  "forwprop",                           /* name */
  gate_forwprop,                        /* gate */
  ssa_forward_propagate_and_combine,    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_FORWPROP,                     /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_ggc_collect
  | TODO_update_ssa
  | TODO_verify_ssa                     /* todo_flags_finish */
 }
};