/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "timevar.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-dump.h"
#include "langhooks.h"
#include "flags.h"
#include "gimple.h"
#include "expr.h"

/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or
     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither have volatile on them):
     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or
     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */
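
/* Illustration (added commentary, not part of the original sources):
   a minimal C function whose GIMPLE exhibits the first transformation
   above; the SSA numbering is hypothetical.

     int
     example_cond (int a, int b)
     {
       int x = a < b;
       if (x)
         return 1;
       return 0;
     }

   gimplifies to  x_1 = a_2(D) < b_3(D);  if (x_1 != 0) ...  which this
   pass rewrites to  if (a_2(D) < b_3(D)) ...  so that x_1 and its
   defining statement become dead.  */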

static bool forward_propagate_addr_expr (tree name, tree rhs);

/* Set to true if we delete EH edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple stmt);

/* Get the next statement we can propagate NAME's value into skipping
   trivial copies.  Returns the statement that is suitable as a
   propagation destination or NULL if there is no such one.
   This only returns destinations in a single-use chain.  If
   FINAL_NAME_P is non-NULL, the ssa name that represents the use
   is written to it.  */

static gimple
get_prop_dest_stmt (tree name, tree *final_name_p)
{
  use_operand_p use;
  gimple use_stmt;

  do {
    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))
      return NULL;

    /* If this is not a trivial copy, we found it.  */
    if (!gimple_assign_ssa_name_copy_p (use_stmt)
        || gimple_assign_rhs1 (use_stmt) != name)
      break;

    /* Continue searching uses of the copy destination.  */
    name = gimple_assign_lhs (use_stmt);
  } while (1);

  if (final_name_p)
    *final_name_p = name;

  return use_stmt;
}
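
/* Illustrative sketch (added commentary): given the single-use chain

     a_1 = ...;
     b_2 = a_1;          <- trivial copy, skipped
     c_3 = b_2;          <- trivial copy, skipped
     if (c_3 != 0) ...   <- returned as propagation destination

   get_prop_dest_stmt (a_1, &final) returns the GIMPLE_COND and writes
   c_3 to final.  */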

/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL if there is no such one.
   If SINGLE_USE_ONLY is set considers only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
        single_use = false;
        if (single_use_only)
          return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is not a simple copy, we possibly found it.  */
    if (!gimple_assign_ssa_name_copy_p (def_stmt))
      {
        tree rhs;

        if (!single_use_only && single_use_p)
          *single_use_p = single_use;

        /* We can look through pointer conversions in the search
           for a useful stmt for the comparison folding.  */
        rhs = gimple_assign_rhs1 (def_stmt);
        if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
            && TREE_CODE (rhs) == SSA_NAME
            && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
            && POINTER_TYPE_P (TREE_TYPE (rhs)))
          name = rhs;
        else
          return def_stmt;
      }
    else
      {
        /* Continue searching the def of the copy source name.  */
        name = gimple_assign_rhs1 (def_stmt);
      }
  } while (1);
}

/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can always be propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we cannot apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}

/* Remove a copy chain ending in NAME along the defs.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (!has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    bb = gimple_bb (stmt);
    if (!bb)
      return cfg_changed;
    gsi = gsi_for_stmt (stmt);
    release_defs (stmt);
    gsi_remove (&gsi, true);
    cfg_changed |= gimple_purge_dead_eh_edges (bb);

    name = (gimple_assign_copy_p (stmt)) ? gimple_assign_rhs1 (stmt) : NULL;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}

/* Return the rhs of a gimple_assign STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt),
                            gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}

/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (location_t loc, enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  t = fold_binary_loc (loc, code, type, op0, op1);
  if (!t)
    return NULL_TREE;

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    return NULL_TREE;

  return t;
}

/* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
   of its operands.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (location_t loc,
                                     enum tree_code code, tree type,
                                     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand first; that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
          tmp = combine_cond_expr_cond (loc, code, type,
                                        rhs0, op1, !single_use0_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
          tmp = combine_cond_expr_cond (loc, code, type,
                                        op0, rhs1, !single_use1_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (loc, code, type,
                                  rhs0, rhs1,
                                  !(single_use0_p && single_use1_p));

  return tmp;
}
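
/* Illustrative sketch (added commentary): for

     t_1 = a_2 + 1;
     x_3 = t_1 == 5;

   substituting t_1's definition gives (a_2 + 1) == 5, which fold
   simplifies to a_2 == 4; that tree is what this function returns.  */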

/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (gimple_location (stmt),
                                             gimple_assign_rhs_code (stmt),
                                             TREE_TYPE
                                               (gimple_assign_lhs (stmt)),
                                             rhs1, rhs2);
  if (tmp)
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      update_stmt (stmt);
      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}

/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (loc, code,
                                             boolean_type_node,
                                             rhs1, rhs2);
  if (tmp)
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_gimple_expr (dump_file, stmt, 0, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  return 0;
}

/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_gimple_cond.  */

static int
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  location_t loc = gimple_location (stmt);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (loc, TREE_CODE (cond),
                                               boolean_type_node,
                                               TREE_OPERAND (cond, 0),
                                               TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      tree name = cond, rhs0;
      gimple def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return 0;

      rhs0 = gimple_assign_rhs1 (def_stmt);
      tmp = combine_cond_expr_cond (loc, NE_EXPR, boolean_type_node, rhs0,
                                    build_int_cst (TREE_TYPE (rhs0), 0),
                                    false);
    }

  if (tmp)
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, cond, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp));
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return is_gimple_min_invariant (tmp) ? 2 : 1;
    }

  return 0;
}
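
/* Illustrative sketch (added commentary): for a COND_EXPR like

     t_1 = a_2 + 1;
     r_3 = t_1 > 5 ? b_4 : c_5;

   the embedded comparison is combined with t_1's definition via
   forward_propagate_into_comparison_1, yielding

     r_3 = a_2 > 4 ? b_4 : c_5;  */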

/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
    cfg_changed = true;

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}

/* DEF_RHS contains the address of the 0th element in an array.
   USE_STMT uses type of DEF_RHS to compute the address of an
   arbitrary element within the array.  The (variable) byte offset
   of the element is contained in OFFSET.

   We walk back through the use-def chains of OFFSET to verify that
   it is indeed computing the offset of an element within the array
   and extract the index corresponding to the given byte offset.

   We then try to fold the entire address expression into a form
   &array[index].

   If we are successful, we replace the right hand side of USE_STMT
   with the new address computation.  */

static bool
forward_propagate_addr_into_variable_array_index (tree offset,
                                                  tree def_rhs,
                                                  gimple_stmt_iterator *use_stmt_gsi)
{
  tree index, tunit;
  gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
  tree new_rhs, tmp;

  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
    tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
  else if (TREE_CODE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))) == ARRAY_TYPE)
    tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))));
  else
    return false;
  if (!host_integerp (tunit, 1))
    return false;

  /* Get the offset's defining statement.  */
  offset_def = SSA_NAME_DEF_STMT (offset);

  /* Try to find an expression for a proper index.  This is either a
     multiplication expression by the element size or just the ssa name we came
     along in case the element size is one.  In that case, however, we do not
     allow multiplications because they can be computing index to a higher
     level dimension (PR 37861).  */
  if (integer_onep (tunit))
    {
      if (is_gimple_assign (offset_def)
          && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
        return false;

      index = offset;
    }
  else
    {
      /* The statement which defines OFFSET before type conversion
         must be a simple GIMPLE_ASSIGN.  */
      if (!is_gimple_assign (offset_def))
        return false;

      /* The RHS of the statement which defines OFFSET must be a
         multiplication of an object by the size of the array elements.
         This implicitly verifies that the size of the array elements
         is constant.  */
      if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
          && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
          && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
        {
          /* The first operand to the MULT_EXPR is the desired index.  */
          index = gimple_assign_rhs1 (offset_def);
        }
      /* If we have idx * tunit + CST * tunit re-associate that.  */
      else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
                || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
               && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
               && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
               && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
                                                gimple_assign_rhs2 (offset_def),
                                                tunit)) != NULL_TREE)
        {
          gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
          if (is_gimple_assign (offset_def2)
              && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
              && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
              && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
            {
              index = fold_build2 (gimple_assign_rhs_code (offset_def),
                                   TREE_TYPE (offset),
                                   gimple_assign_rhs1 (offset_def2), tmp);
            }
          else
            return false;
        }
      else
        return false;
    }

  /* Replace the pointer addition with array indexing.  */
  index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
    {
      new_rhs = unshare_expr (def_rhs);
      TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 1) = index;
    }
  else
    {
      new_rhs = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))),
                        unshare_expr (TREE_OPERAND (def_rhs, 0)),
                        index, integer_zero_node, NULL_TREE);
      new_rhs = build_fold_addr_expr (new_rhs);
      if (!useless_type_conversion_p (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                                      TREE_TYPE (new_rhs)))
        {
          new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true,
                                              NULL_TREE, true, GSI_SAME_STMT);
          new_rhs = fold_convert (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                                  new_rhs);
        }
    }
  gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
  use_stmt = gsi_stmt (*use_stmt_gsi);

  /* That should have created gimple, so there is no need to
     record information to undo the propagation.  */
  fold_stmt_inplace (use_stmt);
  tidy_after_forward_propagate_addr (use_stmt);
  return true;
}
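
/* Illustrative sketch (added commentary): with int x[16], the pass
   turns

     ptr_1 = &x[0];
     off_2 = i_3 * 4;          (4 == TYPE_SIZE_UNIT of the element)
     ptr2_4 = ptr_1 + off_2;

   into

     ptr2_4 = &x[i_3];

   because the multiplication by the element size identifies i_3 as
   the array index.  */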

/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation may
   not be totally successful, yet things may still have been changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Trivial cases.  The use statement could be a trivial copy or a
     useless conversion.  Recurse to the uses of the lhs as copyprop does
     not copy through different variant pointers and FRE does not catch
     all useless conversions.  Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && ((rhs_code == SSA_NAME && rhs == name)
          || CONVERT_EXPR_CODE_P (rhs_code)))
    {
      /* Only recurse if we don't deal with a single use or we cannot
         do the propagation to the current statement.  In particular
         we can end up with a conversion needed for a non-invariant
         address which we cannot do in a single statement.  */
      if (!single_use_p
          || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
              && (!is_gimple_min_invariant (def_rhs)
                  || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                      && POINTER_TYPE_P (TREE_TYPE (def_rhs))
                      && (TYPE_PRECISION (TREE_TYPE (lhs))
                          > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
        return forward_propagate_addr_expr (lhs, def_rhs);

      gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
        gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
      else
        gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
      return true;
    }

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
         for further propagation.  Simply rely on fold building that
         and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
                                 def_rhs,
                                 fold_convert (ptr_type_node,
                                               gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
        return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
                                                    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
         bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
          && forward_propagate_addr_expr (lhs, new_def_rhs))
        return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
                                        new_def_rhs, NULL_TREE);
      else if (is_gimple_min_invariant (new_def_rhs))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
                                        new_def_rhs, NULL_TREE);
      else
        return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  lhs = gimple_assign_lhs (use_stmt);
  while (handled_component_p (lhs))
    lhs = TREE_OPERAND (lhs, 0);

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (lhs);
          tree new_ptr;
          off = double_int_add (off,
                                shwi_to_double_int (def_rhs_offset));
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off = double_int_add (off, mem_ref_offset (def_rhs_base));
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (lhs, 0) = new_ptr;
          TREE_OPERAND (lhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the only use.  */
          if (single_use_p)
            return true;
        }
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the LHS preserving the original alias-type.  */
      else if (gimple_assign_lhs (use_stmt) == lhs
               && useless_type_conversion_p
                    (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
                     TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset
                = int_const_binop (PLUS_EXPR, TREE_OPERAND (lhs, 1),
                                   TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (lhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          gimple_assign_set_lhs (use_stmt,
                                 unshare_expr (TREE_OPERAND (def_rhs, 0)));
          *def_rhs_basep = saved;
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the
             only use.  */
          if (single_use_p)
            return true;
        }
      else
        /* We can have a struct assignment dereferencing our name twice.
           Note that we didn't propagate into the lhs to not falsely
           claim we did when propagating into the rhs.  */
        res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  rhs = gimple_assign_rhs1 (use_stmt);
  if (TREE_CODE (rhs) == ADDR_EXPR)
    rhs = TREE_OPERAND (rhs, 0);
  while (handled_component_p (rhs))
    rhs = TREE_OPERAND (rhs, 0);

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (rhs);
          tree new_ptr;
          off = double_int_add (off,
                                shwi_to_double_int (def_rhs_offset));
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off = double_int_add (off, mem_ref_offset (def_rhs_base));
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (rhs, 0) = new_ptr;
          TREE_OPERAND (rhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
          fold_stmt_inplace (use_stmt);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the RHS preserving the original alias-type.  */
      else if (gimple_assign_rhs1 (use_stmt) == rhs
               && useless_type_conversion_p
                    (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                     TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset
                = int_const_binop (PLUS_EXPR, TREE_OPERAND (rhs, 1),
                                   TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (rhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          gimple_assign_set_rhs1 (use_stmt,
                                  unshare_expr (TREE_OPERAND (def_rhs, 0)));
          *def_rhs_basep = saved;
          fold_stmt_inplace (use_stmt);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size
     of the elements in X into &x[C1 + C2/element size].  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = maybe_fold_stmt_addition (gimple_location (use_stmt),
                                               TREE_TYPE (def_rhs),
                                               def_rhs, rhs2);
      if (new_rhs)
        {
          tree type = TREE_TYPE (gimple_assign_lhs (use_stmt));
          new_rhs = unshare_expr (new_rhs);
          if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs)))
            {
              if (!is_gimple_min_invariant (new_rhs))
                new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs,
                                                    true, NULL_TREE,
                                                    true, GSI_SAME_STMT);
              new_rhs = fold_convert (type, new_rhs);
            }
          gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
          use_stmt = gsi_stmt (*use_stmt_gsi);
          update_stmt (use_stmt);
          tidy_after_forward_propagate_addr (use_stmt);
          return true;
        }
    }

  /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
     converting a multiplication of an index by the size of the
     array elements, then the result is converted into the proper
     type for the arithmetic.  */
  if (TREE_CODE (rhs2) == SSA_NAME
      && (TREE_CODE (array_ref) != ARRAY_REF
          || integer_zerop (TREE_OPERAND (array_ref, 1)))
      && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
      /* Avoid problems with IVopts creating PLUS_EXPRs with a
         different type than their operands.  */
      && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
    return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
                                                             use_stmt_gsi);
  return false;
}

/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.
   Returns true, if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs)
{
  int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
        {
          if (!is_gimple_debug (use_stmt))
            all = false;
          continue;
        }

      /* If the use is in a deeper loop nest, then we do not want
         to propagate non-invariant ADDR_EXPRs into the loop as that
         is likely adding expression evaluations into the loop.  */
      if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
          && !is_gimple_min_invariant (rhs))
        {
          all = false;
          continue;
        }

      {
        gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
        result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                                single_use_p);
        /* If the use has moved to a different statement adjust
           the update machinery for the old statement too.  */
        if (use_stmt != gsi_stmt (gsi))
          {
            update_stmt (use_stmt);
            use_stmt = gsi_stmt (gsi);
          }

        update_stmt (use_stmt);
      }
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  return all && has_zero_uses (name);
}
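
/* Illustrative sketch (added commentary): given

     ptr_1 = &a.b.c;
     res_2 = *ptr_1;

   propagating the ADDR_EXPR into the dereference produces

     res_2 = a.b.c;

   after which ptr_1 has zero uses and its definition is removed.  */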

/* Forward propagate the comparison defined in STMT like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  */

static bool
forward_propagate_comparison (gimple stmt)
{
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  tree tmp = NULL_TREE;
  gimple_stmt_iterator gsi;
  enum tree_code code;
  tree lhs;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    return false;

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt
      || !is_gimple_assign (use_stmt))
    return false;

  code = gimple_assign_rhs_code (use_stmt);
  lhs = gimple_assign_lhs (use_stmt);
  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
    return false;

  /* We can propagate the condition into a statement that
     computes the logical negation of the comparison result.  */
  if ((code == BIT_NOT_EXPR
       && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
      || (code == BIT_XOR_EXPR
          && integer_onep (gimple_assign_rhs2 (use_stmt))))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
      bool nans = HONOR_NANS (TYPE_MODE (type));
      enum tree_code inv_code;
      inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
      if (inv_code == ERROR_MARK)
        return false;

      tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
                    gimple_assign_rhs2 (stmt));
    }
  else
    return false;

  gsi = gsi_for_stmt (use_stmt);
  gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
  use_stmt = gsi_stmt (gsi);
  update_stmt (use_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Replaced '");
      print_gimple_expr (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "' with '");
      print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
      fprintf (dump_file, "'\n");
    }

  /* Remove defining statements.  */
  return remove_prop_source_from_use (name);
}
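
/* Illustrative sketch (added commentary): for

     c_1 = a_2 < b_3;
     d_4 = c_1 ^ 1;

   the comparison is inverted with invert_tree_comparison and
   propagated, producing d_4 = a_2 >= b_3 (only when NaNs need not be
   honored for the operand type).  */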

/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
   If so, we can change STMT into lhs = y which can later be copy
   propagated.  Similarly for negation.

   This could trivially be formulated as a forward propagation
   to immediate uses.  However, we already had an implementation
   from DOM which used backward propagation via the use-def links.

   It turns out that backward propagation is actually faster as
   there's less work to do for each NOT/NEG expression we find.
   Backwards propagation needs to look at the statement in a single
   backlink.  Forward propagation needs to look at potentially more
   than one forward link.

   Returns true when the statement was changed.  */

static bool
simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree rhs = gimple_assign_rhs1 (stmt);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);

  /* See if the RHS_DEF_STMT has the same form as our statement.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
    {
      tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);

      /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
      if (TREE_CODE (rhs_def_operand) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
        {
          gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
          stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);
          return true;
        }
    }

  return false;
}
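
/* Illustrative sketch (added commentary): with

     x_1 = ~y_2;
     z_3 = ~x_1;

   the second statement becomes z_3 = y_2, which copy propagation can
   later eliminate; likewise -(-y_2) becomes y_2.  */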

/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gimple stmt)
{
  tree cond = gimple_switch_index (stmt);
  tree def, to, ti;
  gimple def_stmt;

  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      def_stmt = SSA_NAME_DEF_STMT (cond);
      if (is_gimple_assign (def_stmt))
        {
          if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
            {
              int need_precision;
              bool fail;

              def = gimple_assign_rhs1 (def_stmt);

              /* ??? Why was Jeff testing this?  We are gimple...  */
              gcc_checking_assert (is_gimple_val (def));

              to = TREE_TYPE (cond);
              ti = TREE_TYPE (def);

              /* If we have an extension that preserves value, then we
                 can copy the source value into the switch.  */

              need_precision = TYPE_PRECISION (ti);
              fail = false;
              if (! INTEGRAL_TYPE_P (ti))
                fail = true;
              else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
                fail = true;
              else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
                need_precision += 1;
              if (TYPE_PRECISION (to) < need_precision)
                fail = true;

              if (!fail)
                {
                  gimple_switch_set_index (stmt, def);
                  update_stmt (stmt);
                  return true;
                }
            }
        }
    }

  return false;
}
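
/* Illustrative sketch (added commentary): for

     unsigned char c_1 = ...;
     i_2 = (int) c_1;
     switch (i_2) ...

   the widening conversion preserves the value, so the switch index is
   replaced by c_1 and the cast statement can become dead.  */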

/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
         twice, to handle ADDR_EXPR directly in p1/p2,
         SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
         on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
        {
          if (!POINTER_TYPE_P (TREE_TYPE (p)))
            break;
          if (TREE_CODE (p) == ADDR_EXPR)
            {
              tree q = TREE_OPERAND (p, 0);
              HOST_WIDE_INT offset;
              tree base = get_addr_base_and_unit_offset (q, &offset);
              if (base)
                {
                  q = base;
                  if (offset)
                    off = size_binop (PLUS_EXPR, off, size_int (offset));
                }
              if (TREE_CODE (q) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
                {
                  p = TREE_OPERAND (q, 0);
                  off = size_binop (PLUS_EXPR, off,
                                    double_int_to_tree (sizetype,
                                                        mem_ref_offset (q)));
                }
              else
                {
                  exps[i][j] = q;
                  offs[i][j++] = off;
                  break;
                }
            }
          if (TREE_CODE (p) != SSA_NAME)
            break;
          exps[i][j] = p;
          offs[i][j++] = off;
          if (j == CPD_ITERATIONS)
            break;
          stmt = SSA_NAME_DEF_STMT (p);
          if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
            break;
          code = gimple_assign_rhs_code (stmt);
          if (code == POINTER_PLUS_EXPR)
            {
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
                break;
              off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
              p = gimple_assign_rhs1 (stmt);
            }
          else if (code == ADDR_EXPR || code == NOP_EXPR)
            p = gimple_assign_rhs1 (stmt);
          else
            break;
        }
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
        return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
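
/* Illustrative sketch (added commentary): with char buf[16] and

     p1_1 = &buf[2];
     p2_2 = p1_1 + 5;

   both walks reach the common base buf with offsets 2 and 7, so
   constant_pointer_difference (p1_1, p2_2) returns 5.  */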

/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
   memcpy (p, "abcd", 4);
   memset (p + 4, ' ', 3);
   into
   memcpy (p, "abcd   ", 7);
   if the latter can be stored by pieces during expansion.  */

static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
          || gimple_call_lhs (stmt2)
          || CHAR_BIT != 8
          || BITS_PER_UNIT != 8)
        break;
      else
        {
          tree callee1;
          tree ptr1, src1, str1, off1, len1, lhs1;
          tree ptr2 = gimple_call_arg (stmt2, 0);
          tree val2 = gimple_call_arg (stmt2, 1);
          tree len2 = gimple_call_arg (stmt2, 2);
          tree diff, vdef, new_str_cst;
          gimple use_stmt;
          unsigned int ptr1_align;
          unsigned HOST_WIDE_INT src_len;
          char *src_buf;
          use_operand_p use_p;

          if (!host_integerp (val2, 0)
              || !host_integerp (len2, 1))
            break;
          if (is_gimple_call (stmt1))
            {
              /* If first stmt is a call, it needs to be memcpy
                 or mempcpy, with string literal as second argument and
                 constant length.  */
              callee1 = gimple_call_fndecl (stmt1);
              if (callee1 == NULL_TREE
                  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
                  || gimple_call_num_args (stmt1) != 3)
                break;
              if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
                  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
                break;
              ptr1 = gimple_call_arg (stmt1, 0);
              src1 = gimple_call_arg (stmt1, 1);
              len1 = gimple_call_arg (stmt1, 2);
              lhs1 = gimple_call_lhs (stmt1);
              if (!host_integerp (len1, 1))
                break;
              str1 = string_constant (src1, &off1);
              if (str1 == NULL_TREE)
                break;
              if (!host_integerp (off1, 1)
                  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
                  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
                                             - tree_low_cst (off1, 1)) > 0
                  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
                  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
                     != TYPE_MODE (char_type_node))
                break;
            }
          else if (gimple_assign_single_p (stmt1))
            {
              /* Otherwise look for length 1 memcpy optimized into
                 assignment.  */
              ptr1 = gimple_assign_lhs (stmt1);
              src1 = gimple_assign_rhs1 (stmt1);
              if (TREE_CODE (ptr1) != MEM_REF
                  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
                  || !host_integerp (src1, 0))
                break;
              ptr1 = build_fold_addr_expr (ptr1);
              callee1 = NULL_TREE;
              len1 = size_one_node;
              lhs1 = NULL_TREE;
              off1 = size_zero_node;
              str1 = NULL_TREE;
            }
          else
            break;

          diff = constant_pointer_difference (ptr1, ptr2);
          if (diff == NULL && lhs1 != NULL)
            {
              diff = constant_pointer_difference (lhs1, ptr2);
              if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
                  && diff != NULL)
                diff = size_binop (PLUS_EXPR, diff,
                                   fold_convert (sizetype, len1));
            }
          /* If the difference between the second and first destination pointer
             is not constant, or is bigger than memcpy length, bail out.  */
          if (diff == NULL
              || !host_integerp (diff, 1)
              || tree_int_cst_lt (len1, diff))
            break;

          /* Use maximum of difference plus memset length and memcpy length
             as the new memcpy length, if it is too big, bail out.  */
          src_len = tree_low_cst (diff, 1);
          src_len += tree_low_cst (len2, 1);
          if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
            src_len = tree_low_cst (len1, 1);
          if (src_len > 1024)
            break;

          /* If mempcpy value is used elsewhere, bail out, as mempcpy
             with bigger length will return different result.  */
          if (lhs1 != NULL_TREE
              && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
              && (TREE_CODE (lhs1) != SSA_NAME
                  || !single_imm_use (lhs1, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          /* If anything reads memory in between memcpy and memset
             call, the modified memcpy call might change it.  */
          vdef = gimple_vdef (stmt1);
          if (vdef != NULL
              && (!single_imm_use (vdef, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          ptr1_align = get_pointer_alignment (ptr1, BIGGEST_ALIGNMENT);
          /* Construct the new source string literal.  */
          src_buf = XALLOCAVEC (char, src_len + 1);
          if (callee1)
            memcpy (src_buf,
                    TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
                    tree_low_cst (len1, 1));
          else
            src_buf[0] = tree_low_cst (src1, 0);
          memset (src_buf + tree_low_cst (diff, 1),
                  tree_low_cst (val2, 1), tree_low_cst (len2, 1));
          src_buf[src_len] = '\0';
          /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
             handle embedded '\0's.  */
          if (strlen (src_buf) != src_len)
            break;
          rtl_profile_for_bb (gimple_bb (stmt2));
          /* If the new memcpy wouldn't be emitted by storing the literal
             by pieces, this optimization might enlarge .rodata too much,
             as commonly used string literals couldn't be shared any
             longer.  */
          if (!can_store_by_pieces (src_len,
                                    builtin_strncpy_read_str,
                                    src_buf, ptr1_align, false))
            break;

          new_str_cst = build_string_literal (src_len, src_buf);
          if (callee1)
            {
              /* If STMT1 is a mem{,p}cpy call, adjust it and remove
                 memset call.  */
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                gimple_call_set_lhs (stmt1, NULL_TREE);
              gimple_call_set_arg (stmt1, 1, new_str_cst);
              gimple_call_set_arg (stmt1, 2,
                                   build_int_cst (TREE_TYPE (len1), src_len));
              update_stmt (stmt1);
              unlink_stmt_vdef (stmt2);
              gsi_remove (gsi_p, true);
              release_defs (stmt2);
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                release_ssa_name (lhs1);
              return true;
            }
          else
            {
              /* Otherwise, if STMT1 is length 1 memcpy optimized into
                 assignment, remove STMT1 and change memset call into
                 memcpy call.  */
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

              if (!is_gimple_val (ptr1))
                ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
                                                 true, GSI_SAME_STMT);
              gimple_call_set_fndecl (stmt2, built_in_decls [BUILT_IN_MEMCPY]);
              gimple_call_set_arg (stmt2, 0, ptr1);
              gimple_call_set_arg (stmt2, 1, new_str_cst);
              gimple_call_set_arg (stmt2, 2,
                                   build_int_cst (TREE_TYPE (len2), src_len));
              unlink_stmt_vdef (stmt1);
              gsi_remove (&gsi, true);
              release_defs (stmt1);
              update_stmt (stmt2);
              return false;
            }
        }
      break;
    default:
      break;
    }
  return false;
}
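
/* Illustrative sketch (added commentary): the second form handled
   above is a length 1 memcpy that was already folded into a plain
   character store:

     *p_1 = 'a';
     memset (p_1 + 1, 'b', 2);

   which this function rewrites into memcpy (p_1, "abb", 3), removing
   the store, when the two statements are adjacent in the virtual
   use-def chain.  */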

/* Checks if expression has type of one-bit precision, or is a known
   truth-valued expression.  */
static bool
truth_valued_ssa_name (tree name)
{
  gimple def;
  tree type = TREE_TYPE (name);

  if (!INTEGRAL_TYPE_P (type))
    return false;
  /* Don't check here for BOOLEAN_TYPE as the precision isn't
     necessarily one and so ~X is not equal to !X.  */
  if (TYPE_PRECISION (type) == 1)
    return true;
  def = SSA_NAME_DEF_STMT (name);
  if (is_gimple_assign (def))
    return truth_value_p (gimple_assign_rhs_code (def));
  return false;
}

/* Helper routine for simplify_bitwise_binary_1 function.
   Return for the SSA name NAME the expression X if it meets the condition
   NAME = !X.  Otherwise return NULL_TREE.
   Detected patterns for NAME = !X are:
     !X and X == 0 for X with integral type.
     X ^ 1, X != 1, or ~X for X with integral type with precision of one.  */
static tree
lookup_logical_inverted_value (tree name)
{
  tree op1, op2;
  enum tree_code code;
  gimple def;

  /* If name has a non-integral type, or isn't a SSA_NAME, then
     return.  */
  if (TREE_CODE (name) != SSA_NAME
      || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
    return NULL_TREE;
  def = SSA_NAME_DEF_STMT (name);
  if (!is_gimple_assign (def))
    return NULL_TREE;

  code = gimple_assign_rhs_code (def);
  op1 = gimple_assign_rhs1 (def);
  op2 = NULL_TREE;

  /* Get for EQ_EXPR or BIT_XOR_EXPR operation the second operand.
     If CODE isn't an EQ_EXPR, BIT_XOR_EXPR, or BIT_NOT_EXPR, then return.  */
  if (code == EQ_EXPR || code == NE_EXPR
      || code == BIT_XOR_EXPR)
    op2 = gimple_assign_rhs2 (def);

  switch (code)
    {
    case BIT_NOT_EXPR:
      if (truth_valued_ssa_name (name))
        return op1;
      break;
    case EQ_EXPR:
      /* Check if we have X == 0 and X has an integral type.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
        break;
      if (integer_zerop (op2))
        return op1;
      break;
    case NE_EXPR:
      /* Check if we have X != 1 and X is truth-valued.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
        break;
      if (integer_onep (op2) && truth_valued_ssa_name (op1))
        return op1;
      break;
    case BIT_XOR_EXPR:
      /* Check if we have X ^ 1 and X is truth valued.  */
      if (integer_onep (op2) && truth_valued_ssa_name (op1))
        return op1;
      break;
    default:
      break;
    }

  return NULL_TREE;
}

/* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
   operations CODE, if one operand has the logically inverted
   value of the other.  */
static tree
simplify_bitwise_binary_1 (enum tree_code code, tree type,
                           tree arg1, tree arg2)
{
  tree anot;

  /* If CODE isn't a bitwise binary operation, return NULL_TREE.  */
  if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
      && code != BIT_XOR_EXPR)
    return NULL_TREE;

  /* First check if operands ARG1 and ARG2 are equal.  If so
     return NULL_TREE as this optimization is handled by fold_stmt.  */
  if (arg1 == arg2)
    return NULL_TREE;
  /* See if we have in arguments logical-not patterns.  */
  if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
       || anot != arg2)
      && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
          || anot != arg1))
    return NULL_TREE;

  /* X & !X -> 0.  */
  if (code == BIT_AND_EXPR)
    return fold_convert (type, integer_zero_node);
  /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued.  */
  if (truth_valued_ssa_name (anot))
    return fold_convert (type, integer_one_node);

  /* ??? Otherwise result is (X != 0 ? X : 1).  Not handled.  */
  return NULL_TREE;
}
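
/* Illustrative sketch (added commentary): given

     b_1 = a_2 == 0;
     c_3 = a_2 & b_1;

   the operands are logical inverses, so the BIT_AND_EXPR folds to 0;
   a_2 | b_1 and a_2 ^ b_1 fold to 1 only if a_2 is itself
   truth-valued.  */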
1691 /* Simplify bitwise binary operations.
1692 Return true if a transformation applied, otherwise return false. */
1694 static bool
1695 simplify_bitwise_binary (gimple_stmt_iterator *gsi)
1697 gimple stmt = gsi_stmt (*gsi);
1698 tree arg1 = gimple_assign_rhs1 (stmt);
1699 tree arg2 = gimple_assign_rhs2 (stmt);
1700 enum tree_code code = gimple_assign_rhs_code (stmt);
1701 tree res;
1702 gimple def1 = NULL, def2 = NULL;
1703 tree def1_arg1, def2_arg1;
1704 enum tree_code def1_code, def2_code;
1706 def1_code = TREE_CODE (arg1);
1707 def1_arg1 = arg1;
1708 if (TREE_CODE (arg1) == SSA_NAME)
1710 def1 = SSA_NAME_DEF_STMT (arg1);
1711 if (is_gimple_assign (def1))
1713 def1_code = gimple_assign_rhs_code (def1);
1714 def1_arg1 = gimple_assign_rhs1 (def1);
1718 def2_code = TREE_CODE (arg2);
1719 def2_arg1 = arg2;
1720 if (TREE_CODE (arg2) == SSA_NAME)
1722 def2 = SSA_NAME_DEF_STMT (arg2);
1723 if (is_gimple_assign (def2))
1725 def2_code = gimple_assign_rhs_code (def2);
1726 def2_arg1 = gimple_assign_rhs1 (def2);
1730 /* Try to fold (type) X op CST -> (type) (X op ((type-x) CST)). */
1731 if (TREE_CODE (arg2) == INTEGER_CST
1732 && CONVERT_EXPR_CODE_P (def1_code)
1733 && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
1734 && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
1736 gimple newop;
1737 tree tem = create_tmp_reg (TREE_TYPE (def1_arg1), NULL);
1738 newop =
1739 gimple_build_assign_with_ops (code, tem, def1_arg1,
1740 fold_convert_loc (gimple_location (stmt),
1741 TREE_TYPE (def1_arg1),
1742 arg2));
1743 tem = make_ssa_name (tem, newop);
1744 gimple_assign_set_lhs (newop, tem);
1745 gimple_set_location (newop, gimple_location (stmt));
1746 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1747 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1748 tem, NULL_TREE, NULL_TREE);
1749 update_stmt (gsi_stmt (*gsi));
1750 return true;
1753 /* For bitwise binary operations apply operand conversions to the
1754 binary operation result instead of to the operands. This allows
1755 to combine successive conversions and bitwise binary operations. */
1756 if (CONVERT_EXPR_CODE_P (def1_code)
1757 && CONVERT_EXPR_CODE_P (def2_code)
1758 && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
1759 /* Make sure that the conversion widens the operands, or has same
1760 precision, or that it changes the operation to a bitfield
1761 precision. */
1762 && ((TYPE_PRECISION (TREE_TYPE (def1_arg1))
1763 <= TYPE_PRECISION (TREE_TYPE (arg1)))
1764 || (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (arg1)))
1765 != MODE_INT)
1766 || (TYPE_PRECISION (TREE_TYPE (arg1))
1767 != GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg1))))))
1769 gimple newop;
1770 tree tem = create_tmp_reg (TREE_TYPE (def1_arg1),
1771 NULL);
1772 newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
1773 tem = make_ssa_name (tem, newop);
1774 gimple_assign_set_lhs (newop, tem);
1775 gimple_set_location (newop, gimple_location (stmt));
1776 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1777 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1778 tem, NULL_TREE, NULL_TREE);
1779 update_stmt (gsi_stmt (*gsi));
1780 return true;
  /* (a | CST1) & CST2  ->  (a & CST2) | (CST1 & CST2).  */
  if (code == BIT_AND_EXPR
      && def1_code == BIT_IOR_EXPR
      && TREE_CODE (arg2) == INTEGER_CST
      && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
    {
      tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
                              arg2, gimple_assign_rhs2 (def1));
      tree tem;
      gimple newop;
      if (integer_zerop (cst))
        {
          gimple_assign_set_rhs1 (stmt, def1_arg1);
          update_stmt (stmt);
          return true;
        }
      tem = create_tmp_reg (TREE_TYPE (arg2), NULL);
      newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
                                            tem, def1_arg1, arg2);
      tem = make_ssa_name (tem, newop);
      gimple_assign_set_lhs (newop, tem);
      gimple_set_location (newop, gimple_location (stmt));
      /* Make sure to re-process the new stmt as the combine walk
         moves upwards.  */
      gsi_insert_before (gsi, newop, GSI_NEW_STMT);
      gimple_assign_set_rhs1 (stmt, tem);
      gimple_assign_set_rhs2 (stmt, cst);
      gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
      update_stmt (stmt);
      return true;
    }

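  /* Concretely (illustrative constants):

       (a | 0xf0) & 0x3c  ->  (a & 0x3c) | 0x30

     since 0xf0 & 0x3c == 0x30, while

       (a | 0xf0) & 0x0f  ->  a & 0x0f

     because 0xf0 & 0x0f == 0 makes the inner IOR irrelevant.  */
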
  /* Combine successive equal operations with constants.  */
  if ((code == BIT_AND_EXPR
       || code == BIT_IOR_EXPR
       || code == BIT_XOR_EXPR)
      && def1_code == code
      && TREE_CODE (arg2) == INTEGER_CST
      && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
    {
      tree cst = fold_build2 (code, TREE_TYPE (arg2),
                              arg2, gimple_assign_rhs2 (def1));
      gimple_assign_set_rhs1 (stmt, def1_arg1);
      gimple_assign_set_rhs2 (stmt, cst);
      update_stmt (stmt);
      return true;
    }

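  /* E.g. (illustrative):

       (a & 0xff) & 0x0f  ->  a & 0x0f
       (a ^ 1) ^ 2        ->  a ^ 3

     folding the two constants of equal adjacent operations into one.  */
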
  /* Canonicalize X ^ ~0 to ~X.  */
  if (code == BIT_XOR_EXPR
      && TREE_CODE (arg2) == INTEGER_CST
      && integer_all_onesp (arg2))
    {
      gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
      gcc_assert (gsi_stmt (*gsi) == stmt);
      update_stmt (stmt);
      return true;
    }

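  /* E.g. x_1 ^ -1 (all bits set) becomes ~x_1, giving later passes a
     single canonical form to match.  */
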
  /* Try simple folding for X op !X, and X op X.  */
  res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
  if (res != NULL_TREE)
    {
      gimple_assign_set_rhs_from_tree (gsi, res);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  return false;
}

/* Perform re-associations of the plus or minus statement STMT that are
   always permitted.  Returns true if the CFG was changed.  */

static bool
associate_plusminus (gimple stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  gimple_stmt_iterator gsi;
  bool changed;

  /* We can't reassociate at all for saturating types.  */
  if (TYPE_SATURATING (TREE_TYPE (rhs1)))
    return false;

  /* First contract negates.  */
  do
    {
      changed = false;

      /* A +- (-B) -> A -+ B.  */
      if (TREE_CODE (rhs2) == SSA_NAME)
        {
          gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
          if (is_gimple_assign (def_stmt)
              && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
              && can_propagate_from (def_stmt))
            {
              code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
              gimple_assign_set_rhs_code (stmt, code);
              rhs2 = gimple_assign_rhs1 (def_stmt);
              gimple_assign_set_rhs2 (stmt, rhs2);
              gimple_set_modified (stmt, true);
              changed = true;
            }
        }

      /* (-A) + B -> B - A.  */
      if (TREE_CODE (rhs1) == SSA_NAME
          && code == PLUS_EXPR)
        {
          gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
          if (is_gimple_assign (def_stmt)
              && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
              && can_propagate_from (def_stmt))
            {
              code = MINUS_EXPR;
              gimple_assign_set_rhs_code (stmt, code);
              rhs1 = rhs2;
              gimple_assign_set_rhs1 (stmt, rhs1);
              rhs2 = gimple_assign_rhs1 (def_stmt);
              gimple_assign_set_rhs2 (stmt, rhs2);
              gimple_set_modified (stmt, true);
              changed = true;
            }
        }
    }
  while (changed);

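  /* The loop iterates because contracting one negate can expose
     another.  Illustrative example:

       t_1 = -b_0;
       x_2 = a_3 - t_1;    ->    x_2 = a_3 + b_0;

     Negation is exact, so these rewrites are valid even for the
     floating-point types rejected below.  */
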
  /* We can't reassociate floating-point or fixed-point plus or minus
     because of saturation to +-Inf.  */
  if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
      || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
    goto out;

  /* Second match patterns that allow contracting a plus-minus pair
     irrespective of overflow issues.

     (A +- B) - A       ->  +- B
     (A +- B) -+ B      ->  A
     (CST +- A) +- CST  ->  CST +- A
     (A + CST) +- CST   ->  A + CST
     ~A + A             ->  -1
     ~A + 1             ->  -A
     A - (A +- B)       ->  -+ B
     A +- (B +- A)      ->  +- B
     CST +- (CST +- A)  ->  CST +- A
     CST +- (A +- CST)  ->  CST +- A
     A + ~A             ->  -1

     via commuting the addition and contracting operations to zero
     by reassociation.  */

  gsi = gsi_for_stmt (stmt);
  if (TREE_CODE (rhs1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
        {
          enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
          if (def_code == PLUS_EXPR
              || def_code == MINUS_EXPR)
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
              if (operand_equal_p (def_rhs1, rhs2, 0)
                  && code == MINUS_EXPR)
                {
                  /* (A +- B) - A -> +- B.  */
                  code = ((def_code == PLUS_EXPR)
                          ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
                  rhs1 = def_rhs2;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (operand_equal_p (def_rhs2, rhs2, 0)
                       && code != def_code)
                {
                  /* (A +- B) -+ B -> A.  */
                  code = TREE_CODE (def_rhs1);
                  rhs1 = def_rhs1;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (TREE_CODE (rhs2) == INTEGER_CST
                       && TREE_CODE (def_rhs1) == INTEGER_CST)
                {
                  /* (CST +- A) +- CST -> CST +- A.  */
                  tree cst = fold_binary (code, TREE_TYPE (rhs1),
                                          def_rhs1, rhs2);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      code = def_code;
                      gimple_assign_set_rhs_code (stmt, code);
                      rhs1 = cst;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = def_rhs2;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
              else if (TREE_CODE (rhs2) == INTEGER_CST
                       && TREE_CODE (def_rhs2) == INTEGER_CST
                       && def_code == PLUS_EXPR)
                {
                  /* (A + CST) +- CST -> A + CST.  */
                  tree cst = fold_binary (code, TREE_TYPE (rhs1),
                                          def_rhs2, rhs2);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      code = PLUS_EXPR;
                      gimple_assign_set_rhs_code (stmt, code);
                      rhs1 = def_rhs1;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = cst;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
            }
          else if (def_code == BIT_NOT_EXPR
                   && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              if (code == PLUS_EXPR
                  && operand_equal_p (def_rhs1, rhs2, 0))
                {
                  /* ~A + A -> -1.  */
                  code = INTEGER_CST;
                  rhs1 = build_int_cst_type (TREE_TYPE (rhs2), -1);
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (code == PLUS_EXPR
                       && integer_onep (rhs2))
                {
                  /* ~A + 1 -> -A.  RHS1 is the ~A name here, so the
                     constant one has to be RHS2.  */
                  code = NEGATE_EXPR;
                  rhs1 = def_rhs1;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
            }
        }
    }

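  /* Illustrative examples for the RHS1 patterns above:

       t_1 = a_0 + b_2;
       x_3 = t_1 - a_0;    ->    x_3 = b_2;

       t_1 = ~a_0;
       x_3 = t_1 + 1;      ->    x_3 = -a_0;

     The mirrored patterns on RHS2 follow below.  */
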
  if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
      if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
        {
          enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
          if (def_code == PLUS_EXPR
              || def_code == MINUS_EXPR)
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
              if (operand_equal_p (def_rhs1, rhs1, 0)
                  && code == MINUS_EXPR)
                {
                  /* A - (A +- B) -> -+ B.  */
                  code = ((def_code == PLUS_EXPR)
                          ? NEGATE_EXPR : TREE_CODE (def_rhs2));
                  rhs1 = def_rhs2;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (operand_equal_p (def_rhs2, rhs1, 0)
                       && code != def_code)
                {
                  /* A +- (B +- A) -> +- B.  */
                  code = ((code == PLUS_EXPR)
                          ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
                  rhs1 = def_rhs1;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (TREE_CODE (rhs1) == INTEGER_CST
                       && TREE_CODE (def_rhs1) == INTEGER_CST)
                {
                  /* CST +- (CST +- A) -> CST +- A.  */
                  tree cst = fold_binary (code, TREE_TYPE (rhs2),
                                          rhs1, def_rhs1);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
                      gimple_assign_set_rhs_code (stmt, code);
                      rhs1 = cst;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = def_rhs2;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
              else if (TREE_CODE (rhs1) == INTEGER_CST
                       && TREE_CODE (def_rhs2) == INTEGER_CST)
                {
                  /* CST +- (A +- CST) -> CST +- A.  */
                  tree cst = fold_binary (def_code == code
                                          ? PLUS_EXPR : MINUS_EXPR,
                                          TREE_TYPE (rhs2),
                                          rhs1, def_rhs2);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      rhs1 = cst;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = def_rhs1;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
            }
          else if (def_code == BIT_NOT_EXPR
                   && INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              if (code == PLUS_EXPR
                  && operand_equal_p (def_rhs1, rhs1, 0))
                {
                  /* A + ~A -> -1.  */
                  code = INTEGER_CST;
                  rhs1 = build_int_cst_type (TREE_TYPE (rhs1), -1);
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
            }
        }
    }

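  /* Illustrative examples for the RHS2 patterns above:

       t_1 = a_0 + b_2;
       x_3 = a_0 - t_1;    ->    x_3 = -b_2;

       t_1 = a_0 - 3;
       x_3 = 10 - t_1;     ->    x_3 = 13 - a_0;

     the latter provided folding the two constants does not overflow.  */
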
out:
  if (gimple_modified_p (stmt))
    {
      fold_stmt_inplace (stmt);
      update_stmt (stmt);
      if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
          && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
        return true;
    }

  return false;
}

/* Combine two conversions in a row for the second conversion at *GSI.
   Returns 1 if any changes were made, 2 if cfg-cleanup needs to run,
   and 0 otherwise.  */

static int
combine_conversions (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op0, lhs;
  enum tree_code code = gimple_assign_rhs_code (stmt);

  gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
                       || code == FLOAT_EXPR
                       || code == FIX_TRUNC_EXPR);

  lhs = gimple_assign_lhs (stmt);
  op0 = gimple_assign_rhs1 (stmt);
  if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
    {
      gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
      return 1;
    }

  if (TREE_CODE (op0) != SSA_NAME)
    return 0;

  def_stmt = SSA_NAME_DEF_STMT (op0);
  if (!is_gimple_assign (def_stmt))
    return 0;

  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree defop0 = gimple_assign_rhs1 (def_stmt);
      tree type = TREE_TYPE (lhs);
      tree inside_type = TREE_TYPE (defop0);
      tree inter_type = TREE_TYPE (op0);
      int inside_int = INTEGRAL_TYPE_P (inside_type);
      int inside_ptr = POINTER_TYPE_P (inside_type);
      int inside_float = FLOAT_TYPE_P (inside_type);
      int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
      unsigned int inside_prec = TYPE_PRECISION (inside_type);
      int inside_unsignedp = TYPE_UNSIGNED (inside_type);
      int inter_int = INTEGRAL_TYPE_P (inter_type);
      int inter_ptr = POINTER_TYPE_P (inter_type);
      int inter_float = FLOAT_TYPE_P (inter_type);
      int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
      unsigned int inter_prec = TYPE_PRECISION (inter_type);
      int inter_unsignedp = TYPE_UNSIGNED (inter_type);
      int final_int = INTEGRAL_TYPE_P (type);
      int final_ptr = POINTER_TYPE_P (type);
      int final_float = FLOAT_TYPE_P (type);
      int final_vec = TREE_CODE (type) == VECTOR_TYPE;
      unsigned int final_prec = TYPE_PRECISION (type);
      int final_unsignedp = TYPE_UNSIGNED (type);

      /* In addition to the cases of two conversions in a row
         handled below, if we are converting something to its own
         type via an object of identical or wider precision, neither
         conversion is needed.  */
      if (useless_type_conversion_p (type, inside_type)
          && (((inter_int || inter_ptr) && final_int)
              || (inter_float && final_float))
          && inter_prec >= final_prec)
        {
          gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
          gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
          update_stmt (stmt);
          return remove_prop_source_from_use (op0) ? 2 : 1;
        }

      /* Likewise, if the intermediate and initial types are either both
         float or both integer, we don't need the middle conversion if the
         former is wider than the latter and doesn't change the signedness
         (for integers).  Avoid this if the final type is a pointer since
         then we sometimes need the middle conversion.  Likewise if the
         final type has a precision not equal to the size of its mode.  */
      if (((inter_int && inside_int)
           || (inter_float && inside_float)
           || (inter_vec && inside_vec))
          && inter_prec >= inside_prec
          && (inter_float || inter_vec
              || inter_unsignedp == inside_unsignedp)
          && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                && TYPE_MODE (type) == TYPE_MODE (inter_type))
          && ! final_ptr
          && (! final_vec || inter_prec == inside_prec))
        {
          gimple_assign_set_rhs1 (stmt, defop0);
          update_stmt (stmt);
          return remove_prop_source_from_use (op0) ? 2 : 1;
        }

      /* If we have a sign-extension of a zero-extended value, we can
         replace that by a single zero-extension.  */
      if (inside_int && inter_int && final_int
          && inside_prec < inter_prec && inter_prec < final_prec
          && inside_unsignedp && !inter_unsignedp)
        {
          gimple_assign_set_rhs1 (stmt, defop0);
          update_stmt (stmt);
          return remove_prop_source_from_use (op0) ? 2 : 1;
        }

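      /* Illustrative example, assuming 8-bit char, 32-bit int and
         64-bit long: for unsigned char c_0,

           i_1 = (int) c_0;
           l_2 = (long) i_1;

         The zero-extension to int leaves the sign bit clear, so the
         sign-extension to long is equivalent to zero-extending c_0
         directly, and l_2 = (long) c_0 suffices.  */
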
      /* Two conversions in a row are not needed unless:
         - some conversion is floating-point (overstrict for now), or
         - some conversion is a vector (overstrict for now), or
         - the intermediate type is narrower than both initial and
           final, or
         - the intermediate type and innermost type differ in signedness,
           and the outermost type is wider than the intermediate, or
         - the initial type is a pointer type and the precisions of the
           intermediate and final types differ, or
         - the final type is a pointer type and the precisions of the
           initial and intermediate types differ.  */
      if (! inside_float && ! inter_float && ! final_float
          && ! inside_vec && ! inter_vec && ! final_vec
          && (inter_prec >= inside_prec || inter_prec >= final_prec)
          && ! (inside_int && inter_int
                && inter_unsignedp != inside_unsignedp
                && inter_prec < final_prec)
          && ((inter_unsignedp && inter_prec > inside_prec)
              == (final_unsignedp && final_prec > inter_prec))
          && ! (inside_ptr && inter_prec != final_prec)
          && ! (final_ptr && inside_prec != inter_prec)
          && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                && TYPE_MODE (type) == TYPE_MODE (inter_type)))
        {
          gimple_assign_set_rhs1 (stmt, defop0);
          update_stmt (stmt);
          return remove_prop_source_from_use (op0) ? 2 : 1;
        }

      /* A truncation to an unsigned type should be canonicalized as
         a bitwise AND with a mask.  */
      if (final_int && inter_int && inside_int
          && final_prec == inside_prec
          && final_prec > inter_prec
          && inter_unsignedp)
        {
          tree tem;
          tem = fold_build2 (BIT_AND_EXPR, inside_type,
                             defop0,
                             double_int_to_tree
                               (inside_type, double_int_mask (inter_prec)));
          if (!useless_type_conversion_p (type, inside_type))
            {
              tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
                                              GSI_SAME_STMT);
              gimple_assign_set_rhs1 (stmt, tem);
            }
          else
            gimple_assign_set_rhs_from_tree (gsi, tem);
          update_stmt (gsi_stmt (*gsi));
          return 1;
        }

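      /* Illustrative example, assuming 16-bit short and 32-bit int:

           u_1 = (unsigned short) i_0;
           j_2 = (int) u_1;

         becomes the equivalent of

           j_2 = i_0 & 65535;

         with the mask built from the intermediate precision.  */
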
    }

  return 0;
}

/* Main entry point for the forward propagation and statement combine
   optimizer.  */

static unsigned int
ssa_forward_propagate_and_combine (void)
{
  basic_block bb;
  unsigned int todoflags = 0;

  cfg_changed = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi, prev;
      bool prev_initialized;

      /* Apply forward propagation to all stmts in the basic-block.
         Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
          gimple stmt = gsi_stmt (gsi);
          tree lhs, rhs;
          enum tree_code code;

          if (!is_gimple_assign (stmt))
            {
              gsi_next (&gsi);
              continue;
            }

          lhs = gimple_assign_lhs (stmt);
          rhs = gimple_assign_rhs1 (stmt);
          code = gimple_assign_rhs_code (stmt);
          if (TREE_CODE (lhs) != SSA_NAME
              || has_zero_uses (lhs))
            {
              gsi_next (&gsi);
              continue;
            }

          /* If this statement sets an SSA_NAME to an address,
             try to propagate the address into the uses of the SSA_NAME.  */
          if (code == ADDR_EXPR
              /* Handle pointer conversions on invariant addresses
                 as well, as this is valid gimple.  */
              || (CONVERT_EXPR_CODE_P (code)
                  && TREE_CODE (rhs) == ADDR_EXPR
                  && POINTER_TYPE_P (TREE_TYPE (lhs))))
            {
              tree base = get_base_address (TREE_OPERAND (rhs, 0));
              if ((!base
                   || !DECL_P (base)
                   || decl_address_invariant_p (base))
                  && !stmt_references_abnormal_ssa_name (stmt)
                  && forward_propagate_addr_expr (lhs, rhs))
                {
                  release_defs (stmt);
                  todoflags |= TODO_remove_unused_locals;
                  gsi_remove (&gsi, true);
                }
              else
                gsi_next (&gsi);
            }
          else if (code == POINTER_PLUS_EXPR && can_propagate_from (stmt))
            {
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
                  /* ??? Better adjust the interface to that function
                     instead of building new trees here.  */
                  && forward_propagate_addr_expr
                       (lhs,
                        build1 (ADDR_EXPR,
                                TREE_TYPE (rhs),
                                fold_build2 (MEM_REF,
                                             TREE_TYPE (TREE_TYPE (rhs)),
                                             rhs,
                                             fold_convert
                                               (ptr_type_node,
                                                gimple_assign_rhs2 (stmt))))))
                {
                  release_defs (stmt);
                  todoflags |= TODO_remove_unused_locals;
                  gsi_remove (&gsi, true);
                }
              else if (is_gimple_min_invariant (rhs))
                {
                  /* Make sure to fold &a[0] + off_1 here.  */
                  fold_stmt_inplace (stmt);
                  update_stmt (stmt);
                  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
                    gsi_next (&gsi);
                }
              else
                gsi_next (&gsi);
            }
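          /* Illustrative example: with int a[10] and a 4-byte int, the
             invariant case above folds

               ptr_2 = &a[0] + 16;    ->    ptr_2 = &a[4];

             in place, while a variable rhs is handled by propagating
             the offsetted address through the ADDR_EXPR machinery.  */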
          else if (TREE_CODE_CLASS (code) == tcc_comparison)
            {
              forward_propagate_comparison (stmt);
              gsi_next (&gsi);
            }
          else
            gsi_next (&gsi);
        }

      /* Combine stmts with the stmts defining their operands.
         Note we update GSI within the loop as necessary.  */
      prev_initialized = false;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
        {
          gimple stmt = gsi_stmt (gsi);
          bool changed = false;

          switch (gimple_code (stmt))
            {
            case GIMPLE_ASSIGN:
              {
                tree rhs1 = gimple_assign_rhs1 (stmt);
                enum tree_code code = gimple_assign_rhs_code (stmt);

                if ((code == BIT_NOT_EXPR
                     || code == NEGATE_EXPR)
                    && TREE_CODE (rhs1) == SSA_NAME)
                  changed = simplify_not_neg_expr (&gsi);
                else if (code == COND_EXPR)
                  {
                    /* In this case the entire COND_EXPR is in rhs1.  */
                    int did_something;
                    fold_defer_overflow_warnings ();
                    did_something = forward_propagate_into_cond (&gsi);
                    stmt = gsi_stmt (gsi);
                    if (did_something == 2)
                      cfg_changed = true;
                    fold_undefer_overflow_warnings
                      (!TREE_NO_WARNING (rhs1) && did_something, stmt,
                       WARN_STRICT_OVERFLOW_CONDITIONAL);
                    changed = did_something != 0;
                  }
                else if (TREE_CODE_CLASS (code) == tcc_comparison)
                  {
                    bool no_warning = gimple_no_warning_p (stmt);
                    int did_something;
                    fold_defer_overflow_warnings ();
                    did_something = forward_propagate_into_comparison (&gsi);
                    if (did_something == 2)
                      cfg_changed = true;
                    fold_undefer_overflow_warnings
                      (!no_warning && did_something,
                       stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
                    changed = did_something != 0;
                  }
                else if (code == BIT_AND_EXPR
                         || code == BIT_IOR_EXPR
                         || code == BIT_XOR_EXPR)
                  changed = simplify_bitwise_binary (&gsi);
                else if (code == PLUS_EXPR
                         || code == MINUS_EXPR)
                  changed = associate_plusminus (stmt);
                else if (CONVERT_EXPR_CODE_P (code)
                         || code == FLOAT_EXPR
                         || code == FIX_TRUNC_EXPR)
                  {
                    int did_something = combine_conversions (&gsi);
                    if (did_something == 2)
                      cfg_changed = true;
                    changed = did_something != 0;
                  }
                break;
              }

            case GIMPLE_SWITCH:
              changed = simplify_gimple_switch (stmt);
              break;

            case GIMPLE_COND:
              {
                int did_something;
                fold_defer_overflow_warnings ();
                did_something = forward_propagate_into_gimple_cond (stmt);
                if (did_something == 2)
                  cfg_changed = true;
                fold_undefer_overflow_warnings
                  (did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
                changed = did_something != 0;
                break;
              }

            case GIMPLE_CALL:
              {
                tree callee = gimple_call_fndecl (stmt);
                if (callee != NULL_TREE
                    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
                  changed = simplify_builtin_call (&gsi, callee);
                break;
              }

            default:;
            }

          if (changed)
            {
              /* If the stmt changed then re-visit it and the statements
                 inserted before it.  */
              if (!prev_initialized)
                gsi = gsi_start_bb (bb);
              else
                {
                  gsi = prev;
                  gsi_next (&gsi);
                }
            }
          else
            {
              prev = gsi;
              prev_initialized = true;
              gsi_next (&gsi);
            }
        }
    }

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}

static bool
gate_forwprop (void)
{
  return flag_tree_forwprop;
}

struct gimple_opt_pass pass_forwprop =
{
 {
  GIMPLE_PASS,
  "forwprop",			/* name */
  gate_forwprop,		/* gate */
  ssa_forward_propagate_and_combine, /* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_FORWPROP,		/* tv_id */
  PROP_cfg | PROP_ssa,		/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_ggc_collect
  | TODO_update_ssa
  | TODO_verify_ssa		/* todo_flags_finish */
 }
};