/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "tm_p.h"
27 #include "basic-block.h"
28 #include "timevar.h"
29 #include "gimple-pretty-print.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-dump.h"
33 #include "langhooks.h"
34 #include "flags.h"
35 #include "gimple.h"
36 #include "expr.h"
/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
   integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or
     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither have volatile on them):
     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or
     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */
static bool forward_propagate_addr_expr (tree name, tree rhs);

/* Set to true if we delete EH edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple stmt);
/* Get the next statement we can propagate NAME's value into, skipping
   trivial copies.  Returns the statement that is suitable as a
   propagation destination or NULL if there is no such one.
   This only returns destinations in a single-use chain.  If FINAL_NAME_P
   is non-NULL, it is set to the SSA name that represents the use.  */
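
/* For illustration (hypothetical SSA names): given the chain

     y_2 = x_1;
     z_3 = y_2;
     if (z_3 != 0) goto ...;

   get_prop_dest_stmt (x_1, &final) skips the two trivial copies,
   returns the GIMPLE_COND and sets final to z_3.  */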
static gimple
get_prop_dest_stmt (tree name, tree *final_name_p)
{
  use_operand_p use;
  gimple use_stmt;

  do {
    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))
      return NULL;

    /* If this is not a trivial copy, we found it.  */
    if (!gimple_assign_ssa_name_copy_p (use_stmt)
        || gimple_assign_rhs1 (use_stmt) != name)
      break;

    /* Continue searching uses of the copy destination.  */
    name = gimple_assign_lhs (use_stmt);
  } while (1);

  if (final_name_p)
    *final_name_p = name;

  return use_stmt;
}
/* Get the statement we can propagate from into NAME, skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL if there is no such one.
   If SINGLE_USE_ONLY is set, only sources which have a single-use
   chain up to NAME are considered.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single-use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */
static gimple
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
        single_use = false;
        if (single_use_only)
          return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is not a simple copy, we possibly found it.  */
    if (!gimple_assign_ssa_name_copy_p (def_stmt))
      {
        tree rhs;

        if (!single_use_only && single_use_p)
          *single_use_p = single_use;

        /* We can look through pointer conversions in the search
           for a useful stmt for the comparison folding.  */
        rhs = gimple_assign_rhs1 (def_stmt);
        if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
            && TREE_CODE (rhs) == SSA_NAME
            && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
            && POINTER_TYPE_P (TREE_TYPE (rhs)))
          name = rhs;
        else
          return def_stmt;
      }
    else
      {
        /* Continue searching the def of the copy source name.  */
        name = gimple_assign_rhs1 (def_stmt);
      }
  } while (1);
}
/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can always be propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we cannot apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}
/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (SSA_NAME_IN_FREE_LIST (name)
        || SSA_NAME_IS_DEFAULT_DEF (name)
        || !has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    if (gimple_code (stmt) == GIMPLE_PHI
        || gimple_has_side_effects (stmt))
      return cfg_changed;

    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    gsi_remove (&gsi, true);
    release_defs (stmt);
    cfg_changed |= gimple_purge_dead_eh_edges (bb);

    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}
/* Return the rhs of a gimple_assign STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt),
                            gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}
/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}
/* Combine the comparison OP0 CODE OP1 in STMT with the defining statements
   of its operands.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */
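
/* For example (hypothetical SSA names): given

     a_2 = b_1 + 3;
     t_3 = a_2 == 7;

   combining the definition of a_2 into the comparison yields
   t_3 = b_1 == 4, matching the pattern described in the header
   comment of this file.  */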
static tree
forward_propagate_into_comparison_1 (gimple stmt,
                                     enum tree_code code, tree type,
                                     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        rhs0, op1, !single_use0_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        op0, rhs1, !single_use1_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
                                  rhs0, rhs1,
                                  !(single_use0_p && single_use1_p));

  return tmp;
}
/* Propagate from the ssa name definition statements of the assignment
   with a comparison at *GSI into the comparison if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
                                             gimple_assign_rhs_code (stmt),
                                             TREE_TYPE (gimple_assign_lhs (stmt)),
                                             rhs1, rhs2);
  if (tmp)
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt_inplace (stmt);
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}
/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gimple stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
                                             boolean_type_node,
                                             rhs1, rhs2);
  if (tmp)
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_gimple_expr (dump_file, stmt, 0, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  return 0;
}
/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_gimple_cond.  */

static int
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
                                               boolean_type_node,
                                               TREE_OPERAND (cond, 0),
                                               TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      tree name = cond, rhs0;
      gimple def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return 0;

      rhs0 = gimple_assign_rhs1 (def_stmt);
      tmp = combine_cond_expr_cond (stmt, NE_EXPR, boolean_type_node, rhs0,
                                    build_int_cst (TREE_TYPE (rhs0), 0),
                                    false);
    }

  if (tmp)
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, cond, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp));
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return is_gimple_min_invariant (tmp) ? 2 : 1;
    }

  return 0;
}
/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
    cfg_changed = true;

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}
/* DEF_RHS contains the address of the 0th element in an array.
   USE_STMT uses type of DEF_RHS to compute the address of an
   arbitrary element within the array.  The (variable) byte offset
   of the element is contained in OFFSET.

   We walk back through the use-def chains of OFFSET to verify that
   it is indeed computing the offset of an element within the array
   and extract the index corresponding to the given byte offset.

   We then try to fold the entire address expression into a form
   &array[index].

   If we are successful, we replace the right hand side of USE_STMT
   with the new address computation.  */
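
/* For example (illustrative; assume a is an int array, so the element
   size is 4):

     offset_2 = i_1 * 4;
     ptr2_3 = &a[0] + offset_2;

   is replaced by

     ptr2_3 = &a[i_1];  */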
static bool
forward_propagate_addr_into_variable_array_index (tree offset,
                                                  tree def_rhs,
                                                  gimple_stmt_iterator *use_stmt_gsi)
{
  tree index, tunit;
  gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
  tree new_rhs, tmp;

  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
    tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
  else if (TREE_CODE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))) == ARRAY_TYPE)
    tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))));
  else
    return false;
  if (!host_integerp (tunit, 1))
    return false;

  /* Get the offset's defining statement.  */
  offset_def = SSA_NAME_DEF_STMT (offset);

  /* Try to find an expression for a proper index.  This is either a
     multiplication expression by the element size or just the ssa name we came
     along in case the element size is one.  In that case, however, we do not
     allow multiplications because they can be computing index to a higher
     level dimension (PR 37861).  */
  if (integer_onep (tunit))
    {
      if (is_gimple_assign (offset_def)
          && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
        return false;

      index = offset;
    }
  else
    {
      /* The statement which defines OFFSET before type conversion
         must be a simple GIMPLE_ASSIGN.  */
      if (!is_gimple_assign (offset_def))
        return false;

      /* The RHS of the statement which defines OFFSET must be a
         multiplication of an object by the size of the array elements.
         This implicitly verifies that the size of the array elements
         is constant.  */
      if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
          && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
          && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
        {
          /* The first operand to the MULT_EXPR is the desired index.  */
          index = gimple_assign_rhs1 (offset_def);
        }
      /* If we have idx * tunit + CST * tunit re-associate that.  */
      else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
                || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
               && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
               && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
               && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
                                                gimple_assign_rhs2 (offset_def),
                                                tunit)) != NULL_TREE)
        {
          gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
          if (is_gimple_assign (offset_def2)
              && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
              && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
              && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
            {
              index = fold_build2 (gimple_assign_rhs_code (offset_def),
                                   TREE_TYPE (offset),
                                   gimple_assign_rhs1 (offset_def2), tmp);
            }
          else
            return false;
        }
      else
        return false;
    }

  /* Replace the pointer addition with array indexing.  */
  index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
    {
      new_rhs = unshare_expr (def_rhs);
      TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 1) = index;
    }
  else
    {
      new_rhs = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))),
                        unshare_expr (TREE_OPERAND (def_rhs, 0)),
                        index, integer_zero_node, NULL_TREE);
      new_rhs = build_fold_addr_expr (new_rhs);
      if (!useless_type_conversion_p (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                                      TREE_TYPE (new_rhs)))
        {
          new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true,
                                              NULL_TREE, true, GSI_SAME_STMT);
          new_rhs = fold_convert (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                                  new_rhs);
        }
    }
  gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
  use_stmt = gsi_stmt (*use_stmt_gsi);

  /* That should have created gimple, so there is no need to
     record information to undo the propagation.  */
  fold_stmt_inplace (use_stmt);
  tidy_after_forward_propagate_addr (use_stmt);
  return true;
}
/* NAME is an SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation may
   not be totally successful, yet things may still have been changed).  */
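
/* A typical case (illustrative): with DEF_RHS = &x.y, the use

     res_2 = *name_1;

   becomes res_2 = x.y, eliminating both the address-taking and the
   dereference.  */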
static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Trivial cases.  The use statement could be a trivial copy or a
     useless conversion.  Recurse to the uses of the lhs as copyprop does
     not copy through different variant pointers and FRE does not catch
     all useless conversions.  Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && ((rhs_code == SSA_NAME && rhs == name)
          || CONVERT_EXPR_CODE_P (rhs_code)))
    {
      /* Only recurse if we don't deal with a single use or we cannot
         do the propagation to the current statement.  In particular
         we can end up with a conversion needed for a non-invariant
         address which we cannot do in a single statement.  */
      if (!single_use_p
          || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
              && (!is_gimple_min_invariant (def_rhs)
                  || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                      && POINTER_TYPE_P (TREE_TYPE (def_rhs))
                      && (TYPE_PRECISION (TREE_TYPE (lhs))
                          > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
        return forward_propagate_addr_expr (lhs, def_rhs);

      gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
        gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
      else
        gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
      return true;
    }

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
         for further propagation.  Simply rely on fold building that
         and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
                                 def_rhs,
                                 fold_convert (ptr_type_node,
                                               gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
        return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
                                                    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
         bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
          && forward_propagate_addr_expr (lhs, new_def_rhs))
        return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
                                        new_def_rhs, NULL_TREE);
      else if (is_gimple_min_invariant (new_def_rhs))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
                                        new_def_rhs, NULL_TREE);
      else
        return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  lhs = gimple_assign_lhs (use_stmt);
  while (handled_component_p (lhs))
    lhs = TREE_OPERAND (lhs, 0);

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (lhs);
          tree new_ptr;
          off = double_int_add (off,
                                shwi_to_double_int (def_rhs_offset));
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off = double_int_add (off, mem_ref_offset (def_rhs_base));
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (lhs, 0) = new_ptr;
          TREE_OPERAND (lhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the only use.  */
          if (single_use_p)
            return true;
        }
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the LHS preserving the original alias-type.  */
      else if (gimple_assign_lhs (use_stmt) == lhs
               && useless_type_conversion_p
                    (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
                     TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset
                = int_const_binop (PLUS_EXPR, TREE_OPERAND (lhs, 1),
                                   TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (lhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
          gimple_assign_set_lhs (use_stmt,
                                 unshare_expr (TREE_OPERAND (def_rhs, 0)));
          *def_rhs_basep = saved;
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the
             only use.  */
          if (single_use_p)
            return true;
        }
      else
        /* We can have a struct assignment dereferencing our name twice.
           Note that we didn't propagate into the lhs to not falsely
           claim we did when propagating into the rhs.  */
        res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  rhs = gimple_assign_rhs1 (use_stmt);
  if (TREE_CODE (rhs) == ADDR_EXPR)
    rhs = TREE_OPERAND (rhs, 0);
  while (handled_component_p (rhs))
    rhs = TREE_OPERAND (rhs, 0);

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (rhs);
          tree new_ptr;
          off = double_int_add (off,
                                shwi_to_double_int (def_rhs_offset));
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off = double_int_add (off, mem_ref_offset (def_rhs_base));
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (rhs, 0) = new_ptr;
          TREE_OPERAND (rhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
          fold_stmt_inplace (use_stmt);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the RHS preserving the original alias-type.  */
      else if (gimple_assign_rhs1 (use_stmt) == rhs
               && useless_type_conversion_p
                    (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                     TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset
                = int_const_binop (PLUS_EXPR, TREE_OPERAND (rhs, 1),
                                   TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (rhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
          gimple_assign_set_rhs1 (use_stmt,
                                  unshare_expr (TREE_OPERAND (def_rhs, 0)));
          *def_rhs_basep = saved;
          fold_stmt_inplace (use_stmt);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size
     of the elements in X into &x[C1 + C2/element size].  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = maybe_fold_stmt_addition (gimple_location (use_stmt),
                                               TREE_TYPE (def_rhs),
                                               def_rhs, rhs2);
      if (new_rhs)
        {
          tree type = TREE_TYPE (gimple_assign_lhs (use_stmt));
          new_rhs = unshare_expr (new_rhs);
          if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs)))
            {
              if (!is_gimple_min_invariant (new_rhs))
                new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs,
                                                    true, NULL_TREE,
                                                    true, GSI_SAME_STMT);
              new_rhs = fold_convert (type, new_rhs);
            }
          gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
          use_stmt = gsi_stmt (*use_stmt_gsi);
          update_stmt (use_stmt);
          tidy_after_forward_propagate_addr (use_stmt);
          return true;
        }
    }

  /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
     converting a multiplication of an index by the size of the
     array elements, then the result is converted into the proper
     type for the arithmetic.  */
  if (TREE_CODE (rhs2) == SSA_NAME
      && (TREE_CODE (array_ref) != ARRAY_REF
          || integer_zerop (TREE_OPERAND (array_ref, 1)))
      && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
      /* Avoid problems with IVopts creating PLUS_EXPRs with a
         different type than their operands.  */
      && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
    return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
                                                             use_stmt_gsi);
  return false;
}
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.
   Returns true if all uses have been propagated into.  */
static bool
forward_propagate_addr_expr (tree name, tree rhs)
{
  int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
        {
          if (!is_gimple_debug (use_stmt))
            all = false;
          continue;
        }

      /* If the use is in a deeper loop nest, then we do not want
         to propagate non-invariant ADDR_EXPRs into the loop as that
         is likely adding expression evaluations into the loop.  */
      if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
          && !is_gimple_min_invariant (rhs))
        {
          all = false;
          continue;
        }

      {
        gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
        result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                                single_use_p);
        /* If the use has moved to a different statement adjust
           the update machinery for the old statement too.  */
        if (use_stmt != gsi_stmt (gsi))
          {
            update_stmt (use_stmt);
            use_stmt = gsi_stmt (gsi);
          }

        update_stmt (use_stmt);
      }
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  return all && has_zero_uses (name);
}
/* Forward propagate the comparison defined in STMT like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  */
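
/* For instance (hypothetical SSA names): given

     cond_1 = x_2 < y_3;
     a_4 = cond_1 ^ 1;

   the use is rewritten to the inverted comparison a_4 = x_2 >= y_3
   (assuming NaNs need not be honored for the operand type).  */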
static bool
forward_propagate_comparison (gimple stmt)
{
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  tree tmp = NULL_TREE;
  gimple_stmt_iterator gsi;
  enum tree_code code;
  tree lhs;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    return false;

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt
      || !is_gimple_assign (use_stmt))
    return false;

  code = gimple_assign_rhs_code (use_stmt);
  lhs = gimple_assign_lhs (use_stmt);
  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
    return false;

  /* We can propagate the condition into a statement that
     computes the logical negation of the comparison result.  */
  if ((code == BIT_NOT_EXPR
       && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
      || (code == BIT_XOR_EXPR
          && integer_onep (gimple_assign_rhs2 (use_stmt))))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
      bool nans = HONOR_NANS (TYPE_MODE (type));
      enum tree_code inv_code;
      inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
      if (inv_code == ERROR_MARK)
        return false;

      tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
                    gimple_assign_rhs2 (stmt));
    }
  else
    return false;

  gsi = gsi_for_stmt (use_stmt);
  gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
  use_stmt = gsi_stmt (gsi);
  update_stmt (use_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Replaced '");
      print_gimple_expr (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "' with '");
      print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
      fprintf (dump_file, "'\n");
    }

  /* Remove defining statements.  */
  return remove_prop_source_from_use (name);
}
/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
   If so, we can change STMT into lhs = y which can later be copy
   propagated.  Similarly for negation.

   This could trivially be formulated as a forward propagation
   to immediate uses.  However, we already had an implementation
   from DOM which used backward propagation via the use-def links.

   It turns out that backward propagation is actually faster as
   there's less work to do for each NOT/NEG expression we find.
   Backwards propagation needs to look at the statement in a single
   backlink.  Forward propagation needs to look at potentially more
   than one forward link.

   Returns true when the statement was changed.  */
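
/* E.g. (hypothetical SSA names): for

     x_2 = ~y_1;
     lhs_3 = ~x_2;

   the statement defining lhs_3 becomes lhs_3 = y_1.  */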
static bool
simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree rhs = gimple_assign_rhs1 (stmt);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);

  /* See if the RHS_DEF_STMT has the same form as our statement.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
    {
      tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);

      /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
      if (TREE_CODE (rhs_def_operand) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
        {
          gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
          stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);
          return true;
        }
    }

  return false;
}
/* STMT is a GIMPLE_SWITCH for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */
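
/* For example (illustrative): given

     short s_1 = ...;
     i_2 = (int) s_1;
     switch (i_2) ...

   the widening cast preserves the value, so the switch can use s_1
   as its index directly.  */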
static bool
simplify_gimple_switch (gimple stmt)
{
  tree cond = gimple_switch_index (stmt);
  tree def, to, ti;
  gimple def_stmt;

  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      def_stmt = SSA_NAME_DEF_STMT (cond);
      if (is_gimple_assign (def_stmt))
        {
          if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
            {
              int need_precision;
              bool fail;

              def = gimple_assign_rhs1 (def_stmt);

              /* ??? Why was Jeff testing this?  We are gimple...  */
              gcc_checking_assert (is_gimple_val (def));

              to = TREE_TYPE (cond);
              ti = TREE_TYPE (def);

              /* If we have an extension that preserves value, then we
                 can copy the source value into the switch.  */

              need_precision = TYPE_PRECISION (ti);
              fail = false;
              if (! INTEGRAL_TYPE_P (ti))
                fail = true;
              else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
                fail = true;
              else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
                need_precision += 1;
              if (TYPE_PRECISION (to) < need_precision)
                fail = true;

              if (!fail)
                {
                  gimple_switch_set_index (stmt, def);
                  update_stmt (stmt);
                  return true;
                }
            }
        }
    }

  return false;
}
/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */
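
/* E.g. (illustrative, char buf[16]): for p1 = &buf[1] and p2 defined by

     q_1 = &buf[0];
     p2_2 = q_1 + 5;

   both walks reach buf, and the function returns 4.  */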
static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
         twice, to handle ADDR_EXPR directly in p1/p2,
         SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
         on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
        {
          if (!POINTER_TYPE_P (TREE_TYPE (p)))
            break;
          if (TREE_CODE (p) == ADDR_EXPR)
            {
              tree q = TREE_OPERAND (p, 0);
              HOST_WIDE_INT offset;
              tree base = get_addr_base_and_unit_offset (q, &offset);
              if (base)
                {
                  q = base;
                  if (offset)
                    off = size_binop (PLUS_EXPR, off, size_int (offset));
                }
              if (TREE_CODE (q) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
                {
                  p = TREE_OPERAND (q, 0);
                  off = size_binop (PLUS_EXPR, off,
                                    double_int_to_tree (sizetype,
                                                        mem_ref_offset (q)));
                }
              else
                {
                  exps[i][j] = q;
                  offs[i][j++] = off;
                  break;
                }
            }
          if (TREE_CODE (p) != SSA_NAME)
            break;
          exps[i][j] = p;
          offs[i][j++] = off;
          if (j == CPD_ITERATIONS)
            break;
          stmt = SSA_NAME_DEF_STMT (p);
          if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
            break;
          code = gimple_assign_rhs_code (stmt);
          if (code == POINTER_PLUS_EXPR)
            {
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
                break;
              off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
              p = gimple_assign_rhs1 (stmt);
            }
          else if (code == ADDR_EXPR || code == NOP_EXPR)
            p = gimple_assign_rhs1 (stmt);
          else
            break;
        }
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
        return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
   memcpy (p, "abcd", 4);
   memset (p + 4, ' ', 3);
   into
   memcpy (p, "abcd   ", 7);
   call if the latter can be stored by pieces during expansion.  */
static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
          || gimple_call_lhs (stmt2)
          || CHAR_BIT != 8
          || BITS_PER_UNIT != 8)
        break;
      else
        {
          tree callee1;
          tree ptr1, src1, str1, off1, len1, lhs1;
          tree ptr2 = gimple_call_arg (stmt2, 0);
          tree val2 = gimple_call_arg (stmt2, 1);
          tree len2 = gimple_call_arg (stmt2, 2);
          tree diff, vdef, new_str_cst;
          gimple use_stmt;
          unsigned int ptr1_align;
          unsigned HOST_WIDE_INT src_len;
          char *src_buf;
          use_operand_p use_p;

          if (!host_integerp (val2, 0)
              || !host_integerp (len2, 1))
            break;
          if (is_gimple_call (stmt1))
            {
              /* If first stmt is a call, it needs to be memcpy
                 or mempcpy, with string literal as second argument and
                 constant length.  */
              callee1 = gimple_call_fndecl (stmt1);
              if (callee1 == NULL_TREE
                  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
                  || gimple_call_num_args (stmt1) != 3)
                break;
              if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
                  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
                break;
              ptr1 = gimple_call_arg (stmt1, 0);
              src1 = gimple_call_arg (stmt1, 1);
              len1 = gimple_call_arg (stmt1, 2);
              lhs1 = gimple_call_lhs (stmt1);
              if (!host_integerp (len1, 1))
                break;
              str1 = string_constant (src1, &off1);
              if (str1 == NULL_TREE)
                break;
              if (!host_integerp (off1, 1)
                  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
                  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
                                             - tree_low_cst (off1, 1)) > 0
                  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
                  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
                     != TYPE_MODE (char_type_node))
                break;
            }
          else if (gimple_assign_single_p (stmt1))
            {
              /* Otherwise look for length 1 memcpy optimized into
                 assignment.  */
              ptr1 = gimple_assign_lhs (stmt1);
              src1 = gimple_assign_rhs1 (stmt1);
              if (TREE_CODE (ptr1) != MEM_REF
                  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
                  || !host_integerp (src1, 0))
                break;
              ptr1 = build_fold_addr_expr (ptr1);
              callee1 = NULL_TREE;
              len1 = size_one_node;
              lhs1 = NULL_TREE;
              off1 = size_zero_node;
              str1 = NULL_TREE;
            }
          else
            break;

          diff = constant_pointer_difference (ptr1, ptr2);
          if (diff == NULL && lhs1 != NULL)
            {
              diff = constant_pointer_difference (lhs1, ptr2);
              if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
                  && diff != NULL)
                diff = size_binop (PLUS_EXPR, diff,
                                   fold_convert (sizetype, len1));
            }
          /* If the difference between the second and first destination pointer
             is not constant, or is bigger than memcpy length, bail out.  */
          if (diff == NULL
              || !host_integerp (diff, 1)
              || tree_int_cst_lt (len1, diff))
            break;

          /* Use maximum of difference plus memset length and memcpy length
             as the new memcpy length, if it is too big, bail out.  */
          src_len = tree_low_cst (diff, 1);
          src_len += tree_low_cst (len2, 1);
          if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
            src_len = tree_low_cst (len1, 1);
          if (src_len > 1024)
            break;

          /* If mempcpy value is used elsewhere, bail out, as mempcpy
             with bigger length will return different result.  */
          if (lhs1 != NULL_TREE
              && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
              && (TREE_CODE (lhs1) != SSA_NAME
                  || !single_imm_use (lhs1, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          /* If anything reads memory in between memcpy and memset
             call, the modified memcpy call might change it.  */
          vdef = gimple_vdef (stmt1);
          if (vdef != NULL
              && (!single_imm_use (vdef, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          ptr1_align = get_pointer_alignment (ptr1);
          /* Construct the new source string literal.  */
          src_buf = XALLOCAVEC (char, src_len + 1);
          if (callee1)
            memcpy (src_buf,
                    TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
                    tree_low_cst (len1, 1));
          else
            src_buf[0] = tree_low_cst (src1, 0);
          memset (src_buf + tree_low_cst (diff, 1),
                  tree_low_cst (val2, 1), tree_low_cst (len2, 1));
          src_buf[src_len] = '\0';
          /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
             handle embedded '\0's.  */
          if (strlen (src_buf) != src_len)
            break;
          rtl_profile_for_bb (gimple_bb (stmt2));
          /* If the new memcpy wouldn't be emitted by storing the literal
             by pieces, this optimization might enlarge .rodata too much,
             as commonly used string literals couldn't be shared any
             longer.  */
          if (!can_store_by_pieces (src_len,
                                    builtin_strncpy_read_str,
                                    src_buf, ptr1_align, false))
            break;

          new_str_cst = build_string_literal (src_len, src_buf);
          if (callee1)
            {
              /* If STMT1 is a mem{,p}cpy call, adjust it and remove
                 memset call.  */
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                gimple_call_set_lhs (stmt1, NULL_TREE);
              gimple_call_set_arg (stmt1, 1, new_str_cst);
              gimple_call_set_arg (stmt1, 2,
                                   build_int_cst (TREE_TYPE (len1), src_len));
              update_stmt (stmt1);
              unlink_stmt_vdef (stmt2);
              gsi_remove (gsi_p, true);
              release_defs (stmt2);
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                release_ssa_name (lhs1);
              return true;
            }
          else
            {
              /* Otherwise, if STMT1 is length 1 memcpy optimized into
                 assignment, remove STMT1 and change memset call into
                 memcpy call.  */
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

              if (!is_gimple_val (ptr1))
                ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
                                                 true, GSI_SAME_STMT);
              gimple_call_set_fndecl (stmt2, built_in_decls [BUILT_IN_MEMCPY]);
              gimple_call_set_arg (stmt2, 0, ptr1);
              gimple_call_set_arg (stmt2, 1, new_str_cst);
              gimple_call_set_arg (stmt2, 2,
                                   build_int_cst (TREE_TYPE (len2), src_len));
              unlink_stmt_vdef (stmt1);
              gsi_remove (&gsi, true);
              release_defs (stmt1);
              update_stmt (stmt2);
              return false;
            }
        }
      break;
    default:
      break;
    }
  return false;
}
/* Checks if expression has type of one-bit precision, or is a known
   truth-valued expression.  */
static bool
truth_valued_ssa_name (tree name)
{
  gimple def;
  tree type = TREE_TYPE (name);

  if (!INTEGRAL_TYPE_P (type))
    return false;
  /* Don't check here for BOOLEAN_TYPE as the precision isn't
     necessarily one and so ~X is not equal to !X.  */
  if (TYPE_PRECISION (type) == 1)
    return true;
  def = SSA_NAME_DEF_STMT (name);
  if (is_gimple_assign (def))
    return truth_value_p (gimple_assign_rhs_code (def));
  return false;
}
/* Helper routine for the simplify_bitwise_binary_1 function.
   Return for the SSA name NAME the expression X if it meets the condition
   NAME = !X.  Otherwise return NULL_TREE.
   Detected patterns for NAME = !X are:
     !X and X == 0 for X with integral type.
     X ^ 1, X != 1, or ~X for X with integral type with precision of one.  */
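
/* E.g. (hypothetical SSA names): for a one-bit-precision b_1 and

     t_2 = b_1 ^ 1;

   lookup_logical_inverted_value (t_2) returns b_1, since t_2 is the
   logical negation of b_1.  */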
static tree
lookup_logical_inverted_value (tree name)
{
  tree op1, op2;
  enum tree_code code;
  gimple def;

  /* If NAME has a non-integral type, or isn't an SSA_NAME, then
     return.  */
  if (TREE_CODE (name) != SSA_NAME
      || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
    return NULL_TREE;
  def = SSA_NAME_DEF_STMT (name);
  if (!is_gimple_assign (def))
    return NULL_TREE;

  code = gimple_assign_rhs_code (def);
  op1 = gimple_assign_rhs1 (def);
  op2 = NULL_TREE;

  /* For an EQ_EXPR, NE_EXPR or BIT_XOR_EXPR operation get the second
     operand.  If CODE isn't one of those or BIT_NOT_EXPR, then return.  */
  if (code == EQ_EXPR || code == NE_EXPR
      || code == BIT_XOR_EXPR)
    op2 = gimple_assign_rhs2 (def);

  switch (code)
    {
    case BIT_NOT_EXPR:
      if (truth_valued_ssa_name (name))
        return op1;
      break;
    case EQ_EXPR:
      /* Check if we have X == 0 and X has an integral type.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
        break;
      if (integer_zerop (op2))
        return op1;
      break;
    case NE_EXPR:
      /* Check if we have X != 1 and X is truth-valued.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
        break;
      if (integer_onep (op2) && truth_valued_ssa_name (op1))
        return op1;
      break;
    case BIT_XOR_EXPR:
      /* Check if we have X ^ 1 and X is truth-valued.  */
      if (integer_onep (op2) && truth_valued_ssa_name (op1))
        return op1;
      break;
    default:
      break;
    }

  return NULL_TREE;
}
/* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
   operations CODE, if one operand has the logically inverted
   value of the other.  */
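
/* E.g. (hypothetical SSA names): for a truth-valued b_1 and

     t_2 = b_1 == 0;
     r_3 = b_1 & t_2;

   the AND of a value and its logical negation folds to 0; with | or ^
   instead of &, a truth-valued operand folds to 1.  */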
static tree
simplify_bitwise_binary_1 (enum tree_code code, tree type,
                           tree arg1, tree arg2)
{
  tree anot;

  /* If CODE isn't a bitwise binary operation, return NULL_TREE.  */
  if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
      && code != BIT_XOR_EXPR)
    return NULL_TREE;

  /* First check if operands ARG1 and ARG2 are equal.  If so
     return NULL_TREE as this optimization is handled by fold_stmt.  */
  if (arg1 == arg2)
    return NULL_TREE;
  /* See if we have in arguments logical-not patterns.  */
  if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
       || anot != arg2)
      && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
          || anot != arg1))
    return NULL_TREE;

  /* X & !X -> 0.  */
  if (code == BIT_AND_EXPR)
    return fold_convert (type, integer_zero_node);
  /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued.  */
  if (truth_valued_ssa_name (anot))
    return fold_convert (type, integer_one_node);

  /* ??? Otherwise result is (X != 0 ? X : 1).  not handled.  */
  return NULL_TREE;
}
/* Simplify bitwise binary operations.
   Return true if a transformation applied, otherwise return false.  */
1715 static bool
1716 simplify_bitwise_binary (gimple_stmt_iterator *gsi)
1718 gimple stmt = gsi_stmt (*gsi);
1719 tree arg1 = gimple_assign_rhs1 (stmt);
1720 tree arg2 = gimple_assign_rhs2 (stmt);
1721 enum tree_code code = gimple_assign_rhs_code (stmt);
1722 tree res;
1723 gimple def1 = NULL, def2 = NULL;
1724 tree def1_arg1, def2_arg1;
1725 enum tree_code def1_code, def2_code;
1727 def1_code = TREE_CODE (arg1);
1728 def1_arg1 = arg1;
1729 if (TREE_CODE (arg1) == SSA_NAME)
1731 def1 = SSA_NAME_DEF_STMT (arg1);
1732 if (is_gimple_assign (def1))
1734 def1_code = gimple_assign_rhs_code (def1);
1735 def1_arg1 = gimple_assign_rhs1 (def1);
1739 def2_code = TREE_CODE (arg2);
1740 def2_arg1 = arg2;
1741 if (TREE_CODE (arg2) == SSA_NAME)
1743 def2 = SSA_NAME_DEF_STMT (arg2);
1744 if (is_gimple_assign (def2))
1746 def2_code = gimple_assign_rhs_code (def2);
1747 def2_arg1 = gimple_assign_rhs1 (def2);
1751 /* Try to fold (type) X op CST -> (type) (X op ((type-x) CST)). */
1752 if (TREE_CODE (arg2) == INTEGER_CST
1753 && CONVERT_EXPR_CODE_P (def1_code)
1754 && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
1755 && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
1757 gimple newop;
1758 tree tem = create_tmp_reg (TREE_TYPE (def1_arg1), NULL);
1759 newop =
1760 gimple_build_assign_with_ops (code, tem, def1_arg1,
1761 fold_convert_loc (gimple_location (stmt),
1762 TREE_TYPE (def1_arg1),
1763 arg2));
1764 tem = make_ssa_name (tem, newop);
1765 gimple_assign_set_lhs (newop, tem);
1766 gimple_set_location (newop, gimple_location (stmt));
1767 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1768 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1769 tem, NULL_TREE, NULL_TREE);
1770 update_stmt (gsi_stmt (*gsi));
1771 return true;
1774 /* For bitwise binary operations apply operand conversions to the
1775 binary operation result instead of to the operands. This allows
1776 to combine successive conversions and bitwise binary operations. */
1777 if (CONVERT_EXPR_CODE_P (def1_code)
1778 && CONVERT_EXPR_CODE_P (def2_code)
1779 && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
1780 /* Make sure that the conversion widens the operands, or has same
1781 precision, or that it changes the operation to a bitfield
1782 precision. */
1783 && ((TYPE_PRECISION (TREE_TYPE (def1_arg1))
1784 <= TYPE_PRECISION (TREE_TYPE (arg1)))
1785 || (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (arg1)))
1786 != MODE_INT)
1787 || (TYPE_PRECISION (TREE_TYPE (arg1))
1788 != GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg1))))))
1790 gimple newop;
1791 tree tem = create_tmp_reg (TREE_TYPE (def1_arg1),
1792 NULL);
1793 newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
1794 tem = make_ssa_name (tem, newop);
1795 gimple_assign_set_lhs (newop, tem);
1796 gimple_set_location (newop, gimple_location (stmt));
1797 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1798 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1799 tem, NULL_TREE, NULL_TREE);
1800 update_stmt (gsi_stmt (*gsi));
1801 return true;
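/* For example (illustrative, names invented), with a_1 and b_2 both
   unsigned short:

     _1 = (int) a_1;                 tem_4 = a_1 & b_2;
     _2 = (int) b_2;          -->    _3 = (int) tem_4;
     _3 = _1 & _2;

   Two widening conversions collapse into a single one applied to the
   result of the narrower operation.  */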
1804 /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2). */
1805 if (code == BIT_AND_EXPR
1806 && def1_code == BIT_IOR_EXPR
1807 && TREE_CODE (arg2) == INTEGER_CST
1808 && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
1810 tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
1811 arg2, gimple_assign_rhs2 (def1));
1812 tree tem;
1813 gimple newop;
1814 if (integer_zerop (cst))
1816 gimple_assign_set_rhs1 (stmt, def1_arg1);
1817 update_stmt (stmt);
1818 return true;
1820 tem = create_tmp_reg (TREE_TYPE (arg2), NULL);
1821 newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
1822 tem, def1_arg1, arg2);
1823 tem = make_ssa_name (tem, newop);
1824 gimple_assign_set_lhs (newop, tem);
1825 gimple_set_location (newop, gimple_location (stmt));
1826 /* Make sure the new stmt is re-processed, since combining walks upwards. */
1827 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
1828 gimple_assign_set_rhs1 (stmt, tem);
1829 gimple_assign_set_rhs2 (stmt, cst);
1830 gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
1831 update_stmt (stmt);
1832 return true;
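/* For example (illustrative): (a | 3) & 7 becomes (a & 7) | 3, since
   3 & 7 = 3; when the folded constant is zero, as in (a | 0xf0) & 0x0f,
   the BIT_IOR drops out entirely and the result is just a & 0x0f.  */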
1835 /* Combine successive equal operations with constants. */
1836 if ((code == BIT_AND_EXPR
1837 || code == BIT_IOR_EXPR
1838 || code == BIT_XOR_EXPR)
1839 && def1_code == code
1840 && TREE_CODE (arg2) == INTEGER_CST
1841 && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
1843 tree cst = fold_build2 (code, TREE_TYPE (arg2),
1844 arg2, gimple_assign_rhs2 (def1));
1845 gimple_assign_set_rhs1 (stmt, def1_arg1);
1846 gimple_assign_set_rhs2 (stmt, cst);
1847 update_stmt (stmt);
1848 return true;
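/* For example (illustrative): (a & 0xf0) & 0x3c becomes a & 0x30,
   and (a ^ 5) ^ 3 becomes a ^ 6.  */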
1851 /* Canonicalize X ^ ~0 to ~X. */
1852 if (code == BIT_XOR_EXPR
1853 && TREE_CODE (arg2) == INTEGER_CST
1854 && integer_all_onesp (arg2))
1856 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
1857 gcc_assert (gsi_stmt (*gsi) == stmt);
1858 update_stmt (stmt);
1859 return true;
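/* For example (illustrative): for unsigned int x,
   x ^ 0xffffffff becomes ~x.  */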
1862 /* Try simple folding for X op !X, and X op X. */
1863 res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
1864 if (res != NULL_TREE)
1866 gimple_assign_set_rhs_from_tree (gsi, res);
1867 update_stmt (gsi_stmt (*gsi));
1868 return true;
1871 return false;
1875 /* Perform re-associations of the plus or minus statement STMT that are
1876 always permitted. Returns true if the CFG was changed. */
1878 static bool
1879 associate_plusminus (gimple stmt)
1881 tree rhs1 = gimple_assign_rhs1 (stmt);
1882 tree rhs2 = gimple_assign_rhs2 (stmt);
1883 enum tree_code code = gimple_assign_rhs_code (stmt);
1884 gimple_stmt_iterator gsi;
1885 bool changed;
1887 /* We can't reassociate at all for saturating types. */
1888 if (TYPE_SATURATING (TREE_TYPE (rhs1)))
1889 return false;
1891 /* First contract negates. */
1892 do
1893 {
1894 changed = false;
1896 /* A +- (-B) -> A -+ B. */
1897 if (TREE_CODE (rhs2) == SSA_NAME)
1899 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
1900 if (is_gimple_assign (def_stmt)
1901 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
1902 && can_propagate_from (def_stmt))
1904 code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
1905 gimple_assign_set_rhs_code (stmt, code);
1906 rhs2 = gimple_assign_rhs1 (def_stmt);
1907 gimple_assign_set_rhs2 (stmt, rhs2);
1908 gimple_set_modified (stmt, true);
1909 changed = true;
1913 /* (-A) + B -> B - A. */
1914 if (TREE_CODE (rhs1) == SSA_NAME
1915 && code == PLUS_EXPR)
1917 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
1918 if (is_gimple_assign (def_stmt)
1919 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
1920 && can_propagate_from (def_stmt))
1922 code = MINUS_EXPR;
1923 gimple_assign_set_rhs_code (stmt, code);
1924 rhs1 = rhs2;
1925 gimple_assign_set_rhs1 (stmt, rhs1);
1926 rhs2 = gimple_assign_rhs1 (def_stmt);
1927 gimple_assign_set_rhs2 (stmt, rhs2);
1928 gimple_set_modified (stmt, true);
1929 changed = true;
1933 while (changed);
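/* For example (illustrative): a - (-b) becomes a + b, and (-a) + b
   becomes b - a.  The loop repeats because the replacement operand
   may itself be defined by a negate.  */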
1935 /* We can't reassociate floating-point or fixed-point plus or minus
1936 because of saturation to +-Inf. */
1937 if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
1938 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
1939 goto out;
1941 /* Second match patterns that allow contracting a plus-minus pair
1942 irrespective of overflow issues.
1944 (A +- B) - A -> +- B
1945 (A +- B) -+ B -> A
1946 (CST +- A) +- CST -> CST +- A
1947 (A + CST) +- CST -> A + CST
1948 ~A + A -> -1
1949 ~A + 1 -> -A
1950 A - (A +- B) -> -+ B
1951 A +- (B +- A) -> +- B
1952 CST +- (CST +- A) -> CST +- A
1953 CST +- (A +- CST) -> CST +- A
1954 A + ~A -> -1
1956 by commuting the addition and contracting operations to zero
1957 via reassociation. */
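/* Concretely (illustrative): (a + b) - a becomes b, (a - b) + b
   becomes a, ~a + a becomes -1, and ~a + 1 becomes -a.  */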
1959 gsi = gsi_for_stmt (stmt);
1960 if (TREE_CODE (rhs1) == SSA_NAME)
1962 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
1963 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
1965 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
1966 if (def_code == PLUS_EXPR
1967 || def_code == MINUS_EXPR)
1969 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
1970 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
1971 if (operand_equal_p (def_rhs1, rhs2, 0)
1972 && code == MINUS_EXPR)
1974 /* (A +- B) - A -> +- B. */
1975 code = ((def_code == PLUS_EXPR)
1976 ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
1977 rhs1 = def_rhs2;
1978 rhs2 = NULL_TREE;
1979 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1980 gcc_assert (gsi_stmt (gsi) == stmt);
1981 gimple_set_modified (stmt, true);
1983 else if (operand_equal_p (def_rhs2, rhs2, 0)
1984 && code != def_code)
1986 /* (A +- B) -+ B -> A. */
1987 code = TREE_CODE (def_rhs1);
1988 rhs1 = def_rhs1;
1989 rhs2 = NULL_TREE;
1990 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1991 gcc_assert (gsi_stmt (gsi) == stmt);
1992 gimple_set_modified (stmt, true);
1994 else if (TREE_CODE (rhs2) == INTEGER_CST
1995 && TREE_CODE (def_rhs1) == INTEGER_CST)
1997 /* (CST +- A) +- CST -> CST +- A. */
1998 tree cst = fold_binary (code, TREE_TYPE (rhs1),
1999 def_rhs1, rhs2);
2000 if (cst && !TREE_OVERFLOW (cst))
2002 code = def_code;
2003 gimple_assign_set_rhs_code (stmt, code);
2004 rhs1 = cst;
2005 gimple_assign_set_rhs1 (stmt, rhs1);
2006 rhs2 = def_rhs2;
2007 gimple_assign_set_rhs2 (stmt, rhs2);
2008 gimple_set_modified (stmt, true);
2011 else if (TREE_CODE (rhs2) == INTEGER_CST
2012 && TREE_CODE (def_rhs2) == INTEGER_CST
2013 && def_code == PLUS_EXPR)
2015 /* (A + CST) +- CST -> A + CST. */
2016 tree cst = fold_binary (code, TREE_TYPE (rhs1),
2017 def_rhs2, rhs2);
2018 if (cst && !TREE_OVERFLOW (cst))
2020 code = PLUS_EXPR;
2021 gimple_assign_set_rhs_code (stmt, code);
2022 rhs1 = def_rhs1;
2023 gimple_assign_set_rhs1 (stmt, rhs1);
2024 rhs2 = cst;
2025 gimple_assign_set_rhs2 (stmt, rhs2);
2026 gimple_set_modified (stmt, true);
2030 else if (def_code == BIT_NOT_EXPR
2031 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
2033 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2034 if (code == PLUS_EXPR
2035 && operand_equal_p (def_rhs1, rhs2, 0))
2037 /* ~A + A -> -1. */
2038 code = INTEGER_CST;
2039 rhs1 = build_int_cst_type (TREE_TYPE (rhs2), -1);
2040 rhs2 = NULL_TREE;
2041 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
2042 gcc_assert (gsi_stmt (gsi) == stmt);
2043 gimple_set_modified (stmt, true);
2045 else if (code == PLUS_EXPR
2046 && integer_onep (rhs1))
2048 /* ~A + 1 -> -A. */
2049 code = NEGATE_EXPR;
2050 rhs1 = def_rhs1;
2051 rhs2 = NULL_TREE;
2052 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
2053 gcc_assert (gsi_stmt (gsi) == stmt);
2054 gimple_set_modified (stmt, true);
2060 if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
2062 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2063 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2065 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2066 if (def_code == PLUS_EXPR
2067 || def_code == MINUS_EXPR)
2069 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2070 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2071 if (operand_equal_p (def_rhs1, rhs1, 0)
2072 && code == MINUS_EXPR)
2074 /* A - (A +- B) -> -+ B. */
2075 code = ((def_code == PLUS_EXPR)
2076 ? NEGATE_EXPR : TREE_CODE (def_rhs2));
2077 rhs1 = def_rhs2;
2078 rhs2 = NULL_TREE;
2079 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
2080 gcc_assert (gsi_stmt (gsi) == stmt);
2081 gimple_set_modified (stmt, true);
2083 else if (operand_equal_p (def_rhs2, rhs1, 0)
2084 && code != def_code)
2086 /* A +- (B +- A) -> +- B. */
2087 code = ((code == PLUS_EXPR)
2088 ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
2089 rhs1 = def_rhs1;
2090 rhs2 = NULL_TREE;
2091 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
2092 gcc_assert (gsi_stmt (gsi) == stmt);
2093 gimple_set_modified (stmt, true);
2095 else if (TREE_CODE (rhs1) == INTEGER_CST
2096 && TREE_CODE (def_rhs1) == INTEGER_CST)
2098 /* CST +- (CST +- A) -> CST +- A. */
2099 tree cst = fold_binary (code, TREE_TYPE (rhs2),
2100 rhs1, def_rhs1);
2101 if (cst && !TREE_OVERFLOW (cst))
2103 code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
2104 gimple_assign_set_rhs_code (stmt, code);
2105 rhs1 = cst;
2106 gimple_assign_set_rhs1 (stmt, rhs1);
2107 rhs2 = def_rhs2;
2108 gimple_assign_set_rhs2 (stmt, rhs2);
2109 gimple_set_modified (stmt, true);
2112 else if (TREE_CODE (rhs1) == INTEGER_CST
2113 && TREE_CODE (def_rhs2) == INTEGER_CST)
2115 /* CST +- (A +- CST) -> CST +- A. */
2116 tree cst = fold_binary (def_code == code
2117 ? PLUS_EXPR : MINUS_EXPR,
2118 TREE_TYPE (rhs2),
2119 rhs1, def_rhs2);
2120 if (cst && !TREE_OVERFLOW (cst))
2122 rhs1 = cst;
2123 gimple_assign_set_rhs1 (stmt, rhs1);
2124 rhs2 = def_rhs1;
2125 gimple_assign_set_rhs2 (stmt, rhs2);
2126 gimple_set_modified (stmt, true);
2130 else if (def_code == BIT_NOT_EXPR
2131 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
2133 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2134 if (code == PLUS_EXPR
2135 && operand_equal_p (def_rhs1, rhs1, 0))
2137 /* A + ~A -> -1. */
2138 code = INTEGER_CST;
2139 rhs1 = build_int_cst_type (TREE_TYPE (rhs1), -1);
2140 rhs2 = NULL_TREE;
2141 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
2142 gcc_assert (gsi_stmt (gsi) == stmt);
2143 gimple_set_modified (stmt, true);
2149 out:
2150 if (gimple_modified_p (stmt))
2152 fold_stmt_inplace (stmt);
2153 update_stmt (stmt);
2154 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
2155 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2156 return true;
2159 return false;
2162 /* Combine two conversions in a row for the second conversion at *GSI.
2163 Returns 1 if any changes were made, 2 if cfg-cleanup needs to
2164 run, and 0 otherwise. */
2166 static int
2167 combine_conversions (gimple_stmt_iterator *gsi)
2169 gimple stmt = gsi_stmt (*gsi);
2170 gimple def_stmt;
2171 tree op0, lhs;
2172 enum tree_code code = gimple_assign_rhs_code (stmt);
2174 gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
2175 || code == FLOAT_EXPR
2176 || code == FIX_TRUNC_EXPR);
2178 lhs = gimple_assign_lhs (stmt);
2179 op0 = gimple_assign_rhs1 (stmt);
2180 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
2182 gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
2183 return 1;
2186 if (TREE_CODE (op0) != SSA_NAME)
2187 return 0;
2189 def_stmt = SSA_NAME_DEF_STMT (op0);
2190 if (!is_gimple_assign (def_stmt))
2191 return 0;
2193 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
2195 tree defop0 = gimple_assign_rhs1 (def_stmt);
2196 tree type = TREE_TYPE (lhs);
2197 tree inside_type = TREE_TYPE (defop0);
2198 tree inter_type = TREE_TYPE (op0);
2199 int inside_int = INTEGRAL_TYPE_P (inside_type);
2200 int inside_ptr = POINTER_TYPE_P (inside_type);
2201 int inside_float = FLOAT_TYPE_P (inside_type);
2202 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
2203 unsigned int inside_prec = TYPE_PRECISION (inside_type);
2204 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
2205 int inter_int = INTEGRAL_TYPE_P (inter_type);
2206 int inter_ptr = POINTER_TYPE_P (inter_type);
2207 int inter_float = FLOAT_TYPE_P (inter_type);
2208 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
2209 unsigned int inter_prec = TYPE_PRECISION (inter_type);
2210 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
2211 int final_int = INTEGRAL_TYPE_P (type);
2212 int final_ptr = POINTER_TYPE_P (type);
2213 int final_float = FLOAT_TYPE_P (type);
2214 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
2215 unsigned int final_prec = TYPE_PRECISION (type);
2216 int final_unsignedp = TYPE_UNSIGNED (type);
2218 /* In addition to the cases of two conversions in a row
2219 handled below, if we are converting something to its own
2220 type via an object of identical or wider precision, neither
2221 conversion is needed. */
2222 if (useless_type_conversion_p (type, inside_type)
2223 && (((inter_int || inter_ptr) && final_int)
2224 || (inter_float && final_float))
2225 && inter_prec >= final_prec)
2227 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2228 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2229 update_stmt (stmt);
2230 return remove_prop_source_from_use (op0) ? 2 : 1;
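/* For example (illustrative): for char c, the conversion pair in
   (char)(int) c is useless, since int is at least as wide as char and
   the value is converted back to its own type; the statement becomes
   a plain copy of c.  */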
2233 /* Likewise, if the intermediate and initial types are either both
2234 float or both integer, we don't need the middle conversion if the
2235 former is wider than the latter and doesn't change the signedness
2236 (for integers). Avoid this if the final type is a pointer since
2237 then we sometimes need the middle conversion. Likewise if the
2238 final type has a precision not equal to the size of its mode. */
2239 if (((inter_int && inside_int)
2240 || (inter_float && inside_float)
2241 || (inter_vec && inside_vec))
2242 && inter_prec >= inside_prec
2243 && (inter_float || inter_vec
2244 || inter_unsignedp == inside_unsignedp)
2245 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
2246 && TYPE_MODE (type) == TYPE_MODE (inter_type))
2247 && ! final_ptr
2248 && (! final_vec || inter_prec == inside_prec))
2250 gimple_assign_set_rhs1 (stmt, defop0);
2251 update_stmt (stmt);
2252 return remove_prop_source_from_use (op0) ? 2 : 1;
2255 /* If we have a sign-extension of a zero-extended value, we can
2256 replace that by a single zero-extension. */
2257 if (inside_int && inter_int && final_int
2258 && inside_prec < inter_prec && inter_prec < final_prec
2259 && inside_unsignedp && !inter_unsignedp)
2261 gimple_assign_set_rhs1 (stmt, defop0);
2262 update_stmt (stmt);
2263 return remove_prop_source_from_use (op0) ? 2 : 1;
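/* For example (illustrative, names invented): for unsigned char c_1,

     _1 = (short) c_1;        -->    _2 = (int) c_1;
     _2 = (int) _1;

   The zero-extension into short leaves the sign bit clear, so the
   outer sign-extension acts as a zero-extension and a single direct
   widening suffices.  */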
2266 /* Two conversions in a row are not needed unless:
2267 - some conversion is floating-point (overstrict for now), or
2268 - some conversion is a vector (overstrict for now), or
2269 - the intermediate type is narrower than both initial and
2270 final, or
2271 - the intermediate type and innermost type differ in signedness,
2272 and the outermost type is wider than the intermediate, or
2273 - the initial type is a pointer type and the precisions of the
2274 intermediate and final types differ, or
2275 - the final type is a pointer type and the precisions of the
2276 initial and intermediate types differ. */
2277 if (! inside_float && ! inter_float && ! final_float
2278 && ! inside_vec && ! inter_vec && ! final_vec
2279 && (inter_prec >= inside_prec || inter_prec >= final_prec)
2280 && ! (inside_int && inter_int
2281 && inter_unsignedp != inside_unsignedp
2282 && inter_prec < final_prec)
2283 && ((inter_unsignedp && inter_prec > inside_prec)
2284 == (final_unsignedp && final_prec > inter_prec))
2285 && ! (inside_ptr && inter_prec != final_prec)
2286 && ! (final_ptr && inside_prec != inter_prec)
2287 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
2288 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
2290 gimple_assign_set_rhs1 (stmt, defop0);
2291 update_stmt (stmt);
2292 return remove_prop_source_from_use (op0) ? 2 : 1;
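/* For example (illustrative): for short s, (long)(int) s is
   equivalent to (long) s; all three types are integral, the
   intermediate int is at least as wide as short, and the signedness
   agrees throughout, so the middle conversion is dropped.  */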
2295 /* A truncation to an unsigned type should be canonicalized as
2296 a bitwise AND with a mask. */
2297 if (final_int && inter_int && inside_int
2298 && final_prec == inside_prec
2299 && final_prec > inter_prec
2300 && inter_unsignedp)
2302 tree tem;
2303 tem = fold_build2 (BIT_AND_EXPR, inside_type,
2304 defop0,
2305 double_int_to_tree
2306 (inside_type, double_int_mask (inter_prec)));
2307 if (!useless_type_conversion_p (type, inside_type))
2309 tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
2310 GSI_SAME_STMT);
2311 gimple_assign_set_rhs1 (stmt, tem);
2313 else
2314 gimple_assign_set_rhs_from_tree (gsi, tem);
2315 update_stmt (gsi_stmt (*gsi));
2316 return 1;
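/* For example (illustrative): for int x, (int)(unsigned char) x is
   canonicalized to x & 255, a form later passes can reason about more
   easily than a truncate-then-extend pair.  */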
2320 return 0;
2323 /* Main entry point for the forward propagation and statement combine
2324 optimizer. */
2326 static unsigned int
2327 ssa_forward_propagate_and_combine (void)
2329 basic_block bb;
2330 unsigned int todoflags = 0;
2332 cfg_changed = false;
2334 FOR_EACH_BB (bb)
2336 gimple_stmt_iterator gsi, prev;
2337 bool prev_initialized;
2339 /* Apply forward propagation to all stmts in the basic-block.
2340 Note we update GSI within the loop as necessary. */
2341 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
2343 gimple stmt = gsi_stmt (gsi);
2344 tree lhs, rhs;
2345 enum tree_code code;
2347 if (!is_gimple_assign (stmt))
2349 gsi_next (&gsi);
2350 continue;
2353 lhs = gimple_assign_lhs (stmt);
2354 rhs = gimple_assign_rhs1 (stmt);
2355 code = gimple_assign_rhs_code (stmt);
2356 if (TREE_CODE (lhs) != SSA_NAME
2357 || has_zero_uses (lhs))
2359 gsi_next (&gsi);
2360 continue;
2363 /* If this statement sets an SSA_NAME to an address,
2364 try to propagate the address into the uses of the SSA_NAME. */
2365 if (code == ADDR_EXPR
2366 /* Handle pointer conversions on invariant addresses
2367 as well, as this is valid gimple. */
2368 || (CONVERT_EXPR_CODE_P (code)
2369 && TREE_CODE (rhs) == ADDR_EXPR
2370 && POINTER_TYPE_P (TREE_TYPE (lhs))))
2372 tree base = get_base_address (TREE_OPERAND (rhs, 0));
2373 if ((!base
2374 || !DECL_P (base)
2375 || decl_address_invariant_p (base))
2376 && !stmt_references_abnormal_ssa_name (stmt)
2377 && forward_propagate_addr_expr (lhs, rhs))
2379 release_defs (stmt);
2380 todoflags |= TODO_remove_unused_locals;
2381 gsi_remove (&gsi, true);
2383 else
2384 gsi_next (&gsi);
2386 else if (code == POINTER_PLUS_EXPR && can_propagate_from (stmt))
2388 if (TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
2389 /* ??? Better adjust the interface to that function
2390 instead of building new trees here. */
2391 && forward_propagate_addr_expr
2392 (lhs,
2393 build1 (ADDR_EXPR,
2394 TREE_TYPE (rhs),
2395 fold_build2 (MEM_REF,
2396 TREE_TYPE (TREE_TYPE (rhs)),
2397 rhs,
2398 fold_convert
2399 (ptr_type_node,
2400 gimple_assign_rhs2 (stmt))))))
2402 release_defs (stmt);
2403 todoflags |= TODO_remove_unused_locals;
2404 gsi_remove (&gsi, true);
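/* For example (illustrative, names invented): given

     ptr_2 = ptr_1 + 4;
     _3 = *ptr_2;

   the constant-offset address is rebuilt as &MEM[ptr_1 + 4] so that
   forward_propagate_addr_expr can fold it into the dereference.  */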
2406 else if (is_gimple_min_invariant (rhs))
2408 /* Make sure to fold &a[0] + off_1 here. */
2409 fold_stmt_inplace (stmt);
2410 update_stmt (stmt);
2411 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2412 gsi_next (&gsi);
2414 else
2415 gsi_next (&gsi);
2417 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2419 if (forward_propagate_comparison (stmt))
2420 cfg_changed = true;
2421 gsi_next (&gsi);
2423 else
2424 gsi_next (&gsi);
2427 /* Combine stmts with the stmts defining their operands.
2428 Note we update GSI within the loop as necessary. */
2429 prev_initialized = false;
2430 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
2432 gimple stmt = gsi_stmt (gsi);
2433 bool changed = false;
2435 switch (gimple_code (stmt))
2437 case GIMPLE_ASSIGN:
2439 tree rhs1 = gimple_assign_rhs1 (stmt);
2440 enum tree_code code = gimple_assign_rhs_code (stmt);
2442 if ((code == BIT_NOT_EXPR
2443 || code == NEGATE_EXPR)
2444 && TREE_CODE (rhs1) == SSA_NAME)
2445 changed = simplify_not_neg_expr (&gsi);
2446 else if (code == COND_EXPR)
2448 /* In this case the entire COND_EXPR is in rhs1. */
2449 int did_something;
2450 did_something = forward_propagate_into_cond (&gsi);
2451 stmt = gsi_stmt (gsi);
2452 if (did_something == 2)
2453 cfg_changed = true;
2454 changed = did_something != 0;
2456 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2458 int did_something;
2459 did_something = forward_propagate_into_comparison (&gsi);
2460 if (did_something == 2)
2461 cfg_changed = true;
2462 changed = did_something != 0;
2464 else if (code == BIT_AND_EXPR
2465 || code == BIT_IOR_EXPR
2466 || code == BIT_XOR_EXPR)
2467 changed = simplify_bitwise_binary (&gsi);
2468 else if (code == PLUS_EXPR
2469 || code == MINUS_EXPR)
2470 changed = associate_plusminus (stmt);
2471 else if (CONVERT_EXPR_CODE_P (code)
2472 || code == FLOAT_EXPR
2473 || code == FIX_TRUNC_EXPR)
2475 int did_something = combine_conversions (&gsi);
2476 if (did_something == 2)
2477 cfg_changed = true;
2478 changed = did_something != 0;
2480 break;
2483 case GIMPLE_SWITCH:
2484 changed = simplify_gimple_switch (stmt);
2485 break;
2487 case GIMPLE_COND:
2489 int did_something;
2490 did_something = forward_propagate_into_gimple_cond (stmt);
2491 if (did_something == 2)
2492 cfg_changed = true;
2493 changed = did_something != 0;
2494 break;
2497 case GIMPLE_CALL:
2499 tree callee = gimple_call_fndecl (stmt);
2500 if (callee != NULL_TREE
2501 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
2502 changed = simplify_builtin_call (&gsi, callee);
2503 break;
2506 default:;
2509 if (changed)
2511 /* If the stmt changed then re-visit it and the statements
2512 inserted before it. */
2513 if (!prev_initialized)
2514 gsi = gsi_start_bb (bb);
2515 else
2517 gsi = prev;
2518 gsi_next (&gsi);
2521 else
2523 prev = gsi;
2524 prev_initialized = true;
2525 gsi_next (&gsi);
2530 if (cfg_changed)
2531 todoflags |= TODO_cleanup_cfg;
2533 return todoflags;
2537 static bool
2538 gate_forwprop (void)
2540 return flag_tree_forwprop;
2543 struct gimple_opt_pass pass_forwprop =
2546 GIMPLE_PASS,
2547 "forwprop", /* name */
2548 gate_forwprop, /* gate */
2549 ssa_forward_propagate_and_combine, /* execute */
2550 NULL, /* sub */
2551 NULL, /* next */
2552 0, /* static_pass_number */
2553 TV_TREE_FORWPROP, /* tv_id */
2554 PROP_cfg | PROP_ssa, /* properties_required */
2555 0, /* properties_provided */
2556 0, /* properties_destroyed */
2557 0, /* todo_flags_start */
2558 TODO_ggc_collect
2559 | TODO_update_ssa
2560 | TODO_verify_ssa /* todo_flags_finish */