/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "timevar.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-dump.h"
#include "langhooks.h"
#include "flags.h"
#include "gimple.h"
#include "expr.h"

/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or
     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither have volatile on them):
     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or
     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions,
   which allows us to remove the cast, and we propagate a
   {NOT_EXPR,NEG_EXPR} into a subsequent {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */

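/* As a concrete illustration (a sketch only; the exact GIMPLE spelling
   depends on the front end and earlier passes), source code like

     int f (int a, int b)
     {
       int t = (a < b);
       if (t)
	 return 3;
       return 7;
     }

   reaches this pass roughly as

     t_1 = a_2(D) < b_3(D);
     if (t_1 != 0) goto <bb 3>; else goto <bb 4>;

   and the first transformation above rewrites the condition to

     if (a_2(D) < b_3(D)) goto <bb 3>; else goto <bb 4>;

   leaving t_1 dead.  */
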
static bool forward_propagate_addr_expr (tree name, tree rhs);

/* Set to true if we delete EH edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple stmt);

/* Get the next statement we can propagate NAME's value into, skipping
   trivial copies.  Returns the statement that is suitable as a
   propagation destination or NULL if there is no such one.
   This only returns destinations in a single-use chain.  If
   FINAL_NAME_P is non-NULL, the ssa name that represents the use
   is written to it.  */

static gimple
get_prop_dest_stmt (tree name, tree *final_name_p)
{
  use_operand_p use;
  gimple use_stmt;

  do {
    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))
      return NULL;

    /* If this is not a trivial copy, we found it.  */
    if (!gimple_assign_ssa_name_copy_p (use_stmt)
	|| gimple_assign_rhs1 (use_stmt) != name)
      break;

    /* Continue searching uses of the copy destination.  */
    name = gimple_assign_lhs (use_stmt);
  } while (1);

  if (final_name_p)
    *final_name_p = name;

  return use_stmt;
}

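/* Example (a sketch): given the single-use chain

     x_1 = a_2 < b_3;
     y_4 = x_1;
     z_5 = y_4;
     if (z_5 != 0) goto <bb 3>; else goto <bb 4>;

   get_prop_dest_stmt (x_1, &name) skips the two trivial copies and
   returns the GIMPLE_COND, setting name to z_5.  If any name on the
   chain had more than one immediate use, NULL would be returned.  */
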
/* Get the statement we can propagate from into NAME, skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL if there is no such one.
   If SINGLE_USE_ONLY is set, consider only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
	single_use = false;
	if (single_use_only)
	  return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is not a simple copy, we possibly found it.  */
    if (!gimple_assign_ssa_name_copy_p (def_stmt))
      {
	tree rhs;

	if (!single_use_only && single_use_p)
	  *single_use_p = single_use;

	/* We can look through pointer conversions in the search
	   for a useful stmt for the comparison folding.  */
	rhs = gimple_assign_rhs1 (def_stmt);
	if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
	    && TREE_CODE (rhs) == SSA_NAME
	    && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
	    && POINTER_TYPE_P (TREE_TYPE (rhs)))
	  name = rhs;
	else
	  return def_stmt;
      }
    else
      {
	/* Continue searching the def of the copy source name.  */
	name = gimple_assign_rhs1 (def_stmt);
      }
  } while (1);
}

/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can be always propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we cannot apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
	return false;
    }

  return true;
}

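/* Example of the function-pointer restriction above (a sketch): for

     fn_1 = (void (*) (void)) ptr_2;
     fn_1 ();

   we refuse to propagate from the conversion, since a target that
   canonicalizes function pointers may rely on the explicit conversion
   to perform that canonicalization.  */
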
/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (SSA_NAME_IN_FREE_LIST (name)
	|| SSA_NAME_IS_DEFAULT_DEF (name)
	|| !has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    if (gimple_code (stmt) == GIMPLE_PHI
	|| gimple_has_side_effects (stmt))
      return cfg_changed;

    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    gsi_remove (&gsi, true);
    release_defs (stmt);
    cfg_changed |= gimple_purge_dead_eh_edges (bb);

    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}

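/* Example (a sketch): after x_1 below has been propagated into its
   single use, the chain

     y_2 = b_3 < c_4;
     x_1 = y_2;

   is dead.  remove_prop_source_from_use (x_1) removes the copy, which
   leaves y_2 with zero uses, so its definition is removed as well; the
   walk follows the first operand of each deleted statement upward.  */
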
/* Return the rhs of a gimple_assign STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
			    gimple_assign_rhs2 (stmt),
			    gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
			    gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}

/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
			tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}

/* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
   of its operand.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (gimple stmt,
				     enum tree_code code, tree type,
				     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
	{
	  rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
	  tmp = combine_cond_expr_cond (stmt, code, type,
					rhs0, op1, !single_use0_p);
	  if (tmp)
	    return tmp;
	}
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
	{
	  rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
	  tmp = combine_cond_expr_cond (stmt, code, type,
					op0, rhs1, !single_use1_p);
	  if (tmp)
	    return tmp;
	}
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
				  rhs0, rhs1,
				  !(single_use0_p && single_use1_p));

  return tmp;
}

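/* Example (a sketch): for

     x_1 = a_2 + 1;
     t_3 = x_1 == 3;

   combining the comparison with the definition of x_1 asks fold for
   "(a_2 + 1) == 3", which simplifies to a_2 == 2.  When the feeding
   definition has more than one use, INVARIANT_ONLY is passed so that
   only constant results are accepted and no computation is
   duplicated.  */
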
/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
					     gimple_assign_rhs_code (stmt),
					     type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}

/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gimple stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
					     boolean_type_node,
					     rhs1, rhs2);
  if (tmp)
    {
      if (dump_file && tmp)
	{
	  fprintf (dump_file, "  Replaced '");
	  print_gimple_expr (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "' with '");
	  print_generic_expr (dump_file, tmp, 0);
	  fprintf (dump_file, "'\n");
	}

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	   && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
	   && integer_zerop (rhs2))
	  || (code == NE_EXPR
	      && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}

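/* Example of the edge-swapping canonicalization above (a sketch):

     _Bool b_1;
     if (b_1 == 0) goto <bb 3>; else goto <bb 4>;

   becomes

     if (b_1 != 0) goto <bb 4>; else goto <bb 3>;

   i.e. rather than inverting the test result, the condition is put
   into the canonical != 0 form and the true/false edge flags are
   exchanged.  */
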
/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);
  bool swap = false;

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
					       boolean_type_node,
					       TREE_OPERAND (cond, 0),
					       TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      enum tree_code code;
      tree name = cond;
      gimple def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
	return false;

      code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (code) == tcc_comparison)
	tmp = fold_build2_loc (gimple_location (def_stmt),
			       code,
			       boolean_type_node,
			       gimple_assign_rhs1 (def_stmt),
			       gimple_assign_rhs2 (def_stmt));
      else if ((code == BIT_NOT_EXPR
		&& TYPE_PRECISION (TREE_TYPE (cond)) == 1)
	       || (code == BIT_XOR_EXPR
		   && integer_onep (gimple_assign_rhs2 (def_stmt))))
	{
	  tmp = gimple_assign_rhs1 (def_stmt);
	  swap = true;
	}
    }

  if (tmp
      && is_gimple_condexpr (tmp))
    {
      if (dump_file && tmp)
	{
	  fprintf (dump_file, "  Replaced '");
	  print_generic_expr (dump_file, cond, 0);
	  fprintf (dump_file, "' with '");
	  print_generic_expr (dump_file, tmp, 0);
	  fprintf (dump_file, "'\n");
	}

      if (integer_onep (tmp))
	gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
      else if (integer_zerop (tmp))
	gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
      else
	{
	  gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
	  if (swap)
	    {
	      tree t = gimple_assign_rhs2 (stmt);
	      gimple_assign_set_rhs2 (stmt, gimple_assign_rhs3 (stmt));
	      gimple_assign_set_rhs3 (stmt, t);
	    }
	}
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return true;
    }

  return false;
}

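/* Example (a sketch): with b_2 of 1-bit precision, the COND_EXPR

     t_1 = ~b_2;
     x_3 = t_1 ? c_4 : d_5;

   is simplified by taking b_2 itself as the condition and swapping the
   two arms:

     x_3 = b_2 ? d_5 : c_4;  */
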
/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
    cfg_changed = true;

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}

/* DEF_RHS contains the address of the 0th element in an array.
   USE_STMT uses the type of DEF_RHS to compute the address of an
   arbitrary element within the array.  The (variable) byte offset
   of the element is contained in OFFSET.

   We walk back through the use-def chains of OFFSET to verify that
   it is indeed computing the offset of an element within the array
   and extract the index corresponding to the given byte offset.

   We then try to fold the entire address expression into a form
   &array[index].

   If we are successful, we replace the right hand side of USE_STMT
   with the new address computation.  */

static bool
forward_propagate_addr_into_variable_array_index (tree offset,
						  tree def_rhs,
						  gimple_stmt_iterator *use_stmt_gsi)
{
  tree index, tunit;
  gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
  tree new_rhs, tmp;

  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
    tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
  else if (TREE_CODE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))) == ARRAY_TYPE)
    tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))));
  else
    return false;
  if (!host_integerp (tunit, 1))
    return false;

  /* Get the offset's defining statement.  */
  offset_def = SSA_NAME_DEF_STMT (offset);

  /* Try to find an expression for a proper index.  This is either a
     multiplication expression by the element size or just the ssa name we came
     along in case the element size is one.  In that case, however, we do not
     allow multiplications because they can be computing index to a higher
     level dimension (PR 37861).  */
  if (integer_onep (tunit))
    {
      if (is_gimple_assign (offset_def)
	  && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
	return false;

      index = offset;
    }
  else
    {
      /* The statement which defines OFFSET before type conversion
	 must be a simple GIMPLE_ASSIGN.  */
      if (!is_gimple_assign (offset_def))
	return false;

      /* The RHS of the statement which defines OFFSET must be a
	 multiplication of an object by the size of the array elements.
	 This implicitly verifies that the size of the array elements
	 is constant.  */
      if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	  && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	  && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
	{
	  /* The first operand to the MULT_EXPR is the desired index.  */
	  index = gimple_assign_rhs1 (offset_def);
	}
      /* If we have idx * tunit + CST * tunit re-associate that.  */
      else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
		|| gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
	       && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
	       && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	       && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
						gimple_assign_rhs2 (offset_def),
						tunit)) != NULL_TREE)
	{
	  gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
	  if (is_gimple_assign (offset_def2)
	      && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
	    {
	      index = fold_build2 (gimple_assign_rhs_code (offset_def),
				   TREE_TYPE (offset),
				   gimple_assign_rhs1 (offset_def2), tmp);
	    }
	  else
	    return false;
	}
      else
	return false;
    }

  /* Replace the pointer addition with array indexing.  */
  index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
				    true, GSI_SAME_STMT);
  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
    {
      new_rhs = unshare_expr (def_rhs);
      TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 1) = index;
    }
  else
    {
      new_rhs = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))),
			unshare_expr (TREE_OPERAND (def_rhs, 0)),
			index, integer_zero_node, NULL_TREE);
      new_rhs = build_fold_addr_expr (new_rhs);
      if (!useless_type_conversion_p (TREE_TYPE (gimple_assign_lhs (use_stmt)),
				      TREE_TYPE (new_rhs)))
	{
	  new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true,
					      NULL_TREE, true, GSI_SAME_STMT);
	  new_rhs = fold_convert (TREE_TYPE (gimple_assign_lhs (use_stmt)),
				  new_rhs);
	}
    }
  gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
  fold_stmt (use_stmt_gsi);
  tidy_after_forward_propagate_addr (gsi_stmt (*use_stmt_gsi));
  return true;
}

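/* Example (a sketch): for int a[16], i.e. tunit == 4,

     ptr_1 = &a[0];
     off_2 = i_3 * 4;
     ptr2_4 = ptr_1 p+ off_2;

   the MULT_EXPR case recovers i_3 as the index and rewrites the
   pointer addition to

     ptr2_4 = &a[i_3];

   The PLUS_EXPR/MINUS_EXPR case re-associates off_2 = i_3 * 4 + 8 the
   same way, folding the constant into the index as i_3 + 2.  */
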
/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (note that the
   propagation can be less than totally successful, yet things may
   still have been changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
			       gimple_stmt_iterator *use_stmt_gsi,
			       bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Trivial cases.  The use statement could be a trivial copy or a
     useless conversion.  Recurse to the uses of the lhs as copyprop does
     not copy through different variant pointers and FRE does not catch
     all useless conversions.  Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && ((rhs_code == SSA_NAME && rhs == name)
	  || CONVERT_EXPR_CODE_P (rhs_code)))
    {
      /* Only recurse if we don't deal with a single use or we cannot
	 do the propagation to the current statement.  In particular
	 we can end up with a conversion needed for a non-invariant
	 address which we cannot do in a single statement.  */
      if (!single_use_p
	  || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
	      && (!is_gimple_min_invariant (def_rhs)
		  || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		      && POINTER_TYPE_P (TREE_TYPE (def_rhs))
		      && (TYPE_PRECISION (TREE_TYPE (lhs))
			  > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
	return forward_propagate_addr_expr (lhs, def_rhs);

      gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
	gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
      else
	gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
      return true;
    }

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
	 to make sure we can build a valid constant offsetted address
	 for further propagation.  Simply rely on fold building that
	 and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
				 def_rhs,
				 fold_convert (ptr_type_node,
					       gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
	  && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
	return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
						    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
	 bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
	  && forward_propagate_addr_expr (lhs, new_def_rhs))
	return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
	gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
					new_def_rhs, NULL_TREE);
      else if (is_gimple_min_invariant (new_def_rhs))
	gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
					new_def_rhs, NULL_TREE);
      else
	return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  lhs = gimple_assign_lhs (use_stmt);
  while (handled_component_p (lhs))
    lhs = TREE_OPERAND (lhs, 0);

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
							 &def_rhs_offset)))
	{
	  double_int off = mem_ref_offset (lhs);
	  tree new_ptr;
	  off = double_int_add (off,
				shwi_to_double_int (def_rhs_offset));
	  if (TREE_CODE (def_rhs_base) == MEM_REF)
	    {
	      off = double_int_add (off, mem_ref_offset (def_rhs_base));
	      new_ptr = TREE_OPERAND (def_rhs_base, 0);
	    }
	  else
	    new_ptr = build_fold_addr_expr (def_rhs_base);
	  TREE_OPERAND (lhs, 0) = new_ptr;
	  TREE_OPERAND (lhs, 1)
	    = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
	  tidy_after_forward_propagate_addr (use_stmt);
	  /* Continue propagating into the RHS if this was not the only use.  */
	  if (single_use_p)
	    return true;
	}
      /* If the LHS is a plain dereference and the value type is the same as
	 that of the pointed-to type of the address we can put the
	 dereferenced address on the LHS preserving the original alias-type.  */
      else if (gimple_assign_lhs (use_stmt) == lhs
	       && useless_type_conversion_p
		    (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
		     TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
	{
	  tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
	  tree new_offset, new_base, saved;
	  while (handled_component_p (*def_rhs_basep))
	    def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
	  saved = *def_rhs_basep;
	  if (TREE_CODE (*def_rhs_basep) == MEM_REF)
	    {
	      new_base = TREE_OPERAND (*def_rhs_basep, 0);
	      new_offset
		= int_const_binop (PLUS_EXPR, TREE_OPERAND (lhs, 1),
				   TREE_OPERAND (*def_rhs_basep, 1));
	    }
	  else
	    {
	      new_base = build_fold_addr_expr (*def_rhs_basep);
	      new_offset = TREE_OPERAND (lhs, 1);
	    }
	  *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
				   new_base, new_offset);
	  TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
	  TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
	  gimple_assign_set_lhs (use_stmt,
				 unshare_expr (TREE_OPERAND (def_rhs, 0)));
	  *def_rhs_basep = saved;
	  tidy_after_forward_propagate_addr (use_stmt);
	  /* Continue propagating into the RHS if this was not the
	     only use.  */
	  if (single_use_p)
	    return true;
	}
      else
	/* We can have a struct assignment dereferencing our name twice.
	   Note that we didn't propagate into the lhs to not falsely
	   claim we did when propagating into the rhs.  */
	res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  rhs = gimple_assign_rhs1 (use_stmt);
  if (TREE_CODE (rhs) == ADDR_EXPR)
    rhs = TREE_OPERAND (rhs, 0);
  while (handled_component_p (rhs))
    rhs = TREE_OPERAND (rhs, 0);

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
							 &def_rhs_offset)))
	{
	  double_int off = mem_ref_offset (rhs);
	  tree new_ptr;
	  off = double_int_add (off,
				shwi_to_double_int (def_rhs_offset));
	  if (TREE_CODE (def_rhs_base) == MEM_REF)
	    {
	      off = double_int_add (off, mem_ref_offset (def_rhs_base));
	      new_ptr = TREE_OPERAND (def_rhs_base, 0);
	    }
	  else
	    new_ptr = build_fold_addr_expr (def_rhs_base);
	  TREE_OPERAND (rhs, 0) = new_ptr;
	  TREE_OPERAND (rhs, 1)
	    = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
	  fold_stmt_inplace (use_stmt_gsi);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return res;
	}
      /* If the RHS is a plain dereference and the value type is the same as
	 that of the pointed-to type of the address we can put the
	 dereferenced address on the RHS preserving the original alias-type.  */
      else if (gimple_assign_rhs1 (use_stmt) == rhs
	       && useless_type_conversion_p
		    (TREE_TYPE (gimple_assign_lhs (use_stmt)),
		     TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
	{
	  tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
	  tree new_offset, new_base, saved;
	  while (handled_component_p (*def_rhs_basep))
	    def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
	  saved = *def_rhs_basep;
	  if (TREE_CODE (*def_rhs_basep) == MEM_REF)
	    {
	      new_base = TREE_OPERAND (*def_rhs_basep, 0);
	      new_offset
		= int_const_binop (PLUS_EXPR, TREE_OPERAND (rhs, 1),
				   TREE_OPERAND (*def_rhs_basep, 1));
	    }
	  else
	    {
	      new_base = build_fold_addr_expr (*def_rhs_basep);
	      new_offset = TREE_OPERAND (rhs, 1);
	    }
	  *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
				   new_base, new_offset);
	  TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
	  TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
	  gimple_assign_set_rhs1 (use_stmt,
				  unshare_expr (TREE_OPERAND (def_rhs, 0)));
	  *def_rhs_basep = saved;
	  fold_stmt_inplace (use_stmt_gsi);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return res;
	}
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
				 ADDR_EXPR, TREE_TYPE (def_rhs),
				 fold_build2 (MEM_REF,
					      TREE_TYPE (TREE_TYPE (def_rhs)),
					      unshare_expr (def_rhs),
					      fold_convert (ptr_type_node,
							    rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
     converting a multiplication of an index by the size of the
     array elements into the proper type for the arithmetic.  */
  if (TREE_CODE (rhs2) == SSA_NAME
      && (TREE_CODE (array_ref) != ARRAY_REF
	  || integer_zerop (TREE_OPERAND (array_ref, 1)))
      && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
      /* Avoid problems with IVopts creating PLUS_EXPRs with a
	 different type than their operands.  */
      && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
    return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
							     use_stmt_gsi);
  return false;
}

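/* Example of the constant-offset case above (a sketch): for char a[8],

     ptr_1 = &a[2];
     ptr2_2 = ptr_1 p+ 3;

   is turned into roughly

     ptr2_2 = &MEM[(void *)&a + 5B];

   an invariant address that later folding may present as &a[5].  */
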
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.
   Returns true if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs)
{
  int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
	 there is nothing we can do.  */
      if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
	{
	  if (!is_gimple_debug (use_stmt))
	    all = false;
	  continue;
	}

      /* If the use is in a deeper loop nest, then we do not want
	 to propagate non-invariant ADDR_EXPRs into the loop as that
	 is likely adding expression evaluations into the loop.  */
      if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
	  && !is_gimple_min_invariant (rhs))
	{
	  all = false;
	  continue;
	}

      {
	gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
						single_use_p);
	/* If the use has moved to a different statement adjust
	   the update machinery for the old statement too.  */
	if (use_stmt != gsi_stmt (gsi))
	  {
	    update_stmt (use_stmt);
	    use_stmt = gsi_stmt (gsi);
	  }

	update_stmt (use_stmt);
      }
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
	  && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
	  && TREE_CODE (use_rhs) == SSA_NAME
	  && has_zero_uses (gimple_assign_lhs (use_stmt)))
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	  release_defs (use_stmt);
	  gsi_remove (&gsi, true);
	}
    }

  return all && has_zero_uses (name);
}

/* Forward propagate the comparison defined in STMT like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  */

static bool
forward_propagate_comparison (gimple stmt)
{
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  tree tmp = NULL_TREE;
  gimple_stmt_iterator gsi;
  enum tree_code code;
  tree lhs;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    return false;

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt
      || !is_gimple_assign (use_stmt))
    return false;

  code = gimple_assign_rhs_code (use_stmt);
  lhs = gimple_assign_lhs (use_stmt);
  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
    return false;

  /* We can propagate the condition into a statement that
     computes the logical negation of the comparison result.  */
  if ((code == BIT_NOT_EXPR
       && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
      || (code == BIT_XOR_EXPR
	  && integer_onep (gimple_assign_rhs2 (use_stmt))))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
      bool nans = HONOR_NANS (TYPE_MODE (type));
      enum tree_code inv_code;
      inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
      if (inv_code == ERROR_MARK)
	return false;

      tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
		    gimple_assign_rhs2 (stmt));
    }
  else
    return false;

  gsi = gsi_for_stmt (use_stmt);
  gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
  use_stmt = gsi_stmt (gsi);
  update_stmt (use_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Replaced '");
      print_gimple_expr (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "' with '");
      print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
      fprintf (dump_file, "'\n");
    }

  /* Remove defining statements.  */
  return remove_prop_source_from_use (name);
}

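/* Example (a sketch): for

     t_1 = a_2 < b_3;
     x_4 = t_1 ^ 1;

   the comparison is inverted and propagated into its use:

     x_4 = a_2 >= b_3;

   For floating-point operands invert_tree_comparison refuses the
   inversion when NaNs are honored (< is not the inverse of >= then),
   and the propagation is skipped.  */
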
/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
   If so, we can change STMT into lhs = y which can later be copy
   propagated.  Similarly for negation.

   This could trivially be formulated as a forward propagation
   to immediate uses.  However, we already had an implementation
   from DOM which used backward propagation via the use-def links.

   It turns out that backward propagation is actually faster as
   there's less work to do for each NOT/NEG expression we find.
   Backwards propagation needs to look at the statement in a single
   backlink.  Forward propagation needs to look at potentially more
   than one forward link.

   Returns true when the statement was changed.  */

static bool
simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree rhs = gimple_assign_rhs1 (stmt);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);

  /* See if the RHS_DEF_STMT has the same form as our statement.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
    {
      tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);

      /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
      if (TREE_CODE (rhs_def_operand) == SSA_NAME
	  && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
	{
	  gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
	  stmt = gsi_stmt (*gsi_p);
	  update_stmt (stmt);
	  return true;
	}
    }

  return false;
}

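/* Example (a sketch): for

     y_1 = ~x_2;
     z_3 = ~y_1;

   the second statement is rewritten to z_3 = x_2, which copy
   propagation can then clean up; double negation (z_3 = -y_1 with
   y_1 = -x_2) is handled the same way.  */
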
/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gimple stmt)
{
  tree cond = gimple_switch_index (stmt);
  tree def, to, ti;
  gimple def_stmt;

  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      def_stmt = SSA_NAME_DEF_STMT (cond);
      if (is_gimple_assign (def_stmt))
	{
	  if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
	    {
	      int need_precision;
	      bool fail;

	      def = gimple_assign_rhs1 (def_stmt);

	      /* ??? Why was Jeff testing this?  We are gimple...  */
	      gcc_checking_assert (is_gimple_val (def));

	      to = TREE_TYPE (cond);
	      ti = TREE_TYPE (def);

	      /* If we have an extension that preserves value, then we
		 can copy the source value into the switch.  */

	      need_precision = TYPE_PRECISION (ti);
	      fail = false;
	      if (! INTEGRAL_TYPE_P (ti))
		fail = true;
	      else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
		fail = true;
	      else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
		need_precision += 1;
	      if (TYPE_PRECISION (to) < need_precision)
		fail = true;

	      if (!fail)
		{
		  gimple_switch_set_index (stmt, def);
		  update_stmt (stmt);
		  return true;
		}
	    }
	}
    }

  return false;
}

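/* Example (a sketch): for

     unsigned char c_1;
     t_2 = (int) c_1;
     switch (t_2) <...>

   the widening conversion preserves every value of c_1, so the switch
   index can become c_1 directly.  A cast that could change values,
   e.g. from a signed type to an unsigned one of the same precision,
   fails the checks above and is left alone.  */
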
/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
	 twice, to handle ADDR_EXPR directly in p1/p2,
	 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
	 on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
	{
	  if (!POINTER_TYPE_P (TREE_TYPE (p)))
	    break;
	  if (TREE_CODE (p) == ADDR_EXPR)
	    {
	      tree q = TREE_OPERAND (p, 0);
	      HOST_WIDE_INT offset;
	      tree base = get_addr_base_and_unit_offset (q, &offset);
	      if (base)
		{
		  q = base;
		  if (offset)
		    off = size_binop (PLUS_EXPR, off, size_int (offset));
		}
	      if (TREE_CODE (q) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
		{
		  p = TREE_OPERAND (q, 0);
		  off = size_binop (PLUS_EXPR, off,
				    double_int_to_tree (sizetype,
							mem_ref_offset (q)));
		}
	      else
		{
		  exps[i][j] = q;
		  offs[i][j++] = off;
		  break;
		}
	    }
	  if (TREE_CODE (p) != SSA_NAME)
	    break;
	  exps[i][j] = p;
	  offs[i][j++] = off;
	  if (j == CPD_ITERATIONS)
	    break;
	  stmt = SSA_NAME_DEF_STMT (p);
	  if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
	    break;
	  code = gimple_assign_rhs_code (stmt);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
		break;
	      off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
	      p = gimple_assign_rhs1 (stmt);
	    }
	  else if (code == ADDR_EXPR || code == NOP_EXPR)
	    p = gimple_assign_rhs1 (stmt);
	  else
	    break;
	}
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
	return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}

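/* Example (a sketch): for

     p1_1 = &buf p+ 4;
     p2_2 = p1_1 p+ 8;

   both pointers walk back to the common base &buf with accumulated
   offsets 4 and 12, so constant_pointer_difference (p1_1, p2_2)
   returns 8.  If no common subexpression is found within the bounded
   number of steps, NULL_TREE is returned.  */
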
/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
     memcpy (p, "abcd", 4);
     memset (p + 4, ' ', 3);
   into a single
     memcpy (p, "abcd   ", 7);
   call if the latter can be stored by pieces during expansion.  */

static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
	  || gimple_call_lhs (stmt2)
	  || CHAR_BIT != 8
	  || BITS_PER_UNIT != 8)
	break;
      else
	{
	  tree callee1;
	  tree ptr1, src1, str1, off1, len1, lhs1;
	  tree ptr2 = gimple_call_arg (stmt2, 0);
	  tree val2 = gimple_call_arg (stmt2, 1);
	  tree len2 = gimple_call_arg (stmt2, 2);
	  tree diff, vdef, new_str_cst;
	  gimple use_stmt;
	  unsigned int ptr1_align;
	  unsigned HOST_WIDE_INT src_len;
	  char *src_buf;
	  use_operand_p use_p;

	  if (!host_integerp (val2, 0)
	      || !host_integerp (len2, 1))
	    break;
	  if (is_gimple_call (stmt1))
	    {
	      /* If first stmt is a call, it needs to be memcpy
		 or mempcpy, with string literal as second argument and
		 constant length.  */
	      callee1 = gimple_call_fndecl (stmt1);
	      if (callee1 == NULL_TREE
		  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
		  || gimple_call_num_args (stmt1) != 3)
		break;
	      if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
		  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
		break;
	      ptr1 = gimple_call_arg (stmt1, 0);
	      src1 = gimple_call_arg (stmt1, 1);
	      len1 = gimple_call_arg (stmt1, 2);
	      lhs1 = gimple_call_lhs (stmt1);
	      if (!host_integerp (len1, 1))
		break;
	      str1 = string_constant (src1, &off1);
	      if (str1 == NULL_TREE)
		break;
	      if (!host_integerp (off1, 1)
		  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
		  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
					     - tree_low_cst (off1, 1)) > 0
		  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
		  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
		     != TYPE_MODE (char_type_node))
		break;
	    }
	  else if (gimple_assign_single_p (stmt1))
	    {
	      /* Otherwise look for length 1 memcpy optimized into
		 assignment.  */
	      ptr1 = gimple_assign_lhs (stmt1);
	      src1 = gimple_assign_rhs1 (stmt1);
	      if (TREE_CODE (ptr1) != MEM_REF
		  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
		  || !host_integerp (src1, 0))
		break;
	      ptr1 = build_fold_addr_expr (ptr1);
	      callee1 = NULL_TREE;
	      len1 = size_one_node;
	      lhs1 = NULL_TREE;
	      off1 = size_zero_node;
	      str1 = NULL_TREE;
	    }
	  else
	    break;

	  diff = constant_pointer_difference (ptr1, ptr2);
	  if (diff == NULL && lhs1 != NULL)
	    {
	      diff = constant_pointer_difference (lhs1, ptr2);
	      if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
		  && diff != NULL)
		diff = size_binop (PLUS_EXPR, diff,
				   fold_convert (sizetype, len1));
	    }
	  /* If the difference between the second and first destination pointer
	     is not constant, or is bigger than memcpy length, bail out.  */
	  if (diff == NULL
	      || !host_integerp (diff, 1)
	      || tree_int_cst_lt (len1, diff))
	    break;

	  /* Use maximum of difference plus memset length and memcpy length
	     as the new memcpy length, if it is too big, bail out.  */
	  src_len = tree_low_cst (diff, 1);
	  src_len += tree_low_cst (len2, 1);
	  if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
	    src_len = tree_low_cst (len1, 1);
	  if (src_len > 1024)
	    break;

	  /* If mempcpy value is used elsewhere, bail out, as mempcpy
	     with bigger length will return different result.  */
	  if (lhs1 != NULL_TREE
	      && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
	      && (TREE_CODE (lhs1) != SSA_NAME
		  || !single_imm_use (lhs1, &use_p, &use_stmt)
		  || use_stmt != stmt2))
	    break;

	  /* If anything reads memory in between memcpy and memset
	     call, the modified memcpy call might change it.  */
	  vdef = gimple_vdef (stmt1);
	  if (vdef != NULL
	      && (!single_imm_use (vdef, &use_p, &use_stmt)
		  || use_stmt != stmt2))
	    break;

	  ptr1_align = get_pointer_alignment (ptr1);
	  /* Construct the new source string literal.  */
	  src_buf = XALLOCAVEC (char, src_len + 1);
	  if (callee1)
	    memcpy (src_buf,
		    TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
		    tree_low_cst (len1, 1));
	  else
	    src_buf[0] = tree_low_cst (src1, 0);
	  memset (src_buf + tree_low_cst (diff, 1),
		  tree_low_cst (val2, 1), tree_low_cst (len2, 1));
	  src_buf[src_len] = '\0';
	  /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
	     handle embedded '\0's.  */
	  if (strlen (src_buf) != src_len)
	    break;
	  rtl_profile_for_bb (gimple_bb (stmt2));
	  /* If the new memcpy wouldn't be emitted by storing the literal
	     by pieces, this optimization might enlarge .rodata too much,
	     as commonly used string literals couldn't be shared any
	     longer.  */
	  if (!can_store_by_pieces (src_len,
				    builtin_strncpy_read_str,
				    src_buf, ptr1_align, false))
	    break;

	  new_str_cst = build_string_literal (src_len, src_buf);
	  if (callee1)
	    {
	      /* If STMT1 is a mem{,p}cpy call, adjust it and remove
		 memset call.  */
	      if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
		gimple_call_set_lhs (stmt1, NULL_TREE);
	      gimple_call_set_arg (stmt1, 1, new_str_cst);
	      gimple_call_set_arg (stmt1, 2,
				   build_int_cst (TREE_TYPE (len1), src_len));
	      update_stmt (stmt1);
	      unlink_stmt_vdef (stmt2);
	      gsi_remove (gsi_p, true);
	      release_defs (stmt2);
	      if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
		release_ssa_name (lhs1);
	      return true;
	    }
	  else
	    {
	      /* Otherwise, if STMT1 is length 1 memcpy optimized into
		 assignment, remove STMT1 and change memset call into
		 memcpy call.  */
	      gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

	      if (!is_gimple_val (ptr1))
		ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
						 true, GSI_SAME_STMT);
	      gimple_call_set_fndecl (stmt2,
				      builtin_decl_explicit (BUILT_IN_MEMCPY));
	      gimple_call_set_arg (stmt2, 0, ptr1);
	      gimple_call_set_arg (stmt2, 1, new_str_cst);
	      gimple_call_set_arg (stmt2, 2,
				   build_int_cst (TREE_TYPE (len2), src_len));
	      unlink_stmt_vdef (stmt1);
	      gsi_remove (&gsi, true);
	      release_defs (stmt1);
	      update_stmt (stmt2);
	      return false;
	    }
	}
      break;
    default:
      break;
    }
  return false;
}

/* Checks if expression has type of one-bit precision, or is a known
   truth-valued expression.  */
static bool
truth_valued_ssa_name (tree name)
{
  gimple def;
  tree type = TREE_TYPE (name);

  if (!INTEGRAL_TYPE_P (type))
    return false;
  /* Don't check here for BOOLEAN_TYPE as the precision isn't
     necessarily one and so ~X is not equal to !X.  */
  if (TYPE_PRECISION (type) == 1)
    return true;
  def = SSA_NAME_DEF_STMT (name);
  if (is_gimple_assign (def))
    return truth_value_p (gimple_assign_rhs_code (def));
  return false;
}

/* Helper routine for the simplify_bitwise_binary_1 function.
   Return for the SSA name NAME the expression X if it meets the
   condition NAME = !X.  Otherwise return NULL_TREE.
   Detected patterns for NAME = !X are:
     !X and X == 0 for X with integral type.
     X ^ 1, X != 1, or ~X for X with integral type with precision of one.  */
static tree
lookup_logical_inverted_value (tree name)
{
  tree op1, op2;
  enum tree_code code;
  gimple def;

  /* If name has a non-integral type, or isn't an SSA_NAME, then
     return.  */
  if (TREE_CODE (name) != SSA_NAME
      || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
    return NULL_TREE;
  def = SSA_NAME_DEF_STMT (name);
  if (!is_gimple_assign (def))
    return NULL_TREE;

  code = gimple_assign_rhs_code (def);
  op1 = gimple_assign_rhs1 (def);
  op2 = NULL_TREE;

  /* Get for EQ_EXPR or BIT_XOR_EXPR operation the second operand.
     If CODE isn't an EQ_EXPR, BIT_XOR_EXPR, or BIT_NOT_EXPR, then return.  */
  if (code == EQ_EXPR || code == NE_EXPR
      || code == BIT_XOR_EXPR)
    op2 = gimple_assign_rhs2 (def);

  switch (code)
    {
    case BIT_NOT_EXPR:
      if (truth_valued_ssa_name (name))
	return op1;
      break;
    case EQ_EXPR:
      /* Check if we have X == 0 and X has an integral type.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
	break;
      if (integer_zerop (op2))
	return op1;
      break;
    case NE_EXPR:
      /* Check if we have X != 1 and X is truth-valued.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
	break;
      if (integer_onep (op2) && truth_valued_ssa_name (op1))
	return op1;
      break;
    case BIT_XOR_EXPR:
      /* Check if we have X ^ 1 and X is truth valued.  */
      if (integer_onep (op2) && truth_valued_ssa_name (op1))
	return op1;
      break;
    default:
      break;
    }

  return NULL_TREE;
}

/* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
   operations CODE, if one operand has the logically inverted
   value of the other.  */
static tree
simplify_bitwise_binary_1 (enum tree_code code, tree type,
			   tree arg1, tree arg2)
{
  tree anot;

  /* If CODE isn't a bitwise binary operation, return NULL_TREE.  */
  if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
      && code != BIT_XOR_EXPR)
    return NULL_TREE;

  /* First check if operands ARG1 and ARG2 are equal.  If so
     return NULL_TREE as this optimization is handled by fold_stmt.  */
  if (arg1 == arg2)
    return NULL_TREE;
  /* See if we have in arguments logical-not patterns.  */
  if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
       || anot != arg2)
      && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
	  || anot != arg1))
    return NULL_TREE;

  /* X & !X -> 0.  */
  if (code == BIT_AND_EXPR)
    return fold_convert (type, integer_zero_node);
  /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued.  */
  if (truth_valued_ssa_name (anot))
    return fold_convert (type, integer_one_node);

  /* ??? Otherwise the result is (X != 0 ? X : 1).  Not handled.  */
  return NULL_TREE;
}

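/* Example (a sketch): with b_1 truth-valued,

     t_2 = b_1 == 0;
     x_3 = b_1 | t_2;

   lookup_logical_inverted_value recognizes t_2 as !b_1 and the
   expression folds to 1.  For BIT_AND_EXPR the result is 0 even for
   wider integral types, since X & !X is zero regardless of X.  */
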
1739 /* Simplify bitwise binary operations.
1740 Return true if a transformation applied, otherwise return false. */
1742 static bool
1743 simplify_bitwise_binary (gimple_stmt_iterator *gsi)
1745 gimple stmt = gsi_stmt (*gsi);
1746 tree arg1 = gimple_assign_rhs1 (stmt);
1747 tree arg2 = gimple_assign_rhs2 (stmt);
1748 enum tree_code code = gimple_assign_rhs_code (stmt);
1749 tree res;
1750 gimple def1 = NULL, def2 = NULL;
1751 tree def1_arg1, def2_arg1;
1752 enum tree_code def1_code, def2_code;
1754 def1_code = TREE_CODE (arg1);
1755 def1_arg1 = arg1;
1756 if (TREE_CODE (arg1) == SSA_NAME)
1758 def1 = SSA_NAME_DEF_STMT (arg1);
1759 if (is_gimple_assign (def1))
1761 def1_code = gimple_assign_rhs_code (def1);
1762 def1_arg1 = gimple_assign_rhs1 (def1);
1766 def2_code = TREE_CODE (arg2);
1767 def2_arg1 = arg2;
1768 if (TREE_CODE (arg2) == SSA_NAME)
1770 def2 = SSA_NAME_DEF_STMT (arg2);
1771 if (is_gimple_assign (def2))
1773 def2_code = gimple_assign_rhs_code (def2);
1774 def2_arg1 = gimple_assign_rhs1 (def2);
1778 /* Try to fold (type) X op CST -> (type) (X op ((type-x) CST)). */
  if (TREE_CODE (arg2) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (def1_code)
      && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
      && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
    {
      gimple newop;
      tree tem = create_tmp_reg (TREE_TYPE (def1_arg1), NULL);
      newop =
	gimple_build_assign_with_ops (code, tem, def1_arg1,
				      fold_convert_loc (gimple_location (stmt),
							TREE_TYPE (def1_arg1),
							arg2));
      tem = make_ssa_name (tem, newop);
      gimple_assign_set_lhs (newop, tem);
      gimple_set_location (newop, gimple_location (stmt));
      gsi_insert_before (gsi, newop, GSI_SAME_STMT);
      gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
					tem, NULL_TREE, NULL_TREE);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  /* For bitwise binary operations apply the operand conversions to the
     result of the binary operation instead of to its operands.  This
     lets us combine successive conversions and bitwise binary
     operations.  */
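  /* E.g. (again hypothetical): with unsigned char A and B and
     C = (int) A & (int) B, both conversions are hoisted past the AND,
     yielding tem = A & B in unsigned char and C = (int) tem.  */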
  if (CONVERT_EXPR_CODE_P (def1_code)
      && CONVERT_EXPR_CODE_P (def2_code)
      && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
      /* Make sure that the conversion widens the operands, or has the
	 same precision, or that it changes the operation to a bitfield
	 precision.  */
      && ((TYPE_PRECISION (TREE_TYPE (def1_arg1))
	   <= TYPE_PRECISION (TREE_TYPE (arg1)))
	  || (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (arg1)))
	      != MODE_INT)
	  || (TYPE_PRECISION (TREE_TYPE (arg1))
	      != GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg1))))))
    {
      gimple newop;
      tree tem = create_tmp_reg (TREE_TYPE (def1_arg1), NULL);
      newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
      tem = make_ssa_name (tem, newop);
      gimple_assign_set_lhs (newop, tem);
      gimple_set_location (newop, gimple_location (stmt));
      gsi_insert_before (gsi, newop, GSI_SAME_STMT);
      gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
					tem, NULL_TREE, NULL_TREE);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2).  */
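  /* Worked example (hypothetical constants): for
       t_1 = a_2 | 0xf0;  b_3 = t_1 & 0x0f;
     CST1 & CST2 == 0, so b_3 simplifies directly to a_2 & 0x0f.  */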
  if (code == BIT_AND_EXPR
      && def1_code == BIT_IOR_EXPR
      && TREE_CODE (arg2) == INTEGER_CST
      && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
    {
      tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
			      arg2, gimple_assign_rhs2 (def1));
      tree tem;
      gimple newop;
      if (integer_zerop (cst))
	{
	  gimple_assign_set_rhs1 (stmt, def1_arg1);
	  update_stmt (stmt);
	  return true;
	}
      tem = create_tmp_reg (TREE_TYPE (arg2), NULL);
      newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
					    tem, def1_arg1, arg2);
      tem = make_ssa_name (tem, newop);
      gimple_assign_set_lhs (newop, tem);
      gimple_set_location (newop, gimple_location (stmt));
      /* Make sure to re-process the new stmt as it's walking upwards.  */
      gsi_insert_before (gsi, newop, GSI_NEW_STMT);
      gimple_assign_set_rhs1 (stmt, tem);
      gimple_assign_set_rhs2 (stmt, cst);
      gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
      update_stmt (stmt);
      return true;
    }

  /* Combine successive equal operations with constants.  */
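  /* E.g. t_1 = a_2 & 0xff;  b_3 = t_1 & 0x0f;  folds the two masks
     into one, giving b_3 = a_2 & 0x0f (illustrative constants).  */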
  if ((code == BIT_AND_EXPR
       || code == BIT_IOR_EXPR
       || code == BIT_XOR_EXPR)
      && def1_code == code
      && TREE_CODE (arg2) == INTEGER_CST
      && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
    {
      tree cst = fold_build2 (code, TREE_TYPE (arg2),
			      arg2, gimple_assign_rhs2 (def1));
      gimple_assign_set_rhs1 (stmt, def1_arg1);
      gimple_assign_set_rhs2 (stmt, cst);
      update_stmt (stmt);
      return true;
    }

  /* Canonicalize X ^ ~0 to ~X.  */
  if (code == BIT_XOR_EXPR
      && TREE_CODE (arg2) == INTEGER_CST
      && integer_all_onesp (arg2))
    {
      gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
      gcc_assert (gsi_stmt (*gsi) == stmt);
      update_stmt (stmt);
      return true;
    }

  /* Try simple folding for X op !X, and X op X.  */
  res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
  if (res != NULL_TREE)
    {
      gimple_assign_set_rhs_from_tree (gsi, res);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  return false;
}

/* Perform re-associations of the plus or minus statement STMT that are
   always permitted.  Returns true if the CFG was changed.  */

static bool
associate_plusminus (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  bool changed;

  /* We can't reassociate at all for saturating types.  */
  if (TYPE_SATURATING (TREE_TYPE (rhs1)))
    return false;

  /* First contract negates.  */
  do
    {
      changed = false;

      /* A +- (-B) -> A -+ B.  */
      if (TREE_CODE (rhs2) == SSA_NAME)
	{
	  gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
	  if (is_gimple_assign (def_stmt)
	      && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
	      && can_propagate_from (def_stmt))
	    {
	      code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
	      gimple_assign_set_rhs_code (stmt, code);
	      rhs2 = gimple_assign_rhs1 (def_stmt);
	      gimple_assign_set_rhs2 (stmt, rhs2);
	      gimple_set_modified (stmt, true);
	      changed = true;
	    }
	}

      /* (-A) + B -> B - A.  */
      if (TREE_CODE (rhs1) == SSA_NAME
	  && code == PLUS_EXPR)
	{
	  gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
	  if (is_gimple_assign (def_stmt)
	      && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
	      && can_propagate_from (def_stmt))
	    {
	      code = MINUS_EXPR;
	      gimple_assign_set_rhs_code (stmt, code);
	      rhs1 = rhs2;
	      gimple_assign_set_rhs1 (stmt, rhs1);
	      rhs2 = gimple_assign_rhs1 (def_stmt);
	      gimple_assign_set_rhs2 (stmt, rhs2);
	      gimple_set_modified (stmt, true);
	      changed = true;
	    }
	}
    }
  while (changed);

  /* We can't reassociate floating-point or fixed-point plus or minus
     because of saturation to +-Inf.  */
  if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
      || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
    goto out;

  /* Second match patterns that allow contracting a plus-minus pair
     irrespective of overflow issues.

	(A +- B) - A       ->  +- B
	(A +- B) -+ B      ->  A
	(CST +- A) +- CST  ->  CST +- A
	(A + CST) +- CST   ->  A + CST
	~A + A             ->  -1
	~A + 1             ->  -A
	A - (A +- B)       ->  -+ B
	A +- (B +- A)      ->  +- B
	CST +- (CST +- A)  ->  CST +- A
	CST +- (A +- CST)  ->  CST +- A
	A + ~A             ->  -1

     via commuting the addition and contracting operations to zero
     by reassociation.  A worked example follows this comment.  */
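  /* For instance (hypothetical SSA names): given
       t_1 = a_2 + b_3;  c_4 = t_1 - a_2;
     the first pattern rewrites c_4 = b_3; and given
       t_1 = ~a_2;  c_4 = t_1 + 1;
     the sixth pattern rewrites c_4 = -a_2.  */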
  if (TREE_CODE (rhs1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
	{
	  enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
	  if (def_code == PLUS_EXPR
	      || def_code == MINUS_EXPR)
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
	      if (operand_equal_p (def_rhs1, rhs2, 0)
		  && code == MINUS_EXPR)
		{
		  /* (A +- B) - A -> +- B.  */
		  code = ((def_code == PLUS_EXPR)
			  ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
		  rhs1 = def_rhs2;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (operand_equal_p (def_rhs2, rhs2, 0)
		       && code != def_code)
		{
		  /* (A +- B) -+ B -> A.  */
		  code = TREE_CODE (def_rhs1);
		  rhs1 = def_rhs1;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (TREE_CODE (rhs2) == INTEGER_CST
		       && TREE_CODE (def_rhs1) == INTEGER_CST)
		{
		  /* (CST +- A) +- CST -> CST +- A.  */
		  tree cst = fold_binary (code, TREE_TYPE (rhs1),
					  def_rhs1, rhs2);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      code = def_code;
		      gimple_assign_set_rhs_code (stmt, code);
		      rhs1 = cst;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = def_rhs2;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	      else if (TREE_CODE (rhs2) == INTEGER_CST
		       && TREE_CODE (def_rhs2) == INTEGER_CST
		       && def_code == PLUS_EXPR)
		{
		  /* (A + CST) +- CST -> A + CST.  */
		  tree cst = fold_binary (code, TREE_TYPE (rhs1),
					  def_rhs2, rhs2);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      code = PLUS_EXPR;
		      gimple_assign_set_rhs_code (stmt, code);
		      rhs1 = def_rhs1;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = cst;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	    }
	  else if (def_code == BIT_NOT_EXPR
		   && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      if (code == PLUS_EXPR
		  && operand_equal_p (def_rhs1, rhs2, 0))
		{
		  /* ~A + A -> -1.  */
		  code = INTEGER_CST;
		  rhs1 = build_int_cst_type (TREE_TYPE (rhs2), -1);
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (code == PLUS_EXPR
		       && integer_onep (rhs2))
		{
		  /* ~A + 1 -> -A.  The constant has to be RHS2 here:
		     RHS1 is the SSA name holding ~A and can never be
		     a constant.  */
		  code = NEGATE_EXPR;
		  rhs1 = def_rhs1;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	    }
	}
    }

  if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
      if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
	{
	  enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
	  if (def_code == PLUS_EXPR
	      || def_code == MINUS_EXPR)
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
	      if (operand_equal_p (def_rhs1, rhs1, 0)
		  && code == MINUS_EXPR)
		{
		  /* A - (A +- B) -> -+ B.  */
		  code = ((def_code == PLUS_EXPR)
			  ? NEGATE_EXPR : TREE_CODE (def_rhs2));
		  rhs1 = def_rhs2;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (operand_equal_p (def_rhs2, rhs1, 0)
		       && code != def_code)
		{
		  /* A +- (B +- A) -> +- B.  */
		  code = ((code == PLUS_EXPR)
			  ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
		  rhs1 = def_rhs1;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (TREE_CODE (rhs1) == INTEGER_CST
		       && TREE_CODE (def_rhs1) == INTEGER_CST)
		{
		  /* CST +- (CST +- A) -> CST +- A.  */
		  tree cst = fold_binary (code, TREE_TYPE (rhs2),
					  rhs1, def_rhs1);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
		      gimple_assign_set_rhs_code (stmt, code);
		      rhs1 = cst;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = def_rhs2;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	      else if (TREE_CODE (rhs1) == INTEGER_CST
		       && TREE_CODE (def_rhs2) == INTEGER_CST)
		{
		  /* CST +- (A +- CST) -> CST +- A.  */
		  tree cst = fold_binary (def_code == code
					  ? PLUS_EXPR : MINUS_EXPR,
					  TREE_TYPE (rhs2),
					  rhs1, def_rhs2);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      rhs1 = cst;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = def_rhs1;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	    }
	  else if (def_code == BIT_NOT_EXPR
		   && INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      if (code == PLUS_EXPR
		  && operand_equal_p (def_rhs1, rhs1, 0))
		{
		  /* A + ~A -> -1.  */
		  code = INTEGER_CST;
		  rhs1 = build_int_cst_type (TREE_TYPE (rhs1), -1);
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	    }
	}
    }

out:
  if (gimple_modified_p (stmt))
    {
      fold_stmt_inplace (gsi);
      update_stmt (stmt);
      if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	return true;
    }

  return false;
}

/* Combine two conversions in a row for the second conversion at *GSI.
   Returns 1 if any change was made, 2 if cfg-cleanup needs to run,
   and 0 otherwise.  */

static int
combine_conversions (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op0, lhs;
  enum tree_code code = gimple_assign_rhs_code (stmt);

  gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
		       || code == FLOAT_EXPR
		       || code == FIX_TRUNC_EXPR);

  lhs = gimple_assign_lhs (stmt);
  op0 = gimple_assign_rhs1 (stmt);
  if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
    {
      gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
      return 1;
    }

  if (TREE_CODE (op0) != SSA_NAME)
    return 0;

  def_stmt = SSA_NAME_DEF_STMT (op0);
  if (!is_gimple_assign (def_stmt))
    return 0;

  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree defop0 = gimple_assign_rhs1 (def_stmt);
      tree type = TREE_TYPE (lhs);
      tree inside_type = TREE_TYPE (defop0);
      tree inter_type = TREE_TYPE (op0);
      int inside_int = INTEGRAL_TYPE_P (inside_type);
      int inside_ptr = POINTER_TYPE_P (inside_type);
      int inside_float = FLOAT_TYPE_P (inside_type);
      int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
      unsigned int inside_prec = TYPE_PRECISION (inside_type);
      int inside_unsignedp = TYPE_UNSIGNED (inside_type);
      int inter_int = INTEGRAL_TYPE_P (inter_type);
      int inter_ptr = POINTER_TYPE_P (inter_type);
      int inter_float = FLOAT_TYPE_P (inter_type);
      int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
      unsigned int inter_prec = TYPE_PRECISION (inter_type);
      int inter_unsignedp = TYPE_UNSIGNED (inter_type);
      int final_int = INTEGRAL_TYPE_P (type);
      int final_ptr = POINTER_TYPE_P (type);
      int final_float = FLOAT_TYPE_P (type);
      int final_vec = TREE_CODE (type) == VECTOR_TYPE;
      unsigned int final_prec = TYPE_PRECISION (type);
      int final_unsignedp = TYPE_UNSIGNED (type);

      /* In addition to the cases of two conversions in a row
	 handled below, if we are converting something to its own
	 type via an object of identical or wider precision, neither
	 conversion is needed.  */
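      /* E.g. (int)(long)i for an int I, assuming long is at least as
	 wide as int: the value survives both conversions unchanged,
	 so I can be used directly.  */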
      if (useless_type_conversion_p (type, inside_type)
	  && (((inter_int || inter_ptr) && final_int)
	      || (inter_float && final_float))
	  && inter_prec >= final_prec)
	{
	  gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
	  gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
	  update_stmt (stmt);
	  return remove_prop_source_from_use (op0) ? 2 : 1;
	}

      /* Likewise, if the intermediate and initial types are either both
	 float or both integer, we don't need the middle conversion if the
	 former is wider than the latter and doesn't change the signedness
	 (for integers).  Avoid this if the final type is a pointer since
	 then we sometimes need the middle conversion.  Likewise if the
	 final type has a precision not equal to the size of its mode.  */
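      /* E.g. (long)(int)c for a char C: the widening to int neither
	 loses bits nor changes signedness, so (long)c is equivalent
	 (illustrative; assumes plain char is signed here and the usual
	 8/32/64-bit widths).  */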
      if (((inter_int && inside_int)
	   || (inter_float && inside_float)
	   || (inter_vec && inside_vec))
	  && inter_prec >= inside_prec
	  && (inter_float || inter_vec
	      || inter_unsignedp == inside_unsignedp)
	  && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		&& TYPE_MODE (type) == TYPE_MODE (inter_type))
	  && ! final_ptr
	  && (! final_vec || inter_prec == inside_prec))
	{
	  gimple_assign_set_rhs1 (stmt, defop0);
	  update_stmt (stmt);
	  return remove_prop_source_from_use (op0) ? 2 : 1;
	}

      /* If we have a sign-extension of a zero-extended value, we can
	 replace that by a single zero-extension.  */
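      /* E.g. (int)(short)u for an unsigned char U: the zero-extension
	 to short leaves the sign bit of the short clear, so the outer
	 sign-extension is equivalent to zero-extending U to int
	 directly (hypothetical 8/16/32-bit widths).  */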
      if (inside_int && inter_int && final_int
	  && inside_prec < inter_prec && inter_prec < final_prec
	  && inside_unsignedp && !inter_unsignedp)
	{
	  gimple_assign_set_rhs1 (stmt, defop0);
	  update_stmt (stmt);
	  return remove_prop_source_from_use (op0) ? 2 : 1;
	}

      /* Two conversions in a row are not needed unless:
	 - some conversion is floating-point (overstrict for now), or
	 - some conversion is a vector (overstrict for now), or
	 - the intermediate type is narrower than both initial and
	   final, or
	 - the intermediate type and innermost type differ in signedness,
	   and the outermost type is wider than the intermediate, or
	 - the initial type is a pointer type and the precisions of the
	   intermediate and final types differ, or
	 - the final type is a pointer type and the precisions of the
	   initial and intermediate types differ.  */
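      /* So, for example, (short)(int)s for a short S drops the
	 intermediate widening to int entirely (illustrative; none of
	 the bullet points above apply in that case).  */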
      if (! inside_float && ! inter_float && ! final_float
	  && ! inside_vec && ! inter_vec && ! final_vec
	  && (inter_prec >= inside_prec || inter_prec >= final_prec)
	  && ! (inside_int && inter_int
		&& inter_unsignedp != inside_unsignedp
		&& inter_prec < final_prec)
	  && ((inter_unsignedp && inter_prec > inside_prec)
	      == (final_unsignedp && final_prec > inter_prec))
	  && ! (inside_ptr && inter_prec != final_prec)
	  && ! (final_ptr && inside_prec != inter_prec)
	  && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		&& TYPE_MODE (type) == TYPE_MODE (inter_type)))
	{
	  gimple_assign_set_rhs1 (stmt, defop0);
	  update_stmt (stmt);
	  return remove_prop_source_from_use (op0) ? 2 : 1;
	}

      /* A truncation to an unsigned type should be canonicalized as
	 bitwise and of a mask.  */
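      /* E.g. (int)(unsigned short)i for an int I becomes i & 0xffff
	 (hypothetical 16/32-bit widths): final and initial precision
	 match and the intermediate is a narrower unsigned type, so the
	 round trip is just a mask.  */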
      if (final_int && inter_int && inside_int
	  && final_prec == inside_prec
	  && final_prec > inter_prec
	  && inter_unsignedp)
	{
	  tree tem;
	  tem = fold_build2 (BIT_AND_EXPR, inside_type,
			     defop0,
			     double_int_to_tree
			       (inside_type, double_int_mask (inter_prec)));
	  if (!useless_type_conversion_p (type, inside_type))
	    {
	      tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
					      GSI_SAME_STMT);
	      gimple_assign_set_rhs1 (stmt, tem);
	    }
	  else
	    gimple_assign_set_rhs_from_tree (gsi, tem);
	  update_stmt (gsi_stmt (*gsi));
	  return 1;
	}
    }

  return 0;
}

/* Main entry point for the forward propagation and statement combine
   optimizer.  */

static unsigned int
ssa_forward_propagate_and_combine (void)
{
  basic_block bb;
  unsigned int todoflags = 0;

  cfg_changed = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi, prev;
      bool prev_initialized;

      /* Apply forward propagation to all stmts in the basic-block.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree lhs, rhs;
	  enum tree_code code;

	  if (!is_gimple_assign (stmt))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  lhs = gimple_assign_lhs (stmt);
	  rhs = gimple_assign_rhs1 (stmt);
	  code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE (lhs) != SSA_NAME
	      || has_zero_uses (lhs))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  /* If this statement sets an SSA_NAME to an address,
	     try to propagate the address into the uses of the SSA_NAME.  */
	  if (code == ADDR_EXPR
	      /* Handle pointer conversions on invariant addresses
		 as well, as this is valid gimple.  */
	      || (CONVERT_EXPR_CODE_P (code)
		  && TREE_CODE (rhs) == ADDR_EXPR
		  && POINTER_TYPE_P (TREE_TYPE (lhs))))
	    {
	      tree base = get_base_address (TREE_OPERAND (rhs, 0));
	      if ((!base
		   || !DECL_P (base)
		   || decl_address_invariant_p (base))
		  && !stmt_references_abnormal_ssa_name (stmt)
		  && forward_propagate_addr_expr (lhs, rhs))
		{
		  release_defs (stmt);
		  todoflags |= TODO_remove_unused_locals;
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (code == POINTER_PLUS_EXPR)
	    {
	      tree off = gimple_assign_rhs2 (stmt);
	      if (TREE_CODE (off) == INTEGER_CST
		  && can_propagate_from (stmt)
		  && !simple_iv_increment_p (stmt)
		  /* ??? Better adjust the interface to that function
		     instead of building new trees here.  */
		  && forward_propagate_addr_expr
		       (lhs,
			build1_loc (gimple_location (stmt),
				    ADDR_EXPR, TREE_TYPE (rhs),
				    fold_build2 (MEM_REF,
						 TREE_TYPE (TREE_TYPE (rhs)),
						 rhs,
						 fold_convert (ptr_type_node,
							       off)))))
		{
		  release_defs (stmt);
		  todoflags |= TODO_remove_unused_locals;
		  gsi_remove (&gsi, true);
		}
	      else if (is_gimple_min_invariant (rhs))
		{
		  /* Make sure to fold &a[0] + off_1 here.  */
		  fold_stmt_inplace (&gsi);
		  update_stmt (stmt);
		  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
		    gsi_next (&gsi);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (TREE_CODE_CLASS (code) == tcc_comparison)
	    {
	      if (forward_propagate_comparison (stmt))
		cfg_changed = true;
	      gsi_next (&gsi);
	    }
	  else
	    gsi_next (&gsi);
	}

      /* Combine stmts with the stmts defining their operands.
	 Note we update GSI within the loop as necessary.  */
      prev_initialized = false;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple stmt = gsi_stmt (gsi);
	  bool changed = false;

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_ASSIGN:
	      {
		tree rhs1 = gimple_assign_rhs1 (stmt);
		enum tree_code code = gimple_assign_rhs_code (stmt);

		if ((code == BIT_NOT_EXPR
		     || code == NEGATE_EXPR)
		    && TREE_CODE (rhs1) == SSA_NAME)
		  changed = simplify_not_neg_expr (&gsi);
		else if (code == COND_EXPR)
		  {
		    /* In this case the entire COND_EXPR is in rhs1.  */
		    changed |= forward_propagate_into_cond (&gsi);
		    stmt = gsi_stmt (gsi);
		  }
		else if (TREE_CODE_CLASS (code) == tcc_comparison)
		  {
		    int did_something;
		    did_something = forward_propagate_into_comparison (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
		else if (code == BIT_AND_EXPR
			 || code == BIT_IOR_EXPR
			 || code == BIT_XOR_EXPR)
		  changed = simplify_bitwise_binary (&gsi);
		else if (code == PLUS_EXPR
			 || code == MINUS_EXPR)
		  changed = associate_plusminus (&gsi);
		else if (CONVERT_EXPR_CODE_P (code)
			 || code == FLOAT_EXPR
			 || code == FIX_TRUNC_EXPR)
		  {
		    int did_something = combine_conversions (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
		break;
	      }

	    case GIMPLE_SWITCH:
	      changed = simplify_gimple_switch (stmt);
	      break;

	    case GIMPLE_COND:
	      {
		int did_something;
		did_something = forward_propagate_into_gimple_cond (stmt);
		if (did_something == 2)
		  cfg_changed = true;
		changed = did_something != 0;
		break;
	      }

	    case GIMPLE_CALL:
	      {
		tree callee = gimple_call_fndecl (stmt);
		if (callee != NULL_TREE
		    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
		  changed = simplify_builtin_call (&gsi, callee);
		break;
	      }

	    default:;
	    }

	  if (changed)
	    {
	      /* If the stmt changed then re-visit it and the statements
		 inserted before it.  */
	      if (!prev_initialized)
		gsi = gsi_start_bb (bb);
	      else
		{
		  gsi = prev;
		  gsi_next (&gsi);
		}
	    }
	  else
	    {
	      prev = gsi;
	      prev_initialized = true;
	      gsi_next (&gsi);
	    }
	}
    }

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}

static bool
gate_forwprop (void)
{
  return flag_tree_forwprop;
}

struct gimple_opt_pass pass_forwprop =
{
 {
  GIMPLE_PASS,
  "forwprop",			/* name */
  gate_forwprop,		/* gate */
  ssa_forward_propagate_and_combine, /* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_FORWPROP,		/* tv_id */
  PROP_cfg | PROP_ssa,		/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_ggc_collect
  | TODO_update_ssa
  | TODO_verify_ssa		/* todo_flags_finish */
 }
};