/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "timevar.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-dump.h"
#include "langhooks.h"
#include "flags.h"
#include "gimple.h"
#include "expr.h"
/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or
     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither have volatile on them):
     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or
     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */
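/* Illustrative sketch (not from the original sources): the cast
   propagation into SWITCH_EXPR conditions mentioned above turns

     short s = ...;
     int i = (int) s;
     switch (i) { ... }

   into

     switch (s) { ... }

   provided the conversion preserves value, so the case labels still
   match; see simplify_gimple_switch below for the exact conditions.  */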
static bool forward_propagate_addr_expr (tree name, tree rhs);

/* Set to true if we delete EH edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple stmt);

/* Get the next statement we can propagate NAME's value into skipping
   trivial copies.  Returns the statement that is suitable as a
   propagation destination or NULL_TREE if there is no such one.
   This only returns destinations in a single-use chain.  FINAL_NAME_P,
   if non-NULL, is written to the ssa name that represents the use.  */
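/* For example (illustration only), given the single-use chain

     tmp_1 = name_1;
     tmp_2 = tmp_1;
     if (tmp_2 != 0) goto ...;

   calling this with name_1 skips the two trivial copies, returns the
   GIMPLE_COND and writes tmp_2 to *FINAL_NAME_P.  */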
static gimple
get_prop_dest_stmt (tree name, tree *final_name_p)
{
  use_operand_p use;
  gimple use_stmt;

  do {
    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))
      return NULL;

    /* If this is not a trivial copy, we found it.  */
    if (!gimple_assign_ssa_name_copy_p (use_stmt)
        || gimple_assign_rhs1 (use_stmt) != name)
      break;

    /* Continue searching uses of the copy destination.  */
    name = gimple_assign_lhs (use_stmt);
  } while (1);

  if (final_name_p)
    *final_name_p = name;

  return use_stmt;
}
/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL_TREE if there is no such one.
   If SINGLE_USE_ONLY is set considers only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
        single_use = false;
        if (single_use_only)
          return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is not a simple copy, we possibly found it.  */
    if (!gimple_assign_ssa_name_copy_p (def_stmt))
      {
        tree rhs;

        if (!single_use_only && single_use_p)
          *single_use_p = single_use;

        /* We can look through pointer conversions in the search
           for a useful stmt for the comparison folding.  */
        rhs = gimple_assign_rhs1 (def_stmt);
        if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
            && TREE_CODE (rhs) == SSA_NAME
            && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
            && POINTER_TYPE_P (TREE_TYPE (rhs)))
          name = rhs;
        else
          return def_stmt;
      }
    else
      {
        /* Continue searching the def of the copy source name.  */
        name = gimple_assign_rhs1 (def_stmt);
      }
  } while (1);
}
/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can always be propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we cannot apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}
/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (SSA_NAME_IN_FREE_LIST (name)
        || SSA_NAME_IS_DEFAULT_DEF (name)
        || !has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    if (gimple_code (stmt) == GIMPLE_PHI
        || gimple_has_side_effects (stmt))
      return cfg_changed;

    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    gsi_remove (&gsi, true);
    release_defs (stmt);
    cfg_changed |= gimple_purge_dead_eh_edges (bb);

    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}
/* Return the rhs of a gimple_assign STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt),
                            gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}
/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */
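/* For illustration (not from the original source): with CODE == EQ_EXPR,
   OP0 == 5 and OP1 == 7 the call folds to boolean_false_node, which is
   gimple_min_invariant and therefore acceptable even when INVARIANT_ONLY
   is true; a fold to a non-invariant like a_1 == 0 would be rejected in
   that mode.  */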
static tree
combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}
/* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
   of its operands.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (gimple stmt,
                                     enum tree_code code, tree type,
                                     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        rhs0, op1, !single_use0_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        op0, rhs1, !single_use1_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
                                  rhs0, rhs1,
                                  !(single_use0_p && single_use1_p));

  return tmp;
}
/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
                                             gimple_assign_rhs_code (stmt),
                                             type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}
/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */
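/* Sketch of the intended effect (illustration only):

     x_1 = a_2 < b_3;
     if (x_1 != 0) goto ...;

   becomes

     if (a_2 < b_3) goto ...;

   after which remove_prop_source_from_use deletes the now-dead
   definition of x_1.  */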
static int
forward_propagate_into_gimple_cond (gimple stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
                                             boolean_type_node,
                                             rhs1, rhs2);
  if (tmp)
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_gimple_expr (dump_file, stmt, 0, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
           && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
           && integer_zerop (rhs2))
          || (code == NE_EXPR
              && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}
/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);
  bool swap = false;

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
                                               boolean_type_node,
                                               TREE_OPERAND (cond, 0),
                                               TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      enum tree_code code;
      tree name = cond;
      gimple def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return false;

      code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (code) == tcc_comparison)
        tmp = fold_build2_loc (gimple_location (def_stmt),
                               code,
                               boolean_type_node,
                               gimple_assign_rhs1 (def_stmt),
                               gimple_assign_rhs2 (def_stmt));
      else if ((code == BIT_NOT_EXPR
                && TYPE_PRECISION (TREE_TYPE (cond)) == 1)
               || (code == BIT_XOR_EXPR
                   && integer_onep (gimple_assign_rhs2 (def_stmt))))
        {
          tmp = gimple_assign_rhs1 (def_stmt);
          swap = true;
        }
    }

  if (tmp)
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, cond, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      if (integer_onep (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
      else if (integer_zerop (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
      else
        {
          gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
          if (swap)
            {
              tree t = gimple_assign_rhs2 (stmt);
              gimple_assign_set_rhs2 (stmt, gimple_assign_rhs3 (stmt));
              gimple_assign_set_rhs3 (stmt, t);
            }
        }
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return true;
    }

  return false;
}
/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
    cfg_changed = true;

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}
/* DEF_RHS contains the address of the 0th element in an array.
   USE_STMT uses the type of DEF_RHS to compute the address of an
   arbitrary element within the array.  The (variable) byte offset
   of the element is contained in OFFSET.

   We walk back through the use-def chains of OFFSET to verify that
   it is indeed computing the offset of an element within the array
   and extract the index corresponding to the given byte offset.

   We then try to fold the entire address expression into a form
   &array[index].

   If we are successful, we replace the right hand side of USE_STMT
   with the new address computation.  */
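/* For instance (illustration only), with 4-byte array elements:

     ptr_1 = &x[0];
     offset_2 = i_3 * 4;
     ptr2_4 = ptr_1 + offset_2;

   the pointer addition is rewritten so that ptr2_4 = &x[i_3].  */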
static bool
forward_propagate_addr_into_variable_array_index (tree offset,
                                                  tree def_rhs,
                                                  gimple_stmt_iterator *use_stmt_gsi)
{
  tree index, tunit;
  gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
  tree new_rhs, tmp;

  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
    tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
  else if (TREE_CODE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))) == ARRAY_TYPE)
    tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))));
  else
    return false;
  if (!host_integerp (tunit, 1))
    return false;

  /* Get the offset's defining statement.  */
  offset_def = SSA_NAME_DEF_STMT (offset);

  /* Try to find an expression for a proper index.  This is either a
     multiplication expression by the element size or just the ssa name we came
     along in case the element size is one.  In that case, however, we do not
     allow multiplications because they can be computing index to a higher
     level dimension (PR 37861).  */
  if (integer_onep (tunit))
    {
      if (is_gimple_assign (offset_def)
          && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
        return false;

      index = offset;
    }
  else
    {
      /* The statement which defines OFFSET before type conversion
         must be a simple GIMPLE_ASSIGN.  */
      if (!is_gimple_assign (offset_def))
        return false;

      /* The RHS of the statement which defines OFFSET must be a
         multiplication of an object by the size of the array elements.
         This implicitly verifies that the size of the array elements
         is constant.  */
      if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
          && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
          && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
        {
          /* The first operand to the MULT_EXPR is the desired index.  */
          index = gimple_assign_rhs1 (offset_def);
        }
      /* If we have idx * tunit + CST * tunit re-associate that.  */
      else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
                || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
               && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
               && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
               && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
                                                gimple_assign_rhs2 (offset_def),
                                                tunit)) != NULL_TREE)
        {
          gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
          if (is_gimple_assign (offset_def2)
              && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
              && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
              && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
            {
              index = fold_build2 (gimple_assign_rhs_code (offset_def),
                                   TREE_TYPE (offset),
                                   gimple_assign_rhs1 (offset_def2), tmp);
            }
          else
            return false;
        }
      else
        return false;
    }

  /* Replace the pointer addition with array indexing.  */
  index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
    {
      new_rhs = unshare_expr (def_rhs);
      TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 1) = index;
    }
  else
    {
      new_rhs = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))),
                        unshare_expr (TREE_OPERAND (def_rhs, 0)),
                        index, integer_zero_node, NULL_TREE);
      new_rhs = build_fold_addr_expr (new_rhs);
      if (!useless_type_conversion_p (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                                      TREE_TYPE (new_rhs)))
        {
          new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true,
                                              NULL_TREE, true, GSI_SAME_STMT);
          new_rhs = fold_convert (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                                  new_rhs);
        }
    }
  gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
  fold_stmt (use_stmt_gsi);
  tidy_after_forward_propagate_addr (gsi_stmt (*use_stmt_gsi));
  return true;
}
/* NAME is an SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation may
   be only partially successful, yet things may have been changed).  */
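/* E.g. (illustration only):

     ptr_1 = &a.b.c;
     x_2 = *ptr_1;

   becomes x_2 = a.b.c, removing one level of indirection, while the
   POINTER_PLUS_EXPR cases below recover array indexing instead.  */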
static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Trivial cases.  The use statement could be a trivial copy or a
     useless conversion.  Recurse to the uses of the lhs as copyprop does
     not copy through different variant pointers and FRE does not catch
     all useless conversions.  Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && ((rhs_code == SSA_NAME && rhs == name)
          || CONVERT_EXPR_CODE_P (rhs_code)))
    {
      /* Only recurse if we don't deal with a single use or we cannot
         do the propagation to the current statement.  In particular
         we can end up with a conversion needed for a non-invariant
         address which we cannot do in a single statement.  */
      if (!single_use_p
          || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
              && (!is_gimple_min_invariant (def_rhs)
                  || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                      && POINTER_TYPE_P (TREE_TYPE (def_rhs))
                      && (TYPE_PRECISION (TREE_TYPE (lhs))
                          > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
        return forward_propagate_addr_expr (lhs, def_rhs);

      gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
        gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
      else
        gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
      return true;
    }

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
         for further propagation.  Simply rely on fold building that
         and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
                                 def_rhs,
                                 fold_convert (ptr_type_node,
                                               gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
        return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
                                                    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
         bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
          && forward_propagate_addr_expr (lhs, new_def_rhs))
        return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
                                        new_def_rhs, NULL_TREE);
      else if (is_gimple_min_invariant (new_def_rhs))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
                                        new_def_rhs, NULL_TREE);
      else
        return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  lhs = gimple_assign_lhs (use_stmt);
  while (handled_component_p (lhs))
    lhs = TREE_OPERAND (lhs, 0);

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (lhs);
          tree new_ptr;
          off = double_int_add (off,
                                shwi_to_double_int (def_rhs_offset));
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off = double_int_add (off, mem_ref_offset (def_rhs_base));
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (lhs, 0) = new_ptr;
          TREE_OPERAND (lhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the only use.  */
          if (single_use_p)
            return true;
        }
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the LHS preserving the original alias-type.  */
      else if (gimple_assign_lhs (use_stmt) == lhs
               && useless_type_conversion_p
                    (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
                     TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset
                = int_const_binop (PLUS_EXPR, TREE_OPERAND (lhs, 1),
                                   TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (lhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
          gimple_assign_set_lhs (use_stmt,
                                 unshare_expr (TREE_OPERAND (def_rhs, 0)));
          *def_rhs_basep = saved;
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the
             only use.  */
          if (single_use_p)
            return true;
        }
      else
        /* We can have a struct assignment dereferencing our name twice.
           Note that we didn't propagate into the lhs to not falsely
           claim we did when propagating into the rhs.  */
        res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  rhs = gimple_assign_rhs1 (use_stmt);
  if (TREE_CODE (rhs) == ADDR_EXPR)
    rhs = TREE_OPERAND (rhs, 0);
  while (handled_component_p (rhs))
    rhs = TREE_OPERAND (rhs, 0);

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (rhs);
          tree new_ptr;
          off = double_int_add (off,
                                shwi_to_double_int (def_rhs_offset));
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off = double_int_add (off, mem_ref_offset (def_rhs_base));
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (rhs, 0) = new_ptr;
          TREE_OPERAND (rhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the RHS preserving the original alias-type.  */
      else if (gimple_assign_rhs1 (use_stmt) == rhs
               && useless_type_conversion_p
                    (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                     TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset
                = int_const_binop (PLUS_EXPR, TREE_OPERAND (rhs, 1),
                                   TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (rhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
          gimple_assign_set_rhs1 (use_stmt,
                                  unshare_expr (TREE_OPERAND (def_rhs, 0)));
          *def_rhs_basep = saved;
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
                                 ADDR_EXPR, TREE_TYPE (def_rhs),
                                 fold_build2 (MEM_REF,
                                              TREE_TYPE (TREE_TYPE (def_rhs)),
                                              unshare_expr (def_rhs),
                                              fold_convert (ptr_type_node,
                                                            rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
     converting a multiplication of an index by the size of the
     array elements, then the result is converted into the proper
     type for the arithmetic.  */
  if (TREE_CODE (rhs2) == SSA_NAME
      && (TREE_CODE (array_ref) != ARRAY_REF
          || integer_zerop (TREE_OPERAND (array_ref, 1)))
      && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
      /* Avoid problems with IVopts creating PLUS_EXPRs with a
         different type than their operands.  */
      && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
    return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
                                                             use_stmt_gsi);
  return false;
}
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.
   Returns true if all uses have been propagated into.  */
static bool
forward_propagate_addr_expr (tree name, tree rhs)
{
  int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
        {
          if (!is_gimple_debug (use_stmt))
            all = false;
          continue;
        }

      /* If the use is in a deeper loop nest, then we do not want
         to propagate non-invariant ADDR_EXPRs into the loop as that
         is likely adding expression evaluations into the loop.  */
      if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
          && !is_gimple_min_invariant (rhs))
        {
          all = false;
          continue;
        }

      {
        gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
        result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                                single_use_p);
        /* If the use has moved to a different statement adjust
           the update machinery for the old statement too.  */
        if (use_stmt != gsi_stmt (gsi))
          {
            update_stmt (use_stmt);
            use_stmt = gsi_stmt (gsi);
          }

        update_stmt (use_stmt);
      }
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  return all && has_zero_uses (name);
}
/* Forward propagate the comparison defined in STMT like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  */
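/* A sketch of the negation case (illustration only):

     cond_1 = x_2 < y_3;
     a_4 = !cond_1;         (or cond_1 ^ 1)

   becomes a_4 = x_2 >= y_3 via invert_tree_comparison; for
   floating-point operands the inversion is refused when NaNs would
   make it unsafe.  */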
static bool
forward_propagate_comparison (gimple stmt)
{
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  tree tmp = NULL_TREE;
  gimple_stmt_iterator gsi;
  enum tree_code code;
  tree lhs;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    return false;

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt
      || !is_gimple_assign (use_stmt))
    return false;

  code = gimple_assign_rhs_code (use_stmt);
  lhs = gimple_assign_lhs (use_stmt);
  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
    return false;

  /* We can propagate the condition into a statement that
     computes the logical negation of the comparison result.  */
  if ((code == BIT_NOT_EXPR
       && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
      || (code == BIT_XOR_EXPR
          && integer_onep (gimple_assign_rhs2 (use_stmt))))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
      bool nans = HONOR_NANS (TYPE_MODE (type));
      enum tree_code inv_code;
      inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
      if (inv_code == ERROR_MARK)
        return false;

      tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
                    gimple_assign_rhs2 (stmt));
    }
  else
    return false;

  gsi = gsi_for_stmt (use_stmt);
  gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
  use_stmt = gsi_stmt (gsi);
  update_stmt (use_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Replaced '");
      print_gimple_expr (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "' with '");
      print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
      fprintf (dump_file, "'\n");
    }

  /* Remove defining statements.  */
  return remove_prop_source_from_use (name);
}
/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
   If so, we can change STMT into lhs = y which can later be copy
   propagated.  Similarly for negation.

   This could trivially be formulated as a forward propagation
   to immediate uses.  However, we already had an implementation
   from DOM which used backward propagation via the use-def links.

   It turns out that backward propagation is actually faster as
   there's less work to do for each NOT/NEG expression we find.
   Backwards propagation needs to look at the statement in a single
   backlink.  Forward propagation needs to look at potentially more
   than one forward link.

   Returns true when the statement was changed.  */
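/* E.g. (illustration only): given x_1 = ~y_2 followed by lhs_3 = ~x_1,
   STMT is rewritten to lhs_3 = y_2; NEGATE_EXPR pairs are handled the
   same way since the codes of the two statements merely have to match.  */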
static bool
simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree rhs = gimple_assign_rhs1 (stmt);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);

  /* See if the RHS_DEF_STMT has the same form as our statement.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
    {
      tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);

      /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
      if (TREE_CODE (rhs_def_operand) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
        {
          gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
          stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);
          return true;
        }
    }

  return false;
}
/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gimple stmt)
{
  tree cond = gimple_switch_index (stmt);
  tree def, to, ti;
  gimple def_stmt;

  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      def_stmt = SSA_NAME_DEF_STMT (cond);
      if (is_gimple_assign (def_stmt))
        {
          if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
            {
              int need_precision;
              bool fail;

              def = gimple_assign_rhs1 (def_stmt);

              /* ??? Why was Jeff testing this?  We are gimple...  */
              gcc_checking_assert (is_gimple_val (def));

              to = TREE_TYPE (cond);
              ti = TREE_TYPE (def);

              /* If we have an extension that preserves value, then we
                 can copy the source value into the switch.  */

              need_precision = TYPE_PRECISION (ti);
              fail = false;
              if (! INTEGRAL_TYPE_P (ti))
                fail = true;
              else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
                fail = true;
              else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
                need_precision += 1;
              if (TYPE_PRECISION (to) < need_precision)
                fail = true;

              if (!fail)
                {
                  gimple_switch_set_index (stmt, def);
                  update_stmt (stmt);
                  return true;
                }
            }
        }
    }

  return false;
}
/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */
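/* For example (illustration only): with p1 = &buf[4] and p2 defined by
   p2_1 = &buf[0] + 10, both walks below reach the common base &buf with
   accumulated offsets 4 and 10, so the function returns the constant
   10 - 4 == 6 as a size_binop MINUS_EXPR of the two offsets.  */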
static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
         twice, to handle ADDR_EXPR directly in p1/p2,
         SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
         on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
        {
          if (!POINTER_TYPE_P (TREE_TYPE (p)))
            break;
          if (TREE_CODE (p) == ADDR_EXPR)
            {
              tree q = TREE_OPERAND (p, 0);
              HOST_WIDE_INT offset;
              tree base = get_addr_base_and_unit_offset (q, &offset);
              if (base)
                {
                  q = base;
                  if (offset)
                    off = size_binop (PLUS_EXPR, off, size_int (offset));
                }
              if (TREE_CODE (q) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
                {
                  p = TREE_OPERAND (q, 0);
                  off = size_binop (PLUS_EXPR, off,
                                    double_int_to_tree (sizetype,
                                                        mem_ref_offset (q)));
                }
              else
                {
                  exps[i][j] = q;
                  offs[i][j++] = off;
                  break;
                }
            }
          if (TREE_CODE (p) != SSA_NAME)
            break;
          exps[i][j] = p;
          offs[i][j++] = off;
          if (j == CPD_ITERATIONS)
            break;
          stmt = SSA_NAME_DEF_STMT (p);
          if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
            break;
          code = gimple_assign_rhs_code (stmt);
          if (code == POINTER_PLUS_EXPR)
            {
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
                break;
              off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
              p = gimple_assign_rhs1 (stmt);
            }
          else if (code == ADDR_EXPR || code == NOP_EXPR)
            p = gimple_assign_rhs1 (stmt);
          else
            break;
        }
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
        return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
   memcpy (p, "abcd", 4);
   memset (p + 4, ' ', 3);
   into
   memcpy (p, "abcd   ", 7);
   call if the latter can be stored by pieces during expansion.  */
static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
          || gimple_call_lhs (stmt2)
          || CHAR_BIT != 8
          || BITS_PER_UNIT != 8)
        break;
      else
        {
          tree callee1;
          tree ptr1, src1, str1, off1, len1, lhs1;
          tree ptr2 = gimple_call_arg (stmt2, 0);
          tree val2 = gimple_call_arg (stmt2, 1);
          tree len2 = gimple_call_arg (stmt2, 2);
          tree diff, vdef, new_str_cst;
          gimple use_stmt;
          unsigned int ptr1_align;
          unsigned HOST_WIDE_INT src_len;
          char *src_buf;
          use_operand_p use_p;

          if (!host_integerp (val2, 0)
              || !host_integerp (len2, 1))
            break;
          if (is_gimple_call (stmt1))
            {
              /* If first stmt is a call, it needs to be memcpy
                 or mempcpy, with string literal as second argument and
                 constant length.  */
              callee1 = gimple_call_fndecl (stmt1);
              if (callee1 == NULL_TREE
                  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
                  || gimple_call_num_args (stmt1) != 3)
                break;
              if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
                  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
                break;
              ptr1 = gimple_call_arg (stmt1, 0);
              src1 = gimple_call_arg (stmt1, 1);
              len1 = gimple_call_arg (stmt1, 2);
              lhs1 = gimple_call_lhs (stmt1);
              if (!host_integerp (len1, 1))
                break;
              str1 = string_constant (src1, &off1);
              if (str1 == NULL_TREE)
                break;
              if (!host_integerp (off1, 1)
                  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
                  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
                                             - tree_low_cst (off1, 1)) > 0
                  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
                  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
                     != TYPE_MODE (char_type_node))
                break;
            }
          else if (gimple_assign_single_p (stmt1))
            {
              /* Otherwise look for length 1 memcpy optimized into
                 assignment.  */
              ptr1 = gimple_assign_lhs (stmt1);
              src1 = gimple_assign_rhs1 (stmt1);
              if (TREE_CODE (ptr1) != MEM_REF
                  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
                  || !host_integerp (src1, 0))
                break;
              ptr1 = build_fold_addr_expr (ptr1);
              callee1 = NULL_TREE;
              len1 = size_one_node;
              lhs1 = NULL_TREE;
              off1 = size_zero_node;
              str1 = NULL_TREE;
            }
          else
            break;

          diff = constant_pointer_difference (ptr1, ptr2);
          if (diff == NULL && lhs1 != NULL)
            {
              diff = constant_pointer_difference (lhs1, ptr2);
              if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
                  && diff != NULL)
                diff = size_binop (PLUS_EXPR, diff,
                                   fold_convert (sizetype, len1));
            }
          /* If the difference between the second and first destination pointer
             is not constant, or is bigger than memcpy length, bail out.  */
          if (diff == NULL
              || !host_integerp (diff, 1)
              || tree_int_cst_lt (len1, diff))
            break;

          /* Use maximum of difference plus memset length and memcpy length
             as the new memcpy length, if it is too big, bail out.  */
          src_len = tree_low_cst (diff, 1);
          src_len += tree_low_cst (len2, 1);
          if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
            src_len = tree_low_cst (len1, 1);
          if (src_len > 1024)
            break;

          /* If mempcpy value is used elsewhere, bail out, as mempcpy
             with bigger length will return different result.  */
          if (lhs1 != NULL_TREE
              && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
              && (TREE_CODE (lhs1) != SSA_NAME
                  || !single_imm_use (lhs1, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          /* If anything reads memory in between memcpy and memset
             call, the modified memcpy call might change it.  */
          vdef = gimple_vdef (stmt1);
          if (vdef != NULL
              && (!single_imm_use (vdef, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          ptr1_align = get_pointer_alignment (ptr1);
          /* Construct the new source string literal.  */
          src_buf = XALLOCAVEC (char, src_len + 1);
          if (callee1)
            memcpy (src_buf,
                    TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
                    tree_low_cst (len1, 1));
          else
            src_buf[0] = tree_low_cst (src1, 0);
          memset (src_buf + tree_low_cst (diff, 1),
                  tree_low_cst (val2, 1), tree_low_cst (len2, 1));
          src_buf[src_len] = '\0';
          /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
             handle embedded '\0's.  */
          if (strlen (src_buf) != src_len)
            break;
          rtl_profile_for_bb (gimple_bb (stmt2));
          /* If the new memcpy wouldn't be emitted by storing the literal
             by pieces, this optimization might enlarge .rodata too much,
             as commonly used string literals couldn't be shared any
             longer.  */
          if (!can_store_by_pieces (src_len,
                                    builtin_strncpy_read_str,
                                    src_buf, ptr1_align, false))
            break;

          new_str_cst = build_string_literal (src_len, src_buf);
          if (callee1)
            {
              /* If STMT1 is a mem{,p}cpy call, adjust it and remove
                 memset call.  */
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                gimple_call_set_lhs (stmt1, NULL_TREE);
              gimple_call_set_arg (stmt1, 1, new_str_cst);
              gimple_call_set_arg (stmt1, 2,
                                   build_int_cst (TREE_TYPE (len1), src_len));
              update_stmt (stmt1);
              unlink_stmt_vdef (stmt2);
              gsi_remove (gsi_p, true);
              release_defs (stmt2);
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                release_ssa_name (lhs1);
              return true;
            }
          else
            {
              /* Otherwise, if STMT1 is length 1 memcpy optimized into
                 assignment, remove STMT1 and change memset call into
                 memcpy call.  */
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

              if (!is_gimple_val (ptr1))
                ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
                                                 true, GSI_SAME_STMT);
              gimple_call_set_fndecl (stmt2,
                                      builtin_decl_explicit (BUILT_IN_MEMCPY));
              gimple_call_set_arg (stmt2, 0, ptr1);
              gimple_call_set_arg (stmt2, 1, new_str_cst);
              gimple_call_set_arg (stmt2, 2,
                                   build_int_cst (TREE_TYPE (len2), src_len));
              unlink_stmt_vdef (stmt1);
              gsi_remove (&gsi, true);
              release_defs (stmt1);
              update_stmt (stmt2);
              return false;
            }
        }
      break;
    default:
      break;
    }
  return false;
}
/* Checks if expression has type of one-bit precision, or is a known
   truth-valued expression.  */
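/* E.g. (illustration only): an SSA name defined as t_1 = a_2 <= b_3 is
   truth-valued even if its type is a full-width int, because the
   defining code is a comparison; a plain int loaded from memory is
   not.  */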
static bool
truth_valued_ssa_name (tree name)
{
  gimple def;
  tree type = TREE_TYPE (name);

  if (!INTEGRAL_TYPE_P (type))
    return false;
  /* Don't check here for BOOLEAN_TYPE as the precision isn't
     necessarily one and so ~X is not equal to !X.  */
  if (TYPE_PRECISION (type) == 1)
    return true;
  def = SSA_NAME_DEF_STMT (name);
  if (is_gimple_assign (def))
    return truth_value_p (gimple_assign_rhs_code (def));
  return false;
}
/* Helper routine for simplify_bitwise_binary_1 function.
   Return for the SSA name NAME the expression X if it meets the condition
   NAME = !X.  Otherwise return NULL_TREE.
   Detected patterns for NAME = !X are:
     !X and X == 0 for X with integral type.
     X ^ 1, X != 1, or ~X for X with integral type with precision of one.  */
static tree
lookup_logical_inverted_value (tree name)
{
  tree op1, op2;
  enum tree_code code;
  gimple def;

  /* If name has a non-integral type, or isn't an SSA_NAME, then
     return.  */
  if (TREE_CODE (name) != SSA_NAME
      || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
    return NULL_TREE;
  def = SSA_NAME_DEF_STMT (name);
  if (!is_gimple_assign (def))
    return NULL_TREE;

  code = gimple_assign_rhs_code (def);
  op1 = gimple_assign_rhs1 (def);
  op2 = NULL_TREE;

  /* Get for EQ_EXPR or BIT_XOR_EXPR operation the second operand.
     If CODE isn't an EQ_EXPR, BIT_XOR_EXPR, or BIT_NOT_EXPR, then return.  */
  if (code == EQ_EXPR || code == NE_EXPR
      || code == BIT_XOR_EXPR)
    op2 = gimple_assign_rhs2 (def);

  switch (code)
    {
    case BIT_NOT_EXPR:
      if (truth_valued_ssa_name (name))
        return op1;
      break;
    case EQ_EXPR:
      /* Check if we have X == 0 and X has an integral type.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
        break;
      if (integer_zerop (op2))
        return op1;
      break;
    case NE_EXPR:
      /* Check if we have X != 1 and X is truth-valued.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
        break;
      if (integer_onep (op2) && truth_valued_ssa_name (op1))
        return op1;
      break;
    case BIT_XOR_EXPR:
      /* Check if we have X ^ 1 and X is truth valued.  */
      if (integer_onep (op2) && truth_valued_ssa_name (op1))
        return op1;
      break;
    default:
      break;
    }

  return NULL_TREE;
}
/* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
   operations CODE, if one operand has the logically inverted
   value of the other.  */
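/* Sketch (illustration only): with b_1 = (a_2 == 0) the expression
   a_2 & b_1 simplifies to 0, and for truth-valued a_2 the expressions
   a_2 | b_1 and a_2 ^ b_1 simplify to 1.  */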
static tree
simplify_bitwise_binary_1 (enum tree_code code, tree type,
                           tree arg1, tree arg2)
{
  tree anot;

  /* If CODE isn't a bitwise binary operation, return NULL_TREE.  */
  if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
      && code != BIT_XOR_EXPR)
    return NULL_TREE;

  /* First check if operands ARG1 and ARG2 are equal.  If so
     return NULL_TREE as this optimization is handled by fold_stmt.  */
  if (arg1 == arg2)
    return NULL_TREE;
  /* See if we have in arguments logical-not patterns.  */
  if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
       || anot != arg2)
      && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
          || anot != arg1))
    return NULL_TREE;

  /* X & !X -> 0.  */
  if (code == BIT_AND_EXPR)
    return fold_convert (type, integer_zero_node);
  /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued.  */
  if (truth_valued_ssa_name (anot))
    return fold_convert (type, integer_one_node);

  /* ??? Otherwise result is (X != 0 ? X : 1).  Not handled.  */
  return NULL_TREE;
}
1738 /* Simplify bitwise binary operations.
1739 Return true if a transformation applied, otherwise return false. */
1741 static bool
1742 simplify_bitwise_binary (gimple_stmt_iterator *gsi)
1744 gimple stmt = gsi_stmt (*gsi);
1745 tree arg1 = gimple_assign_rhs1 (stmt);
1746 tree arg2 = gimple_assign_rhs2 (stmt);
1747 enum tree_code code = gimple_assign_rhs_code (stmt);
1748 tree res;
1749 gimple def1 = NULL, def2 = NULL;
1750 tree def1_arg1, def2_arg1;
1751 enum tree_code def1_code, def2_code;
1753 def1_code = TREE_CODE (arg1);
1754 def1_arg1 = arg1;
1755 if (TREE_CODE (arg1) == SSA_NAME)
1757 def1 = SSA_NAME_DEF_STMT (arg1);
1758 if (is_gimple_assign (def1))
1760 def1_code = gimple_assign_rhs_code (def1);
1761 def1_arg1 = gimple_assign_rhs1 (def1);
1765 def2_code = TREE_CODE (arg2);
1766 def2_arg1 = arg2;
1767 if (TREE_CODE (arg2) == SSA_NAME)
1769 def2 = SSA_NAME_DEF_STMT (arg2);
1770 if (is_gimple_assign (def2))
1772 def2_code = gimple_assign_rhs_code (def2);
1773 def2_arg1 = gimple_assign_rhs1 (def2);
1777 /* Try to fold (type) X op CST -> (type) (X op ((type-x) CST)). */
1778 if (TREE_CODE (arg2) == INTEGER_CST
1779 && CONVERT_EXPR_CODE_P (def1_code)
1780 && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
1781 && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
1783 gimple newop;
1784 tree tem = create_tmp_reg (TREE_TYPE (def1_arg1), NULL);
1785 newop =
1786 gimple_build_assign_with_ops (code, tem, def1_arg1,
1787 fold_convert_loc (gimple_location (stmt),
1788 TREE_TYPE (def1_arg1),
1789 arg2));
1790 tem = make_ssa_name (tem, newop);
1791 gimple_assign_set_lhs (newop, tem);
1792 gimple_set_location (newop, gimple_location (stmt));
1793 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1794 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1795 tem, NULL_TREE, NULL_TREE);
1796 update_stmt (gsi_stmt (*gsi));
1797 return true;
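/* An illustrative sketch of the fold above (hypothetical GIMPLE,
   unsigned char c_2, where the constant 15 fits the narrower type):

     i_1 = (int) c_2;
     r_3 = i_1 & 15;

   becomes

     tem_4 = c_2 & 15;
     r_3 = (int) tem_4;  */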
1800 /* For bitwise binary operations apply operand conversions to the
1801 binary operation result instead of to the operands. This allows
1802 combining successive conversions and bitwise binary operations. */
1803 if (CONVERT_EXPR_CODE_P (def1_code)
1804 && CONVERT_EXPR_CODE_P (def2_code)
1805 && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
1806 /* Make sure that the conversion widens the operands, or has the
1807 same precision, or that it changes the operation to a bitfield
1808 precision. */
1809 && ((TYPE_PRECISION (TREE_TYPE (def1_arg1))
1810 <= TYPE_PRECISION (TREE_TYPE (arg1)))
1811 || (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (arg1)))
1812 != MODE_INT)
1813 || (TYPE_PRECISION (TREE_TYPE (arg1))
1814 != GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg1))))))
1816 gimple newop;
1817 tree tem = create_tmp_reg (TREE_TYPE (def1_arg1),
1818 NULL);
1819 newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
1820 tem = make_ssa_name (tem, newop);
1821 gimple_assign_set_lhs (newop, tem);
1822 gimple_set_location (newop, gimple_location (stmt));
1823 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1824 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1825 tem, NULL_TREE, NULL_TREE);
1826 update_stmt (gsi_stmt (*gsi));
1827 return true;
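/* An illustrative sketch (hypothetical GIMPLE, unsigned char a_2, b_4):

     i_1 = (int) a_2;
     j_3 = (int) b_4;
     r_5 = i_1 | j_3;

   becomes

     tem_6 = a_2 | b_4;
     r_5 = (int) tem_6;

   exposing further combinations in the narrower type.  */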
1830 /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2). */
1831 if (code == BIT_AND_EXPR
1832 && def1_code == BIT_IOR_EXPR
1833 && TREE_CODE (arg2) == INTEGER_CST
1834 && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
1836 tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
1837 arg2, gimple_assign_rhs2 (def1));
1838 tree tem;
1839 gimple newop;
1840 if (integer_zerop (cst))
1842 gimple_assign_set_rhs1 (stmt, def1_arg1);
1843 update_stmt (stmt);
1844 return true;
1846 tem = create_tmp_reg (TREE_TYPE (arg2), NULL);
1847 newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
1848 tem, def1_arg1, arg2);
1849 tem = make_ssa_name (tem, newop);
1850 gimple_assign_set_lhs (newop, tem);
1851 gimple_set_location (newop, gimple_location (stmt));
1852 /* Make sure to re-process the new stmt, as this pass walks upwards. */
1853 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
1854 gimple_assign_set_rhs1 (stmt, tem);
1855 gimple_assign_set_rhs2 (stmt, cst);
1856 gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
1857 update_stmt (stmt);
1858 return true;
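/* An illustrative sketch (hypothetical GIMPLE):

     t_1 = a_2 | 0xf0;
     r_3 = t_1 & 0x3c;

   folds the constant part to 0xf0 & 0x3c = 0x30 and becomes

     tem_4 = a_2 & 0x3c;
     r_3 = tem_4 | 0x30;

   had the folded constant been zero, r_3 = a_2 & 0x3c directly.  */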
1861 /* Combine successive equal operations with constants. */
1862 if ((code == BIT_AND_EXPR
1863 || code == BIT_IOR_EXPR
1864 || code == BIT_XOR_EXPR)
1865 && def1_code == code
1866 && TREE_CODE (arg2) == INTEGER_CST
1867 && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
1869 tree cst = fold_build2 (code, TREE_TYPE (arg2),
1870 arg2, gimple_assign_rhs2 (def1));
1871 gimple_assign_set_rhs1 (stmt, def1_arg1);
1872 gimple_assign_set_rhs2 (stmt, cst);
1873 update_stmt (stmt);
1874 return true;
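/* An illustrative sketch (hypothetical GIMPLE):

     t_1 = a_2 & 0xff;
     r_3 = t_1 & 0x0f;

   collapses to r_3 = a_2 & 0x0f; IOR and XOR chains fold likewise.  */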
1877 /* Canonicalize X ^ ~0 to ~X. */
1878 if (code == BIT_XOR_EXPR
1879 && TREE_CODE (arg2) == INTEGER_CST
1880 && integer_all_onesp (arg2))
1882 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
1883 gcc_assert (gsi_stmt (*gsi) == stmt);
1884 update_stmt (stmt);
1885 return true;
1888 /* Try simple folding for X op !X, and X op X. */
1889 res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
1890 if (res != NULL_TREE)
1892 gimple_assign_set_rhs_from_tree (gsi, res);
1893 update_stmt (gsi_stmt (*gsi));
1894 return true;
1897 return false;
1901 /* Perform re-associations of the plus or minus statement STMT that are
1902 always permitted. Returns true if the CFG was changed. */
1904 static bool
1905 associate_plusminus (gimple_stmt_iterator *gsi)
1907 gimple stmt = gsi_stmt (*gsi);
1908 tree rhs1 = gimple_assign_rhs1 (stmt);
1909 tree rhs2 = gimple_assign_rhs2 (stmt);
1910 enum tree_code code = gimple_assign_rhs_code (stmt);
1911 bool changed;
1913 /* We can't reassociate at all for saturating types. */
1914 if (TYPE_SATURATING (TREE_TYPE (rhs1)))
1915 return false;
1917 /* First, contract negates. */
1920 changed = false;
1922 /* A +- (-B) -> A -+ B. */
1923 if (TREE_CODE (rhs2) == SSA_NAME)
1925 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
1926 if (is_gimple_assign (def_stmt)
1927 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
1928 && can_propagate_from (def_stmt))
1930 code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
1931 gimple_assign_set_rhs_code (stmt, code);
1932 rhs2 = gimple_assign_rhs1 (def_stmt);
1933 gimple_assign_set_rhs2 (stmt, rhs2);
1934 gimple_set_modified (stmt, true);
1935 changed = true;
1939 /* (-A) + B -> B - A. */
1940 if (TREE_CODE (rhs1) == SSA_NAME
1941 && code == PLUS_EXPR)
1943 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
1944 if (is_gimple_assign (def_stmt)
1945 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
1946 && can_propagate_from (def_stmt))
1948 code = MINUS_EXPR;
1949 gimple_assign_set_rhs_code (stmt, code);
1950 rhs1 = rhs2;
1951 gimple_assign_set_rhs1 (stmt, rhs1);
1952 rhs2 = gimple_assign_rhs1 (def_stmt);
1953 gimple_assign_set_rhs2 (stmt, rhs2);
1954 gimple_set_modified (stmt, true);
1955 changed = true;
1959 while (changed);
1961 /* We can't reassociate floating-point or fixed-point plus or minus
1962 because of saturation to +-Inf. */
1963 if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
1964 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
1965 goto out;
1967 /* Second, match patterns that allow contracting a plus-minus pair
1968 irrespective of overflow issues.
1970 (A +- B) - A -> +- B
1971 (A +- B) -+ B -> A
1972 (CST +- A) +- CST -> CST +- A
1973 (A + CST) +- CST -> A + CST
1974 ~A + A -> -1
1975 ~A + 1 -> -A
1976 A - (A +- B) -> -+ B
1977 A +- (B +- A) -> +- B
1978 CST +- (CST +- A) -> CST +- A
1979 CST +- (A +- CST) -> CST +- A
1980 A + ~A -> -1
1982 via commuting the addition and contracting operations to zero
1983 by reassociation; a sketch of the first pattern follows below. */
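/* An illustrative sketch of the first pattern (hypothetical GIMPLE):

     x_1 = a_2 + b_3;
     r_4 = x_1 - a_2;

   is rewritten to r_4 = b_3; the remaining patterns contract in the
   same spirit.  */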
1985 if (TREE_CODE (rhs1) == SSA_NAME)
1987 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
1988 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
1990 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
1991 if (def_code == PLUS_EXPR
1992 || def_code == MINUS_EXPR)
1994 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
1995 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
1996 if (operand_equal_p (def_rhs1, rhs2, 0)
1997 && code == MINUS_EXPR)
1999 /* (A +- B) - A -> +- B. */
2000 code = ((def_code == PLUS_EXPR)
2001 ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
2002 rhs1 = def_rhs2;
2003 rhs2 = NULL_TREE;
2004 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2005 gcc_assert (gsi_stmt (*gsi) == stmt);
2006 gimple_set_modified (stmt, true);
2008 else if (operand_equal_p (def_rhs2, rhs2, 0)
2009 && code != def_code)
2011 /* (A +- B) -+ B -> A. */
2012 code = TREE_CODE (def_rhs1);
2013 rhs1 = def_rhs1;
2014 rhs2 = NULL_TREE;
2015 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2016 gcc_assert (gsi_stmt (*gsi) == stmt);
2017 gimple_set_modified (stmt, true);
2019 else if (TREE_CODE (rhs2) == INTEGER_CST
2020 && TREE_CODE (def_rhs1) == INTEGER_CST)
2022 /* (CST +- A) +- CST -> CST +- A. */
2023 tree cst = fold_binary (code, TREE_TYPE (rhs1),
2024 def_rhs1, rhs2);
2025 if (cst && !TREE_OVERFLOW (cst))
2027 code = def_code;
2028 gimple_assign_set_rhs_code (stmt, code);
2029 rhs1 = cst;
2030 gimple_assign_set_rhs1 (stmt, rhs1);
2031 rhs2 = def_rhs2;
2032 gimple_assign_set_rhs2 (stmt, rhs2);
2033 gimple_set_modified (stmt, true);
2036 else if (TREE_CODE (rhs2) == INTEGER_CST
2037 && TREE_CODE (def_rhs2) == INTEGER_CST
2038 && def_code == PLUS_EXPR)
2040 /* (A + CST) +- CST -> A + CST. */
2041 tree cst = fold_binary (code, TREE_TYPE (rhs1),
2042 def_rhs2, rhs2);
2043 if (cst && !TREE_OVERFLOW (cst))
2045 code = PLUS_EXPR;
2046 gimple_assign_set_rhs_code (stmt, code);
2047 rhs1 = def_rhs1;
2048 gimple_assign_set_rhs1 (stmt, rhs1);
2049 rhs2 = cst;
2050 gimple_assign_set_rhs2 (stmt, rhs2);
2051 gimple_set_modified (stmt, true);
2055 else if (def_code == BIT_NOT_EXPR
2056 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
2058 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2059 if (code == PLUS_EXPR
2060 && operand_equal_p (def_rhs1, rhs2, 0))
2062 /* ~A + A -> -1. */
2063 code = INTEGER_CST;
2064 rhs1 = build_int_cst_type (TREE_TYPE (rhs2), -1);
2065 rhs2 = NULL_TREE;
2066 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2067 gcc_assert (gsi_stmt (*gsi) == stmt);
2068 gimple_set_modified (stmt, true);
2070 else if (code == PLUS_EXPR
2071 && integer_onep (rhs1))
2073 /* ~A + 1 -> -A. */
2074 code = NEGATE_EXPR;
2075 rhs1 = def_rhs1;
2076 rhs2 = NULL_TREE;
2077 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2078 gcc_assert (gsi_stmt (*gsi) == stmt);
2079 gimple_set_modified (stmt, true);
2085 if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
2087 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2088 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2090 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2091 if (def_code == PLUS_EXPR
2092 || def_code == MINUS_EXPR)
2094 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2095 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2096 if (operand_equal_p (def_rhs1, rhs1, 0)
2097 && code == MINUS_EXPR)
2099 /* A - (A +- B) -> -+ B. */
2100 code = ((def_code == PLUS_EXPR)
2101 ? NEGATE_EXPR : TREE_CODE (def_rhs2));
2102 rhs1 = def_rhs2;
2103 rhs2 = NULL_TREE;
2104 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2105 gcc_assert (gsi_stmt (*gsi) == stmt);
2106 gimple_set_modified (stmt, true);
2108 else if (operand_equal_p (def_rhs2, rhs1, 0)
2109 && code != def_code)
2111 /* A +- (B +- A) -> +- B. */
2112 code = ((code == PLUS_EXPR)
2113 ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
2114 rhs1 = def_rhs1;
2115 rhs2 = NULL_TREE;
2116 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2117 gcc_assert (gsi_stmt (*gsi) == stmt);
2118 gimple_set_modified (stmt, true);
2120 else if (TREE_CODE (rhs1) == INTEGER_CST
2121 && TREE_CODE (def_rhs1) == INTEGER_CST)
2123 /* CST +- (CST +- A) -> CST +- A. */
2124 tree cst = fold_binary (code, TREE_TYPE (rhs2),
2125 rhs1, def_rhs1);
2126 if (cst && !TREE_OVERFLOW (cst))
2128 code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
2129 gimple_assign_set_rhs_code (stmt, code);
2130 rhs1 = cst;
2131 gimple_assign_set_rhs1 (stmt, rhs1);
2132 rhs2 = def_rhs2;
2133 gimple_assign_set_rhs2 (stmt, rhs2);
2134 gimple_set_modified (stmt, true);
2137 else if (TREE_CODE (rhs1) == INTEGER_CST
2138 && TREE_CODE (def_rhs2) == INTEGER_CST)
2140 /* CST +- (A +- CST) -> CST +- A. */
2141 tree cst = fold_binary (def_code == code
2142 ? PLUS_EXPR : MINUS_EXPR,
2143 TREE_TYPE (rhs2),
2144 rhs1, def_rhs2);
2145 if (cst && !TREE_OVERFLOW (cst))
2147 rhs1 = cst;
2148 gimple_assign_set_rhs1 (stmt, rhs1);
2149 rhs2 = def_rhs1;
2150 gimple_assign_set_rhs2 (stmt, rhs2);
2151 gimple_set_modified (stmt, true);
2155 else if (def_code == BIT_NOT_EXPR
2156 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
2158 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2159 if (code == PLUS_EXPR
2160 && operand_equal_p (def_rhs1, rhs1, 0))
2162 /* A + ~A -> -1. */
2163 code = INTEGER_CST;
2164 rhs1 = build_int_cst_type (TREE_TYPE (rhs1), -1);
2165 rhs2 = NULL_TREE;
2166 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2167 gcc_assert (gsi_stmt (*gsi) == stmt);
2168 gimple_set_modified (stmt, true);
2174 out:
2175 if (gimple_modified_p (stmt))
2177 fold_stmt_inplace (gsi);
2178 update_stmt (stmt);
2179 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
2180 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2181 return true;
2184 return false;
2187 /* Combine two conversions in a row for the second conversion at *GSI.
2188 Returns 1 if any changes were made, 2 if cfg-cleanup needs to
2189 run, and 0 otherwise. */
2191 static int
2192 combine_conversions (gimple_stmt_iterator *gsi)
2194 gimple stmt = gsi_stmt (*gsi);
2195 gimple def_stmt;
2196 tree op0, lhs;
2197 enum tree_code code = gimple_assign_rhs_code (stmt);
2199 gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
2200 || code == FLOAT_EXPR
2201 || code == FIX_TRUNC_EXPR);
2203 lhs = gimple_assign_lhs (stmt);
2204 op0 = gimple_assign_rhs1 (stmt);
2205 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
2207 gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
2208 return 1;
2211 if (TREE_CODE (op0) != SSA_NAME)
2212 return 0;
2214 def_stmt = SSA_NAME_DEF_STMT (op0);
2215 if (!is_gimple_assign (def_stmt))
2216 return 0;
2218 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
2220 tree defop0 = gimple_assign_rhs1 (def_stmt);
2221 tree type = TREE_TYPE (lhs);
2222 tree inside_type = TREE_TYPE (defop0);
2223 tree inter_type = TREE_TYPE (op0);
2224 int inside_int = INTEGRAL_TYPE_P (inside_type);
2225 int inside_ptr = POINTER_TYPE_P (inside_type);
2226 int inside_float = FLOAT_TYPE_P (inside_type);
2227 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
2228 unsigned int inside_prec = TYPE_PRECISION (inside_type);
2229 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
2230 int inter_int = INTEGRAL_TYPE_P (inter_type);
2231 int inter_ptr = POINTER_TYPE_P (inter_type);
2232 int inter_float = FLOAT_TYPE_P (inter_type);
2233 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
2234 unsigned int inter_prec = TYPE_PRECISION (inter_type);
2235 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
2236 int final_int = INTEGRAL_TYPE_P (type);
2237 int final_ptr = POINTER_TYPE_P (type);
2238 int final_float = FLOAT_TYPE_P (type);
2239 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
2240 unsigned int final_prec = TYPE_PRECISION (type);
2241 int final_unsignedp = TYPE_UNSIGNED (type);
2243 /* In addition to the cases of two conversions in a row
2244 handled below, if we are converting something to its own
2245 type via an object of identical or wider precision, neither
2246 conversion is needed. */
2247 if (useless_type_conversion_p (type, inside_type)
2248 && (((inter_int || inter_ptr) && final_int)
2249 || (inter_float && final_float))
2250 && inter_prec >= final_prec)
2252 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2253 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2254 update_stmt (stmt);
2255 return remove_prop_source_from_use (op0) ? 2 : 1;
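/* An illustrative sketch (hypothetical GIMPLE, int i_2, assuming
   long is wider than int):

     l_1 = (long) i_2;
     r_3 = (int) l_1;

   converts i_2 back to its own type through a wider one, so both
   conversions drop and r_3 = i_2.  */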
2258 /* Likewise, if the intermediate and initial types are either both
2259 float or both integer, we don't need the middle conversion if the
2260 former is wider than the latter and doesn't change the signedness
2261 (for integers). Avoid this if the final type is a pointer since
2262 then we sometimes need the middle conversion. Likewise if the
2263 final type has a precision not equal to the size of its mode. */
2264 if (((inter_int && inside_int)
2265 || (inter_float && inside_float)
2266 || (inter_vec && inside_vec))
2267 && inter_prec >= inside_prec
2268 && (inter_float || inter_vec
2269 || inter_unsignedp == inside_unsignedp)
2270 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
2271 && TYPE_MODE (type) == TYPE_MODE (inter_type))
2272 && ! final_ptr
2273 && (! final_vec || inter_prec == inside_prec))
2275 gimple_assign_set_rhs1 (stmt, defop0);
2276 update_stmt (stmt);
2277 return remove_prop_source_from_use (op0) ? 2 : 1;
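/* An illustrative sketch (hypothetical GIMPLE, signed char c_2,
   assuming short is wider than char):

     s_1 = (short) c_2;
     l_3 = (int) s_1;

   drops the middle conversion and becomes l_3 = (int) c_2.  */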
2280 /* If we have a sign-extension of a zero-extended value, we can
2281 replace that by a single zero-extension. */
2282 if (inside_int && inter_int && final_int
2283 && inside_prec < inter_prec && inter_prec < final_prec
2284 && inside_unsignedp && !inter_unsignedp)
2286 gimple_assign_set_rhs1 (stmt, defop0);
2287 update_stmt (stmt);
2288 return remove_prop_source_from_use (op0) ? 2 : 1;
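/* An illustrative sketch (hypothetical GIMPLE, unsigned char c_2):

     s_1 = (short) c_2;   <- zero-extends 8 to 16 bits
     i_3 = (int) s_1;     <- sign-extends 16 to 32 bits

   the sign bit of s_1 is always clear, so this is the single
   zero-extension i_3 = (int) c_2.  */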
2291 /* Two conversions in a row are not needed unless:
2292 - some conversion is floating-point (overstrict for now), or
2293 - some conversion is a vector (overstrict for now), or
2294 - the intermediate type is narrower than both initial and
2295 final, or
2296 - the intermediate type and innermost type differ in signedness,
2297 and the outermost type is wider than the intermediate, or
2298 - the initial type is a pointer type and the precisions of the
2299 intermediate and final types differ, or
2300 - the final type is a pointer type and the precisions of the
2301 initial and intermediate types differ. */
2302 if (! inside_float && ! inter_float && ! final_float
2303 && ! inside_vec && ! inter_vec && ! final_vec
2304 && (inter_prec >= inside_prec || inter_prec >= final_prec)
2305 && ! (inside_int && inter_int
2306 && inter_unsignedp != inside_unsignedp
2307 && inter_prec < final_prec)
2308 && ((inter_unsignedp && inter_prec > inside_prec)
2309 == (final_unsignedp && final_prec > inter_prec))
2310 && ! (inside_ptr && inter_prec != final_prec)
2311 && ! (final_ptr && inside_prec != inter_prec)
2312 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
2313 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
2315 gimple_assign_set_rhs1 (stmt, defop0);
2316 update_stmt (stmt);
2317 return remove_prop_source_from_use (op0) ? 2 : 1;
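/* An illustrative sketch (hypothetical GIMPLE, unsigned short u_2,
   with 16-bit short and 32-bit int):

     i_1 = (int) u_2;
     r_3 = (unsigned int) i_1;

   none of the exceptions above applies, so the middle conversion is
   dropped and r_3 = (unsigned int) u_2.  */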
2320 /* A truncation to an unsigned type should be canonicalized as
2321 a bitwise AND with a mask. */
2322 if (final_int && inter_int && inside_int
2323 && final_prec == inside_prec
2324 && final_prec > inter_prec
2325 && inter_unsignedp)
2327 tree tem;
2328 tem = fold_build2 (BIT_AND_EXPR, inside_type,
2329 defop0,
2330 double_int_to_tree
2331 (inside_type, double_int_mask (inter_prec)));
2332 if (!useless_type_conversion_p (type, inside_type))
2334 tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
2335 GSI_SAME_STMT);
2336 gimple_assign_set_rhs1 (stmt, tem);
2338 else
2339 gimple_assign_set_rhs_from_tree (gsi, tem);
2340 update_stmt (gsi_stmt (*gsi));
2341 return 1;
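/* An illustrative sketch (hypothetical GIMPLE, int x_2, 16-bit
   unsigned short):

     u_1 = (unsigned short) x_2;
     r_3 = (int) u_1;

   is canonicalized to the mask form r_3 = x_2 & 65535.  */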
2345 return 0;
2348 /* Main entry point for the forward propagation and statement combine
2349 optimizer. */
2351 static unsigned int
2352 ssa_forward_propagate_and_combine (void)
2354 basic_block bb;
2355 unsigned int todoflags = 0;
2357 cfg_changed = false;
2359 FOR_EACH_BB (bb)
2361 gimple_stmt_iterator gsi, prev;
2362 bool prev_initialized;
2364 /* Apply forward propagation to all stmts in the basic-block.
2365 Note we update GSI within the loop as necessary. */
2366 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
2368 gimple stmt = gsi_stmt (gsi);
2369 tree lhs, rhs;
2370 enum tree_code code;
2372 if (!is_gimple_assign (stmt))
2374 gsi_next (&gsi);
2375 continue;
2378 lhs = gimple_assign_lhs (stmt);
2379 rhs = gimple_assign_rhs1 (stmt);
2380 code = gimple_assign_rhs_code (stmt);
2381 if (TREE_CODE (lhs) != SSA_NAME
2382 || has_zero_uses (lhs))
2384 gsi_next (&gsi);
2385 continue;
2388 /* If this statement sets an SSA_NAME to an address,
2389 try to propagate the address into the uses of the SSA_NAME. */
2390 if (code == ADDR_EXPR
2391 /* Handle pointer conversions on invariant addresses
2392 as well, as this is valid gimple. */
2393 || (CONVERT_EXPR_CODE_P (code)
2394 && TREE_CODE (rhs) == ADDR_EXPR
2395 && POINTER_TYPE_P (TREE_TYPE (lhs))))
2397 tree base = get_base_address (TREE_OPERAND (rhs, 0));
2398 if ((!base
2399 || !DECL_P (base)
2400 || decl_address_invariant_p (base))
2401 && !stmt_references_abnormal_ssa_name (stmt)
2402 && forward_propagate_addr_expr (lhs, rhs))
2404 release_defs (stmt);
2405 todoflags |= TODO_remove_unused_locals;
2406 gsi_remove (&gsi, true);
2408 else
2409 gsi_next (&gsi);
2411 else if (code == POINTER_PLUS_EXPR)
2413 tree off = gimple_assign_rhs2 (stmt);
2414 if (TREE_CODE (off) == INTEGER_CST
2415 && can_propagate_from (stmt)
2416 && !simple_iv_increment_p (stmt)
2417 /* ??? Better adjust the interface to that function
2418 instead of building new trees here. */
2419 && forward_propagate_addr_expr
2420 (lhs,
2421 build1_loc (gimple_location (stmt),
2422 ADDR_EXPR, TREE_TYPE (rhs),
2423 fold_build2 (MEM_REF,
2424 TREE_TYPE (TREE_TYPE (rhs)),
2425 rhs,
2426 fold_convert (ptr_type_node,
2427 off)))))
2429 release_defs (stmt);
2430 todoflags |= TODO_remove_unused_locals;
2431 gsi_remove (&gsi, true);
2433 else if (is_gimple_min_invariant (rhs))
2435 /* Make sure to fold &a[0] + off_1 here. */
2436 fold_stmt_inplace (&gsi);
2437 update_stmt (stmt);
2438 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2439 gsi_next (&gsi);
2441 else
2442 gsi_next (&gsi);
2444 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2446 if (forward_propagate_comparison (stmt))
2447 cfg_changed = true;
2448 gsi_next (&gsi);
2450 else
2451 gsi_next (&gsi);
2454 /* Combine stmts with the stmts defining their operands.
2455 Note we update GSI within the loop as necessary. */
2456 prev_initialized = false;
2457 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
2459 gimple stmt = gsi_stmt (gsi);
2460 bool changed = false;
2462 switch (gimple_code (stmt))
2464 case GIMPLE_ASSIGN:
2466 tree rhs1 = gimple_assign_rhs1 (stmt);
2467 enum tree_code code = gimple_assign_rhs_code (stmt);
2469 if ((code == BIT_NOT_EXPR
2470 || code == NEGATE_EXPR)
2471 && TREE_CODE (rhs1) == SSA_NAME)
2472 changed = simplify_not_neg_expr (&gsi);
2473 else if (code == COND_EXPR)
2475 /* In this case the entire COND_EXPR is in rhs1. */
2476 changed |= forward_propagate_into_cond (&gsi);
2477 stmt = gsi_stmt (gsi);
2479 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2481 int did_something;
2482 did_something = forward_propagate_into_comparison (&gsi);
2483 if (did_something == 2)
2484 cfg_changed = true;
2485 changed = did_something != 0;
2487 else if (code == BIT_AND_EXPR
2488 || code == BIT_IOR_EXPR
2489 || code == BIT_XOR_EXPR)
2490 changed = simplify_bitwise_binary (&gsi);
2491 else if (code == PLUS_EXPR
2492 || code == MINUS_EXPR)
2493 changed = associate_plusminus (&gsi);
2494 else if (CONVERT_EXPR_CODE_P (code)
2495 || code == FLOAT_EXPR
2496 || code == FIX_TRUNC_EXPR)
2498 int did_something = combine_conversions (&gsi);
2499 if (did_something == 2)
2500 cfg_changed = true;
2501 changed = did_something != 0;
2503 break;
2506 case GIMPLE_SWITCH:
2507 changed = simplify_gimple_switch (stmt);
2508 break;
2510 case GIMPLE_COND:
2512 int did_something;
2513 did_something = forward_propagate_into_gimple_cond (stmt);
2514 if (did_something == 2)
2515 cfg_changed = true;
2516 changed = did_something != 0;
2517 break;
2520 case GIMPLE_CALL:
2522 tree callee = gimple_call_fndecl (stmt);
2523 if (callee != NULL_TREE
2524 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
2525 changed = simplify_builtin_call (&gsi, callee);
2526 break;
2529 default:;
2532 if (changed)
2534 /* If the stmt changed then re-visit it and the statements
2535 inserted before it. */
2536 if (!prev_initialized)
2537 gsi = gsi_start_bb (bb);
2538 else
2540 gsi = prev;
2541 gsi_next (&gsi);
2544 else
2546 prev = gsi;
2547 prev_initialized = true;
2548 gsi_next (&gsi);
2553 if (cfg_changed)
2554 todoflags |= TODO_cleanup_cfg;
2556 return todoflags;
2560 static bool
2561 gate_forwprop (void)
2563 return flag_tree_forwprop;
2566 struct gimple_opt_pass pass_forwprop =
2569 GIMPLE_PASS,
2570 "forwprop", /* name */
2571 gate_forwprop, /* gate */
2572 ssa_forward_propagate_and_combine, /* execute */
2573 NULL, /* sub */
2574 NULL, /* next */
2575 0, /* static_pass_number */
2576 TV_TREE_FORWPROP, /* tv_id */
2577 PROP_cfg | PROP_ssa, /* properties_required */
2578 0, /* properties_provided */
2579 0, /* properties_destroyed */
2580 0, /* todo_flags_start */
2581 TODO_ggc_collect
2582 | TODO_update_ssa
2583 | TODO_verify_ssa /* todo_flags_finish */