/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "timevar.h"
#include "tree-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-dump.h"
#include "langhooks.h"
#include "flags.h"
#include "gimple.h"
#include "expr.h"
/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or

     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither have volatile on them):

     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or

     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */
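
/* As a concrete (hypothetical) worked instance of the constant pointer
   arithmetic case above: with int x[4] and 4-byte ints,

     ptr = &x[0];
     ptr2 = ptr + 8;

   becomes ptr2 = &x[2], since constant/elementsize = 8/4 = 2.  */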
static bool forward_propagate_addr_expr (tree name, tree rhs);

/* Set to true if we delete EH edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple stmt);

/* Get the next statement we can propagate NAME's value into skipping
   trivial copies.  Returns the statement that is suitable as a
   propagation destination or NULL if there is no such one.
   This only returns destinations in a single-use chain.  If
   FINAL_NAME_P is non-NULL, the ssa name that represents the use
   is written to it.  */
static gimple
get_prop_dest_stmt (tree name, tree *final_name_p)
{
  use_operand_p use;
  gimple use_stmt;

  do {
    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))
      return NULL;

    /* If this is not a trivial copy, we found it.  */
    if (!gimple_assign_ssa_name_copy_p (use_stmt)
        || gimple_assign_rhs1 (use_stmt) != name)
      break;

    /* Continue searching uses of the copy destination.  */
    name = gimple_assign_lhs (use_stmt);
  } while (1);

  if (final_name_p)
    *final_name_p = name;

  return use_stmt;
}
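
/* For example, given the (hypothetical) single-use chain

     x_1 = a_2 < b_3;
     y_4 = x_1;
     if (y_4 != 0) goto <bb 3>; else goto <bb 4>;

   get_prop_dest_stmt (x_1, &final) skips the trivial copy and returns
   the GIMPLE_COND, writing y_4 to FINAL.  */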
/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL if there is no such one.
   If SINGLE_USE_ONLY is set considers only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
        single_use = false;
        if (single_use_only)
          return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is not a simple copy, we possibly found it.  */
    if (!gimple_assign_ssa_name_copy_p (def_stmt))
      {
        tree rhs;

        if (!single_use_only && single_use_p)
          *single_use_p = single_use;

        /* We can look through pointer conversions in the search
           for a useful stmt for the comparison folding.  */
        rhs = gimple_assign_rhs1 (def_stmt);
        if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
            && TREE_CODE (rhs) == SSA_NAME
            && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
            && POINTER_TYPE_P (TREE_TYPE (rhs)))
          name = rhs;
        else
          return def_stmt;
      }
    else
      {
        /* Continue searching the def of the copy source name.  */
        name = gimple_assign_rhs1 (def_stmt);
      }
  } while (1);
}
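
/* For instance, with the (hypothetical) definitions

     a_1 = b_2 + 1;
     c_3 = a_1;
     d_4 = c_3;

   get_prop_source_stmt (d_4, false, &single) walks back through the
   copies and returns the statement a_1 = b_2 + 1.  */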
/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple def_stmt)
{
  use_operand_p use_p;
  ssa_op_iter iter;

  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can always be propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_USE)
    if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
      return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we cannot apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}
/* Remove a copy chain ending in NAME along the defs, stopping before
   and not removing UP_TO_STMT.  If NAME was replaced in its only use
   then this function can be used to clean up dead stmts.  Returns
   true if UP_TO_STMT can be removed as well, otherwise false.  */

static bool
remove_prop_source_from_use (tree name, gimple up_to_stmt)
{
  gimple_stmt_iterator gsi;
  gimple stmt;

  do {
    if (!has_zero_uses (name))
      return false;

    stmt = SSA_NAME_DEF_STMT (name);
    if (stmt == up_to_stmt)
      return true;

    gsi = gsi_for_stmt (stmt);
    release_defs (stmt);
    gsi_remove (&gsi, true);

    name = (gimple_assign_copy_p (stmt)) ? gimple_assign_rhs1 (stmt) : NULL;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return false;
}
/* Return the rhs of a gimple_assign STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt),
                            gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}
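
/* For example, for the (hypothetical) assignment x_1 = a_2 + b_3,
   rhs_to_tree (type, stmt) rebuilds the GENERIC tree
   PLUS_EXPR <a_2, b_3> of the given TYPE so the result can be handed
   to the fold() machinery.  */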
/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (location_t loc, enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  t = fold_binary_loc (loc, code, type, op0, op1);
  if (!t)
    return NULL_TREE;

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    return NULL_TREE;

  return t;
}
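
/* As an illustration (the particular input is hypothetical, but it is
   the shape produced by the callers below): with CODE == EQ_EXPR,
   OP0 == a_1 + 1 and OP1 == 3, fold_binary_loc simplifies the combined
   condition to a_1 == 2.  */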
/* Propagate from the ssa name definition statements of the condition
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gimple stmt)
{
  int did_something = 0;
  location_t loc = gimple_location (stmt);

  do {
    tree tmp = NULL_TREE;
    tree name = NULL_TREE, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
    gimple def_stmt;
    bool single_use0_p = false, single_use1_p = false;
    enum tree_code code = gimple_cond_code (stmt);

    /* We can do tree combining on SSA_NAME and comparison expressions.  */
    if (TREE_CODE_CLASS (gimple_cond_code (stmt)) == tcc_comparison)
      {
        /* For comparisons use the first operand, that is likely to
           simplify comparisons against constants.  */
        if (TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME)
          {
            name = gimple_cond_lhs (stmt);
            def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
            if (def_stmt && can_propagate_from (def_stmt))
              {
                tree op1 = gimple_cond_rhs (stmt);
                rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
                tmp = combine_cond_expr_cond (loc, code, boolean_type_node,
                                              rhs0, op1, !single_use0_p);
              }
          }
        /* If that wasn't successful, try the second operand.  */
        if (tmp == NULL_TREE
            && TREE_CODE (gimple_cond_rhs (stmt)) == SSA_NAME)
          {
            tree op0 = gimple_cond_lhs (stmt);
            name = gimple_cond_rhs (stmt);
            def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
            if (!def_stmt || !can_propagate_from (def_stmt))
              return did_something;

            rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
            tmp = combine_cond_expr_cond (loc, code, boolean_type_node, op0,
                                          rhs1, !single_use1_p);
          }
        /* If that wasn't successful either, try both operands.  */
        if (tmp == NULL_TREE
            && rhs0 != NULL_TREE
            && rhs1 != NULL_TREE)
          tmp = combine_cond_expr_cond (loc, code, boolean_type_node, rhs0,
                                        fold_convert_loc (loc,
                                                          TREE_TYPE (rhs0),
                                                          rhs1),
                                        !(single_use0_p && single_use1_p));
      }

    if (tmp)
      {
        if (dump_file)
          {
            tree cond = build2 (gimple_cond_code (stmt),
                                boolean_type_node,
                                gimple_cond_lhs (stmt),
                                gimple_cond_rhs (stmt));
            fprintf (dump_file, "  Replaced '");
            print_generic_expr (dump_file, cond, 0);
            fprintf (dump_file, "' with '");
            print_generic_expr (dump_file, tmp, 0);
            fprintf (dump_file, "'\n");
          }

        gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
        update_stmt (stmt);

        /* Remove defining statements.  */
        remove_prop_source_from_use (name, NULL);

        if (is_gimple_min_invariant (tmp))
          did_something = 2;
        else if (did_something == 0)
          did_something = 1;

        /* Continue combining.  */
        continue;
      }

    break;
  } while (1);

  return did_something;
}
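
/* For example, given the (hypothetical) block

     x_1 = a_2 == 0;
     if (x_1 != 0) goto <bb 3>; else goto <bb 4>;

   the definition of x_1 is combined into the GIMPLE_COND, yielding

     if (a_2 == 0) goto <bb 3>; else goto <bb 4>;

   and the now dead definition of x_1 is removed.  */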
/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_gimple_cond.  */

static int
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  location_t loc = gimple_location (stmt);
  int did_something = 0;

  do {
    tree tmp = NULL_TREE;
    tree cond = gimple_assign_rhs1 (stmt);
    tree name, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
    gimple def_stmt;
    bool single_use0_p = false, single_use1_p = false;

    /* We can do tree combining on SSA_NAME and comparison expressions.  */
    if (COMPARISON_CLASS_P (cond)
        && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME)
      {
        /* For comparisons use the first operand, that is likely to
           simplify comparisons against constants.  */
        name = TREE_OPERAND (cond, 0);
        def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
        if (def_stmt && can_propagate_from (def_stmt))
          {
            tree op1 = TREE_OPERAND (cond, 1);
            rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
            tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
                                          boolean_type_node,
                                          rhs0, op1, !single_use0_p);
          }
        /* If that wasn't successful, try the second operand.  */
        if (tmp == NULL_TREE
            && TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME)
          {
            tree op0 = TREE_OPERAND (cond, 0);
            name = TREE_OPERAND (cond, 1);
            def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
            if (!def_stmt || !can_propagate_from (def_stmt))
              return did_something;

            rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
            tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
                                          boolean_type_node,
                                          op0, rhs1, !single_use1_p);
          }
        /* If that wasn't successful either, try both operands.  */
        if (tmp == NULL_TREE
            && rhs0 != NULL_TREE
            && rhs1 != NULL_TREE)
          tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
                                        boolean_type_node,
                                        rhs0,
                                        fold_convert_loc (loc,
                                                          TREE_TYPE (rhs0),
                                                          rhs1),
                                        !(single_use0_p && single_use1_p));
      }
    else if (TREE_CODE (cond) == SSA_NAME)
      {
        name = cond;
        def_stmt = get_prop_source_stmt (name, true, NULL);
        if (!def_stmt || !can_propagate_from (def_stmt))
          return did_something;

        rhs0 = gimple_assign_rhs1 (def_stmt);
        tmp = combine_cond_expr_cond (loc, NE_EXPR, boolean_type_node, rhs0,
                                      build_int_cst (TREE_TYPE (rhs0), 0),
                                      false);
      }

    if (tmp)
      {
        if (dump_file)
          {
            fprintf (dump_file, "  Replaced '");
            print_generic_expr (dump_file, cond, 0);
            fprintf (dump_file, "' with '");
            print_generic_expr (dump_file, tmp, 0);
            fprintf (dump_file, "'\n");
          }

        gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp));
        stmt = gsi_stmt (*gsi_p);
        update_stmt (stmt);

        /* Remove defining statements.  */
        remove_prop_source_from_use (name, NULL);

        if (is_gimple_min_invariant (tmp))
          did_something = 2;
        else if (did_something == 0)
          did_something = 1;

        /* Continue combining.  */
        continue;
      }

    break;
  } while (1);

  return did_something;
}
/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
    cfg_changed = true;

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}
/* DEF_RHS contains the address of the 0th element in an array.
   USE_STMT uses type of DEF_RHS to compute the address of an
   arbitrary element within the array.  The (variable) byte offset
   of the element is contained in OFFSET.

   We walk back through the use-def chains of OFFSET to verify that
   it is indeed computing the offset of an element within the array
   and extract the index corresponding to the given byte offset.

   We then try to fold the entire address expression into a form
   &array[index].

   If we are successful, we replace the right hand side of USE_STMT
   with the new address computation.  */

static bool
forward_propagate_addr_into_variable_array_index (tree offset,
                                                  tree def_rhs,
                                                  gimple_stmt_iterator *use_stmt_gsi)
{
  tree index, tunit;
  gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
  tree new_rhs, tmp;

  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
    tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
  else if (TREE_CODE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))) == ARRAY_TYPE)
    tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))));
  else
    return false;
  if (!host_integerp (tunit, 1))
    return false;

  /* Get the offset's defining statement.  */
  offset_def = SSA_NAME_DEF_STMT (offset);

  /* Try to find an expression for a proper index.  This is either a
     multiplication expression by the element size or just the ssa name we came
     along in case the element size is one.  In that case, however, we do not
     allow multiplications because they can be computing index to a higher
     level dimension (PR 37861).  */
  if (integer_onep (tunit))
    {
      if (is_gimple_assign (offset_def)
          && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
        return false;

      index = offset;
    }
  else
    {
      /* The statement which defines OFFSET before type conversion
         must be a simple GIMPLE_ASSIGN.  */
      if (!is_gimple_assign (offset_def))
        return false;

      /* The RHS of the statement which defines OFFSET must be a
         multiplication of an object by the size of the array elements.
         This implicitly verifies that the size of the array elements
         is constant.  */
      if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
          && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
          && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
        {
          /* The first operand to the MULT_EXPR is the desired index.  */
          index = gimple_assign_rhs1 (offset_def);
        }
      /* If we have idx * tunit + CST * tunit re-associate that.  */
      else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
                || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
               && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
               && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
               && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
                                                gimple_assign_rhs2 (offset_def),
                                                tunit)) != NULL_TREE)
        {
          gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
          if (is_gimple_assign (offset_def2)
              && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
              && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
              && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
            {
              index = fold_build2 (gimple_assign_rhs_code (offset_def),
                                   TREE_TYPE (offset),
                                   gimple_assign_rhs1 (offset_def2), tmp);
            }
          else
            return false;
        }
      else
        return false;
    }

  /* Replace the pointer addition with array indexing.  */
  index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
    {
      new_rhs = unshare_expr (def_rhs);
      TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 1) = index;
    }
  else
    {
      new_rhs = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))),
                        unshare_expr (TREE_OPERAND (def_rhs, 0)),
                        index, integer_zero_node, NULL_TREE);
      new_rhs = build_fold_addr_expr (new_rhs);
      if (!useless_type_conversion_p (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                                      TREE_TYPE (new_rhs)))
        {
          new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true,
                                              NULL_TREE, true, GSI_SAME_STMT);
          new_rhs = fold_convert (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                                  new_rhs);
        }
    }
  gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
  use_stmt = gsi_stmt (*use_stmt_gsi);

  /* That should have created gimple, so there is no need to
     record information to undo the propagation.  */
  fold_stmt_inplace (use_stmt);
  tidy_after_forward_propagate_addr (use_stmt);
  return true;
}
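
/* For instance, with int a[16] and the (hypothetical) GIMPLE

     ptr_1 = &a[0];
     offset_2 = i_3 * 4;
     ptr2_4 = ptr_1 + offset_2;

   the multiplication by the element size (4) is recognized and the
   pointer addition is rewritten as ptr2_4 = &a[i_3].  */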
/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation can
   be not totally successful, yet things may have been changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Trivial cases.  The use statement could be a trivial copy or a
     useless conversion.  Recurse to the uses of the lhs as copyprop does
     not copy through different variant pointers and FRE does not catch
     all useless conversions.  Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && ((rhs_code == SSA_NAME && rhs == name)
          || CONVERT_EXPR_CODE_P (rhs_code)))
    {
      /* Only recurse if we don't deal with a single use or we cannot
         do the propagation to the current statement.  In particular
         we can end up with a conversion needed for a non-invariant
         address which we cannot do in a single statement.  */
      if (!single_use_p
          || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
              && (!is_gimple_min_invariant (def_rhs)
                  || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                      && POINTER_TYPE_P (TREE_TYPE (def_rhs))
                      && (TYPE_PRECISION (TREE_TYPE (lhs))
                          > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
        return forward_propagate_addr_expr (lhs, def_rhs);

      gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
        gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
      else
        gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
      return true;
    }

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
         for further propagation.  Simply rely on fold building that
         and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
                                 def_rhs,
                                 fold_convert (ptr_type_node,
                                               gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
        return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
                                                    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
         bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
          && forward_propagate_addr_expr (lhs, new_def_rhs))
        return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
                                        new_def_rhs, NULL_TREE);
      else if (is_gimple_min_invariant (new_def_rhs))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
                                        new_def_rhs, NULL_TREE);
      else
        return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  lhs = gimple_assign_lhs (use_stmt);
  while (handled_component_p (lhs))
    lhs = TREE_OPERAND (lhs, 0);

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (lhs);
          tree new_ptr;
          off = double_int_add (off,
                                shwi_to_double_int (def_rhs_offset));
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off = double_int_add (off, mem_ref_offset (def_rhs_base));
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (lhs, 0) = new_ptr;
          TREE_OPERAND (lhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the only use.  */
          if (single_use_p)
            return true;
        }
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the LHS preserving the original alias-type.  */
      else if (gimple_assign_lhs (use_stmt) == lhs
               && useless_type_conversion_p
                    (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
                     TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset
                = int_const_binop (PLUS_EXPR, TREE_OPERAND (lhs, 1),
                                   TREE_OPERAND (*def_rhs_basep, 1), 0);
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (lhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          gimple_assign_set_lhs (use_stmt,
                                 unshare_expr (TREE_OPERAND (def_rhs, 0)));
          *def_rhs_basep = saved;
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the
             only use.  */
          if (single_use_p)
            return true;
        }
      else
        /* We can have a struct assignment dereferencing our name twice.
           Note that we didn't propagate into the lhs to not falsely
           claim we did when propagating into the rhs.  */
        res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  rhs = gimple_assign_rhs1 (use_stmt);
  if (TREE_CODE (rhs) == ADDR_EXPR)
    rhs = TREE_OPERAND (rhs, 0);
  while (handled_component_p (rhs))
    rhs = TREE_OPERAND (rhs, 0);

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (rhs);
          tree new_ptr;
          off = double_int_add (off,
                                shwi_to_double_int (def_rhs_offset));
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off = double_int_add (off, mem_ref_offset (def_rhs_base));
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (rhs, 0) = new_ptr;
          TREE_OPERAND (rhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
          fold_stmt_inplace (use_stmt);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the RHS preserving the original alias-type.  */
      else if (gimple_assign_rhs1 (use_stmt) == rhs
               && useless_type_conversion_p
                    (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                     TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset
                = int_const_binop (PLUS_EXPR, TREE_OPERAND (rhs, 1),
                                   TREE_OPERAND (*def_rhs_basep, 1), 0);
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (rhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          gimple_assign_set_rhs1 (use_stmt,
                                  unshare_expr (TREE_OPERAND (def_rhs, 0)));
          *def_rhs_basep = saved;
          fold_stmt_inplace (use_stmt);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size
     of the elements in X into &x[C1 + C2/element size].  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = maybe_fold_stmt_addition (gimple_location (use_stmt),
                                               TREE_TYPE (def_rhs),
                                               def_rhs, rhs2);
      if (new_rhs)
        {
          tree type = TREE_TYPE (gimple_assign_lhs (use_stmt));
          new_rhs = unshare_expr (new_rhs);
          if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs)))
            {
              if (!is_gimple_min_invariant (new_rhs))
                new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs,
                                                    true, NULL_TREE,
                                                    true, GSI_SAME_STMT);
              new_rhs = fold_convert (type, new_rhs);
            }
          gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
          use_stmt = gsi_stmt (*use_stmt_gsi);
          update_stmt (use_stmt);
          tidy_after_forward_propagate_addr (use_stmt);
          return true;
        }
    }

  /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
     converting a multiplication of an index by the size of the
     array elements, then the result is converted into the proper
     type for the arithmetic.  */
  if (TREE_CODE (rhs2) == SSA_NAME
      && (TREE_CODE (array_ref) != ARRAY_REF
          || integer_zerop (TREE_OPERAND (array_ref, 1)))
      && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
      /* Avoid problems with IVopts creating PLUS_EXPRs with a
         different type than their operands.  */
      && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
    return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
                                                             use_stmt_gsi);
  return false;
}
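
/* Worked (hypothetical) constant case of the above: with int x[8]
   (4-byte elements),

     ptr_1 = &x[1];
     ptr2_2 = ptr_1 p+ 8;

   becomes ptr2_2 = &x[3], since C1 + C2/element size = 1 + 8/4.  */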
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.
   Returns true, if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs)
{
  int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
        {
          if (!is_gimple_debug (use_stmt))
            all = false;
          continue;
        }

      /* If the use is in a deeper loop nest, then we do not want
         to propagate non-invariant ADDR_EXPRs into the loop as that
         is likely adding expression evaluations into the loop.  */
      if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
          && !is_gimple_min_invariant (rhs))
        {
          all = false;
          continue;
        }

      {
        gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
        result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                                single_use_p);
        /* If the use has moved to a different statement adjust
           the update machinery for the old statement too.  */
        if (use_stmt != gsi_stmt (gsi))
          {
            update_stmt (use_stmt);
            use_stmt = gsi_stmt (gsi);
          }

        update_stmt (use_stmt);
      }
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  return all && has_zero_uses (name);
}
/* Forward propagate the comparison defined in STMT like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  */

static bool
forward_propagate_comparison (gimple stmt)
{
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  tree tmp = NULL_TREE;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    return false;

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt)
    return false;

  /* Conversion of the condition result to another integral type.  */
  if (is_gimple_assign (use_stmt)
      && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt))
          || TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
             == tcc_comparison
          || gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
      && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (use_stmt))))
    {
      tree lhs = gimple_assign_lhs (use_stmt);

      /* We can propagate the condition into a conversion.  */
      if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt)))
        {
          /* Avoid using fold here as that may create a COND_EXPR with
             non-boolean condition as canonical form.  */
          tmp = build2 (gimple_assign_rhs_code (stmt), TREE_TYPE (lhs),
                        gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
        }
      /* We can propagate the condition into X op CST where op
         is EQ_EXPR or NE_EXPR and CST is either one or zero.  */
      else if (TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
               == tcc_comparison
               && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
               && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
        {
          enum tree_code code = gimple_assign_rhs_code (use_stmt);
          tree cst = gimple_assign_rhs2 (use_stmt);
          tree cond;

          cond = build2 (gimple_assign_rhs_code (stmt),
                         TREE_TYPE (cst),
                         gimple_assign_rhs1 (stmt),
                         gimple_assign_rhs2 (stmt));

          tmp = combine_cond_expr_cond (gimple_location (use_stmt),
                                        code, TREE_TYPE (lhs),
                                        cond, cst, false);
          if (tmp == NULL_TREE)
            return false;
        }
      /* We can propagate the condition into a statement that
         computes the logical negation of the comparison result.  */
      else if (gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
        {
          tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          bool nans = HONOR_NANS (TYPE_MODE (type));
          enum tree_code code;
          code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
          if (code == ERROR_MARK)
            return false;

          tmp = build2 (code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
                        gimple_assign_rhs2 (stmt));
        }
      else
        return false;

      {
        gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
        gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
        use_stmt = gsi_stmt (gsi);
        update_stmt (use_stmt);
      }

      /* Remove defining statements.  */
      remove_prop_source_from_use (name, stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          tree old_rhs = rhs_to_tree (TREE_TYPE (gimple_assign_lhs (stmt)),
                                      stmt);
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, old_rhs, dump_flags);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, dump_flags);
          fprintf (dump_file, "'\n");
        }

      return true;
    }

  return false;
}
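
/* For example, given the (hypothetical) statements

     cond_1 = x_2 < y_3;
     a_4 = !cond_1;

   the negation is folded into the comparison, producing
   a_4 = x_2 >= y_3 (only when that inversion is valid, e.g. no NaNs
   for floating-point operands), and cond_1's definition becomes
   unused.  */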
/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
   If so, we can change STMT into lhs = y which can later be copy
   propagated.  Similarly for negation.

   This could trivially be formulated as a forward propagation
   to immediate uses.  However, we already had an implementation
   from DOM which used backward propagation via the use-def links.

   It turns out that backward propagation is actually faster as
   there's less work to do for each NOT/NEG expression we find.
   Backwards propagation needs to look at the statement in a single
   backlink.  Forward propagation needs to look at potentially more
   than one forward link.  */

static void
simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree rhs = gimple_assign_rhs1 (stmt);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);

  /* See if the RHS_DEF_STMT has the same form as our statement.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
    {
      tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);

      /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
      if (TREE_CODE (rhs_def_operand) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
        {
          gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
          stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);
        }
    }
}
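
/* For instance, with the (hypothetical) pair

     x_1 = ~y_2;
     z_3 = ~x_1;

   the second statement is rewritten as z_3 = y_2, which copy
   propagation can then clean up; the same applies to double negation
   with NEGATE_EXPR.  */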
/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static void
simplify_gimple_switch (gimple stmt)
{
  tree cond = gimple_switch_index (stmt);
  tree def, to, ti;
  gimple def_stmt;

  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      def_stmt = SSA_NAME_DEF_STMT (cond);
      if (is_gimple_assign (def_stmt))
        {
          if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
            {
              int need_precision;
              bool fail;

              def = gimple_assign_rhs1 (def_stmt);

              /* ??? Why was Jeff testing this?  We are gimple...  */
              gcc_checking_assert (is_gimple_val (def));

              to = TREE_TYPE (cond);
              ti = TREE_TYPE (def);

              /* If we have an extension that preserves value, then we
                 can copy the source value into the switch.  */

              need_precision = TYPE_PRECISION (ti);
              fail = false;
              if (! INTEGRAL_TYPE_P (ti))
                fail = true;
              else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
                fail = true;
              else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
                need_precision += 1;
              if (TYPE_PRECISION (to) < need_precision)
                fail = true;

              if (!fail)
                {
                  gimple_switch_set_index (stmt, def);
                  update_stmt (stmt);
                }
            }
        }
    }
}
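
/* As a (hypothetical) example, for

     unsigned char c_1 = ...;
     int i_2 = (int) c_1;
     switch (i_2) ...

   the widening cast preserves every value of c_1 (8 unsigned bits fit
   in a signed 32-bit int, so need_precision = 8 <= 32), and the switch
   index is replaced by c_1 directly.  */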
/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
         twice, to handle ADDR_EXPR directly in p1/p2,
         SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
         on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
        {
          if (!POINTER_TYPE_P (TREE_TYPE (p)))
            break;
          if (TREE_CODE (p) == ADDR_EXPR)
            {
              tree q = TREE_OPERAND (p, 0);
              HOST_WIDE_INT offset;
              tree base = get_addr_base_and_unit_offset (q, &offset);
              if (base)
                {
                  q = base;
                  if (offset)
                    off = size_binop (PLUS_EXPR, off, size_int (offset));
                }
              if (TREE_CODE (q) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
                {
                  p = TREE_OPERAND (q, 0);
                  off = size_binop (PLUS_EXPR, off,
                                    double_int_to_tree (sizetype,
                                                        mem_ref_offset (q)));
                }
              else
                {
                  exps[i][j] = q;
                  offs[i][j++] = off;
                  break;
                }
            }
          if (TREE_CODE (p) != SSA_NAME)
            break;
          exps[i][j] = p;
          offs[i][j++] = off;
          if (j == CPD_ITERATIONS)
            break;
          stmt = SSA_NAME_DEF_STMT (p);
          if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
            break;
          code = gimple_assign_rhs_code (stmt);
          if (code == POINTER_PLUS_EXPR)
            {
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
                break;
              off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
              p = gimple_assign_rhs1 (stmt);
            }
          else if (code == ADDR_EXPR || code == NOP_EXPR)
            p = gimple_assign_rhs1 (stmt);
          else
            break;
        }
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
        return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
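
/* Worked (hypothetical) example: with char a[16],
   constant_pointer_difference (&a[2], &a[10]) walks both addresses
   down to the common base A with byte offsets 2 and 10 and returns
   p2 - p1 = 8; for pointers with no common base it returns NULL.  */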
/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
     memcpy (p, "abcd", 4);
     memset (p + 4, ' ', 3);
   into a single
     memcpy (p, "abcd   ", 7);
   call, if the latter can be stored by pieces during expansion.  */

static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
          || gimple_call_lhs (stmt2)
          || CHAR_BIT != 8
          || BITS_PER_UNIT != 8)
        break;
      else
        {
          tree callee1;
          tree ptr1, src1, str1, off1, len1, lhs1;
          tree ptr2 = gimple_call_arg (stmt2, 0);
          tree val2 = gimple_call_arg (stmt2, 1);
          tree len2 = gimple_call_arg (stmt2, 2);
          tree diff, vdef, new_str_cst;
          gimple use_stmt;
          unsigned int ptr1_align;
          unsigned HOST_WIDE_INT src_len;
          char *src_buf;
          use_operand_p use_p;

          if (!host_integerp (val2, 0)
              || !host_integerp (len2, 1))
            break;
          if (is_gimple_call (stmt1))
            {
              /* If first stmt is a call, it needs to be memcpy
                 or mempcpy, with string literal as second argument and
                 constant length.  */
              callee1 = gimple_call_fndecl (stmt1);
              if (callee1 == NULL_TREE
                  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
                  || gimple_call_num_args (stmt1) != 3)
                break;
              if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
                  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
                break;
              ptr1 = gimple_call_arg (stmt1, 0);
              src1 = gimple_call_arg (stmt1, 1);
              len1 = gimple_call_arg (stmt1, 2);
              lhs1 = gimple_call_lhs (stmt1);
              if (!host_integerp (len1, 1))
                break;
              str1 = string_constant (src1, &off1);
              if (str1 == NULL_TREE)
                break;
              if (!host_integerp (off1, 1)
                  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
                  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
                                             - tree_low_cst (off1, 1)) > 0
                  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
                  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
                     != TYPE_MODE (char_type_node))
                break;
            }
          else if (gimple_assign_single_p (stmt1))
            {
              /* Otherwise look for length 1 memcpy optimized into
                 assignment.  */
              ptr1 = gimple_assign_lhs (stmt1);
              src1 = gimple_assign_rhs1 (stmt1);
              if (TREE_CODE (ptr1) != MEM_REF
                  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
                  || !host_integerp (src1, 0))
                break;
              ptr1 = build_fold_addr_expr (ptr1);
              callee1 = NULL_TREE;
              len1 = size_one_node;
              lhs1 = NULL_TREE;
              off1 = size_zero_node;
              str1 = NULL_TREE;
            }
          else
            break;

          diff = constant_pointer_difference (ptr1, ptr2);
          if (diff == NULL && lhs1 != NULL)
            {
              diff = constant_pointer_difference (lhs1, ptr2);
              if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
                  && diff != NULL)
                diff = size_binop (PLUS_EXPR, diff,
                                   fold_convert (sizetype, len1));
            }
          /* If the difference between the second and first destination pointer
             is not constant, or is bigger than memcpy length, bail out.  */
          if (diff == NULL
              || !host_integerp (diff, 1)
              || tree_int_cst_lt (len1, diff))
            break;

          /* Use maximum of difference plus memset length and memcpy length
             as the new memcpy length, if it is too big, bail out.  */
          src_len = tree_low_cst (diff, 1);
          src_len += tree_low_cst (len2, 1);
          if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
            src_len = tree_low_cst (len1, 1);
          if (src_len > 1024)
            break;

          /* If mempcpy value is used elsewhere, bail out, as mempcpy
             with bigger length will return different result.  */
          if (lhs1 != NULL_TREE
              && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
              && (TREE_CODE (lhs1) != SSA_NAME
                  || !single_imm_use (lhs1, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          /* If anything reads memory in between memcpy and memset
             call, the modified memcpy call might change it.  */
          vdef = gimple_vdef (stmt1);
          if (vdef != NULL
              && (!single_imm_use (vdef, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          ptr1_align = get_pointer_alignment (ptr1, BIGGEST_ALIGNMENT);
          /* Construct the new source string literal.  */
          src_buf = XALLOCAVEC (char, src_len + 1);
          if (callee1)
            memcpy (src_buf,
                    TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
                    tree_low_cst (len1, 1));
          else
            src_buf[0] = tree_low_cst (src1, 0);
          memset (src_buf + tree_low_cst (diff, 1),
                  tree_low_cst (val2, 1), tree_low_cst (len2, 1));
          src_buf[src_len] = '\0';
          /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
             handle embedded '\0's.  */
          if (strlen (src_buf) != src_len)
            break;
          rtl_profile_for_bb (gimple_bb (stmt2));
          /* If the new memcpy wouldn't be emitted by storing the literal
             by pieces, this optimization might enlarge .rodata too much,
             as commonly used string literals couldn't be shared any
             longer.  */
          if (!can_store_by_pieces (src_len,
                                    builtin_strncpy_read_str,
                                    src_buf, ptr1_align, false))
            break;

          new_str_cst = build_string_literal (src_len, src_buf);
          if (callee1)
            {
              /* If STMT1 is a mem{,p}cpy call, adjust it and remove
                 memset call.  */
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                gimple_call_set_lhs (stmt1, NULL_TREE);
              gimple_call_set_arg (stmt1, 1, new_str_cst);
              gimple_call_set_arg (stmt1, 2,
                                   build_int_cst (TREE_TYPE (len1), src_len));
              update_stmt (stmt1);
              unlink_stmt_vdef (stmt2);
              gsi_remove (gsi_p, true);
              release_defs (stmt2);
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                release_ssa_name (lhs1);
              return true;
            }
          else
            {
              /* Otherwise, if STMT1 is length 1 memcpy optimized into
                 assignment, remove STMT1 and change memset call into
                 memcpy call.  */
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

              if (!is_gimple_val (ptr1))
                ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
                                                 true, GSI_SAME_STMT);
              gimple_call_set_fndecl (stmt2, built_in_decls [BUILT_IN_MEMCPY]);
              gimple_call_set_arg (stmt2, 0, ptr1);
              gimple_call_set_arg (stmt2, 1, new_str_cst);
              gimple_call_set_arg (stmt2, 2,
                                   build_int_cst (TREE_TYPE (len2), src_len));
              unlink_stmt_vdef (stmt1);
              gsi_remove (&gsi, true);
              release_defs (stmt1);
              update_stmt (stmt2);
              return false;
            }
        }
      break;
    default:
      break;
    }
  return false;
}
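
/* A second (hypothetical) shape this handles: a length-1 memcpy that
   was already folded into a plain character store, as in

     *p = 'a';
     memset (p + 1, 'b', 3);

   which is merged into the single call memcpy (p, "abbb", 4) when the
   combined literal can be stored by pieces.  */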
/* Run bitwise and assignments through the folder.  If the first argument is an
   ssa name that is itself a result of a typecast of an ADDR_EXPR to an
   integer, feed the ADDR_EXPR to the folder rather than the ssa name.  */

static void
simplify_bitwise_and (gimple_stmt_iterator *gsi, gimple stmt)
{
  tree res;
  tree arg1 = gimple_assign_rhs1 (stmt);
  tree arg2 = gimple_assign_rhs2 (stmt);

  if (TREE_CODE (arg2) != INTEGER_CST)
    return;

  if (TREE_CODE (arg1) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (arg1))
    {
      gimple def = SSA_NAME_DEF_STMT (arg1);

      if (gimple_assign_cast_p (def)
          && INTEGRAL_TYPE_P (gimple_expr_type (def)))
        {
          tree op = gimple_assign_rhs1 (def);

          if (TREE_CODE (op) == ADDR_EXPR)
            arg1 = op;
        }
    }

  res = fold_binary_loc (gimple_location (stmt),
                         BIT_AND_EXPR, TREE_TYPE (gimple_assign_lhs (stmt)),
                         arg1, arg2);
  if (res && is_gimple_min_invariant (res))
    {
      gimple_assign_set_rhs_from_tree (gsi, res);
      update_stmt (stmt);
    }
  return;
}
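
/* For example (hypothetical, and assuming DECL is known to be at least
   4-byte aligned):

     ssa_1 = (int) &decl;
     res_2 = ssa_1 & 3;

   feeding &decl to the folder lets it prove the low two bits are zero,
   so the statement becomes res_2 = 0.  */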
/* Perform re-associations of the plus or minus statement STMT that are
   always permitted.  Returns true if the CFG was changed.  */

static bool
associate_plusminus (gimple stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  gimple_stmt_iterator gsi;
  bool changed;

  /* We can't reassociate at all for saturating types.  */
  if (TYPE_SATURATING (TREE_TYPE (rhs1)))
    return false;

  /* First contract negates.  */
  do
    {
      changed = false;

      /* A +- (-B) -> A -+ B.  */
      if (TREE_CODE (rhs2) == SSA_NAME)
        {
          gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
          if (is_gimple_assign (def_stmt)
              && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR)
            {
              code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
              gimple_assign_set_rhs_code (stmt, code);
              rhs2 = gimple_assign_rhs1 (def_stmt);
              gimple_assign_set_rhs2 (stmt, rhs2);
              gimple_set_modified (stmt, true);
              changed = true;
            }
        }

      /* (-A) + B -> B - A.  */
      if (TREE_CODE (rhs1) == SSA_NAME
          && code == PLUS_EXPR)
        {
          gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
          if (is_gimple_assign (def_stmt)
              && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR)
            {
              code = MINUS_EXPR;
              gimple_assign_set_rhs_code (stmt, code);
              rhs1 = rhs2;
              gimple_assign_set_rhs1 (stmt, rhs1);
              rhs2 = gimple_assign_rhs1 (def_stmt);
              gimple_assign_set_rhs2 (stmt, rhs2);
              gimple_set_modified (stmt, true);
              changed = true;
            }
        }
    }
  while (changed);

  /* We can't reassociate floating-point or fixed-point plus or minus
     because of saturation to +-Inf.  */
  if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
      || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
    goto out;

  /* Second match patterns that allow contracting a plus-minus pair
     irrespective of overflow issues.

        (A +- B) - A       ->  +- B
        (A +- B) -+ B      ->  A
        (CST +- A) +- CST  ->  CST +- A
        (A + CST) +- CST   ->  A + CST
        ~A + A             ->  -1
        ~A + 1             ->  -A
        A - (A +- B)       ->  -+ B
        A +- (B +- A)      ->  +- B
        CST +- (CST +- A)  ->  CST +- A
        CST +- (A +- CST)  ->  CST +- A
        A + ~A             ->  -1

     via commutating the addition and contracting operations to zero
     by reassociation.  */
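
  /* For instance (hypothetical), the pattern (A +- B) - A matches

       x_1 = a_2 + b_3;
       y_4 = x_1 - a_2;

     and rewrites the second statement to y_4 = b_3, which is valid
     irrespective of whether the intermediate addition could overflow,
     because the pair of operations cancels by reassociation.  */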
  gsi = gsi_for_stmt (stmt);
  if (TREE_CODE (rhs1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt))
        {
          enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
          if (def_code == PLUS_EXPR
              || def_code == MINUS_EXPR)
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
              if (operand_equal_p (def_rhs1, rhs2, 0)
                  && code == MINUS_EXPR)
                {
                  /* (A +- B) - A -> +- B.  */
                  code = ((def_code == PLUS_EXPR)
                          ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
                  rhs1 = def_rhs2;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (operand_equal_p (def_rhs2, rhs2, 0)
                       && code != def_code)
                {
                  /* (A +- B) -+ B -> A.  */
                  code = TREE_CODE (def_rhs1);
                  rhs1 = def_rhs1;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (TREE_CODE (rhs2) == INTEGER_CST
                       && TREE_CODE (def_rhs1) == INTEGER_CST)
                {
                  /* (CST +- A) +- CST -> CST +- A.  */
                  tree cst = fold_binary (code, TREE_TYPE (rhs1),
                                          def_rhs1, rhs2);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      code = def_code;
                      gimple_assign_set_rhs_code (stmt, code);
                      rhs1 = cst;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = def_rhs2;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
              else if (TREE_CODE (rhs2) == INTEGER_CST
                       && TREE_CODE (def_rhs2) == INTEGER_CST
                       && def_code == PLUS_EXPR)
                {
                  /* (A + CST) +- CST -> A + CST.  */
                  tree cst = fold_binary (code, TREE_TYPE (rhs1),
                                          def_rhs2, rhs2);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      code = PLUS_EXPR;
                      gimple_assign_set_rhs_code (stmt, code);
                      rhs1 = def_rhs1;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = cst;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
            }
          else if (def_code == BIT_NOT_EXPR
                   && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              if (code == PLUS_EXPR
                  && operand_equal_p (def_rhs1, rhs2, 0))
                {
                  /* ~A + A -> -1.  */
                  code = INTEGER_CST;
                  rhs1 = build_int_cst (TREE_TYPE (rhs2), -1);
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (code == PLUS_EXPR
                       && integer_onep (rhs2))
                {
                  /* ~A + 1 -> -A.  */
                  code = NEGATE_EXPR;
                  rhs1 = def_rhs1;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
            }
        }
    }
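  /* The BIT_NOT_EXPR cases above and below rely on two's complement
     arithmetic, where ~a == -a - 1, hence ~a + a == -1 and
     ~a + 1 == -a.  */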
  if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
      if (is_gimple_assign (def_stmt))
        {
          enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
          if (def_code == PLUS_EXPR
              || def_code == MINUS_EXPR)
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
              if (operand_equal_p (def_rhs1, rhs1, 0)
                  && code == MINUS_EXPR)
                {
                  /* A - (A +- B) -> -+ B.  */
                  code = ((def_code == PLUS_EXPR)
                          ? NEGATE_EXPR : TREE_CODE (def_rhs2));
                  rhs1 = def_rhs2;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (operand_equal_p (def_rhs2, rhs1, 0)
                       && code != def_code)
                {
                  /* A +- (B +- A) -> +- B.  */
                  code = ((code == PLUS_EXPR)
                          ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
                  rhs1 = def_rhs1;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (TREE_CODE (rhs1) == INTEGER_CST
                       && TREE_CODE (def_rhs1) == INTEGER_CST)
                {
                  /* CST +- (CST +- A) -> CST +- A.  */
                  tree cst = fold_binary (code, TREE_TYPE (rhs2),
                                          rhs1, def_rhs1);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
                      gimple_assign_set_rhs_code (stmt, code);
                      rhs1 = cst;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = def_rhs2;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
              else if (TREE_CODE (rhs1) == INTEGER_CST
                       && TREE_CODE (def_rhs2) == INTEGER_CST)
                {
                  /* CST +- (A +- CST) -> CST +- A.  */
                  tree cst = fold_binary (def_code == code
                                          ? PLUS_EXPR : MINUS_EXPR,
                                          TREE_TYPE (rhs2),
                                          rhs1, def_rhs2);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      rhs1 = cst;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = def_rhs1;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
            }
          else if (def_code == BIT_NOT_EXPR
                   && INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              if (code == PLUS_EXPR
                  && operand_equal_p (def_rhs1, rhs1, 0))
                {
                  /* A + ~A -> -1.  */
                  code = INTEGER_CST;
                  rhs1 = build_int_cst (TREE_TYPE (rhs1), -1);
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
            }
        }
    }
out:
  if (gimple_modified_p (stmt))
    {
      fold_stmt_inplace (stmt);
      update_stmt (stmt);
      if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
          && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
        return true;
    }

  return false;
}
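/* Taken together, these patterns let associate_plusminus rewrite, for
   example (hypothetical GIMPLE),

     t_1 = a_2 + b_3;
     x_4 = t_1 - a_2;

   into the plain copy  x_4 = b_3;  the contraction is exact, so no
   signed-overflow reasoning is needed.  */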
/* Main entry point for the forward propagation optimizer.  */

static unsigned int
tree_ssa_forward_propagate_single_use_vars (void)
{
  basic_block bb;
  unsigned int todoflags = 0;

  cfg_changed = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      /* Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
          gimple stmt = gsi_stmt (gsi);

          /* If this statement sets an SSA_NAME to an address,
             try to propagate the address into the uses of the SSA_NAME.  */
          if (is_gimple_assign (stmt))
            {
              tree lhs = gimple_assign_lhs (stmt);
              tree rhs = gimple_assign_rhs1 (stmt);

              if (TREE_CODE (lhs) != SSA_NAME)
                {
                  gsi_next (&gsi);
                  continue;
                }

              if (gimple_assign_rhs_code (stmt) == ADDR_EXPR
                  /* Handle pointer conversions on invariant addresses
                     as well, as this is valid gimple.  */
                  || (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
                      && TREE_CODE (rhs) == ADDR_EXPR
                      && POINTER_TYPE_P (TREE_TYPE (lhs))))
                {
                  tree base = get_base_address (TREE_OPERAND (rhs, 0));
                  if ((!base
                       || !DECL_P (base)
                       || decl_address_invariant_p (base))
                      && !stmt_references_abnormal_ssa_name (stmt)
                      && forward_propagate_addr_expr (lhs, rhs))
                    {
                      release_defs (stmt);
                      todoflags |= TODO_remove_unused_locals;
                      gsi_remove (&gsi, true);
                    }
                  else
                    gsi_next (&gsi);
                }
              else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                       && can_propagate_from (stmt))
                {
                  if (TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
                      /* ??? Better adjust the interface to that function
                         instead of building new trees here.  */
                      && forward_propagate_addr_expr
                           (lhs,
                            build1 (ADDR_EXPR, TREE_TYPE (rhs),
                                    fold_build2 (MEM_REF,
                                                 TREE_TYPE (TREE_TYPE (rhs)),
                                                 rhs,
                                                 fold_convert
                                                   (ptr_type_node,
                                                    gimple_assign_rhs2 (stmt))))))
                    {
                      release_defs (stmt);
                      todoflags |= TODO_remove_unused_locals;
                      gsi_remove (&gsi, true);
                    }
                  else if (is_gimple_min_invariant (rhs))
                    {
                      /* Make sure to fold &a[0] + off_1 here.  */
                      fold_stmt_inplace (stmt);
                      update_stmt (stmt);
                      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
                        gsi_next (&gsi);
                    }
                  else
                    gsi_next (&gsi);
                }
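              /* In effect the POINTER_PLUS_EXPR case above takes, e.g.
                 (hypothetical GIMPLE),

                   res_2 = ptr_1 p+ 4;

                 rebuilds the right-hand side as the address
                 &MEM[(void *)ptr_1 + 4] and offers it to
                 forward_propagate_addr_expr, so dereferences of res_2
                 can become memory references based on ptr_1.  */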
              else if ((gimple_assign_rhs_code (stmt) == BIT_NOT_EXPR
                        || gimple_assign_rhs_code (stmt) == NEGATE_EXPR)
                       && TREE_CODE (rhs) == SSA_NAME)
                {
                  simplify_not_neg_expr (&gsi);
                  gsi_next (&gsi);
                }
              else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
                {
                  /* In this case the entire COND_EXPR is in rhs1.  */
                  int did_something;
                  fold_defer_overflow_warnings ();
                  did_something = forward_propagate_into_cond (&gsi);
                  stmt = gsi_stmt (gsi);
                  if (did_something == 2)
                    cfg_changed = true;
                  fold_undefer_overflow_warnings
                    (!TREE_NO_WARNING (rhs) && did_something,
                     stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
                  gsi_next (&gsi);
                }
              else if (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
                       == tcc_comparison)
                {
                  if (forward_propagate_comparison (stmt))
                    {
                      release_defs (stmt);
                      todoflags |= TODO_remove_unused_locals;
                      gsi_remove (&gsi, true);
                    }
                  else
                    gsi_next (&gsi);
                }
              else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
                {
                  simplify_bitwise_and (&gsi, stmt);
                  gsi_next (&gsi);
                }
              else if (gimple_assign_rhs_code (stmt) == PLUS_EXPR
                       || gimple_assign_rhs_code (stmt) == MINUS_EXPR)
                {
                  cfg_changed |= associate_plusminus (stmt);
                  gsi_next (&gsi);
                }
              else
                gsi_next (&gsi);
            }
          else if (gimple_code (stmt) == GIMPLE_SWITCH)
            {
              simplify_gimple_switch (stmt);
              gsi_next (&gsi);
            }
          else if (gimple_code (stmt) == GIMPLE_COND)
            {
              int did_something;
              fold_defer_overflow_warnings ();
              did_something = forward_propagate_into_gimple_cond (stmt);
              if (did_something == 2)
                cfg_changed = true;
              fold_undefer_overflow_warnings (did_something, stmt,
                                              WARN_STRICT_OVERFLOW_CONDITIONAL);
              gsi_next (&gsi);
            }
          else if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);
              if (callee == NULL_TREE
                  || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
                  || !simplify_builtin_call (&gsi, callee))
                gsi_next (&gsi);
            }
          else
            gsi_next (&gsi);
        }
    }

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;
  return todoflags;
}
static bool
gate_forwprop (void)
{
  return flag_tree_forwprop;
}
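/* The gate keys off flag_tree_forwprop, i.e. the -ftree-forwprop
   option, which is enabled by default at -O and above.  */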
struct gimple_opt_pass pass_forwprop =
{
 {
  GIMPLE_PASS,
  "forwprop",				/* name */
  gate_forwprop,			/* gate */
  tree_ssa_forward_propagate_single_use_vars,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_FORWPROP,			/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
  | TODO_ggc_collect
  | TODO_update_ssa
  | TODO_verify_ssa			/* todo_flags_finish */
 }
};