/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "timevar.h"
#include "tree-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-dump.h"
#include "langhooks.h"
#include "flags.h"
#include "gimple.h"
#include "expr.h"
/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
   integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or

     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither have volatile on them):

     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or

     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions
   (which allows us to remove the cast), and {NOT_EXPR,NEG_EXPR} into
   a subsequent {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */
static bool forward_propagate_addr_expr (tree name, tree rhs);

/* Set to true if we delete EH edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple stmt);
/* Get the next statement we can propagate NAME's value into skipping
   trivial copies.  Returns the statement that is suitable as a
   propagation destination or NULL if there is no such one.
   This only returns destinations in a single-use chain.  If
   FINAL_NAME_P is non-NULL, the ssa name that represents the use
   is stored to it.  */

static gimple
get_prop_dest_stmt (tree name, tree *final_name_p)
{
  use_operand_p use;
  gimple use_stmt;

  do {
    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))
      return NULL;

    /* If this is not a trivial copy, we found it.  */
    if (!gimple_assign_ssa_name_copy_p (use_stmt)
        || gimple_assign_rhs1 (use_stmt) != name)
      break;

    /* Continue searching uses of the copy destination.  */
    name = gimple_assign_lhs (use_stmt);
  } while (1);

  if (final_name_p)
    *final_name_p = name;

  return use_stmt;
}
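/* An illustrative sketch (hypothetical SSA names, not from a testcase):
   given the single-use chain

     x_1 = a_2 < b_3;
     y_4 = x_1;
     if (y_4 != 0) goto ...; else goto ...;

   get_prop_dest_stmt (x_1, &final) skips the trivial copy and returns
   the GIMPLE_COND, storing y_4 in *final.  */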
/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL if there is no such one.
   If SINGLE_USE_ONLY is set, consider only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
        single_use = false;
        if (single_use_only)
          return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is not a simple copy, we possibly found it.  */
    if (!gimple_assign_ssa_name_copy_p (def_stmt))
      {
        tree rhs;

        if (!single_use_only && single_use_p)
          *single_use_p = single_use;

        /* We can look through pointer conversions in the search
           for a useful stmt for the comparison folding.  */
        rhs = gimple_assign_rhs1 (def_stmt);
        if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
            && TREE_CODE (rhs) == SSA_NAME
            && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
            && POINTER_TYPE_P (TREE_TYPE (rhs)))
          name = rhs;
        else
          return def_stmt;
      }
    else
      {
        /* Continue searching the def of the copy source name.  */
        name = gimple_assign_rhs1 (def_stmt);
      }
  } while (1);
}
/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple def_stmt)
{
  use_operand_p use_p;
  ssa_op_iter iter;

  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can be always propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_USE)
    if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
      return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we cannot apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}
/* Remove a copy chain ending in NAME along the defs.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (!has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    gsi = gsi_for_stmt (stmt);
    bb = gimple_bb (stmt);
    release_defs (stmt);
    gsi_remove (&gsi, true);
    cfg_changed |= gimple_purge_dead_eh_edges (bb);

    name = (gimple_assign_copy_p (stmt)) ? gimple_assign_rhs1 (stmt) : NULL;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}
/* Return the rhs of a gimple_assign STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt),
                            gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}
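/* For example (illustrative, hypothetical SSA names): for the assignment
   x_1 = y_2 + z_3 this builds the GENERIC tree y_2 + z_3 of type TYPE
   via fold_build2_loc so it can be handed to the fold() machinery.  */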
/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (location_t loc, enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  t = fold_binary_loc (loc, code, type, op0, op1);
  if (!t)
    return NULL_TREE;

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    return NULL_TREE;

  return t;
}
/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gimple stmt)
{
  int did_something = 0;
  location_t loc = gimple_location (stmt);

  do {
    tree tmp = NULL_TREE;
    tree name = NULL_TREE, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
    gimple def_stmt;
    bool single_use0_p = false, single_use1_p = false;
    enum tree_code code = gimple_cond_code (stmt);

    /* We can do tree combining on SSA_NAME and comparison expressions.  */
    if (TREE_CODE_CLASS (gimple_cond_code (stmt)) == tcc_comparison)
      {
        /* For comparisons use the first operand, that is likely to
           simplify comparisons against constants.  */
        if (TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME)
          {
            name = gimple_cond_lhs (stmt);
            def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
            if (def_stmt && can_propagate_from (def_stmt))
              {
                tree op1 = gimple_cond_rhs (stmt);
                rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
                tmp = combine_cond_expr_cond (loc, code, boolean_type_node,
                                              rhs0, op1, !single_use0_p);
              }
          }
        /* If that wasn't successful, try the second operand.  */
        if (tmp == NULL_TREE
            && TREE_CODE (gimple_cond_rhs (stmt)) == SSA_NAME)
          {
            tree op0 = gimple_cond_lhs (stmt);
            name = gimple_cond_rhs (stmt);
            def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
            if (!def_stmt || !can_propagate_from (def_stmt))
              return did_something;

            rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
            tmp = combine_cond_expr_cond (loc, code, boolean_type_node, op0,
                                          rhs1, !single_use1_p);
          }
        /* If that wasn't successful either, try both operands.  */
        if (tmp == NULL_TREE
            && rhs0 != NULL_TREE
            && rhs1 != NULL_TREE)
          tmp = combine_cond_expr_cond (loc, code, boolean_type_node, rhs0,
                                        fold_convert_loc (loc,
                                                          TREE_TYPE (rhs0),
                                                          rhs1),
                                        !(single_use0_p && single_use1_p));
      }

    if (tmp)
      {
        if (dump_file && tmp)
          {
            tree cond = build2 (gimple_cond_code (stmt),
                                boolean_type_node,
                                gimple_cond_lhs (stmt),
                                gimple_cond_rhs (stmt));
            fprintf (dump_file, "  Replaced '");
            print_generic_expr (dump_file, cond, 0);
            fprintf (dump_file, "' with '");
            print_generic_expr (dump_file, tmp, 0);
            fprintf (dump_file, "'\n");
          }

        gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
        update_stmt (stmt);

        /* Remove defining statements.  */
        if (remove_prop_source_from_use (name)
            || is_gimple_min_invariant (tmp))
          did_something = 2;
        else if (did_something == 0)
          did_something = 1;

        /* Continue combining.  */
        continue;
      }

    break;
  } while (1);

  return did_something;
}
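/* An illustrative sketch of the combining done above (hypothetical
   SSA names; this mirrors the example in the file header comment):

     x_1 = a_2 < b_3;
     if (x_1 != 0) goto ...; else goto ...;

   becomes

     if (a_2 < b_3) goto ...; else goto ...;

   after which the definition of x_1 is removed if it became unused.  */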
/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_gimple_cond.  */

static int
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  location_t loc = gimple_location (stmt);
  int did_something = 0;

  do {
    tree tmp = NULL_TREE;
    tree cond = gimple_assign_rhs1 (stmt);
    tree name, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
    gimple def_stmt;
    bool single_use0_p = false, single_use1_p = false;

    /* We can do tree combining on SSA_NAME and comparison expressions.  */
    if (COMPARISON_CLASS_P (cond)
        && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME)
      {
        /* For comparisons use the first operand, that is likely to
           simplify comparisons against constants.  */
        name = TREE_OPERAND (cond, 0);
        def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
        if (def_stmt && can_propagate_from (def_stmt))
          {
            tree op1 = TREE_OPERAND (cond, 1);
            rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
            tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
                                          boolean_type_node,
                                          rhs0, op1, !single_use0_p);
          }
        /* If that wasn't successful, try the second operand.  */
        if (tmp == NULL_TREE
            && TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME)
          {
            tree op0 = TREE_OPERAND (cond, 0);
            name = TREE_OPERAND (cond, 1);
            def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
            if (!def_stmt || !can_propagate_from (def_stmt))
              return did_something;

            rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
            tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
                                          boolean_type_node,
                                          op0, rhs1, !single_use1_p);
          }
        /* If that wasn't successful either, try both operands.  */
        if (tmp == NULL_TREE
            && rhs0 != NULL_TREE
            && rhs1 != NULL_TREE)
          tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
                                        boolean_type_node,
                                        rhs0,
                                        fold_convert_loc (loc,
                                                          TREE_TYPE (rhs0),
                                                          rhs1),
                                        !(single_use0_p && single_use1_p));
      }
    else if (TREE_CODE (cond) == SSA_NAME)
      {
        name = cond;
        def_stmt = get_prop_source_stmt (name, true, NULL);
        if (!def_stmt || !can_propagate_from (def_stmt))
          return did_something;

        rhs0 = gimple_assign_rhs1 (def_stmt);
        tmp = combine_cond_expr_cond (loc, NE_EXPR, boolean_type_node, rhs0,
                                      build_int_cst (TREE_TYPE (rhs0), 0),
                                      false);
      }

    if (tmp)
      {
        if (dump_file && tmp)
          {
            fprintf (dump_file, "  Replaced '");
            print_generic_expr (dump_file, cond, 0);
            fprintf (dump_file, "' with '");
            print_generic_expr (dump_file, tmp, 0);
            fprintf (dump_file, "'\n");
          }

        gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp));
        stmt = gsi_stmt (*gsi_p);
        update_stmt (stmt);

        /* Remove defining statements.  */
        if (remove_prop_source_from_use (name)
            || is_gimple_min_invariant (tmp))
          did_something = 2;
        else if (did_something == 0)
          did_something = 1;

        /* Continue combining.  */
        continue;
      }

    break;
  } while (1);

  return did_something;
}
/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
    cfg_changed = true;

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}
/* DEF_RHS contains the address of the 0th element in an array.
   USE_STMT uses the type of DEF_RHS to compute the address of an
   arbitrary element within the array.  The (variable) byte offset
   of the element is contained in OFFSET.

   We walk back through the use-def chains of OFFSET to verify that
   it is indeed computing the offset of an element within the array
   and extract the index corresponding to the given byte offset.

   We then try to fold the entire address expression into a form
   &array[index].

   If we are successful, we replace the right hand side of USE_STMT
   with the new address computation.  */

static bool
forward_propagate_addr_into_variable_array_index (tree offset,
                                                  tree def_rhs,
                                                  gimple_stmt_iterator *use_stmt_gsi)
{
  tree index, tunit;
  gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
  tree new_rhs, tmp;

  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
    tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
  else if (TREE_CODE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))) == ARRAY_TYPE)
    tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))));
  else
    return false;
  if (!host_integerp (tunit, 1))
    return false;

  /* Get the offset's defining statement.  */
  offset_def = SSA_NAME_DEF_STMT (offset);

  /* Try to find an expression for a proper index.  This is either a
     multiplication expression by the element size or just the ssa name we came
     along in case the element size is one.  In that case, however, we do not
     allow multiplications because they can be computing index to a higher
     level dimension (PR 37861).  */
  if (integer_onep (tunit))
    {
      if (is_gimple_assign (offset_def)
          && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
        return false;

      index = offset;
    }
  else
    {
      /* The statement which defines OFFSET before type conversion
         must be a simple GIMPLE_ASSIGN.  */
      if (!is_gimple_assign (offset_def))
        return false;

      /* The RHS of the statement which defines OFFSET must be a
         multiplication of an object by the size of the array elements.
         This implicitly verifies that the size of the array elements
         is constant.  */
      if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
          && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
          && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
        {
          /* The first operand to the MULT_EXPR is the desired index.  */
          index = gimple_assign_rhs1 (offset_def);
        }
      /* If we have idx * tunit + CST * tunit re-associate that.  */
      else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
                || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
               && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
               && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
               && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
                                                gimple_assign_rhs2 (offset_def),
                                                tunit)) != NULL_TREE)
        {
          gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
          if (is_gimple_assign (offset_def2)
              && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
              && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
              && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
            {
              index = fold_build2 (gimple_assign_rhs_code (offset_def),
                                   TREE_TYPE (offset),
                                   gimple_assign_rhs1 (offset_def2), tmp);
            }
          else
            return false;
        }
      else
        return false;
    }

  /* Replace the pointer addition with array indexing.  */
  index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
    {
      new_rhs = unshare_expr (def_rhs);
      TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 1) = index;
    }
  else
    {
      new_rhs = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))),
                        unshare_expr (TREE_OPERAND (def_rhs, 0)),
                        index, integer_zero_node, NULL_TREE);
      new_rhs = build_fold_addr_expr (new_rhs);
      if (!useless_type_conversion_p (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                                      TREE_TYPE (new_rhs)))
        {
          new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true,
                                              NULL_TREE, true, GSI_SAME_STMT);
          new_rhs = fold_convert (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                                  new_rhs);
        }
    }
  gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
  use_stmt = gsi_stmt (*use_stmt_gsi);

  /* That should have created gimple, so there is no need to
     record information to undo the propagation.  */
  fold_stmt_inplace (use_stmt);
  tidy_after_forward_propagate_addr (use_stmt);
  return true;
}
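/* An illustrative sketch (hypothetical SSA names, element size 4):

     ptr_1 = &x[0];
     offset_2 = i_3 * 4;
     ptr2_4 = ptr_1 + offset_2;

   The walk above recovers i_3 as the index, and the POINTER_PLUS_EXPR
   computing ptr2_4 is rewritten to

     ptr2_4 = &x[i_3];  */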
/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation can
   be not totally successful, yet things may have been changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Trivial cases.  The use statement could be a trivial copy or a
     useless conversion.  Recurse to the uses of the lhs as copyprop does
     not copy through different variant pointers and FRE does not catch
     all useless conversions.  Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && ((rhs_code == SSA_NAME && rhs == name)
          || CONVERT_EXPR_CODE_P (rhs_code)))
    {
      /* Only recurse if we don't deal with a single use or we cannot
         do the propagation to the current statement.  In particular
         we can end up with a conversion needed for a non-invariant
         address which we cannot do in a single statement.  */
      if (!single_use_p
          || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
              && (!is_gimple_min_invariant (def_rhs)
                  || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                      && POINTER_TYPE_P (TREE_TYPE (def_rhs))
                      && (TYPE_PRECISION (TREE_TYPE (lhs))
                          > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
        return forward_propagate_addr_expr (lhs, def_rhs);

      gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
        gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
      else
        gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
      return true;
    }

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
         for further propagation.  Simply rely on fold building that
         and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
                                 def_rhs,
                                 fold_convert (ptr_type_node,
                                               gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
        return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
                                                    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
         bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
          && forward_propagate_addr_expr (lhs, new_def_rhs))
        return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
                                        new_def_rhs, NULL_TREE);
      else if (is_gimple_min_invariant (new_def_rhs))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
                                        new_def_rhs, NULL_TREE);
      else
        return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }
  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  lhs = gimple_assign_lhs (use_stmt);
  while (handled_component_p (lhs))
    lhs = TREE_OPERAND (lhs, 0);

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (lhs);
          tree new_ptr;
          off = double_int_add (off,
                                shwi_to_double_int (def_rhs_offset));
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off = double_int_add (off, mem_ref_offset (def_rhs_base));
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (lhs, 0) = new_ptr;
          TREE_OPERAND (lhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the only use.  */
          if (single_use_p)
            return true;
        }
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the LHS preserving the original alias-type.  */
      else if (gimple_assign_lhs (use_stmt) == lhs
               && useless_type_conversion_p
                    (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
                     TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset
                = int_const_binop (PLUS_EXPR, TREE_OPERAND (lhs, 1),
                                   TREE_OPERAND (*def_rhs_basep, 1), 0);
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (lhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          gimple_assign_set_lhs (use_stmt,
                                 unshare_expr (TREE_OPERAND (def_rhs, 0)));
          *def_rhs_basep = saved;
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the
             only use.  */
          if (single_use_p)
            return true;
        }
      else
        /* We can have a struct assignment dereferencing our name twice.
           Note that we didn't propagate into the lhs to not falsely
           claim we did when propagating into the rhs.  */
        res = false;
    }
  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  rhs = gimple_assign_rhs1 (use_stmt);
  if (TREE_CODE (rhs) == ADDR_EXPR)
    rhs = TREE_OPERAND (rhs, 0);
  while (handled_component_p (rhs))
    rhs = TREE_OPERAND (rhs, 0);

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (rhs);
          tree new_ptr;
          off = double_int_add (off,
                                shwi_to_double_int (def_rhs_offset));
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off = double_int_add (off, mem_ref_offset (def_rhs_base));
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (rhs, 0) = new_ptr;
          TREE_OPERAND (rhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
          fold_stmt_inplace (use_stmt);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the RHS preserving the original alias-type.  */
      else if (gimple_assign_rhs1 (use_stmt) == rhs
               && useless_type_conversion_p
                    (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                     TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset
                = int_const_binop (PLUS_EXPR, TREE_OPERAND (rhs, 1),
                                   TREE_OPERAND (*def_rhs_basep, 1), 0);
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (rhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          gimple_assign_set_rhs1 (use_stmt,
                                  unshare_expr (TREE_OPERAND (def_rhs, 0)));
          *def_rhs_basep = saved;
          fold_stmt_inplace (use_stmt);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
    }
  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size
     of the elements in X into &x[C1 + C2/element size].  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = maybe_fold_stmt_addition (gimple_location (use_stmt),
                                               TREE_TYPE (def_rhs),
                                               def_rhs, rhs2);
      if (new_rhs)
        {
          tree type = TREE_TYPE (gimple_assign_lhs (use_stmt));
          new_rhs = unshare_expr (new_rhs);
          if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs)))
            {
              if (!is_gimple_min_invariant (new_rhs))
                new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs,
                                                    true, NULL_TREE,
                                                    true, GSI_SAME_STMT);
              new_rhs = fold_convert (type, new_rhs);
            }
          gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
          use_stmt = gsi_stmt (*use_stmt_gsi);
          update_stmt (use_stmt);
          tidy_after_forward_propagate_addr (use_stmt);
          return true;
        }
    }

  /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
     converting a multiplication of an index by the size of the
     array elements, then the result is converted into the proper
     type for the arithmetic.  */
  if (TREE_CODE (rhs2) == SSA_NAME
      && (TREE_CODE (array_ref) != ARRAY_REF
          || integer_zerop (TREE_OPERAND (array_ref, 1)))
      && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
      /* Avoid problems with IVopts creating PLUS_EXPRs with a
         different type than their operands.  */
      && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
    return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
                                                             use_stmt_gsi);
  return false;
}
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.
   Returns true, if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs)
{
  int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
        {
          if (!is_gimple_debug (use_stmt))
            all = false;
          continue;
        }

      /* If the use is in a deeper loop nest, then we do not want
         to propagate non-invariant ADDR_EXPRs into the loop as that
         is likely adding expression evaluations into the loop.  */
      if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
          && !is_gimple_min_invariant (rhs))
        {
          all = false;
          continue;
        }

      {
        gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
        result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                                single_use_p);
        /* If the use has moved to a different statement adjust
           the update machinery for the old statement too.  */
        if (use_stmt != gsi_stmt (gsi))
          {
            update_stmt (use_stmt);
            use_stmt = gsi_stmt (gsi);
          }

        update_stmt (use_stmt);
      }
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  return all && has_zero_uses (name);
}
/* Forward propagate the comparison defined in STMT like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  */

static bool
forward_propagate_comparison (gimple stmt)
{
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  tree tmp = NULL_TREE;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    return false;

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt)
    return false;

  /* Conversion of the condition result to another integral type.  */
  if (is_gimple_assign (use_stmt)
      && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt))
          || TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
             == tcc_comparison
          || gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
      && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (use_stmt))))
    {
      tree lhs = gimple_assign_lhs (use_stmt);

      /* We can propagate the condition into a conversion.  */
      if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt)))
        {
          /* Avoid using fold here as that may create a COND_EXPR with
             non-boolean condition as canonical form.  */
          tmp = build2 (gimple_assign_rhs_code (stmt), TREE_TYPE (lhs),
                        gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
        }
      /* We can propagate the condition into X op CST where op
         is EQ_EXPR or NE_EXPR and CST is either one or zero.  */
      else if (TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
               == tcc_comparison
               && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
               && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
        {
          enum tree_code code = gimple_assign_rhs_code (use_stmt);
          tree cst = gimple_assign_rhs2 (use_stmt);
          tree cond;

          cond = build2 (gimple_assign_rhs_code (stmt),
                         TREE_TYPE (cst),
                         gimple_assign_rhs1 (stmt),
                         gimple_assign_rhs2 (stmt));

          tmp = combine_cond_expr_cond (gimple_location (use_stmt),
                                        code, TREE_TYPE (lhs),
                                        cond, cst, false);
          if (tmp == NULL_TREE)
            return false;
        }
      /* We can propagate the condition into a statement that
         computes the logical negation of the comparison result.  */
      else if (gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
        {
          tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          bool nans = HONOR_NANS (TYPE_MODE (type));
          enum tree_code code;
          code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
          if (code == ERROR_MARK)
            return false;

          tmp = build2 (code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
                        gimple_assign_rhs2 (stmt));
        }
      else
        return false;

      {
        gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
        gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
        use_stmt = gsi_stmt (gsi);
        update_stmt (use_stmt);
      }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          tree old_rhs = rhs_to_tree (TREE_TYPE (gimple_assign_lhs (stmt)),
                                      stmt);
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, old_rhs, dump_flags);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, dump_flags);
          fprintf (dump_file, "'\n");
        }

      /* Remove defining statements.  */
      return remove_prop_source_from_use (name);
    }

  return false;
}
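/* An illustrative sketch (hypothetical SSA names): for

     cond_1 = a_2 < b_3;
     res_4 = cond_1 != 0;

   the comparison is forwarded, giving  res_4 = a_2 < b_3;  and the now
   dead definition of cond_1 is removed by remove_prop_source_from_use.  */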
/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
   If so, we can change STMT into lhs = y which can later be copy
   propagated.  Similarly for negation.

   This could trivially be formulated as a forward propagation
   to immediate uses.  However, we already had an implementation
   from DOM which used backward propagation via the use-def links.

   It turns out that backward propagation is actually faster as
   there's less work to do for each NOT/NEG expression we find.
   Backwards propagation needs to look at the statement in a single
   backlink.  Forward propagation needs to look at potentially more
   than one forward link.  */

static void
simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree rhs = gimple_assign_rhs1 (stmt);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);

  /* See if the RHS_DEF_STMT has the same form as our statement.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
    {
      tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);

      /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
      if (TREE_CODE (rhs_def_operand) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
        {
          gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
          stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);
        }
    }
}
/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static void
simplify_gimple_switch (gimple stmt)
{
  tree cond = gimple_switch_index (stmt);
  tree def, to, ti;
  gimple def_stmt;

  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      def_stmt = SSA_NAME_DEF_STMT (cond);
      if (is_gimple_assign (def_stmt))
        {
          if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
            {
              int need_precision;
              bool fail;

              def = gimple_assign_rhs1 (def_stmt);

              /* ??? Why was Jeff testing this?  We are gimple...  */
              gcc_checking_assert (is_gimple_val (def));

              to = TREE_TYPE (cond);
              ti = TREE_TYPE (def);

              /* If we have an extension that preserves value, then we
                 can copy the source value into the switch.  */

              need_precision = TYPE_PRECISION (ti);
              fail = false;
              if (! INTEGRAL_TYPE_P (ti))
                fail = true;
              else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
                fail = true;
              else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
                need_precision += 1;
              if (TYPE_PRECISION (to) < need_precision)
                fail = true;

              if (!fail)
                {
                  gimple_switch_set_index (stmt, def);
                  update_stmt (stmt);
                }
            }
        }
    }
}
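/* An illustrative sketch: with c_1 of type char and

     i_2 = (int) c_1;
     switch (i_2) <...>

   the widening conversion preserves the value (checked via the
   precision test above), so the index is replaced to give
   switch (c_1) <...>.  */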
/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
         twice, to handle ADDR_EXPR directly in p1/p2,
         SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
         on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
        {
          if (!POINTER_TYPE_P (TREE_TYPE (p)))
            break;
          if (TREE_CODE (p) == ADDR_EXPR)
            {
              tree q = TREE_OPERAND (p, 0);
              HOST_WIDE_INT offset;
              tree base = get_addr_base_and_unit_offset (q, &offset);
              if (base)
                {
                  q = base;
                  if (offset)
                    off = size_binop (PLUS_EXPR, off, size_int (offset));
                }
              if (TREE_CODE (q) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
                {
                  p = TREE_OPERAND (q, 0);
                  off = size_binop (PLUS_EXPR, off,
                                    double_int_to_tree (sizetype,
                                                        mem_ref_offset (q)));
                }
              else
                {
                  exps[i][j] = q;
                  offs[i][j++] = off;
                  break;
                }
            }
          if (TREE_CODE (p) != SSA_NAME)
            break;
          exps[i][j] = p;
          offs[i][j++] = off;
          if (j == CPD_ITERATIONS)
            break;
          stmt = SSA_NAME_DEF_STMT (p);
          if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
            break;
          code = gimple_assign_rhs_code (stmt);
          if (code == POINTER_PLUS_EXPR)
            {
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
                break;
              off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
              p = gimple_assign_rhs1 (stmt);
            }
          else if (code == ADDR_EXPR || code == NOP_EXPR)
            p = gimple_assign_rhs1 (stmt);
          else
            break;
        }
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
        return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
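/* An illustrative sketch (hypothetical SSA names): for

     p_1 = &buf[4];
     p_2 = p_1 + 3;

   constant_pointer_difference (p_1, p_2) walks both chains down to the
   common base &buf (with offsets 4 and 7 respectively) and returns 3,
   the constant p_2 - p_1.  */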
/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
   memcpy (p, "abcd", 4);
   memset (p + 4, ' ', 3);
   into
   memcpy (p, "abcd   ", 7);
   call if the latter can be stored by pieces during expansion.  */

static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
          || gimple_call_lhs (stmt2)
          || CHAR_BIT != 8
          || BITS_PER_UNIT != 8)
        break;
      else
        {
          tree callee1;
          tree ptr1, src1, str1, off1, len1, lhs1;
          tree ptr2 = gimple_call_arg (stmt2, 0);
          tree val2 = gimple_call_arg (stmt2, 1);
          tree len2 = gimple_call_arg (stmt2, 2);
          tree diff, vdef, new_str_cst;
          gimple use_stmt;
          unsigned int ptr1_align;
          unsigned HOST_WIDE_INT src_len;
          char *src_buf;
          use_operand_p use_p;

          if (!host_integerp (val2, 0)
              || !host_integerp (len2, 1))
            break;
          if (is_gimple_call (stmt1))
            {
              /* If first stmt is a call, it needs to be memcpy
                 or mempcpy, with string literal as second argument and
                 constant length.  */
              callee1 = gimple_call_fndecl (stmt1);
              if (callee1 == NULL_TREE
                  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
                  || gimple_call_num_args (stmt1) != 3)
                break;
              if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
                  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
                break;
              ptr1 = gimple_call_arg (stmt1, 0);
              src1 = gimple_call_arg (stmt1, 1);
              len1 = gimple_call_arg (stmt1, 2);
              lhs1 = gimple_call_lhs (stmt1);
              if (!host_integerp (len1, 1))
                break;
              str1 = string_constant (src1, &off1);
              if (str1 == NULL_TREE)
                break;
              if (!host_integerp (off1, 1)
                  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
                  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
                                             - tree_low_cst (off1, 1)) > 0
                  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
                  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
                     != TYPE_MODE (char_type_node))
                break;
            }
          else if (gimple_assign_single_p (stmt1))
            {
              /* Otherwise look for length 1 memcpy optimized into
                 assignment.  */
              ptr1 = gimple_assign_lhs (stmt1);
              src1 = gimple_assign_rhs1 (stmt1);
              if (TREE_CODE (ptr1) != MEM_REF
                  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
                  || !host_integerp (src1, 0))
                break;
              ptr1 = build_fold_addr_expr (ptr1);
              callee1 = NULL_TREE;
              len1 = size_one_node;
              lhs1 = NULL_TREE;
              off1 = size_zero_node;
              str1 = NULL_TREE;
            }
          else
            break;
          diff = constant_pointer_difference (ptr1, ptr2);
          if (diff == NULL && lhs1 != NULL)
            {
              diff = constant_pointer_difference (lhs1, ptr2);
              if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
                  && diff != NULL)
                diff = size_binop (PLUS_EXPR, diff,
                                   fold_convert (sizetype, len1));
            }
          /* If the difference between the second and first destination pointer
             is not constant, or is bigger than memcpy length, bail out.  */
          if (diff == NULL
              || !host_integerp (diff, 1)
              || tree_int_cst_lt (len1, diff))
            break;

          /* Use maximum of difference plus memset length and memcpy length
             as the new memcpy length, if it is too big, bail out.  */
          src_len = tree_low_cst (diff, 1);
          src_len += tree_low_cst (len2, 1);
          if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
            src_len = tree_low_cst (len1, 1);
          if (src_len > 1024)
            break;

          /* If mempcpy value is used elsewhere, bail out, as mempcpy
             with bigger length will return different result.  */
          if (lhs1 != NULL_TREE
              && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
              && (TREE_CODE (lhs1) != SSA_NAME
                  || !single_imm_use (lhs1, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          /* If anything reads memory in between memcpy and memset
             call, the modified memcpy call might change it.  */
          vdef = gimple_vdef (stmt1);
          if (vdef != NULL
              && (!single_imm_use (vdef, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          ptr1_align = get_pointer_alignment (ptr1, BIGGEST_ALIGNMENT);
          /* Construct the new source string literal.  */
          src_buf = XALLOCAVEC (char, src_len + 1);
          if (callee1)
            memcpy (src_buf,
                    TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
                    tree_low_cst (len1, 1));
          else
            src_buf[0] = tree_low_cst (src1, 0);
          memset (src_buf + tree_low_cst (diff, 1),
                  tree_low_cst (val2, 1), tree_low_cst (len2, 1));
          src_buf[src_len] = '\0';
          /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
             handle embedded '\0's.  */
          if (strlen (src_buf) != src_len)
            break;
          rtl_profile_for_bb (gimple_bb (stmt2));
          /* If the new memcpy wouldn't be emitted by storing the literal
             by pieces, this optimization might enlarge .rodata too much,
             as commonly used string literals couldn't be shared any
             longer.  */
          if (!can_store_by_pieces (src_len,
                                    builtin_strncpy_read_str,
                                    src_buf, ptr1_align, false))
            break;

          new_str_cst = build_string_literal (src_len, src_buf);
          if (callee1)
            {
              /* If STMT1 is a mem{,p}cpy call, adjust it and remove
                 memset call.  */
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                gimple_call_set_lhs (stmt1, NULL_TREE);
              gimple_call_set_arg (stmt1, 1, new_str_cst);
              gimple_call_set_arg (stmt1, 2,
                                   build_int_cst (TREE_TYPE (len1), src_len));
              update_stmt (stmt1);
              unlink_stmt_vdef (stmt2);
              gsi_remove (gsi_p, true);
              release_defs (stmt2);
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                release_ssa_name (lhs1);
              return true;
            }
          else
            {
              /* Otherwise, if STMT1 is length 1 memcpy optimized into
                 assignment, remove STMT1 and change memset call into
                 memcpy call.  */
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

              if (!is_gimple_val (ptr1))
                ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
                                                 true, GSI_SAME_STMT);
              gimple_call_set_fndecl (stmt2, built_in_decls [BUILT_IN_MEMCPY]);
              gimple_call_set_arg (stmt2, 0, ptr1);
              gimple_call_set_arg (stmt2, 1, new_str_cst);
              gimple_call_set_arg (stmt2, 2,
                                   build_int_cst (TREE_TYPE (len2), src_len));
              unlink_stmt_vdef (stmt1);
              gsi_remove (&gsi, true);
              release_defs (stmt1);
              update_stmt (stmt2);
              return false;
            }
        }
      break;
    default:
      break;
    }
  return false;
}
/* Run bitwise and assignments through the folder.  If the first argument is an
   ssa name that is itself a result of a typecast of an ADDR_EXPR to an
   integer, feed the ADDR_EXPR to the folder rather than the ssa name.  */

static void
simplify_bitwise_and (gimple_stmt_iterator *gsi, gimple stmt)
{
  tree res;
  tree arg1 = gimple_assign_rhs1 (stmt);
  tree arg2 = gimple_assign_rhs2 (stmt);

  if (TREE_CODE (arg2) != INTEGER_CST)
    return;

  if (TREE_CODE (arg1) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (arg1))
    {
      gimple def = SSA_NAME_DEF_STMT (arg1);

      if (gimple_assign_cast_p (def)
          && INTEGRAL_TYPE_P (gimple_expr_type (def)))
        {
          tree op = gimple_assign_rhs1 (def);

          if (TREE_CODE (op) == ADDR_EXPR)
            arg1 = op;
        }
    }

  res = fold_binary_loc (gimple_location (stmt),
                         BIT_AND_EXPR, TREE_TYPE (gimple_assign_lhs (stmt)),
                         arg1, arg2);
  if (res && is_gimple_min_invariant (res))
    {
      gimple_assign_set_rhs_from_tree (gsi, res);
      update_stmt (stmt);
    }
  return;
}
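/* An illustrative sketch, assuming var is known to be at least
   2-byte aligned (this is the alignment example from the file
   header comment):

     ssa_1 = (int) &var;
     res_2 = ssa_1 & 1;

   Feeding the ADDR_EXPR instead of ssa_1 to fold_binary_loc lets the
   folder use the known alignment of var, so res_2 folds to 0.  */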
/* Perform re-associations of the plus or minus statement STMT that are
   always permitted.  Returns true if the CFG was changed.  */

static bool
associate_plusminus (gimple stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  gimple_stmt_iterator gsi;
  bool changed;

  /* We can't reassociate at all for saturating types.  */
  if (TYPE_SATURATING (TREE_TYPE (rhs1)))
    return false;

  /* First contract negates.  */
  do
    {
      changed = false;

      /* A +- (-B) -> A -+ B.  */
      if (TREE_CODE (rhs2) == SSA_NAME)
        {
          gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
          if (is_gimple_assign (def_stmt)
              && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR)
            {
              code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
              gimple_assign_set_rhs_code (stmt, code);
              rhs2 = gimple_assign_rhs1 (def_stmt);
              gimple_assign_set_rhs2 (stmt, rhs2);
              gimple_set_modified (stmt, true);
              changed = true;
            }
        }

      /* (-A) + B -> B - A.  */
      if (TREE_CODE (rhs1) == SSA_NAME
          && code == PLUS_EXPR)
        {
          gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
          if (is_gimple_assign (def_stmt)
              && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR)
            {
              code = MINUS_EXPR;
              gimple_assign_set_rhs_code (stmt, code);
              rhs1 = rhs2;
              gimple_assign_set_rhs1 (stmt, rhs1);
              rhs2 = gimple_assign_rhs1 (def_stmt);
              gimple_assign_set_rhs2 (stmt, rhs2);
              gimple_set_modified (stmt, true);
              changed = true;
            }
        }
    }
  while (changed);

  /* We can't reassociate floating-point or fixed-point plus or minus
     because of saturation to +-Inf.  */
  if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
      || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
    goto out;

  /* Second match patterns that allow contracting a plus-minus pair
     irrespective of overflow issues.

       (A +- B) - A       ->  +- B
       (A +- B) -+ B      ->  A
       (CST +- A) +- CST  ->  CST +- A
       (A + CST) +- CST   ->  A + CST
       ~A + A             ->  -1
       ~A + 1             ->  -A
       A - (A +- B)       ->  -+ B
       A +- (B +- A)      ->  +- B
       CST +- (CST +- A)  ->  CST +- A
       CST +- (A +- CST)  ->  CST +- A
       A + ~A             ->  -1

     via commutating the addition and contracting operations to zero
     by reassociation.  */
  gsi = gsi_for_stmt (stmt);
  if (TREE_CODE (rhs1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt))
        {
          enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
          if (def_code == PLUS_EXPR
              || def_code == MINUS_EXPR)
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
              if (operand_equal_p (def_rhs1, rhs2, 0)
                  && code == MINUS_EXPR)
                {
                  /* (A +- B) - A -> +- B.  */
                  code = ((def_code == PLUS_EXPR)
                          ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
                  rhs1 = def_rhs2;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (operand_equal_p (def_rhs2, rhs2, 0)
                       && code != def_code)
                {
                  /* (A +- B) -+ B -> A.  */
                  code = TREE_CODE (def_rhs1);
                  rhs1 = def_rhs1;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (TREE_CODE (rhs2) == INTEGER_CST
                       && TREE_CODE (def_rhs1) == INTEGER_CST)
                {
                  /* (CST +- A) +- CST -> CST +- A.  */
                  tree cst = fold_binary (code, TREE_TYPE (rhs1),
                                          def_rhs1, rhs2);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      code = def_code;
                      gimple_assign_set_rhs_code (stmt, code);
                      rhs1 = cst;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = def_rhs2;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
              else if (TREE_CODE (rhs2) == INTEGER_CST
                       && TREE_CODE (def_rhs2) == INTEGER_CST
                       && def_code == PLUS_EXPR)
                {
                  /* (A + CST) +- CST -> A + CST.  */
                  tree cst = fold_binary (code, TREE_TYPE (rhs1),
                                          def_rhs2, rhs2);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      code = PLUS_EXPR;
                      gimple_assign_set_rhs_code (stmt, code);
                      rhs1 = def_rhs1;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = cst;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
            }
          else if (def_code == BIT_NOT_EXPR
                   && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              if (code == PLUS_EXPR
                  && operand_equal_p (def_rhs1, rhs2, 0))
                {
                  /* ~A + A -> -1.  */
                  code = INTEGER_CST;
                  rhs1 = build_int_cst_type (TREE_TYPE (rhs2), -1);
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (code == PLUS_EXPR
                       && integer_onep (rhs2))
                {
                  /* ~A + 1 -> -A.  */
                  code = NEGATE_EXPR;
                  rhs1 = def_rhs1;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
            }
        }
    }
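
  /* E.g. (illustrative SSA names):  t_1 = ~a_2;  x_3 = t_1 + a_2;
     becomes  x_3 = -1;  since ~A is -A - 1 in two's complement, and
     t_1 = ~a_2;  x_3 = t_1 + 1;  (the constant being rhs2 of the PLUS)
     becomes  x_3 = -a_2;  by the same identity.  */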

  if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
      if (is_gimple_assign (def_stmt))
        {
          enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
          if (def_code == PLUS_EXPR
              || def_code == MINUS_EXPR)
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
              if (operand_equal_p (def_rhs1, rhs1, 0)
                  && code == MINUS_EXPR)
                {
                  /* A - (A +- B) -> -+ B.  */
                  code = ((def_code == PLUS_EXPR)
                          ? NEGATE_EXPR : TREE_CODE (def_rhs2));
                  rhs1 = def_rhs2;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (operand_equal_p (def_rhs2, rhs1, 0)
                       && code != def_code)
                {
                  /* A +- (B +- A) -> +- B.  */
                  code = ((code == PLUS_EXPR)
                          ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
                  rhs1 = def_rhs1;
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
              else if (TREE_CODE (rhs1) == INTEGER_CST
                       && TREE_CODE (def_rhs1) == INTEGER_CST)
                {
                  /* CST +- (CST +- A) -> CST +- A.  */
                  tree cst = fold_binary (code, TREE_TYPE (rhs2),
                                          rhs1, def_rhs1);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
                      gimple_assign_set_rhs_code (stmt, code);
                      rhs1 = cst;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = def_rhs2;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
              else if (TREE_CODE (rhs1) == INTEGER_CST
                       && TREE_CODE (def_rhs2) == INTEGER_CST)
                {
                  /* CST +- (A +- CST) -> CST +- A.  */
                  tree cst = fold_binary (def_code == code
                                          ? PLUS_EXPR : MINUS_EXPR,
                                          TREE_TYPE (rhs2),
                                          rhs1, def_rhs2);
                  if (cst && !TREE_OVERFLOW (cst))
                    {
                      rhs1 = cst;
                      gimple_assign_set_rhs1 (stmt, rhs1);
                      rhs2 = def_rhs1;
                      gimple_assign_set_rhs2 (stmt, rhs2);
                      gimple_set_modified (stmt, true);
                    }
                }
            }
          else if (def_code == BIT_NOT_EXPR
                   && INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
            {
              tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
              if (code == PLUS_EXPR
                  && operand_equal_p (def_rhs1, rhs1, 0))
                {
                  /* A + ~A -> -1.  */
                  code = INTEGER_CST;
                  rhs1 = build_int_cst_type (TREE_TYPE (rhs1), -1);
                  rhs2 = NULL_TREE;
                  gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
                  gcc_assert (gsi_stmt (gsi) == stmt);
                  gimple_set_modified (stmt, true);
                }
            }
        }
    }
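
  /* E.g. (illustrative):  t_1 = a_2 + b_3;  x_4 = a_2 - t_1;  matches
     A - (A + B) and becomes  x_4 = -b_3;  by the same contract-to-zero
     reasoning as the patterns above.  */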

 out:
  if (gimple_modified_p (stmt))
    {
      fold_stmt_inplace (stmt);
      update_stmt (stmt);
      if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
          && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
        return true;
    }

  return false;
}
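
/* Note that a true return value from associate_plusminus above means the
   simplification removed an EH side-effect and purging the dead EH edges
   changed the CFG; the caller below ORs it into cfg_changed so that a CFG
   cleanup gets scheduled.  */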

/* Main entry point for the forward propagation optimizer.  */

static unsigned int
tree_ssa_forward_propagate_single_use_vars (void)
{
  basic_block bb;
  unsigned int todoflags = 0;

  cfg_changed = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      /* Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
          gimple stmt = gsi_stmt (gsi);

          /* If this statement sets an SSA_NAME to an address,
             try to propagate the address into the uses of the SSA_NAME.  */
          if (is_gimple_assign (stmt))
            {
              tree lhs = gimple_assign_lhs (stmt);
              tree rhs = gimple_assign_rhs1 (stmt);

              if (TREE_CODE (lhs) != SSA_NAME)
                {
                  gsi_next (&gsi);
                  continue;
                }
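
              /* For illustration (SSA names made up): given

                   ptr_1 = &a.b;
                   x_2 = *ptr_1;

                 propagating the address rewrites the load to  x_2 = a.b;
                 and once ptr_1 has no remaining uses the address
                 statement itself is removed below.  */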
              if (gimple_assign_rhs_code (stmt) == ADDR_EXPR
                  /* Handle pointer conversions on invariant addresses
                     as well, as this is valid gimple.  */
                  || (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
                      && TREE_CODE (rhs) == ADDR_EXPR
                      && POINTER_TYPE_P (TREE_TYPE (lhs))))
                {
                  tree base = get_base_address (TREE_OPERAND (rhs, 0));
                  if ((!base
                       || !DECL_P (base)
                       || decl_address_invariant_p (base))
                      && !stmt_references_abnormal_ssa_name (stmt)
                      && forward_propagate_addr_expr (lhs, rhs))
                    {
                      release_defs (stmt);
                      todoflags |= TODO_remove_unused_locals;
                      gsi_remove (&gsi, true);
                    }
                  else
                    gsi_next (&gsi);
                }
              else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                       && can_propagate_from (stmt))
                {
                  if (TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
                      /* ??? Better adjust the interface to that function
                         instead of building new trees here.  */
                      && forward_propagate_addr_expr
                           (lhs,
                            build1 (ADDR_EXPR,
                                    TREE_TYPE (rhs),
                                    fold_build2 (MEM_REF,
                                                 TREE_TYPE (TREE_TYPE (rhs)),
                                                 rhs,
                                                 fold_convert
                                                   (ptr_type_node,
                                                    gimple_assign_rhs2 (stmt))))))
                    {
                      release_defs (stmt);
                      todoflags |= TODO_remove_unused_locals;
                      gsi_remove (&gsi, true);
                    }
                  else if (is_gimple_min_invariant (rhs))
                    {
                      /* Make sure to fold &a[0] + off_1 here.  */
                      fold_stmt_inplace (stmt);
                      update_stmt (stmt);
                      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
                        gsi_next (&gsi);
                    }
                  else
                    gsi_next (&gsi);
                }
              else if ((gimple_assign_rhs_code (stmt) == BIT_NOT_EXPR
                        || gimple_assign_rhs_code (stmt) == NEGATE_EXPR)
                       && TREE_CODE (rhs) == SSA_NAME)
                {
                  simplify_not_neg_expr (&gsi);
                  gsi_next (&gsi);
                }
              else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
                {
                  /* In this case the entire COND_EXPR is in rhs1.  */
                  int did_something;
                  fold_defer_overflow_warnings ();
                  did_something = forward_propagate_into_cond (&gsi);
                  stmt = gsi_stmt (gsi);
                  if (did_something == 2)
                    cfg_changed = true;
                  fold_undefer_overflow_warnings (!TREE_NO_WARNING (rhs)
                    && did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
                  gsi_next (&gsi);
                }
              else if (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
                       == tcc_comparison)
                {
                  if (forward_propagate_comparison (stmt))
                    cfg_changed = true;
                  gsi_next (&gsi);
                }
              else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
                {
                  simplify_bitwise_and (&gsi, stmt);
                  gsi_next (&gsi);
                }
              else if (gimple_assign_rhs_code (stmt) == PLUS_EXPR
                       || gimple_assign_rhs_code (stmt) == MINUS_EXPR)
                {
                  cfg_changed |= associate_plusminus (stmt);
                  gsi_next (&gsi);
                }
              else
                gsi_next (&gsi);
            }
          else if (gimple_code (stmt) == GIMPLE_SWITCH)
            {
              simplify_gimple_switch (stmt);
              gsi_next (&gsi);
            }
          else if (gimple_code (stmt) == GIMPLE_COND)
            {
              int did_something;
              fold_defer_overflow_warnings ();
              did_something = forward_propagate_into_gimple_cond (stmt);
              if (did_something == 2)
                cfg_changed = true;
              fold_undefer_overflow_warnings (did_something, stmt,
                                              WARN_STRICT_OVERFLOW_CONDITIONAL);
              gsi_next (&gsi);
            }
          else if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);
              if (callee == NULL_TREE
                  || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
                  || !simplify_builtin_call (&gsi, callee))
                gsi_next (&gsi);
            }
          else
            gsi_next (&gsi);
        }
    }

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;
  return todoflags;
}

static bool
gate_forwprop (void)
{
  return flag_tree_forwprop;
}
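
/* The gate only tests flag_tree_forwprop, so the whole pass can be
   disabled with -fno-tree-forwprop; the flag is enabled by default
   when optimizing (-O1 and above).  */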

struct gimple_opt_pass pass_forwprop =
{
 {
  GIMPLE_PASS,
  "forwprop",			/* name */
  gate_forwprop,		/* gate */
  tree_ssa_forward_propagate_single_use_vars,	/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_FORWPROP,		/* tv_id */
  PROP_cfg | PROP_ssa,		/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_dump_func
  | TODO_ggc_collect
  | TODO_update_ssa
  | TODO_verify_ssa		/* todo_flags_finish */
 }
};