* gcc.target/powerpc/altivec-volatile.c: Adjust expected warning.
[official-gcc.git] / gcc / tree-ssa-forwprop.c
blob5044afffcd9d1655bf999f0df4cb7a455a089dd4
1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "tm_p.h"
27 #include "basic-block.h"
28 #include "timevar.h"
29 #include "tree-pretty-print.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-dump.h"
33 #include "langhooks.h"
34 #include "flags.h"
35 #include "gimple.h"
37 /* This pass propagates the RHS of assignment statements into use
38 sites of the LHS of the assignment. It's basically a specialized
39 form of tree combination. It is hoped all of this can disappear
40 when we have a generalized tree combiner.
42 One class of common cases we handle is forward propagating a single use
43 variable into a COND_EXPR.
45 bb0:
46 x = a COND b;
47 if (x) goto ... else goto ...
49 Will be transformed into:
51 bb0:
52 if (a COND b) goto ... else goto ...
54 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
56 Or (assuming c1 and c2 are constants):
58 bb0:
59 x = a + c1;
60 if (x EQ/NEQ c2) goto ... else goto ...
62 Will be transformed into:
64 bb0:
65 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
67 Similarly for x = a - c1.
71 bb0:
72 x = !a
73 if (x) goto ... else goto ...
75 Will be transformed into:
77 bb0:
78 if (a == 0) goto ... else goto ...
80 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
81 For these cases, we propagate A into all, possibly more than one,
82 COND_EXPRs that use X.
86 bb0:
87 x = (typecast) a
88 if (x) goto ... else goto ...
90 Will be transformed into:
92 bb0:
93 if (a != 0) goto ... else goto ...
95 (Assuming a is an integral type and x is a boolean or x is an
96 integral and a is a boolean.)
98 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
99 For these cases, we propagate A into all, possibly more than one,
100 COND_EXPRs that use X.
102 In addition to eliminating the variable and the statement which assigns
103 a value to the variable, we may be able to later thread the jump without
104 adding insane complexity in the dominator optimizer.
106 Also note these transformations can cascade. We handle this by having
107 a worklist of COND_EXPR statements to examine. As we make a change to
108 a statement, we put it back on the worklist to examine on the next
109 iteration of the main loop.
111 A second class of propagation opportunities arises for ADDR_EXPR
112 nodes.
114 ptr = &x->y->z;
115 res = *ptr;
117 Will get turned into
119 res = x->y->z;
122 ptr = (type1*)&type2var;
123 res = *ptr
125 Will get turned into (if type1 and type2 are the same size
126 and neither have volatile on them):
127 res = VIEW_CONVERT_EXPR<type1>(type2var)
131 ptr = &x[0];
132 ptr2 = ptr + <constant>;
134 Will get turned into
136 ptr2 = &x[constant/elementsize];
140 ptr = &x[0];
141 offset = index * element_size;
142 offset_p = (pointer) offset;
143 ptr2 = ptr + offset_p
145 Will get turned into:
147 ptr2 = &x[index];
150 ssa = (int) decl
151 res = ssa & 1
153 Provided that decl has known alignment >= 2, will get turned into
155 res = 0
157 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
158 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
159 {NOT_EXPR,NEG_EXPR}.
161 This will (of course) be extended as other needs arise. */
163 static bool forward_propagate_addr_expr (tree name, tree rhs);
165 /* Set to true if we delete EH edges during the optimization. */
166 static bool cfg_changed;
168 static tree rhs_to_tree (tree type, gimple stmt);
170 /* Get the next statement we can propagate NAME's value into skipping
171 trivial copies. Returns the statement that is suitable as a
172 propagation destination or NULL_TREE if there is no such one.
173 This only returns destinations in a single-use chain. FINAL_NAME_P
174 if non-NULL is written to the ssa name that represents the use. */
176 static gimple
177 get_prop_dest_stmt (tree name, tree *final_name_p)
179 use_operand_p use;
180 gimple use_stmt;
182 do {
183 /* If name has multiple uses, bail out. */
184 if (!single_imm_use (name, &use, &use_stmt))
185 return NULL;
187 /* If this is not a trivial copy, we found it. */
188 if (!gimple_assign_ssa_name_copy_p (use_stmt)
189 || gimple_assign_rhs1 (use_stmt) != name)
190 break;
192 /* Continue searching uses of the copy destination. */
193 name = gimple_assign_lhs (use_stmt);
194 } while (1);
196 if (final_name_p)
197 *final_name_p = name;
199 return use_stmt;
202 /* Get the statement we can propagate from into NAME skipping
203 trivial copies. Returns the statement which defines the
204 propagation source or NULL_TREE if there is no such one.
205 If SINGLE_USE_ONLY is set considers only sources which have
206 a single use chain up to NAME. If SINGLE_USE_P is non-null,
207 it is set to whether the chain to NAME is a single use chain
208 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
210 static gimple
211 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
213 bool single_use = true;
215 do {
216 gimple def_stmt = SSA_NAME_DEF_STMT (name);
218 if (!has_single_use (name))
220 single_use = false;
221 if (single_use_only)
222 return NULL;
225 /* If name is defined by a PHI node or is the default def, bail out. */
226 if (!is_gimple_assign (def_stmt))
227 return NULL;
229 /* If def_stmt is not a simple copy, we possibly found it. */
230 if (!gimple_assign_ssa_name_copy_p (def_stmt))
232 tree rhs;
234 if (!single_use_only && single_use_p)
235 *single_use_p = single_use;
237 /* We can look through pointer conversions in the search
238 for a useful stmt for the comparison folding. */
239 rhs = gimple_assign_rhs1 (def_stmt);
240 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
241 && TREE_CODE (rhs) == SSA_NAME
242 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
243 && POINTER_TYPE_P (TREE_TYPE (rhs)))
244 name = rhs;
245 else
246 return def_stmt;
248 else
250 /* Continue searching the def of the copy source name. */
251 name = gimple_assign_rhs1 (def_stmt);
253 } while (1);
256 /* Checks if the destination ssa name in DEF_STMT can be used as
257 propagation source. Returns true if so, otherwise false. */
259 static bool
260 can_propagate_from (gimple def_stmt)
262 use_operand_p use_p;
263 ssa_op_iter iter;
265 gcc_assert (is_gimple_assign (def_stmt));
267 /* If the rhs has side-effects we cannot propagate from it. */
268 if (gimple_has_volatile_ops (def_stmt))
269 return false;
271 /* If the rhs is a load we cannot propagate from it. */
272 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
273 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
274 return false;
276 /* Constants can be always propagated. */
277 if (gimple_assign_single_p (def_stmt)
278 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
279 return true;
281 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
282 FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_USE)
283 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
284 return false;
286 /* If the definition is a conversion of a pointer to a function type,
287 then we can not apply optimizations as some targets require
288 function pointers to be canonicalized and in this case this
289 optimization could eliminate a necessary canonicalization. */
290 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
292 tree rhs = gimple_assign_rhs1 (def_stmt);
293 if (POINTER_TYPE_P (TREE_TYPE (rhs))
294 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
295 return false;
298 return true;
301 /* Remove a copy chain ending in NAME along the defs but not
302 further or including UP_TO_STMT. If NAME was replaced in
303 its only use then this function can be used to clean up
304 dead stmts. Returns true if UP_TO_STMT can be removed
305 as well, otherwise false. */
307 static bool
308 remove_prop_source_from_use (tree name, gimple up_to_stmt)
310 gimple_stmt_iterator gsi;
311 gimple stmt;
313 do {
314 if (!has_zero_uses (name))
315 return false;
317 stmt = SSA_NAME_DEF_STMT (name);
318 if (stmt == up_to_stmt)
319 return true;
321 gsi = gsi_for_stmt (stmt);
322 release_defs (stmt);
323 gsi_remove (&gsi, true);
325 name = (gimple_assign_copy_p (stmt)) ? gimple_assign_rhs1 (stmt) : NULL;
326 } while (name && TREE_CODE (name) == SSA_NAME);
328 return false;
331 /* Return the rhs of a gimple_assign STMT in a form of a single tree,
332 converted to type TYPE.
334 This should disappear, but is needed so we can combine expressions and use
335 the fold() interfaces. Long term, we need to develop folding and combine
336 routines that deal with gimple exclusively . */
338 static tree
339 rhs_to_tree (tree type, gimple stmt)
341 location_t loc = gimple_location (stmt);
342 enum tree_code code = gimple_assign_rhs_code (stmt);
343 if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
344 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
345 gimple_assign_rhs2 (stmt));
346 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
347 return build1 (code, type, gimple_assign_rhs1 (stmt));
348 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
349 return gimple_assign_rhs1 (stmt);
350 else
351 gcc_unreachable ();
354 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
355 the folded result in a form suitable for COND_EXPR_COND or
356 NULL_TREE, if there is no suitable simplified form. If
357 INVARIANT_ONLY is true only gimple_min_invariant results are
358 considered simplified. */
360 static tree
361 combine_cond_expr_cond (location_t loc, enum tree_code code, tree type,
362 tree op0, tree op1, bool invariant_only)
364 tree t;
366 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
368 t = fold_binary_loc (loc, code, type, op0, op1);
369 if (!t)
370 return NULL_TREE;
372 /* Require that we got a boolean type out if we put one in. */
373 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
375 /* Canonicalize the combined condition for use in a COND_EXPR. */
376 t = canonicalize_cond_expr_cond (t);
378 /* Bail out if we required an invariant but didn't get one. */
379 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
380 return NULL_TREE;
382 return t;
385 /* Propagate from the ssa name definition statements of COND_EXPR
386 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
387 Returns zero if no statement was changed, one if there were
388 changes and two if cfg_cleanup needs to run.
390 This must be kept in sync with forward_propagate_into_cond. */
392 static int
393 forward_propagate_into_gimple_cond (gimple stmt)
395 int did_something = 0;
396 location_t loc = gimple_location (stmt);
398 do {
399 tree tmp = NULL_TREE;
400 tree name = NULL_TREE, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
401 gimple def_stmt;
402 bool single_use0_p = false, single_use1_p = false;
403 enum tree_code code = gimple_cond_code (stmt);
405 /* We can do tree combining on SSA_NAME and comparison expressions. */
406 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) == tcc_comparison)
408 /* For comparisons use the first operand, that is likely to
409 simplify comparisons against constants. */
410 if (TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME)
412 name = gimple_cond_lhs (stmt);
413 def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
414 if (def_stmt && can_propagate_from (def_stmt))
416 tree op1 = gimple_cond_rhs (stmt);
417 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
418 tmp = combine_cond_expr_cond (loc, code, boolean_type_node,
419 rhs0, op1, !single_use0_p);
422 /* If that wasn't successful, try the second operand. */
423 if (tmp == NULL_TREE
424 && TREE_CODE (gimple_cond_rhs (stmt)) == SSA_NAME)
426 tree op0 = gimple_cond_lhs (stmt);
427 name = gimple_cond_rhs (stmt);
428 def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
429 if (!def_stmt || !can_propagate_from (def_stmt))
430 return did_something;
432 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
433 tmp = combine_cond_expr_cond (loc, code, boolean_type_node, op0,
434 rhs1, !single_use1_p);
436 /* If that wasn't successful either, try both operands. */
437 if (tmp == NULL_TREE
438 && rhs0 != NULL_TREE
439 && rhs1 != NULL_TREE)
440 tmp = combine_cond_expr_cond (loc, code, boolean_type_node, rhs0,
441 fold_convert_loc (loc,
442 TREE_TYPE (rhs0),
443 rhs1),
444 !(single_use0_p && single_use1_p));
447 if (tmp)
449 if (dump_file && tmp)
451 tree cond = build2 (gimple_cond_code (stmt),
452 boolean_type_node,
453 gimple_cond_lhs (stmt),
454 gimple_cond_rhs (stmt));
455 fprintf (dump_file, " Replaced '");
456 print_generic_expr (dump_file, cond, 0);
457 fprintf (dump_file, "' with '");
458 print_generic_expr (dump_file, tmp, 0);
459 fprintf (dump_file, "'\n");
462 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
463 update_stmt (stmt);
465 /* Remove defining statements. */
466 remove_prop_source_from_use (name, NULL);
468 if (is_gimple_min_invariant (tmp))
469 did_something = 2;
470 else if (did_something == 0)
471 did_something = 1;
473 /* Continue combining. */
474 continue;
477 break;
478 } while (1);
480 return did_something;
484 /* Propagate from the ssa name definition statements of COND_EXPR
485 in the rhs of statement STMT into the conditional if that simplifies it.
486 Returns zero if no statement was changed, one if there were
487 changes and two if cfg_cleanup needs to run.
489 This must be kept in sync with forward_propagate_into_gimple_cond. */
491 static int
492 forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
494 gimple stmt = gsi_stmt (*gsi_p);
495 location_t loc = gimple_location (stmt);
496 int did_something = 0;
498 do {
499 tree tmp = NULL_TREE;
500 tree cond = gimple_assign_rhs1 (stmt);
501 tree name, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
502 gimple def_stmt;
503 bool single_use0_p = false, single_use1_p = false;
505 /* We can do tree combining on SSA_NAME and comparison expressions. */
506 if (COMPARISON_CLASS_P (cond)
507 && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME)
509 /* For comparisons use the first operand, that is likely to
510 simplify comparisons against constants. */
511 name = TREE_OPERAND (cond, 0);
512 def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
513 if (def_stmt && can_propagate_from (def_stmt))
515 tree op1 = TREE_OPERAND (cond, 1);
516 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
517 tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
518 boolean_type_node,
519 rhs0, op1, !single_use0_p);
521 /* If that wasn't successful, try the second operand. */
522 if (tmp == NULL_TREE
523 && TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME)
525 tree op0 = TREE_OPERAND (cond, 0);
526 name = TREE_OPERAND (cond, 1);
527 def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
528 if (!def_stmt || !can_propagate_from (def_stmt))
529 return did_something;
531 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
532 tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
533 boolean_type_node,
534 op0, rhs1, !single_use1_p);
536 /* If that wasn't successful either, try both operands. */
537 if (tmp == NULL_TREE
538 && rhs0 != NULL_TREE
539 && rhs1 != NULL_TREE)
540 tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
541 boolean_type_node,
542 rhs0,
543 fold_convert_loc (loc,
544 TREE_TYPE (rhs0),
545 rhs1),
546 !(single_use0_p && single_use1_p));
548 else if (TREE_CODE (cond) == SSA_NAME)
550 name = cond;
551 def_stmt = get_prop_source_stmt (name, true, NULL);
552 if (def_stmt || !can_propagate_from (def_stmt))
553 return did_something;
555 rhs0 = gimple_assign_rhs1 (def_stmt);
556 tmp = combine_cond_expr_cond (loc, NE_EXPR, boolean_type_node, rhs0,
557 build_int_cst (TREE_TYPE (rhs0), 0),
558 false);
561 if (tmp)
563 if (dump_file && tmp)
565 fprintf (dump_file, " Replaced '");
566 print_generic_expr (dump_file, cond, 0);
567 fprintf (dump_file, "' with '");
568 print_generic_expr (dump_file, tmp, 0);
569 fprintf (dump_file, "'\n");
572 gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp));
573 stmt = gsi_stmt (*gsi_p);
574 update_stmt (stmt);
576 /* Remove defining statements. */
577 remove_prop_source_from_use (name, NULL);
579 if (is_gimple_min_invariant (tmp))
580 did_something = 2;
581 else if (did_something == 0)
582 did_something = 1;
584 /* Continue combining. */
585 continue;
588 break;
589 } while (1);
591 return did_something;
594 /* We've just substituted an ADDR_EXPR into stmt. Update all the
595 relevant data structures to match. */
597 static void
598 tidy_after_forward_propagate_addr (gimple stmt)
600 /* We may have turned a trapping insn into a non-trapping insn. */
601 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
602 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
603 cfg_changed = true;
605 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
606 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
609 /* DEF_RHS contains the address of the 0th element in an array.
610 USE_STMT uses type of DEF_RHS to compute the address of an
611 arbitrary element within the array. The (variable) byte offset
612 of the element is contained in OFFSET.
614 We walk back through the use-def chains of OFFSET to verify that
615 it is indeed computing the offset of an element within the array
616 and extract the index corresponding to the given byte offset.
618 We then try to fold the entire address expression into a form
619 &array[index].
621 If we are successful, we replace the right hand side of USE_STMT
622 with the new address computation. */
624 static bool
625 forward_propagate_addr_into_variable_array_index (tree offset,
626 tree def_rhs,
627 gimple_stmt_iterator *use_stmt_gsi)
629 tree index, tunit;
630 gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
631 tree new_rhs, tmp;
633 if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
634 tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
635 else if (TREE_CODE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))) == ARRAY_TYPE)
636 tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))));
637 else
638 return false;
639 if (!host_integerp (tunit, 1))
640 return false;
642 /* Get the offset's defining statement. */
643 offset_def = SSA_NAME_DEF_STMT (offset);
645 /* Try to find an expression for a proper index. This is either a
646 multiplication expression by the element size or just the ssa name we came
647 along in case the element size is one. In that case, however, we do not
648 allow multiplications because they can be computing index to a higher
649 level dimension (PR 37861). */
650 if (integer_onep (tunit))
652 if (is_gimple_assign (offset_def)
653 && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
654 return false;
656 index = offset;
658 else
660 /* The statement which defines OFFSET before type conversion
661 must be a simple GIMPLE_ASSIGN. */
662 if (!is_gimple_assign (offset_def))
663 return false;
665 /* The RHS of the statement which defines OFFSET must be a
666 multiplication of an object by the size of the array elements.
667 This implicitly verifies that the size of the array elements
668 is constant. */
669 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
670 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
671 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
673 /* The first operand to the MULT_EXPR is the desired index. */
674 index = gimple_assign_rhs1 (offset_def);
676 /* If we have idx * tunit + CST * tunit re-associate that. */
677 else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
678 || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
679 && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
680 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
681 && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
682 gimple_assign_rhs2 (offset_def),
683 tunit)) != NULL_TREE)
685 gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
686 if (is_gimple_assign (offset_def2)
687 && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
688 && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
689 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
691 index = fold_build2 (gimple_assign_rhs_code (offset_def),
692 TREE_TYPE (offset),
693 gimple_assign_rhs1 (offset_def2), tmp);
695 else
696 return false;
698 else
699 return false;
702 /* Replace the pointer addition with array indexing. */
703 index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
704 true, GSI_SAME_STMT);
705 if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
707 new_rhs = unshare_expr (def_rhs);
708 TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 1) = index;
710 else
712 new_rhs = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))),
713 unshare_expr (TREE_OPERAND (def_rhs, 0)),
714 index, integer_zero_node, NULL_TREE);
715 new_rhs = build_fold_addr_expr (new_rhs);
716 if (!useless_type_conversion_p (TREE_TYPE (gimple_assign_lhs (use_stmt)),
717 TREE_TYPE (new_rhs)))
719 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true,
720 NULL_TREE, true, GSI_SAME_STMT);
721 new_rhs = fold_convert (TREE_TYPE (gimple_assign_lhs (use_stmt)),
722 new_rhs);
725 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
726 use_stmt = gsi_stmt (*use_stmt_gsi);
728 /* That should have created gimple, so there is no need to
729 record information to undo the propagation. */
730 fold_stmt_inplace (use_stmt);
731 tidy_after_forward_propagate_addr (use_stmt);
732 return true;
735 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
736 ADDR_EXPR <whatever>.
738 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
739 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
740 node or for recovery of array indexing from pointer arithmetic.
742 Return true if the propagation was successful (the propagation can
743 be not totally successful, yet things may have been changed). */
745 static bool
746 forward_propagate_addr_expr_1 (tree name, tree def_rhs,
747 gimple_stmt_iterator *use_stmt_gsi,
748 bool single_use_p)
750 tree lhs, rhs, rhs2, array_ref;
751 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
752 enum tree_code rhs_code;
753 bool res = true;
755 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
757 lhs = gimple_assign_lhs (use_stmt);
758 rhs_code = gimple_assign_rhs_code (use_stmt);
759 rhs = gimple_assign_rhs1 (use_stmt);
761 /* Trivial cases. The use statement could be a trivial copy or a
762 useless conversion. Recurse to the uses of the lhs as copyprop does
763 not copy through different variant pointers and FRE does not catch
764 all useless conversions. Treat the case of a single-use name and
765 a conversion to def_rhs type separate, though. */
766 if (TREE_CODE (lhs) == SSA_NAME
767 && ((rhs_code == SSA_NAME && rhs == name)
768 || CONVERT_EXPR_CODE_P (rhs_code)))
770 /* Only recurse if we don't deal with a single use or we cannot
771 do the propagation to the current statement. In particular
772 we can end up with a conversion needed for a non-invariant
773 address which we cannot do in a single statement. */
774 if (!single_use_p
775 || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
776 && (!is_gimple_min_invariant (def_rhs)
777 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
778 && POINTER_TYPE_P (TREE_TYPE (def_rhs))
779 && (TYPE_PRECISION (TREE_TYPE (lhs))
780 > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
781 return forward_propagate_addr_expr (lhs, def_rhs);
783 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
784 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
785 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
786 else
787 gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
788 return true;
791 /* Propagate through constant pointer adjustments. */
792 if (TREE_CODE (lhs) == SSA_NAME
793 && rhs_code == POINTER_PLUS_EXPR
794 && rhs == name
795 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
797 tree new_def_rhs;
798 /* As we come here with non-invariant addresses in def_rhs we need
799 to make sure we can build a valid constant offsetted address
800 for further propagation. Simply rely on fold building that
801 and check after the fact. */
802 new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
803 def_rhs,
804 fold_convert (ptr_type_node,
805 gimple_assign_rhs2 (use_stmt)));
806 if (TREE_CODE (new_def_rhs) == MEM_REF
807 && TREE_CODE (TREE_OPERAND (new_def_rhs, 0)) == ADDR_EXPR
808 && !DECL_P (TREE_OPERAND (TREE_OPERAND (new_def_rhs, 0), 0))
809 && !CONSTANT_CLASS_P (TREE_OPERAND (TREE_OPERAND (new_def_rhs, 0), 0)))
810 return false;
811 new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
812 TREE_TYPE (rhs));
814 /* Recurse. If we could propagate into all uses of lhs do not
815 bother to replace into the current use but just pretend we did. */
816 if (TREE_CODE (new_def_rhs) == ADDR_EXPR
817 && forward_propagate_addr_expr (lhs, new_def_rhs))
818 return true;
820 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
821 gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
822 new_def_rhs, NULL_TREE);
823 else if (is_gimple_min_invariant (new_def_rhs))
824 gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
825 new_def_rhs, NULL_TREE);
826 else
827 return false;
828 gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
829 update_stmt (use_stmt);
830 return true;
833 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
834 ADDR_EXPR will not appear on the LHS. */
835 lhs = gimple_assign_lhs (use_stmt);
836 while (handled_component_p (lhs))
837 lhs = TREE_OPERAND (lhs, 0);
839 /* Now see if the LHS node is a MEM_REF using NAME. If so,
840 propagate the ADDR_EXPR into the use of NAME and fold the result. */
841 if (TREE_CODE (lhs) == MEM_REF
842 && TREE_OPERAND (lhs, 0) == name)
844 tree def_rhs_base;
845 HOST_WIDE_INT def_rhs_offset;
846 /* If the address is invariant we can always fold it. */
847 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
848 &def_rhs_offset)))
850 double_int off = mem_ref_offset (lhs);
851 tree new_ptr;
852 off = double_int_add (off,
853 shwi_to_double_int (def_rhs_offset));
854 if (TREE_CODE (def_rhs_base) == MEM_REF)
856 off = double_int_add (off, mem_ref_offset (def_rhs_base));
857 new_ptr = TREE_OPERAND (def_rhs_base, 0);
859 else
860 new_ptr = build_fold_addr_expr (def_rhs_base);
861 TREE_OPERAND (lhs, 0) = new_ptr;
862 TREE_OPERAND (lhs, 1)
863 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
864 tidy_after_forward_propagate_addr (use_stmt);
865 /* Continue propagating into the RHS if this was not the only use. */
866 if (single_use_p)
867 return true;
869 /* If the LHS is a plain dereference and the value type is the same as
870 that of the pointed-to type of the address we can put the
871 dereferenced address on the LHS preserving the original alias-type. */
872 else if (gimple_assign_lhs (use_stmt) == lhs
873 && useless_type_conversion_p
874 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
875 TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
877 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
878 tree new_offset, new_base, saved;
879 while (handled_component_p (*def_rhs_basep))
880 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
881 saved = *def_rhs_basep;
882 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
884 new_base = TREE_OPERAND (*def_rhs_basep, 0);
885 new_offset
886 = int_const_binop (PLUS_EXPR, TREE_OPERAND (lhs, 1),
887 TREE_OPERAND (*def_rhs_basep, 1), 0);
889 else
891 new_base = build_fold_addr_expr (*def_rhs_basep);
892 new_offset = TREE_OPERAND (lhs, 1);
894 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
895 new_base, new_offset);
896 gimple_assign_set_lhs (use_stmt,
897 unshare_expr (TREE_OPERAND (def_rhs, 0)));
898 *def_rhs_basep = saved;
899 tidy_after_forward_propagate_addr (use_stmt);
900 /* Continue propagating into the RHS if this was not the
901 only use. */
902 if (single_use_p)
903 return true;
905 else
906 /* We can have a struct assignment dereferencing our name twice.
907 Note that we didn't propagate into the lhs to not falsely
908 claim we did when propagating into the rhs. */
909 res = false;
912 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
913 nodes from the RHS. */
914 rhs = gimple_assign_rhs1 (use_stmt);
915 if (TREE_CODE (rhs) == ADDR_EXPR)
916 rhs = TREE_OPERAND (rhs, 0);
917 while (handled_component_p (rhs))
918 rhs = TREE_OPERAND (rhs, 0);
920 /* Now see if the RHS node is a MEM_REF using NAME. If so,
921 propagate the ADDR_EXPR into the use of NAME and fold the result. */
922 if (TREE_CODE (rhs) == MEM_REF
923 && TREE_OPERAND (rhs, 0) == name)
925 tree def_rhs_base;
926 HOST_WIDE_INT def_rhs_offset;
927 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
928 &def_rhs_offset)))
930 double_int off = mem_ref_offset (rhs);
931 tree new_ptr;
932 off = double_int_add (off,
933 shwi_to_double_int (def_rhs_offset));
934 if (TREE_CODE (def_rhs_base) == MEM_REF)
936 off = double_int_add (off, mem_ref_offset (def_rhs_base));
937 new_ptr = TREE_OPERAND (def_rhs_base, 0);
939 else
940 new_ptr = build_fold_addr_expr (def_rhs_base);
941 TREE_OPERAND (rhs, 0) = new_ptr;
942 TREE_OPERAND (rhs, 1)
943 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
944 fold_stmt_inplace (use_stmt);
945 tidy_after_forward_propagate_addr (use_stmt);
946 return res;
948 /* If the LHS is a plain dereference and the value type is the same as
949 that of the pointed-to type of the address we can put the
950 dereferenced address on the LHS preserving the original alias-type. */
951 else if (gimple_assign_rhs1 (use_stmt) == rhs
952 && useless_type_conversion_p
953 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
954 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
956 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
957 tree new_offset, new_base, saved;
958 while (handled_component_p (*def_rhs_basep))
959 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
960 saved = *def_rhs_basep;
961 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
963 new_base = TREE_OPERAND (*def_rhs_basep, 0);
964 new_offset
965 = int_const_binop (PLUS_EXPR, TREE_OPERAND (rhs, 1),
966 TREE_OPERAND (*def_rhs_basep, 1), 0);
968 else
970 new_base = build_fold_addr_expr (*def_rhs_basep);
971 new_offset = TREE_OPERAND (rhs, 1);
973 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
974 new_base, new_offset);
975 gimple_assign_set_rhs1 (use_stmt,
976 unshare_expr (TREE_OPERAND (def_rhs, 0)));
977 *def_rhs_basep = saved;
978 fold_stmt_inplace (use_stmt);
979 tidy_after_forward_propagate_addr (use_stmt);
980 return res;
984 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
985 is nothing to do. */
986 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
987 || gimple_assign_rhs1 (use_stmt) != name)
988 return false;
990 /* The remaining cases are all for turning pointer arithmetic into
991 array indexing. They only apply when we have the address of
992 element zero in an array. If that is not the case then there
993 is nothing to do. */
994 array_ref = TREE_OPERAND (def_rhs, 0);
995 if ((TREE_CODE (array_ref) != ARRAY_REF
996 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
997 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
998 && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
999 return false;
1001 rhs2 = gimple_assign_rhs2 (use_stmt);
1002 /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size
1003 of the elements in X into &x[C1 + C2/element size]. */
1004 if (TREE_CODE (rhs2) == INTEGER_CST)
1006 tree new_rhs = maybe_fold_stmt_addition (gimple_location (use_stmt),
1007 TREE_TYPE (def_rhs),
1008 def_rhs, rhs2);
1009 if (new_rhs)
1011 tree type = TREE_TYPE (gimple_assign_lhs (use_stmt));
1012 new_rhs = unshare_expr (new_rhs);
1013 if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs)))
1015 if (!is_gimple_min_invariant (new_rhs))
1016 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs,
1017 true, NULL_TREE,
1018 true, GSI_SAME_STMT);
1019 new_rhs = fold_convert (type, new_rhs);
1021 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
1022 use_stmt = gsi_stmt (*use_stmt_gsi);
1023 update_stmt (use_stmt);
1024 tidy_after_forward_propagate_addr (use_stmt);
1025 return true;
1029 /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
1030 converting a multiplication of an index by the size of the
1031 array elements, then the result is converted into the proper
1032 type for the arithmetic. */
1033 if (TREE_CODE (rhs2) == SSA_NAME
1034 && (TREE_CODE (array_ref) != ARRAY_REF
1035 || integer_zerop (TREE_OPERAND (array_ref, 1)))
1036 && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
1037 /* Avoid problems with IVopts creating PLUS_EXPRs with a
1038 different type than their operands. */
1039 && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
1040 return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
1041 use_stmt_gsi);
1042 return false;
1045 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
1047    Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
1048    Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
1049    node or for recovery of array indexing from pointer arithmetic.
1050    Returns true, if all uses have been propagated into.  */
1052 static bool
1053 forward_propagate_addr_expr (tree name, tree rhs)
       /* Loop depth of the block defining NAME; compared against each use
	  below to avoid propagating non-invariant addresses into deeper
	  loops.  */
1055   int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
1056   imm_use_iterator iter;
1057   gimple use_stmt;
1058   bool all = true;
       /* Computed once up front and handed to forward_propagate_addr_expr_1
	  for every use of NAME.  */
1059   bool single_use_p = has_single_use (name);
1061   FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
1063       bool result;
1064       tree use_rhs;
1066       /* If the use is not in a simple assignment statement, then
1067 	 there is nothing we can do.  */
1068       if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
	  /* Debug statement uses never count against full propagation.  */
1070 	  if (!is_gimple_debug (use_stmt))
1071 	    all = false;
1072 	  continue;
1075       /* If the use is in a deeper loop nest, then we do not want
1076 	 to propagate non-invariant ADDR_EXPRs into the loop as that
1077 	 is likely adding expression evaluations into the loop.  */
1078       if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
1079 	  && !is_gimple_min_invariant (rhs))
1081 	  all = false;
1082 	  continue;
1086 	gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1087 	result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
1088 						single_use_p);
1089 	/* If the use has moved to a different statement adjust
1090 	   the update machinery for the old statement too.  */
1091 	if (use_stmt != gsi_stmt (gsi))
1093 	    update_stmt (use_stmt);
1094 	    use_stmt = gsi_stmt (gsi);
1097 	  update_stmt (use_stmt);
       /* ALL stays true only if every non-debug use could be propagated
	  into.  */
1099       all &= result;
1101       /* Remove intermediate now unused copy and conversion chains.  */
1102       use_rhs = gimple_assign_rhs1 (use_stmt);
1103       if (result
1104 	  && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
1105 	  && TREE_CODE (use_rhs) == SSA_NAME
1106 	  && has_zero_uses (gimple_assign_lhs (use_stmt)))
1108 	  gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1109 	  release_defs (use_stmt);
1110 	  gsi_remove (&gsi, true);
1114   return all;
1117 /* Forward propagate the comparison defined in STMT like
1118    cond_1 = x CMP y to uses of the form
1119      a_1 = (T')cond_1
1120      a_1 = !cond_1
1121      a_1 = cond_1 != 0
1122    Returns true if stmt is now unused.  */
1124 static bool
1125 forward_propagate_comparison (gimple stmt)
1127   tree name = gimple_assign_lhs (stmt);
1128   gimple use_stmt;
       /* TMP, once non-NULL, is the expression that replaces the RHS of
	  the use statement.  */
1129   tree tmp = NULL_TREE;
1131   /* Don't propagate ssa names that occur in abnormal phis.  */
1132   if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1133        && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
1134       || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
1135 	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
1136     return false;
1138   /* Do not un-cse comparisons.  But propagate through copies.  */
1139   use_stmt = get_prop_dest_stmt (name, &name);
1140   if (!use_stmt)
1141     return false;
1143   /* Conversion of the condition result to another integral type.  */
1144   if (is_gimple_assign (use_stmt)
1145       && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt))
1146 	  || TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
1147 	     == tcc_comparison
1148           || gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
1149       && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (use_stmt))))
1151       tree lhs = gimple_assign_lhs (use_stmt);
1153       /* We can propagate the condition into a conversion.  */
1154       if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt)))
1156 	  /* Avoid using fold here as that may create a COND_EXPR with
1157 	     non-boolean condition as canonical form.  */
1158 	  tmp = build2 (gimple_assign_rhs_code (stmt), TREE_TYPE (lhs),
1159 			gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
1161       /* We can propagate the condition into X op CST where op
1162 	 is EQ_EXPR or NE_EXPR and CST is either one or zero.  */
1163       else if (TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
1164 	       == tcc_comparison
1165 	       && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
1166 	       && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
1168 	  enum tree_code code = gimple_assign_rhs_code (use_stmt);
1169 	  tree cst = gimple_assign_rhs2 (use_stmt);
1170 	  tree cond;
1172 	  cond = build2 (gimple_assign_rhs_code (stmt),
1173 			 TREE_TYPE (cst),
1174 			 gimple_assign_rhs1 (stmt),
1175 			 gimple_assign_rhs2 (stmt));
1177 	  tmp = combine_cond_expr_cond (gimple_location (use_stmt),
1178 					code, TREE_TYPE (lhs),
1179 					cond, cst, false);
1180 	  if (tmp == NULL_TREE)
1181 	    return false;
1183       /* We can propagate the condition into a statement that
1184 	 computes the logical negation of the comparison result.  */
1185       else if (gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
1187 	  tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
1188 	  bool nans = HONOR_NANS (TYPE_MODE (type));
1189 	  enum tree_code code;
	  /* Inverting the comparison can fail (ERROR_MARK) when NaNs
	     are honored for this mode.  */
1190 	  code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
1191 	  if (code == ERROR_MARK)
1192 	    return false;
1194 	  tmp = build2 (code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
1195 			gimple_assign_rhs2 (stmt));
1197       else
1198 	return false;
       /* Install the combined expression as the new RHS of the use.  */
1201 	gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1202 	gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
1203 	use_stmt = gsi_stmt (gsi);
1204 	update_stmt (use_stmt);
1207       /* Remove defining statements.  */
1208       remove_prop_source_from_use (name, stmt);
1210       if (dump_file && (dump_flags & TDF_DETAILS))
1212 	  tree old_rhs = rhs_to_tree (TREE_TYPE (gimple_assign_lhs (stmt)),
1213 				      stmt);
1214 	  fprintf (dump_file, "  Replaced '");
1215 	  print_generic_expr (dump_file, old_rhs, dump_flags);
1216 	  fprintf (dump_file, "' with '");
1217 	  print_generic_expr (dump_file, tmp, dump_flags);
1218 	  fprintf (dump_file, "'\n");
1221       return true;
1224   return false;
1227 /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1228 If so, we can change STMT into lhs = y which can later be copy
1229 propagated. Similarly for negation.
1231 This could trivially be formulated as a forward propagation
1232 to immediate uses. However, we already had an implementation
1233 from DOM which used backward propagation via the use-def links.
1235 It turns out that backward propagation is actually faster as
1236 there's less work to do for each NOT/NEG expression we find.
1237 Backwards propagation needs to look at the statement in a single
1238 backlink. Forward propagation needs to look at potentially more
1239 than one forward link. */
1241 static void
1242 simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
1244 gimple stmt = gsi_stmt (*gsi_p);
1245 tree rhs = gimple_assign_rhs1 (stmt);
1246 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
1248 /* See if the RHS_DEF_STMT has the same form as our statement. */
1249 if (is_gimple_assign (rhs_def_stmt)
1250 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
1252 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
1254 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1255 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1256 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
1258 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
1259 stmt = gsi_stmt (*gsi_p);
1260 update_stmt (stmt);
1265 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1266    the condition which we may be able to optimize better.  */
1268 static void
1269 simplify_gimple_switch (gimple stmt)
1271   tree cond = gimple_switch_index (stmt);
1272   tree def, to, ti;
1273   gimple def_stmt;
1275   /* The optimization that we really care about is removing unnecessary
1276      casts.  That will let us do much better in propagating the inferred
1277      constant at the switch target.  */
1278   if (TREE_CODE (cond) == SSA_NAME)
1280       def_stmt = SSA_NAME_DEF_STMT (cond);
1281       if (is_gimple_assign (def_stmt))
1283 	  if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
1285 	      int need_precision;
1286 	      bool fail;
1288 	      def = gimple_assign_rhs1 (def_stmt);
1290 #ifdef ENABLE_CHECKING
1291 	      /* ??? Why was Jeff testing this?  We are gimple...  */
1292 	      gcc_assert (is_gimple_val (def));
1293 #endif
	      /* TO is the type of the switch index, TI the type of the
		 cast's source operand.  */
1295 	      to = TREE_TYPE (cond);
1296 	      ti = TREE_TYPE (def);
1298 	      /* If we have an extension that preserves value, then we
1299 		 can copy the source value into the switch.  */
	      /* FAIL is set unless every value of TI is representable in
		 TO: TI must be integral, a signed TI may not feed an
		 unsigned TO, and an unsigned TI feeding a signed TO needs
		 one extra bit of precision.  */
1301 	      need_precision = TYPE_PRECISION (ti);
1302 	      fail = false;
1303 	      if (! INTEGRAL_TYPE_P (ti))
1304 		fail = true;
1305 	      else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
1306 		fail = true;
1307 	      else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
1308 		need_precision += 1;
1309 	      if (TYPE_PRECISION (to) < need_precision)
1310 		fail = true;
1312 	      if (!fail)
	      /* The cast is value-preserving: switch on the uncast
		 operand directly.  */
1314 		  gimple_switch_set_index (stmt, def);
1315 		  update_stmt (stmt);
1322 /* Run bitwise and assignments throug the folder. If the first argument is an
1323 ssa name that is itself a result of a typecast of an ADDR_EXPR to an
1324 integer, feed the ADDR_EXPR to the folder rather than the ssa name.
1327 static void
1328 simplify_bitwise_and (gimple_stmt_iterator *gsi, gimple stmt)
1330 tree res;
1331 tree arg1 = gimple_assign_rhs1 (stmt);
1332 tree arg2 = gimple_assign_rhs2 (stmt);
1334 if (TREE_CODE (arg2) != INTEGER_CST)
1335 return;
1337 if (TREE_CODE (arg1) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (arg1))
1339 gimple def = SSA_NAME_DEF_STMT (arg1);
1341 if (gimple_assign_cast_p (def)
1342 && INTEGRAL_TYPE_P (gimple_expr_type (def)))
1344 tree op = gimple_assign_rhs1 (def);
1346 if (TREE_CODE (op) == ADDR_EXPR)
1347 arg1 = op;
1351 res = fold_binary_loc (gimple_location (stmt),
1352 BIT_AND_EXPR, TREE_TYPE (gimple_assign_lhs (stmt)),
1353 arg1, arg2);
1354 if (res && is_gimple_min_invariant (res))
1356 gimple_assign_set_rhs_from_tree (gsi, res);
1357 update_stmt (stmt);
1359 return;
1362 /* Main entry point for the forward propagation optimizer.  */
1364 static unsigned int
1365 tree_ssa_forward_propagate_single_use_vars (void)
1367   basic_block bb;
1368   unsigned int todoflags = 0;
1370   cfg_changed = false;
1372   FOR_EACH_BB (bb)
1374       gimple_stmt_iterator gsi;
1376       /* Note we update GSI within the loop as necessary.  */
1377       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
1379 	  gimple stmt = gsi_stmt (gsi);
1381 	  /* If this statement sets an SSA_NAME to an address,
1382 	     try to propagate the address into the uses of the SSA_NAME.  */
1383 	  if (is_gimple_assign (stmt))
1385 	      tree lhs = gimple_assign_lhs (stmt);
1386 	      tree rhs = gimple_assign_rhs1 (stmt);
1388 	      if (TREE_CODE (lhs) != SSA_NAME)
1390 		  gsi_next (&gsi);
1391 		  continue;
1394 	      if (gimple_assign_rhs_code (stmt) == ADDR_EXPR
1395 		  /* Handle pointer conversions on invariant addresses
1396 		     as well, as this is valid gimple.  */
1397 		  || (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1398 		      && TREE_CODE (rhs) == ADDR_EXPR
1399 		      && POINTER_TYPE_P (TREE_TYPE (lhs))))
1401 		  STRIP_NOPS (rhs);
1402 		  if (!stmt_references_abnormal_ssa_name (stmt)
1403 		      && forward_propagate_addr_expr (lhs, rhs))
		  /* All uses were propagated into; the definition is dead.  */
1405 		      release_defs (stmt);
1406 		      todoflags |= TODO_remove_unused_locals;
1407 		      gsi_remove (&gsi, true);
1409 		  else
1410 		    gsi_next (&gsi);
1412 	      else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1414 		  if (TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
1415 		      /* ??? Better adjust the interface to that function
1416 			 instead of building new trees here.  */
1417 		      && forward_propagate_addr_expr
1418 		           (lhs,
1419 			    build1 (ADDR_EXPR,
1420 				    TREE_TYPE (rhs),
1421 				    fold_build2 (MEM_REF,
1422 						 TREE_TYPE (TREE_TYPE (rhs)),
1423 						 rhs,
1424 						 fold_convert
1425 						   (ptr_type_node,
1426 						    gimple_assign_rhs2 (stmt))))))
1428 		      release_defs (stmt);
1429 		      todoflags |= TODO_remove_unused_locals;
1430 		      gsi_remove (&gsi, true);
1432 		  else if (is_gimple_min_invariant (rhs))
1434 		      /* Make sure to fold &a[0] + off_1 here.  */
1435 		      fold_stmt_inplace (stmt);
1436 		      update_stmt (stmt);
		      /* If folding changed the statement away from
			 POINTER_PLUS_EXPR, leave GSI in place so the new
			 form is reconsidered on the next iteration.  */
1437 		      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1438 			gsi_next (&gsi);
1440 		  else
1441 		    gsi_next (&gsi);
1443 	      else if ((gimple_assign_rhs_code (stmt) == BIT_NOT_EXPR
1444 			|| gimple_assign_rhs_code (stmt) == NEGATE_EXPR)
1445 		       && TREE_CODE (rhs) == SSA_NAME)
1447 		  simplify_not_neg_expr (&gsi);
1448 		  gsi_next (&gsi);
1450 	      else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
1452 		  /* In this case the entire COND_EXPR is in rhs1. */
1453 		  int did_something;
		  /* Overflow warnings are deferred and only emitted below
		     if the propagation actually changed something.  */
1454 		  fold_defer_overflow_warnings ();
1455 		  did_something = forward_propagate_into_cond (&gsi);
1456 		  stmt = gsi_stmt (gsi);
		  /* A result of 2 makes the pass request TODO_cleanup_cfg
		     at the end of the function.  */
1457 		  if (did_something == 2)
1458 		    cfg_changed = true;
1459 		  fold_undefer_overflow_warnings (!TREE_NO_WARNING (rhs)
1460 		    && did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
1461 		  gsi_next (&gsi);
1463 	      else if (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1464 				== tcc_comparison)
1466 		  if (forward_propagate_comparison (stmt))
1468 		      release_defs (stmt);
1469 		      todoflags |= TODO_remove_unused_locals;
1470 		      gsi_remove (&gsi, true);
1472 		  else
1473 		    gsi_next (&gsi);
1475 	      else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
1477 		  simplify_bitwise_and (&gsi, stmt);
1478 		  gsi_next (&gsi);
1480 	      else
1481 		gsi_next (&gsi);
1483 	  else if (gimple_code (stmt) == GIMPLE_SWITCH)
1485 	      simplify_gimple_switch (stmt);
1486 	      gsi_next (&gsi);
1488 	  else if (gimple_code (stmt) == GIMPLE_COND)
1490 	      int did_something;
1491 	      fold_defer_overflow_warnings ();
1492 	      did_something = forward_propagate_into_gimple_cond (stmt);
	      /* As above, 2 means the CFG needs cleanup.  */
1493 	      if (did_something == 2)
1494 		cfg_changed = true;
1495 	      fold_undefer_overflow_warnings (did_something, stmt,
1496 					      WARN_STRICT_OVERFLOW_CONDITIONAL);
1497 	      gsi_next (&gsi);
1499 	  else
1500 	    gsi_next (&gsi);
1504   if (cfg_changed)
1505     todoflags |= TODO_cleanup_cfg;
1506   return todoflags;
1510 static bool
1511 gate_forwprop (void)
1513 return flag_tree_forwprop;
1516 struct gimple_opt_pass pass_forwprop =
1519 GIMPLE_PASS,
1520 "forwprop", /* name */
1521 gate_forwprop, /* gate */
1522 tree_ssa_forward_propagate_single_use_vars, /* execute */
1523 NULL, /* sub */
1524 NULL, /* next */
1525 0, /* static_pass_number */
1526 TV_TREE_FORWPROP, /* tv_id */
1527 PROP_cfg | PROP_ssa, /* properties_required */
1528 0, /* properties_provided */
1529 0, /* properties_destroyed */
1530 0, /* todo_flags_start */
1531 TODO_dump_func
1532 | TODO_ggc_collect
1533 | TODO_update_ssa
1534 | TODO_verify_ssa /* todo_flags_finish */