2010-07-27 Paolo Carlini <paolo.carlini@oracle.com>
[official-gcc/alias-decl.git] / gcc / tree-ssa-forwprop.c
blobb6278045c85849bd96375b8d06d9a2bf8006a01e
1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "tm_p.h"
27 #include "basic-block.h"
28 #include "timevar.h"
29 #include "tree-pretty-print.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-dump.h"
33 #include "langhooks.h"
34 #include "flags.h"
35 #include "gimple.h"
37 /* This pass propagates the RHS of assignment statements into use
38 sites of the LHS of the assignment. It's basically a specialized
39 form of tree combination. It is hoped all of this can disappear
40 when we have a generalized tree combiner.
42 One class of common cases we handle is forward propagating a single use
43 variable into a COND_EXPR.
45 bb0:
46 x = a COND b;
47 if (x) goto ... else goto ...
49 Will be transformed into:
51 bb0:
52 if (a COND b) goto ... else goto ...
54 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
56 Or (assuming c1 and c2 are constants):
58 bb0:
59 x = a + c1;
60 if (x EQ/NEQ c2) goto ... else goto ...
62 Will be transformed into:
64 bb0:
65 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
67 Similarly for x = a - c1.
71 bb0:
72 x = !a
73 if (x) goto ... else goto ...
75 Will be transformed into:
77 bb0:
78 if (a == 0) goto ... else goto ...
80 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
81 For these cases, we propagate A into all, possibly more than one,
82 COND_EXPRs that use X.
86 bb0:
87 x = (typecast) a
88 if (x) goto ... else goto ...
90 Will be transformed into:
92 bb0:
93 if (a != 0) goto ... else goto ...
95 (Assuming a is an integral type and x is a boolean or x is an
96 integral and a is a boolean.)
98 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
99 For these cases, we propagate A into all, possibly more than one,
100 COND_EXPRs that use X.
102 In addition to eliminating the variable and the statement which assigns
103 a value to the variable, we may be able to later thread the jump without
104 adding insane complexity in the dominator optimizer.
106 Also note these transformations can cascade. We handle this by having
107 a worklist of COND_EXPR statements to examine. As we make a change to
108 a statement, we put it back on the worklist to examine on the next
109 iteration of the main loop.
111 A second class of propagation opportunities arises for ADDR_EXPR
112 nodes.
114 ptr = &x->y->z;
115 res = *ptr;
117 Will get turned into
119 res = x->y->z;
122 ptr = (type1*)&type2var;
123 res = *ptr
125 Will get turned into (if type1 and type2 are the same size
126 and neither have volatile on them):
127 res = VIEW_CONVERT_EXPR<type1>(type2var)
131 ptr = &x[0];
132 ptr2 = ptr + <constant>;
134 Will get turned into
136 ptr2 = &x[constant/elementsize];
140 ptr = &x[0];
141 offset = index * element_size;
142 offset_p = (pointer) offset;
143 ptr2 = ptr + offset_p
145 Will get turned into:
147 ptr2 = &x[index];
150 ssa = (int) decl
151 res = ssa & 1
153 Provided that decl has known alignment >= 2, will get turned into
155 res = 0
157 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
158 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
159 {NOT_EXPR,NEG_EXPR}.
161 This will (of course) be extended as other needs arise. */
/* Forward declaration; the definition appears later in this file.  Tries to
   forward propagate the ADDR_EXPR value RHS into all uses of NAME.  */
163 static bool forward_propagate_addr_expr (tree name, tree rhs);
165 /* Set to true if we delete EH edges during the optimization. */
166 static bool cfg_changed;
/* Forward declaration; returns the rhs of gimple_assign STMT as a single
   tree converted to TYPE (see the definition below for details).  */
168 static tree rhs_to_tree (tree type, gimple stmt);
170 /* Get the next statement we can propagate NAME's value into skipping
171 trivial copies. Returns the statement that is suitable as a
172 propagation destination or NULL_TREE if there is no such one.
173 This only returns destinations in a single-use chain. FINAL_NAME_P
174 if non-NULL is written to the ssa name that represents the use. */
176 static gimple
177 get_prop_dest_stmt (tree name, tree *final_name_p)
179 use_operand_p use;
180 gimple use_stmt;
182 do {
183 /* If name has multiple uses, bail out. */
184 if (!single_imm_use (name, &use, &use_stmt))
185 return NULL;
187 /* If this is not a trivial copy, we found it. */
188 if (!gimple_assign_ssa_name_copy_p (use_stmt)
189 || gimple_assign_rhs1 (use_stmt) != name)
190 break;
192 /* Continue searching uses of the copy destination. */
193 name = gimple_assign_lhs (use_stmt);
194 } while (1);
196 if (final_name_p)
197 *final_name_p = name;
199 return use_stmt;
202 /* Get the statement we can propagate from into NAME skipping
203 trivial copies. Returns the statement which defines the
204 propagation source or NULL_TREE if there is no such one.
205 If SINGLE_USE_ONLY is set considers only sources which have
206 a single use chain up to NAME. If SINGLE_USE_P is non-null,
207 it is set to whether the chain to NAME is a single use chain
208 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
210 static gimple
211 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
213 bool single_use = true;
215 do {
216 gimple def_stmt = SSA_NAME_DEF_STMT (name);
218 if (!has_single_use (name))
220 single_use = false;
221 if (single_use_only)
222 return NULL;
225 /* If name is defined by a PHI node or is the default def, bail out. */
226 if (!is_gimple_assign (def_stmt))
227 return NULL;
229 /* If def_stmt is not a simple copy, we possibly found it. */
230 if (!gimple_assign_ssa_name_copy_p (def_stmt))
232 tree rhs;
234 if (!single_use_only && single_use_p)
235 *single_use_p = single_use;
237 /* We can look through pointer conversions in the search
238 for a useful stmt for the comparison folding. */
239 rhs = gimple_assign_rhs1 (def_stmt);
240 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
241 && TREE_CODE (rhs) == SSA_NAME
242 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
243 && POINTER_TYPE_P (TREE_TYPE (rhs)))
244 name = rhs;
245 else
246 return def_stmt;
248 else
250 /* Continue searching the def of the copy source name. */
251 name = gimple_assign_rhs1 (def_stmt);
253 } while (1);
256 /* Checks if the destination ssa name in DEF_STMT can be used as
257 propagation source. Returns true if so, otherwise false. */
259 static bool
260 can_propagate_from (gimple def_stmt)
262 use_operand_p use_p;
263 ssa_op_iter iter;
265 gcc_assert (is_gimple_assign (def_stmt));
267 /* If the rhs has side-effects we cannot propagate from it. */
268 if (gimple_has_volatile_ops (def_stmt))
269 return false;
271 /* If the rhs is a load we cannot propagate from it. */
272 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
273 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
274 return false;
276 /* Constants can be always propagated. */
277 if (gimple_assign_single_p (def_stmt)
278 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
279 return true;
281 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
282 FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_USE)
283 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
284 return false;
286 /* If the definition is a conversion of a pointer to a function type,
287 then we can not apply optimizations as some targets require
288 function pointers to be canonicalized and in this case this
289 optimization could eliminate a necessary canonicalization. */
290 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
292 tree rhs = gimple_assign_rhs1 (def_stmt);
293 if (POINTER_TYPE_P (TREE_TYPE (rhs))
294 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
295 return false;
298 return true;
301 /* Remove a copy chain ending in NAME along the defs but not
302 further or including UP_TO_STMT. If NAME was replaced in
303 its only use then this function can be used to clean up
304 dead stmts. Returns true if UP_TO_STMT can be removed
305 as well, otherwise false. */
307 static bool
308 remove_prop_source_from_use (tree name, gimple up_to_stmt)
310 gimple_stmt_iterator gsi;
311 gimple stmt;
313 do {
314 if (!has_zero_uses (name))
315 return false;
317 stmt = SSA_NAME_DEF_STMT (name);
318 if (stmt == up_to_stmt)
319 return true;
321 gsi = gsi_for_stmt (stmt);
322 release_defs (stmt);
323 gsi_remove (&gsi, true);
325 name = (gimple_assign_copy_p (stmt)) ? gimple_assign_rhs1 (stmt) : NULL;
326 } while (name && TREE_CODE (name) == SSA_NAME);
328 return false;
331 /* Return the rhs of a gimple_assign STMT in a form of a single tree,
332 converted to type TYPE.
334 This should disappear, but is needed so we can combine expressions and use
335 the fold() interfaces. Long term, we need to develop folding and combine
336 routines that deal with gimple exclusively . */
338 static tree
339 rhs_to_tree (tree type, gimple stmt)
341 location_t loc = gimple_location (stmt);
342 enum tree_code code = gimple_assign_rhs_code (stmt);
343 if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
344 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
345 gimple_assign_rhs2 (stmt));
346 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
347 return build1 (code, type, gimple_assign_rhs1 (stmt));
348 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
349 return gimple_assign_rhs1 (stmt);
350 else
351 gcc_unreachable ();
354 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
355 the folded result in a form suitable for COND_EXPR_COND or
356 NULL_TREE, if there is no suitable simplified form. If
357 INVARIANT_ONLY is true only gimple_min_invariant results are
358 considered simplified. */
360 static tree
361 combine_cond_expr_cond (location_t loc, enum tree_code code, tree type,
362 tree op0, tree op1, bool invariant_only)
364 tree t;
366 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
368 t = fold_binary_loc (loc, code, type, op0, op1);
369 if (!t)
370 return NULL_TREE;
372 /* Require that we got a boolean type out if we put one in. */
373 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
375 /* Canonicalize the combined condition for use in a COND_EXPR. */
376 t = canonicalize_cond_expr_cond (t);
378 /* Bail out if we required an invariant but didn't get one. */
379 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
380 return NULL_TREE;
382 return t;
385 /* Propagate from the ssa name definition statements of COND_EXPR
386 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
387 Returns zero if no statement was changed, one if there were
388 changes and two if cfg_cleanup needs to run.
390 This must be kept in sync with forward_propagate_into_cond. */
/* NOTE(review): this listing was scraped from a web viewer that dropped
   brace-only lines and collapsed indentation; the leading numbers are the
   original file's line numbers.  Comments below annotate the visible code
   without altering it.  */
392 static int
393 forward_propagate_into_gimple_cond (gimple stmt)
395 int did_something = 0;
396 location_t loc = gimple_location (stmt);
398 do {
399 tree tmp = NULL_TREE;
400 tree name = NULL_TREE, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
401 gimple def_stmt;
402 bool single_use0_p = false, single_use1_p = false;
/* Cache the comparison code; it is reused by all three combine attempts.  */
403 enum tree_code code = gimple_cond_code (stmt);
405 /* We can do tree combining on SSA_NAME and comparison expressions. */
406 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) == tcc_comparison)
408 /* For comparisons use the first operand, that is likely to
409 simplify comparisons against constants. */
410 if (TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME)
412 name = gimple_cond_lhs (stmt);
413 def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
/* First attempt: substitute the lhs operand by its defining rhs.  */
414 if (def_stmt && can_propagate_from (def_stmt))
416 tree op1 = gimple_cond_rhs (stmt);
417 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
418 tmp = combine_cond_expr_cond (loc, code, boolean_type_node,
419 rhs0, op1, !single_use0_p);
422 /* If that wasn't successful, try the second operand. */
423 if (tmp == NULL_TREE
424 && TREE_CODE (gimple_cond_rhs (stmt)) == SSA_NAME)
426 tree op0 = gimple_cond_lhs (stmt);
427 name = gimple_cond_rhs (stmt);
428 def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
429 if (!def_stmt || !can_propagate_from (def_stmt))
430 return did_something;
432 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
433 tmp = combine_cond_expr_cond (loc, code, boolean_type_node, op0,
434 rhs1, !single_use1_p);
436 /* If that wasn't successful either, try both operands. */
437 if (tmp == NULL_TREE
438 && rhs0 != NULL_TREE
439 && rhs1 != NULL_TREE)
440 tmp = combine_cond_expr_cond (loc, code, boolean_type_node, rhs0,
441 fold_convert_loc (loc,
442 TREE_TYPE (rhs0),
443 rhs1),
444 !(single_use0_p && single_use1_p))#
/* A successful combination is installed as the new condition below.  */
447 if (tmp)
449 if (dump_file && tmp)
451 tree cond = build2 (gimple_cond_code (stmt),
452 boolean_type_node,
453 gimple_cond_lhs (stmt),
454 gimple_cond_rhs (stmt));
455 fprintf (dump_file, " Replaced '");
456 print_generic_expr (dump_file, cond, 0);
457 fprintf (dump_file, "' with '");
458 print_generic_expr (dump_file, tmp, 0);
459 fprintf (dump_file, "'\n");
462 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
463 update_stmt (stmt);
465 /* Remove defining statements. */
466 remove_prop_source_from_use (name, NULL);
/* An invariant condition makes the branch decidable, so request
   cfg_cleanup via return value 2 (see the function comment).  */
468 if (is_gimple_min_invariant (tmp))
469 did_something = 2;
470 else if (did_something == 0)
471 did_something = 1;
473 /* Continue combining. */
474 continue;
477 break;
478 } while (1);
480 return did_something;
484 /* Propagate from the ssa name definition statements of COND_EXPR
485 in the rhs of statement STMT into the conditional if that simplifies it.
486 Returns zero if no statement was changed, one if there were
487 changes and two if cfg_cleanup needs to run.
489 This must be kept in sync with forward_propagate_into_gimple_cond. */
491 static int
492 forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
494 gimple stmt = gsi_stmt (*gsi_p);
495 location_t loc = gimple_location (stmt);
496 int did_something = 0;
498 do {
499 tree tmp = NULL_TREE;
500 tree cond = gimple_assign_rhs1 (stmt);
501 tree name, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
502 gimple def_stmt;
503 bool single_use0_p = false, single_use1_p = false;
505 /* We can do tree combining on SSA_NAME and comparison expressions. */
506 if (COMPARISON_CLASS_P (cond)
507 && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME)
509 /* For comparisons use the first operand, that is likely to
510 simplify comparisons against constants. */
511 name = TREE_OPERAND (cond, 0);
512 def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
513 if (def_stmt && can_propagate_from (def_stmt))
515 tree op1 = TREE_OPERAND (cond, 1);
516 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
517 tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
518 boolean_type_node,
519 rhs0, op1, !single_use0_p);
521 /* If that wasn't successful, try the second operand. */
522 if (tmp == NULL_TREE
523 && TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME)
525 tree op0 = TREE_OPERAND (cond, 0);
526 name = TREE_OPERAND (cond, 1);
527 def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
528 if (!def_stmt || !can_propagate_from (def_stmt))
529 return did_something;
531 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
532 tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
533 boolean_type_node,
534 op0, rhs1, !single_use1_p);
536 /* If that wasn't successful either, try both operands. */
537 if (tmp == NULL_TREE
538 && rhs0 != NULL_TREE
539 && rhs1 != NULL_TREE)
540 tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
541 boolean_type_node,
542 rhs0,
543 fold_convert_loc (loc,
544 TREE_TYPE (rhs0),
545 rhs1),
546 !(single_use0_p && single_use1_p));
548 else if (TREE_CODE (cond) == SSA_NAME)
550 name = cond;
551 def_stmt = get_prop_source_stmt (name, true, NULL);
552 if (def_stmt || !can_propagate_from (def_stmt))
553 return did_something;
555 rhs0 = gimple_assign_rhs1 (def_stmt);
556 tmp = combine_cond_expr_cond (loc, NE_EXPR, boolean_type_node, rhs0,
557 build_int_cst (TREE_TYPE (rhs0), 0),
558 false);
561 if (tmp)
563 if (dump_file && tmp)
565 fprintf (dump_file, " Replaced '");
566 print_generic_expr (dump_file, cond, 0);
567 fprintf (dump_file, "' with '");
568 print_generic_expr (dump_file, tmp, 0);
569 fprintf (dump_file, "'\n");
572 gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp));
573 stmt = gsi_stmt (*gsi_p);
574 update_stmt (stmt);
576 /* Remove defining statements. */
577 remove_prop_source_from_use (name, NULL);
579 if (is_gimple_min_invariant (tmp))
580 did_something = 2;
581 else if (did_something == 0)
582 did_something = 1;
584 /* Continue combining. */
585 continue;
588 break;
589 } while (1);
591 return did_something;
594 /* We've just substituted an ADDR_EXPR into stmt. Update all the
595 relevant data structures to match. */
597 static void
598 tidy_after_forward_propagate_addr (gimple stmt)
600 /* We may have turned a trapping insn into a non-trapping insn. */
601 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
602 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
603 cfg_changed = true;
605 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
606 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
609 /* DEF_RHS contains the address of the 0th element in an array.
610 USE_STMT uses type of DEF_RHS to compute the address of an
611 arbitrary element within the array. The (variable) byte offset
612 of the element is contained in OFFSET.
614 We walk back through the use-def chains of OFFSET to verify that
615 it is indeed computing the offset of an element within the array
616 and extract the index corresponding to the given byte offset.
618 We then try to fold the entire address expression into a form
619 &array[index].
621 If we are successful, we replace the right hand side of USE_STMT
622 with the new address computation. */
/* NOTE(review): this listing was scraped from a web viewer that dropped
   brace-only lines and collapsed indentation; the leading numbers are the
   original file's line numbers.  Comments below annotate the visible code
   without altering it.  */
624 static bool
625 forward_propagate_addr_into_variable_array_index (tree offset,
626 tree def_rhs,
627 gimple_stmt_iterator *use_stmt_gsi)
629 tree index, tunit;
630 gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
631 tree new_rhs, tmp;
/* Compute TUNIT, the byte size of one array element, from the type of
   DEF_RHS; bail out if DEF_RHS is neither &array[ref] nor &array, or if
   the element size is not a host integer.  */
633 if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
634 tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
635 else if (TREE_CODE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))) == ARRAY_TYPE)
636 tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))));
637 else
638 return false;
639 if (!host_integerp (tunit, 1))
640 return false;
642 /* Get the offset's defining statement. */
643 offset_def = SSA_NAME_DEF_STMT (offset);
645 /* Try to find an expression for a proper index. This is either a
646 multiplication expression by the element size or just the ssa name we came
647 along in case the element size is one. In that case, however, we do not
648 allow multiplications because they can be computing index to a higher
649 level dimension (PR 37861). */
650 if (integer_onep (tunit))
652 if (is_gimple_assign (offset_def)
653 && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
654 return false;
656 index = offset;
658 else
660 /* The statement which defines OFFSET before type conversion
661 must be a simple GIMPLE_ASSIGN. */
662 if (!is_gimple_assign (offset_def))
663 return false;
665 /* The RHS of the statement which defines OFFSET must be a
666 multiplication of an object by the size of the array elements.
667 This implicitly verifies that the size of the array elements
668 is constant. */
669 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
670 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
671 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
673 /* The first operand to the MULT_EXPR is the desired index. */
674 index = gimple_assign_rhs1 (offset_def);
676 /* If we have idx * tunit + CST * tunit re-associate that. */
677 else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
678 || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
679 && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
680 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
681 && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
682 gimple_assign_rhs2 (offset_def),
683 tunit)) != NULL_TREE)
685 gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
686 if (is_gimple_assign (offset_def2)
687 && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
688 && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
689 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
/* Re-associated index: rhs1(offset_def2) PLUS/MINUS (CST / tunit).  */
691 index = fold_build2 (gimple_assign_rhs_code (offset_def),
692 TREE_TYPE (offset),
693 gimple_assign_rhs1 (offset_def2), tmp);
695 else
696 return false;
698 else
699 return false;
702 /* Replace the pointer addition with array indexing. */
703 index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
704 true, GSI_SAME_STMT);
/* If DEF_RHS already is &array[ref], reuse it with the index swapped in;
   otherwise build a fresh ARRAY_REF of element zero's array.  */
705 if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
707 new_rhs = unshare_expr (def_rhs);
708 TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 1) = index;
710 else
712 new_rhs = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))),
713 unshare_expr (TREE_OPERAND (def_rhs, 0)),
714 index, integer_zero_node, NULL_TREE);
715 new_rhs = build_fold_addr_expr (new_rhs);
/* If the new address type does not match the lhs type, force the address
   into a separate stmt and convert it.  */
716 if (!useless_type_conversion_p (TREE_TYPE (gimple_assign_lhs (use_stmt)),
717 TREE_TYPE (new_rhs)))
719 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true,
720 NULL_TREE, true, GSI_SAME_STMT);
721 new_rhs = fold_convert (TREE_TYPE (gimple_assign_lhs (use_stmt)),
722 new_rhs);
725 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
726 use_stmt = gsi_stmt (*use_stmt_gsi);
728 /* That should have created gimple, so there is no need to
729 record information to undo the propagation. */
730 fold_stmt_inplace (use_stmt);
731 tidy_after_forward_propagate_addr (use_stmt);
732 return true;
735 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
736 ADDR_EXPR <whatever>.
738 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
739 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
740 node or for recovery of array indexing from pointer arithmetic.
742 Return true if the propagation was successful (the propagation can
743 be not totally successful, yet things may have been changed). */
/* NOTE(review): this listing was scraped from a web viewer that dropped
   brace-only lines and collapsed indentation; the leading numbers are the
   original file's line numbers.  Comments below annotate the visible code
   without altering it.  */
745 static bool
746 forward_propagate_addr_expr_1 (tree name, tree def_rhs,
747 gimple_stmt_iterator *use_stmt_gsi,
748 bool single_use_p)
750 tree lhs, rhs, rhs2, array_ref;
751 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
752 enum tree_code rhs_code;
/* RES stays true unless we must report a partially failed propagation
   (see the struct-assignment case further down).  */
753 bool res = true;
755 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
757 lhs = gimple_assign_lhs (use_stmt);
758 rhs_code = gimple_assign_rhs_code (use_stmt);
759 rhs = gimple_assign_rhs1 (use_stmt);
761 /* Trivial cases. The use statement could be a trivial copy or a
762 useless conversion. Recurse to the uses of the lhs as copyprop does
763 not copy through different variant pointers and FRE does not catch
764 all useless conversions. Treat the case of a single-use name and
765 a conversion to def_rhs type separate, though. */
766 if (TREE_CODE (lhs) == SSA_NAME
767 && ((rhs_code == SSA_NAME && rhs == name)
768 || CONVERT_EXPR_CODE_P (rhs_code)))
770 /* Only recurse if we don't deal with a single use or we cannot
771 do the propagation to the current statement. In particular
772 we can end up with a conversion needed for a non-invariant
773 address which we cannot do in a single statement. */
774 if (!single_use_p
775 || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
776 && (!is_gimple_min_invariant (def_rhs)
777 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
778 && POINTER_TYPE_P (TREE_TYPE (def_rhs))
779 && (TYPE_PRECISION (TREE_TYPE (lhs))
780 > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
781 return forward_propagate_addr_expr (lhs, def_rhs);
/* Single-use copy/conversion: substitute DEF_RHS in place, fixing the
   rhs code to a NOP_EXPR when the types are not compatible.  */
783 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
784 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
785 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
786 else
787 gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
788 return true;
791 /* Propagate through constant pointer adjustments. */
792 if (TREE_CODE (lhs) == SSA_NAME
793 && rhs_code == POINTER_PLUS_EXPR
794 && rhs == name
795 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
797 tree new_def_rhs;
798 /* As we come here with non-invariant addresses in def_rhs we need
799 to make sure we can build a valid constant offsetted address
800 for further propagation. Simply rely on fold building that
801 and check after the fact. */
802 new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
803 def_rhs,
804 fold_convert (ptr_type_node,
805 gimple_assign_rhs2 (use_stmt)));
806 if (TREE_CODE (new_def_rhs) == MEM_REF
807 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
808 return false;
809 new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
810 TREE_TYPE (rhs));
812 /* Recurse. If we could propagate into all uses of lhs do not
813 bother to replace into the current use but just pretend we did. */
814 if (TREE_CODE (new_def_rhs) == ADDR_EXPR
815 && forward_propagate_addr_expr (lhs, new_def_rhs))
816 return true;
818 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
819 gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
820 new_def_rhs, NULL_TREE);
821 else if (is_gimple_min_invariant (new_def_rhs))
822 gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
823 new_def_rhs, NULL_TREE);
824 else
825 return false;
826 gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
827 update_stmt (use_stmt);
828 return true;
831 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
832 ADDR_EXPR will not appear on the LHS. */
833 lhs = gimple_assign_lhs (use_stmt);
834 while (handled_component_p (lhs))
835 lhs = TREE_OPERAND (lhs, 0);
837 /* Now see if the LHS node is a MEM_REF using NAME. If so,
838 propagate the ADDR_EXPR into the use of NAME and fold the result. */
839 if (TREE_CODE (lhs) == MEM_REF
840 && TREE_OPERAND (lhs, 0) == name)
842 tree def_rhs_base;
843 HOST_WIDE_INT def_rhs_offset;
844 /* If the address is invariant we can always fold it. */
845 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
846 &def_rhs_offset)))
/* Accumulate the MEM_REF offset and the decomposed address offset
   into OFF, then rewrite the MEM_REF base and offset in place.  */
848 double_int off = mem_ref_offset (lhs);
849 tree new_ptr;
850 off = double_int_add (off,
851 shwi_to_double_int (def_rhs_offset));
852 if (TREE_CODE (def_rhs_base) == MEM_REF)
854 off = double_int_add (off, mem_ref_offset (def_rhs_base));
855 new_ptr = TREE_OPERAND (def_rhs_base, 0);
857 else
858 new_ptr = build_fold_addr_expr (def_rhs_base);
859 TREE_OPERAND (lhs, 0) = new_ptr;
860 TREE_OPERAND (lhs, 1)
861 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
862 tidy_after_forward_propagate_addr (use_stmt);
863 /* Continue propagating into the RHS if this was not the only use. */
864 if (single_use_p)
865 return true;
867 /* If the LHS is a plain dereference and the value type is the same as
868 that of the pointed-to type of the address we can put the
869 dereferenced address on the LHS preserving the original alias-type. */
870 else if (gimple_assign_lhs (use_stmt) == lhs
871 && useless_type_conversion_p
872 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
873 TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
/* Temporarily rewrite the innermost base of DEF_RHS's operand into a
   MEM_REF, unshare that as the new lhs, then restore the original
   (SAVED) base.  */
875 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
876 tree new_offset, new_base, saved;
877 while (handled_component_p (*def_rhs_basep))
878 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
879 saved = *def_rhs_basep;
880 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
882 new_base = TREE_OPERAND (*def_rhs_basep, 0);
883 new_offset
884 = int_const_binop (PLUS_EXPR, TREE_OPERAND (lhs, 1),
885 TREE_OPERAND (*def_rhs_basep, 1), 0);
887 else
889 new_base = build_fold_addr_expr (*def_rhs_basep);
890 new_offset = TREE_OPERAND (lhs, 1);
892 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
893 new_base, new_offset);
894 gimple_assign_set_lhs (use_stmt,
895 unshare_expr (TREE_OPERAND (def_rhs, 0)));
896 *def_rhs_basep = saved;
897 tidy_after_forward_propagate_addr (use_stmt);
898 /* Continue propagating into the RHS if this was not the
899 only use. */
900 if (single_use_p)
901 return true;
903 else
904 /* We can have a struct assignment dereferencing our name twice.
905 Note that we didn't propagate into the lhs to not falsely
906 claim we did when propagating into the rhs. */
907 res = false;
910 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
911 nodes from the RHS. */
912 rhs = gimple_assign_rhs1 (use_stmt);
913 if (TREE_CODE (rhs) == ADDR_EXPR)
914 rhs = TREE_OPERAND (rhs, 0);
915 while (handled_component_p (rhs))
916 rhs = TREE_OPERAND (rhs, 0);
918 /* Now see if the RHS node is a MEM_REF using NAME. If so,
919 propagate the ADDR_EXPR into the use of NAME and fold the result. */
920 if (TREE_CODE (rhs) == MEM_REF
921 && TREE_OPERAND (rhs, 0) == name)
923 tree def_rhs_base;
924 HOST_WIDE_INT def_rhs_offset;
925 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
926 &def_rhs_offset)))
/* Mirror of the LHS invariant-address case above, applied to the
   RHS MEM_REF.  */
928 double_int off = mem_ref_offset (rhs);
929 tree new_ptr;
930 off = double_int_add (off,
931 shwi_to_double_int (def_rhs_offset));
932 if (TREE_CODE (def_rhs_base) == MEM_REF)
934 off = double_int_add (off, mem_ref_offset (def_rhs_base));
935 new_ptr = TREE_OPERAND (def_rhs_base, 0);
937 else
938 new_ptr = build_fold_addr_expr (def_rhs_base);
939 TREE_OPERAND (rhs, 0) = new_ptr;
940 TREE_OPERAND (rhs, 1)
941 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
942 fold_stmt_inplace (use_stmt);
943 tidy_after_forward_propagate_addr (use_stmt);
944 return res;
946 /* If the LHS is a plain dereference and the value type is the same as
947 that of the pointed-to type of the address we can put the
948 dereferenced address on the LHS preserving the original alias-type. */
949 else if (gimple_assign_rhs1 (use_stmt) == rhs
950 && useless_type_conversion_p
951 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
952 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
/* Mirror of the LHS plain-dereference case above, applied to the RHS;
   the innermost base is again temporarily rewritten and restored.  */
954 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
955 tree new_offset, new_base, saved;
956 while (handled_component_p (*def_rhs_basep))
957 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
958 saved = *def_rhs_basep;
959 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
961 new_base = TREE_OPERAND (*def_rhs_basep, 0);
962 new_offset
963 = int_const_binop (PLUS_EXPR, TREE_OPERAND (rhs, 1),
964 TREE_OPERAND (*def_rhs_basep, 1), 0);
966 else
968 new_base = build_fold_addr_expr (*def_rhs_basep);
969 new_offset = TREE_OPERAND (rhs, 1);
971 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
972 new_base, new_offset);
973 gimple_assign_set_rhs1 (use_stmt,
974 unshare_expr (TREE_OPERAND (def_rhs, 0)));
975 *def_rhs_basep = saved;
976 fold_stmt_inplace (use_stmt);
977 tidy_after_forward_propagate_addr (use_stmt);
978 return res;
982 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
983 is nothing to do. */
984 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
985 || gimple_assign_rhs1 (use_stmt) != name)
986 return false;
988 /* The remaining cases are all for turning pointer arithmetic into
989 array indexing. They only apply when we have the address of
990 element zero in an array. If that is not the case then there
991 is nothing to do. */
992 array_ref = TREE_OPERAND (def_rhs, 0);
993 if ((TREE_CODE (array_ref) != ARRAY_REF
994 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
995 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
996 && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
997 return false;
999 rhs2 = gimple_assign_rhs2 (use_stmt);
1000 /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size
1001 of the elements in X into &x[C1 + C2/element size]. */
1002 if (TREE_CODE (rhs2) == INTEGER_CST)
1004 tree new_rhs = maybe_fold_stmt_addition (gimple_location (use_stmt),
1005 TREE_TYPE (def_rhs),
1006 def_rhs, rhs2);
1007 if (new_rhs)
1009 tree type = TREE_TYPE (gimple_assign_lhs (use_stmt));
1010 new_rhs = unshare_expr (new_rhs);
1011 if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs)))
1013 if (!is_gimple_min_invariant (new_rhs))
1014 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs,
1015 true, NULL_TREE,
1016 true, GSI_SAME_STMT);
1017 new_rhs = fold_convert (type, new_rhs);
1019 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
1020 use_stmt = gsi_stmt (*use_stmt_gsi);
1021 update_stmt (use_stmt);
1022 tidy_after_forward_propagate_addr (use_stmt);
1023 return true;
1027 /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
1028 converting a multiplication of an index by the size of the
1029 array elements, then the result is converted into the proper
1030 type for the arithmetic. */
1031 if (TREE_CODE (rhs2) == SSA_NAME
1032 && (TREE_CODE (array_ref) != ARRAY_REF
1033 || integer_zerop (TREE_OPERAND (array_ref, 1)))
1034 && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
1035 /* Avoid problems with IVopts creating PLUS_EXPRs with a
1036 different type than their operands. */
1037 && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
1038 return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
1039 use_stmt_gsi);
1040 return false;
1043 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
1045 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
1046 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
1047 node or for recovery of array indexing from pointer arithmetic.
1048 Returns true, if all uses have been propagated into. */
1050 static bool
1051 forward_propagate_addr_expr (tree name, tree rhs)
/* Loop depth of NAME's defining statement; compared against each use
   below so we do not move a non-invariant address computation into a
   deeper loop nest.  */
1053 int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
1054 imm_use_iterator iter;
1055 gimple use_stmt;
/* Becomes false as soon as one use could not be propagated into.  */
1056 bool all = true;
1057 bool single_use_p = has_single_use (name);
1059 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
1061 bool result;
1062 tree use_rhs;
1064 /* If the use is not in a simple assignment statement, then
1065 there is nothing we can do. */
1066 if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
/* Debug statements neither receive the propagation nor count as a
   failure to propagate.  */
1068 if (!is_gimple_debug (use_stmt))
1069 all = false;
1070 continue;
1073 /* If the use is in a deeper loop nest, then we do not want
1074 to propagate non-invariant ADDR_EXPRs into the loop as that
1075 is likely adding expression evaluations into the loop. */
1076 if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
1077 && !is_gimple_min_invariant (rhs))
1079 all = false;
1080 continue;
1084 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1085 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
1086 single_use_p);
1087 /* If the use has moved to a different statement adjust
1088 the update machinery for the old statement too. */
1089 if (use_stmt != gsi_stmt (gsi))
1091 update_stmt (use_stmt);
1092 use_stmt = gsi_stmt (gsi);
1095 update_stmt (use_stmt);
1097 all &= result;
1099 /* Remove intermediate now unused copy and conversion chains. */
/* After a successful propagation the use may have degenerated into a
   dead SSA-name copy/conversion; delete it to keep the IL clean.  */
1100 use_rhs = gimple_assign_rhs1 (use_stmt);
1101 if (result
1102 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
1103 && TREE_CODE (use_rhs) == SSA_NAME
1104 && has_zero_uses (gimple_assign_lhs (use_stmt)))
1106 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1107 release_defs (use_stmt);
1108 gsi_remove (&gsi, true);
1112 return all;
1115 /* Forward propagate the comparison defined in STMT like
1116 cond_1 = x CMP y to uses of the form
1117 a_1 = (T')cond_1
1118 a_1 = !cond_1
1119 a_1 = cond_1 != 0
1120 Returns true if stmt is now unused. */
1122 static bool
1123 forward_propagate_comparison (gimple stmt)
1125 tree name = gimple_assign_lhs (stmt);
1126 gimple use_stmt;
/* Replacement expression built for the use statement; stays NULL_TREE
   until one of the cases below matches.  */
1127 tree tmp = NULL_TREE;
1129 /* Don't propagate ssa names that occur in abnormal phis. */
1130 if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1131 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
1132 || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
1133 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
1134 return false;
1136 /* Do not un-cse comparisons. But propagate through copies. */
/* NOTE(review): get_prop_dest_stmt is defined elsewhere in this file;
   it appears to chase single-use copies of NAME and update NAME to the
   copy's result -- confirm against its definition.  */
1137 use_stmt = get_prop_dest_stmt (name, &name);
1138 if (!use_stmt)
1139 return false;
1141 /* Conversion of the condition result to another integral type. */
1142 if (is_gimple_assign (use_stmt)
1143 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt))
1144 || TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
1145 == tcc_comparison
1146 || gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
1147 && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (use_stmt))))
1149 tree lhs = gimple_assign_lhs (use_stmt);
1151 /* We can propagate the condition into a conversion. */
1152 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt)))
1154 /* Avoid using fold here as that may create a COND_EXPR with
1155 non-boolean condition as canonical form. */
1156 tmp = build2 (gimple_assign_rhs_code (stmt), TREE_TYPE (lhs),
1157 gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
1159 /* We can propagate the condition into X op CST where op
1160 is EQ_EXPR or NE_EXPR and CST is either one or zero. */
1161 else if (TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
1162 == tcc_comparison
1163 && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
1164 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
1166 enum tree_code code = gimple_assign_rhs_code (use_stmt);
1167 tree cst = gimple_assign_rhs2 (use_stmt);
1168 tree cond;
1170 cond = build2 (gimple_assign_rhs_code (stmt),
1171 TREE_TYPE (cst),
1172 gimple_assign_rhs1 (stmt),
1173 gimple_assign_rhs2 (stmt));
/* Let the combiner fold (x CMP y) EQ/NE CST; bail out if it cannot
   produce a simplified condition.  */
1175 tmp = combine_cond_expr_cond (gimple_location (use_stmt),
1176 code, TREE_TYPE (lhs),
1177 cond, cst, false);
1178 if (tmp == NULL_TREE)
1179 return false;
1181 /* We can propagate the condition into a statement that
1182 computes the logical negation of the comparison result. */
1183 else if (gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
1185 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
/* Inverting a floating-point comparison must respect NaNs;
   invert_tree_comparison refuses with ERROR_MARK when no valid
   inverse exists.  */
1186 bool nans = HONOR_NANS (TYPE_MODE (type));
1187 enum tree_code code;
1188 code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
1189 if (code == ERROR_MARK)
1190 return false;
1192 tmp = build2 (code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
1193 gimple_assign_rhs2 (stmt));
1195 else
1196 return false;
/* Install the combined expression as the use statement's new RHS.  */
1199 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1200 gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
1201 use_stmt = gsi_stmt (gsi);
1202 update_stmt (use_stmt);
1205 /* Remove defining statements. */
1206 remove_prop_source_from_use (name, stmt);
1208 if (dump_file && (dump_flags & TDF_DETAILS))
1210 tree old_rhs = rhs_to_tree (TREE_TYPE (gimple_assign_lhs (stmt)),
1211 stmt);
1212 fprintf (dump_file, " Replaced '");
1213 print_generic_expr (dump_file, old_rhs, dump_flags);
1214 fprintf (dump_file, "' with '");
1215 print_generic_expr (dump_file, tmp, dump_flags);
1216 fprintf (dump_file, "'\n");
1219 return true;
1222 return false;
1225 /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1226 If so, we can change STMT into lhs = y which can later be copy
1227 propagated. Similarly for negation.
1229 This could trivially be formulated as a forward propagation
1230 to immediate uses. However, we already had an implementation
1231 from DOM which used backward propagation via the use-def links.
1233 It turns out that backward propagation is actually faster as
1234 there's less work to do for each NOT/NEG expression we find.
1235 Backwards propagation needs to look at the statement in a single
1236 backlink. Forward propagation needs to look at potentially more
1237 than one forward link. */
1239 static void
1240 simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
1242 gimple stmt = gsi_stmt (*gsi_p);
1243 tree rhs = gimple_assign_rhs1 (stmt);
1244 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
1246 /* See if the RHS_DEF_STMT has the same form as our statement. */
1247 if (is_gimple_assign (rhs_def_stmt)
1248 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
1250 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
1252 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1253 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1254 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
1256 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
1257 stmt = gsi_stmt (*gsi_p);
1258 update_stmt (stmt);
1263 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1264 the condition which we may be able to optimize better. */
1266 static void
1267 simplify_gimple_switch (gimple stmt)
1269 tree cond = gimple_switch_index (stmt);
1270 tree def, to, ti;
1271 gimple def_stmt;
1273 /* The optimization that we really care about is removing unnecessary
1274 casts. That will let us do much better in propagating the inferred
1275 constant at the switch target. */
1276 if (TREE_CODE (cond) == SSA_NAME)
1278 def_stmt = SSA_NAME_DEF_STMT (cond);
1279 if (is_gimple_assign (def_stmt))
1281 if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
1283 int need_precision;
1284 bool fail;
1286 def = gimple_assign_rhs1 (def_stmt);
1288 #ifdef ENABLE_CHECKING
1289 /* ??? Why was Jeff testing this? We are gimple... */
1290 gcc_assert (is_gimple_val (def));
1291 #endif
1293 to = TREE_TYPE (cond);
1294 ti = TREE_TYPE (def);
1296 /* If we have an extension that preserves value, then we
1297 can copy the source value into the switch. */
1299 need_precision = TYPE_PRECISION (ti);
1300 fail = false;
1301 if (! INTEGRAL_TYPE_P (ti))
1302 fail = true;
1303 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
1304 fail = true;
1305 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
1306 need_precision += 1;
1307 if (TYPE_PRECISION (to) < need_precision)
1308 fail = true;
1310 if (!fail)
1312 gimple_switch_set_index (stmt, def);
1313 update_stmt (stmt);
1320 /* Run bitwise and assignments throug the folder. If the first argument is an
1321 ssa name that is itself a result of a typecast of an ADDR_EXPR to an
1322 integer, feed the ADDR_EXPR to the folder rather than the ssa name.
1325 static void
1326 simplify_bitwise_and (gimple_stmt_iterator *gsi, gimple stmt)
1328 tree res;
1329 tree arg1 = gimple_assign_rhs1 (stmt);
1330 tree arg2 = gimple_assign_rhs2 (stmt);
1332 if (TREE_CODE (arg2) != INTEGER_CST)
1333 return;
1335 if (TREE_CODE (arg1) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (arg1))
1337 gimple def = SSA_NAME_DEF_STMT (arg1);
1339 if (gimple_assign_cast_p (def)
1340 && INTEGRAL_TYPE_P (gimple_expr_type (def)))
1342 tree op = gimple_assign_rhs1 (def);
1344 if (TREE_CODE (op) == ADDR_EXPR)
1345 arg1 = op;
1349 res = fold_binary_loc (gimple_location (stmt),
1350 BIT_AND_EXPR, TREE_TYPE (gimple_assign_lhs (stmt)),
1351 arg1, arg2);
1352 if (res && is_gimple_min_invariant (res))
1354 gimple_assign_set_rhs_from_tree (gsi, res);
1355 update_stmt (stmt);
1357 return;
1360 /* Main entry point for the forward propagation optimizer. */
1362 static unsigned int
1363 tree_ssa_forward_propagate_single_use_vars (void)
1365 basic_block bb;
/* TODO_* flags to hand back to the pass manager.  */
1366 unsigned int todoflags = 0;
1368 cfg_changed = false;
1370 FOR_EACH_BB (bb)
1372 gimple_stmt_iterator gsi;
1374 /* Note we update GSI within the loop as necessary. */
1375 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
1377 gimple stmt = gsi_stmt (gsi);
1379 /* If this statement sets an SSA_NAME to an address,
1380 try to propagate the address into the uses of the SSA_NAME. */
1381 if (is_gimple_assign (stmt))
1383 tree lhs = gimple_assign_lhs (stmt);
1384 tree rhs = gimple_assign_rhs1 (stmt);
/* Propagation only makes sense for SSA-name destinations.  */
1386 if (TREE_CODE (lhs) != SSA_NAME)
1388 gsi_next (&gsi);
1389 continue;
1392 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR
1393 /* Handle pointer conversions on invariant addresses
1394 as well, as this is valid gimple. */
1395 || (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1396 && TREE_CODE (rhs) == ADDR_EXPR
1397 && POINTER_TYPE_P (TREE_TYPE (lhs))))
1399 tree base = get_base_address (TREE_OPERAND (rhs, 0))
/* Propagate only when the address is invariant (or has no DECL
   base) and no abnormal SSA names are involved; when every use
   was propagated into, the definition is dead and removed.  */
1400 if ((!base
1401 || !DECL_P (base)
1402 || decl_address_invariant_p (base))
1403 && !stmt_references_abnormal_ssa_name (stmt)
1404 && forward_propagate_addr_expr (lhs, rhs))
1406 release_defs (stmt);
1407 todoflags |= TODO_remove_unused_locals;
/* gsi_remove leaves GSI at the following statement, so no
   gsi_next on this path.  */
1408 gsi_remove (&gsi, true);
1410 else
1411 gsi_next (&gsi);
1413 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1415 if (TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
1416 /* ??? Better adjust the interface to that function
1417 instead of building new trees here. */
1418 && forward_propagate_addr_expr
1419 (lhs,
1420 build1 (ADDR_EXPR,
1421 TREE_TYPE (rhs),
1422 fold_build2 (MEM_REF,
1423 TREE_TYPE (TREE_TYPE (rhs)),
1424 rhs,
1425 fold_convert
1426 (ptr_type_node,
1427 gimple_assign_rhs2 (stmt))))))
1429 release_defs (stmt);
1430 todoflags |= TODO_remove_unused_locals;
1431 gsi_remove (&gsi, true);
1433 else if (is_gimple_min_invariant (rhs))
1435 /* Make sure to fold &a[0] + off_1 here. */
1436 fold_stmt_inplace (stmt);
1437 update_stmt (stmt);
/* If folding left the statement a POINTER_PLUS_EXPR, move on;
   otherwise keep GSI in place so the folded form is revisited
   by the dispatch above.  */
1438 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1439 gsi_next (&gsi);
1441 else
1442 gsi_next (&gsi);
1444 else if ((gimple_assign_rhs_code (stmt) == BIT_NOT_EXPR
1445 || gimple_assign_rhs_code (stmt) == NEGATE_EXPR)
1446 && TREE_CODE (rhs) == SSA_NAME)
1448 simplify_not_neg_expr (&gsi);
1449 gsi_next (&gsi);
1451 else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
1453 /* In this case the entire COND_EXPR is in rhs1. */
1454 int did_something;
/* Defer overflow warnings so they are only emitted when the
   propagation actually took place.  */
1455 fold_defer_overflow_warnings ();
1456 did_something = forward_propagate_into_cond (&gsi);
1457 stmt = gsi_stmt (gsi);
/* A return of 2 indicates the CFG needs cleanup -- TODO confirm
   against forward_propagate_into_cond, defined outside this
   chunk.  */
1458 if (did_something == 2)
1459 cfg_changed = true;
1460 fold_undefer_overflow_warnings (!TREE_NO_WARNING (rhs)
1461 && did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
1462 gsi_next (&gsi);
1464 else if (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1465 == tcc_comparison)
/* forward_propagate_comparison returns true when STMT became
   unused and may be deleted.  */
1467 if (forward_propagate_comparison (stmt))
1469 release_defs (stmt);
1470 todoflags |= TODO_remove_unused_locals;
1471 gsi_remove (&gsi, true);
1473 else
1474 gsi_next (&gsi);
1476 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
1478 simplify_bitwise_and (&gsi, stmt);
1479 gsi_next (&gsi);
1481 else
1482 gsi_next (&gsi);
1484 else if (gimple_code (stmt) == GIMPLE_SWITCH)
1486 simplify_gimple_switch (stmt);
1487 gsi_next (&gsi);
1489 else if (gimple_code (stmt) == GIMPLE_COND)
1491 int did_something;
1492 fold_defer_overflow_warnings ();
1493 did_something = forward_propagate_into_gimple_cond (stmt);
1494 if (did_something == 2)
1495 cfg_changed = true;
1496 fold_undefer_overflow_warnings (did_something, stmt,
1497 WARN_STRICT_OVERFLOW_CONDITIONAL);
1498 gsi_next (&gsi);
1500 else
1501 gsi_next (&gsi);
/* Folding a condition to a constant may have left dead edges.  */
1505 if (cfg_changed)
1506 todoflags |= TODO_cleanup_cfg;
1507 return todoflags;
1511 static bool
1512 gate_forwprop (void)
1514 return flag_tree_forwprop;
1517 struct gimple_opt_pass pass_forwprop =
1520 GIMPLE_PASS,
1521 "forwprop", /* name */
1522 gate_forwprop, /* gate */
1523 tree_ssa_forward_propagate_single_use_vars, /* execute */
1524 NULL, /* sub */
1525 NULL, /* next */
1526 0, /* static_pass_number */
1527 TV_TREE_FORWPROP, /* tv_id */
1528 PROP_cfg | PROP_ssa, /* properties_required */
1529 0, /* properties_provided */
1530 0, /* properties_destroyed */
1531 0, /* todo_flags_start */
1532 TODO_dump_func
1533 | TODO_ggc_collect
1534 | TODO_update_ssa
1535 | TODO_verify_ssa /* todo_flags_finish */