dumping cleanup phase 1 -- Removing TODO_dump_func
[official-gcc.git] / gcc / tree-ssa-forwprop.c
blob86f45021229501a808bc9f1c3e97c200c8232bf0
1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "tm_p.h"
27 #include "basic-block.h"
28 #include "timevar.h"
29 #include "tree-pretty-print.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-dump.h"
33 #include "langhooks.h"
34 #include "flags.h"
35 #include "gimple.h"
36 #include "expr.h"
38 /* This pass propagates the RHS of assignment statements into use
39 sites of the LHS of the assignment. It's basically a specialized
40 form of tree combination. It is hoped all of this can disappear
41 when we have a generalized tree combiner.
43 One class of common cases we handle is forward propagating a single use
44 variable into a COND_EXPR.
46 bb0:
47 x = a COND b;
48 if (x) goto ... else goto ...
50 Will be transformed into:
52 bb0:
53 if (a COND b) goto ... else goto ...
55 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
57 Or (assuming c1 and c2 are constants):
59 bb0:
60 x = a + c1;
61 if (x EQ/NEQ c2) goto ... else goto ...
63 Will be transformed into:
65 bb0:
66 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
68 Similarly for x = a - c1.
72 bb0:
73 x = !a
74 if (x) goto ... else goto ...
76 Will be transformed into:
78 bb0:
79 if (a == 0) goto ... else goto ...
81 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
82 For these cases, we propagate A into all, possibly more than one,
83 COND_EXPRs that use X.
87 bb0:
88 x = (typecast) a
89 if (x) goto ... else goto ...
91 Will be transformed into:
93 bb0:
94 if (a != 0) goto ... else goto ...
96 (Assuming a is an integral type and x is a boolean or x is an
97 integral and a is a boolean.)
99 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
100 For these cases, we propagate A into all, possibly more than one,
101 COND_EXPRs that use X.
103 In addition to eliminating the variable and the statement which assigns
104 a value to the variable, we may be able to later thread the jump without
105 adding insane complexity in the dominator optimizer.
107 Also note these transformations can cascade. We handle this by having
108 a worklist of COND_EXPR statements to examine. As we make a change to
109 a statement, we put it back on the worklist to examine on the next
110 iteration of the main loop.
112 A second class of propagation opportunities arises for ADDR_EXPR
113 nodes.
115 ptr = &x->y->z;
116 res = *ptr;
118 Will get turned into
120 res = x->y->z;
123 ptr = (type1*)&type2var;
124 res = *ptr
126 Will get turned into (if type1 and type2 are the same size
127 and neither have volatile on them):
128 res = VIEW_CONVERT_EXPR<type1>(type2var)
132 ptr = &x[0];
133 ptr2 = ptr + <constant>;
135 Will get turned into
137 ptr2 = &x[constant/elementsize];
141 ptr = &x[0];
142 offset = index * element_size;
143 offset_p = (pointer) offset;
144 ptr2 = ptr + offset_p
146 Will get turned into:
148 ptr2 = &x[index];
151 ssa = (int) decl
152 res = ssa & 1
154 Provided that decl has known alignment >= 2, will get turned into
156 res = 0
158 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
159 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
160 {NOT_EXPR,NEG_EXPR}.
162 This will (of course) be extended as other needs arise. */
164 static bool forward_propagate_addr_expr (tree name, tree rhs);
166 /* Set to true if we delete EH edges during the optimization. */
167 static bool cfg_changed;
169 static tree rhs_to_tree (tree type, gimple stmt);
171 /* Get the next statement we can propagate NAME's value into skipping
172 trivial copies. Returns the statement that is suitable as a
173 propagation destination or NULL_TREE if there is no such one.
174 This only returns destinations in a single-use chain. FINAL_NAME_P
175 if non-NULL is written to the ssa name that represents the use. */
177 static gimple
178 get_prop_dest_stmt (tree name, tree *final_name_p)
180 use_operand_p use;
181 gimple use_stmt;
183 do {
184 /* If name has multiple uses, bail out. */
185 if (!single_imm_use (name, &use, &use_stmt))
186 return NULL;
188 /* If this is not a trivial copy, we found it. */
189 if (!gimple_assign_ssa_name_copy_p (use_stmt)
190 || gimple_assign_rhs1 (use_stmt) != name)
191 break;
193 /* Continue searching uses of the copy destination. */
194 name = gimple_assign_lhs (use_stmt);
195 } while (1);
197 if (final_name_p)
198 *final_name_p = name;
200 return use_stmt;
203 /* Get the statement we can propagate from into NAME skipping
204 trivial copies. Returns the statement which defines the
205 propagation source or NULL_TREE if there is no such one.
206 If SINGLE_USE_ONLY is set considers only sources which have
207 a single use chain up to NAME. If SINGLE_USE_P is non-null,
208 it is set to whether the chain to NAME is a single use chain
209 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
211 static gimple
212 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
214 bool single_use = true;
216 do {
217 gimple def_stmt = SSA_NAME_DEF_STMT (name);
219 if (!has_single_use (name))
221 single_use = false;
222 if (single_use_only)
223 return NULL;
226 /* If name is defined by a PHI node or is the default def, bail out. */
227 if (!is_gimple_assign (def_stmt))
228 return NULL;
230 /* If def_stmt is not a simple copy, we possibly found it. */
231 if (!gimple_assign_ssa_name_copy_p (def_stmt))
233 tree rhs;
235 if (!single_use_only && single_use_p)
236 *single_use_p = single_use;
238 /* We can look through pointer conversions in the search
239 for a useful stmt for the comparison folding. */
240 rhs = gimple_assign_rhs1 (def_stmt);
241 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
242 && TREE_CODE (rhs) == SSA_NAME
243 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
244 && POINTER_TYPE_P (TREE_TYPE (rhs)))
245 name = rhs;
246 else
247 return def_stmt;
249 else
251 /* Continue searching the def of the copy source name. */
252 name = gimple_assign_rhs1 (def_stmt);
254 } while (1);
257 /* Checks if the destination ssa name in DEF_STMT can be used as
258 propagation source. Returns true if so, otherwise false. */
260 static bool
261 can_propagate_from (gimple def_stmt)
263 use_operand_p use_p;
264 ssa_op_iter iter;
266 gcc_assert (is_gimple_assign (def_stmt));
268 /* If the rhs has side-effects we cannot propagate from it. */
269 if (gimple_has_volatile_ops (def_stmt))
270 return false;
272 /* If the rhs is a load we cannot propagate from it. */
273 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
274 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
275 return false;
277 /* Constants can be always propagated. */
278 if (gimple_assign_single_p (def_stmt)
279 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
280 return true;
282 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
283 FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_USE)
284 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
285 return false;
287 /* If the definition is a conversion of a pointer to a function type,
288 then we can not apply optimizations as some targets require
289 function pointers to be canonicalized and in this case this
290 optimization could eliminate a necessary canonicalization. */
291 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
293 tree rhs = gimple_assign_rhs1 (def_stmt);
294 if (POINTER_TYPE_P (TREE_TYPE (rhs))
295 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
296 return false;
299 return true;
302 /* Remove a copy chain ending in NAME along the defs.
303 If NAME was replaced in its only use then this function can be used
304 to clean up dead stmts. Returns true if cleanup-cfg has to run. */
306 static bool
307 remove_prop_source_from_use (tree name)
309 gimple_stmt_iterator gsi;
310 gimple stmt;
311 bool cfg_changed = false;
313 do {
314 basic_block bb;
316 if (!has_zero_uses (name))
317 return cfg_changed;
319 stmt = SSA_NAME_DEF_STMT (name);
320 gsi = gsi_for_stmt (stmt);
321 bb = gimple_bb (stmt);
322 release_defs (stmt);
323 gsi_remove (&gsi, true);
324 cfg_changed |= gimple_purge_dead_eh_edges (bb);
326 name = (gimple_assign_copy_p (stmt)) ? gimple_assign_rhs1 (stmt) : NULL;
327 } while (name && TREE_CODE (name) == SSA_NAME);
329 return cfg_changed;
332 /* Return the rhs of a gimple_assign STMT in a form of a single tree,
333 converted to type TYPE.
335 This should disappear, but is needed so we can combine expressions and use
336 the fold() interfaces. Long term, we need to develop folding and combine
337 routines that deal with gimple exclusively . */
339 static tree
340 rhs_to_tree (tree type, gimple stmt)
342 location_t loc = gimple_location (stmt);
343 enum tree_code code = gimple_assign_rhs_code (stmt);
344 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
345 return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
346 gimple_assign_rhs2 (stmt),
347 gimple_assign_rhs3 (stmt));
348 else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
349 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
350 gimple_assign_rhs2 (stmt));
351 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
352 return build1 (code, type, gimple_assign_rhs1 (stmt));
353 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
354 return gimple_assign_rhs1 (stmt);
355 else
356 gcc_unreachable ();
359 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
360 the folded result in a form suitable for COND_EXPR_COND or
361 NULL_TREE, if there is no suitable simplified form. If
362 INVARIANT_ONLY is true only gimple_min_invariant results are
363 considered simplified. */
365 static tree
366 combine_cond_expr_cond (location_t loc, enum tree_code code, tree type,
367 tree op0, tree op1, bool invariant_only)
369 tree t;
371 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
373 t = fold_binary_loc (loc, code, type, op0, op1);
374 if (!t)
375 return NULL_TREE;
377 /* Require that we got a boolean type out if we put one in. */
378 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
380 /* Canonicalize the combined condition for use in a COND_EXPR. */
381 t = canonicalize_cond_expr_cond (t);
383 /* Bail out if we required an invariant but didn't get one. */
384 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
385 return NULL_TREE;
387 return t;
390 /* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
391 of its operand. Return a new comparison tree or NULL_TREE if there
392 were no simplifying combines. */
394 static tree
395 forward_propagate_into_comparison_1 (location_t loc,
396 enum tree_code code, tree type,
397 tree op0, tree op1)
399 tree tmp = NULL_TREE;
400 tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
401 bool single_use0_p = false, single_use1_p = false;
403 /* For comparisons use the first operand, that is likely to
404 simplify comparisons against constants. */
405 if (TREE_CODE (op0) == SSA_NAME)
407 gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
408 if (def_stmt && can_propagate_from (def_stmt))
410 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
411 tmp = combine_cond_expr_cond (loc, code, type,
412 rhs0, op1, !single_use0_p);
413 if (tmp)
414 return tmp;
418 /* If that wasn't successful, try the second operand. */
419 if (TREE_CODE (op1) == SSA_NAME)
421 gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
422 if (def_stmt && can_propagate_from (def_stmt))
424 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
425 tmp = combine_cond_expr_cond (loc, code, type,
426 op0, rhs1, !single_use1_p);
427 if (tmp)
428 return tmp;
432 /* If that wasn't successful either, try both operands. */
433 if (rhs0 != NULL_TREE
434 && rhs1 != NULL_TREE)
435 tmp = combine_cond_expr_cond (loc, code, type,
436 rhs0, rhs1,
437 !(single_use0_p && single_use1_p));
439 return tmp;
442 /* Propagate from the ssa name definition statements of the assignment
443 from a comparison at *GSI into the conditional if that simplifies it.
444 Returns true if the stmt was modified, false if not. */
446 static bool
447 forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
449 gimple stmt = gsi_stmt (*gsi);
450 tree tmp;
452 /* Combine the comparison with defining statements. */
453 tmp = forward_propagate_into_comparison_1 (gimple_location (stmt),
454 gimple_assign_rhs_code (stmt),
455 TREE_TYPE
456 (gimple_assign_lhs (stmt)),
457 gimple_assign_rhs1 (stmt),
458 gimple_assign_rhs2 (stmt));
459 if (tmp)
461 gimple_assign_set_rhs_from_tree (gsi, tmp);
462 update_stmt (stmt);
463 return true;
466 return false;
469 /* Propagate from the ssa name definition statements of COND_EXPR
470 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
471 Returns zero if no statement was changed, one if there were
472 changes and two if cfg_cleanup needs to run.
474 This must be kept in sync with forward_propagate_into_cond. */
476 static int
477 forward_propagate_into_gimple_cond (gimple stmt)
479 int did_something = 0;
480 location_t loc = gimple_location (stmt);
481 tree tmp;
482 enum tree_code code = gimple_cond_code (stmt);
484 /* We can do tree combining on SSA_NAME and comparison expressions. */
485 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
486 return 0;
488 tmp = forward_propagate_into_comparison_1 (loc, code,
489 boolean_type_node,
490 gimple_cond_lhs (stmt),
491 gimple_cond_rhs (stmt));
492 if (tmp)
494 if (dump_file && tmp)
496 tree cond = build2 (gimple_cond_code (stmt),
497 boolean_type_node,
498 gimple_cond_lhs (stmt),
499 gimple_cond_rhs (stmt));
500 fprintf (dump_file, " Replaced '");
501 print_generic_expr (dump_file, cond, 0);
502 fprintf (dump_file, "' with '");
503 print_generic_expr (dump_file, tmp, 0);
504 fprintf (dump_file, "'\n");
507 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
508 update_stmt (stmt);
510 /* Remove defining statements. */
511 if (is_gimple_min_invariant (tmp))
512 did_something = 2;
513 else if (did_something == 0)
514 did_something = 1;
517 return did_something;
521 /* Propagate from the ssa name definition statements of COND_EXPR
522 in the rhs of statement STMT into the conditional if that simplifies it.
523 Returns zero if no statement was changed, one if there were
524 changes and two if cfg_cleanup needs to run.
526 This must be kept in sync with forward_propagate_into_gimple_cond. */
528 static int
529 forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
531 gimple stmt = gsi_stmt (*gsi_p);
532 location_t loc = gimple_location (stmt);
533 int did_something = 0;
534 tree tmp = NULL_TREE;
535 tree cond = gimple_assign_rhs1 (stmt);
537 /* We can do tree combining on SSA_NAME and comparison expressions. */
538 if (COMPARISON_CLASS_P (cond))
539 tmp = forward_propagate_into_comparison_1 (loc, TREE_CODE (cond),
540 boolean_type_node,
541 TREE_OPERAND (cond, 0),
542 TREE_OPERAND (cond, 1));
543 else if (TREE_CODE (cond) == SSA_NAME)
545 tree name = cond, rhs0;
546 gimple def_stmt = get_prop_source_stmt (name, true, NULL);
547 if (!def_stmt || !can_propagate_from (def_stmt))
548 return did_something;
550 rhs0 = gimple_assign_rhs1 (def_stmt);
551 tmp = combine_cond_expr_cond (loc, NE_EXPR, boolean_type_node, rhs0,
552 build_int_cst (TREE_TYPE (rhs0), 0),
553 false);
556 if (tmp)
558 if (dump_file && tmp)
560 fprintf (dump_file, " Replaced '");
561 print_generic_expr (dump_file, cond, 0);
562 fprintf (dump_file, "' with '");
563 print_generic_expr (dump_file, tmp, 0);
564 fprintf (dump_file, "'\n");
567 gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp));
568 stmt = gsi_stmt (*gsi_p);
569 update_stmt (stmt);
571 /* Remove defining statements. */
572 if (is_gimple_min_invariant (tmp))
573 did_something = 2;
574 else if (did_something == 0)
575 did_something = 1;
578 return did_something;
581 /* We've just substituted an ADDR_EXPR into stmt. Update all the
582 relevant data structures to match. */
584 static void
585 tidy_after_forward_propagate_addr (gimple stmt)
587 /* We may have turned a trapping insn into a non-trapping insn. */
588 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
589 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
590 cfg_changed = true;
592 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
593 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
596 /* DEF_RHS contains the address of the 0th element in an array.
597 USE_STMT uses type of DEF_RHS to compute the address of an
598 arbitrary element within the array. The (variable) byte offset
599 of the element is contained in OFFSET.
601 We walk back through the use-def chains of OFFSET to verify that
602 it is indeed computing the offset of an element within the array
603 and extract the index corresponding to the given byte offset.
605 We then try to fold the entire address expression into a form
606 &array[index].
608 If we are successful, we replace the right hand side of USE_STMT
609 with the new address computation. */
611 static bool
612 forward_propagate_addr_into_variable_array_index (tree offset,
613 tree def_rhs,
614 gimple_stmt_iterator *use_stmt_gsi)
616 tree index, tunit;
617 gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
618 tree new_rhs, tmp;
620 if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
621 tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
622 else if (TREE_CODE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))) == ARRAY_TYPE)
623 tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))));
624 else
625 return false;
626 if (!host_integerp (tunit, 1))
627 return false;
629 /* Get the offset's defining statement. */
630 offset_def = SSA_NAME_DEF_STMT (offset);
632 /* Try to find an expression for a proper index. This is either a
633 multiplication expression by the element size or just the ssa name we came
634 along in case the element size is one. In that case, however, we do not
635 allow multiplications because they can be computing index to a higher
636 level dimension (PR 37861). */
637 if (integer_onep (tunit))
639 if (is_gimple_assign (offset_def)
640 && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
641 return false;
643 index = offset;
645 else
647 /* The statement which defines OFFSET before type conversion
648 must be a simple GIMPLE_ASSIGN. */
649 if (!is_gimple_assign (offset_def))
650 return false;
652 /* The RHS of the statement which defines OFFSET must be a
653 multiplication of an object by the size of the array elements.
654 This implicitly verifies that the size of the array elements
655 is constant. */
656 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
657 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
658 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
660 /* The first operand to the MULT_EXPR is the desired index. */
661 index = gimple_assign_rhs1 (offset_def);
663 /* If we have idx * tunit + CST * tunit re-associate that. */
664 else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
665 || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
666 && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
667 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
668 && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
669 gimple_assign_rhs2 (offset_def),
670 tunit)) != NULL_TREE)
672 gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
673 if (is_gimple_assign (offset_def2)
674 && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
675 && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
676 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
678 index = fold_build2 (gimple_assign_rhs_code (offset_def),
679 TREE_TYPE (offset),
680 gimple_assign_rhs1 (offset_def2), tmp);
682 else
683 return false;
685 else
686 return false;
689 /* Replace the pointer addition with array indexing. */
690 index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
691 true, GSI_SAME_STMT);
692 if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
694 new_rhs = unshare_expr (def_rhs);
695 TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 1) = index;
697 else
699 new_rhs = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))),
700 unshare_expr (TREE_OPERAND (def_rhs, 0)),
701 index, integer_zero_node, NULL_TREE);
702 new_rhs = build_fold_addr_expr (new_rhs);
703 if (!useless_type_conversion_p (TREE_TYPE (gimple_assign_lhs (use_stmt)),
704 TREE_TYPE (new_rhs)))
706 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true,
707 NULL_TREE, true, GSI_SAME_STMT);
708 new_rhs = fold_convert (TREE_TYPE (gimple_assign_lhs (use_stmt)),
709 new_rhs);
712 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
713 use_stmt = gsi_stmt (*use_stmt_gsi);
715 /* That should have created gimple, so there is no need to
716 record information to undo the propagation. */
717 fold_stmt_inplace (use_stmt);
718 tidy_after_forward_propagate_addr (use_stmt);
719 return true;
722 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
723 ADDR_EXPR <whatever>.
725 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
726 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
727 node or for recovery of array indexing from pointer arithmetic.
729 Return true if the propagation was successful (the propagation can
730 be not totally successful, yet things may have been changed). */
732 static bool
733 forward_propagate_addr_expr_1 (tree name, tree def_rhs,
734 gimple_stmt_iterator *use_stmt_gsi,
735 bool single_use_p)
737 tree lhs, rhs, rhs2, array_ref;
738 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
739 enum tree_code rhs_code;
740 bool res = true;
742 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
744 lhs = gimple_assign_lhs (use_stmt);
745 rhs_code = gimple_assign_rhs_code (use_stmt);
746 rhs = gimple_assign_rhs1 (use_stmt);
748 /* Trivial cases. The use statement could be a trivial copy or a
749 useless conversion. Recurse to the uses of the lhs as copyprop does
750 not copy through different variant pointers and FRE does not catch
751 all useless conversions. Treat the case of a single-use name and
752 a conversion to def_rhs type separate, though. */
753 if (TREE_CODE (lhs) == SSA_NAME
754 && ((rhs_code == SSA_NAME && rhs == name)
755 || CONVERT_EXPR_CODE_P (rhs_code)))
757 /* Only recurse if we don't deal with a single use or we cannot
758 do the propagation to the current statement. In particular
759 we can end up with a conversion needed for a non-invariant
760 address which we cannot do in a single statement. */
761 if (!single_use_p
762 || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
763 && (!is_gimple_min_invariant (def_rhs)
764 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
765 && POINTER_TYPE_P (TREE_TYPE (def_rhs))
766 && (TYPE_PRECISION (TREE_TYPE (lhs))
767 > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
768 return forward_propagate_addr_expr (lhs, def_rhs);
770 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
771 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
772 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
773 else
774 gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
775 return true;
778 /* Propagate through constant pointer adjustments. */
779 if (TREE_CODE (lhs) == SSA_NAME
780 && rhs_code == POINTER_PLUS_EXPR
781 && rhs == name
782 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
784 tree new_def_rhs;
785 /* As we come here with non-invariant addresses in def_rhs we need
786 to make sure we can build a valid constant offsetted address
787 for further propagation. Simply rely on fold building that
788 and check after the fact. */
789 new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
790 def_rhs,
791 fold_convert (ptr_type_node,
792 gimple_assign_rhs2 (use_stmt)));
793 if (TREE_CODE (new_def_rhs) == MEM_REF
794 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
795 return false;
796 new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
797 TREE_TYPE (rhs));
799 /* Recurse. If we could propagate into all uses of lhs do not
800 bother to replace into the current use but just pretend we did. */
801 if (TREE_CODE (new_def_rhs) == ADDR_EXPR
802 && forward_propagate_addr_expr (lhs, new_def_rhs))
803 return true;
805 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
806 gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
807 new_def_rhs, NULL_TREE);
808 else if (is_gimple_min_invariant (new_def_rhs))
809 gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
810 new_def_rhs, NULL_TREE);
811 else
812 return false;
813 gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
814 update_stmt (use_stmt);
815 return true;
818 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
819 ADDR_EXPR will not appear on the LHS. */
820 lhs = gimple_assign_lhs (use_stmt);
821 while (handled_component_p (lhs))
822 lhs = TREE_OPERAND (lhs, 0);
824 /* Now see if the LHS node is a MEM_REF using NAME. If so,
825 propagate the ADDR_EXPR into the use of NAME and fold the result. */
826 if (TREE_CODE (lhs) == MEM_REF
827 && TREE_OPERAND (lhs, 0) == name)
829 tree def_rhs_base;
830 HOST_WIDE_INT def_rhs_offset;
831 /* If the address is invariant we can always fold it. */
832 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
833 &def_rhs_offset)))
835 double_int off = mem_ref_offset (lhs);
836 tree new_ptr;
837 off = double_int_add (off,
838 shwi_to_double_int (def_rhs_offset));
839 if (TREE_CODE (def_rhs_base) == MEM_REF)
841 off = double_int_add (off, mem_ref_offset (def_rhs_base));
842 new_ptr = TREE_OPERAND (def_rhs_base, 0);
844 else
845 new_ptr = build_fold_addr_expr (def_rhs_base);
846 TREE_OPERAND (lhs, 0) = new_ptr;
847 TREE_OPERAND (lhs, 1)
848 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
849 tidy_after_forward_propagate_addr (use_stmt);
850 /* Continue propagating into the RHS if this was not the only use. */
851 if (single_use_p)
852 return true;
854 /* If the LHS is a plain dereference and the value type is the same as
855 that of the pointed-to type of the address we can put the
856 dereferenced address on the LHS preserving the original alias-type. */
857 else if (gimple_assign_lhs (use_stmt) == lhs
858 && useless_type_conversion_p
859 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
860 TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
862 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
863 tree new_offset, new_base, saved;
864 while (handled_component_p (*def_rhs_basep))
865 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
866 saved = *def_rhs_basep;
867 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
869 new_base = TREE_OPERAND (*def_rhs_basep, 0);
870 new_offset
871 = int_const_binop (PLUS_EXPR, TREE_OPERAND (lhs, 1),
872 TREE_OPERAND (*def_rhs_basep, 1));
874 else
876 new_base = build_fold_addr_expr (*def_rhs_basep);
877 new_offset = TREE_OPERAND (lhs, 1);
879 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
880 new_base, new_offset);
881 gimple_assign_set_lhs (use_stmt,
882 unshare_expr (TREE_OPERAND (def_rhs, 0)));
883 *def_rhs_basep = saved;
884 tidy_after_forward_propagate_addr (use_stmt);
885 /* Continue propagating into the RHS if this was not the
886 only use. */
887 if (single_use_p)
888 return true;
890 else
891 /* We can have a struct assignment dereferencing our name twice.
892 Note that we didn't propagate into the lhs to not falsely
893 claim we did when propagating into the rhs. */
894 res = false;
897 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
898 nodes from the RHS. */
899 rhs = gimple_assign_rhs1 (use_stmt);
900 if (TREE_CODE (rhs) == ADDR_EXPR)
901 rhs = TREE_OPERAND (rhs, 0);
902 while (handled_component_p (rhs))
903 rhs = TREE_OPERAND (rhs, 0);
905 /* Now see if the RHS node is a MEM_REF using NAME. If so,
906 propagate the ADDR_EXPR into the use of NAME and fold the result. */
907 if (TREE_CODE (rhs) == MEM_REF
908 && TREE_OPERAND (rhs, 0) == name)
910 tree def_rhs_base;
911 HOST_WIDE_INT def_rhs_offset;
912 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
913 &def_rhs_offset)))
915 double_int off = mem_ref_offset (rhs);
916 tree new_ptr;
917 off = double_int_add (off,
918 shwi_to_double_int (def_rhs_offset));
919 if (TREE_CODE (def_rhs_base) == MEM_REF)
921 off = double_int_add (off, mem_ref_offset (def_rhs_base));
922 new_ptr = TREE_OPERAND (def_rhs_base, 0);
924 else
925 new_ptr = build_fold_addr_expr (def_rhs_base);
926 TREE_OPERAND (rhs, 0) = new_ptr;
927 TREE_OPERAND (rhs, 1)
928 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
929 fold_stmt_inplace (use_stmt);
930 tidy_after_forward_propagate_addr (use_stmt);
931 return res;
933 /* If the LHS is a plain dereference and the value type is the same as
934 that of the pointed-to type of the address we can put the
935 dereferenced address on the LHS preserving the original alias-type. */
936 else if (gimple_assign_rhs1 (use_stmt) == rhs
937 && useless_type_conversion_p
938 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
939 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
941 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
942 tree new_offset, new_base, saved;
943 while (handled_component_p (*def_rhs_basep))
944 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
945 saved = *def_rhs_basep;
946 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
948 new_base = TREE_OPERAND (*def_rhs_basep, 0);
949 new_offset
950 = int_const_binop (PLUS_EXPR, TREE_OPERAND (rhs, 1),
951 TREE_OPERAND (*def_rhs_basep, 1));
953 else
955 new_base = build_fold_addr_expr (*def_rhs_basep);
956 new_offset = TREE_OPERAND (rhs, 1);
958 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
959 new_base, new_offset);
960 gimple_assign_set_rhs1 (use_stmt,
961 unshare_expr (TREE_OPERAND (def_rhs, 0)));
962 *def_rhs_basep = saved;
963 fold_stmt_inplace (use_stmt);
964 tidy_after_forward_propagate_addr (use_stmt);
965 return res;
969 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
970 is nothing to do. */
971 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
972 || gimple_assign_rhs1 (use_stmt) != name)
973 return false;
975 /* The remaining cases are all for turning pointer arithmetic into
976 array indexing. They only apply when we have the address of
977 element zero in an array. If that is not the case then there
978 is nothing to do. */
979 array_ref = TREE_OPERAND (def_rhs, 0);
980 if ((TREE_CODE (array_ref) != ARRAY_REF
981 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
982 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
983 && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
984 return false;
986 rhs2 = gimple_assign_rhs2 (use_stmt);
987 /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size
988 of the elements in X into &x[C1 + C2/element size]. */
989 if (TREE_CODE (rhs2) == INTEGER_CST)
991 tree new_rhs = maybe_fold_stmt_addition (gimple_location (use_stmt),
992 TREE_TYPE (def_rhs),
993 def_rhs, rhs2);
994 if (new_rhs)
996 tree type = TREE_TYPE (gimple_assign_lhs (use_stmt));
997 new_rhs = unshare_expr (new_rhs);
998 if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs)))
1000 if (!is_gimple_min_invariant (new_rhs))
1001 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs,
1002 true, NULL_TREE,
1003 true, GSI_SAME_STMT);
1004 new_rhs = fold_convert (type, new_rhs);
1006 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
1007 use_stmt = gsi_stmt (*use_stmt_gsi);
1008 update_stmt (use_stmt);
1009 tidy_after_forward_propagate_addr (use_stmt);
1010 return true;
1014 /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
1015 converting a multiplication of an index by the size of the
1016 array elements, then the result is converted into the proper
1017 type for the arithmetic. */
1018 if (TREE_CODE (rhs2) == SSA_NAME
1019 && (TREE_CODE (array_ref) != ARRAY_REF
1020 || integer_zerop (TREE_OPERAND (array_ref, 1)))
1021 && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
1022 /* Avoid problems with IVopts creating PLUS_EXPRs with a
1023 different type than their operands. */
1024 && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
1025 return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
1026 use_stmt_gsi);
1027 return false;
1030 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
1032 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
1033 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
1034 node or for recovery of array indexing from pointer arithmetic.
1035 Returns true, if all uses have been propagated into. */
/* Try to forward propagate the ADDR_EXPR RHS into every use of the
   SSA_NAME NAME.  Uses are visited via the immediate-use machinery;
   each propagation attempt is delegated to
   forward_propagate_addr_expr_1.  Returns true iff every use was
   successfully propagated into (and NAME is now dead).  */

static bool
forward_propagate_addr_expr (tree name, tree rhs)
{
  /* Loop depth of the defining statement; used to avoid propagating
     non-invariant addresses into deeper loops.  */
  int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
	 there is nothing we can do.  Debug uses do not count as
	 failures, since they will be dropped/adjusted anyway.  */
      if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
	{
	  if (!is_gimple_debug (use_stmt))
	    all = false;
	  continue;
	}

      /* If the use is in a deeper loop nest, then we do not want
	 to propagate non-invariant ADDR_EXPRs into the loop as that
	 is likely adding expression evaluations into the loop.  */
      if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
	  && !is_gimple_min_invariant (rhs))
	{
	  all = false;
	  continue;
	}

      {
	gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
						single_use_p);
	/* If the use has moved to a different statement adjust
	   the update machinery for the old statement too.  */
	if (use_stmt != gsi_stmt (gsi))
	  {
	    update_stmt (use_stmt);
	    use_stmt = gsi_stmt (gsi);
	  }

	update_stmt (use_stmt);
      }
      all &= result;

      /* Remove intermediate now unused copy and conversion chains:
	 if the propagated-into statement became a dead SSA copy,
	 delete it on the spot.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
	  && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
	  && TREE_CODE (use_rhs) == SSA_NAME
	  && has_zero_uses (gimple_assign_lhs (use_stmt)))
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	  release_defs (use_stmt);
	  gsi_remove (&gsi, true);
	}
    }

  /* Only report full success if NAME itself became dead, so the
     caller may remove its defining statement.  */
  return all && has_zero_uses (name);
}
/* Forward propagate the comparison defined in STMT like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  */

static bool
forward_propagate_comparison (gimple stmt)
{
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  tree tmp = NULL_TREE;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    return false;

  /* Do not un-cse comparisons.  But propagate through copies.
     get_prop_dest_stmt also requires a single use, so we never
     duplicate the comparison.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt)
    return false;

  /* Conversion of the condition result to another integral type.  */
  if (is_gimple_assign (use_stmt)
      && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt))
	  || TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
	     == tcc_comparison
	  || gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
      && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (use_stmt))))
    {
      tree lhs = gimple_assign_lhs (use_stmt);

      /* We can propagate the condition into a conversion.  */
      if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt)))
	{
	  /* Avoid using fold here as that may create a COND_EXPR with
	     non-boolean condition as canonical form.  */
	  tmp = build2 (gimple_assign_rhs_code (stmt), TREE_TYPE (lhs),
			gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
	}
      /* We can propagate the condition into X op CST where op
	 is EQ_EXPR or NE_EXPR and CST is either one or zero.
	 combine_cond_expr_cond does the folding; bail out if it
	 cannot produce a simplified condition.  */
      else if (TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
	       == tcc_comparison
	       && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
	       && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
	{
	  enum tree_code code = gimple_assign_rhs_code (use_stmt);
	  tree cst = gimple_assign_rhs2 (use_stmt);
	  tree cond;

	  cond = build2 (gimple_assign_rhs_code (stmt),
			 TREE_TYPE (cst),
			 gimple_assign_rhs1 (stmt),
			 gimple_assign_rhs2 (stmt));

	  tmp = combine_cond_expr_cond (gimple_location (use_stmt),
					code, TREE_TYPE (lhs),
					cond, cst, false);
	  if (tmp == NULL_TREE)
	    return false;
	}
      /* We can propagate the condition into a statement that
	 computes the logical negation of the comparison result.  */
      else if (gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
	{
	  tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  bool nans = HONOR_NANS (TYPE_MODE (type));
	  enum tree_code code;
	  /* Inverting a floating-point comparison may be impossible
	     in the presence of NaNs; ERROR_MARK signals that.  */
	  code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
	  if (code == ERROR_MARK)
	    return false;

	  tmp = build2 (code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
			gimple_assign_rhs2 (stmt));
	}
      else
	return false;

      {
	gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
	use_stmt = gsi_stmt (gsi);
	update_stmt (use_stmt);
      }

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  tree old_rhs = rhs_to_tree (TREE_TYPE (gimple_assign_lhs (stmt)),
				      stmt);
	  fprintf (dump_file, "  Replaced '");
	  print_generic_expr (dump_file, old_rhs, dump_flags);
	  fprintf (dump_file, "' with '");
	  print_generic_expr (dump_file, tmp, dump_flags);
	  fprintf (dump_file, "'\n");
	}

      /* Remove defining statements.  */
      return remove_prop_source_from_use (name);
    }

  return false;
}
1212 /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1213 If so, we can change STMT into lhs = y which can later be copy
1214 propagated. Similarly for negation.
1216 This could trivially be formulated as a forward propagation
1217 to immediate uses. However, we already had an implementation
1218 from DOM which used backward propagation via the use-def links.
1220 It turns out that backward propagation is actually faster as
1221 there's less work to do for each NOT/NEG expression we find.
1222 Backwards propagation needs to look at the statement in a single
1223 backlink. Forward propagation needs to look at potentially more
1224 than one forward link.
1226 Returns true when the statement was changed. */
1228 static bool
1229 simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
1231 gimple stmt = gsi_stmt (*gsi_p);
1232 tree rhs = gimple_assign_rhs1 (stmt);
1233 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
1235 /* See if the RHS_DEF_STMT has the same form as our statement. */
1236 if (is_gimple_assign (rhs_def_stmt)
1237 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
1239 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
1241 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1242 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1243 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
1245 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
1246 stmt = gsi_stmt (*gsi_p);
1247 update_stmt (stmt);
1248 return true;
1252 return false;
1255 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1256 the condition which we may be able to optimize better. */
1258 static bool
1259 simplify_gimple_switch (gimple stmt)
1261 tree cond = gimple_switch_index (stmt);
1262 tree def, to, ti;
1263 gimple def_stmt;
1265 /* The optimization that we really care about is removing unnecessary
1266 casts. That will let us do much better in propagating the inferred
1267 constant at the switch target. */
1268 if (TREE_CODE (cond) == SSA_NAME)
1270 def_stmt = SSA_NAME_DEF_STMT (cond);
1271 if (is_gimple_assign (def_stmt))
1273 if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
1275 int need_precision;
1276 bool fail;
1278 def = gimple_assign_rhs1 (def_stmt);
1280 /* ??? Why was Jeff testing this? We are gimple... */
1281 gcc_checking_assert (is_gimple_val (def));
1283 to = TREE_TYPE (cond);
1284 ti = TREE_TYPE (def);
1286 /* If we have an extension that preserves value, then we
1287 can copy the source value into the switch. */
1289 need_precision = TYPE_PRECISION (ti);
1290 fail = false;
1291 if (! INTEGRAL_TYPE_P (ti))
1292 fail = true;
1293 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
1294 fail = true;
1295 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
1296 need_precision += 1;
1297 if (TYPE_PRECISION (to) < need_precision)
1298 fail = true;
1300 if (!fail)
1302 gimple_switch_set_index (stmt, def);
1303 update_stmt (stmt);
1304 return true;
1310 return false;
/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.

   Each pointer is chased through at most CPD_ITERATIONS steps of
   ADDR_EXPRs, SSA copies/conversions and POINTER_PLUS_EXPRs with
   constant offsets, recording (base, accumulated offset) pairs.
   If both chains reach a common base tree, the difference of the
   accumulated offsets is the answer.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  /* i == 0 walks p2, i == 1 walks p1, so that the final subtraction
     offs[0] - offs[1] yields p2 - p1.  */
  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
	 twice, to handle ADDR_EXPR directly in p1/p2,
	 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
	 on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
	{
	  if (!POINTER_TYPE_P (TREE_TYPE (p)))
	    break;
	  if (TREE_CODE (p) == ADDR_EXPR)
	    {
	      tree q = TREE_OPERAND (p, 0);
	      HOST_WIDE_INT offset;
	      /* Split &base.field[i] into base plus a byte offset.  */
	      tree base = get_addr_base_and_unit_offset (q, &offset);
	      if (base)
		{
		  q = base;
		  if (offset)
		    off = size_binop (PLUS_EXPR, off, size_int (offset));
		}
	      /* &MEM[ssa + cst] restarts the chase from the SSA name
		 with the MEM_REF offset folded into OFF.  */
	      if (TREE_CODE (q) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
		{
		  p = TREE_OPERAND (q, 0);
		  off = size_binop (PLUS_EXPR, off,
				    double_int_to_tree (sizetype,
							mem_ref_offset (q)));
		}
	      else
		{
		  /* Reached a non-SSA base: record it and stop.  */
		  exps[i][j] = q;
		  offs[i][j++] = off;
		  break;
		}
	    }
	  if (TREE_CODE (p) != SSA_NAME)
	    break;
	  /* Record the SSA name itself as a potential common base
	     before following its definition.  */
	  exps[i][j] = p;
	  offs[i][j++] = off;
	  if (j == CPD_ITERATIONS)
	    break;
	  stmt = SSA_NAME_DEF_STMT (p);
	  if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
	    break;
	  code = gimple_assign_rhs_code (stmt);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      /* Only constant steps can be accumulated.  */
	      if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
		break;
	      off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
	      p = gimple_assign_rhs1 (stmt);
	    }
	  else if (code == ADDR_EXPR || code == NOP_EXPR)
	    p = gimple_assign_rhs1 (stmt);
	  else
	    break;
	}
      while (1);
      cnt[i] = j;
    }

  /* Any base tree common to both chains fixes the difference.  */
  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
	return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
/* *GSI_P is a GIMPLE_CALL to a builtin function CALLEE2.
   Optimize
   memcpy (p, "abcd", 4);
   memset (p + 4, ' ', 3);
   into
   memcpy (p, "abcd   ", 7);
   call if the latter can be stored by pieces during expansion.

   The memcpy (or a length-1 memcpy already folded into a character
   store) is found as the defining statement of the memset's virtual
   use operand.  Returns true if the memset call was removed.  */

static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  /* STMT1 is the store whose virtual definition STMT2 consumes.  */
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      /* Only handle a plain 3-argument memset without used result,
	 on byte-addressable targets.  */
      if (gimple_call_num_args (stmt2) != 3
	  || gimple_call_lhs (stmt2)
	  || CHAR_BIT != 8
	  || BITS_PER_UNIT != 8)
	break;
      else
	{
	  tree callee1;
	  tree ptr1, src1, str1, off1, len1, lhs1;
	  tree ptr2 = gimple_call_arg (stmt2, 0);
	  tree val2 = gimple_call_arg (stmt2, 1);
	  tree len2 = gimple_call_arg (stmt2, 2);
	  tree diff, vdef, new_str_cst;
	  gimple use_stmt;
	  unsigned int ptr1_align;
	  unsigned HOST_WIDE_INT src_len;
	  char *src_buf;
	  use_operand_p use_p;

	  if (!host_integerp (val2, 0)
	      || !host_integerp (len2, 1))
	    break;
	  if (is_gimple_call (stmt1))
	    {
	      /* If first stmt is a call, it needs to be memcpy
		 or mempcpy, with string literal as second argument and
		 constant length.  */
	      callee1 = gimple_call_fndecl (stmt1);
	      if (callee1 == NULL_TREE
		  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
		  || gimple_call_num_args (stmt1) != 3)
		break;
	      if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
		  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
		break;
	      ptr1 = gimple_call_arg (stmt1, 0);
	      src1 = gimple_call_arg (stmt1, 1);
	      len1 = gimple_call_arg (stmt1, 2);
	      lhs1 = gimple_call_lhs (stmt1);
	      if (!host_integerp (len1, 1))
		break;
	      str1 = string_constant (src1, &off1);
	      if (str1 == NULL_TREE)
		break;
	      /* The constant offset and length must stay within the
		 string literal, and the literal must be a plain
		 char array.  */
	      if (!host_integerp (off1, 1)
		  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
		  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
					     - tree_low_cst (off1, 1)) > 0
		  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
		  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
		     != TYPE_MODE (char_type_node))
		break;
	    }
	  else if (gimple_assign_single_p (stmt1))
	    {
	      /* Otherwise look for length 1 memcpy optimized into
		 assignment.  */
	      ptr1 = gimple_assign_lhs (stmt1);
	      src1 = gimple_assign_rhs1 (stmt1);
	      if (TREE_CODE (ptr1) != MEM_REF
		  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
		  || !host_integerp (src1, 0))
		break;
	      ptr1 = build_fold_addr_expr (ptr1);
	      /* callee1 == NULL_TREE marks the assignment case below;
		 lhs1 is NULL too, which keeps the mempcpy-only code
		 paths (that dereference callee1) unreachable.  */
	      callee1 = NULL_TREE;
	      len1 = size_one_node;
	      lhs1 = NULL_TREE;
	      off1 = size_zero_node;
	      str1 = NULL_TREE;
	    }
	  else
	    break;

	  diff = constant_pointer_difference (ptr1, ptr2);
	  if (diff == NULL && lhs1 != NULL)
	    {
	      /* For mempcpy the returned pointer (lhs1) equals
		 ptr1 + len1, so retry relative to it.  */
	      diff = constant_pointer_difference (lhs1, ptr2);
	      if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
		  && diff != NULL)
		diff = size_binop (PLUS_EXPR, diff,
				   fold_convert (sizetype, len1));
	    }
	  /* If the difference between the second and first destination pointer
	     is not constant, or is bigger than memcpy length, bail out.  */
	  if (diff == NULL
	      || !host_integerp (diff, 1)
	      || tree_int_cst_lt (len1, diff))
	    break;

	  /* Use maximum of difference plus memset length and memcpy length
	     as the new memcpy length, if it is too big, bail out.  */
	  src_len = tree_low_cst (diff, 1);
	  src_len += tree_low_cst (len2, 1);
	  if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
	    src_len = tree_low_cst (len1, 1);
	  if (src_len > 1024)
	    break;

	  /* If mempcpy value is used elsewhere, bail out, as mempcpy
	     with bigger length will return different result.  */
	  if (lhs1 != NULL_TREE
	      && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
	      && (TREE_CODE (lhs1) != SSA_NAME
		  || !single_imm_use (lhs1, &use_p, &use_stmt)
		  || use_stmt != stmt2))
	    break;

	  /* If anything reads memory in between memcpy and memset
	     call, the modified memcpy call might change it.  */
	  vdef = gimple_vdef (stmt1);
	  if (vdef != NULL
	      && (!single_imm_use (vdef, &use_p, &use_stmt)
		  || use_stmt != stmt2))
	    break;

	  ptr1_align = get_pointer_alignment (ptr1, BIGGEST_ALIGNMENT);
	  /* Construct the new source string literal.  */
	  src_buf = XALLOCAVEC (char, src_len + 1);
	  if (callee1)
	    memcpy (src_buf,
		    TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
		    tree_low_cst (len1, 1));
	  else
	    src_buf[0] = tree_low_cst (src1, 0);
	  memset (src_buf + tree_low_cst (diff, 1),
		  tree_low_cst (val2, 1), tree_low_cst (len2, 1));
	  src_buf[src_len] = '\0';
	  /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
	     handle embedded '\0's.  */
	  if (strlen (src_buf) != src_len)
	    break;
	  rtl_profile_for_bb (gimple_bb (stmt2));
	  /* If the new memcpy wouldn't be emitted by storing the literal
	     by pieces, this optimization might enlarge .rodata too much,
	     as commonly used string literals couldn't be shared any
	     longer.  */
	  if (!can_store_by_pieces (src_len,
				    builtin_strncpy_read_str,
				    src_buf, ptr1_align, false))
	    break;

	  new_str_cst = build_string_literal (src_len, src_buf);
	  if (callee1)
	    {
	      /* If STMT1 is a mem{,p}cpy call, adjust it and remove
		 memset call.  */
	      if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
		gimple_call_set_lhs (stmt1, NULL_TREE);
	      gimple_call_set_arg (stmt1, 1, new_str_cst);
	      gimple_call_set_arg (stmt1, 2,
				   build_int_cst (TREE_TYPE (len1), src_len));
	      update_stmt (stmt1);
	      unlink_stmt_vdef (stmt2);
	      gsi_remove (gsi_p, true);
	      release_defs (stmt2);
	      if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
		release_ssa_name (lhs1);
	      return true;
	    }
	  else
	    {
	      /* Otherwise, if STMT1 is length 1 memcpy optimized into
		 assignment, remove STMT1 and change memset call into
		 memcpy call.  */
	      gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

	      if (!is_gimple_val (ptr1))
		ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
						 true, GSI_SAME_STMT);
	      gimple_call_set_fndecl (stmt2, built_in_decls [BUILT_IN_MEMCPY]);
	      gimple_call_set_arg (stmt2, 0, ptr1);
	      gimple_call_set_arg (stmt2, 1, new_str_cst);
	      gimple_call_set_arg (stmt2, 2,
				   build_int_cst (TREE_TYPE (len2), src_len));
	      unlink_stmt_vdef (stmt1);
	      gsi_remove (&gsi, true);
	      release_defs (stmt1);
	      update_stmt (stmt2);
	      /* Return false: STMT2 still exists (as a memcpy) and
		 may be subject to further simplification.  */
	      return false;
	    }
	}
      break;
    default:
      break;
    }
  return false;
}
/* Simplify bitwise binary operations.
   Return true if a transformation applied, otherwise return false.

   Handled patterns (STMT at *GSI is lhs = arg1 CODE arg2):
     - fold (T)&addr & CST when the result is constant;
     - hoist matching conversions: (T)a op (T)b -> (T)(a op b);
     - (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2);
     - (a op CST1) op CST2 -> a op (CST1 op CST2) for and/or/xor.  */

static bool
simplify_bitwise_binary (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree arg1 = gimple_assign_rhs1 (stmt);
  tree arg2 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree res;
  gimple def1 = NULL, def2 = NULL;
  tree def1_arg1, def2_arg1;
  enum tree_code def1_code, def2_code;

  /* If the first argument is an SSA name that is itself a result of a
     typecast of an ADDR_EXPR to an integer, feed the ADDR_EXPR to the
     folder rather than the ssa name.  */
  if (code == BIT_AND_EXPR
      && TREE_CODE (arg2) == INTEGER_CST
      && TREE_CODE (arg1) == SSA_NAME)
    {
      gimple def = SSA_NAME_DEF_STMT (arg1);
      tree op = arg1;

      /* ??? This looks bogus - the conversion could be truncating.  */
      if (is_gimple_assign (def)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
	{
	  tree opp = gimple_assign_rhs1 (def);
	  if (TREE_CODE (opp) == ADDR_EXPR)
	    op = opp;
	}

      res = fold_binary_loc (gimple_location (stmt),
			     BIT_AND_EXPR, TREE_TYPE (gimple_assign_lhs (stmt)),
			     op, arg2);
      /* Only replace when folding produced a constant; otherwise we
	 would re-introduce the address computation.  */
      if (res && is_gimple_min_invariant (res))
	{
	  gimple_assign_set_rhs_from_tree (gsi, res);
	  update_stmt (stmt);
	  return true;
	}
    }

  /* Look through the defining statements of both operands: if one is
     a single-operation assignment, record its code and first operand
     so the patterns below can match through it.  */
  def1_code = TREE_CODE (arg1);
  def1_arg1 = arg1;
  if (TREE_CODE (arg1) == SSA_NAME)
    {
      def1 = SSA_NAME_DEF_STMT (arg1);
      if (is_gimple_assign (def1))
	{
	  def1_code = gimple_assign_rhs_code (def1);
	  def1_arg1 = gimple_assign_rhs1 (def1);
	}
    }

  def2_code = TREE_CODE (arg2);
  def2_arg1 = arg2;
  if (TREE_CODE (arg2) == SSA_NAME)
    {
      def2 = SSA_NAME_DEF_STMT (arg2);
      if (is_gimple_assign (def2))
	{
	  def2_code = gimple_assign_rhs_code (def2);
	  def2_arg1 = gimple_assign_rhs1 (def2);
	}
    }

  /* For bitwise binary operations apply operand conversions to the
     binary operation result instead of to the operands.  This allows
     to combine successive conversions and bitwise binary operations.  */
  if (CONVERT_EXPR_CODE_P (def1_code)
      && CONVERT_EXPR_CODE_P (def2_code)
      && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
      /* Make sure that the conversion widens the operands or that it
	 changes the operation to a bitfield precision.  */
      && ((TYPE_PRECISION (TREE_TYPE (def1_arg1))
	   < TYPE_PRECISION (TREE_TYPE (arg1)))
	  || (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (arg1)))
	      != MODE_INT)
	  || (TYPE_PRECISION (TREE_TYPE (arg1))
	      != GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg1))))))
    {
      gimple newop;
      tree tem = create_tmp_reg (TREE_TYPE (def1_arg1),
				 NULL);
      /* Emit tem = a op b before STMT, then turn STMT into the
	 conversion lhs = (T) tem.  */
      newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
      tem = make_ssa_name (tem, newop);
      gimple_assign_set_lhs (newop, tem);
      gsi_insert_before (gsi, newop, GSI_SAME_STMT);
      gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
					tem, NULL_TREE, NULL_TREE);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2).  */
  if (code == BIT_AND_EXPR
      && def1_code == BIT_IOR_EXPR
      && TREE_CODE (arg2) == INTEGER_CST
      && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
    {
      tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
			      arg2, gimple_assign_rhs2 (def1));
      tree tem;
      gimple newop;
      /* CST1 & CST2 == 0: the IOR contributes nothing, simplify to
	 a & CST2 by just replacing the first operand.  */
      if (integer_zerop (cst))
	{
	  gimple_assign_set_rhs1 (stmt, def1_arg1);
	  update_stmt (stmt);
	  return true;
	}
      tem = create_tmp_reg (TREE_TYPE (arg2), NULL);
      newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
					    tem, def1_arg1, arg2);
      tem = make_ssa_name (tem, newop);
      gimple_assign_set_lhs (newop, tem);
      /* Make sure to re-process the new stmt as it's walking upwards.  */
      gsi_insert_before (gsi, newop, GSI_NEW_STMT);
      gimple_assign_set_rhs1 (stmt, tem);
      gimple_assign_set_rhs2 (stmt, cst);
      gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
      update_stmt (stmt);
      return true;
    }

  /* Combine successive equal operations with constants.  */
  if ((code == BIT_AND_EXPR
       || code == BIT_IOR_EXPR
       || code == BIT_XOR_EXPR)
      && def1_code == code
      && TREE_CODE (arg2) == INTEGER_CST
      && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
    {
      tree cst = fold_build2 (code, TREE_TYPE (arg2),
			      arg2, gimple_assign_rhs2 (def1));
      gimple_assign_set_rhs1 (stmt, def1_arg1);
      gimple_assign_set_rhs2 (stmt, cst);
      update_stmt (stmt);
      return true;
    }

  return false;
}
1757 /* Perform re-associations of the plus or minus statement STMT that are
1758 always permitted. Returns true if the CFG was changed. */
1760 static bool
1761 associate_plusminus (gimple stmt)
1763 tree rhs1 = gimple_assign_rhs1 (stmt);
1764 tree rhs2 = gimple_assign_rhs2 (stmt);
1765 enum tree_code code = gimple_assign_rhs_code (stmt);
1766 gimple_stmt_iterator gsi;
1767 bool changed;
1769 /* We can't reassociate at all for saturating types. */
1770 if (TYPE_SATURATING (TREE_TYPE (rhs1)))
1771 return false;
1773 /* First contract negates. */
1776 changed = false;
1778 /* A +- (-B) -> A -+ B. */
1779 if (TREE_CODE (rhs2) == SSA_NAME)
1781 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
1782 if (is_gimple_assign (def_stmt)
1783 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR)
1785 code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
1786 gimple_assign_set_rhs_code (stmt, code);
1787 rhs2 = gimple_assign_rhs1 (def_stmt);
1788 gimple_assign_set_rhs2 (stmt, rhs2);
1789 gimple_set_modified (stmt, true);
1790 changed = true;
1794 /* (-A) + B -> B - A. */
1795 if (TREE_CODE (rhs1) == SSA_NAME
1796 && code == PLUS_EXPR)
1798 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
1799 if (is_gimple_assign (def_stmt)
1800 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR)
1802 code = MINUS_EXPR;
1803 gimple_assign_set_rhs_code (stmt, code);
1804 rhs1 = rhs2;
1805 gimple_assign_set_rhs1 (stmt, rhs1);
1806 rhs2 = gimple_assign_rhs1 (def_stmt);
1807 gimple_assign_set_rhs2 (stmt, rhs2);
1808 gimple_set_modified (stmt, true);
1809 changed = true;
1813 while (changed);
1815 /* We can't reassociate floating-point or fixed-point plus or minus
1816 because of saturation to +-Inf. */
1817 if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
1818 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
1819 goto out;
1821 /* Second match patterns that allow contracting a plus-minus pair
1822 irrespective of overflow issues.
1824 (A +- B) - A -> +- B
1825 (A +- B) -+ B -> A
1826 (CST +- A) +- CST -> CST +- A
1827 (A + CST) +- CST -> A + CST
1828 ~A + A -> -1
1829 ~A + 1 -> -A
1830 A - (A +- B) -> -+ B
1831 A +- (B +- A) -> +- B
1832 CST +- (CST +- A) -> CST +- A
1833 CST +- (A +- CST) -> CST +- A
1834 A + ~A -> -1
1836 via commutating the addition and contracting operations to zero
1837 by reassociation. */
1839 gsi = gsi_for_stmt (stmt);
1840 if (TREE_CODE (rhs1) == SSA_NAME)
1842 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
1843 if (is_gimple_assign (def_stmt))
1845 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
1846 if (def_code == PLUS_EXPR
1847 || def_code == MINUS_EXPR)
1849 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
1850 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
1851 if (operand_equal_p (def_rhs1, rhs2, 0)
1852 && code == MINUS_EXPR)
1854 /* (A +- B) - A -> +- B. */
1855 code = ((def_code == PLUS_EXPR)
1856 ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
1857 rhs1 = def_rhs2;
1858 rhs2 = NULL_TREE;
1859 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1860 gcc_assert (gsi_stmt (gsi) == stmt);
1861 gimple_set_modified (stmt, true);
1863 else if (operand_equal_p (def_rhs2, rhs2, 0)
1864 && code != def_code)
1866 /* (A +- B) -+ B -> A. */
1867 code = TREE_CODE (def_rhs1);
1868 rhs1 = def_rhs1;
1869 rhs2 = NULL_TREE;
1870 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1871 gcc_assert (gsi_stmt (gsi) == stmt);
1872 gimple_set_modified (stmt, true);
1874 else if (TREE_CODE (rhs2) == INTEGER_CST
1875 && TREE_CODE (def_rhs1) == INTEGER_CST)
1877 /* (CST +- A) +- CST -> CST +- A. */
1878 tree cst = fold_binary (code, TREE_TYPE (rhs1),
1879 def_rhs1, rhs2);
1880 if (cst && !TREE_OVERFLOW (cst))
1882 code = def_code;
1883 gimple_assign_set_rhs_code (stmt, code);
1884 rhs1 = cst;
1885 gimple_assign_set_rhs1 (stmt, rhs1);
1886 rhs2 = def_rhs2;
1887 gimple_assign_set_rhs2 (stmt, rhs2);
1888 gimple_set_modified (stmt, true);
1891 else if (TREE_CODE (rhs2) == INTEGER_CST
1892 && TREE_CODE (def_rhs2) == INTEGER_CST
1893 && def_code == PLUS_EXPR)
1895 /* (A + CST) +- CST -> A + CST. */
1896 tree cst = fold_binary (code, TREE_TYPE (rhs1),
1897 def_rhs2, rhs2);
1898 if (cst && !TREE_OVERFLOW (cst))
1900 code = PLUS_EXPR;
1901 gimple_assign_set_rhs_code (stmt, code);
1902 rhs1 = def_rhs1;
1903 gimple_assign_set_rhs1 (stmt, rhs1);
1904 rhs2 = cst;
1905 gimple_assign_set_rhs2 (stmt, rhs2);
1906 gimple_set_modified (stmt, true);
1910 else if (def_code == BIT_NOT_EXPR
1911 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
1913 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
1914 if (code == PLUS_EXPR
1915 && operand_equal_p (def_rhs1, rhs2, 0))
1917 /* ~A + A -> -1. */
1918 code = INTEGER_CST;
1919 rhs1 = build_int_cst_type (TREE_TYPE (rhs2), -1);
1920 rhs2 = NULL_TREE;
1921 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1922 gcc_assert (gsi_stmt (gsi) == stmt);
1923 gimple_set_modified (stmt, true);
1925 else if (code == PLUS_EXPR
1926 && integer_onep (rhs1))
1928 /* ~A + 1 -> -A. */
1929 code = NEGATE_EXPR;
1930 rhs1 = def_rhs1;
1931 rhs2 = NULL_TREE;
1932 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1933 gcc_assert (gsi_stmt (gsi) == stmt);
1934 gimple_set_modified (stmt, true);
1940 if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
1942 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
1943 if (is_gimple_assign (def_stmt))
1945 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
1946 if (def_code == PLUS_EXPR
1947 || def_code == MINUS_EXPR)
1949 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
1950 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
1951 if (operand_equal_p (def_rhs1, rhs1, 0)
1952 && code == MINUS_EXPR)
1954 /* A - (A +- B) -> -+ B. */
1955 code = ((def_code == PLUS_EXPR)
1956 ? NEGATE_EXPR : TREE_CODE (def_rhs2));
1957 rhs1 = def_rhs2;
1958 rhs2 = NULL_TREE;
1959 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1960 gcc_assert (gsi_stmt (gsi) == stmt);
1961 gimple_set_modified (stmt, true);
1963 else if (operand_equal_p (def_rhs2, rhs1, 0)
1964 && code != def_code)
1966 /* A +- (B +- A) -> +- B. */
1967 code = ((code == PLUS_EXPR)
1968 ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
1969 rhs1 = def_rhs1;
1970 rhs2 = NULL_TREE;
1971 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1972 gcc_assert (gsi_stmt (gsi) == stmt);
1973 gimple_set_modified (stmt, true);
1975 else if (TREE_CODE (rhs1) == INTEGER_CST
1976 && TREE_CODE (def_rhs1) == INTEGER_CST)
1978 /* CST +- (CST +- A) -> CST +- A. */
1979 tree cst = fold_binary (code, TREE_TYPE (rhs2),
1980 rhs1, def_rhs1);
1981 if (cst && !TREE_OVERFLOW (cst))
1983 code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
1984 gimple_assign_set_rhs_code (stmt, code);
1985 rhs1 = cst;
1986 gimple_assign_set_rhs1 (stmt, rhs1);
1987 rhs2 = def_rhs2;
1988 gimple_assign_set_rhs2 (stmt, rhs2);
1989 gimple_set_modified (stmt, true);
1992 else if (TREE_CODE (rhs1) == INTEGER_CST
1993 && TREE_CODE (def_rhs2) == INTEGER_CST)
1995 /* CST +- (A +- CST) -> CST +- A. */
1996 tree cst = fold_binary (def_code == code
1997 ? PLUS_EXPR : MINUS_EXPR,
1998 TREE_TYPE (rhs2),
1999 rhs1, def_rhs2);
2000 if (cst && !TREE_OVERFLOW (cst))
2002 rhs1 = cst;
2003 gimple_assign_set_rhs1 (stmt, rhs1);
2004 rhs2 = def_rhs1;
2005 gimple_assign_set_rhs2 (stmt, rhs2);
2006 gimple_set_modified (stmt, true);
2010 else if (def_code == BIT_NOT_EXPR
2011 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
2013 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2014 if (code == PLUS_EXPR
2015 && operand_equal_p (def_rhs1, rhs1, 0))
2017 /* A + ~A -> -1. */
2018 code = INTEGER_CST;
2019 rhs1 = build_int_cst_type (TREE_TYPE (rhs1), -1);
2020 rhs2 = NULL_TREE;
2021 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
2022 gcc_assert (gsi_stmt (gsi) == stmt);
2023 gimple_set_modified (stmt, true);
2029 out:
2030 if (gimple_modified_p (stmt))
2032 fold_stmt_inplace (stmt);
2033 update_stmt (stmt);
2034 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
2035 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2036 return true;
2039 return false;
2042 /* Combine two conversions in a row for the second conversion at *GSI.
2043 Returns true if there were any changes made. */
2045 static bool
2046 combine_conversions (gimple_stmt_iterator *gsi)
2048 gimple stmt = gsi_stmt (*gsi);
2049 gimple def_stmt;
2050 tree op0, lhs;
2051 enum tree_code code = gimple_assign_rhs_code (stmt);
/* The caller dispatches on the rhs code, so only conversion-like
   assignments are expected here.  */
2053 gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
2054 || code == FLOAT_EXPR
2055 || code == FIX_TRUNC_EXPR);
2057 lhs = gimple_assign_lhs (stmt);
2058 op0 = gimple_assign_rhs1 (stmt);
/* A conversion to a type the operand effectively already has is
   useless; turn the statement into a plain copy of OP0.  */
2059 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
2061 gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
2062 return true;
/* Otherwise look through the SSA definition of the converted operand
   for an inner conversion to combine with.  */
2065 if (TREE_CODE (op0) != SSA_NAME)
2066 return false;
2068 def_stmt = SSA_NAME_DEF_STMT (op0);
2069 if (!is_gimple_assign (def_stmt))
2070 return false;
2072 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
2074 tree defop0 = gimple_assign_rhs1 (def_stmt);
/* The conversion chain is
     DEFOP0 (inside_type) -> OP0 (inter_type) -> LHS (type).
   Precompute classification flags for the three types; the rules
   below mirror the classic two-conversions-in-a-row logic.  */
2075 tree type = TREE_TYPE (lhs);
2076 tree inside_type = TREE_TYPE (defop0);
2077 tree inter_type = TREE_TYPE (op0);
2078 int inside_int = INTEGRAL_TYPE_P (inside_type);
2079 int inside_ptr = POINTER_TYPE_P (inside_type);
2080 int inside_float = FLOAT_TYPE_P (inside_type);
2081 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
2082 unsigned int inside_prec = TYPE_PRECISION (inside_type);
2083 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
2084 int inter_int = INTEGRAL_TYPE_P (inter_type);
2085 int inter_ptr = POINTER_TYPE_P (inter_type);
2086 int inter_float = FLOAT_TYPE_P (inter_type);
2087 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
2088 unsigned int inter_prec = TYPE_PRECISION (inter_type);
2089 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
2090 int final_int = INTEGRAL_TYPE_P (type);
2091 int final_ptr = POINTER_TYPE_P (type);
2092 int final_float = FLOAT_TYPE_P (type);
2093 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
2094 unsigned int final_prec = TYPE_PRECISION (type);
2095 int final_unsignedp = TYPE_UNSIGNED (type);
2097 /* In addition to the cases of two conversions in a row
2098 handled below, if we are converting something to its own
2099 type via an object of identical or wider precision, neither
2100 conversion is needed. */
2101 if (useless_type_conversion_p (type, inside_type)
2102 && (((inter_int || inter_ptr) && final_int)
2103 || (inter_float && final_float))
2104 && inter_prec >= final_prec)
2106 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2107 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2108 update_stmt (stmt);
2109 return true;
2112 /* Likewise, if the intermediate and initial types are either both
2113 float or both integer, we don't need the middle conversion if the
2114 former is wider than the latter and doesn't change the signedness
2115 (for integers). Avoid this if the final type is a pointer since
2116 then we sometimes need the middle conversion. Likewise if the
2117 final type has a precision not equal to the size of its mode. */
2118 if (((inter_int && inside_int)
2119 || (inter_float && inside_float)
2120 || (inter_vec && inside_vec))
2121 && inter_prec >= inside_prec
2122 && (inter_float || inter_vec
2123 || inter_unsignedp == inside_unsignedp)
2124 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
2125 && TYPE_MODE (type) == TYPE_MODE (inter_type))
2126 && ! final_ptr
2127 && (! final_vec || inter_prec == inside_prec))
2129 gimple_assign_set_rhs1 (stmt, defop0);
2130 update_stmt (stmt);
2131 return true;
2134 /* If we have a sign-extension of a zero-extended value, we can
2135 replace that by a single zero-extension. */
2136 if (inside_int && inter_int && final_int
2137 && inside_prec < inter_prec && inter_prec < final_prec
2138 && inside_unsignedp && !inter_unsignedp)
2140 gimple_assign_set_rhs1 (stmt, defop0);
2141 update_stmt (stmt);
2142 return true;
2145 /* Two conversions in a row are not needed unless:
2146 - some conversion is floating-point (overstrict for now), or
2147 - some conversion is a vector (overstrict for now), or
2148 - the intermediate type is narrower than both initial and
2149 final, or
2150 - the intermediate type and innermost type differ in signedness,
2151 and the outermost type is wider than the intermediate, or
2152 - the initial type is a pointer type and the precisions of the
2153 intermediate and final types differ, or
2154 - the final type is a pointer type and the precisions of the
2155 initial and intermediate types differ. */
2156 if (! inside_float && ! inter_float && ! final_float
2157 && ! inside_vec && ! inter_vec && ! final_vec
2158 && (inter_prec >= inside_prec || inter_prec >= final_prec)
2159 && ! (inside_int && inter_int
2160 && inter_unsignedp != inside_unsignedp
2161 && inter_prec < final_prec)
2162 && ((inter_unsignedp && inter_prec > inside_prec)
2163 == (final_unsignedp && final_prec > inter_prec))
2164 && ! (inside_ptr && inter_prec != final_prec)
2165 && ! (final_ptr && inside_prec != inter_prec)
2166 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
2167 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
2169 gimple_assign_set_rhs1 (stmt, defop0);
2170 update_stmt (stmt);
2171 return true;
2174 /* A truncation to an unsigned type should be canonicalized as
2175 bitwise and of a mask. */
2176 if (final_int && inter_int && inside_int
2177 && final_prec == inside_prec
2178 && final_prec > inter_prec
2179 && inter_unsignedp)
2181 tree tem;
/* Mask DEFOP0 down to the intermediate precision in the inside
   type: tem = defop0 & ((1 << inter_prec) - 1).  */
2182 tem = fold_build2 (BIT_AND_EXPR, inside_type,
2183 defop0,
2184 double_int_to_tree
2185 (inside_type, double_int_mask (inter_prec)));
2186 if (!useless_type_conversion_p (type, inside_type))
/* The outer conversion is still needed: gimplify the AND into
   new statements before STMT and keep STMT as the conversion
   of the masked value.  */
2188 tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
2189 GSI_SAME_STMT);
2190 gimple_assign_set_rhs1 (stmt, tem);
2192 else
/* No conversion needed: replace STMT's rhs by the AND itself.  */
2193 gimple_assign_set_rhs_from_tree (gsi, tem);
2194 update_stmt (gsi_stmt (*gsi));
2195 return true;
2199 return false;
2202 /* Main entry point for the forward propagation and statement combine
2203 optimizer. */
2205 static unsigned int
2206 ssa_forward_propagate_and_combine (void)
2208 basic_block bb;
2209 unsigned int todoflags = 0;
2211 cfg_changed = false;
/* Each basic block is processed in two phases: first a forward
   propagation sweep, then a statement-combining sweep.  */
2213 FOR_EACH_BB (bb)
2215 gimple_stmt_iterator gsi, prev;
2216 bool prev_initialized;
2218 /* Apply forward propagation to all stmts in the basic-block.
2219 Note we update GSI within the loop as necessary. */
2220 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
2222 gimple stmt = gsi_stmt (gsi);
2223 tree lhs, rhs;
2224 enum tree_code code;
2226 if (!is_gimple_assign (stmt))
2228 gsi_next (&gsi);
2229 continue;
2232 lhs = gimple_assign_lhs (stmt);
2233 rhs = gimple_assign_rhs1 (stmt);
2234 code = gimple_assign_rhs_code (stmt);
/* Only single-result SSA definitions that are actually used can
   be propagated into their use sites.  */
2235 if (TREE_CODE (lhs) != SSA_NAME
2236 || has_zero_uses (lhs))
2238 gsi_next (&gsi);
2239 continue;
2242 /* If this statement sets an SSA_NAME to an address,
2243 try to propagate the address into the uses of the SSA_NAME. */
2244 if (code == ADDR_EXPR
2245 /* Handle pointer conversions on invariant addresses
2246 as well, as this is valid gimple. */
2247 || (CONVERT_EXPR_CODE_P (code)
2248 && TREE_CODE (rhs) == ADDR_EXPR
2249 && POINTER_TYPE_P (TREE_TYPE (lhs))))
2251 tree base = get_base_address (TREE_OPERAND (rhs, 0));
2252 if ((!base
2253 || !DECL_P (base)
2254 || decl_address_invariant_p (base))
2255 && !stmt_references_abnormal_ssa_name (stmt)
2256 && forward_propagate_addr_expr (lhs, rhs))
/* All uses were replaced; the address-taking stmt is dead.
   Removing it may make locals unused, hence the TODO flag.  */
2258 release_defs (stmt);
2259 todoflags |= TODO_remove_unused_locals;
2260 gsi_remove (&gsi, true);
2262 else
2263 gsi_next (&gsi);
/* For PTR p+ CST build the equivalent &MEM[p, CST] address and try
   the same address propagation on it.  */
2265 else if (code == POINTER_PLUS_EXPR
2266 && can_propagate_from (stmt))
2268 if (TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
2269 /* ??? Better adjust the interface to that function
2270 instead of building new trees here. */
2271 && forward_propagate_addr_expr
2272 (lhs,
2273 build1 (ADDR_EXPR,
2274 TREE_TYPE (rhs),
2275 fold_build2 (MEM_REF,
2276 TREE_TYPE (TREE_TYPE (rhs)),
2277 rhs,
2278 fold_convert
2279 (ptr_type_node,
2280 gimple_assign_rhs2 (stmt))))))
2282 release_defs (stmt);
2283 todoflags |= TODO_remove_unused_locals;
2284 gsi_remove (&gsi, true);
2286 else if (is_gimple_min_invariant (rhs))
2288 /* Make sure to fold &a[0] + off_1 here. */
2289 fold_stmt_inplace (stmt);
2290 update_stmt (stmt);
/* If folding left the POINTER_PLUS_EXPR in place there is nothing
   more to do for this stmt; otherwise re-visit it under its new
   rhs code on the next iteration.  */
2291 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2292 gsi_next (&gsi);
2294 else
2295 gsi_next (&gsi);
2297 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2299 forward_propagate_comparison (stmt);
2300 gsi_next (&gsi);
2302 else
2303 gsi_next (&gsi);
2306 /* Combine stmts with the stmts defining their operands.
2307 Note we update GSI within the loop as necessary. */
2308 prev_initialized = false;
2309 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
2311 gimple stmt = gsi_stmt (gsi);
2312 bool changed = false;
/* Dispatch on the statement kind to the matching combiner.  */
2314 switch (gimple_code (stmt))
2316 case GIMPLE_ASSIGN:
2318 tree rhs1 = gimple_assign_rhs1 (stmt);
2319 enum tree_code code = gimple_assign_rhs_code (stmt);
2321 if ((code == BIT_NOT_EXPR
2322 || code == NEGATE_EXPR)
2323 && TREE_CODE (rhs1) == SSA_NAME)
2324 changed = simplify_not_neg_expr (&gsi);
2325 else if (code == COND_EXPR)
2327 /* In this case the entire COND_EXPR is in rhs1. */
2328 int did_something;
/* Defer overflow warnings so they are only emitted if the
   transformation actually happened.  A return of 2 means
   edges were removed, i.e. the CFG changed.  */
2329 fold_defer_overflow_warnings ();
2330 did_something = forward_propagate_into_cond (&gsi);
2331 stmt = gsi_stmt (gsi);
2332 if (did_something == 2)
2333 cfg_changed = true;
2334 fold_undefer_overflow_warnings
2335 (!TREE_NO_WARNING (rhs1) && did_something, stmt,
2336 WARN_STRICT_OVERFLOW_CONDITIONAL);
2337 changed = did_something != 0;
2339 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2341 bool no_warning = gimple_no_warning_p (stmt);
2342 fold_defer_overflow_warnings ();
2343 changed = forward_propagate_into_comparison (&gsi);
2344 fold_undefer_overflow_warnings
2345 (!no_warning && changed,
2346 stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
2348 else if (code == BIT_AND_EXPR
2349 || code == BIT_IOR_EXPR
2350 || code == BIT_XOR_EXPR)
2351 changed = simplify_bitwise_binary (&gsi);
2352 else if (code == PLUS_EXPR
2353 || code == MINUS_EXPR)
2354 changed = associate_plusminus (stmt);
2355 else if (CONVERT_EXPR_CODE_P (code)
2356 || code == FLOAT_EXPR
2357 || code == FIX_TRUNC_EXPR)
2358 changed = combine_conversions (&gsi);
2359 break;
2362 case GIMPLE_SWITCH:
2363 changed = simplify_gimple_switch (stmt);
2364 break;
2366 case GIMPLE_COND:
2368 int did_something;
2369 fold_defer_overflow_warnings ();
2370 did_something = forward_propagate_into_gimple_cond (stmt);
2371 if (did_something == 2)
2372 cfg_changed = true;
2373 fold_undefer_overflow_warnings
2374 (did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
2375 changed = did_something != 0;
2376 break;
2379 case GIMPLE_CALL:
2381 tree callee = gimple_call_fndecl (stmt);
/* Only known (normal) builtins have simplifications.  */
2382 if (callee != NULL_TREE
2383 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
2384 changed = simplify_builtin_call (&gsi, callee);
2385 break;
2388 default:;
2391 if (changed)
2393 /* If the stmt changed then re-visit it and the statements
2394 inserted before it. */
/* PREV is the last statement known unchanged; restarting just
   after it (or at the BB start if none) re-scans the changed
   stmt plus anything a combiner inserted in front of it.  */
2395 if (!prev_initialized)
2396 gsi = gsi_start_bb (bb);
2397 else
2399 gsi = prev;
2400 gsi_next (&gsi);
2403 else
2405 prev = gsi;
2406 prev_initialized = true;
2407 gsi_next (&gsi);
/* Edge removals recorded above require a CFG cleanup pass.  */
2412 if (cfg_changed)
2413 todoflags |= TODO_cleanup_cfg;
2415 return todoflags;
/* Pass gate: run the pass only when flag_tree_forwprop
   (-ftree-forwprop) is enabled.  */
2419 static bool
2420 gate_forwprop (void)
2422 return flag_tree_forwprop;
2425 struct gimple_opt_pass pass_forwprop =
2428 GIMPLE_PASS,
2429 "forwprop", /* name */
2430 gate_forwprop, /* gate */
2431 ssa_forward_propagate_and_combine, /* execute */
2432 NULL, /* sub */
2433 NULL, /* next */
2434 0, /* static_pass_number */
2435 TV_TREE_FORWPROP, /* tv_id */
2436 PROP_cfg | PROP_ssa, /* properties_required */
2437 0, /* properties_provided */
2438 0, /* properties_destroyed */
2439 0, /* todo_flags_start */
2440 TODO_ggc_collect
2441 | TODO_update_ssa
2442 | TODO_verify_ssa /* todo_flags_finish */