* decl.c (compute_array_index_type): Use type_dependent_expression_p.
[official-gcc.git] / gcc / tree-ssa-forwprop.c
blob532b9c5c688c9ba144ce7127b4eacdcd2bf66c47
1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "tm_p.h"
27 #include "basic-block.h"
28 #include "gimple-pretty-print.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "langhooks.h"
32 #include "flags.h"
33 #include "gimple.h"
34 #include "expr.h"
35 #include "cfgloop.h"
37 /* This pass propagates the RHS of assignment statements into use
38 sites of the LHS of the assignment. It's basically a specialized
39 form of tree combination. It is hoped all of this can disappear
40 when we have a generalized tree combiner.
42 One class of common cases we handle is forward propagating a single use
43 variable into a COND_EXPR.
45 bb0:
46 x = a COND b;
47 if (x) goto ... else goto ...
49 Will be transformed into:
51 bb0:
52 if (a COND b) goto ... else goto ...
54 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
56 Or (assuming c1 and c2 are constants):
58 bb0:
59 x = a + c1;
60 if (x EQ/NEQ c2) goto ... else goto ...
62 Will be transformed into:
64 bb0:
65 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
67 Similarly for x = a - c1.
71 bb0:
72 x = !a
73 if (x) goto ... else goto ...
75 Will be transformed into:
77 bb0:
78 if (a == 0) goto ... else goto ...
80 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
81 For these cases, we propagate A into all, possibly more than one,
82 COND_EXPRs that use X.
86 bb0:
87 x = (typecast) a
88 if (x) goto ... else goto ...
90 Will be transformed into:
92 bb0:
93 if (a != 0) goto ... else goto ...
95 (Assuming a is an integral type and x is a boolean or x is an
96 integral and a is a boolean.)
98 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
99 For these cases, we propagate A into all, possibly more than one,
100 COND_EXPRs that use X.
102 In addition to eliminating the variable and the statement which assigns
103 a value to the variable, we may be able to later thread the jump without
104 adding insane complexity in the dominator optimizer.
106 Also note these transformations can cascade. We handle this by having
107 a worklist of COND_EXPR statements to examine. As we make a change to
108 a statement, we put it back on the worklist to examine on the next
109 iteration of the main loop.
111 A second class of propagation opportunities arises for ADDR_EXPR
112 nodes.
114 ptr = &x->y->z;
115 res = *ptr;
117 Will get turned into
119 res = x->y->z;
122 ptr = (type1*)&type2var;
123 res = *ptr
125 Will get turned into (if type1 and type2 are the same size
126 and neither have volatile on them):
127 res = VIEW_CONVERT_EXPR<type1>(type2var)
131 ptr = &x[0];
132 ptr2 = ptr + <constant>;
134 Will get turned into
136 ptr2 = &x[constant/elementsize];
140 ptr = &x[0];
141 offset = index * element_size;
142 offset_p = (pointer) offset;
143 ptr2 = ptr + offset_p
145 Will get turned into:
147 ptr2 = &x[index];
150 ssa = (int) decl
151 res = ssa & 1
153 Provided that decl has known alignment >= 2, will get turned into
155 res = 0
157 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
158 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
159 {NOT_EXPR,NEG_EXPR}.
161 This will (of course) be extended as other needs arise. */
163 static bool forward_propagate_addr_expr (tree name, tree rhs);
165 /* Set to true if we delete dead edges during the optimization. */
166 static bool cfg_changed;
168 static tree rhs_to_tree (tree type, gimple stmt);
170 /* Get the next statement we can propagate NAME's value into skipping
171 trivial copies. Returns the statement that is suitable as a
172 propagation destination or NULL_TREE if there is no such one.
173 This only returns destinations in a single-use chain. FINAL_NAME_P
174 if non-NULL is written to the ssa name that represents the use. */
176 static gimple
177 get_prop_dest_stmt (tree name, tree *final_name_p)
179 use_operand_p use;
180 gimple use_stmt;
182 do {
183 /* If name has multiple uses, bail out. */
184 if (!single_imm_use (name, &use, &use_stmt))
185 return NULL;
187 /* If this is not a trivial copy, we found it. */
188 if (!gimple_assign_ssa_name_copy_p (use_stmt)
189 || gimple_assign_rhs1 (use_stmt) != name)
190 break;
192 /* Continue searching uses of the copy destination. */
193 name = gimple_assign_lhs (use_stmt);
194 } while (1);
196 if (final_name_p)
197 *final_name_p = name;
199 return use_stmt;
202 /* Get the statement we can propagate from into NAME skipping
203 trivial copies. Returns the statement which defines the
204 propagation source or NULL_TREE if there is no such one.
205 If SINGLE_USE_ONLY is set considers only sources which have
206 a single use chain up to NAME. If SINGLE_USE_P is non-null,
207 it is set to whether the chain to NAME is a single use chain
208 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
210 static gimple
211 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
213 bool single_use = true;
215 do {
216 gimple def_stmt = SSA_NAME_DEF_STMT (name);
218 if (!has_single_use (name))
220 single_use = false;
221 if (single_use_only)
222 return NULL;
225 /* If name is defined by a PHI node or is the default def, bail out. */
226 if (!is_gimple_assign (def_stmt))
227 return NULL;
229 /* If def_stmt is not a simple copy, we possibly found it. */
230 if (!gimple_assign_ssa_name_copy_p (def_stmt))
232 tree rhs;
234 if (!single_use_only && single_use_p)
235 *single_use_p = single_use;
237 /* We can look through pointer conversions in the search
238 for a useful stmt for the comparison folding. */
239 rhs = gimple_assign_rhs1 (def_stmt);
240 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
241 && TREE_CODE (rhs) == SSA_NAME
242 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
243 && POINTER_TYPE_P (TREE_TYPE (rhs)))
244 name = rhs;
245 else
246 return def_stmt;
248 else
250 /* Continue searching the def of the copy source name. */
251 name = gimple_assign_rhs1 (def_stmt);
253 } while (1);
256 /* Checks if the destination ssa name in DEF_STMT can be used as
257 propagation source. Returns true if so, otherwise false. */
259 static bool
260 can_propagate_from (gimple def_stmt)
262 gcc_assert (is_gimple_assign (def_stmt));
264 /* If the rhs has side-effects we cannot propagate from it. */
265 if (gimple_has_volatile_ops (def_stmt))
266 return false;
268 /* If the rhs is a load we cannot propagate from it. */
269 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
270 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
271 return false;
273 /* Constants can be always propagated. */
274 if (gimple_assign_single_p (def_stmt)
275 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
276 return true;
278 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
279 if (stmt_references_abnormal_ssa_name (def_stmt))
280 return false;
282 /* If the definition is a conversion of a pointer to a function type,
283 then we can not apply optimizations as some targets require
284 function pointers to be canonicalized and in this case this
285 optimization could eliminate a necessary canonicalization. */
286 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
288 tree rhs = gimple_assign_rhs1 (def_stmt);
289 if (POINTER_TYPE_P (TREE_TYPE (rhs))
290 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
291 return false;
294 return true;
297 /* Remove a chain of dead statements starting at the definition of
298 NAME. The chain is linked via the first operand of the defining statements.
299 If NAME was replaced in its only use then this function can be used
300 to clean up dead stmts. The function handles already released SSA
301 names gracefully.
302 Returns true if cleanup-cfg has to run. */
304 static bool
305 remove_prop_source_from_use (tree name)
307 gimple_stmt_iterator gsi;
308 gimple stmt;
309 bool cfg_changed = false;
311 do {
312 basic_block bb;
314 if (SSA_NAME_IN_FREE_LIST (name)
315 || SSA_NAME_IS_DEFAULT_DEF (name)
316 || !has_zero_uses (name))
317 return cfg_changed;
319 stmt = SSA_NAME_DEF_STMT (name);
320 if (gimple_code (stmt) == GIMPLE_PHI
321 || gimple_has_side_effects (stmt))
322 return cfg_changed;
324 bb = gimple_bb (stmt);
325 gsi = gsi_for_stmt (stmt);
326 unlink_stmt_vdef (stmt);
327 if (gsi_remove (&gsi, true))
328 cfg_changed |= gimple_purge_dead_eh_edges (bb);
329 release_defs (stmt);
331 name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
332 } while (name && TREE_CODE (name) == SSA_NAME);
334 return cfg_changed;
337 /* Return the rhs of a gimple_assign STMT in a form of a single tree,
338 converted to type TYPE.
340 This should disappear, but is needed so we can combine expressions and use
341 the fold() interfaces. Long term, we need to develop folding and combine
342 routines that deal with gimple exclusively . */
344 static tree
345 rhs_to_tree (tree type, gimple stmt)
347 location_t loc = gimple_location (stmt);
348 enum tree_code code = gimple_assign_rhs_code (stmt);
349 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
350 return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
351 gimple_assign_rhs2 (stmt),
352 gimple_assign_rhs3 (stmt));
353 else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
354 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
355 gimple_assign_rhs2 (stmt));
356 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
357 return build1 (code, type, gimple_assign_rhs1 (stmt));
358 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
359 return gimple_assign_rhs1 (stmt);
360 else
361 gcc_unreachable ();
364 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
365 the folded result in a form suitable for COND_EXPR_COND or
366 NULL_TREE, if there is no suitable simplified form. If
367 INVARIANT_ONLY is true only gimple_min_invariant results are
368 considered simplified. */
370 static tree
371 combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
372 tree op0, tree op1, bool invariant_only)
374 tree t;
376 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
378 fold_defer_overflow_warnings ();
379 t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
380 if (!t)
382 fold_undefer_overflow_warnings (false, NULL, 0);
383 return NULL_TREE;
386 /* Require that we got a boolean type out if we put one in. */
387 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
389 /* Canonicalize the combined condition for use in a COND_EXPR. */
390 t = canonicalize_cond_expr_cond (t);
392 /* Bail out if we required an invariant but didn't get one. */
393 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
395 fold_undefer_overflow_warnings (false, NULL, 0);
396 return NULL_TREE;
399 fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);
401 return t;
404 /* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
405 of its operand. Return a new comparison tree or NULL_TREE if there
406 were no simplifying combines. */
408 static tree
409 forward_propagate_into_comparison_1 (gimple stmt,
410 enum tree_code code, tree type,
411 tree op0, tree op1)
413 tree tmp = NULL_TREE;
414 tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
415 bool single_use0_p = false, single_use1_p = false;
417 /* For comparisons use the first operand, that is likely to
418 simplify comparisons against constants. */
419 if (TREE_CODE (op0) == SSA_NAME)
421 gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
422 if (def_stmt && can_propagate_from (def_stmt))
424 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
425 tmp = combine_cond_expr_cond (stmt, code, type,
426 rhs0, op1, !single_use0_p);
427 if (tmp)
428 return tmp;
432 /* If that wasn't successful, try the second operand. */
433 if (TREE_CODE (op1) == SSA_NAME)
435 gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
436 if (def_stmt && can_propagate_from (def_stmt))
438 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
439 tmp = combine_cond_expr_cond (stmt, code, type,
440 op0, rhs1, !single_use1_p);
441 if (tmp)
442 return tmp;
446 /* If that wasn't successful either, try both operands. */
447 if (rhs0 != NULL_TREE
448 && rhs1 != NULL_TREE)
449 tmp = combine_cond_expr_cond (stmt, code, type,
450 rhs0, rhs1,
451 !(single_use0_p && single_use1_p));
453 return tmp;
456 /* Propagate from the ssa name definition statements of the assignment
457 from a comparison at *GSI into the conditional if that simplifies it.
458 Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
459 otherwise returns 0. */
461 static int
462 forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
464 gimple stmt = gsi_stmt (*gsi);
465 tree tmp;
466 bool cfg_changed = false;
467 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
468 tree rhs1 = gimple_assign_rhs1 (stmt);
469 tree rhs2 = gimple_assign_rhs2 (stmt);
471 /* Combine the comparison with defining statements. */
472 tmp = forward_propagate_into_comparison_1 (stmt,
473 gimple_assign_rhs_code (stmt),
474 type, rhs1, rhs2);
475 if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
477 gimple_assign_set_rhs_from_tree (gsi, tmp);
478 fold_stmt (gsi);
479 update_stmt (gsi_stmt (*gsi));
481 if (TREE_CODE (rhs1) == SSA_NAME)
482 cfg_changed |= remove_prop_source_from_use (rhs1);
483 if (TREE_CODE (rhs2) == SSA_NAME)
484 cfg_changed |= remove_prop_source_from_use (rhs2);
485 return cfg_changed ? 2 : 1;
488 return 0;
491 /* Propagate from the ssa name definition statements of COND_EXPR
492 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
493 Returns zero if no statement was changed, one if there were
494 changes and two if cfg_cleanup needs to run.
496 This must be kept in sync with forward_propagate_into_cond. */
498 static int
499 forward_propagate_into_gimple_cond (gimple stmt)
501 tree tmp;
502 enum tree_code code = gimple_cond_code (stmt);
503 bool cfg_changed = false;
504 tree rhs1 = gimple_cond_lhs (stmt);
505 tree rhs2 = gimple_cond_rhs (stmt);
507 /* We can do tree combining on SSA_NAME and comparison expressions. */
508 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
509 return 0;
511 tmp = forward_propagate_into_comparison_1 (stmt, code,
512 boolean_type_node,
513 rhs1, rhs2);
514 if (tmp)
516 if (dump_file && tmp)
518 fprintf (dump_file, " Replaced '");
519 print_gimple_expr (dump_file, stmt, 0, 0);
520 fprintf (dump_file, "' with '");
521 print_generic_expr (dump_file, tmp, 0);
522 fprintf (dump_file, "'\n");
525 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
526 update_stmt (stmt);
528 if (TREE_CODE (rhs1) == SSA_NAME)
529 cfg_changed |= remove_prop_source_from_use (rhs1);
530 if (TREE_CODE (rhs2) == SSA_NAME)
531 cfg_changed |= remove_prop_source_from_use (rhs2);
532 return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
535 /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges. */
536 if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
537 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
538 && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
539 && ((code == EQ_EXPR
540 && integer_zerop (rhs2))
541 || (code == NE_EXPR
542 && integer_onep (rhs2))))
544 basic_block bb = gimple_bb (stmt);
545 gimple_cond_set_code (stmt, NE_EXPR);
546 gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
547 EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
548 EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
549 return 1;
552 return 0;
556 /* Propagate from the ssa name definition statements of COND_EXPR
557 in the rhs of statement STMT into the conditional if that simplifies it.
558 Returns true zero if the stmt was changed. */
560 static bool
561 forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
563 gimple stmt = gsi_stmt (*gsi_p);
564 tree tmp = NULL_TREE;
565 tree cond = gimple_assign_rhs1 (stmt);
566 bool swap = false;
568 /* We can do tree combining on SSA_NAME and comparison expressions. */
569 if (COMPARISON_CLASS_P (cond))
570 tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
571 boolean_type_node,
572 TREE_OPERAND (cond, 0),
573 TREE_OPERAND (cond, 1));
574 else if (TREE_CODE (cond) == SSA_NAME)
576 enum tree_code code;
577 tree name = cond;
578 gimple def_stmt = get_prop_source_stmt (name, true, NULL);
579 if (!def_stmt || !can_propagate_from (def_stmt))
580 return 0;
582 code = gimple_assign_rhs_code (def_stmt);
583 if (TREE_CODE_CLASS (code) == tcc_comparison)
584 tmp = fold_build2_loc (gimple_location (def_stmt),
585 code,
586 boolean_type_node,
587 gimple_assign_rhs1 (def_stmt),
588 gimple_assign_rhs2 (def_stmt));
589 else if ((code == BIT_NOT_EXPR
590 && TYPE_PRECISION (TREE_TYPE (cond)) == 1)
591 || (code == BIT_XOR_EXPR
592 && integer_onep (gimple_assign_rhs2 (def_stmt))))
594 tmp = gimple_assign_rhs1 (def_stmt);
595 swap = true;
599 if (tmp
600 && is_gimple_condexpr (tmp))
602 if (dump_file && tmp)
604 fprintf (dump_file, " Replaced '");
605 print_generic_expr (dump_file, cond, 0);
606 fprintf (dump_file, "' with '");
607 print_generic_expr (dump_file, tmp, 0);
608 fprintf (dump_file, "'\n");
611 if (integer_onep (tmp))
612 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
613 else if (integer_zerop (tmp))
614 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
615 else
617 gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
618 if (swap)
620 tree t = gimple_assign_rhs2 (stmt);
621 gimple_assign_set_rhs2 (stmt, gimple_assign_rhs3 (stmt));
622 gimple_assign_set_rhs3 (stmt, t);
625 stmt = gsi_stmt (*gsi_p);
626 update_stmt (stmt);
628 return true;
631 return 0;
634 /* Propagate from the ssa name definition statements of COND_EXPR
635 values in the rhs of statement STMT into the conditional arms
636 if that simplifies it.
637 Returns true if the stmt was changed. */
639 static bool
640 combine_cond_exprs (gimple_stmt_iterator *gsi_p)
642 gimple stmt = gsi_stmt (*gsi_p);
643 tree cond, val1, val2;
644 bool changed = false;
646 cond = gimple_assign_rhs1 (stmt);
647 val1 = gimple_assign_rhs2 (stmt);
648 if (TREE_CODE (val1) == SSA_NAME)
650 gimple def_stmt = SSA_NAME_DEF_STMT (val1);
651 if (is_gimple_assign (def_stmt)
652 && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
653 && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
655 val1 = unshare_expr (gimple_assign_rhs2 (def_stmt));
656 gimple_assign_set_rhs2 (stmt, val1);
657 changed = true;
660 val2 = gimple_assign_rhs3 (stmt);
661 if (TREE_CODE (val2) == SSA_NAME)
663 gimple def_stmt = SSA_NAME_DEF_STMT (val2);
664 if (is_gimple_assign (def_stmt)
665 && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
666 && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
668 val2 = unshare_expr (gimple_assign_rhs3 (def_stmt));
669 gimple_assign_set_rhs3 (stmt, val2);
670 changed = true;
673 if (operand_equal_p (val1, val2, 0))
675 gimple_assign_set_rhs_from_tree (gsi_p, val1);
676 stmt = gsi_stmt (*gsi_p);
677 changed = true;
680 if (changed)
681 update_stmt (stmt);
683 return changed;
686 /* We've just substituted an ADDR_EXPR into stmt. Update all the
687 relevant data structures to match. */
689 static void
690 tidy_after_forward_propagate_addr (gimple stmt)
692 /* We may have turned a trapping insn into a non-trapping insn. */
693 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
694 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
695 cfg_changed = true;
697 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
698 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
701 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
702 ADDR_EXPR <whatever>.
704 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
705 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
706 node or for recovery of array indexing from pointer arithmetic.
708 Return true if the propagation was successful (the propagation can
709 be not totally successful, yet things may have been changed). */
711 static bool
712 forward_propagate_addr_expr_1 (tree name, tree def_rhs,
713 gimple_stmt_iterator *use_stmt_gsi,
714 bool single_use_p)
716 tree lhs, rhs, rhs2, array_ref;
717 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
718 enum tree_code rhs_code;
719 bool res = true;
721 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
723 lhs = gimple_assign_lhs (use_stmt);
724 rhs_code = gimple_assign_rhs_code (use_stmt);
725 rhs = gimple_assign_rhs1 (use_stmt);
727 /* Trivial cases. The use statement could be a trivial copy or a
728 useless conversion. Recurse to the uses of the lhs as copyprop does
729 not copy through different variant pointers and FRE does not catch
730 all useless conversions. Treat the case of a single-use name and
731 a conversion to def_rhs type separate, though. */
732 if (TREE_CODE (lhs) == SSA_NAME
733 && ((rhs_code == SSA_NAME && rhs == name)
734 || CONVERT_EXPR_CODE_P (rhs_code)))
736 /* Only recurse if we don't deal with a single use or we cannot
737 do the propagation to the current statement. In particular
738 we can end up with a conversion needed for a non-invariant
739 address which we cannot do in a single statement. */
740 if (!single_use_p
741 || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
742 && (!is_gimple_min_invariant (def_rhs)
743 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
744 && POINTER_TYPE_P (TREE_TYPE (def_rhs))
745 && (TYPE_PRECISION (TREE_TYPE (lhs))
746 > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
747 return forward_propagate_addr_expr (lhs, def_rhs);
749 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
750 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
751 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
752 else
753 gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
754 return true;
757 /* Propagate through constant pointer adjustments. */
758 if (TREE_CODE (lhs) == SSA_NAME
759 && rhs_code == POINTER_PLUS_EXPR
760 && rhs == name
761 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
763 tree new_def_rhs;
764 /* As we come here with non-invariant addresses in def_rhs we need
765 to make sure we can build a valid constant offsetted address
766 for further propagation. Simply rely on fold building that
767 and check after the fact. */
768 new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
769 def_rhs,
770 fold_convert (ptr_type_node,
771 gimple_assign_rhs2 (use_stmt)));
772 if (TREE_CODE (new_def_rhs) == MEM_REF
773 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
774 return false;
775 new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
776 TREE_TYPE (rhs));
778 /* Recurse. If we could propagate into all uses of lhs do not
779 bother to replace into the current use but just pretend we did. */
780 if (TREE_CODE (new_def_rhs) == ADDR_EXPR
781 && forward_propagate_addr_expr (lhs, new_def_rhs))
782 return true;
784 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
785 gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
786 new_def_rhs, NULL_TREE);
787 else if (is_gimple_min_invariant (new_def_rhs))
788 gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
789 new_def_rhs, NULL_TREE);
790 else
791 return false;
792 gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
793 update_stmt (use_stmt);
794 return true;
797 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
798 ADDR_EXPR will not appear on the LHS. */
799 lhs = gimple_assign_lhs (use_stmt);
800 while (handled_component_p (lhs))
801 lhs = TREE_OPERAND (lhs, 0);
803 /* Now see if the LHS node is a MEM_REF using NAME. If so,
804 propagate the ADDR_EXPR into the use of NAME and fold the result. */
805 if (TREE_CODE (lhs) == MEM_REF
806 && TREE_OPERAND (lhs, 0) == name)
808 tree def_rhs_base;
809 HOST_WIDE_INT def_rhs_offset;
810 /* If the address is invariant we can always fold it. */
811 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
812 &def_rhs_offset)))
814 double_int off = mem_ref_offset (lhs);
815 tree new_ptr;
816 off = double_int_add (off,
817 shwi_to_double_int (def_rhs_offset));
818 if (TREE_CODE (def_rhs_base) == MEM_REF)
820 off = double_int_add (off, mem_ref_offset (def_rhs_base));
821 new_ptr = TREE_OPERAND (def_rhs_base, 0);
823 else
824 new_ptr = build_fold_addr_expr (def_rhs_base);
825 TREE_OPERAND (lhs, 0) = new_ptr;
826 TREE_OPERAND (lhs, 1)
827 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
828 tidy_after_forward_propagate_addr (use_stmt);
829 /* Continue propagating into the RHS if this was not the only use. */
830 if (single_use_p)
831 return true;
833 /* If the LHS is a plain dereference and the value type is the same as
834 that of the pointed-to type of the address we can put the
835 dereferenced address on the LHS preserving the original alias-type. */
836 else if (gimple_assign_lhs (use_stmt) == lhs
837 && integer_zerop (TREE_OPERAND (lhs, 1))
838 && useless_type_conversion_p
839 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
840 TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
842 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
843 tree new_offset, new_base, saved, new_lhs;
844 while (handled_component_p (*def_rhs_basep))
845 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
846 saved = *def_rhs_basep;
847 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
849 new_base = TREE_OPERAND (*def_rhs_basep, 0);
850 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
851 TREE_OPERAND (*def_rhs_basep, 1));
853 else
855 new_base = build_fold_addr_expr (*def_rhs_basep);
856 new_offset = TREE_OPERAND (lhs, 1);
858 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
859 new_base, new_offset);
860 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
861 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
862 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
863 new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
864 gimple_assign_set_lhs (use_stmt, new_lhs);
865 TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
866 TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
867 *def_rhs_basep = saved;
868 tidy_after_forward_propagate_addr (use_stmt);
869 /* Continue propagating into the RHS if this was not the
870 only use. */
871 if (single_use_p)
872 return true;
874 else
875 /* We can have a struct assignment dereferencing our name twice.
876 Note that we didn't propagate into the lhs to not falsely
877 claim we did when propagating into the rhs. */
878 res = false;
881 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
882 nodes from the RHS. */
883 rhs = gimple_assign_rhs1 (use_stmt);
884 if (TREE_CODE (rhs) == ADDR_EXPR)
885 rhs = TREE_OPERAND (rhs, 0);
886 while (handled_component_p (rhs))
887 rhs = TREE_OPERAND (rhs, 0);
889 /* Now see if the RHS node is a MEM_REF using NAME. If so,
890 propagate the ADDR_EXPR into the use of NAME and fold the result. */
891 if (TREE_CODE (rhs) == MEM_REF
892 && TREE_OPERAND (rhs, 0) == name)
894 tree def_rhs_base;
895 HOST_WIDE_INT def_rhs_offset;
896 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
897 &def_rhs_offset)))
899 double_int off = mem_ref_offset (rhs);
900 tree new_ptr;
901 off = double_int_add (off,
902 shwi_to_double_int (def_rhs_offset));
903 if (TREE_CODE (def_rhs_base) == MEM_REF)
905 off = double_int_add (off, mem_ref_offset (def_rhs_base));
906 new_ptr = TREE_OPERAND (def_rhs_base, 0);
908 else
909 new_ptr = build_fold_addr_expr (def_rhs_base);
910 TREE_OPERAND (rhs, 0) = new_ptr;
911 TREE_OPERAND (rhs, 1)
912 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
913 fold_stmt_inplace (use_stmt_gsi);
914 tidy_after_forward_propagate_addr (use_stmt);
915 return res;
917 /* If the RHS is a plain dereference and the value type is the same as
918 that of the pointed-to type of the address we can put the
919 dereferenced address on the RHS preserving the original alias-type. */
920 else if (gimple_assign_rhs1 (use_stmt) == rhs
921 && integer_zerop (TREE_OPERAND (rhs, 1))
922 && useless_type_conversion_p
923 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
924 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
926 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
927 tree new_offset, new_base, saved, new_rhs;
928 while (handled_component_p (*def_rhs_basep))
929 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
930 saved = *def_rhs_basep;
931 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
933 new_base = TREE_OPERAND (*def_rhs_basep, 0);
934 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
935 TREE_OPERAND (*def_rhs_basep, 1));
937 else
939 new_base = build_fold_addr_expr (*def_rhs_basep);
940 new_offset = TREE_OPERAND (rhs, 1);
942 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
943 new_base, new_offset);
944 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
945 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
946 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
947 new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
948 gimple_assign_set_rhs1 (use_stmt, new_rhs);
949 TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
950 TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
951 *def_rhs_basep = saved;
952 fold_stmt_inplace (use_stmt_gsi);
953 tidy_after_forward_propagate_addr (use_stmt);
954 return res;
958 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
959 is nothing to do. */
960 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
961 || gimple_assign_rhs1 (use_stmt) != name)
962 return false;
964 /* The remaining cases are all for turning pointer arithmetic into
965 array indexing. They only apply when we have the address of
966 element zero in an array. If that is not the case then there
967 is nothing to do. */
968 array_ref = TREE_OPERAND (def_rhs, 0);
969 if ((TREE_CODE (array_ref) != ARRAY_REF
970 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
971 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
972 && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
973 return false;
975 rhs2 = gimple_assign_rhs2 (use_stmt);
976 /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2. */
977 if (TREE_CODE (rhs2) == INTEGER_CST)
979 tree new_rhs = build1_loc (gimple_location (use_stmt),
980 ADDR_EXPR, TREE_TYPE (def_rhs),
981 fold_build2 (MEM_REF,
982 TREE_TYPE (TREE_TYPE (def_rhs)),
983 unshare_expr (def_rhs),
984 fold_convert (ptr_type_node,
985 rhs2)));
986 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
987 use_stmt = gsi_stmt (*use_stmt_gsi);
988 update_stmt (use_stmt);
989 tidy_after_forward_propagate_addr (use_stmt);
990 return true;
993 return false;
996 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
998 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
999 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
1000 node or for recovery of array indexing from pointer arithmetic.
1001 Returns true, if all uses have been propagated into. */
1003 static bool
1004 forward_propagate_addr_expr (tree name, tree rhs)
1006 int stmt_loop_depth = bb_loop_depth (gimple_bb (SSA_NAME_DEF_STMT (name)));
1007 imm_use_iterator iter;
1008 gimple use_stmt;
/* Remains true only while every (non-debug) use has been propagated into.  */
1009 bool all = true;
/* A single-use NAME permits more aggressive propagation in the helper.  */
1010 bool single_use_p = has_single_use (name);
1012 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
1014 bool result;
1015 tree use_rhs;
1017 /* If the use is not in a simple assignment statement, then
1018 there is nothing we can do. */
1019 if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
/* Debug statements do not count against full propagation.  */
1021 if (!is_gimple_debug (use_stmt))
1022 all = false;
1023 continue;
1026 /* If the use is in a deeper loop nest, then we do not want
1027 to propagate non-invariant ADDR_EXPRs into the loop as that
1028 is likely adding expression evaluations into the loop. */
1029 if (bb_loop_depth (gimple_bb (use_stmt)) > stmt_loop_depth
1030 && !is_gimple_min_invariant (rhs))
1032 all = false;
1033 continue;
1037 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1038 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
1039 single_use_p);
1040 /* If the use has moved to a different statement adjust
1041 the update machinery for the old statement too. */
1042 if (use_stmt != gsi_stmt (gsi))
1044 update_stmt (use_stmt);
1045 use_stmt = gsi_stmt (gsi);
1048 update_stmt (use_stmt);
1050 all &= result;
1052 /* Remove intermediate now unused copy and conversion chains. */
1053 use_rhs = gimple_assign_rhs1 (use_stmt);
1054 if (result
1055 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
1056 && TREE_CODE (use_rhs) == SSA_NAME
1057 && has_zero_uses (gimple_assign_lhs (use_stmt)))
1059 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1060 release_defs (use_stmt);
1061 gsi_remove (&gsi, true);
/* The caller may delete NAME's definition only if every use was
   propagated into and no uses remain.  */
1065 return all && has_zero_uses (name);
1069 /* Forward propagate the comparison defined in *DEFGSI like
1070 cond_1 = x CMP y to uses of the form
1071 a_1 = (T')cond_1
1072 a_1 = !cond_1
1073 a_1 = cond_1 != 0
1074 Returns true if stmt is now unused. Advance DEFGSI to the next
1075 statement. */
1077 static bool
1078 forward_propagate_comparison (gimple_stmt_iterator *defgsi)
1080 gimple stmt = gsi_stmt (*defgsi);
1081 tree name = gimple_assign_lhs (stmt);
1082 gimple use_stmt;
1083 tree tmp = NULL_TREE;
1084 gimple_stmt_iterator gsi;
1085 enum tree_code code;
1086 tree lhs;
1088 /* Don't propagate ssa names that occur in abnormal phis. */
1089 if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1090 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
1091 || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
1092 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
1093 goto bailout;
1095 /* Do not un-cse comparisons. But propagate through copies. */
1096 use_stmt = get_prop_dest_stmt (name, &name);
1097 if (!use_stmt
1098 || !is_gimple_assign (use_stmt))
1099 goto bailout;
1101 code = gimple_assign_rhs_code (use_stmt);
1102 lhs = gimple_assign_lhs (use_stmt);
/* Only integral destinations can represent a comparison result.  */
1103 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
1104 goto bailout;
1106 /* We can propagate the condition into a statement that
1107 computes the logical negation of the comparison result. */
1108 if ((code == BIT_NOT_EXPR
1109 && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
1110 || (code == BIT_XOR_EXPR
1111 && integer_onep (gimple_assign_rhs2 (use_stmt))))
1113 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
/* Inverting a floating-point comparison must honor NaNs:
   !(a < b) is not (a >= b) when an operand can be NaN.  */
1114 bool nans = HONOR_NANS (TYPE_MODE (type));
1115 enum tree_code inv_code;
1116 inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
/* Some comparisons have no single-code inverse (e.g. with NaNs).  */
1117 if (inv_code == ERROR_MARK)
1118 goto bailout;
1120 tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
1121 gimple_assign_rhs2 (stmt));
1123 else
1124 goto bailout;
1126 gsi = gsi_for_stmt (use_stmt);
1127 gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
1128 use_stmt = gsi_stmt (gsi);
1129 update_stmt (use_stmt);
1131 if (dump_file && (dump_flags & TDF_DETAILS))
1133 fprintf (dump_file, " Replaced '");
1134 print_gimple_expr (dump_file, stmt, 0, dump_flags);
1135 fprintf (dump_file, "' with '");
1136 print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
1137 fprintf (dump_file, "'\n");
1140 /* When we remove stmt now the iterator defgsi goes off its current
1141 sequence, hence advance it now. */
1142 gsi_next (defgsi);
1144 /* Remove defining statements. */
1145 return remove_prop_source_from_use (name);
1147 bailout:
1148 gsi_next (defgsi);
1149 return false;
1153 /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1154 If so, we can change STMT into lhs = y which can later be copy
1155 propagated. Similarly for negation.
1157 This could trivially be formulated as a forward propagation
1158 to immediate uses. However, we already had an implementation
1159 from DOM which used backward propagation via the use-def links.
1161 It turns out that backward propagation is actually faster as
1162 there's less work to do for each NOT/NEG expression we find.
1163 Backwards propagation needs to look at the statement in a single
1164 backlink. Forward propagation needs to look at potentially more
1165 than one forward link.
1167 Returns true when the statement was changed. */
1169 static bool
1170 simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
1172 gimple stmt = gsi_stmt (*gsi_p);
1173 tree rhs = gimple_assign_rhs1 (stmt);
1174 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
1176 /* See if the RHS_DEF_STMT has the same form as our statement. */
1177 if (is_gimple_assign (rhs_def_stmt)
1178 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
1180 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
1182 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1183 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1184 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
1186 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
1187 stmt = gsi_stmt (*gsi_p);
1188 update_stmt (stmt);
1189 return true;
1193 return false;
1196 /* Helper function for simplify_gimple_switch. Remove case labels that
1197 have values outside the range of the new type. */
1199 static void
1200 simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
1202 unsigned int branch_num = gimple_switch_num_labels (stmt);
1203 VEC(tree, heap) *labels = VEC_alloc (tree, heap, branch_num);
1204 unsigned int i, len;
1206 /* Collect the existing case labels in a VEC, and preprocess it as if
1207 we are gimplifying a GENERIC SWITCH_EXPR. */
/* Label 0 is the default label; only real case labels (1..branch_num-1)
   take part in the range filtering.  */
1208 for (i = 1; i < branch_num; i++)
1209 VEC_quick_push (tree, labels, gimple_switch_label (stmt, i))
1210 preprocess_case_label_vec_for_gimple (labels, index_type, NULL);
1212 /* If any labels were removed, replace the existing case labels
1213 in the GIMPLE_SWITCH statement with the correct ones.
1214 Note that the type updates were done in-place on the case labels,
1215 so we only have to replace the case labels in the GIMPLE_SWITCH
1216 if the number of labels changed. */
1217 len = VEC_length (tree, labels);
1218 if (len < branch_num - 1)
1220 bitmap target_blocks;
1221 edge_iterator ei;
1222 edge e;
1224 /* Corner case: *all* case labels have been removed as being
1225 out-of-range for INDEX_TYPE. Push one label and let the
1226 CFG cleanups deal with this further. */
1227 if (len == 0)
1229 tree label, elt;
1231 label = CASE_LABEL (gimple_switch_default_label (stmt));
1232 elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
1233 VEC_quick_push (tree, labels, elt);
1234 len = 1;
1237 for (i = 0; i < VEC_length (tree, labels); i++)
1238 gimple_switch_set_label (stmt, i + 1, VEC_index (tree, labels, i));
/* Clear the now-stale trailing label slots before shrinking.  */
1239 for (i++ ; i < branch_num; i++)
1240 gimple_switch_set_label (stmt, i, NULL_TREE);
1241 gimple_switch_set_num_labels (stmt, len + 1);
1243 /* Cleanup any edges that are now dead. */
1244 target_blocks = BITMAP_ALLOC (NULL);
1245 for (i = 0; i < gimple_switch_num_labels (stmt); i++)
1247 tree elt = gimple_switch_label (stmt, i);
1248 basic_block target = label_to_block (CASE_LABEL (elt));
1249 bitmap_set_bit (target_blocks, target->index);
1251 for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
1253 if (! bitmap_bit_p (target_blocks, e->dest->index))
1255 remove_edge (e);
/* NOTE(review): cfg_changed appears to be a file-scope flag consulted
   by the pass driver to schedule CFG cleanup -- confirm.  */
1256 cfg_changed = true;
1257 free_dominance_info (CDI_DOMINATORS);
1259 else
1260 ei_next (&ei);
1262 BITMAP_FREE (target_blocks);
1265 VEC_free (tree, heap, labels);
1268 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1269 the condition which we may be able to optimize better. */
1271 static bool
1272 simplify_gimple_switch (gimple stmt)
1274 tree cond = gimple_switch_index (stmt);
1275 tree def, to, ti;
1276 gimple def_stmt;
1278 /* The optimization that we really care about is removing unnecessary
1279 casts. That will let us do much better in propagating the inferred
1280 constant at the switch target. */
1281 if (TREE_CODE (cond) == SSA_NAME)
1283 def_stmt = SSA_NAME_DEF_STMT (cond);
1284 if (is_gimple_assign (def_stmt))
1286 if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
1288 int need_precision;
1289 bool fail;
1291 def = gimple_assign_rhs1 (def_stmt);
/* TO is the type currently switched on, TI the type of the
   un-converted source value.  */
1293 to = TREE_TYPE (cond);
1294 ti = TREE_TYPE (def);
1296 /* If we have an extension that preserves value, then we
1297 can copy the source value into the switch. */
1299 need_precision = TYPE_PRECISION (ti);
1300 fail = false;
1301 if (! INTEGRAL_TYPE_P (ti))
1302 fail = true;
/* A signed source widened into an unsigned type does not
   preserve negative values.  */
1303 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
1304 fail = true;
/* An unsigned source needs one extra bit to fit in a signed type.  */
1305 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
1306 need_precision += 1;
1307 if (TYPE_PRECISION (to) < need_precision)
1308 fail = true;
1310 if (!fail)
1312 gimple_switch_set_index (stmt, def);
/* Drop any case labels unrepresentable in the narrower type.  */
1313 simplify_gimple_switch_label_vec (stmt, ti);
1314 update_stmt (stmt);
1315 return true;
1321 return false;
1324 /* For pointers p2 and p1 return p2 - p1 if the
1325 difference is known and constant, otherwise return NULL. */
1327 static tree
1328 constant_pointer_difference (tree p1, tree p2)
1330 int i, j;
1331 #define CPD_ITERATIONS 5
/* exps[i] records up to CPD_ITERATIONS "base" expressions reachable
   from p2 (i == 0) resp. p1 (i == 1); offs[i] records the byte offset
   of the original pointer relative to each recorded base.  */
1332 tree exps[2][CPD_ITERATIONS];
1333 tree offs[2][CPD_ITERATIONS];
1334 int cnt[2];
1336 for (i = 0; i < 2; i++)
1338 tree p = i ? p1 : p2;
1339 tree off = size_zero_node;
1340 gimple stmt;
1341 enum tree_code code;
1343 /* For each of p1 and p2 we need to iterate at least
1344 twice, to handle ADDR_EXPR directly in p1/p2,
1345 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1346 on definition's stmt RHS. Iterate a few extra times. */
1347 j = 0;
1350 if (!POINTER_TYPE_P (TREE_TYPE (p)))
1351 break;
1352 if (TREE_CODE (p) == ADDR_EXPR)
1354 tree q = TREE_OPERAND (p, 0);
1355 HOST_WIDE_INT offset;
1356 tree base = get_addr_base_and_unit_offset (q, &offset);
1357 if (base)
1359 q = base;
1360 if (offset)
1361 off = size_binop (PLUS_EXPR, off, size_int (offset));
/* &MEM[ssa + cst] is equivalent to ssa plus a constant offset;
   strip it and keep walking the SSA chain.  */
1363 if (TREE_CODE (q) == MEM_REF
1364 && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
1366 p = TREE_OPERAND (q, 0);
1367 off = size_binop (PLUS_EXPR, off,
1368 double_int_to_tree (sizetype,
1369 mem_ref_offset (q)));
1371 else
1373 exps[i][j] = q;
1374 offs[i][j++] = off;
1375 break;
1378 if (TREE_CODE (p) != SSA_NAME)
1379 break;
/* Record every intermediate SSA name too; any node common to the two
   walks yields a constant difference below.  */
1380 exps[i][j] = p;
1381 offs[i][j++] = off;
1382 if (j == CPD_ITERATIONS)
1383 break;
1384 stmt = SSA_NAME_DEF_STMT (p);
1385 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
1386 break;
1387 code = gimple_assign_rhs_code (stmt);
1388 if (code == POINTER_PLUS_EXPR)
1390 if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
1391 break;
1392 off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt))
1393 p = gimple_assign_rhs1 (stmt);
1395 else if (code == ADDR_EXPR || code == NOP_EXPR)
1396 p = gimple_assign_rhs1 (stmt);
1397 else
1398 break;
1400 while (1);
1401 cnt[i] = j;
/* If a common base expression was seen on both walks, the pointer
   difference is the difference of the accumulated offsets.  */
1404 for (i = 0; i < cnt[0]; i++)
1405 for (j = 0; j < cnt[1]; j++)
1406 if (exps[0][i] == exps[1][j])
1407 return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);
1409 return NULL_TREE;
1412 /* *GSI_P is a GIMPLE_CALL to a builtin function.
1413 Optimize
1414 memcpy (p, "abcd", 4);
1415 memset (p + 4, ' ', 3);
1416 into
1417 memcpy (p, "abcd ", 7);
1418 call if the latter can be stored by pieces during expansion. */
1420 static bool
1421 simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
1423 gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
1424 tree vuse = gimple_vuse (stmt2);
1425 if (vuse == NULL)
1426 return false;
/* STMT1 is the prior store whose memory state STMT2 consumes; the
   candidates handled below are a mem{,p}cpy call or a single-character
   store that a length-1 memcpy was folded into.  */
1427 stmt1 = SSA_NAME_DEF_STMT (vuse);
1429 switch (DECL_FUNCTION_CODE (callee2))
1431 case BUILT_IN_MEMSET:
1432 if (gimple_call_num_args (stmt2) != 3
1433 || gimple_call_lhs (stmt2)
1434 || CHAR_BIT != 8
1435 || BITS_PER_UNIT != 8)
1436 break;
1437 else
1439 tree callee1;
1440 tree ptr1, src1, str1, off1, len1, lhs1;
1441 tree ptr2 = gimple_call_arg (stmt2, 0);
1442 tree val2 = gimple_call_arg (stmt2, 1);
1443 tree len2 = gimple_call_arg (stmt2, 2);
1444 tree diff, vdef, new_str_cst;
1445 gimple use_stmt;
1446 unsigned int ptr1_align;
1447 unsigned HOST_WIDE_INT src_len;
1448 char *src_buf;
1449 use_operand_p use_p;
1451 if (!host_integerp (val2, 0)
1452 || !host_integerp (len2, 1))
1453 break;
1454 if (is_gimple_call (stmt1))
1456 /* If first stmt is a call, it needs to be memcpy
1457 or mempcpy, with string literal as second argument and
1458 constant length. */
1459 callee1 = gimple_call_fndecl (stmt1);
1460 if (callee1 == NULL_TREE
1461 || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
1462 || gimple_call_num_args (stmt1) != 3)
1463 break;
1464 if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
1465 && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
1466 break;
1467 ptr1 = gimple_call_arg (stmt1, 0);
1468 src1 = gimple_call_arg (stmt1, 1);
1469 len1 = gimple_call_arg (stmt1, 2);
1470 lhs1 = gimple_call_lhs (stmt1);
1471 if (!host_integerp (len1, 1))
1472 break;
1473 str1 = string_constant (src1, &off1);
1474 if (str1 == NULL_TREE)
1475 break;
/* The copied bytes must lie entirely within the string literal and
   have the expected character mode.  */
1476 if (!host_integerp (off1, 1)
1477 || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
1478 || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
1479 - tree_low_cst (off1, 1)) > 0
1480 || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
1481 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
1482 != TYPE_MODE (char_type_node))
1483 break;
1485 else if (gimple_assign_single_p (stmt1))
1487 /* Otherwise look for length 1 memcpy optimized into
1488 assignment. */
1489 ptr1 = gimple_assign_lhs (stmt1);
1490 src1 = gimple_assign_rhs1 (stmt1);
1491 if (TREE_CODE (ptr1) != MEM_REF
1492 || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
1493 || !host_integerp (src1, 0))
1494 break;
1495 ptr1 = build_fold_addr_expr (ptr1);
1496 callee1 = NULL_TREE;
1497 len1 = size_one_node;
1498 lhs1 = NULL_TREE;
1499 off1 = size_zero_node;
1500 str1 = NULL_TREE;
1502 else
1503 break;
1505 diff = constant_pointer_difference (ptr1, ptr2);
1506 if (diff == NULL && lhs1 != NULL)
1508 diff = constant_pointer_difference (lhs1, ptr2);
/* mempcpy's return value points one past the copied region, so add
   LEN1 when the difference was computed from LHS1.  */
1509 if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1510 && diff != NULL)
1511 diff = size_binop (PLUS_EXPR, diff,
1512 fold_convert (sizetype, len1));
1514 /* If the difference between the second and first destination pointer
1515 is not constant, or is bigger than memcpy length, bail out. */
1516 if (diff == NULL
1517 || !host_integerp (diff, 1)
1518 || tree_int_cst_lt (len1, diff))
1519 break;
1521 /* Use maximum of difference plus memset length and memcpy length
1522 as the new memcpy length, if it is too big, bail out. */
1523 src_len = tree_low_cst (diff, 1);
1524 src_len += tree_low_cst (len2, 1);
1525 if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
1526 src_len = tree_low_cst (len1, 1);
/* Arbitrary cap to keep the combined literal small.  */
1527 if (src_len > 1024)
1528 break;
1530 /* If mempcpy value is used elsewhere, bail out, as mempcpy
1531 with bigger length will return different result. */
1532 if (lhs1 != NULL_TREE
1533 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1534 && (TREE_CODE (lhs1) != SSA_NAME
1535 || !single_imm_use (lhs1, &use_p, &use_stmt)
1536 || use_stmt != stmt2))
1537 break;
1539 /* If anything reads memory in between memcpy and memset
1540 call, the modified memcpy call might change it. */
1541 vdef = gimple_vdef (stmt1);
1542 if (vdef != NULL
1543 && (!single_imm_use (vdef, &use_p, &use_stmt)
1544 || use_stmt != stmt2))
1545 break;
1547 ptr1_align = get_pointer_alignment (ptr1);
1548 /* Construct the new source string literal. */
1549 src_buf = XALLOCAVEC (char, src_len + 1);
1550 if (callee1)
1551 memcpy (src_buf,
1552 TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
1553 tree_low_cst (len1, 1));
1554 else
1555 src_buf[0] = tree_low_cst (src1, 0);
1556 memset (src_buf + tree_low_cst (diff, 1),
1557 tree_low_cst (val2, 0), tree_low_cst (len2, 1));
1558 src_buf[src_len] = '\0';
1559 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1560 handle embedded '\0's. */
1561 if (strlen (src_buf) != src_len)
1562 break;
/* Cost the store-by-pieces decision with STMT2's block profile.  */
1563 rtl_profile_for_bb (gimple_bb (stmt2));
1564 /* If the new memcpy wouldn't be emitted by storing the literal
1565 by pieces, this optimization might enlarge .rodata too much,
1566 as commonly used string literals couldn't be shared any
1567 longer. */
1568 if (!can_store_by_pieces (src_len,
1569 builtin_strncpy_read_str,
1570 src_buf, ptr1_align, false))
1571 break;
1573 new_str_cst = build_string_literal (src_len, src_buf);
1574 if (callee1)
1576 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1577 memset call. */
1578 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1579 gimple_call_set_lhs (stmt1, NULL_TREE);
1580 gimple_call_set_arg (stmt1, 1, new_str_cst);
1581 gimple_call_set_arg (stmt1, 2,
1582 build_int_cst (TREE_TYPE (len1), src_len));
1583 update_stmt (stmt1);
1584 unlink_stmt_vdef (stmt2);
1585 gsi_remove (gsi_p, true);
1586 release_defs (stmt2);
1587 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1588 release_ssa_name (lhs1);
1589 return true;
1591 else
1593 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1594 assignment, remove STMT1 and change memset call into
1595 memcpy call. */
1596 gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
1598 if (!is_gimple_val (ptr1))
1599 ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
1600 true, GSI_SAME_STMT);
1601 gimple_call_set_fndecl (stmt2,
1602 builtin_decl_explicit (BUILT_IN_MEMCPY));
1603 gimple_call_set_arg (stmt2, 0, ptr1);
1604 gimple_call_set_arg (stmt2, 1, new_str_cst);
1605 gimple_call_set_arg (stmt2, 2,
1606 build_int_cst (TREE_TYPE (len2), src_len));
1607 unlink_stmt_vdef (stmt1);
1608 gsi_remove (&gsi, true);
1609 release_defs (stmt1);
1610 update_stmt (stmt2);
/* STMT2 was rewritten (not removed), so the caller should
   reconsider it; hence return false here.  */
1611 return false;
1614 break;
1615 default:
1616 break;
1618 return false;
1621 /* Checks if expression has type of one-bit precision, or is a known
1622 truth-valued expression. */
1623 static bool
1624 truth_valued_ssa_name (tree name)
1626 gimple def;
1627 tree type = TREE_TYPE (name);
1629 if (!INTEGRAL_TYPE_P (type))
1630 return false;
1631 /* Don't check here for BOOLEAN_TYPE as the precision isn't
1632 necessarily one and so ~X is not equal to !X. */
1633 if (TYPE_PRECISION (type) == 1)
1634 return true;
1635 def = SSA_NAME_DEF_STMT (name);
1636 if (is_gimple_assign (def))
1637 return truth_value_p (gimple_assign_rhs_code (def));
1638 return false;
1641 /* Helper routine for simplify_bitwise_binary_1 function.
1642 Return for the SSA name NAME the expression X if it meets condition
1643 NAME = !X. Otherwise return NULL_TREE.
1644 Detected patterns for NAME = !X are:
1645 !X and X == 0 for X with integral type.
1646 X ^ 1, X != 1, or ~X for X with integral type with precision of one. */
1647 static tree
1648 lookup_logical_inverted_value (tree name)
1650 tree op1, op2;
1651 enum tree_code code;
1652 gimple def;
1654 /* If NAME has a non-integral type, or isn't an SSA_NAME, then
1655 return. */
1656 if (TREE_CODE (name) != SSA_NAME
1657 || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
1658 return NULL_TREE;
1659 def = SSA_NAME_DEF_STMT (name);
1660 if (!is_gimple_assign (def))
1661 return NULL_TREE;
1663 code = gimple_assign_rhs_code (def);
1664 op1 = gimple_assign_rhs1 (def);
1665 op2 = NULL_TREE;
1667 /* Get for EQ_EXPR or BIT_XOR_EXPR operation the second operand.
1668 If CODE isn't an EQ_EXPR, BIT_XOR_EXPR, or BIT_NOT_EXPR, then return. */
1669 if (code == EQ_EXPR || code == NE_EXPR
1670 || code == BIT_XOR_EXPR)
1671 op2 = gimple_assign_rhs2 (def);
1673 switch (code)
1675 case BIT_NOT_EXPR:
/* ~X equals !X only when X is truth valued (one significant bit).  */
1676 if (truth_valued_ssa_name (name))
1677 return op1;
1678 break;
1679 case EQ_EXPR:
1680 /* Check if we have X == 0 and X has an integral type. */
1681 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1682 break;
1683 if (integer_zerop (op2))
1684 return op1;
1685 break;
1686 case NE_EXPR:
1687 /* Check if we have X != 1 and X is truth-valued. */
1688 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1689 break;
1690 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1691 return op1;
1692 break;
1693 case BIT_XOR_EXPR:
1694 /* Check if we have X ^ 1 and X is truth valued. */
1695 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1696 return op1;
1697 break;
1698 default:
1699 break;
1702 return NULL_TREE;
1705 /* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
1706 operations CODE, if one operand has the logically inverted
1707 value of the other. */
1708 static tree
1709 simplify_bitwise_binary_1 (enum tree_code code, tree type,
1710 tree arg1, tree arg2)
1712 tree anot;
1714 /* If CODE isn't a bitwise binary operation, return NULL_TREE. */
1715 if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
1716 && code != BIT_XOR_EXPR)
1717 return NULL_TREE;
1719 /* First check if operands ARG1 and ARG2 are equal. If so
1720 return NULL_TREE as this optimization is handled fold_stmt. */
1721 if (arg1 == arg2)
1722 return NULL_TREE;
1723 /* See if we have in arguments logical-not patterns. */
1724 if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
1725 || anot != arg2)
1726 && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
1727 || anot != arg1))
1728 return NULL_TREE;
1730 /* X & !X -> 0. */
1731 if (code == BIT_AND_EXPR)
1732 return fold_convert (type, integer_zero_node);
1733 /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued. */
1734 if (truth_valued_ssa_name (anot))
1735 return fold_convert (type, integer_one_node);
1737 /* ??? Otherwise result is (X != 0 ? X : 1). not handled. */
1738 return NULL_TREE;
1741 /* Given a ssa_name in NAME see if it was defined by an assignment and
1742 set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
1743 to the second operand on the rhs. */
1745 static inline void
1746 defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
1748 gimple def;
1749 enum tree_code code1;
1750 tree arg11;
1751 tree arg21;
1752 tree arg31;
1753 enum gimple_rhs_class grhs_class;
1755 code1 = TREE_CODE (name);
1756 arg11 = name;
1757 arg21 = NULL_TREE;
1758 grhs_class = get_gimple_rhs_class (code1);
1760 if (code1 == SSA_NAME)
1762 def = SSA_NAME_DEF_STMT (name);
1764 if (def && is_gimple_assign (def)
1765 && can_propagate_from (def))
1767 code1 = gimple_assign_rhs_code (def);
1768 arg11 = gimple_assign_rhs1 (def);
1769 arg21 = gimple_assign_rhs2 (def);
1770 arg31 = gimple_assign_rhs2 (def);
1773 else if (grhs_class == GIMPLE_TERNARY_RHS
1774 || GIMPLE_BINARY_RHS
1775 || GIMPLE_UNARY_RHS
1776 || GIMPLE_SINGLE_RHS)
1777 extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);
1779 *code = code1;
1780 *arg1 = arg11;
1781 if (arg2)
1782 *arg2 = arg21;
1783 /* Ignore arg3 currently. */
1786 /* Simplify bitwise binary operations.
1787 Return true if a transformation applied, otherwise return false. */
1789 static bool
1790 simplify_bitwise_binary (gimple_stmt_iterator *gsi)
1792 gimple stmt = gsi_stmt (*gsi);
1793 tree arg1 = gimple_assign_rhs1 (stmt);
1794 tree arg2 = gimple_assign_rhs2 (stmt);
1795 enum tree_code code = gimple_assign_rhs_code (stmt);
1796 tree res;
1797 tree def1_arg1, def1_arg2, def2_arg1, def2_arg2;
1798 enum tree_code def1_code, def2_code;
1800 defcodefor_name (arg1, &def1_code, &def1_arg1, &def1_arg2);
1801 defcodefor_name (arg2, &def2_code, &def2_arg1, &def2_arg2);
1803 /* Try to fold (type) X op CST -> (type) (X op ((type-of-X) CST)). */
1804 if (TREE_CODE (arg2) == INTEGER_CST
1805 && CONVERT_EXPR_CODE_P (def1_code)
1806 && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
1807 && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
1809 gimple newop;
1810 tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
1811 newop =
1812 gimple_build_assign_with_ops (code, tem, def1_arg1,
1813 fold_convert_loc (gimple_location (stmt),
1814 TREE_TYPE (def1_arg1),
1815 arg2));
1816 gimple_set_location (newop, gimple_location (stmt));
1817 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1818 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1819 tem, NULL_TREE, NULL_TREE);
1820 update_stmt (gsi_stmt (*gsi));
1821 return true;
1824 /* For bitwise binary operations apply operand conversions to the
1825 binary operation result instead of to the operands. This allows
1826 to combine successive conversions and bitwise binary operations. */
1827 if (CONVERT_EXPR_CODE_P (def1_code)
1828 && CONVERT_EXPR_CODE_P (def2_code)
1829 && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
1830 /* Make sure that the conversion widens the operands, or has same
1831 precision, or that it changes the operation to a bitfield
1832 precision. */
1833 && ((TYPE_PRECISION (TREE_TYPE (def1_arg1))
1834 <= TYPE_PRECISION (TREE_TYPE (arg1)))
1835 || (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (arg1)))
1836 != MODE_INT)
1837 || (TYPE_PRECISION (TREE_TYPE (arg1))
1838 != GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg1))))))
1840 gimple newop;
1841 tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
1842 newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
1843 gimple_set_location (newop, gimple_location (stmt));
1844 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1845 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1846 tem, NULL_TREE, NULL_TREE);
1847 update_stmt (gsi_stmt (*gsi));
1848 return true;
1852 /* Simplify (A & B) OP0 (C & B) to (A OP0 C) & B. */
1853 if (def1_code == def2_code
1854 && def1_code == BIT_AND_EXPR
1855 && operand_equal_for_phi_arg_p (def1_arg2,
1856 def2_arg2))
1858 tree b = def1_arg2;
1859 tree a = def1_arg1;
1860 tree c = def2_arg1;
1861 tree inner = fold_build2 (code, TREE_TYPE (arg2), a, c);
1862 /* If A OP0 C (this usually means C is the same as A) is 0
1863 then fold it down correctly. */
1864 if (integer_zerop (inner))
1866 gimple_assign_set_rhs_from_tree (gsi, inner);
1867 update_stmt (stmt);
1868 return true;
1870 /* If A OP0 C (this usually means C is the same as A) is a ssa_name
1871 then fold it down correctly. */
1872 else if (TREE_CODE (inner) == SSA_NAME)
1874 tree outer = fold_build2 (def1_code, TREE_TYPE (inner),
1875 inner, b);
1876 gimple_assign_set_rhs_from_tree (gsi, outer);
1877 update_stmt (stmt);
1878 return true;
1880 else
1882 gimple newop;
1883 tree tem;
1884 tem = make_ssa_name (TREE_TYPE (arg2), NULL);
1885 newop = gimple_build_assign_with_ops (code, tem, a, c);
1886 gimple_set_location (newop, gimple_location (stmt));
1887 /* Make sure to re-process the new stmt as it's walking upwards. */
1888 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
1889 gimple_assign_set_rhs1 (stmt, tem);
1890 gimple_assign_set_rhs2 (stmt, b);
1891 gimple_assign_set_rhs_code (stmt, def1_code);
1892 update_stmt (stmt);
1893 return true;
1897 /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2). */
1898 if (code == BIT_AND_EXPR
1899 && def1_code == BIT_IOR_EXPR
1900 && TREE_CODE (arg2) == INTEGER_CST
1901 && TREE_CODE (def1_arg2) == INTEGER_CST)
1903 tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
1904 arg2, def1_arg2);
1905 tree tem;
1906 gimple newop;
/* When CST1 & CST2 is zero the IOR contributes nothing; the result
   reduces to a & CST2.  */
1907 if (integer_zerop (cst))
1909 gimple_assign_set_rhs1 (stmt, def1_arg1);
1910 update_stmt (stmt);
1911 return true;
1913 tem = make_ssa_name (TREE_TYPE (arg2), NULL);
1914 newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
1915 tem, def1_arg1, arg2);
1916 gimple_set_location (newop, gimple_location (stmt));
1917 /* Make sure to re-process the new stmt as it's walking upwards. */
1918 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
1919 gimple_assign_set_rhs1 (stmt, tem);
1920 gimple_assign_set_rhs2 (stmt, cst);
1921 gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
1922 update_stmt (stmt);
1923 return true;
1926 /* Combine successive equal operations with constants. */
1927 if ((code == BIT_AND_EXPR
1928 || code == BIT_IOR_EXPR
1929 || code == BIT_XOR_EXPR)
1930 && def1_code == code
1931 && TREE_CODE (arg2) == INTEGER_CST
1932 && TREE_CODE (def1_arg2) == INTEGER_CST)
1934 tree cst = fold_build2 (code, TREE_TYPE (arg2),
1935 arg2, def1_arg2);
1936 gimple_assign_set_rhs1 (stmt, def1_arg1);
1937 gimple_assign_set_rhs2 (stmt, cst);
1938 update_stmt (stmt);
1939 return true;
1942 /* Canonicalize X ^ ~0 to ~X. */
1943 if (code == BIT_XOR_EXPR
1944 && TREE_CODE (arg2) == INTEGER_CST
1945 && integer_all_onesp (arg2))
1947 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
1948 gcc_assert (gsi_stmt (*gsi) == stmt);
1949 update_stmt (stmt);
1950 return true;
1953 /* Try simple folding for X op !X, and X op X. */
1954 res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
1955 if (res != NULL_TREE)
1957 gimple_assign_set_rhs_from_tree (gsi, res);
1958 update_stmt (gsi_stmt (*gsi));
1959 return true;
1962 if (code == BIT_AND_EXPR || code == BIT_IOR_EXPR)
/* OCODE is the dual operation: absorption laws pair AND with IOR.  */
1964 enum tree_code ocode = code == BIT_AND_EXPR ? BIT_IOR_EXPR : BIT_AND_EXPR;
1965 if (def1_code == ocode)
1967 tree x = arg2;
1968 enum tree_code coden;
1969 tree a1, a2;
1970 /* ( X | Y) & X -> X */
1971 /* ( X & Y) | X -> X */
1972 if (x == def1_arg1
1973 || x == def1_arg2)
1975 gimple_assign_set_rhs_from_tree (gsi, x);
1976 update_stmt (gsi_stmt (*gsi));
1977 return true;
1980 defcodefor_name (def1_arg1, &coden, &a1, &a2);
1981 /* (~X | Y) & X -> X & Y */
1982 /* (~X & Y) | X -> X | Y */
1983 if (coden == BIT_NOT_EXPR && a1 == x)
1985 gimple_assign_set_rhs_with_ops (gsi, code,
1986 x, def1_arg2);
1987 gcc_assert (gsi_stmt (*gsi) == stmt);
1988 update_stmt (stmt);
1989 return true;
1991 defcodefor_name (def1_arg2, &coden, &a1, &a2);
1992 /* (Y | ~X) & X -> X & Y */
1993 /* (Y & ~X) | X -> X | Y */
1994 if (coden == BIT_NOT_EXPR && a1 == x)
1996 gimple_assign_set_rhs_with_ops (gsi, code,
1997 x, def1_arg1);
1998 gcc_assert (gsi_stmt (*gsi) == stmt);
1999 update_stmt (stmt);
2000 return true;
2003 if (def2_code == ocode)
2005 enum tree_code coden;
2006 tree a1;
2007 tree x = arg1;
2008 /* X & ( X | Y) -> X */
2009 /* X | ( X & Y) -> X */
2010 if (x == def2_arg1
2011 || x == def2_arg2)
2013 gimple_assign_set_rhs_from_tree (gsi, x);
2014 update_stmt (gsi_stmt (*gsi));
2015 return true;
2017 defcodefor_name (def2_arg1, &coden, &a1, NULL);
2018 /* X & (~X | Y) -> X & Y */
2019 /* X | (~X & Y) -> X | Y */
2020 if (coden == BIT_NOT_EXPR && a1 == x)
2022 gimple_assign_set_rhs_with_ops (gsi, code,
2023 x, def2_arg2);
2024 gcc_assert (gsi_stmt (*gsi) == stmt);
2025 update_stmt (stmt);
2026 return true;
2028 defcodefor_name (def2_arg2, &coden, &a1, NULL);
2029 /* X & (Y | ~X) -> X & Y */
2030 /* X | (Y & ~X) -> X | Y */
2031 if (coden == BIT_NOT_EXPR && a1 == x)
2033 gimple_assign_set_rhs_with_ops (gsi, code,
2034 x, def2_arg1);
2035 gcc_assert (gsi_stmt (*gsi) == stmt);
2036 update_stmt (stmt);
2037 return true;
2042 return false;
2046 /* Perform re-associations of the plus or minus statement STMT that are
2047 always permitted. Returns true if the CFG was changed. */
2049 static bool
2050 associate_plusminus (gimple_stmt_iterator *gsi)
2052 gimple stmt = gsi_stmt (*gsi);
2053 tree rhs1 = gimple_assign_rhs1 (stmt);
2054 tree rhs2 = gimple_assign_rhs2 (stmt);
2055 enum tree_code code = gimple_assign_rhs_code (stmt);
2056 bool changed;
2058 /* We can't reassociate at all for saturating types. */
2059 if (TYPE_SATURATING (TREE_TYPE (rhs1)))
2060 return false;
2062 /* First contract negates. */
/* Loop until no further negate could be contracted; eliminating one
   negate may expose another NEGATE_EXPR definition to contract.  */
2065 changed = false;
2067 /* A +- (-B) -> A -+ B. */
2068 if (TREE_CODE (rhs2) == SSA_NAME)
2070 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2071 if (is_gimple_assign (def_stmt)
2072 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2073 && can_propagate_from (def_stmt))
2075 code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
2076 gimple_assign_set_rhs_code (stmt, code);
2077 rhs2 = gimple_assign_rhs1 (def_stmt);
2078 gimple_assign_set_rhs2 (stmt, rhs2);
2079 gimple_set_modified (stmt, true);
2080 changed = true;
2084 /* (-A) + B -> B - A. */
2085 if (TREE_CODE (rhs1) == SSA_NAME
2086 && code == PLUS_EXPR)
2088 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
2089 if (is_gimple_assign (def_stmt)
2090 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2091 && can_propagate_from (def_stmt))
2093 code = MINUS_EXPR;
2094 gimple_assign_set_rhs_code (stmt, code);
2095 rhs1 = rhs2;
2096 gimple_assign_set_rhs1 (stmt, rhs1);
2097 rhs2 = gimple_assign_rhs1 (def_stmt);
2098 gimple_assign_set_rhs2 (stmt, rhs2);
2099 gimple_set_modified (stmt, true);
2100 changed = true;
2104 while (changed);
2106 /* We can't reassociate floating-point or fixed-point plus or minus
2107 because of saturation to +-Inf. */
2108 if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
2109 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
2110 goto out;
2112 /* Second match patterns that allow contracting a plus-minus pair
2113 irrespective of overflow issues.
2115 (A +- B) - A -> +- B
2116 (A +- B) -+ B -> A
2117 (CST +- A) +- CST -> CST +- A
2118 (A + CST) +- CST -> A + CST
2119 ~A + A -> -1
2120 ~A + 1 -> -A
2121 A - (A +- B) -> -+ B
2122 A +- (B +- A) -> +- B
2123 CST +- (CST +- A) -> CST +- A
2124 CST +- (A +- CST) -> CST +- A
2125 A + ~A -> -1
2127 via commutating the addition and contracting operations to zero
2128 by reassociation. */
/* First look at the definition of RHS1 ...  */
2130 if (TREE_CODE (rhs1) == SSA_NAME)
2132 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
2133 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2135 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2136 if (def_code == PLUS_EXPR
2137 || def_code == MINUS_EXPR)
2139 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2140 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2141 if (operand_equal_p (def_rhs1, rhs2, 0)
2142 && code == MINUS_EXPR)
2144 /* (A +- B) - A -> +- B. */
2145 code = ((def_code == PLUS_EXPR)
2146 ? TREE_CODE (def_rhs2) : NEGATE_EXPR)
2147 rhs1 = def_rhs2;
2148 rhs2 = NULL_TREE;
2149 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2150 gcc_assert (gsi_stmt (*gsi) == stmt);
2151 gimple_set_modified (stmt, true);
2153 else if (operand_equal_p (def_rhs2, rhs2, 0)
2154 && code != def_code)
2156 /* (A +- B) -+ B -> A. */
2157 code = TREE_CODE (def_rhs1);
2158 rhs1 = def_rhs1;
2159 rhs2 = NULL_TREE;
2160 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2161 gcc_assert (gsi_stmt (*gsi) == stmt);
2162 gimple_set_modified (stmt, true);
2164 else if (TREE_CODE (rhs2) == INTEGER_CST
2165 && TREE_CODE (def_rhs1) == INTEGER_CST)
2167 /* (CST +- A) +- CST -> CST +- A. */
2168 tree cst = fold_binary (code, TREE_TYPE (rhs1),
2169 def_rhs1, rhs2);
/* Only apply if the folded constant did not overflow.  */
2170 if (cst && !TREE_OVERFLOW (cst))
2172 code = def_code;
2173 gimple_assign_set_rhs_code (stmt, code);
2174 rhs1 = cst;
2175 gimple_assign_set_rhs1 (stmt, rhs1);
2176 rhs2 = def_rhs2;
2177 gimple_assign_set_rhs2 (stmt, rhs2);
2178 gimple_set_modified (stmt, true);
2181 else if (TREE_CODE (rhs2) == INTEGER_CST
2182 && TREE_CODE (def_rhs2) == INTEGER_CST
2183 && def_code == PLUS_EXPR)
2185 /* (A + CST) +- CST -> A + CST. */
2186 tree cst = fold_binary (code, TREE_TYPE (rhs1),
2187 def_rhs2, rhs2);
2188 if (cst && !TREE_OVERFLOW (cst))
2190 code = PLUS_EXPR;
2191 gimple_assign_set_rhs_code (stmt, code);
2192 rhs1 = def_rhs1;
2193 gimple_assign_set_rhs1 (stmt, rhs1);
2194 rhs2 = cst;
2195 gimple_assign_set_rhs2 (stmt, rhs2);
2196 gimple_set_modified (stmt, true);
2200 else if (def_code == BIT_NOT_EXPR
2201 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
2203 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2204 if (code == PLUS_EXPR
2205 && operand_equal_p (def_rhs1, rhs2, 0))
2207 /* ~A + A -> -1. */
/* Using INTEGER_CST as the RHS code turns the statement into a
   plain constant assignment with the single operand RHS1.  */
2208 code = INTEGER_CST;
2209 rhs1 = build_int_cst_type (TREE_TYPE (rhs2), -1);
2210 rhs2 = NULL_TREE;
2211 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2212 gcc_assert (gsi_stmt (*gsi) == stmt);
2213 gimple_set_modified (stmt, true);
2215 else if (code == PLUS_EXPR
2216 && integer_onep (rhs1))
2218 /* ~A + 1 -> -A. */
2219 code = NEGATE_EXPR;
2220 rhs1 = def_rhs1;
2221 rhs2 = NULL_TREE;
2222 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2223 gcc_assert (gsi_stmt (*gsi) == stmt);
2224 gimple_set_modified (stmt, true);
/* ... then look at the definition of RHS2, mirroring the patterns
   above with the operands swapped.  RHS2 may have become NULL_TREE
   when a pattern above rewrote the statement to a unary form.  */
2230 if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
2232 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2233 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2235 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2236 if (def_code == PLUS_EXPR
2237 || def_code == MINUS_EXPR)
2239 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2240 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2241 if (operand_equal_p (def_rhs1, rhs1, 0)
2242 && code == MINUS_EXPR)
2244 /* A - (A +- B) -> -+ B. */
2245 code = ((def_code == PLUS_EXPR)
2246 ? NEGATE_EXPR : TREE_CODE (def_rhs2));
2247 rhs1 = def_rhs2;
2248 rhs2 = NULL_TREE;
2249 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2250 gcc_assert (gsi_stmt (*gsi) == stmt);
2251 gimple_set_modified (stmt, true);
2253 else if (operand_equal_p (def_rhs2, rhs1, 0)
2254 && code != def_code)
2256 /* A +- (B +- A) -> +- B. */
2257 code = ((code == PLUS_EXPR)
2258 ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
2259 rhs1 = def_rhs1;
2260 rhs2 = NULL_TREE;
2261 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2262 gcc_assert (gsi_stmt (*gsi) == stmt);
2263 gimple_set_modified (stmt, true);
2265 else if (TREE_CODE (rhs1) == INTEGER_CST
2266 && TREE_CODE (def_rhs1) == INTEGER_CST)
2268 /* CST +- (CST +- A) -> CST +- A. */
2269 tree cst = fold_binary (code, TREE_TYPE (rhs2),
2270 rhs1, def_rhs1);
2271 if (cst && !TREE_OVERFLOW (cst))
2273 code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
2274 gimple_assign_set_rhs_code (stmt, code);
2275 rhs1 = cst;
2276 gimple_assign_set_rhs1 (stmt, rhs1);
2277 rhs2 = def_rhs2;
2278 gimple_assign_set_rhs2 (stmt, rhs2);
2279 gimple_set_modified (stmt, true);
2282 else if (TREE_CODE (rhs1) == INTEGER_CST
2283 && TREE_CODE (def_rhs2) == INTEGER_CST)
2285 /* CST +- (A +- CST) -> CST +- A. */
2286 tree cst = fold_binary (def_code == code
2287 ? PLUS_EXPR : MINUS_EXPR,
2288 TREE_TYPE (rhs2),
2289 rhs1, def_rhs2);
2290 if (cst && !TREE_OVERFLOW (cst))
2292 rhs1 = cst;
2293 gimple_assign_set_rhs1 (stmt, rhs1);
2294 rhs2 = def_rhs1;
2295 gimple_assign_set_rhs2 (stmt, rhs2);
2296 gimple_set_modified (stmt, true);
2300 else if (def_code == BIT_NOT_EXPR
2301 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
2303 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2304 if (code == PLUS_EXPR
2305 && operand_equal_p (def_rhs1, rhs1, 0))
2307 /* A + ~A -> -1. */
2308 code = INTEGER_CST;
2309 rhs1 = build_int_cst_type (TREE_TYPE (rhs1), -1);
2310 rhs2 = NULL_TREE;
2311 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2312 gcc_assert (gsi_stmt (*gsi) == stmt);
2313 gimple_set_modified (stmt, true);
2319 out:
/* If any pattern above modified STMT, fold it in place and update it.
   Simplification may have made an EH side effect dead; purging the
   now-dead EH edges changes the CFG, which callers must know about.  */
2320 if (gimple_modified_p (stmt))
2322 fold_stmt_inplace (gsi);
2323 update_stmt (stmt);
2324 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
2325 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2326 return true;
2329 return false;
2332 /* Associate operands of a POINTER_PLUS_EXPR assignment at *GSI. Returns
2333 true if anything changed, false otherwise. */
2335 static bool
2336 associate_pointerplus (gimple_stmt_iterator *gsi)
2338 gimple stmt = gsi_stmt (*gsi);
2339 gimple def_stmt;
2340 tree ptr, rhs, algn;
2342 /* Pattern match
2343 tem = (sizetype) ptr;
2344 tem = tem & algn;
2345 tem = -tem;
2346 ... = ptr p+ tem;
2347 and produce the simpler and easier to analyze with respect to alignment
2348 ... = ptr & ~algn; */
2349 ptr = gimple_assign_rhs1 (stmt);
2350 rhs = gimple_assign_rhs2 (stmt);
2351 if (TREE_CODE (rhs) != SSA_NAME)
2352 return false;
/* The offset must be defined as a negation: tem = -tem2.  */
2353 def_stmt = SSA_NAME_DEF_STMT (rhs);
2354 if (!is_gimple_assign (def_stmt)
2355 || gimple_assign_rhs_code (def_stmt) != NEGATE_EXPR)
2356 return false;
2357 rhs = gimple_assign_rhs1 (def_stmt);
2358 if (TREE_CODE (rhs) != SSA_NAME)
2359 return false;
/* The negated value must be a masking with a constant:
   tem2 = tem3 & algn.  */
2360 def_stmt = SSA_NAME_DEF_STMT (rhs);
2361 if (!is_gimple_assign (def_stmt)
2362 || gimple_assign_rhs_code (def_stmt) != BIT_AND_EXPR)
2363 return false;
2364 rhs = gimple_assign_rhs1 (def_stmt);
2365 algn = gimple_assign_rhs2 (def_stmt);
2366 if (TREE_CODE (rhs) != SSA_NAME
2367 || TREE_CODE (algn) != INTEGER_CST)
2368 return false;
/* Finally, the masked value must be a conversion of PTR itself:
   tem3 = (sizetype) ptr.  */
2369 def_stmt = SSA_NAME_DEF_STMT (rhs);
2370 if (!is_gimple_assign (def_stmt)
2371 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
2372 return false;
2373 if (gimple_assign_rhs1 (def_stmt) != ptr)
2374 return false;
/* Build ~algn in the pointer's type and rewrite the statement into
   the equivalent, alignment-friendly ptr & ~algn form.  */
2376 algn = double_int_to_tree (TREE_TYPE (ptr),
2377 double_int_not (tree_to_double_int (algn)));
2378 gimple_assign_set_rhs_with_ops (gsi, BIT_AND_EXPR, ptr, algn);
2379 fold_stmt_inplace (gsi);
2380 update_stmt (stmt);
2382 return true;
2385 /* Combine two conversions in a row for the second conversion at *GSI.
2386 Returns 1 if there were any changes made, 2 if cfg-cleanup needs to
2387 run. Else it returns 0. */
2389 static int
2390 combine_conversions (gimple_stmt_iterator *gsi)
2392 gimple stmt = gsi_stmt (*gsi);
2393 gimple def_stmt;
2394 tree op0, lhs;
2395 enum tree_code code = gimple_assign_rhs_code (stmt);
2396 enum tree_code code2;
2398 gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
2399 || code == FLOAT_EXPR
2400 || code == FIX_TRUNC_EXPR);
/* A conversion to an identical type is a no-op; turn the statement
   into a plain copy.  */
2402 lhs = gimple_assign_lhs (stmt);
2403 op0 = gimple_assign_rhs1 (stmt);
2404 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
2406 gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
2407 return 1;
2410 if (TREE_CODE (op0) != SSA_NAME)
2411 return 0;
2413 def_stmt = SSA_NAME_DEF_STMT (op0);
2414 if (!is_gimple_assign (def_stmt))
2415 return 0;
2417 code2 = gimple_assign_rhs_code (def_stmt);
/* Only a conversion of a conversion can be combined.  Gather the
   integer/pointer/float/vector classification, precision and
   signedness of the innermost, intermediate and final types.  */
2419 if (CONVERT_EXPR_CODE_P (code2) || code2 == FLOAT_EXPR)
2421 tree defop0 = gimple_assign_rhs1 (def_stmt);
2422 tree type = TREE_TYPE (lhs);
2423 tree inside_type = TREE_TYPE (defop0);
2424 tree inter_type = TREE_TYPE (op0);
2425 int inside_int = INTEGRAL_TYPE_P (inside_type);
2426 int inside_ptr = POINTER_TYPE_P (inside_type);
2427 int inside_float = FLOAT_TYPE_P (inside_type);
2428 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
2429 unsigned int inside_prec = TYPE_PRECISION (inside_type);
2430 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
2431 int inter_int = INTEGRAL_TYPE_P (inter_type);
2432 int inter_ptr = POINTER_TYPE_P (inter_type);
2433 int inter_float = FLOAT_TYPE_P (inter_type);
2434 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
2435 unsigned int inter_prec = TYPE_PRECISION (inter_type);
2436 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
2437 int final_int = INTEGRAL_TYPE_P (type);
2438 int final_ptr = POINTER_TYPE_P (type);
2439 int final_float = FLOAT_TYPE_P (type);
2440 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
2441 unsigned int final_prec = TYPE_PRECISION (type);
2442 int final_unsignedp = TYPE_UNSIGNED (type);
2444 /* Don't propagate ssa names that occur in abnormal phis. */
2445 if (TREE_CODE (defop0) == SSA_NAME
2446 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defop0))
2447 return 0;
2449 /* In addition to the cases of two conversions in a row
2450 handled below, if we are converting something to its own
2451 type via an object of identical or wider precision, neither
2452 conversion is needed. */
2453 if (useless_type_conversion_p (type, inside_type)
2454 && (((inter_int || inter_ptr) && final_int)
2455 || (inter_float && final_float))
2456 && inter_prec >= final_prec)
2458 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2459 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2460 update_stmt (stmt);
2461 return remove_prop_source_from_use (op0) ? 2 : 1;
2464 /* Likewise, if the intermediate and initial types are either both
2465 float or both integer, we don't need the middle conversion if the
2466 former is wider than the latter and doesn't change the signedness
2467 (for integers). Avoid this if the final type is a pointer since
2468 then we sometimes need the middle conversion. Likewise if the
2469 final type has a precision not equal to the size of its mode. */
2470 if (((inter_int && inside_int)
2471 || (inter_float && inside_float)
2472 || (inter_vec && inside_vec))
2473 && inter_prec >= inside_prec
2474 && (inter_float || inter_vec
2475 || inter_unsignedp == inside_unsignedp)
2476 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
2477 && TYPE_MODE (type) == TYPE_MODE (inter_type))
2478 && ! final_ptr
2479 && (! final_vec || inter_prec == inside_prec))
2481 gimple_assign_set_rhs1 (stmt, defop0);
2482 update_stmt (stmt);
2483 return remove_prop_source_from_use (op0) ? 2 : 1;
2486 /* If we have a sign-extension of a zero-extended value, we can
2487 replace that by a single zero-extension. Likewise if the
2488 final conversion does not change precision we can drop the
2489 intermediate conversion. */
2490 if (inside_int && inter_int && final_int
2491 && ((inside_prec < inter_prec && inter_prec < final_prec
2492 && inside_unsignedp && !inter_unsignedp)
2493 || final_prec == inter_prec))
2495 gimple_assign_set_rhs1 (stmt, defop0);
2496 update_stmt (stmt);
2497 return remove_prop_source_from_use (op0) ? 2 : 1;
2500 /* Two conversions in a row are not needed unless:
2501 - some conversion is floating-point (overstrict for now), or
2502 - some conversion is a vector (overstrict for now), or
2503 - the intermediate type is narrower than both initial and
2504 final, or
2505 - the intermediate type and innermost type differ in signedness,
2506 and the outermost type is wider than the intermediate, or
2507 - the initial type is a pointer type and the precisions of the
2508 intermediate and final types differ, or
2509 - the final type is a pointer type and the precisions of the
2510 initial and intermediate types differ. */
2511 if (! inside_float && ! inter_float && ! final_float
2512 && ! inside_vec && ! inter_vec && ! final_vec
2513 && (inter_prec >= inside_prec || inter_prec >= final_prec)
2514 && ! (inside_int && inter_int
2515 && inter_unsignedp != inside_unsignedp
2516 && inter_prec < final_prec)
2517 && ((inter_unsignedp && inter_prec > inside_prec)
2518 == (final_unsignedp && final_prec > inter_prec))
2519 && ! (inside_ptr && inter_prec != final_prec)
2520 && ! (final_ptr && inside_prec != inter_prec)
2521 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
2522 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
2524 gimple_assign_set_rhs1 (stmt, defop0);
2525 update_stmt (stmt);
2526 return remove_prop_source_from_use (op0) ? 2 : 1;
2529 /* A truncation to an unsigned type should be canonicalized as
2530 bitwise and of a mask. */
2531 if (final_int && inter_int && inside_int
2532 && final_prec == inside_prec
2533 && final_prec > inter_prec
2534 && inter_unsignedp)
/* Mask DEFOP0 down to the intermediate precision; emit a helper
   conversion statement if the innermost and final types differ.  */
2536 tree tem;
2537 tem = fold_build2 (BIT_AND_EXPR, inside_type,
2538 defop0,
2539 double_int_to_tree
2540 (inside_type, double_int_mask (inter_prec)));
2541 if (!useless_type_conversion_p (type, inside_type))
2543 tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
2544 GSI_SAME_STMT);
2545 gimple_assign_set_rhs1 (stmt, tem);
2547 else
2548 gimple_assign_set_rhs_from_tree (gsi, tem);
2549 update_stmt (gsi_stmt (*gsi));
2550 return 1;
2553 /* If we are converting an integer to a floating-point that can
2554 represent it exactly and back to an integer, we can skip the
2555 floating-point conversion. */
2556 if (inside_int && inter_float && final_int &&
2557 (unsigned) significand_size (TYPE_MODE (inter_type))
2558 >= inside_prec - !inside_unsignedp)
2560 if (useless_type_conversion_p (type, inside_type))
2562 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2563 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2564 update_stmt (stmt);
2565 return remove_prop_source_from_use (op0) ? 2 : 1;
2567 else
2569 gimple_assign_set_rhs1 (stmt, defop0);
2570 gimple_assign_set_rhs_code (stmt, CONVERT_EXPR);
2571 update_stmt (stmt);
2572 return remove_prop_source_from_use (op0) ? 2 : 1;
2577 return 0;
2580 /* Determine whether applying the 2 permutations (mask1 then mask2)
2581 gives back one of the inputs.  Return 1 if the combination selects
   exactly the first input, 2 if it selects exactly the second input,
   and 0 otherwise. */
2583 static int
2584 is_combined_permutation_identity (tree mask1, tree mask2)
2586 tree mask;
2587 unsigned int nelts, i, j;
2588 bool maybe_identity1 = true;
2589 bool maybe_identity2 = true;
/* Compose the two constant masks by permuting MASK1 with itself
   under MASK2; folding must succeed since both are VECTOR_CSTs.  */
2591 gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
2592 && TREE_CODE (mask2) == VECTOR_CST);
2593 mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
2594 gcc_assert (TREE_CODE (mask) == VECTOR_CST);
/* Element I selecting I means "first input", I + NELTS means
   "second input"; any other selection rules both out.  */
2596 nelts = VECTOR_CST_NELTS (mask);
2597 for (i = 0; i < nelts; i++)
2599 tree val = VECTOR_CST_ELT (mask, i);
2600 gcc_assert (TREE_CODE (val) == INTEGER_CST);
/* Only the low bits up to 2 * NELTS of a mask element matter.  */
2601 j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
2602 if (j == i)
2603 maybe_identity2 = false;
2604 else if (j == i + nelts)
2605 maybe_identity1 = false;
2606 else
2607 return 0;
2609 return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
2612 /* Combine two shuffles in a row. Returns 1 if there were any changes
2613 made, 2 if cfg-cleanup needs to run. Else it returns 0. */
2615 static int
2616 simplify_permutation (gimple_stmt_iterator *gsi)
2618 gimple stmt = gsi_stmt (*gsi);
2619 gimple def_stmt;
2620 tree op0, op1, op2, op3;
2621 enum tree_code code = gimple_assign_rhs_code (stmt);
2622 enum tree_code code2;
2624 gcc_checking_assert (code == VEC_PERM_EXPR);
2626 op0 = gimple_assign_rhs1 (stmt);
2627 op1 = gimple_assign_rhs2 (stmt);
2628 op2 = gimple_assign_rhs3 (stmt);
2630 if (TREE_CODE (op0) != SSA_NAME)
2631 return 0;
2633 if (TREE_CODE (op2) != VECTOR_CST)
2634 return 0;
2636 if (op0 != op1)
2637 return 0;
2639 def_stmt = SSA_NAME_DEF_STMT (op0);
2640 if (!def_stmt || !is_gimple_assign (def_stmt)
2641 || !can_propagate_from (def_stmt))
2642 return 0;
2644 code2 = gimple_assign_rhs_code (def_stmt);
2646 /* Two consecutive shuffles. */
2647 if (code2 == VEC_PERM_EXPR)
2649 tree orig;
2650 int ident;
2651 op3 = gimple_assign_rhs3 (def_stmt);
2652 if (TREE_CODE (op3) != VECTOR_CST)
2653 return 0;
2654 ident = is_combined_permutation_identity (op3, op2);
2655 if (!ident)
2656 return 0;
2657 orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
2658 : gimple_assign_rhs2 (def_stmt);
2659 gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
2660 gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
2661 gimple_set_num_ops (stmt, 2);
2662 update_stmt (stmt);
2663 return remove_prop_source_from_use (op0) ? 2 : 1;
2666 return false;
2669 /* Main entry point for the forward propagation and statement combine
2670 optimizer. */
2672 static unsigned int
2673 ssa_forward_propagate_and_combine (void)
2675 basic_block bb;
2676 unsigned int todoflags = 0;
/* File-scope flag; set by the simplifiers below when an EH or other
   CFG edge was removed, checked at the end to request cleanup.  */
2678 cfg_changed = false;
2680 FOR_EACH_BB (bb)
2682 gimple_stmt_iterator gsi;
2684 /* Apply forward propagation to all stmts in the basic-block.
2685 Note we update GSI within the loop as necessary. */
2686 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
2688 gimple stmt = gsi_stmt (gsi);
2689 tree lhs, rhs;
2690 enum tree_code code;
2692 if (!is_gimple_assign (stmt))
2694 gsi_next (&gsi);
2695 continue;
/* Dead SSA defs are not worth propagating from.  */
2698 lhs = gimple_assign_lhs (stmt);
2699 rhs = gimple_assign_rhs1 (stmt);
2700 code = gimple_assign_rhs_code (stmt);
2701 if (TREE_CODE (lhs) != SSA_NAME
2702 || has_zero_uses (lhs))
2704 gsi_next (&gsi);
2705 continue;
2708 /* If this statement sets an SSA_NAME to an address,
2709 try to propagate the address into the uses of the SSA_NAME. */
2710 if (code == ADDR_EXPR
2711 /* Handle pointer conversions on invariant addresses
2712 as well, as this is valid gimple. */
2713 || (CONVERT_EXPR_CODE_P (code)
2714 && TREE_CODE (rhs) == ADDR_EXPR
2715 && POINTER_TYPE_P (TREE_TYPE (lhs))))
2717 tree base = get_base_address (TREE_OPERAND (rhs, 0));
2718 if ((!base
2719 || !DECL_P (base)
2720 || decl_address_invariant_p (base))
2721 && !stmt_references_abnormal_ssa_name (stmt)
2722 && forward_propagate_addr_expr (lhs, rhs))
/* All uses were replaced; the def is dead, remove it.  */
2724 release_defs (stmt);
2725 todoflags |= TODO_remove_unused_locals;
2726 gsi_remove (&gsi, true);
2728 else
2729 gsi_next (&gsi);
2731 else if (code == POINTER_PLUS_EXPR)
2733 tree off = gimple_assign_rhs2 (stmt);
2734 if (TREE_CODE (off) == INTEGER_CST
2735 && can_propagate_from (stmt)
2736 && !simple_iv_increment_p (stmt)
2737 /* ??? Better adjust the interface to that function
2738 instead of building new trees here. */
2739 && forward_propagate_addr_expr
2740 (lhs,
2741 build1_loc (gimple_location (stmt),
2742 ADDR_EXPR, TREE_TYPE (rhs),
2743 fold_build2 (MEM_REF,
2744 TREE_TYPE (TREE_TYPE (rhs)),
2745 rhs,
2746 fold_convert (ptr_type_node,
2747 off)))))
2749 release_defs (stmt);
2750 todoflags |= TODO_remove_unused_locals;
2751 gsi_remove (&gsi, true);
2753 else if (is_gimple_min_invariant (rhs))
2755 /* Make sure to fold &a[0] + off_1 here. */
2756 fold_stmt_inplace (&gsi);
2757 update_stmt (stmt);
/* If folding changed the RHS code, stay on this statement so
   it is reconsidered by the dispatch above.  */
2758 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2759 gsi_next (&gsi);
2761 else
2762 gsi_next (&gsi);
2764 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2766 if (forward_propagate_comparison (&gsi))
2767 cfg_changed = true;
2769 else
2770 gsi_next (&gsi);
2773 /* Combine stmts with the stmts defining their operands.
2774 Note we update GSI within the loop as necessary. */
2775 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
2777 gimple stmt = gsi_stmt (gsi);
2778 bool changed = false;
2780 /* Mark stmt as potentially needing revisiting. */
2781 gimple_set_plf (stmt, GF_PLF_1, false);
/* Dispatch to the per-code combiners; each returns whether it
   changed anything, and 2-valued ones signal CFG changes.  */
2783 switch (gimple_code (stmt))
2785 case GIMPLE_ASSIGN:
2787 tree rhs1 = gimple_assign_rhs1 (stmt);
2788 enum tree_code code = gimple_assign_rhs_code (stmt);
2790 if ((code == BIT_NOT_EXPR
2791 || code == NEGATE_EXPR)
2792 && TREE_CODE (rhs1) == SSA_NAME)
2793 changed = simplify_not_neg_expr (&gsi);
2794 else if (code == COND_EXPR
2795 || code == VEC_COND_EXPR)
2797 /* In this case the entire COND_EXPR is in rhs1. */
2798 if (forward_propagate_into_cond (&gsi)
2799 || combine_cond_exprs (&gsi))
2801 changed = true;
2802 stmt = gsi_stmt (gsi);
2805 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2807 int did_something;
2808 did_something = forward_propagate_into_comparison (&gsi);
2809 if (did_something == 2)
2810 cfg_changed = true;
2811 changed = did_something != 0;
2813 else if (code == BIT_AND_EXPR
2814 || code == BIT_IOR_EXPR
2815 || code == BIT_XOR_EXPR)
2816 changed = simplify_bitwise_binary (&gsi);
2817 else if (code == PLUS_EXPR
2818 || code == MINUS_EXPR)
2819 changed = associate_plusminus (&gsi);
2820 else if (code == POINTER_PLUS_EXPR)
2821 changed = associate_pointerplus (&gsi);
2822 else if (CONVERT_EXPR_CODE_P (code)
2823 || code == FLOAT_EXPR
2824 || code == FIX_TRUNC_EXPR)
2826 int did_something = combine_conversions (&gsi);
2827 if (did_something == 2)
2828 cfg_changed = true;
2829 changed = did_something != 0;
2831 else if (code == VEC_PERM_EXPR)
2833 int did_something = simplify_permutation (&gsi);
2834 if (did_something == 2)
2835 cfg_changed = true;
2836 changed = did_something != 0;
2838 break;
2841 case GIMPLE_SWITCH:
2842 changed = simplify_gimple_switch (stmt);
2843 break;
2845 case GIMPLE_COND:
2847 int did_something;
2848 did_something = forward_propagate_into_gimple_cond (stmt);
2849 if (did_something == 2)
2850 cfg_changed = true;
2851 changed = did_something != 0;
2852 break;
2855 case GIMPLE_CALL:
2857 tree callee = gimple_call_fndecl (stmt);
2858 if (callee != NULL_TREE
2859 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
2860 changed = simplify_builtin_call (&gsi, callee);
2861 break;
2864 default:;
2867 if (changed)
2869 /* If the stmt changed then re-visit it and the statements
2870 inserted before it. */
2871 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
2872 if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
2873 break;
2874 if (gsi_end_p (gsi))
2875 gsi = gsi_start_bb (bb);
2876 else
2877 gsi_next (&gsi);
2879 else
2881 /* Stmt no longer needs to be revisited. */
2882 gimple_set_plf (stmt, GF_PLF_1, true);
2883 gsi_next (&gsi);
2888 if (cfg_changed)
2889 todoflags |= TODO_cleanup_cfg;
2891 return todoflags;
2895 static bool
2896 gate_forwprop (void)
2898 return flag_tree_forwprop;
2901 struct gimple_opt_pass pass_forwprop =
2904 GIMPLE_PASS,
2905 "forwprop", /* name */
2906 gate_forwprop, /* gate */
2907 ssa_forward_propagate_and_combine, /* execute */
2908 NULL, /* sub */
2909 NULL, /* next */
2910 0, /* static_pass_number */
2911 TV_TREE_FORWPROP, /* tv_id */
2912 PROP_cfg | PROP_ssa, /* properties_required */
2913 0, /* properties_provided */
2914 0, /* properties_destroyed */
2915 0, /* todo_flags_start */
2916 TODO_ggc_collect
2917 | TODO_update_ssa
2918 | TODO_verify_ssa /* todo_flags_finish */