gcc/tree-ssa-forwprop.c
/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010, 2011, 2012
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "flags.h"
#include "gimple.h"
#include "expr.h"

/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or

     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither have volatile on them):
     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or

     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */

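/* As a concrete illustration (a sketch for the reader, not part of the
   pass), plain C source such as

     int
     example_cond (int a, int b)
     {
       int t = a < b;
       if (t)
         return a;
       return b;
     }

   produces GIMPLE of the "x = a COND b; if (x)" shape described above;
   the pass folds the comparison into the branch, leaving "if (a < b)"
   and a dead definition of t.  */
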
static bool forward_propagate_addr_expr (tree name, tree rhs);

/* Set to true if we delete dead edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple stmt);

/* Get the next statement we can propagate NAME's value into skipping
   trivial copies.  Returns the statement that is suitable as a
   propagation destination or NULL if there is no such one.
   This only returns destinations in a single-use chain.  If FINAL_NAME_P
   is non-NULL, the SSA name that represents the use is written to it.  */

static gimple
get_prop_dest_stmt (tree name, tree *final_name_p)
{
  use_operand_p use;
  gimple use_stmt;

  do {
    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))
      return NULL;

    /* If this is not a trivial copy, we found it.  */
    if (!gimple_assign_ssa_name_copy_p (use_stmt)
        || gimple_assign_rhs1 (use_stmt) != name)
      break;

    /* Continue searching uses of the copy destination.  */
    name = gimple_assign_lhs (use_stmt);
  } while (1);

  if (final_name_p)
    *final_name_p = name;

  return use_stmt;
}

/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL if there is no such one.
   If SINGLE_USE_ONLY is set considers only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
        single_use = false;
        if (single_use_only)
          return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is not a simple copy, we possibly found it.  */
    if (!gimple_assign_ssa_name_copy_p (def_stmt))
      {
        tree rhs;

        if (!single_use_only && single_use_p)
          *single_use_p = single_use;

        /* We can look through pointer conversions in the search
           for a useful stmt for the comparison folding.  */
        rhs = gimple_assign_rhs1 (def_stmt);
        if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
            && TREE_CODE (rhs) == SSA_NAME
            && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
            && POINTER_TYPE_P (TREE_TYPE (rhs)))
          name = rhs;
        else
          return def_stmt;
      }
    else
      {
        /* Continue searching the def of the copy source name.  */
        name = gimple_assign_rhs1 (def_stmt);
      }
  } while (1);
}

/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can always be propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we can not apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}

/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (SSA_NAME_IN_FREE_LIST (name)
        || SSA_NAME_IS_DEFAULT_DEF (name)
        || !has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    if (gimple_code (stmt) == GIMPLE_PHI
        || gimple_has_side_effects (stmt))
      return cfg_changed;

    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    if (gsi_remove (&gsi, true))
      cfg_changed |= gimple_purge_dead_eh_edges (bb);
    release_defs (stmt);

    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}

/* Return the rhs of a gimple_assign STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt),
                            gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}

/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}

/* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
   of its operand.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (gimple stmt,
                                     enum tree_code code, tree type,
                                     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        rhs0, op1, !single_use0_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        op0, rhs1, !single_use1_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
                                  rhs0, rhs1,
                                  !(single_use0_p && single_use1_p));

  return tmp;
}

/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
                                             gimple_assign_rhs_code (stmt),
                                             type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}

/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gimple stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
                                             boolean_type_node,
                                             rhs1, rhs2);
  if (tmp)
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_gimple_expr (dump_file, stmt, 0, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
           && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
           && integer_zerop (rhs2))
          || (code == NE_EXPR
              && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}

/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);
  bool swap = false;

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
                                               boolean_type_node,
                                               TREE_OPERAND (cond, 0),
                                               TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      enum tree_code code;
      tree name = cond;
      gimple def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return false;

      code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (code) == tcc_comparison)
        tmp = fold_build2_loc (gimple_location (def_stmt),
                               code,
                               boolean_type_node,
                               gimple_assign_rhs1 (def_stmt),
                               gimple_assign_rhs2 (def_stmt));
      else if ((code == BIT_NOT_EXPR
                && TYPE_PRECISION (TREE_TYPE (cond)) == 1)
               || (code == BIT_XOR_EXPR
                   && integer_onep (gimple_assign_rhs2 (def_stmt))))
        {
          tmp = gimple_assign_rhs1 (def_stmt);
          swap = true;
        }
    }

  if (tmp
      && is_gimple_condexpr (tmp))
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, cond, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      if (integer_onep (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
      else if (integer_zerop (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
      else
        {
          gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
          if (swap)
            {
              tree t = gimple_assign_rhs2 (stmt);
              gimple_assign_set_rhs2 (stmt, gimple_assign_rhs3 (stmt));
              gimple_assign_set_rhs3 (stmt, t);
            }
        }
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return true;
    }

  return false;
}

/* Propagate from the ssa name definition statements of COND_EXPR
   values in the rhs of statement STMT into the conditional arms
   if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
combine_cond_exprs (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree cond, val1, val2;
  bool changed = false;

  cond = gimple_assign_rhs1 (stmt);
  val1 = gimple_assign_rhs2 (stmt);
  if (TREE_CODE (val1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val1);
      if (is_gimple_assign (def_stmt)
          && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
          && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
        {
          val1 = unshare_expr (gimple_assign_rhs2 (def_stmt));
          gimple_assign_set_rhs2 (stmt, val1);
          changed = true;
        }
    }
  val2 = gimple_assign_rhs3 (stmt);
  if (TREE_CODE (val2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val2);
      if (is_gimple_assign (def_stmt)
          && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
          && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
        {
          val2 = unshare_expr (gimple_assign_rhs3 (def_stmt));
          gimple_assign_set_rhs3 (stmt, val2);
          changed = true;
        }
    }
  if (operand_equal_p (val1, val2, 0))
    {
      gimple_assign_set_rhs_from_tree (gsi_p, val1);
      stmt = gsi_stmt (*gsi_p);
      changed = true;
    }

  if (changed)
    update_stmt (stmt);

  return changed;
}

/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
    cfg_changed = true;

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}

/* DEF_RHS contains the address of the 0th element in an array.
   USE_STMT uses type of DEF_RHS to compute the address of an
   arbitrary element within the array.  The (variable) byte offset
   of the element is contained in OFFSET.

   We walk back through the use-def chains of OFFSET to verify that
   it is indeed computing the offset of an element within the array
   and extract the index corresponding to the given byte offset.

   We then try to fold the entire address expression into a form
   &array[index].

   If we are successful, we replace the right hand side of USE_STMT
   with the new address computation.  */

static bool
forward_propagate_addr_into_variable_array_index (tree offset,
                                                  tree def_rhs,
                                                  gimple_stmt_iterator *use_stmt_gsi)
{
  tree index, tunit;
  gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
  tree new_rhs, tmp;

  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
    tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
  else if (TREE_CODE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))) == ARRAY_TYPE)
    tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))));
  else
    return false;
  if (!host_integerp (tunit, 1))
    return false;

  /* Get the offset's defining statement.  */
  offset_def = SSA_NAME_DEF_STMT (offset);

  /* Try to find an expression for a proper index.  This is either a
     multiplication expression by the element size or just the ssa name we came
     along in case the element size is one.  In that case, however, we do not
     allow multiplications because they can be computing index to a higher
     level dimension (PR 37861).  */
  if (integer_onep (tunit))
    {
      if (is_gimple_assign (offset_def)
          && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
        return false;

      index = offset;
    }
  else
    {
      /* The statement which defines OFFSET before type conversion
         must be a simple GIMPLE_ASSIGN.  */
      if (!is_gimple_assign (offset_def))
        return false;

      /* The RHS of the statement which defines OFFSET must be a
         multiplication of an object by the size of the array elements.
         This implicitly verifies that the size of the array elements
         is constant.  */
      if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
          && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
          && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
        {
          /* The first operand to the MULT_EXPR is the desired index.  */
          index = gimple_assign_rhs1 (offset_def);
        }
      /* If we have idx * tunit + CST * tunit re-associate that.  */
      else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
                || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
               && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
               && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
               && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
                                                gimple_assign_rhs2 (offset_def),
                                                tunit)) != NULL_TREE)
        {
          gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
          if (is_gimple_assign (offset_def2)
              && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
              && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
              && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
            {
              index = fold_build2 (gimple_assign_rhs_code (offset_def),
                                   TREE_TYPE (offset),
                                   gimple_assign_rhs1 (offset_def2), tmp);
            }
          else
            return false;
        }
      else
        return false;
    }

  /* Replace the pointer addition with array indexing.  */
  index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
    {
      new_rhs = unshare_expr (def_rhs);
      TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 1) = index;
    }
  else
    {
      new_rhs = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))),
                        unshare_expr (TREE_OPERAND (def_rhs, 0)),
                        index, integer_zero_node, NULL_TREE);
      new_rhs = build_fold_addr_expr (new_rhs);
      if (!useless_type_conversion_p (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                                      TREE_TYPE (new_rhs)))
        {
          new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true,
                                              NULL_TREE, true, GSI_SAME_STMT);
          new_rhs = fold_convert (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                                  new_rhs);
        }
    }
  gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
  fold_stmt (use_stmt_gsi);
  tidy_after_forward_propagate_addr (gsi_stmt (*use_stmt_gsi));
  return true;
}

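/* An illustrative sketch (for the reader, not part of the pass) of the
   source shape handled above:

     static long x[16];

     long
     example_index (long i)
     {
       long *p = &x[0];
       return p[i];
     }

   The pointer arithmetic p + i computes a byte offset of
   i * sizeof (long); the MULT_EXPR by the element size is recognized,
   the index i is extracted, and the dereference folds to x[i].  */
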
/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation can
   be not totally successful, yet things may have been changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Trivial cases.  The use statement could be a trivial copy or a
     useless conversion.  Recurse to the uses of the lhs as copyprop does
     not copy through different variant pointers and FRE does not catch
     all useless conversions.  Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && ((rhs_code == SSA_NAME && rhs == name)
          || CONVERT_EXPR_CODE_P (rhs_code)))
    {
      /* Only recurse if we don't deal with a single use or we cannot
         do the propagation to the current statement.  In particular
         we can end up with a conversion needed for a non-invariant
         address which we cannot do in a single statement.  */
      if (!single_use_p
          || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
              && (!is_gimple_min_invariant (def_rhs)
                  || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                      && POINTER_TYPE_P (TREE_TYPE (def_rhs))
                      && (TYPE_PRECISION (TREE_TYPE (lhs))
                          > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
        return forward_propagate_addr_expr (lhs, def_rhs);

      gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
        gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
      else
        gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
      return true;
    }

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
         for further propagation.  Simply rely on fold building that
         and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
                                 def_rhs,
                                 fold_convert (ptr_type_node,
                                               gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
        return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
                                                    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
         bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
          && forward_propagate_addr_expr (lhs, new_def_rhs))
        return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
                                        new_def_rhs, NULL_TREE);
      else if (is_gimple_min_invariant (new_def_rhs))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
                                        new_def_rhs, NULL_TREE);
      else
        return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  lhs = gimple_assign_lhs (use_stmt);
  while (handled_component_p (lhs))
    lhs = TREE_OPERAND (lhs, 0);

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (lhs);
          tree new_ptr;
          off = double_int_add (off,
                                shwi_to_double_int (def_rhs_offset));
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off = double_int_add (off, mem_ref_offset (def_rhs_base));
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (lhs, 0) = new_ptr;
          TREE_OPERAND (lhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the only use.  */
          if (single_use_p)
            return true;
        }
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the LHS preserving the original alias-type.  */
      else if (gimple_assign_lhs (use_stmt) == lhs
               && integer_zerop (TREE_OPERAND (lhs, 1))
               && useless_type_conversion_p
                    (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
                     TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_lhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (lhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
          new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          gimple_assign_set_lhs (use_stmt, new_lhs);
          TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
          *def_rhs_basep = saved;
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the
             only use.  */
          if (single_use_p)
            return true;
        }
      else
        /* We can have a struct assignment dereferencing our name twice.
           Note that we didn't propagate into the lhs to not falsely
           claim we did when propagating into the rhs.  */
        res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  rhs = gimple_assign_rhs1 (use_stmt);
  if (TREE_CODE (rhs) == ADDR_EXPR)
    rhs = TREE_OPERAND (rhs, 0);
  while (handled_component_p (rhs))
    rhs = TREE_OPERAND (rhs, 0);

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (rhs);
          tree new_ptr;
          off = double_int_add (off,
                                shwi_to_double_int (def_rhs_offset));
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off = double_int_add (off, mem_ref_offset (def_rhs_base));
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (rhs, 0) = new_ptr;
          TREE_OPERAND (rhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the RHS preserving the original alias-type.  */
      else if (gimple_assign_rhs1 (use_stmt) == rhs
               && integer_zerop (TREE_OPERAND (rhs, 1))
               && useless_type_conversion_p
                    (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                     TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_rhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (rhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
          new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          gimple_assign_set_rhs1 (use_stmt, new_rhs);
          TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
          *def_rhs_basep = saved;
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
                                 ADDR_EXPR, TREE_TYPE (def_rhs),
                                 fold_build2 (MEM_REF,
                                              TREE_TYPE (TREE_TYPE (def_rhs)),
                                              unshare_expr (def_rhs),
                                              fold_convert (ptr_type_node,
                                                            rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
     converting a multiplication of an index by the size of the
     array elements, then the result is converted into the proper
     type for the arithmetic.  */
  if (TREE_CODE (rhs2) == SSA_NAME
      && (TREE_CODE (array_ref) != ARRAY_REF
          || integer_zerop (TREE_OPERAND (array_ref, 1)))
      && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
      /* Avoid problems with IVopts creating PLUS_EXPRs with a
         different type than their operands.  */
      && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
    return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
                                                             use_stmt_gsi);
  return false;
}

/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.
   Returns true, if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs)
{
  int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
        {
          if (!is_gimple_debug (use_stmt))
            all = false;
          continue;
        }

      /* If the use is in a deeper loop nest, then we do not want
         to propagate non-invariant ADDR_EXPRs into the loop as that
         is likely adding expression evaluations into the loop.  */
      if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
          && !is_gimple_min_invariant (rhs))
        {
          all = false;
          continue;
        }

      {
        gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
        result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                                single_use_p);
        /* If the use has moved to a different statement adjust
           the update machinery for the old statement too.  */
        if (use_stmt != gsi_stmt (gsi))
          {
            update_stmt (use_stmt);
            use_stmt = gsi_stmt (gsi);
          }

        update_stmt (use_stmt);
      }
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  return all && has_zero_uses (name);
}

/* Forward propagate the comparison defined in *DEFGSI like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  Advance DEFGSI to the next
   statement.  */

static bool
forward_propagate_comparison (gimple_stmt_iterator *defgsi)
{
  gimple stmt = gsi_stmt (*defgsi);
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  tree tmp = NULL_TREE;
  gimple_stmt_iterator gsi;
  enum tree_code code;
  tree lhs;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    goto bailout;

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt
      || !is_gimple_assign (use_stmt))
    goto bailout;

  code = gimple_assign_rhs_code (use_stmt);
  lhs = gimple_assign_lhs (use_stmt);
  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
    goto bailout;

  /* We can propagate the condition into a statement that
     computes the logical negation of the comparison result.  */
  if ((code == BIT_NOT_EXPR
       && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
      || (code == BIT_XOR_EXPR
          && integer_onep (gimple_assign_rhs2 (use_stmt))))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
      bool nans = HONOR_NANS (TYPE_MODE (type));
      enum tree_code inv_code;
      inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
      if (inv_code == ERROR_MARK)
        goto bailout;

      tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
                    gimple_assign_rhs2 (stmt));
    }
  else
    goto bailout;

  gsi = gsi_for_stmt (use_stmt);
  gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
  use_stmt = gsi_stmt (gsi);
  update_stmt (use_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Replaced '");
      print_gimple_expr (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "' with '");
      print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
      fprintf (dump_file, "'\n");
    }

  /* When we remove stmt now the iterator defgsi goes off its current
     sequence, hence advance it now.  */
  gsi_next (defgsi);

  /* Remove defining statements.  */
  return remove_prop_source_from_use (name);

bailout:
  gsi_next (defgsi);
  return false;
}

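/* An illustrative sketch (not part of the pass) of a source pattern that
   reaches forward_propagate_comparison:

     _Bool
     example_not (int x, int y)
     {
       _Bool t = x < y;
       return !t;
     }

   For the one-bit type, !t is represented as t ^ 1 (or ~t); the pass
   substitutes the inverted comparison, yielding x >= y directly, and the
   definition of t becomes removable.  */
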
/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
   If so, we can change STMT into lhs = y which can later be copy
   propagated.  Similarly for negation.

   This could trivially be formulated as a forward propagation
   to immediate uses.  However, we already had an implementation
   from DOM which used backward propagation via the use-def links.

   It turns out that backward propagation is actually faster as
   there's less work to do for each NOT/NEG expression we find.
   Backwards propagation needs to look at the statement in a single
   backlink.  Forward propagation needs to look at potentially more
   than one forward link.

   Returns true when the statement was changed.  */

static bool
simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree rhs = gimple_assign_rhs1 (stmt);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);

  /* See if the RHS_DEF_STMT has the same form as our statement.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
    {
      tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);

      /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
      if (TREE_CODE (rhs_def_operand) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
        {
          gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
          stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);
          return true;
        }
    }

  return false;
}

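/* An illustrative sketch (not part of the pass): in

     int
     example_double_neg (int a)
     {
       int b = -a;
       return -b;
     }

   the operand of the second negation is itself defined by a negation,
   so the final statement is rewritten to return a directly.  The same
   applies to double bitwise-not, ~~a.  */
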
/* Helper function for simplify_gimple_switch.  Remove case labels that
   have values outside the range of the new type.  */

static void
simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  VEC(tree, heap) *labels = VEC_alloc (tree, heap, branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  */
  for (i = 1; i < branch_num; i++)
    VEC_quick_push (tree, labels, gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = VEC_length (tree, labels);
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
         out-of-range for INDEX_TYPE.  Push one label and let the
         CFG cleanups deal with this further.  */
      if (len == 0)
        {
          tree label, elt;

          label = CASE_LABEL (gimple_switch_default_label (stmt));
          elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
          VEC_quick_push (tree, labels, elt);
          len = 1;
        }

      for (i = 0; i < VEC_length (tree, labels); i++)
        gimple_switch_set_label (stmt, i + 1, VEC_index (tree, labels, i));
      for (i++ ; i < branch_num; i++)
        gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
        {
          tree elt = gimple_switch_label (stmt, i);
          basic_block target = label_to_block (CASE_LABEL (elt));
          bitmap_set_bit (target_blocks, target->index);
        }
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
        {
          if (! bitmap_bit_p (target_blocks, e->dest->index))
            {
              remove_edge (e);
              cfg_changed = true;
              free_dominance_info (CDI_DOMINATORS);
            }
          else
            ei_next (&ei);
        }
      BITMAP_FREE (target_blocks);
    }

  VEC_free (tree, heap, labels);
}

/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gimple stmt)
{
  tree cond = gimple_switch_index (stmt);
  tree def, to, ti;
  gimple def_stmt;

  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      def_stmt = SSA_NAME_DEF_STMT (cond);
      if (is_gimple_assign (def_stmt))
        {
          if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
            {
              int need_precision;
              bool fail;

              def = gimple_assign_rhs1 (def_stmt);

              to = TREE_TYPE (cond);
              ti = TREE_TYPE (def);

              /* If we have an extension that preserves value, then we
                 can copy the source value into the switch.  */

              need_precision = TYPE_PRECISION (ti);
              fail = false;
              if (! INTEGRAL_TYPE_P (ti))
                fail = true;
              else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
                fail = true;
              else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
                need_precision += 1;
              if (TYPE_PRECISION (to) < need_precision)
                fail = true;

              if (!fail)
                {
                  gimple_switch_set_index (stmt, def);
                  simplify_gimple_switch_label_vec (stmt, ti);
                  update_stmt (stmt);
                  return true;
                }
            }
        }
    }

  return false;
}

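/* An illustrative sketch (not part of the pass): for

     int
     example_switch (unsigned char c)
     {
       switch (c)
         {
         case 'a': return 1;
         case 'b': return 2;
         default:  return 0;
         }
     }

   the front end promotes the controlling expression to int, leaving a
   NOP_EXPR before the switch.  The widening preserves values, so the
   switch can be performed on c's own type; simplify_gimple_switch_label_vec
   then drops any case labels that no longer fit the narrower type.  */
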
/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
         twice, to handle ADDR_EXPR directly in p1/p2,
         SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
         on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
        {
          if (!POINTER_TYPE_P (TREE_TYPE (p)))
            break;
          if (TREE_CODE (p) == ADDR_EXPR)
            {
              tree q = TREE_OPERAND (p, 0);
              HOST_WIDE_INT offset;
              tree base = get_addr_base_and_unit_offset (q, &offset);
              if (base)
                {
                  q = base;
                  if (offset)
                    off = size_binop (PLUS_EXPR, off, size_int (offset));
                }
              if (TREE_CODE (q) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
                {
                  p = TREE_OPERAND (q, 0);
                  off = size_binop (PLUS_EXPR, off,
                                    double_int_to_tree (sizetype,
                                                        mem_ref_offset (q)));
                }
              else
                {
                  exps[i][j] = q;
                  offs[i][j++] = off;
                  break;
                }
            }
          if (TREE_CODE (p) != SSA_NAME)
            break;
          exps[i][j] = p;
          offs[i][j++] = off;
          if (j == CPD_ITERATIONS)
            break;
          stmt = SSA_NAME_DEF_STMT (p);
          if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
            break;
          code = gimple_assign_rhs_code (stmt);
          if (code == POINTER_PLUS_EXPR)
            {
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
                break;
              off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
              p = gimple_assign_rhs1 (stmt);
            }
          else if (code == ADDR_EXPR || code == NOP_EXPR)
            p = gimple_assign_rhs1 (stmt);
          else
            break;
        }
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
        return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}

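/* An illustrative sketch (not part of the pass): given

     char buf[16];
     char *p1 = &buf[2];
     char *p2 = p1 + 5;

   constant_pointer_difference (p1, p2) walks both definition chains back
   to the common base &buf, accumulating the constant offsets 2 and 7,
   and returns 5.  If no common base expression is found within
   CPD_ITERATIONS steps, it returns NULL_TREE.  */
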
/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
   memcpy (p, "abcd", 4);
   memset (p + 4, ' ', 3);
   into
   memcpy (p, "abcd   ", 7);
   call if the latter can be stored by pieces during expansion.  */

static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
          || gimple_call_lhs (stmt2)
          || CHAR_BIT != 8
          || BITS_PER_UNIT != 8)
        break;
      else
        {
          tree callee1;
          tree ptr1, src1, str1, off1, len1, lhs1;
          tree ptr2 = gimple_call_arg (stmt2, 0);
          tree val2 = gimple_call_arg (stmt2, 1);
          tree len2 = gimple_call_arg (stmt2, 2);
          tree diff, vdef, new_str_cst;
          gimple use_stmt;
          unsigned int ptr1_align;
          unsigned HOST_WIDE_INT src_len;
          char *src_buf;
          use_operand_p use_p;

          if (!host_integerp (val2, 0)
              || !host_integerp (len2, 1))
            break;
          if (is_gimple_call (stmt1))
            {
              /* If first stmt is a call, it needs to be memcpy
                 or mempcpy, with string literal as second argument and
                 constant length.  */
              callee1 = gimple_call_fndecl (stmt1);
              if (callee1 == NULL_TREE
                  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
                  || gimple_call_num_args (stmt1) != 3)
                break;
              if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
                  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
                break;
              ptr1 = gimple_call_arg (stmt1, 0);
              src1 = gimple_call_arg (stmt1, 1);
              len1 = gimple_call_arg (stmt1, 2);
              lhs1 = gimple_call_lhs (stmt1);
              if (!host_integerp (len1, 1))
                break;
              str1 = string_constant (src1, &off1);
              if (str1 == NULL_TREE)
                break;
              if (!host_integerp (off1, 1)
                  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
                  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
                                             - tree_low_cst (off1, 1)) > 0
                  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
                  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
                     != TYPE_MODE (char_type_node))
                break;
            }
          else if (gimple_assign_single_p (stmt1))
            {
              /* Otherwise look for length 1 memcpy optimized into
                 assignment.  */
              ptr1 = gimple_assign_lhs (stmt1);
              src1 = gimple_assign_rhs1 (stmt1);
              if (TREE_CODE (ptr1) != MEM_REF
                  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
                  || !host_integerp (src1, 0))
                break;
              ptr1 = build_fold_addr_expr (ptr1);
              callee1 = NULL_TREE;
              len1 = size_one_node;
              lhs1 = NULL_TREE;
              off1 = size_zero_node;
              str1 = NULL_TREE;
            }
          else
            break;

          diff = constant_pointer_difference (ptr1, ptr2);
          if (diff == NULL && lhs1 != NULL)
            {
              diff = constant_pointer_difference (lhs1, ptr2);
              if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
                  && diff != NULL)
                diff = size_binop (PLUS_EXPR, diff,
                                   fold_convert (sizetype, len1));
            }
          /* If the difference between the second and first destination pointer
             is not constant, or is bigger than memcpy length, bail out.  */
          if (diff == NULL
              || !host_integerp (diff, 1)
              || tree_int_cst_lt (len1, diff))
            break;

          /* Use maximum of difference plus memset length and memcpy length
             as the new memcpy length, if it is too big, bail out.  */
          src_len = tree_low_cst (diff, 1);
          src_len += tree_low_cst (len2, 1);
          if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
            src_len = tree_low_cst (len1, 1);
          if (src_len > 1024)
            break;

          /* If mempcpy value is used elsewhere, bail out, as mempcpy
             with bigger length will return different result.  */
          if (lhs1 != NULL_TREE
              && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
              && (TREE_CODE (lhs1) != SSA_NAME
                  || !single_imm_use (lhs1, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          /* If anything reads memory in between memcpy and memset
             call, the modified memcpy call might change it.  */
          vdef = gimple_vdef (stmt1);
          if (vdef != NULL
              && (!single_imm_use (vdef, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          ptr1_align = get_pointer_alignment (ptr1);
          /* Construct the new source string literal.  */
          src_buf = XALLOCAVEC (char, src_len + 1);
          if (callee1)
            memcpy (src_buf,
                    TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
                    tree_low_cst (len1, 1));
          else
            src_buf[0] = tree_low_cst (src1, 0);
          memset (src_buf + tree_low_cst (diff, 1),
                  tree_low_cst (val2, 1), tree_low_cst (len2, 1));
          src_buf[src_len] = '\0';
          /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
             handle embedded '\0's.  */
          if (strlen (src_buf) != src_len)
            break;
          rtl_profile_for_bb (gimple_bb (stmt2));
          /* If the new memcpy wouldn't be emitted by storing the literal
             by pieces, this optimization might enlarge .rodata too much,
             as commonly used string literals couldn't be shared any
             longer.  */
          if (!can_store_by_pieces (src_len,
                                    builtin_strncpy_read_str,
                                    src_buf, ptr1_align, false))
            break;

          new_str_cst = build_string_literal (src_len, src_buf);
          if (callee1)
            {
              /* If STMT1 is a mem{,p}cpy call, adjust it and remove
                 memset call.  */
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                gimple_call_set_lhs (stmt1, NULL_TREE);
              gimple_call_set_arg (stmt1, 1, new_str_cst);
              gimple_call_set_arg (stmt1, 2,
                                   build_int_cst (TREE_TYPE (len1), src_len));
              update_stmt (stmt1);
              unlink_stmt_vdef (stmt2);
              gsi_remove (gsi_p, true);
              release_defs (stmt2);
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                release_ssa_name (lhs1);
              return true;
            }
          else
            {
              /* Otherwise, if STMT1 is length 1 memcpy optimized into
                 assignment, remove STMT1 and change memset call into
                 memcpy call.  */
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

              if (!is_gimple_val (ptr1))
                ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
                                                 true, GSI_SAME_STMT);
              gimple_call_set_fndecl (stmt2,
                                      builtin_decl_explicit (BUILT_IN_MEMCPY));
              gimple_call_set_arg (stmt2, 0, ptr1);
              gimple_call_set_arg (stmt2, 1, new_str_cst);
              gimple_call_set_arg (stmt2, 2,
                                   build_int_cst (TREE_TYPE (len2), src_len));
              unlink_stmt_vdef (stmt1);
              gsi_remove (&gsi, true);
              release_defs (stmt1);
              update_stmt (stmt2);
              return false;
            }
        }
      break;
    default:
      break;
    }
  return false;
}

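/* An illustrative sketch (not part of the pass) of the merge performed
   by simplify_builtin_call:

     void
     example_pad (char *p)
     {
       __builtin_memcpy (p, "abcd", 4);
       __builtin_memset (p + 4, ' ', 3);
     }

   The memset destination starts a constant 4 bytes past the memcpy
   destination, so the two stores are merged into a single
   memcpy (p, "abcd   ", 7) when the combined literal can be stored by
   pieces during expansion.  */
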
/* Checks if expression has type of one-bit precision, or is a known
   truth-valued expression.  */
static bool
truth_valued_ssa_name (tree name)
{
  gimple def;
  tree type = TREE_TYPE (name);

  if (!INTEGRAL_TYPE_P (type))
    return false;
  /* Don't check here for BOOLEAN_TYPE as the precision isn't
     necessarily one and so ~X is not equal to !X.  */
  if (TYPE_PRECISION (type) == 1)
    return true;
  def = SSA_NAME_DEF_STMT (name);
  if (is_gimple_assign (def))
    return truth_value_p (gimple_assign_rhs_code (def));
  return false;
}

1775 /* Helper routine for simplify_bitwise_binary_1 function.
1776 Return for the SSA name NAME the expression X if it mets condition
1777 NAME = !X. Otherwise return NULL_TREE.
1778 Detected patterns for NAME = !X are:
1779 !X and X == 0 for X with integral type.
1780 X ^ 1, X != 1,or ~X for X with integral type with precision of one. */
1781 static tree
1782 lookup_logical_inverted_value (tree name)
1784 tree op1, op2;
1785 enum tree_code code;
1786 gimple def;
1788 /* If NAME has a non-integral type, or isn't an SSA_NAME, then
1789 return. */
1790 if (TREE_CODE (name) != SSA_NAME
1791 || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
1792 return NULL_TREE;
1793 def = SSA_NAME_DEF_STMT (name);
1794 if (!is_gimple_assign (def))
1795 return NULL_TREE;
1797 code = gimple_assign_rhs_code (def);
1798 op1 = gimple_assign_rhs1 (def);
1799 op2 = NULL_TREE;
1801 /* Get the second operand for an EQ_EXPR, NE_EXPR, or BIT_XOR_EXPR
1802 operation; for any other CODE, OP2 stays NULL_TREE. */
1803 if (code == EQ_EXPR || code == NE_EXPR
1804 || code == BIT_XOR_EXPR)
1805 op2 = gimple_assign_rhs2 (def);
1807 switch (code)
1809 case BIT_NOT_EXPR:
1810 if (truth_valued_ssa_name (name))
1811 return op1;
1812 break;
1813 case EQ_EXPR:
1814 /* Check if we have X == 0 and X has an integral type. */
1815 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1816 break;
1817 if (integer_zerop (op2))
1818 return op1;
1819 break;
1820 case NE_EXPR:
1821 /* Check if we have X != 1 and X is truth-valued. */
1822 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1823 break;
1824 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1825 return op1;
1826 break;
1827 case BIT_XOR_EXPR:
1828 /* Check if we have X ^ 1 and X is truth valued. */
1829 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1830 return op1;
1831 break;
1832 default:
1833 break;
1836 return NULL_TREE;
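/* Example (hypothetical GIMPLE): given

     t_2 = x_1 == 0;

   lookup_logical_inverted_value (t_2) returns x_1; likewise for
   t_2 = ~x_1 or t_2 = x_1 ^ 1 when x_1 is truth-valued. */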
1839 /* Optimize ARG1 CODE ARG2 to a constant for the bitwise binary
1840 operation CODE, if one operand is the logical inverse
1841 of the other. */
1842 static tree
1843 simplify_bitwise_binary_1 (enum tree_code code, tree type,
1844 tree arg1, tree arg2)
1846 tree anot;
1848 /* If CODE isn't a bitwise binary operation, return NULL_TREE. */
1849 if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
1850 && code != BIT_XOR_EXPR)
1851 return NULL_TREE;
1853 /* First check if operands ARG1 and ARG2 are equal. If so
1854 return NULL_TREE as this optimization is handled by fold_stmt. */
1855 if (arg1 == arg2)
1856 return NULL_TREE;
1857 /* See if we have in arguments logical-not patterns. */
1858 if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
1859 || anot != arg2)
1860 && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
1861 || anot != arg1))
1862 return NULL_TREE;
1864 /* X & !X -> 0. */
1865 if (code == BIT_AND_EXPR)
1866 return fold_convert (type, integer_zero_node);
1867 /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued. */
1868 if (truth_valued_ssa_name (anot))
1869 return fold_convert (type, integer_one_node);
1871 /* ??? Otherwise the result is (X != 0 ? X : 1); not handled. */
1872 return NULL_TREE;
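/* Sketch (names hypothetical): with t_2 = ~b_1 and b_1 truth-valued,
   b_1 & t_2 folds to 0, and both b_1 | t_2 and b_1 ^ t_2 fold to 1. */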
1875 /* Given an SSA name in NAME, see if it was defined by an assignment; if
1876 so, set CODE to the assignment's code, ARG1 to the first operand on the
1877 rhs, and ARG2 to the second operand on the rhs. */
1879 static inline void
1880 defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
1882 gimple def;
1883 enum tree_code code1;
1884 tree arg11;
1885 tree arg21;
1886 tree arg31;
1887 enum gimple_rhs_class grhs_class;
1889 code1 = TREE_CODE (name);
1890 arg11 = name;
1891 arg21 = NULL_TREE;
1892 grhs_class = get_gimple_rhs_class (code1);
1894 if (code1 == SSA_NAME)
1896 def = SSA_NAME_DEF_STMT (name);
1898 if (def && is_gimple_assign (def)
1899 && can_propagate_from (def))
1901 code1 = gimple_assign_rhs_code (def);
1902 arg11 = gimple_assign_rhs1 (def);
1903 arg21 = gimple_assign_rhs2 (def);
1904 arg31 = gimple_assign_rhs3 (def);
1907 else if (grhs_class == GIMPLE_TERNARY_RHS
1908 || grhs_class == GIMPLE_BINARY_RHS
1909 || grhs_class == GIMPLE_UNARY_RHS
1910 || grhs_class == GIMPLE_SINGLE_RHS)
1911 extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);
1913 *code = code1;
1914 *arg1 = arg11;
1915 if (arg2)
1916 *arg2 = arg21;
1917 /* Ignore arg3 currently. */
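/* Usage sketch (names hypothetical): for a_1 = b_2 & c_3,
   defcodefor_name (a_1, &code, &arg1, &arg2) yields code == BIT_AND_EXPR,
   arg1 == b_2 and arg2 == c_3; for a constant operand it simply reports
   the constant's own TREE_CODE. */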
1920 /* Simplify bitwise binary operations.
1921 Return true if a transformation was applied, otherwise return false. */
1923 static bool
1924 simplify_bitwise_binary (gimple_stmt_iterator *gsi)
1926 gimple stmt = gsi_stmt (*gsi);
1927 tree arg1 = gimple_assign_rhs1 (stmt);
1928 tree arg2 = gimple_assign_rhs2 (stmt);
1929 enum tree_code code = gimple_assign_rhs_code (stmt);
1930 tree res;
1931 tree def1_arg1, def1_arg2, def2_arg1, def2_arg2;
1932 enum tree_code def1_code, def2_code;
1934 defcodefor_name (arg1, &def1_code, &def1_arg1, &def1_arg2);
1935 defcodefor_name (arg2, &def2_code, &def2_arg1, &def2_arg2);
1937 /* Try to fold (type) X op CST -> (type) (X op ((type of X) CST)). */
1938 if (TREE_CODE (arg2) == INTEGER_CST
1939 && CONVERT_EXPR_CODE_P (def1_code)
1940 && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
1941 && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
1943 gimple newop;
1944 tree tem = create_tmp_reg (TREE_TYPE (def1_arg1), NULL);
1945 newop =
1946 gimple_build_assign_with_ops (code, tem, def1_arg1,
1947 fold_convert_loc (gimple_location (stmt),
1948 TREE_TYPE (def1_arg1),
1949 arg2));
1950 tem = make_ssa_name (tem, newop);
1951 gimple_assign_set_lhs (newop, tem);
1952 gimple_set_location (newop, gimple_location (stmt));
1953 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1954 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1955 tem, NULL_TREE, NULL_TREE);
1956 update_stmt (gsi_stmt (*gsi));
1957 return true;
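/* E.g. (hypothetical GIMPLE): for short s_1 and c_2 = (int) s_1, the
   statement c_2 & 255 is rewritten as t_3 = s_1 & 255; (int) t_3, keeping
   the bitwise operation in the narrower type. */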
1960 /* For bitwise binary operations apply operand conversions to the
1961 binary operation result instead of to the operands. This allows
1962 combining successive conversions and bitwise binary operations. */
1963 if (CONVERT_EXPR_CODE_P (def1_code)
1964 && CONVERT_EXPR_CODE_P (def2_code)
1965 && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
1966 /* Make sure that the conversion widens the operands, or has same
1967 precision, or that it changes the operation to a bitfield
1968 precision. */
1969 && ((TYPE_PRECISION (TREE_TYPE (def1_arg1))
1970 <= TYPE_PRECISION (TREE_TYPE (arg1)))
1971 || (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (arg1)))
1972 != MODE_INT)
1973 || (TYPE_PRECISION (TREE_TYPE (arg1))
1974 != GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg1))))))
1976 gimple newop;
1977 tree tem = create_tmp_reg (TREE_TYPE (def1_arg1),
1978 NULL);
1979 newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
1980 tem = make_ssa_name (tem, newop);
1981 gimple_assign_set_lhs (newop, tem);
1982 gimple_set_location (newop, gimple_location (stmt));
1983 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1984 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1985 tem, NULL_TREE, NULL_TREE);
1986 update_stmt (gsi_stmt (*gsi));
1987 return true;
1991 /* Simplify (A & B) OP0 (C & B) to (A OP0 C) & B. */
1992 if (def1_code == def2_code
1993 && def1_code == BIT_AND_EXPR
1994 && operand_equal_for_phi_arg_p (def1_arg2,
1995 def2_arg2))
1997 tree b = def1_arg2;
1998 tree a = def1_arg1;
1999 tree c = def2_arg1;
2000 tree inner = fold_build2 (code, TREE_TYPE (arg2), a, c);
2001 /* If A OP0 C (this usually means C is the same as A) folds
2002 to the constant 0, use it directly. */
2003 if (integer_zerop (inner))
2005 gimple_assign_set_rhs_from_tree (gsi, inner);
2006 update_stmt (stmt);
2007 return true;
2009 /* If A OP0 C (this usually means C is the same as A) folds to an
2010 SSA_NAME, combine it with B directly. */
2011 else if (TREE_CODE (inner) == SSA_NAME)
2013 tree outer = fold_build2 (def1_code, TREE_TYPE (inner),
2014 inner, b);
2015 gimple_assign_set_rhs_from_tree (gsi, outer);
2016 update_stmt (stmt);
2017 return true;
2019 else
2021 gimple newop;
2022 tree tem;
2023 tem = create_tmp_reg (TREE_TYPE (arg2), NULL);
2024 newop = gimple_build_assign_with_ops (code, tem, a, c);
2025 tem = make_ssa_name (tem, newop);
2026 gimple_assign_set_lhs (newop, tem);
2027 gimple_set_location (newop, gimple_location (stmt));
2028 /* Make sure to re-process the new stmt, since combining walks upwards. */
2029 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
2030 gimple_assign_set_rhs1 (stmt, tem);
2031 gimple_assign_set_rhs2 (stmt, b);
2032 gimple_assign_set_rhs_code (stmt, def1_code);
2033 update_stmt (stmt);
2034 return true;
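/* Sketch (names hypothetical): (a_1 & m_2) | (c_3 & m_2) becomes
   t_4 = a_1 | c_3; t_4 & m_2, saving one AND; if A OP0 C folds to 0 or
   to an existing SSA name, no new statement is needed at all. */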
2038 /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2). */
2039 if (code == BIT_AND_EXPR
2040 && def1_code == BIT_IOR_EXPR
2041 && TREE_CODE (arg2) == INTEGER_CST
2042 && TREE_CODE (def1_arg2) == INTEGER_CST)
2044 tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
2045 arg2, def1_arg2);
2046 tree tem;
2047 gimple newop;
2048 if (integer_zerop (cst))
2050 gimple_assign_set_rhs1 (stmt, def1_arg1);
2051 update_stmt (stmt);
2052 return true;
2054 tem = create_tmp_reg (TREE_TYPE (arg2), NULL);
2055 newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
2056 tem, def1_arg1, arg2);
2057 tem = make_ssa_name (tem, newop);
2058 gimple_assign_set_lhs (newop, tem);
2059 gimple_set_location (newop, gimple_location (stmt));
2060 /* Make sure to re-process the new stmt, since combining walks upwards. */
2061 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
2062 gimple_assign_set_rhs1 (stmt, tem);
2063 gimple_assign_set_rhs2 (stmt, cst);
2064 gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
2065 update_stmt (stmt);
2066 return true;
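/* E.g.: (a_1 | 0xf0) & 0x0f simplifies to a_1 & 0x0f because the
   constants do not overlap, while (a_1 | 0x30) & 0x33 becomes
   (a_1 & 0x33) | 0x30. */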
2069 /* Combine successive equal operations with constants. */
2070 if ((code == BIT_AND_EXPR
2071 || code == BIT_IOR_EXPR
2072 || code == BIT_XOR_EXPR)
2073 && def1_code == code
2074 && TREE_CODE (arg2) == INTEGER_CST
2075 && TREE_CODE (def1_arg2) == INTEGER_CST)
2077 tree cst = fold_build2 (code, TREE_TYPE (arg2),
2078 arg2, def1_arg2);
2079 gimple_assign_set_rhs1 (stmt, def1_arg1);
2080 gimple_assign_set_rhs2 (stmt, cst);
2081 update_stmt (stmt);
2082 return true;
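/* E.g.: (a_1 & 0xff) & 0x0f becomes a_1 & 0x0f, and likewise for
   successive IOR or XOR with constants. */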
2085 /* Canonicalize X ^ ~0 to ~X. */
2086 if (code == BIT_XOR_EXPR
2087 && TREE_CODE (arg2) == INTEGER_CST
2088 && integer_all_onesp (arg2))
2090 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
2091 gcc_assert (gsi_stmt (*gsi) == stmt);
2092 update_stmt (stmt);
2093 return true;
2096 /* Try simple folding for X op !X, and X op X. */
2097 res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
2098 if (res != NULL_TREE)
2100 gimple_assign_set_rhs_from_tree (gsi, res);
2101 update_stmt (gsi_stmt (*gsi));
2102 return true;
2105 if (code == BIT_AND_EXPR || code == BIT_IOR_EXPR)
2107 enum tree_code ocode = code == BIT_AND_EXPR ? BIT_IOR_EXPR : BIT_AND_EXPR;
2108 if (def1_code == ocode)
2110 tree x = arg2;
2111 enum tree_code coden;
2112 tree a1, a2;
2113 /* ( X | Y) & X -> X */
2114 /* ( X & Y) | X -> X */
2115 if (x == def1_arg1
2116 || x == def1_arg2)
2118 gimple_assign_set_rhs_from_tree (gsi, x);
2119 update_stmt (gsi_stmt (*gsi));
2120 return true;
2123 defcodefor_name (def1_arg1, &coden, &a1, &a2);
2124 /* (~X | Y) & X -> X & Y */
2125 /* (~X & Y) | X -> X | Y */
2126 if (coden == BIT_NOT_EXPR && a1 == x)
2128 gimple_assign_set_rhs_with_ops (gsi, code,
2129 x, def1_arg2);
2130 gcc_assert (gsi_stmt (*gsi) == stmt);
2131 update_stmt (stmt);
2132 return true;
2134 defcodefor_name (def1_arg2, &coden, &a1, &a2);
2135 /* (Y | ~X) & X -> X & Y */
2136 /* (Y & ~X) | X -> X | Y */
2137 if (coden == BIT_NOT_EXPR && a1 == x)
2139 gimple_assign_set_rhs_with_ops (gsi, code,
2140 x, def1_arg1);
2141 gcc_assert (gsi_stmt (*gsi) == stmt);
2142 update_stmt (stmt);
2143 return true;
2146 if (def2_code == ocode)
2148 enum tree_code coden;
2149 tree a1;
2150 tree x = arg1;
2151 /* X & ( X | Y) -> X */
2152 /* X | ( X & Y) -> X */
2153 if (x == def2_arg1
2154 || x == def2_arg2)
2156 gimple_assign_set_rhs_from_tree (gsi, x);
2157 update_stmt (gsi_stmt (*gsi));
2158 return true;
2160 defcodefor_name (def2_arg1, &coden, &a1, NULL);
2161 /* X & (~X | Y) -> X & Y */
2162 /* X | (~X & Y) -> X | Y */
2163 if (coden == BIT_NOT_EXPR && a1 == x)
2165 gimple_assign_set_rhs_with_ops (gsi, code,
2166 x, def2_arg2);
2167 gcc_assert (gsi_stmt (*gsi) == stmt);
2168 update_stmt (stmt);
2169 return true;
2171 defcodefor_name (def2_arg2, &coden, &a1, NULL);
2172 /* X & (Y | ~X) -> X & Y */
2173 /* X | (Y & ~X) -> X | Y */
2174 if (coden == BIT_NOT_EXPR && a1 == x)
2176 gimple_assign_set_rhs_with_ops (gsi, code,
2177 x, def2_arg1);
2178 gcc_assert (gsi_stmt (*gsi) == stmt);
2179 update_stmt (stmt);
2180 return true;
2185 return false;
2189 /* Perform re-associations of the plus or minus statement STMT that are
2190 always permitted. Returns true if the CFG was changed. */
2192 static bool
2193 associate_plusminus (gimple_stmt_iterator *gsi)
2195 gimple stmt = gsi_stmt (*gsi);
2196 tree rhs1 = gimple_assign_rhs1 (stmt);
2197 tree rhs2 = gimple_assign_rhs2 (stmt);
2198 enum tree_code code = gimple_assign_rhs_code (stmt);
2199 bool changed;
2201 /* We can't reassociate at all for saturating types. */
2202 if (TYPE_SATURATING (TREE_TYPE (rhs1)))
2203 return false;
2205 /* First contract negates. */
2208 changed = false;
2210 /* A +- (-B) -> A -+ B. */
2211 if (TREE_CODE (rhs2) == SSA_NAME)
2213 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2214 if (is_gimple_assign (def_stmt)
2215 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2216 && can_propagate_from (def_stmt))
2218 code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
2219 gimple_assign_set_rhs_code (stmt, code);
2220 rhs2 = gimple_assign_rhs1 (def_stmt);
2221 gimple_assign_set_rhs2 (stmt, rhs2);
2222 gimple_set_modified (stmt, true);
2223 changed = true;
2227 /* (-A) + B -> B - A. */
2228 if (TREE_CODE (rhs1) == SSA_NAME
2229 && code == PLUS_EXPR)
2231 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
2232 if (is_gimple_assign (def_stmt)
2233 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2234 && can_propagate_from (def_stmt))
2236 code = MINUS_EXPR;
2237 gimple_assign_set_rhs_code (stmt, code);
2238 rhs1 = rhs2;
2239 gimple_assign_set_rhs1 (stmt, rhs1);
2240 rhs2 = gimple_assign_rhs1 (def_stmt);
2241 gimple_assign_set_rhs2 (stmt, rhs2);
2242 gimple_set_modified (stmt, true);
2243 changed = true;
2247 while (changed);
2249 /* We can't reassociate floating-point or fixed-point plus or minus
2250 because of saturation to +-Inf. */
2251 if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
2252 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
2253 goto out;
2255 /* Second match patterns that allow contracting a plus-minus pair
2256 irrespective of overflow issues.
2258 (A +- B) - A -> +- B
2259 (A +- B) -+ B -> A
2260 (CST +- A) +- CST -> CST +- A
2261 (A + CST) +- CST -> A + CST
2262 ~A + A -> -1
2263 ~A + 1 -> -A
2264 A - (A +- B) -> -+ B
2265 A +- (B +- A) -> +- B
2266 CST +- (CST +- A) -> CST +- A
2267 CST +- (A +- CST) -> CST +- A
2268 A + ~A -> -1
2270 by commuting the addition and contracting operations to zero
2271 via reassociation; an example follows. */
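/* For instance (names hypothetical): with t_2 = a_1 + b_3, the statement
   x_4 = t_2 - a_1 contracts to x_4 = b_3; with t_2 = ~a_1, the statement
   x_4 = t_2 + 1 becomes x_4 = -a_1. */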
2273 if (TREE_CODE (rhs1) == SSA_NAME)
2275 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
2276 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2278 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2279 if (def_code == PLUS_EXPR
2280 || def_code == MINUS_EXPR)
2282 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2283 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2284 if (operand_equal_p (def_rhs1, rhs2, 0)
2285 && code == MINUS_EXPR)
2287 /* (A +- B) - A -> +- B. */
2288 code = ((def_code == PLUS_EXPR)
2289 ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
2290 rhs1 = def_rhs2;
2291 rhs2 = NULL_TREE;
2292 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2293 gcc_assert (gsi_stmt (*gsi) == stmt);
2294 gimple_set_modified (stmt, true);
2296 else if (operand_equal_p (def_rhs2, rhs2, 0)
2297 && code != def_code)
2299 /* (A +- B) -+ B -> A. */
2300 code = TREE_CODE (def_rhs1);
2301 rhs1 = def_rhs1;
2302 rhs2 = NULL_TREE;
2303 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2304 gcc_assert (gsi_stmt (*gsi) == stmt);
2305 gimple_set_modified (stmt, true);
2307 else if (TREE_CODE (rhs2) == INTEGER_CST
2308 && TREE_CODE (def_rhs1) == INTEGER_CST)
2310 /* (CST +- A) +- CST -> CST +- A. */
2311 tree cst = fold_binary (code, TREE_TYPE (rhs1),
2312 def_rhs1, rhs2);
2313 if (cst && !TREE_OVERFLOW (cst))
2315 code = def_code;
2316 gimple_assign_set_rhs_code (stmt, code);
2317 rhs1 = cst;
2318 gimple_assign_set_rhs1 (stmt, rhs1);
2319 rhs2 = def_rhs2;
2320 gimple_assign_set_rhs2 (stmt, rhs2);
2321 gimple_set_modified (stmt, true);
2324 else if (TREE_CODE (rhs2) == INTEGER_CST
2325 && TREE_CODE (def_rhs2) == INTEGER_CST
2326 && def_code == PLUS_EXPR)
2328 /* (A + CST) +- CST -> A + CST. */
2329 tree cst = fold_binary (code, TREE_TYPE (rhs1),
2330 def_rhs2, rhs2);
2331 if (cst && !TREE_OVERFLOW (cst))
2333 code = PLUS_EXPR;
2334 gimple_assign_set_rhs_code (stmt, code);
2335 rhs1 = def_rhs1;
2336 gimple_assign_set_rhs1 (stmt, rhs1);
2337 rhs2 = cst;
2338 gimple_assign_set_rhs2 (stmt, rhs2);
2339 gimple_set_modified (stmt, true);
2343 else if (def_code == BIT_NOT_EXPR
2344 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
2346 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2347 if (code == PLUS_EXPR
2348 && operand_equal_p (def_rhs1, rhs2, 0))
2350 /* ~A + A -> -1. */
2351 code = INTEGER_CST;
2352 rhs1 = build_int_cst_type (TREE_TYPE (rhs2), -1);
2353 rhs2 = NULL_TREE;
2354 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2355 gcc_assert (gsi_stmt (*gsi) == stmt);
2356 gimple_set_modified (stmt, true);
2358 else if (code == PLUS_EXPR
2359 && integer_onep (rhs1))
2361 /* ~A + 1 -> -A. */
2362 code = NEGATE_EXPR;
2363 rhs1 = def_rhs1;
2364 rhs2 = NULL_TREE;
2365 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2366 gcc_assert (gsi_stmt (*gsi) == stmt);
2367 gimple_set_modified (stmt, true);
2373 if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
2375 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2376 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2378 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2379 if (def_code == PLUS_EXPR
2380 || def_code == MINUS_EXPR)
2382 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2383 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2384 if (operand_equal_p (def_rhs1, rhs1, 0)
2385 && code == MINUS_EXPR)
2387 /* A - (A +- B) -> -+ B. */
2388 code = ((def_code == PLUS_EXPR)
2389 ? NEGATE_EXPR : TREE_CODE (def_rhs2));
2390 rhs1 = def_rhs2;
2391 rhs2 = NULL_TREE;
2392 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2393 gcc_assert (gsi_stmt (*gsi) == stmt);
2394 gimple_set_modified (stmt, true);
2396 else if (operand_equal_p (def_rhs2, rhs1, 0)
2397 && code != def_code)
2399 /* A +- (B +- A) -> +- B. */
2400 code = ((code == PLUS_EXPR)
2401 ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
2402 rhs1 = def_rhs1;
2403 rhs2 = NULL_TREE;
2404 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2405 gcc_assert (gsi_stmt (*gsi) == stmt);
2406 gimple_set_modified (stmt, true);
2408 else if (TREE_CODE (rhs1) == INTEGER_CST
2409 && TREE_CODE (def_rhs1) == INTEGER_CST)
2411 /* CST +- (CST +- A) -> CST +- A. */
2412 tree cst = fold_binary (code, TREE_TYPE (rhs2),
2413 rhs1, def_rhs1);
2414 if (cst && !TREE_OVERFLOW (cst))
2416 code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
2417 gimple_assign_set_rhs_code (stmt, code);
2418 rhs1 = cst;
2419 gimple_assign_set_rhs1 (stmt, rhs1);
2420 rhs2 = def_rhs2;
2421 gimple_assign_set_rhs2 (stmt, rhs2);
2422 gimple_set_modified (stmt, true);
2425 else if (TREE_CODE (rhs1) == INTEGER_CST
2426 && TREE_CODE (def_rhs2) == INTEGER_CST)
2428 /* CST +- (A +- CST) -> CST +- A. */
2429 tree cst = fold_binary (def_code == code
2430 ? PLUS_EXPR : MINUS_EXPR,
2431 TREE_TYPE (rhs2),
2432 rhs1, def_rhs2);
2433 if (cst && !TREE_OVERFLOW (cst))
2435 rhs1 = cst;
2436 gimple_assign_set_rhs1 (stmt, rhs1);
2437 rhs2 = def_rhs1;
2438 gimple_assign_set_rhs2 (stmt, rhs2);
2439 gimple_set_modified (stmt, true);
2443 else if (def_code == BIT_NOT_EXPR
2444 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
2446 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2447 if (code == PLUS_EXPR
2448 && operand_equal_p (def_rhs1, rhs1, 0))
2450 /* A + ~A -> -1. */
2451 code = INTEGER_CST;
2452 rhs1 = build_int_cst_type (TREE_TYPE (rhs1), -1);
2453 rhs2 = NULL_TREE;
2454 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2455 gcc_assert (gsi_stmt (*gsi) == stmt);
2456 gimple_set_modified (stmt, true);
2462 out:
2463 if (gimple_modified_p (stmt))
2465 fold_stmt_inplace (gsi);
2466 update_stmt (stmt);
2467 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
2468 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2469 return true;
2472 return false;
2475 /* Associate operands of a POINTER_PLUS_EXPR assignment at *GSI. Returns
2476 true if anything changed, false otherwise. */
2478 static bool
2479 associate_pointerplus (gimple_stmt_iterator *gsi)
2481 gimple stmt = gsi_stmt (*gsi);
2482 gimple def_stmt;
2483 tree ptr, rhs, algn;
2485 /* Pattern match
2486 tem = (sizetype) ptr;
2487 tem = tem & algn;
2488 tem = -tem;
2489 ... = ptr p+ tem;
2490 and produce the simpler form, which is easier to analyze with respect
2491 to alignment: ... = ptr & ~algn; */
2492 ptr = gimple_assign_rhs1 (stmt);
2493 rhs = gimple_assign_rhs2 (stmt);
2494 if (TREE_CODE (rhs) != SSA_NAME)
2495 return false;
2496 def_stmt = SSA_NAME_DEF_STMT (rhs);
2497 if (!is_gimple_assign (def_stmt)
2498 || gimple_assign_rhs_code (def_stmt) != NEGATE_EXPR)
2499 return false;
2500 rhs = gimple_assign_rhs1 (def_stmt);
2501 if (TREE_CODE (rhs) != SSA_NAME)
2502 return false;
2503 def_stmt = SSA_NAME_DEF_STMT (rhs);
2504 if (!is_gimple_assign (def_stmt)
2505 || gimple_assign_rhs_code (def_stmt) != BIT_AND_EXPR)
2506 return false;
2507 rhs = gimple_assign_rhs1 (def_stmt);
2508 algn = gimple_assign_rhs2 (def_stmt);
2509 if (TREE_CODE (rhs) != SSA_NAME
2510 || TREE_CODE (algn) != INTEGER_CST)
2511 return false;
2512 def_stmt = SSA_NAME_DEF_STMT (rhs);
2513 if (!is_gimple_assign (def_stmt)
2514 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
2515 return false;
2516 if (gimple_assign_rhs1 (def_stmt) != ptr)
2517 return false;
2519 algn = double_int_to_tree (TREE_TYPE (ptr),
2520 double_int_not (tree_to_double_int (algn)));
2521 gimple_assign_set_rhs_with_ops (gsi, BIT_AND_EXPR, ptr, algn);
2522 fold_stmt_inplace (gsi);
2523 update_stmt (stmt);
2525 return true;
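/* Sketch (values hypothetical): with algn == 7, the sequence
     tem_1 = (sizetype) ptr_2;
     tem_3 = tem_1 & 7;
     tem_4 = -tem_3;
     res_5 = ptr_2 p+ tem_4;
   collapses to res_5 = ptr_2 & ~7, i.e. ptr_2 rounded down to an
   8-byte boundary. */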
2528 /* Combine two conversions in a row for the second conversion at *GSI.
2529 Returns 1 if any changes were made, 2 if cfg-cleanup needs to
2530 run, and 0 otherwise. */
2532 static int
2533 combine_conversions (gimple_stmt_iterator *gsi)
2535 gimple stmt = gsi_stmt (*gsi);
2536 gimple def_stmt;
2537 tree op0, lhs;
2538 enum tree_code code = gimple_assign_rhs_code (stmt);
2539 enum tree_code code2;
2541 gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
2542 || code == FLOAT_EXPR
2543 || code == FIX_TRUNC_EXPR);
2545 lhs = gimple_assign_lhs (stmt);
2546 op0 = gimple_assign_rhs1 (stmt);
2547 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
2549 gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
2550 return 1;
2553 if (TREE_CODE (op0) != SSA_NAME)
2554 return 0;
2556 def_stmt = SSA_NAME_DEF_STMT (op0);
2557 if (!is_gimple_assign (def_stmt))
2558 return 0;
2560 code2 = gimple_assign_rhs_code (def_stmt);
2562 if (CONVERT_EXPR_CODE_P (code2) || code2 == FLOAT_EXPR)
2564 tree defop0 = gimple_assign_rhs1 (def_stmt);
2565 tree type = TREE_TYPE (lhs);
2566 tree inside_type = TREE_TYPE (defop0);
2567 tree inter_type = TREE_TYPE (op0);
2568 int inside_int = INTEGRAL_TYPE_P (inside_type);
2569 int inside_ptr = POINTER_TYPE_P (inside_type);
2570 int inside_float = FLOAT_TYPE_P (inside_type);
2571 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
2572 unsigned int inside_prec = TYPE_PRECISION (inside_type);
2573 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
2574 int inter_int = INTEGRAL_TYPE_P (inter_type);
2575 int inter_ptr = POINTER_TYPE_P (inter_type);
2576 int inter_float = FLOAT_TYPE_P (inter_type);
2577 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
2578 unsigned int inter_prec = TYPE_PRECISION (inter_type);
2579 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
2580 int final_int = INTEGRAL_TYPE_P (type);
2581 int final_ptr = POINTER_TYPE_P (type);
2582 int final_float = FLOAT_TYPE_P (type);
2583 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
2584 unsigned int final_prec = TYPE_PRECISION (type);
2585 int final_unsignedp = TYPE_UNSIGNED (type);
2587 /* In addition to the cases of two conversions in a row
2588 handled below, if we are converting something to its own
2589 type via an object of identical or wider precision, neither
2590 conversion is needed. */
2591 if (useless_type_conversion_p (type, inside_type)
2592 && (((inter_int || inter_ptr) && final_int)
2593 || (inter_float && final_float))
2594 && inter_prec >= final_prec)
2596 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2597 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2598 update_stmt (stmt);
2599 return remove_prop_source_from_use (op0) ? 2 : 1;
2602 /* Likewise, if the intermediate and initial types are either both
2603 float or both integer, we don't need the middle conversion if the
2604 former is wider than the latter and doesn't change the signedness
2605 (for integers). Avoid this if the final type is a pointer since
2606 then we sometimes need the middle conversion. Likewise if the
2607 final type has a precision not equal to the size of its mode. */
2608 if (((inter_int && inside_int)
2609 || (inter_float && inside_float)
2610 || (inter_vec && inside_vec))
2611 && inter_prec >= inside_prec
2612 && (inter_float || inter_vec
2613 || inter_unsignedp == inside_unsignedp)
2614 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
2615 && TYPE_MODE (type) == TYPE_MODE (inter_type))
2616 && ! final_ptr
2617 && (! final_vec || inter_prec == inside_prec))
2619 gimple_assign_set_rhs1 (stmt, defop0);
2620 update_stmt (stmt);
2621 return remove_prop_source_from_use (op0) ? 2 : 1;
2624 /* If we have a sign-extension of a zero-extended value, we can
2625 replace that by a single zero-extension. Likewise if the
2626 final conversion does not change precision we can drop the
2627 intermediate conversion. */
2628 if (inside_int && inter_int && final_int
2629 && ((inside_prec < inter_prec && inter_prec < final_prec
2630 && inside_unsignedp && !inter_unsignedp)
2631 || final_prec == inter_prec))
2633 gimple_assign_set_rhs1 (stmt, defop0);
2634 update_stmt (stmt);
2635 return remove_prop_source_from_use (op0) ? 2 : 1;
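/* E.g. (types hypothetical): for unsigned char u_1, the pair
     s_2 = (short) u_1;
     l_3 = (long) s_2;
   becomes l_3 = (long) u_1, since the zero-extension to short already
   determines the value. */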
2638 /* Two conversions in a row are not needed unless:
2639 - some conversion is floating-point (overstrict for now), or
2640 - some conversion is a vector (overstrict for now), or
2641 - the intermediate type is narrower than both initial and
2642 final, or
2643 - the intermediate type and innermost type differ in signedness,
2644 and the outermost type is wider than the intermediate, or
2645 - the initial type is a pointer type and the precisions of the
2646 intermediate and final types differ, or
2647 - the final type is a pointer type and the precisions of the
2648 initial and intermediate types differ. */
2649 if (! inside_float && ! inter_float && ! final_float
2650 && ! inside_vec && ! inter_vec && ! final_vec
2651 && (inter_prec >= inside_prec || inter_prec >= final_prec)
2652 && ! (inside_int && inter_int
2653 && inter_unsignedp != inside_unsignedp
2654 && inter_prec < final_prec)
2655 && ((inter_unsignedp && inter_prec > inside_prec)
2656 == (final_unsignedp && final_prec > inter_prec))
2657 && ! (inside_ptr && inter_prec != final_prec)
2658 && ! (final_ptr && inside_prec != inter_prec)
2659 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
2660 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
2662 gimple_assign_set_rhs1 (stmt, defop0);
2663 update_stmt (stmt);
2664 return remove_prop_source_from_use (op0) ? 2 : 1;
2667 /* A truncation to an unsigned type should be canonicalized as a
2668 bitwise AND with a mask. */
2669 if (final_int && inter_int && inside_int
2670 && final_prec == inside_prec
2671 && final_prec > inter_prec
2672 && inter_unsignedp)
2674 tree tem;
2675 tem = fold_build2 (BIT_AND_EXPR, inside_type,
2676 defop0,
2677 double_int_to_tree
2678 (inside_type, double_int_mask (inter_prec)));
2679 if (!useless_type_conversion_p (type, inside_type))
2681 tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
2682 GSI_SAME_STMT);
2683 gimple_assign_set_rhs1 (stmt, tem);
2685 else
2686 gimple_assign_set_rhs_from_tree (gsi, tem);
2687 update_stmt (gsi_stmt (*gsi));
2688 return 1;
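/* E.g. (types hypothetical): for int i_1, the pair
     u_2 = (unsigned short) i_1;
     r_3 = (int) u_2;
   is canonicalized to r_3 = i_1 & 65535. */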
2691 /* If we are converting an integer to a floating-point type that can
2692 represent it exactly and back to an integer, we can skip the
2693 floating-point conversion. */
2694 if (inside_int && inter_float && final_int &&
2695 (unsigned) significand_size (TYPE_MODE (inter_type))
2696 >= inside_prec - !inside_unsignedp)
2698 if (useless_type_conversion_p (type, inside_type))
2700 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2701 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2702 update_stmt (stmt);
2703 return remove_prop_source_from_use (op0) ? 2 : 1;
2705 else
2707 gimple_assign_set_rhs1 (stmt, defop0);
2708 gimple_assign_set_rhs_code (stmt, CONVERT_EXPR);
2709 update_stmt (stmt);
2710 return remove_prop_source_from_use (op0) ? 2 : 1;
2715 return 0;
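/* E.g. (types hypothetical): for int i_1 and d_2 = (double) i_1,
   r_3 = (int) d_2 simplifies to r_3 = i_1, since a double's 53-bit
   significand represents every 32-bit int exactly. */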
2718 /* Main entry point for the forward propagation and statement combine
2719 optimizer. */
2721 static unsigned int
2722 ssa_forward_propagate_and_combine (void)
2724 basic_block bb;
2725 unsigned int todoflags = 0;
2727 cfg_changed = false;
2729 FOR_EACH_BB (bb)
2731 gimple_stmt_iterator gsi;
2733 /* Apply forward propagation to all stmts in the basic-block.
2734 Note we update GSI within the loop as necessary. */
2735 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
2737 gimple stmt = gsi_stmt (gsi);
2738 tree lhs, rhs;
2739 enum tree_code code;
2741 if (!is_gimple_assign (stmt))
2743 gsi_next (&gsi);
2744 continue;
2747 lhs = gimple_assign_lhs (stmt);
2748 rhs = gimple_assign_rhs1 (stmt);
2749 code = gimple_assign_rhs_code (stmt);
2750 if (TREE_CODE (lhs) != SSA_NAME
2751 || has_zero_uses (lhs))
2753 gsi_next (&gsi);
2754 continue;
2757 /* If this statement sets an SSA_NAME to an address,
2758 try to propagate the address into the uses of the SSA_NAME. */
2759 if (code == ADDR_EXPR
2760 /* Handle pointer conversions on invariant addresses
2761 as well, as this is valid gimple. */
2762 || (CONVERT_EXPR_CODE_P (code)
2763 && TREE_CODE (rhs) == ADDR_EXPR
2764 && POINTER_TYPE_P (TREE_TYPE (lhs))))
2766 tree base = get_base_address (TREE_OPERAND (rhs, 0));
2767 if ((!base
2768 || !DECL_P (base)
2769 || decl_address_invariant_p (base))
2770 && !stmt_references_abnormal_ssa_name (stmt)
2771 && forward_propagate_addr_expr (lhs, rhs))
2773 release_defs (stmt);
2774 todoflags |= TODO_remove_unused_locals;
2775 gsi_remove (&gsi, true);
2777 else
2778 gsi_next (&gsi);
2780 else if (code == POINTER_PLUS_EXPR)
2782 tree off = gimple_assign_rhs2 (stmt);
2783 if (TREE_CODE (off) == INTEGER_CST
2784 && can_propagate_from (stmt)
2785 && !simple_iv_increment_p (stmt)
2786 /* ??? Better adjust the interface to that function
2787 instead of building new trees here. */
2788 && forward_propagate_addr_expr
2789 (lhs,
2790 build1_loc (gimple_location (stmt),
2791 ADDR_EXPR, TREE_TYPE (rhs),
2792 fold_build2 (MEM_REF,
2793 TREE_TYPE (TREE_TYPE (rhs)),
2794 rhs,
2795 fold_convert (ptr_type_node,
2796 off)))))
2798 release_defs (stmt);
2799 todoflags |= TODO_remove_unused_locals;
2800 gsi_remove (&gsi, true);
2802 else if (is_gimple_min_invariant (rhs))
2804 /* Make sure to fold &a[0] + off_1 here. */
2805 fold_stmt_inplace (&gsi);
2806 update_stmt (stmt);
2807 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2808 gsi_next (&gsi);
2810 else
2811 gsi_next (&gsi);
2813 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2815 if (forward_propagate_comparison (&gsi))
2816 cfg_changed = true;
2818 else
2819 gsi_next (&gsi);
2822 /* Combine stmts with the stmts defining their operands.
2823 Note we update GSI within the loop as necessary. */
2824 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
2826 gimple stmt = gsi_stmt (gsi);
2827 bool changed = false;
2829 /* Mark stmt as potentially needing revisiting. */
2830 gimple_set_plf (stmt, GF_PLF_1, false);
2832 switch (gimple_code (stmt))
2834 case GIMPLE_ASSIGN:
2836 tree rhs1 = gimple_assign_rhs1 (stmt);
2837 enum tree_code code = gimple_assign_rhs_code (stmt);
2839 if ((code == BIT_NOT_EXPR
2840 || code == NEGATE_EXPR)
2841 && TREE_CODE (rhs1) == SSA_NAME)
2842 changed = simplify_not_neg_expr (&gsi);
2843 else if (code == COND_EXPR
2844 || code == VEC_COND_EXPR)
2846 /* In this case the entire COND_EXPR is in rhs1. */
2847 if (forward_propagate_into_cond (&gsi)
2848 || combine_cond_exprs (&gsi))
2850 changed = true;
2851 stmt = gsi_stmt (gsi);
2854 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2856 int did_something;
2857 did_something = forward_propagate_into_comparison (&gsi);
2858 if (did_something == 2)
2859 cfg_changed = true;
2860 changed = did_something != 0;
2862 else if (code == BIT_AND_EXPR
2863 || code == BIT_IOR_EXPR
2864 || code == BIT_XOR_EXPR)
2865 changed = simplify_bitwise_binary (&gsi);
2866 else if (code == PLUS_EXPR
2867 || code == MINUS_EXPR)
2868 changed = associate_plusminus (&gsi);
2869 else if (code == POINTER_PLUS_EXPR)
2870 changed = associate_pointerplus (&gsi);
2871 else if (CONVERT_EXPR_CODE_P (code)
2872 || code == FLOAT_EXPR
2873 || code == FIX_TRUNC_EXPR)
2875 int did_something = combine_conversions (&gsi);
2876 if (did_something == 2)
2877 cfg_changed = true;
2878 changed = did_something != 0;
2880 break;
2883 case GIMPLE_SWITCH:
2884 changed = simplify_gimple_switch (stmt);
2885 break;
2887 case GIMPLE_COND:
2889 int did_something;
2890 did_something = forward_propagate_into_gimple_cond (stmt);
2891 if (did_something == 2)
2892 cfg_changed = true;
2893 changed = did_something != 0;
2894 break;
2897 case GIMPLE_CALL:
2899 tree callee = gimple_call_fndecl (stmt);
2900 if (callee != NULL_TREE
2901 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
2902 changed = simplify_builtin_call (&gsi, callee);
2903 break;
2906 default:;
2909 if (changed)
2911 /* If the stmt changed then re-visit it and the statements
2912 inserted before it. */
2913 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
2914 if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
2915 break;
2916 if (gsi_end_p (gsi))
2917 gsi = gsi_start_bb (bb);
2918 else
2919 gsi_next (&gsi);
2921 else
2923 /* Stmt no longer needs to be revisited. */
2924 gimple_set_plf (stmt, GF_PLF_1, true);
2925 gsi_next (&gsi);
2930 if (cfg_changed)
2931 todoflags |= TODO_cleanup_cfg;
2933 return todoflags;
2937 static bool
2938 gate_forwprop (void)
2940 return flag_tree_forwprop;
2943 struct gimple_opt_pass pass_forwprop =
2946 GIMPLE_PASS,
2947 "forwprop", /* name */
2948 gate_forwprop, /* gate */
2949 ssa_forward_propagate_and_combine, /* execute */
2950 NULL, /* sub */
2951 NULL, /* next */
2952 0, /* static_pass_number */
2953 TV_TREE_FORWPROP, /* tv_id */
2954 PROP_cfg | PROP_ssa, /* properties_required */
2955 0, /* properties_provided */
2956 0, /* properties_destroyed */
2957 0, /* todo_flags_start */
2958 TODO_ggc_collect
2959 | TODO_update_ssa
2960 | TODO_verify_ssa /* todo_flags_finish */