2014-10-24 Christophe Lyon <christophe.lyon@linaro.org>
[official-gcc.git] / gcc / tree-ssa-forwprop.c
blob0284301ab2db8e921f2a30d088d1abb19c6e06cf
1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "stor-layout.h"
26 #include "tm_p.h"
27 #include "basic-block.h"
28 #include "gimple-pretty-print.h"
29 #include "tree-ssa-alias.h"
30 #include "internal-fn.h"
31 #include "gimple-fold.h"
32 #include "tree-eh.h"
33 #include "gimple-expr.h"
34 #include "is-a.h"
35 #include "gimple.h"
36 #include "gimplify.h"
37 #include "gimple-iterator.h"
38 #include "gimplify-me.h"
39 #include "gimple-ssa.h"
40 #include "tree-cfg.h"
41 #include "tree-phinodes.h"
42 #include "ssa-iterators.h"
43 #include "stringpool.h"
44 #include "tree-ssanames.h"
45 #include "expr.h"
46 #include "tree-dfa.h"
47 #include "tree-pass.h"
48 #include "langhooks.h"
49 #include "flags.h"
50 #include "diagnostic.h"
51 #include "expr.h"
52 #include "cfgloop.h"
53 #include "optabs.h"
54 #include "tree-ssa-propagate.h"
55 #include "tree-ssa-dom.h"
56 #include "builtins.h"
58 /* This pass propagates the RHS of assignment statements into use
59 sites of the LHS of the assignment. It's basically a specialized
60 form of tree combination. It is hoped all of this can disappear
61 when we have a generalized tree combiner.
63 One class of common cases we handle is forward propagating a single use
64 variable into a COND_EXPR.
66 bb0:
67 x = a COND b;
68 if (x) goto ... else goto ...
70 Will be transformed into:
72 bb0:
73 if (a COND b) goto ... else goto ...
75 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
77 Or (assuming c1 and c2 are constants):
79 bb0:
80 x = a + c1;
81 if (x EQ/NEQ c2) goto ... else goto ...
83 Will be transformed into:
85 bb0:
86 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
88 Similarly for x = a - c1.
92 bb0:
93 x = !a
94 if (x) goto ... else goto ...
96 Will be transformed into:
98 bb0:
99 if (a == 0) goto ... else goto ...
101 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
102 For these cases, we propagate A into all, possibly more than one,
103 COND_EXPRs that use X.
107 bb0:
108 x = (typecast) a
109 if (x) goto ... else goto ...
111 Will be transformed into:
113 bb0:
114 if (a != 0) goto ... else goto ...
116 (Assuming a is an integral type and x is a boolean or x is an
117 integral and a is a boolean.)
119 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
120 For these cases, we propagate A into all, possibly more than one,
121 COND_EXPRs that use X.
123 In addition to eliminating the variable and the statement which assigns
124 a value to the variable, we may be able to later thread the jump without
125 adding insane complexity in the dominator optimizer.
127 Also note these transformations can cascade. We handle this by having
128 a worklist of COND_EXPR statements to examine. As we make a change to
129 a statement, we put it back on the worklist to examine on the next
130 iteration of the main loop.
132 A second class of propagation opportunities arises for ADDR_EXPR
133 nodes.
135 ptr = &x->y->z;
136 res = *ptr;
138 Will get turned into
140 res = x->y->z;
143 ptr = (type1*)&type2var;
144 res = *ptr
146 Will get turned into (if type1 and type2 are the same size
147 and neither have volatile on them):
148 res = VIEW_CONVERT_EXPR<type1>(type2var)
152 ptr = &x[0];
153 ptr2 = ptr + <constant>;
155 Will get turned into
157 ptr2 = &x[constant/elementsize];
161 ptr = &x[0];
162 offset = index * element_size;
163 offset_p = (pointer) offset;
164 ptr2 = ptr + offset_p
166 Will get turned into:
168 ptr2 = &x[index];
171 ssa = (int) decl
172 res = ssa & 1
174 Provided that decl has known alignment >= 2, will get turned into
176 res = 0
178 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
179 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
180 {NOT_EXPR,NEG_EXPR}.
182 This will (of course) be extended as other needs arise. */
184 static bool forward_propagate_addr_expr (tree, tree, bool);
186 /* Set to true if we delete dead edges during the optimization. */
187 static bool cfg_changed;
189 static tree rhs_to_tree (tree type, gimple stmt);
191 /* Get the next statement we can propagate NAME's value into skipping
192 trivial copies. Returns the statement that is suitable as a
193 propagation destination or NULL_TREE if there is no such one.
194 This only returns destinations in a single-use chain. FINAL_NAME_P
195 if non-NULL is written to the ssa name that represents the use. */
197 static gimple
198 get_prop_dest_stmt (tree name, tree *final_name_p)
200 use_operand_p use;
201 gimple use_stmt;
203 do {
204 /* If name has multiple uses, bail out. */
205 if (!single_imm_use (name, &use, &use_stmt))
206 return NULL;
208 /* If this is not a trivial copy, we found it. */
209 if (!gimple_assign_ssa_name_copy_p (use_stmt)
210 || gimple_assign_rhs1 (use_stmt) != name)
211 break;
213 /* Continue searching uses of the copy destination. */
214 name = gimple_assign_lhs (use_stmt);
215 } while (1);
217 if (final_name_p)
218 *final_name_p = name;
220 return use_stmt;
223 /* Get the statement we can propagate from into NAME skipping
224 trivial copies. Returns the statement which defines the
225 propagation source or NULL_TREE if there is no such one.
226 If SINGLE_USE_ONLY is set considers only sources which have
227 a single use chain up to NAME. If SINGLE_USE_P is non-null,
228 it is set to whether the chain to NAME is a single use chain
229 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
231 static gimple
232 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
234 bool single_use = true;
236 do {
237 gimple def_stmt = SSA_NAME_DEF_STMT (name);
239 if (!has_single_use (name))
241 single_use = false;
242 if (single_use_only)
243 return NULL;
246 /* If name is defined by a PHI node or is the default def, bail out. */
247 if (!is_gimple_assign (def_stmt))
248 return NULL;
250 /* If def_stmt is a simple copy, continue looking. */
251 if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
252 name = gimple_assign_rhs1 (def_stmt);
253 else
255 if (!single_use_only && single_use_p)
256 *single_use_p = single_use;
258 return def_stmt;
260 } while (1);
263 /* Checks if the destination ssa name in DEF_STMT can be used as
264 propagation source. Returns true if so, otherwise false. */
266 static bool
267 can_propagate_from (gimple def_stmt)
269 gcc_assert (is_gimple_assign (def_stmt));
271 /* If the rhs has side-effects we cannot propagate from it. */
272 if (gimple_has_volatile_ops (def_stmt))
273 return false;
275 /* If the rhs is a load we cannot propagate from it. */
276 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
277 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
278 return false;
280 /* Constants can be always propagated. */
281 if (gimple_assign_single_p (def_stmt)
282 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
283 return true;
285 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
286 if (stmt_references_abnormal_ssa_name (def_stmt))
287 return false;
289 /* If the definition is a conversion of a pointer to a function type,
290 then we can not apply optimizations as some targets require
291 function pointers to be canonicalized and in this case this
292 optimization could eliminate a necessary canonicalization. */
293 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
295 tree rhs = gimple_assign_rhs1 (def_stmt);
296 if (POINTER_TYPE_P (TREE_TYPE (rhs))
297 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
298 return false;
301 return true;
304 /* Remove a chain of dead statements starting at the definition of
305 NAME. The chain is linked via the first operand of the defining statements.
306 If NAME was replaced in its only use then this function can be used
307 to clean up dead stmts. The function handles already released SSA
308 names gracefully.
309 Returns true if cleanup-cfg has to run. */
311 static bool
312 remove_prop_source_from_use (tree name)
314 gimple_stmt_iterator gsi;
315 gimple stmt;
316 bool cfg_changed = false;
318 do {
319 basic_block bb;
321 if (SSA_NAME_IN_FREE_LIST (name)
322 || SSA_NAME_IS_DEFAULT_DEF (name)
323 || !has_zero_uses (name))
324 return cfg_changed;
326 stmt = SSA_NAME_DEF_STMT (name);
327 if (gimple_code (stmt) == GIMPLE_PHI
328 || gimple_has_side_effects (stmt))
329 return cfg_changed;
331 bb = gimple_bb (stmt);
332 gsi = gsi_for_stmt (stmt);
333 unlink_stmt_vdef (stmt);
334 if (gsi_remove (&gsi, true))
335 cfg_changed |= gimple_purge_dead_eh_edges (bb);
336 release_defs (stmt);
338 name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
339 } while (name && TREE_CODE (name) == SSA_NAME);
341 return cfg_changed;
344 /* Return the rhs of a gimple_assign STMT in a form of a single tree,
345 converted to type TYPE.
347 This should disappear, but is needed so we can combine expressions and use
348 the fold() interfaces. Long term, we need to develop folding and combine
349 routines that deal with gimple exclusively . */
351 static tree
352 rhs_to_tree (tree type, gimple stmt)
354 location_t loc = gimple_location (stmt);
355 enum tree_code code = gimple_assign_rhs_code (stmt);
356 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
357 return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
358 gimple_assign_rhs2 (stmt),
359 gimple_assign_rhs3 (stmt));
360 else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
361 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
362 gimple_assign_rhs2 (stmt));
363 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
364 return build1 (code, type, gimple_assign_rhs1 (stmt));
365 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
366 return gimple_assign_rhs1 (stmt);
367 else
368 gcc_unreachable ();
371 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
372 the folded result in a form suitable for COND_EXPR_COND or
373 NULL_TREE, if there is no suitable simplified form. If
374 INVARIANT_ONLY is true only gimple_min_invariant results are
375 considered simplified. */
377 static tree
378 combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
379 tree op0, tree op1, bool invariant_only)
381 tree t;
383 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
385 fold_defer_overflow_warnings ();
386 t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
387 if (!t)
389 fold_undefer_overflow_warnings (false, NULL, 0);
390 return NULL_TREE;
393 /* Require that we got a boolean type out if we put one in. */
394 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
396 /* Canonicalize the combined condition for use in a COND_EXPR. */
397 t = canonicalize_cond_expr_cond (t);
399 /* Bail out if we required an invariant but didn't get one. */
400 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
402 fold_undefer_overflow_warnings (false, NULL, 0);
403 return NULL_TREE;
406 fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);
408 return t;
411 /* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
412 of its operand. Return a new comparison tree or NULL_TREE if there
413 were no simplifying combines. */
415 static tree
416 forward_propagate_into_comparison_1 (gimple stmt,
417 enum tree_code code, tree type,
418 tree op0, tree op1)
420 tree tmp = NULL_TREE;
421 tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
422 bool single_use0_p = false, single_use1_p = false;
424 /* For comparisons use the first operand, that is likely to
425 simplify comparisons against constants. */
426 if (TREE_CODE (op0) == SSA_NAME)
428 gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
429 if (def_stmt && can_propagate_from (def_stmt))
431 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
432 tmp = combine_cond_expr_cond (stmt, code, type,
433 rhs0, op1, !single_use0_p);
434 if (tmp)
435 return tmp;
439 /* If that wasn't successful, try the second operand. */
440 if (TREE_CODE (op1) == SSA_NAME)
442 gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
443 if (def_stmt && can_propagate_from (def_stmt))
445 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
446 tmp = combine_cond_expr_cond (stmt, code, type,
447 op0, rhs1, !single_use1_p);
448 if (tmp)
449 return tmp;
453 /* If that wasn't successful either, try both operands. */
454 if (rhs0 != NULL_TREE
455 && rhs1 != NULL_TREE)
456 tmp = combine_cond_expr_cond (stmt, code, type,
457 rhs0, rhs1,
458 !(single_use0_p && single_use1_p));
460 return tmp;
463 /* Propagate from the ssa name definition statements of the assignment
464 from a comparison at *GSI into the conditional if that simplifies it.
465 Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
466 otherwise returns 0. */
468 static int
469 forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
471 gimple stmt = gsi_stmt (*gsi);
472 tree tmp;
473 bool cfg_changed = false;
474 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
475 tree rhs1 = gimple_assign_rhs1 (stmt);
476 tree rhs2 = gimple_assign_rhs2 (stmt);
478 /* Combine the comparison with defining statements. */
479 tmp = forward_propagate_into_comparison_1 (stmt,
480 gimple_assign_rhs_code (stmt),
481 type, rhs1, rhs2);
482 if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
484 gimple_assign_set_rhs_from_tree (gsi, tmp);
485 fold_stmt (gsi);
486 update_stmt (gsi_stmt (*gsi));
488 if (TREE_CODE (rhs1) == SSA_NAME)
489 cfg_changed |= remove_prop_source_from_use (rhs1);
490 if (TREE_CODE (rhs2) == SSA_NAME)
491 cfg_changed |= remove_prop_source_from_use (rhs2);
492 return cfg_changed ? 2 : 1;
495 return 0;
498 /* Propagate from the ssa name definition statements of COND_EXPR
499 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
500 Returns zero if no statement was changed, one if there were
501 changes and two if cfg_cleanup needs to run.
503 This must be kept in sync with forward_propagate_into_cond. */
505 static int
506 forward_propagate_into_gimple_cond (gimple stmt)
508 tree tmp;
509 enum tree_code code = gimple_cond_code (stmt);
510 bool cfg_changed = false;
511 tree rhs1 = gimple_cond_lhs (stmt);
512 tree rhs2 = gimple_cond_rhs (stmt);
514 /* We can do tree combining on SSA_NAME and comparison expressions. */
515 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
516 return 0;
518 tmp = forward_propagate_into_comparison_1 (stmt, code,
519 boolean_type_node,
520 rhs1, rhs2);
521 if (tmp)
523 if (dump_file && tmp)
525 fprintf (dump_file, " Replaced '");
526 print_gimple_expr (dump_file, stmt, 0, 0);
527 fprintf (dump_file, "' with '");
528 print_generic_expr (dump_file, tmp, 0);
529 fprintf (dump_file, "'\n");
532 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
533 update_stmt (stmt);
535 if (TREE_CODE (rhs1) == SSA_NAME)
536 cfg_changed |= remove_prop_source_from_use (rhs1);
537 if (TREE_CODE (rhs2) == SSA_NAME)
538 cfg_changed |= remove_prop_source_from_use (rhs2);
539 return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
542 /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges. */
543 if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
544 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
545 && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
546 && ((code == EQ_EXPR
547 && integer_zerop (rhs2))
548 || (code == NE_EXPR
549 && integer_onep (rhs2))))
551 basic_block bb = gimple_bb (stmt);
552 gimple_cond_set_code (stmt, NE_EXPR);
553 gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
554 EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
555 EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
556 return 1;
559 return 0;
563 /* Propagate from the ssa name definition statements of COND_EXPR
564 in the rhs of statement STMT into the conditional if that simplifies it.
565 Returns true zero if the stmt was changed. */
567 static bool
568 forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
570 gimple stmt = gsi_stmt (*gsi_p);
571 tree tmp = NULL_TREE;
572 tree cond = gimple_assign_rhs1 (stmt);
573 enum tree_code code = gimple_assign_rhs_code (stmt);
574 bool swap = false;
576 /* We can do tree combining on SSA_NAME and comparison expressions. */
577 if (COMPARISON_CLASS_P (cond))
578 tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
579 TREE_TYPE (cond),
580 TREE_OPERAND (cond, 0),
581 TREE_OPERAND (cond, 1));
582 else if (TREE_CODE (cond) == SSA_NAME)
584 enum tree_code def_code;
585 tree name = cond;
586 gimple def_stmt = get_prop_source_stmt (name, true, NULL);
587 if (!def_stmt || !can_propagate_from (def_stmt))
588 return 0;
590 def_code = gimple_assign_rhs_code (def_stmt);
591 if (TREE_CODE_CLASS (def_code) == tcc_comparison)
592 tmp = fold_build2_loc (gimple_location (def_stmt),
593 def_code,
594 TREE_TYPE (cond),
595 gimple_assign_rhs1 (def_stmt),
596 gimple_assign_rhs2 (def_stmt));
597 else if (code == COND_EXPR
598 && ((def_code == BIT_NOT_EXPR
599 && TYPE_PRECISION (TREE_TYPE (cond)) == 1)
600 || (def_code == BIT_XOR_EXPR
601 && integer_onep (gimple_assign_rhs2 (def_stmt)))))
603 tmp = gimple_assign_rhs1 (def_stmt);
604 swap = true;
608 if (tmp
609 && is_gimple_condexpr (tmp))
611 if (dump_file && tmp)
613 fprintf (dump_file, " Replaced '");
614 print_generic_expr (dump_file, cond, 0);
615 fprintf (dump_file, "' with '");
616 print_generic_expr (dump_file, tmp, 0);
617 fprintf (dump_file, "'\n");
620 if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
621 : integer_onep (tmp))
622 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
623 else if (integer_zerop (tmp))
624 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
625 else
627 gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
628 if (swap)
630 tree t = gimple_assign_rhs2 (stmt);
631 gimple_assign_set_rhs2 (stmt, gimple_assign_rhs3 (stmt));
632 gimple_assign_set_rhs3 (stmt, t);
635 stmt = gsi_stmt (*gsi_p);
636 update_stmt (stmt);
638 return true;
641 return 0;
644 /* Propagate from the ssa name definition statements of COND_EXPR
645 values in the rhs of statement STMT into the conditional arms
646 if that simplifies it.
647 Returns true if the stmt was changed. */
649 static bool
650 combine_cond_exprs (gimple_stmt_iterator *gsi_p)
652 gimple stmt = gsi_stmt (*gsi_p);
653 tree cond, val1, val2;
654 bool changed = false;
656 cond = gimple_assign_rhs1 (stmt);
657 val1 = gimple_assign_rhs2 (stmt);
658 if (TREE_CODE (val1) == SSA_NAME)
660 gimple def_stmt = SSA_NAME_DEF_STMT (val1);
661 if (is_gimple_assign (def_stmt)
662 && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
663 && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
665 val1 = unshare_expr (gimple_assign_rhs2 (def_stmt));
666 gimple_assign_set_rhs2 (stmt, val1);
667 changed = true;
670 val2 = gimple_assign_rhs3 (stmt);
671 if (TREE_CODE (val2) == SSA_NAME)
673 gimple def_stmt = SSA_NAME_DEF_STMT (val2);
674 if (is_gimple_assign (def_stmt)
675 && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
676 && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
678 val2 = unshare_expr (gimple_assign_rhs3 (def_stmt));
679 gimple_assign_set_rhs3 (stmt, val2);
680 changed = true;
683 if (operand_equal_p (val1, val2, 0))
685 gimple_assign_set_rhs_from_tree (gsi_p, val1);
686 stmt = gsi_stmt (*gsi_p);
687 changed = true;
690 if (changed)
691 update_stmt (stmt);
693 return changed;
696 /* We've just substituted an ADDR_EXPR into stmt. Update all the
697 relevant data structures to match. */
699 static void
700 tidy_after_forward_propagate_addr (gimple stmt)
702 /* We may have turned a trapping insn into a non-trapping insn. */
703 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
704 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
705 cfg_changed = true;
707 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
708 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
711 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
712 ADDR_EXPR <whatever>.
714 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
715 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
716 node or for recovery of array indexing from pointer arithmetic.
718 Return true if the propagation was successful (the propagation can
719 be not totally successful, yet things may have been changed). */
721 static bool
722 forward_propagate_addr_expr_1 (tree name, tree def_rhs,
723 gimple_stmt_iterator *use_stmt_gsi,
724 bool single_use_p)
726 tree lhs, rhs, rhs2, array_ref;
727 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
728 enum tree_code rhs_code;
729 bool res = true;
731 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
733 lhs = gimple_assign_lhs (use_stmt);
734 rhs_code = gimple_assign_rhs_code (use_stmt);
735 rhs = gimple_assign_rhs1 (use_stmt);
737 /* Do not perform copy-propagation but recurse through copy chains. */
738 if (TREE_CODE (lhs) == SSA_NAME
739 && rhs_code == SSA_NAME)
740 return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
742 /* The use statement could be a conversion. Recurse to the uses of the
743 lhs as copyprop does not copy through pointer to integer to pointer
744 conversions and FRE does not catch all cases either.
745 Treat the case of a single-use name and
746 a conversion to def_rhs type separate, though. */
747 if (TREE_CODE (lhs) == SSA_NAME
748 && CONVERT_EXPR_CODE_P (rhs_code))
750 /* If there is a point in a conversion chain where the types match
751 so we can remove a conversion re-materialize the address here
752 and stop. */
753 if (single_use_p
754 && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
756 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
757 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
758 return true;
761 /* Else recurse if the conversion preserves the address value. */
762 if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
763 || POINTER_TYPE_P (TREE_TYPE (lhs)))
764 && (TYPE_PRECISION (TREE_TYPE (lhs))
765 >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
766 return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
768 return false;
771 /* If this isn't a conversion chain from this on we only can propagate
772 into compatible pointer contexts. */
773 if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
774 return false;
776 /* Propagate through constant pointer adjustments. */
777 if (TREE_CODE (lhs) == SSA_NAME
778 && rhs_code == POINTER_PLUS_EXPR
779 && rhs == name
780 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
782 tree new_def_rhs;
783 /* As we come here with non-invariant addresses in def_rhs we need
784 to make sure we can build a valid constant offsetted address
785 for further propagation. Simply rely on fold building that
786 and check after the fact. */
787 new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
788 def_rhs,
789 fold_convert (ptr_type_node,
790 gimple_assign_rhs2 (use_stmt)));
791 if (TREE_CODE (new_def_rhs) == MEM_REF
792 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
793 return false;
794 new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
795 TREE_TYPE (rhs));
797 /* Recurse. If we could propagate into all uses of lhs do not
798 bother to replace into the current use but just pretend we did. */
799 if (TREE_CODE (new_def_rhs) == ADDR_EXPR
800 && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
801 return true;
803 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
804 gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
805 new_def_rhs, NULL_TREE);
806 else if (is_gimple_min_invariant (new_def_rhs))
807 gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
808 new_def_rhs, NULL_TREE);
809 else
810 return false;
811 gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
812 update_stmt (use_stmt);
813 return true;
816 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
817 ADDR_EXPR will not appear on the LHS. */
818 tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
819 while (handled_component_p (*lhsp))
820 lhsp = &TREE_OPERAND (*lhsp, 0);
821 lhs = *lhsp;
823 /* Now see if the LHS node is a MEM_REF using NAME. If so,
824 propagate the ADDR_EXPR into the use of NAME and fold the result. */
825 if (TREE_CODE (lhs) == MEM_REF
826 && TREE_OPERAND (lhs, 0) == name)
828 tree def_rhs_base;
829 HOST_WIDE_INT def_rhs_offset;
830 /* If the address is invariant we can always fold it. */
831 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
832 &def_rhs_offset)))
834 offset_int off = mem_ref_offset (lhs);
835 tree new_ptr;
836 off += def_rhs_offset;
837 if (TREE_CODE (def_rhs_base) == MEM_REF)
839 off += mem_ref_offset (def_rhs_base);
840 new_ptr = TREE_OPERAND (def_rhs_base, 0);
842 else
843 new_ptr = build_fold_addr_expr (def_rhs_base);
844 TREE_OPERAND (lhs, 0) = new_ptr;
845 TREE_OPERAND (lhs, 1)
846 = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
847 tidy_after_forward_propagate_addr (use_stmt);
848 /* Continue propagating into the RHS if this was not the only use. */
849 if (single_use_p)
850 return true;
852 /* If the LHS is a plain dereference and the value type is the same as
853 that of the pointed-to type of the address we can put the
854 dereferenced address on the LHS preserving the original alias-type. */
855 else if (integer_zerop (TREE_OPERAND (lhs, 1))
856 && ((gimple_assign_lhs (use_stmt) == lhs
857 && useless_type_conversion_p
858 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
859 TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
860 || types_compatible_p (TREE_TYPE (lhs),
861 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
862 /* Don't forward anything into clobber stmts if it would result
863 in the lhs no longer being a MEM_REF. */
864 && (!gimple_clobber_p (use_stmt)
865 || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
867 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
868 tree new_offset, new_base, saved, new_lhs;
869 while (handled_component_p (*def_rhs_basep))
870 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
871 saved = *def_rhs_basep;
872 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
874 new_base = TREE_OPERAND (*def_rhs_basep, 0);
875 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
876 TREE_OPERAND (*def_rhs_basep, 1));
878 else
880 new_base = build_fold_addr_expr (*def_rhs_basep);
881 new_offset = TREE_OPERAND (lhs, 1);
883 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
884 new_base, new_offset);
885 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
886 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
887 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
888 new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
889 *lhsp = new_lhs;
890 TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
891 TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
892 *def_rhs_basep = saved;
893 tidy_after_forward_propagate_addr (use_stmt);
894 /* Continue propagating into the RHS if this was not the
895 only use. */
896 if (single_use_p)
897 return true;
899 else
900 /* We can have a struct assignment dereferencing our name twice.
901 Note that we didn't propagate into the lhs to not falsely
902 claim we did when propagating into the rhs. */
903 res = false;
906 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
907 nodes from the RHS. */
908 tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
909 if (TREE_CODE (*rhsp) == ADDR_EXPR)
910 rhsp = &TREE_OPERAND (*rhsp, 0);
911 while (handled_component_p (*rhsp))
912 rhsp = &TREE_OPERAND (*rhsp, 0);
913 rhs = *rhsp;
915 /* Now see if the RHS node is a MEM_REF using NAME. If so,
916 propagate the ADDR_EXPR into the use of NAME and fold the result. */
917 if (TREE_CODE (rhs) == MEM_REF
918 && TREE_OPERAND (rhs, 0) == name)
920 tree def_rhs_base;
921 HOST_WIDE_INT def_rhs_offset;
922 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
923 &def_rhs_offset)))
925 offset_int off = mem_ref_offset (rhs);
926 tree new_ptr;
927 off += def_rhs_offset;
928 if (TREE_CODE (def_rhs_base) == MEM_REF)
930 off += mem_ref_offset (def_rhs_base);
931 new_ptr = TREE_OPERAND (def_rhs_base, 0);
933 else
934 new_ptr = build_fold_addr_expr (def_rhs_base);
935 TREE_OPERAND (rhs, 0) = new_ptr;
936 TREE_OPERAND (rhs, 1)
937 = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
938 fold_stmt_inplace (use_stmt_gsi);
939 tidy_after_forward_propagate_addr (use_stmt);
940 return res;
942 /* If the RHS is a plain dereference and the value type is the same as
943 that of the pointed-to type of the address we can put the
944 dereferenced address on the RHS preserving the original alias-type. */
945 else if (integer_zerop (TREE_OPERAND (rhs, 1))
946 && ((gimple_assign_rhs1 (use_stmt) == rhs
947 && useless_type_conversion_p
948 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
949 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
950 || types_compatible_p (TREE_TYPE (rhs),
951 TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
953 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
954 tree new_offset, new_base, saved, new_rhs;
955 while (handled_component_p (*def_rhs_basep))
956 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
957 saved = *def_rhs_basep;
958 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
960 new_base = TREE_OPERAND (*def_rhs_basep, 0);
961 new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
962 TREE_OPERAND (*def_rhs_basep, 1));
964 else
966 new_base = build_fold_addr_expr (*def_rhs_basep);
967 new_offset = TREE_OPERAND (rhs, 1);
969 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
970 new_base, new_offset);
971 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
972 TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
973 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
974 new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
975 *rhsp = new_rhs;
976 TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
977 TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
978 *def_rhs_basep = saved;
979 fold_stmt_inplace (use_stmt_gsi);
980 tidy_after_forward_propagate_addr (use_stmt);
981 return res;
985 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
986 is nothing to do. */
987 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
988 || gimple_assign_rhs1 (use_stmt) != name)
989 return false;
991 /* The remaining cases are all for turning pointer arithmetic into
992 array indexing. They only apply when we have the address of
993 element zero in an array. If that is not the case then there
994 is nothing to do. */
995 array_ref = TREE_OPERAND (def_rhs, 0);
996 if ((TREE_CODE (array_ref) != ARRAY_REF
997 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
998 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
999 && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
1000 return false;
1002 rhs2 = gimple_assign_rhs2 (use_stmt);
1003 /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2. */
1004 if (TREE_CODE (rhs2) == INTEGER_CST)
1006 tree new_rhs = build1_loc (gimple_location (use_stmt),
1007 ADDR_EXPR, TREE_TYPE (def_rhs),
1008 fold_build2 (MEM_REF,
1009 TREE_TYPE (TREE_TYPE (def_rhs)),
1010 unshare_expr (def_rhs),
1011 fold_convert (ptr_type_node,
1012 rhs2)));
1013 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
1014 use_stmt = gsi_stmt (*use_stmt_gsi);
1015 update_stmt (use_stmt);
1016 tidy_after_forward_propagate_addr (use_stmt);
1017 return true;
1020 return false;
1023 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
1025 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
1026 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
1027 node or for recovery of array indexing from pointer arithmetic.
1029 PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
1030 the single use in the previous invocation. Pass true when calling
1031 this as toplevel.
1033 Returns true, if all uses have been propagated into. */
1035 static bool
1036 forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
1038 imm_use_iterator iter;
1039 gimple use_stmt;
1040 bool all = true;
1041 bool single_use_p = parent_single_use_p && has_single_use (name);
1043 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
1045 bool result;
1046 tree use_rhs;
1048 /* If the use is not in a simple assignment statement, then
1049 there is nothing we can do. */
1050 if (!is_gimple_assign (use_stmt))
1052 if (!is_gimple_debug (use_stmt))
1053 all = false;
1054 continue;
1057 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1058 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
1059 single_use_p);
1060 /* If the use has moved to a different statement adjust
1061 the update machinery for the old statement too. */
1062 if (use_stmt != gsi_stmt (gsi))
1064 update_stmt (use_stmt);
1065 use_stmt = gsi_stmt (gsi);
1067 update_stmt (use_stmt);
1068 all &= result;
1070 /* Remove intermediate now unused copy and conversion chains. */
1071 use_rhs = gimple_assign_rhs1 (use_stmt);
1072 if (result
1073 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
1074 && TREE_CODE (use_rhs) == SSA_NAME
1075 && has_zero_uses (gimple_assign_lhs (use_stmt)))
1077 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1078 release_defs (use_stmt);
1079 gsi_remove (&gsi, true);
1083 return all && has_zero_uses (name);
1087 /* Forward propagate the comparison defined in *DEFGSI like
1088 cond_1 = x CMP y to uses of the form
1089 a_1 = (T')cond_1
1090 a_1 = !cond_1
1091 a_1 = cond_1 != 0
1092 Returns true if stmt is now unused. Advance DEFGSI to the next
1093 statement. */
1095 static bool
1096 forward_propagate_comparison (gimple_stmt_iterator *defgsi)
1098 gimple stmt = gsi_stmt (*defgsi);
1099 tree name = gimple_assign_lhs (stmt);
1100 gimple use_stmt;
1101 tree tmp = NULL_TREE;
1102 gimple_stmt_iterator gsi;
1103 enum tree_code code;
1104 tree lhs;
1106 /* Don't propagate ssa names that occur in abnormal phis. */
1107 if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1108 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
1109 || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
1110 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
1111 goto bailout;
1113 /* Do not un-cse comparisons. But propagate through copies. */
1114 use_stmt = get_prop_dest_stmt (name, &name);
1115 if (!use_stmt
1116 || !is_gimple_assign (use_stmt))
1117 goto bailout;
1119 code = gimple_assign_rhs_code (use_stmt);
1120 lhs = gimple_assign_lhs (use_stmt);
1121 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
1122 goto bailout;
1124 /* We can propagate the condition into a statement that
1125 computes the logical negation of the comparison result. */
1126 if ((code == BIT_NOT_EXPR
1127 && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
1128 || (code == BIT_XOR_EXPR
1129 && integer_onep (gimple_assign_rhs2 (use_stmt))))
1131 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
1132 bool nans = HONOR_NANS (TYPE_MODE (type));
1133 enum tree_code inv_code;
1134 inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
1135 if (inv_code == ERROR_MARK)
1136 goto bailout;
1138 tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
1139 gimple_assign_rhs2 (stmt));
1141 else
1142 goto bailout;
1144 gsi = gsi_for_stmt (use_stmt);
1145 gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
1146 use_stmt = gsi_stmt (gsi);
1147 update_stmt (use_stmt);
1149 if (dump_file && (dump_flags & TDF_DETAILS))
1151 fprintf (dump_file, " Replaced '");
1152 print_gimple_expr (dump_file, stmt, 0, dump_flags);
1153 fprintf (dump_file, "' with '");
1154 print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
1155 fprintf (dump_file, "'\n");
1158 /* When we remove stmt now the iterator defgsi goes off it's current
1159 sequence, hence advance it now. */
1160 gsi_next (defgsi);
1162 /* Remove defining statements. */
1163 return remove_prop_source_from_use (name);
1165 bailout:
1166 gsi_next (defgsi);
1167 return false;
1171 /* GSI_P points to a statement which performs a narrowing integral
1172 conversion.
1174 Look for cases like:
1176 t = x & c;
1177 y = (T) t;
1179 Turn them into:
1181 t = x & c;
1182 y = (T) x;
1184 If T is narrower than X's type and C merely masks off bits outside
1185 of (T) and nothing else.
1187 Normally we'd let DCE remove the dead statement. But no DCE runs
1188 after the last forwprop/combine pass, so we remove the obviously
1189 dead code ourselves.
1191 Return TRUE if a change was made, FALSE otherwise. */
1193 static bool
1194 simplify_conversion_from_bitmask (gimple_stmt_iterator *gsi_p)
1196 gimple stmt = gsi_stmt (*gsi_p);
1197 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
1199 /* See if the input for the conversion was set via a BIT_AND_EXPR and
1200 the only use of the BIT_AND_EXPR result is the conversion. */
1201 if (is_gimple_assign (rhs_def_stmt)
1202 && gimple_assign_rhs_code (rhs_def_stmt) == BIT_AND_EXPR
1203 && has_single_use (gimple_assign_lhs (rhs_def_stmt)))
1205 tree rhs_def_operand1 = gimple_assign_rhs1 (rhs_def_stmt);
1206 tree rhs_def_operand2 = gimple_assign_rhs2 (rhs_def_stmt);
1207 tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
1209 /* Now verify suitability of the BIT_AND_EXPR's operands.
1210 The first must be an SSA_NAME that we can propagate and the
1211 second must be an integer constant that masks out all the
1212 bits outside the final result's type, but nothing else. */
1213 if (TREE_CODE (rhs_def_operand1) == SSA_NAME
1214 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand1)
1215 && TREE_CODE (rhs_def_operand2) == INTEGER_CST
1216 && operand_equal_p (rhs_def_operand2,
1217 build_low_bits_mask (TREE_TYPE (rhs_def_operand2),
1218 TYPE_PRECISION (lhs_type)),
1221 /* This is an optimizable case. Replace the source operand
1222 in the conversion with the first source operand of the
1223 BIT_AND_EXPR. */
1224 gimple_assign_set_rhs1 (stmt, rhs_def_operand1);
1225 stmt = gsi_stmt (*gsi_p);
1226 update_stmt (stmt);
1228 /* There is no DCE after the last forwprop pass. It's
1229 easy to clean up the first order effects here. */
1230 gimple_stmt_iterator si;
1231 si = gsi_for_stmt (rhs_def_stmt);
1232 gsi_remove (&si, true);
1233 release_defs (rhs_def_stmt);
1234 return true;
1238 return false;
1242 /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1243 If so, we can change STMT into lhs = y which can later be copy
1244 propagated. Similarly for negation.
1246 This could trivially be formulated as a forward propagation
1247 to immediate uses. However, we already had an implementation
1248 from DOM which used backward propagation via the use-def links.
1250 It turns out that backward propagation is actually faster as
1251 there's less work to do for each NOT/NEG expression we find.
1252 Backwards propagation needs to look at the statement in a single
1253 backlink. Forward propagation needs to look at potentially more
1254 than one forward link.
1256 Returns true when the statement was changed. */
1258 static bool
1259 simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
1261 gimple stmt = gsi_stmt (*gsi_p);
1262 tree rhs = gimple_assign_rhs1 (stmt);
1263 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
1265 /* See if the RHS_DEF_STMT has the same form as our statement. */
1266 if (is_gimple_assign (rhs_def_stmt)
1267 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
1269 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
1271 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1272 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1273 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
1275 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
1276 stmt = gsi_stmt (*gsi_p);
1277 update_stmt (stmt);
1278 return true;
1282 return false;
1285 /* Helper function for simplify_gimple_switch. Remove case labels that
1286 have values outside the range of the new type. */
1288 static void
1289 simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
1291 unsigned int branch_num = gimple_switch_num_labels (stmt);
1292 auto_vec<tree> labels (branch_num);
1293 unsigned int i, len;
1295 /* Collect the existing case labels in a VEC, and preprocess it as if
1296 we are gimplifying a GENERIC SWITCH_EXPR. */
1297 for (i = 1; i < branch_num; i++)
1298 labels.quick_push (gimple_switch_label (stmt, i));
1299 preprocess_case_label_vec_for_gimple (labels, index_type, NULL);
1301 /* If any labels were removed, replace the existing case labels
1302 in the GIMPLE_SWITCH statement with the correct ones.
1303 Note that the type updates were done in-place on the case labels,
1304 so we only have to replace the case labels in the GIMPLE_SWITCH
1305 if the number of labels changed. */
1306 len = labels.length ();
1307 if (len < branch_num - 1)
1309 bitmap target_blocks;
1310 edge_iterator ei;
1311 edge e;
1313 /* Corner case: *all* case labels have been removed as being
1314 out-of-range for INDEX_TYPE. Push one label and let the
1315 CFG cleanups deal with this further. */
1316 if (len == 0)
1318 tree label, elt;
1320 label = CASE_LABEL (gimple_switch_default_label (stmt));
1321 elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
1322 labels.quick_push (elt);
1323 len = 1;
1326 for (i = 0; i < labels.length (); i++)
1327 gimple_switch_set_label (stmt, i + 1, labels[i]);
1328 for (i++ ; i < branch_num; i++)
1329 gimple_switch_set_label (stmt, i, NULL_TREE);
1330 gimple_switch_set_num_labels (stmt, len + 1);
1332 /* Cleanup any edges that are now dead. */
1333 target_blocks = BITMAP_ALLOC (NULL);
1334 for (i = 0; i < gimple_switch_num_labels (stmt); i++)
1336 tree elt = gimple_switch_label (stmt, i);
1337 basic_block target = label_to_block (CASE_LABEL (elt));
1338 bitmap_set_bit (target_blocks, target->index);
1340 for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
1342 if (! bitmap_bit_p (target_blocks, e->dest->index))
1344 remove_edge (e);
1345 cfg_changed = true;
1346 free_dominance_info (CDI_DOMINATORS);
1348 else
1349 ei_next (&ei);
1351 BITMAP_FREE (target_blocks);
1355 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1356 the condition which we may be able to optimize better. */
1358 static bool
1359 simplify_gimple_switch (gimple stmt)
1361 /* The optimization that we really care about is removing unnecessary
1362 casts. That will let us do much better in propagating the inferred
1363 constant at the switch target. */
1364 tree cond = gimple_switch_index (stmt);
1365 if (TREE_CODE (cond) == SSA_NAME)
1367 gimple def_stmt = SSA_NAME_DEF_STMT (cond);
1368 if (gimple_assign_cast_p (def_stmt))
1370 tree def = gimple_assign_rhs1 (def_stmt);
1371 if (TREE_CODE (def) != SSA_NAME)
1372 return false;
1374 /* If we have an extension or sign-change that preserves the
1375 values we check against then we can copy the source value into
1376 the switch. */
1377 tree ti = TREE_TYPE (def);
1378 if (INTEGRAL_TYPE_P (ti)
1379 && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
1381 size_t n = gimple_switch_num_labels (stmt);
1382 tree min = NULL_TREE, max = NULL_TREE;
1383 if (n > 1)
1385 min = CASE_LOW (gimple_switch_label (stmt, 1));
1386 if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
1387 max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
1388 else
1389 max = CASE_LOW (gimple_switch_label (stmt, n - 1));
1391 if ((!min || int_fits_type_p (min, ti))
1392 && (!max || int_fits_type_p (max, ti)))
1394 gimple_switch_set_index (stmt, def);
1395 simplify_gimple_switch_label_vec (stmt, ti);
1396 update_stmt (stmt);
1397 return true;
1403 return false;
1406 /* For pointers p2 and p1 return p2 - p1 if the
1407 difference is known and constant, otherwise return NULL. */
1409 static tree
1410 constant_pointer_difference (tree p1, tree p2)
1412 int i, j;
1413 #define CPD_ITERATIONS 5
1414 tree exps[2][CPD_ITERATIONS];
1415 tree offs[2][CPD_ITERATIONS];
1416 int cnt[2];
1418 for (i = 0; i < 2; i++)
1420 tree p = i ? p1 : p2;
1421 tree off = size_zero_node;
1422 gimple stmt;
1423 enum tree_code code;
1425 /* For each of p1 and p2 we need to iterate at least
1426 twice, to handle ADDR_EXPR directly in p1/p2,
1427 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1428 on definition's stmt RHS. Iterate a few extra times. */
1429 j = 0;
1432 if (!POINTER_TYPE_P (TREE_TYPE (p)))
1433 break;
1434 if (TREE_CODE (p) == ADDR_EXPR)
1436 tree q = TREE_OPERAND (p, 0);
1437 HOST_WIDE_INT offset;
1438 tree base = get_addr_base_and_unit_offset (q, &offset);
1439 if (base)
1441 q = base;
1442 if (offset)
1443 off = size_binop (PLUS_EXPR, off, size_int (offset));
1445 if (TREE_CODE (q) == MEM_REF
1446 && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
1448 p = TREE_OPERAND (q, 0);
1449 off = size_binop (PLUS_EXPR, off,
1450 wide_int_to_tree (sizetype,
1451 mem_ref_offset (q)));
1453 else
1455 exps[i][j] = q;
1456 offs[i][j++] = off;
1457 break;
1460 if (TREE_CODE (p) != SSA_NAME)
1461 break;
1462 exps[i][j] = p;
1463 offs[i][j++] = off;
1464 if (j == CPD_ITERATIONS)
1465 break;
1466 stmt = SSA_NAME_DEF_STMT (p);
1467 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
1468 break;
1469 code = gimple_assign_rhs_code (stmt);
1470 if (code == POINTER_PLUS_EXPR)
1472 if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
1473 break;
1474 off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
1475 p = gimple_assign_rhs1 (stmt);
1477 else if (code == ADDR_EXPR || code == NOP_EXPR)
1478 p = gimple_assign_rhs1 (stmt);
1479 else
1480 break;
1482 while (1);
1483 cnt[i] = j;
1486 for (i = 0; i < cnt[0]; i++)
1487 for (j = 0; j < cnt[1]; j++)
1488 if (exps[0][i] == exps[1][j])
1489 return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);
1491 return NULL_TREE;
1494 /* *GSI_P is a GIMPLE_CALL to a builtin function.
1495 Optimize
1496 memcpy (p, "abcd", 4);
1497 memset (p + 4, ' ', 3);
1498 into
1499 memcpy (p, "abcd ", 7);
1500 call if the latter can be stored by pieces during expansion. */
1502 static bool
1503 simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
1505 gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
1506 tree vuse = gimple_vuse (stmt2);
1507 if (vuse == NULL)
1508 return false;
/* STMT1 is the store reaching STMT2 via the virtual use-def chain,
   i.e. the candidate memcpy/assignment immediately preceding it.  */
1509 stmt1 = SSA_NAME_DEF_STMT (vuse);
1511 switch (DECL_FUNCTION_CODE (callee2))
1513 case BUILT_IN_MEMSET:
/* Only plain three-argument memset with unused result on 8-bit-byte
   targets is handled.  */
1514 if (gimple_call_num_args (stmt2) != 3
1515 || gimple_call_lhs (stmt2)
1516 || CHAR_BIT != 8
1517 || BITS_PER_UNIT != 8)
1518 break;
1519 else
1520 tree callee1;
1522 tree ptr1, src1, str1, off1, len1, lhs1;
1523 tree ptr2 = gimple_call_arg (stmt2, 0);
1524 tree val2 = gimple_call_arg (stmt2, 1);
1525 tree len2 = gimple_call_arg (stmt2, 2);
1526 tree diff, vdef, new_str_cst;
1527 gimple use_stmt;
1528 unsigned int ptr1_align;
1529 unsigned HOST_WIDE_INT src_len;
1530 char *src_buf;
1531 use_operand_p use_p;
/* The memset value and length must be compile-time constants.  */
1533 if (!tree_fits_shwi_p (val2)
1534 || !tree_fits_uhwi_p (len2))
1535 break;
1536 if (is_gimple_call (stmt1))
1538 /* If first stmt is a call, it needs to be memcpy
1539 or mempcpy, with string literal as second argument and
1540 constant length. */
1541 callee1 = gimple_call_fndecl (stmt1);
1542 if (callee1 == NULL_TREE
1543 || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
1544 || gimple_call_num_args (stmt1) != 3)
1545 break;
1546 if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
1547 && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
1548 break;
1549 ptr1 = gimple_call_arg (stmt1, 0);
1550 src1 = gimple_call_arg (stmt1, 1);
1551 len1 = gimple_call_arg (stmt1, 2);
1552 lhs1 = gimple_call_lhs (stmt1);
1553 if (!tree_fits_uhwi_p (len1))
1554 break;
1555 str1 = string_constant (src1, &off1);
1556 if (str1 == NULL_TREE)
1557 break;
/* The literal, at constant offset OFF1, must fully cover the copied
   LEN1 bytes and be a plain char array.  */
1558 if (!tree_fits_uhwi_p (off1)
1559 || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
1560 || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
1561 - tree_to_uhwi (off1)) > 0
1562 || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
1563 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
1564 != TYPE_MODE (char_type_node))
1565 break;
1567 else if (gimple_assign_single_p (stmt1))
1569 /* Otherwise look for length 1 memcpy optimized into
1570 assignment. */
1571 ptr1 = gimple_assign_lhs (stmt1);
1572 src1 = gimple_assign_rhs1 (stmt1);
1573 if (TREE_CODE (ptr1) != MEM_REF
1574 || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
1575 || !tree_fits_shwi_p (src1))
1576 break;
1577 ptr1 = build_fold_addr_expr (ptr1);
1578 callee1 = NULL_TREE;
1579 len1 = size_one_node;
1580 lhs1 = NULL_TREE;
1581 off1 = size_zero_node;
1582 str1 = NULL_TREE;
1584 else
1585 break;
/* Compute the constant byte distance between the two destinations;
   for mempcpy the lhs (dest + len) is an alternate anchor.  */
1587 diff = constant_pointer_difference (ptr1, ptr2);
1588 if (diff == NULL && lhs1 != NULL)
1590 diff = constant_pointer_difference (lhs1, ptr2);
1591 if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1592 && diff != NULL)
1593 diff = size_binop (PLUS_EXPR, diff,
1594 fold_convert (sizetype, len1));
1596 /* If the difference between the second and first destination pointer
1597 is not constant, or is bigger than memcpy length, bail out. */
1598 if (diff == NULL
1599 || !tree_fits_uhwi_p (diff)
1600 || tree_int_cst_lt (len1, diff))
1601 break;
1603 /* Use maximum of difference plus memset length and memcpy length
1604 as the new memcpy length, if it is too big, bail out. */
1605 src_len = tree_to_uhwi (diff);
1606 src_len += tree_to_uhwi (len2);
1607 if (src_len < tree_to_uhwi (len1))
1608 src_len = tree_to_uhwi (len1);
1609 if (src_len > 1024)
1610 break;
1612 /* If mempcpy value is used elsewhere, bail out, as mempcpy
1613 with bigger length will return different result. */
1614 if (lhs1 != NULL_TREE
1615 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1616 && (TREE_CODE (lhs1) != SSA_NAME
1617 || !single_imm_use (lhs1, &use_p, &use_stmt)
1618 || use_stmt != stmt2))
1619 break;
1621 /* If anything reads memory in between memcpy and memset
1622 call, the modified memcpy call might change it. */
1623 vdef = gimple_vdef (stmt1);
1624 if (vdef != NULL
1625 && (!single_imm_use (vdef, &use_p, &use_stmt)
1626 || use_stmt != stmt2))
1627 break;
1629 ptr1_align = get_pointer_alignment (ptr1);
1630 /* Construct the new source string literal. */
1631 src_buf = XALLOCAVEC (char, src_len + 1);
1632 if (callee1)
1633 memcpy (src_buf,
1634 TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
1635 tree_to_uhwi (len1));
1636 else
1637 src_buf[0] = tree_to_shwi (src1);
1638 memset (src_buf + tree_to_uhwi (diff),
1639 tree_to_shwi (val2), tree_to_uhwi (len2));
1640 src_buf[src_len] = '\0';
1641 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1642 handle embedded '\0's. */
1643 if (strlen (src_buf) != src_len)
1644 break;
/* Switch to this block's profile so can_store_by_pieces uses the
   right optimize-for-speed/size setting.  */
1645 rtl_profile_for_bb (gimple_bb (stmt2));
1646 /* If the new memcpy wouldn't be emitted by storing the literal
1647 by pieces, this optimization might enlarge .rodata too much,
1648 as commonly used string literals couldn't be shared any
1649 longer. */
1650 if (!can_store_by_pieces (src_len,
1651 builtin_strncpy_read_str,
1652 src_buf, ptr1_align, false))
1653 break;
1655 new_str_cst = build_string_literal (src_len, src_buf);
1656 if (callee1)
1658 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1659 memset call. */
1660 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1661 gimple_call_set_lhs (stmt1, NULL_TREE);
1662 gimple_call_set_arg (stmt1, 1, new_str_cst)
1663 gimple_call_set_arg (stmt1, 2,
1664 build_int_cst (TREE_TYPE (len1), src_len));
1665 update_stmt (stmt1);
1666 unlink_stmt_vdef (stmt2);
1667 gsi_remove (gsi_p, true);
1668 release_defs (stmt2);
1669 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1670 release_ssa_name (lhs1);
1671 return true;
1673 else
1675 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1676 assignment, remove STMT1 and change memset call into
1677 memcpy call. */
1678 gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
1680 if (!is_gimple_val (ptr1))
1681 ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
1682 true, GSI_SAME_STMT);
1683 gimple_call_set_fndecl (stmt2,
1684 builtin_decl_explicit (BUILT_IN_MEMCPY));
1685 gimple_call_set_arg (stmt2, 0, ptr1);
1686 gimple_call_set_arg (stmt2, 1, new_str_cst);
1687 gimple_call_set_arg (stmt2, 2,
1688 build_int_cst (TREE_TYPE (len2), src_len));
1689 unlink_stmt_vdef (stmt1);
1690 gsi_remove (&gsi, true);
1691 release_defs (stmt1);
1692 update_stmt (stmt2);
/* NOTE(review): returns false on purpose — STMT2 was transformed in
   place (into memcpy), not removed, so the caller must not advance
   past a deleted statement.  */
1693 return false;
1696 break;
1697 default:
1698 break;
1700 return false;
1703 /* Checks if expression has type of one-bit precision, or is a known
1704 truth-valued expression. */
1705 static bool
1706 truth_valued_ssa_name (tree name)
1708 gimple def;
1709 tree type = TREE_TYPE (name);
1711 if (!INTEGRAL_TYPE_P (type))
1712 return false;
1713 /* Don't check here for BOOLEAN_TYPE as the precision isn't
1714 necessarily one and so ~X is not equal to !X. */
1715 if (TYPE_PRECISION (type) == 1)
1716 return true;
1717 def = SSA_NAME_DEF_STMT (name);
1718 if (is_gimple_assign (def))
1719 return truth_value_p (gimple_assign_rhs_code (def));
1720 return false;
1723 /* Helper routine for simplify_bitwise_binary_1 function.
1724 Return for the SSA name NAME the expression X if it mets condition
1725 NAME = !X. Otherwise return NULL_TREE.
1726 Detected patterns for NAME = !X are:
1727 !X and X == 0 for X with integral type.
1728 X ^ 1, X != 1,or ~X for X with integral type with precision of one. */
1729 static tree
1730 lookup_logical_inverted_value (tree name)
1732 tree op1, op2;
1733 enum tree_code code;
1734 gimple def;
1736 /* If name has none-intergal type, or isn't a SSA_NAME, then
1737 return. */
1738 if (TREE_CODE (name) != SSA_NAME
1739 || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
1740 return NULL_TREE;
1741 def = SSA_NAME_DEF_STMT (name);
1742 if (!is_gimple_assign (def))
1743 return NULL_TREE;
1745 code = gimple_assign_rhs_code (def);
1746 op1 = gimple_assign_rhs1 (def);
1747 op2 = NULL_TREE;
1749 /* Get for EQ_EXPR or BIT_XOR_EXPR operation the second operand.
1750 If CODE isn't an EQ_EXPR, BIT_XOR_EXPR, or BIT_NOT_EXPR, then return. */
1751 if (code == EQ_EXPR || code == NE_EXPR
1752 || code == BIT_XOR_EXPR)
1753 op2 = gimple_assign_rhs2 (def);
1755 switch (code)
1757 case BIT_NOT_EXPR:
1758 if (truth_valued_ssa_name (name))
1759 return op1;
1760 break;
1761 case EQ_EXPR:
1762 /* Check if we have X == 0 and X has an integral type. */
1763 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1764 break;
1765 if (integer_zerop (op2))
1766 return op1;
1767 break;
1768 case NE_EXPR:
1769 /* Check if we have X != 1 and X is a truth-valued. */
1770 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1771 break;
1772 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1773 return op1;
1774 break;
1775 case BIT_XOR_EXPR:
1776 /* Check if we have X ^ 1 and X is truth valued. */
1777 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1778 return op1;
1779 break;
1780 default:
1781 break;
1784 return NULL_TREE;
1787 /* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
1788 operations CODE, if one operand has the logically inverted
1789 value of the other. */
1790 static tree
1791 simplify_bitwise_binary_1 (enum tree_code code, tree type,
1792 tree arg1, tree arg2)
1794 tree anot;
1796 /* If CODE isn't a bitwise binary operation, return NULL_TREE. */
1797 if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
1798 && code != BIT_XOR_EXPR)
1799 return NULL_TREE;
1801 /* First check if operands ARG1 and ARG2 are equal. If so
1802 return NULL_TREE as this optimization is handled fold_stmt. */
1803 if (arg1 == arg2)
1804 return NULL_TREE;
1805 /* See if we have in arguments logical-not patterns. */
1806 if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
1807 || anot != arg2)
1808 && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
1809 || anot != arg1))
1810 return NULL_TREE;
1812 /* X & !X -> 0. */
1813 if (code == BIT_AND_EXPR)
1814 return fold_convert (type, integer_zero_node);
1815 /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued. */
1816 if (truth_valued_ssa_name (anot))
1817 return fold_convert (type, integer_one_node);
1819 /* ??? Otherwise result is (X != 0 ? X : 1). not handled. */
1820 return NULL_TREE;
1823 /* Given a ssa_name in NAME see if it was defined by an assignment and
1824 set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
1825 to the second operand on the rhs. */
1827 static inline void
1828 defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
1830 gimple def;
1831 enum tree_code code1;
1832 tree arg11;
1833 tree arg21;
1834 tree arg31;
1835 enum gimple_rhs_class grhs_class;
1837 code1 = TREE_CODE (name);
1838 arg11 = name;
1839 arg21 = NULL_TREE;
1840 grhs_class = get_gimple_rhs_class (code1);
1842 if (code1 == SSA_NAME)
1844 def = SSA_NAME_DEF_STMT (name);
1846 if (def && is_gimple_assign (def)
1847 && can_propagate_from (def))
1849 code1 = gimple_assign_rhs_code (def);
1850 arg11 = gimple_assign_rhs1 (def);
1851 arg21 = gimple_assign_rhs2 (def);
1852 arg31 = gimple_assign_rhs2 (def);
1855 else if (grhs_class == GIMPLE_TERNARY_RHS
1856 || GIMPLE_BINARY_RHS
1857 || GIMPLE_UNARY_RHS
1858 || GIMPLE_SINGLE_RHS)
1859 extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);
1861 *code = code1;
1862 *arg1 = arg11;
1863 if (arg2)
1864 *arg2 = arg21;
1865 /* Ignore arg3 currently. */
1868 /* Return true if a conversion of an operand from type FROM to type TO
1869 should be applied after performing the operation instead. */
1871 static bool
1872 hoist_conversion_for_bitop_p (tree to, tree from)
1874 /* That's a good idea if the conversion widens the operand, thus
1875 after hoisting the conversion the operation will be narrower. */
1876 if (TYPE_PRECISION (from) < TYPE_PRECISION (to))
1877 return true;
1879 /* It's also a good idea if the conversion is to a non-integer mode. */
1880 if (GET_MODE_CLASS (TYPE_MODE (to)) != MODE_INT)
1881 return true;
1883 /* Or if the precision of TO is not the same as the precision
1884 of its mode. */
1885 if (TYPE_PRECISION (to) != GET_MODE_PRECISION (TYPE_MODE (to)))
1886 return true;
1888 return false;
1891 /* GSI points to a statement of the form
1893 result = OP0 CODE OP1
1895 Where OP0 and OP1 are single bit SSA_NAMEs and CODE is either
1896 BIT_AND_EXPR or BIT_IOR_EXPR.
1898 If OP0 is fed by a bitwise negation of another single bit SSA_NAME,
1899 then we can simplify the two statements into a single LT_EXPR or LE_EXPR
1900 when code is BIT_AND_EXPR and BIT_IOR_EXPR respectively.
1902 If a simplification is made, return TRUE, else return FALSE. */
1903 static bool
1904 simplify_bitwise_binary_boolean (gimple_stmt_iterator *gsi,
1905 enum tree_code code,
1906 tree op0, tree op1)
1908 gimple op0_def_stmt = SSA_NAME_DEF_STMT (op0);
1910 if (!is_gimple_assign (op0_def_stmt)
1911 || (gimple_assign_rhs_code (op0_def_stmt) != BIT_NOT_EXPR))
1912 return false;
1914 tree x = gimple_assign_rhs1 (op0_def_stmt);
1915 if (TREE_CODE (x) == SSA_NAME
1916 && INTEGRAL_TYPE_P (TREE_TYPE (x))
1917 && TYPE_PRECISION (TREE_TYPE (x)) == 1
1918 && TYPE_UNSIGNED (TREE_TYPE (x)) == TYPE_UNSIGNED (TREE_TYPE (op1)))
1920 enum tree_code newcode;
1922 gimple stmt = gsi_stmt (*gsi);
1923 gimple_assign_set_rhs1 (stmt, x);
1924 gimple_assign_set_rhs2 (stmt, op1);
1925 if (code == BIT_AND_EXPR)
1926 newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LT_EXPR : GT_EXPR;
1927 else
1928 newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LE_EXPR : GE_EXPR;
1929 gimple_assign_set_rhs_code (stmt, newcode);
1930 update_stmt (stmt);
1931 return true;
1933 return false;
1937 /* Simplify bitwise binary operations.
1938 Return true if a transformation applied, otherwise return false. */
1940 static bool
1941 simplify_bitwise_binary (gimple_stmt_iterator *gsi)
1943 gimple stmt = gsi_stmt (*gsi);
1944 tree arg1 = gimple_assign_rhs1 (stmt);
1945 tree arg2 = gimple_assign_rhs2 (stmt);
1946 enum tree_code code = gimple_assign_rhs_code (stmt);
1947 tree res;
1948 tree def1_arg1, def1_arg2, def2_arg1, def2_arg2;
1949 enum tree_code def1_code, def2_code;
1951 defcodefor_name (arg1, &def1_code, &def1_arg1, &def1_arg2);
1952 defcodefor_name (arg2, &def2_code, &def2_arg1, &def2_arg2);
1954 /* Try to fold (type) X op CST -> (type) (X op ((type-x) CST))
1955 when profitable. */
1956 if (TREE_CODE (arg2) == INTEGER_CST
1957 && CONVERT_EXPR_CODE_P (def1_code)
1958 && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1))
1959 && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
1960 && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
1962 gimple newop;
1963 tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
1964 newop =
1965 gimple_build_assign_with_ops (code, tem, def1_arg1,
1966 fold_convert_loc (gimple_location (stmt),
1967 TREE_TYPE (def1_arg1),
1968 arg2));
1969 gimple_set_location (newop, gimple_location (stmt));
1970 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1971 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1972 tem, NULL_TREE, NULL_TREE);
1973 update_stmt (gsi_stmt (*gsi));
1974 return true;
1977 /* For bitwise binary operations apply operand conversions to the
1978 binary operation result instead of to the operands. This allows
1979 to combine successive conversions and bitwise binary operations. */
1980 if (CONVERT_EXPR_CODE_P (def1_code)
1981 && CONVERT_EXPR_CODE_P (def2_code)
1982 && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
1983 && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1)))
1985 gimple newop;
1986 tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
1987 newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
1988 gimple_set_location (newop, gimple_location (stmt));
1989 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1990 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1991 tem, NULL_TREE, NULL_TREE);
1992 update_stmt (gsi_stmt (*gsi));
1993 return true;
1997 /* Simplify (A & B) OP0 (C & B) to (A OP0 C) & B. */
1998 if (def1_code == def2_code
1999 && def1_code == BIT_AND_EXPR
2000 && operand_equal_for_phi_arg_p (def1_arg2,
2001 def2_arg2))
2003 tree b = def1_arg2;
2004 tree a = def1_arg1;
2005 tree c = def2_arg1;
2006 tree inner = fold_build2 (code, TREE_TYPE (arg2), a, c);
2007 /* If A OP0 C (this usually means C is the same as A) is 0
2008 then fold it down correctly. */
2009 if (integer_zerop (inner))
2011 gimple_assign_set_rhs_from_tree (gsi, inner);
2012 update_stmt (stmt);
2013 return true;
2015 /* If A OP0 C (this usually means C is the same as A) is a ssa_name
2016 then fold it down correctly. */
2017 else if (TREE_CODE (inner) == SSA_NAME)
2019 tree outer = fold_build2 (def1_code, TREE_TYPE (inner),
2020 inner, b);
2021 gimple_assign_set_rhs_from_tree (gsi, outer);
2022 update_stmt (stmt);
2023 return true;
2025 else
2027 gimple newop;
2028 tree tem;
2029 tem = make_ssa_name (TREE_TYPE (arg2), NULL);
2030 newop = gimple_build_assign_with_ops (code, tem, a, c);
2031 gimple_set_location (newop, gimple_location (stmt));
2032 /* Make sure to re-process the new stmt as it's walking upwards. */
2033 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
2034 gimple_assign_set_rhs1 (stmt, tem);
2035 gimple_assign_set_rhs2 (stmt, b);
2036 gimple_assign_set_rhs_code (stmt, def1_code);
2037 update_stmt (stmt);
2038 return true;
2042 /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2). */
2043 if (code == BIT_AND_EXPR
2044 && def1_code == BIT_IOR_EXPR
2045 && CONSTANT_CLASS_P (arg2)
2046 && CONSTANT_CLASS_P (def1_arg2))
2048 tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
2049 arg2, def1_arg2);
2050 tree tem;
2051 gimple newop;
2052 if (integer_zerop (cst))
2054 gimple_assign_set_rhs1 (stmt, def1_arg1);
2055 update_stmt (stmt);
2056 return true;
2058 tem = make_ssa_name (TREE_TYPE (arg2), NULL);
2059 newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
2060 tem, def1_arg1, arg2);
2061 gimple_set_location (newop, gimple_location (stmt));
2062 /* Make sure to re-process the new stmt as it's walking upwards. */
2063 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
2064 gimple_assign_set_rhs1 (stmt, tem);
2065 gimple_assign_set_rhs2 (stmt, cst);
2066 gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
2067 update_stmt (stmt);
2068 return true;
2071 /* Combine successive equal operations with constants. */
2072 if ((code == BIT_AND_EXPR
2073 || code == BIT_IOR_EXPR
2074 || code == BIT_XOR_EXPR)
2075 && def1_code == code
2076 && CONSTANT_CLASS_P (arg2)
2077 && CONSTANT_CLASS_P (def1_arg2))
2079 tree cst = fold_build2 (code, TREE_TYPE (arg2),
2080 arg2, def1_arg2);
2081 gimple_assign_set_rhs1 (stmt, def1_arg1);
2082 gimple_assign_set_rhs2 (stmt, cst);
2083 update_stmt (stmt);
2084 return true;
2087 /* Canonicalize X ^ ~0 to ~X. */
2088 if (code == BIT_XOR_EXPR
2089 && integer_all_onesp (arg2))
2091 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
2092 gcc_assert (gsi_stmt (*gsi) == stmt);
2093 update_stmt (stmt);
2094 return true;
2097 /* Try simple folding for X op !X, and X op X. */
2098 res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
2099 if (res != NULL_TREE)
2101 gimple_assign_set_rhs_from_tree (gsi, res);
2102 update_stmt (gsi_stmt (*gsi));
2103 return true;
2106 if (code == BIT_AND_EXPR || code == BIT_IOR_EXPR)
2108 enum tree_code ocode = code == BIT_AND_EXPR ? BIT_IOR_EXPR : BIT_AND_EXPR;
2109 if (def1_code == ocode)
2111 tree x = arg2;
2112 enum tree_code coden;
2113 tree a1, a2;
2114 /* ( X | Y) & X -> X */
2115 /* ( X & Y) | X -> X */
2116 if (x == def1_arg1
2117 || x == def1_arg2)
2119 gimple_assign_set_rhs_from_tree (gsi, x);
2120 update_stmt (gsi_stmt (*gsi));
2121 return true;
2124 defcodefor_name (def1_arg1, &coden, &a1, &a2);
2125 /* (~X | Y) & X -> X & Y */
2126 /* (~X & Y) | X -> X | Y */
2127 if (coden == BIT_NOT_EXPR && a1 == x)
2129 gimple_assign_set_rhs_with_ops (gsi, code,
2130 x, def1_arg2);
2131 gcc_assert (gsi_stmt (*gsi) == stmt);
2132 update_stmt (stmt);
2133 return true;
2135 defcodefor_name (def1_arg2, &coden, &a1, &a2);
2136 /* (Y | ~X) & X -> X & Y */
2137 /* (Y & ~X) | X -> X | Y */
2138 if (coden == BIT_NOT_EXPR && a1 == x)
2140 gimple_assign_set_rhs_with_ops (gsi, code,
2141 x, def1_arg1);
2142 gcc_assert (gsi_stmt (*gsi) == stmt);
2143 update_stmt (stmt);
2144 return true;
2147 if (def2_code == ocode)
2149 enum tree_code coden;
2150 tree a1;
2151 tree x = arg1;
2152 /* X & ( X | Y) -> X */
2153 /* X | ( X & Y) -> X */
2154 if (x == def2_arg1
2155 || x == def2_arg2)
2157 gimple_assign_set_rhs_from_tree (gsi, x);
2158 update_stmt (gsi_stmt (*gsi));
2159 return true;
2161 defcodefor_name (def2_arg1, &coden, &a1, NULL);
2162 /* (~X | Y) & X -> X & Y */
2163 /* (~X & Y) | X -> X | Y */
2164 if (coden == BIT_NOT_EXPR && a1 == x)
2166 gimple_assign_set_rhs_with_ops (gsi, code,
2167 x, def2_arg2);
2168 gcc_assert (gsi_stmt (*gsi) == stmt);
2169 update_stmt (stmt);
2170 return true;
2172 defcodefor_name (def2_arg2, &coden, &a1, NULL);
2173 /* (Y | ~X) & X -> X & Y */
2174 /* (Y & ~X) | X -> X | Y */
2175 if (coden == BIT_NOT_EXPR && a1 == x)
2177 gimple_assign_set_rhs_with_ops (gsi, code,
2178 x, def2_arg1);
2179 gcc_assert (gsi_stmt (*gsi) == stmt);
2180 update_stmt (stmt);
2181 return true;
2185 /* If arg1 and arg2 are booleans (or any single bit type)
2186 then try to simplify:
2188 (~X & Y) -> X < Y
2189 (X & ~Y) -> Y < X
2190 (~X | Y) -> X <= Y
2191 (X | ~Y) -> Y <= X
2193 But only do this if our result feeds into a comparison as
2194 this transformation is not always a win, particularly on
2195 targets with and-not instructions. */
2196 if (TREE_CODE (arg1) == SSA_NAME
2197 && TREE_CODE (arg2) == SSA_NAME
2198 && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
2199 && TYPE_PRECISION (TREE_TYPE (arg1)) == 1
2200 && TYPE_PRECISION (TREE_TYPE (arg2)) == 1
2201 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2202 == TYPE_UNSIGNED (TREE_TYPE (arg2))))
2204 use_operand_p use_p;
2205 gimple use_stmt;
2207 if (single_imm_use (gimple_assign_lhs (stmt), &use_p, &use_stmt))
2209 if (gimple_code (use_stmt) == GIMPLE_COND
2210 && gimple_cond_lhs (use_stmt) == gimple_assign_lhs (stmt)
2211 && integer_zerop (gimple_cond_rhs (use_stmt))
2212 && gimple_cond_code (use_stmt) == NE_EXPR)
2214 if (simplify_bitwise_binary_boolean (gsi, code, arg1, arg2))
2215 return true;
2216 if (simplify_bitwise_binary_boolean (gsi, code, arg2, arg1))
2217 return true;
2222 return false;
2226 /* Recognize rotation patterns. Return true if a transformation
2227 applied, otherwise return false.
2229 We are looking for X with unsigned type T with bitsize B, OP being
2230 +, | or ^, some type T2 wider than T and
2231 (X << CNT1) OP (X >> CNT2) iff CNT1 + CNT2 == B
2232 ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2)) iff CNT1 + CNT2 == B
2233 (X << Y) OP (X >> (B - Y))
2234 (X << (int) Y) OP (X >> (int) (B - Y))
2235 ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
2236 ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
2237 (X << Y) | (X >> ((-Y) & (B - 1)))
2238 (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
2239 ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
2240 ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
2242 and transform these into:
2243 X r<< CNT1
2244 X r<< Y
2246 Note, in the patterns with T2 type, the type of OP operands
2247 might be even a signed type, but should have precision B. */
static bool
simplify_rotate (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree arg[2], rtype, rotcnt = NULL_TREE;
  tree def_arg1[2], def_arg2[2];
  enum tree_code def_code[2];
  tree lhs;
  int i;
  bool swapped_p = false;
  gimple g;

  arg[0] = gimple_assign_rhs1 (stmt);
  arg[1] = gimple_assign_rhs2 (stmt);
  rtype = TREE_TYPE (arg[0]);

  /* Only create rotates in complete modes.  Other cases are not
     expanded properly.  */
  if (!INTEGRAL_TYPE_P (rtype)
      || TYPE_PRECISION (rtype) != GET_MODE_PRECISION (TYPE_MODE (rtype)))
    return false;

  /* Record the defining code and operands of both OP operands.  */
  for (i = 0; i < 2; i++)
    defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);

  /* Look through narrowing conversions.  */
  if (CONVERT_EXPR_CODE_P (def_code[0])
      && CONVERT_EXPR_CODE_P (def_code[1])
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
	 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
      && has_single_use (arg[0])
      && has_single_use (arg[1]))
    {
      for (i = 0; i < 2; i++)
	{
	  arg[i] = def_arg1[i];
	  defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
	}
    }

  /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR.  */
  for (i = 0; i < 2; i++)
    if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
      return false;
    else if (!has_single_use (arg[i]))
      return false;
  if (def_code[0] == def_code[1])
    return false;

  /* If we've looked through narrowing conversions before, look through
     widening conversions from unsigned type with the same precision
     as rtype here.  */
  if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
    for (i = 0; i < 2; i++)
      {
	tree tem;
	enum tree_code code;
	defcodefor_name (def_arg1[i], &code, &tem, NULL);
	if (!CONVERT_EXPR_CODE_P (code)
	    || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
	    || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
	  return false;
	def_arg1[i] = tem;
      }
  /* Both shifts have to use the same first operand.  */
  if (TREE_CODE (def_arg1[0]) != SSA_NAME || def_arg1[0] != def_arg1[1])
    return false;
  if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
    return false;

  /* CNT1 + CNT2 == B case above.  */
  if (tree_fits_uhwi_p (def_arg2[0])
      && tree_fits_uhwi_p (def_arg2[1])
      && tree_to_uhwi (def_arg2[0])
	 + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
    rotcnt = def_arg2[0];
  else if (TREE_CODE (def_arg2[0]) != SSA_NAME
	   || TREE_CODE (def_arg2[1]) != SSA_NAME)
    return false;
  else
    {
      tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
      enum tree_code cdef_code[2];
      /* Look through conversion of the shift count argument.
	 The C/C++ FE cast any shift count argument to integer_type_node.
	 The only problem might be if the shift count type maximum value
	 is equal or smaller than number of bits in rtype.  */
      for (i = 0; i < 2; i++)
	{
	  def_arg2_alt[i] = def_arg2[i];
	  defcodefor_name (def_arg2[i], &cdef_code[i],
			   &cdef_arg1[i], &cdef_arg2[i]);
	  if (CONVERT_EXPR_CODE_P (cdef_code[i])
	      && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
	      && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
		 > floor_log2 (TYPE_PRECISION (rtype))
	      && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
		 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (cdef_arg1[i]))))
	    {
	      def_arg2_alt[i] = cdef_arg1[i];
	      defcodefor_name (def_arg2_alt[i], &cdef_code[i],
			       &cdef_arg1[i], &cdef_arg2[i]);
	    }
	}
      for (i = 0; i < 2; i++)
	/* Check for one shift count being Y and the other B - Y,
	   with optional casts.  */
	if (cdef_code[i] == MINUS_EXPR
	    && tree_fits_shwi_p (cdef_arg1[i])
	    && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
	    && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
	  {
	    tree tem;
	    enum tree_code code;

	    if (cdef_arg2[i] == def_arg2[1 - i]
		|| cdef_arg2[i] == def_arg2_alt[1 - i])
	      {
		rotcnt = cdef_arg2[i];
		break;
	      }
	    defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
	    if (CONVERT_EXPR_CODE_P (code)
		&& INTEGRAL_TYPE_P (TREE_TYPE (tem))
		&& TYPE_PRECISION (TREE_TYPE (tem))
		   > floor_log2 (TYPE_PRECISION (rtype))
		&& TYPE_PRECISION (TREE_TYPE (tem))
		   == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
		&& (tem == def_arg2[1 - i]
		    || tem == def_arg2_alt[1 - i]))
	      {
		rotcnt = tem;
		break;
	      }
	  }
	/* The above sequence isn't safe for Y being 0,
	   because then one of the shifts triggers undefined behavior.
	   This alternative is safe even for rotation count of 0.
	   One shift count is Y and the other (-Y) & (B - 1).  */
	else if (cdef_code[i] == BIT_AND_EXPR
		 && tree_fits_shwi_p (cdef_arg2[i])
		 && tree_to_shwi (cdef_arg2[i])
		    == TYPE_PRECISION (rtype) - 1
		 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
		 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
	  {
	    tree tem;
	    enum tree_code code;

	    defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
	    if (CONVERT_EXPR_CODE_P (code)
		&& INTEGRAL_TYPE_P (TREE_TYPE (tem))
		&& TYPE_PRECISION (TREE_TYPE (tem))
		   > floor_log2 (TYPE_PRECISION (rtype))
		&& TYPE_PRECISION (TREE_TYPE (tem))
		   == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem))))
	      defcodefor_name (tem, &code, &tem, NULL);

	    if (code == NEGATE_EXPR)
	      {
		if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
		  {
		    rotcnt = tem;
		    break;
		  }
		defcodefor_name (tem, &code, &tem, NULL);
		if (CONVERT_EXPR_CODE_P (code)
		    && INTEGRAL_TYPE_P (TREE_TYPE (tem))
		    && TYPE_PRECISION (TREE_TYPE (tem))
		       > floor_log2 (TYPE_PRECISION (rtype))
		    && TYPE_PRECISION (TREE_TYPE (tem))
		       == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
		    && (tem == def_arg2[1 - i]
			|| tem == def_arg2_alt[1 - i]))
		  {
		    rotcnt = tem;
		    break;
		  }
	      }
	  }
      if (rotcnt == NULL_TREE)
	return false;
      /* I is the operand whose count matched as B - Y resp. (-Y) & (B - 1);
	 if that was operand 0 the rotate direction below must be flipped.  */
      swapped_p = i != 1;
    }

  /* If the rotate count type differs from the shift count type, emit a
     conversion statement first.  */
  if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
				  TREE_TYPE (rotcnt)))
    {
      g = gimple_build_assign_with_ops (NOP_EXPR,
					make_ssa_name (TREE_TYPE (def_arg2[0]),
						       NULL),
					rotcnt, NULL_TREE);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      rotcnt = gimple_assign_lhs (g);
    }
  lhs = gimple_assign_lhs (stmt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    lhs = make_ssa_name (TREE_TYPE (def_arg1[0]), NULL);
  g = gimple_build_assign_with_ops (((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
				    ? LROTATE_EXPR : RROTATE_EXPR,
				    lhs, def_arg1[0], rotcnt);
  /* If we looked through conversions, a trailing conversion back to RTYPE
     is needed; the rotate is inserted before it.  */
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    {
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign_with_ops (NOP_EXPR, gimple_assign_lhs (stmt),
					lhs, NULL_TREE);
    }
  gsi_replace (gsi, g, false);
  return true;
}
2463 /* Perform re-associations of the plus or minus statement STMT that are
2464    always permitted.  Returns true if the statement was changed.  */
static bool
associate_plusminus (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  bool changed;

  /* We can't reassociate at all for saturating types.  */
  if (TYPE_SATURATING (TREE_TYPE (rhs1)))
    return false;

  /* First contract negates.  Iterate until no further negate can be
     folded into STMT.  */
  do
    {
      changed = false;

      /* A +- (-B) -> A -+ B.  */
      if (TREE_CODE (rhs2) == SSA_NAME)
	{
	  gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
	  if (is_gimple_assign (def_stmt)
	      && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
	      && can_propagate_from (def_stmt))
	    {
	      code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
	      gimple_assign_set_rhs_code (stmt, code);
	      rhs2 = gimple_assign_rhs1 (def_stmt);
	      gimple_assign_set_rhs2 (stmt, rhs2);
	      gimple_set_modified (stmt, true);
	      changed = true;
	    }
	}

      /* (-A) + B -> B - A.  */
      if (TREE_CODE (rhs1) == SSA_NAME
	  && code == PLUS_EXPR)
	{
	  gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
	  if (is_gimple_assign (def_stmt)
	      && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
	      && can_propagate_from (def_stmt))
	    {
	      code = MINUS_EXPR;
	      gimple_assign_set_rhs_code (stmt, code);
	      rhs1 = rhs2;
	      gimple_assign_set_rhs1 (stmt, rhs1);
	      rhs2 = gimple_assign_rhs1 (def_stmt);
	      gimple_assign_set_rhs2 (stmt, rhs2);
	      gimple_set_modified (stmt, true);
	      changed = true;
	    }
	}
    }
  while (changed);

  /* We can't reassociate floating-point or fixed-point plus or minus
     because of saturation to +-Inf.  */
  if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
      || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
    goto out;

  /* Second match patterns that allow contracting a plus-minus pair
     irrespective of overflow issues.

	(A +- B) - A       ->  +- B
	(A +- B) -+ B      ->  A
	(CST +- A) +- CST  ->  CST +- A
	(A +- CST) +- CST  ->  A +- CST
	~A + A             ->  -1
	~A + 1             ->  -A
	A - (A +- B)       ->  -+ B
	A +- (B +- A)      ->  +- B
	CST +- (CST +- A)  ->  CST +- A
	CST +- (A +- CST)  ->  CST +- A
	A + ~A             ->  -1
	(T)(P + A) - (T)P  -> (T)A

     via commutating the addition and contracting operations to zero
     by reassociation.  */

  /* Patterns keyed off the definition of RHS1.  */
  if (TREE_CODE (rhs1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
	{
	  enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
	  if (def_code == PLUS_EXPR
	      || def_code == MINUS_EXPR)
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
	      if (operand_equal_p (def_rhs1, rhs2, 0)
		  && code == MINUS_EXPR)
		{
		  /* (A +- B) - A -> +- B.  */
		  code = ((def_code == PLUS_EXPR)
			  ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
		  rhs1 = def_rhs2;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (operand_equal_p (def_rhs2, rhs2, 0)
		       && code != def_code)
		{
		  /* (A +- B) -+ B -> A.  */
		  code = TREE_CODE (def_rhs1);
		  rhs1 = def_rhs1;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (CONSTANT_CLASS_P (rhs2)
		       && CONSTANT_CLASS_P (def_rhs1))
		{
		  /* (CST +- A) +- CST -> CST +- A.  */
		  tree cst = fold_binary (code, TREE_TYPE (rhs1),
					  def_rhs1, rhs2);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      code = def_code;
		      gimple_assign_set_rhs_code (stmt, code);
		      rhs1 = cst;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = def_rhs2;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	      else if (CONSTANT_CLASS_P (rhs2)
		       && CONSTANT_CLASS_P (def_rhs2))
		{
		  /* (A +- CST) +- CST -> A +- CST.  */
		  enum tree_code mix = (code == def_code)
				       ? PLUS_EXPR : MINUS_EXPR;
		  tree cst = fold_binary (mix, TREE_TYPE (rhs1),
					  def_rhs2, rhs2);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      code = def_code;
		      gimple_assign_set_rhs_code (stmt, code);
		      rhs1 = def_rhs1;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = cst;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	    }
	  else if (def_code == BIT_NOT_EXPR && code == PLUS_EXPR)
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      if (operand_equal_p (def_rhs1, rhs2, 0))
		{
		  /* ~A + A -> -1.  */
		  rhs1 = build_all_ones_cst (TREE_TYPE (rhs2));
		  rhs2 = NULL_TREE;
		  code = TREE_CODE (rhs1);
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if ((TREE_CODE (TREE_TYPE (rhs2)) != COMPLEX_TYPE
			&& integer_onep (rhs2))
		       || (TREE_CODE (rhs2) == COMPLEX_CST
			   && integer_onep (TREE_REALPART (rhs2))
			   && integer_onep (TREE_IMAGPART (rhs2))))
		{
		  /* ~A + 1 -> -A.  */
		  code = NEGATE_EXPR;
		  rhs1 = def_rhs1;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	    }
	  else if (code == MINUS_EXPR
		   && CONVERT_EXPR_CODE_P (def_code)
		   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
		   && TREE_CODE (rhs2) == SSA_NAME)
	    {
	      /* (T)(P + A) - (T)P -> (T)A.  */
	      gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs2);
	      if (is_gimple_assign (def_stmt2)
		  && can_propagate_from (def_stmt2)
		  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt2))
		  && TREE_CODE (gimple_assign_rhs1 (def_stmt2)) == SSA_NAME)
		{
		  /* Now we have (T)X - (T)P.  */
		  tree p = gimple_assign_rhs1 (def_stmt2);
		  def_stmt2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def_stmt));
		  if (is_gimple_assign (def_stmt2)
		      && can_propagate_from (def_stmt2)
		      && (gimple_assign_rhs_code (def_stmt2) == POINTER_PLUS_EXPR
			  || gimple_assign_rhs_code (def_stmt2) == PLUS_EXPR)
		      && gimple_assign_rhs1 (def_stmt2) == p)
		    {
		      /* And finally (T)(P + A) - (T)P.  */
		      tree a = gimple_assign_rhs2 (def_stmt2);
		      if (TYPE_PRECISION (TREE_TYPE (rhs1))
			  <= TYPE_PRECISION (TREE_TYPE (a))
			  /* For integer types, if A has a smaller type
			     than T the result depends on the possible
			     overflow in P + A.
			     E.g. T=size_t, A=(unsigned)429497295, P>0.
			     However, if an overflow in P + A would cause
			     undefined behavior, we can assume that there
			     is no overflow.  */
			  || (INTEGRAL_TYPE_P (TREE_TYPE (p))
			      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (p)))
			  /* For pointer types, if the conversion of A to the
			     final type requires a sign- or zero-extension,
			     then we have to punt - it is not defined which
			     one is correct.  */
			  || (POINTER_TYPE_P (TREE_TYPE (p))
			      && TREE_CODE (a) == INTEGER_CST
			      && tree_int_cst_sign_bit (a) == 0))
			{
			  if (issue_strict_overflow_warning
			      (WARN_STRICT_OVERFLOW_MISC)
			      && TYPE_PRECISION (TREE_TYPE (rhs1))
				 > TYPE_PRECISION (TREE_TYPE (a))
			      && INTEGRAL_TYPE_P (TREE_TYPE (p)))
			    warning_at (gimple_location (stmt),
					OPT_Wstrict_overflow,
					"assuming signed overflow does not "
					"occur when assuming that "
					"(T)(P + A) - (T)P is always (T)A");
			  if (useless_type_conversion_p (TREE_TYPE (rhs1),
							 TREE_TYPE (a)))
			    code = TREE_CODE (a);
			  else
			    code = NOP_EXPR;
			  rhs1 = a;
			  rhs2 = NULL_TREE;
			  gimple_assign_set_rhs_with_ops (gsi, code, rhs1,
							  rhs2);
			  gcc_assert (gsi_stmt (*gsi) == stmt);
			  gimple_set_modified (stmt, true);
			}
		    }
		}
	    }
	}
    }

  /* Mirrored patterns keyed off the definition of RHS2.  */
  if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
      if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
	{
	  enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
	  if (def_code == PLUS_EXPR
	      || def_code == MINUS_EXPR)
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
	      if (operand_equal_p (def_rhs1, rhs1, 0)
		  && code == MINUS_EXPR)
		{
		  /* A - (A +- B) -> -+ B.  */
		  code = ((def_code == PLUS_EXPR)
			  ? NEGATE_EXPR : TREE_CODE (def_rhs2));
		  rhs1 = def_rhs2;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (operand_equal_p (def_rhs2, rhs1, 0)
		       && code != def_code)
		{
		  /* A +- (B +- A) -> +- B.  */
		  code = ((code == PLUS_EXPR)
			  ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
		  rhs1 = def_rhs1;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (CONSTANT_CLASS_P (rhs1)
		       && CONSTANT_CLASS_P (def_rhs1))
		{
		  /* CST +- (CST +- A) -> CST +- A.  */
		  tree cst = fold_binary (code, TREE_TYPE (rhs2),
					  rhs1, def_rhs1);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
		      gimple_assign_set_rhs_code (stmt, code);
		      rhs1 = cst;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = def_rhs2;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	      else if (CONSTANT_CLASS_P (rhs1)
		       && CONSTANT_CLASS_P (def_rhs2))
		{
		  /* CST +- (A +- CST) -> CST +- A.  */
		  tree cst = fold_binary (def_code == code
					  ? PLUS_EXPR : MINUS_EXPR,
					  TREE_TYPE (rhs2),
					  rhs1, def_rhs2);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      rhs1 = cst;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = def_rhs1;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	    }
	  else if (def_code == BIT_NOT_EXPR)
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      if (code == PLUS_EXPR
		  && operand_equal_p (def_rhs1, rhs1, 0))
		{
		  /* A + ~A -> -1.  */
		  rhs1 = build_all_ones_cst (TREE_TYPE (rhs1));
		  rhs2 = NULL_TREE;
		  code = TREE_CODE (rhs1);
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	    }
	}
    }

out:
  /* If any pattern above changed STMT, fold and update it in place.  */
  if (gimple_modified_p (stmt))
    {
      fold_stmt_inplace (gsi);
      update_stmt (stmt);
      return true;
    }

  return false;
}
2816 /* Associate operands of a POINTER_PLUS_EXPR assignment at *GSI.  Returns
2817    true if anything changed, false otherwise. */
2819 static bool
2820 associate_pointerplus_align (gimple_stmt_iterator *gsi)
2822 gimple stmt = gsi_stmt (*gsi);
2823 gimple def_stmt;
2824 tree ptr, rhs, algn;
2826 /* Pattern match
2827 tem = (sizetype) ptr;
2828 tem = tem & algn;
2829 tem = -tem;
2830 ... = ptr p+ tem;
2831 and produce the simpler and easier to analyze with respect to alignment
2832 ... = ptr & ~algn; */
2833 ptr = gimple_assign_rhs1 (stmt);
2834 rhs = gimple_assign_rhs2 (stmt);
2835 if (TREE_CODE (rhs) != SSA_NAME)
2836 return false;
2837 def_stmt = SSA_NAME_DEF_STMT (rhs);
2838 if (!is_gimple_assign (def_stmt)
2839 || gimple_assign_rhs_code (def_stmt) != NEGATE_EXPR)
2840 return false;
2841 rhs = gimple_assign_rhs1 (def_stmt);
2842 if (TREE_CODE (rhs) != SSA_NAME)
2843 return false;
2844 def_stmt = SSA_NAME_DEF_STMT (rhs);
2845 if (!is_gimple_assign (def_stmt)
2846 || gimple_assign_rhs_code (def_stmt) != BIT_AND_EXPR)
2847 return false;
2848 rhs = gimple_assign_rhs1 (def_stmt);
2849 algn = gimple_assign_rhs2 (def_stmt);
2850 if (TREE_CODE (rhs) != SSA_NAME
2851 || TREE_CODE (algn) != INTEGER_CST)
2852 return false;
2853 def_stmt = SSA_NAME_DEF_STMT (rhs);
2854 if (!is_gimple_assign (def_stmt)
2855 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
2856 return false;
2857 if (gimple_assign_rhs1 (def_stmt) != ptr)
2858 return false;
2860 algn = wide_int_to_tree (TREE_TYPE (ptr), wi::bit_not (algn));
2861 gimple_assign_set_rhs_with_ops (gsi, BIT_AND_EXPR, ptr, algn);
2862 fold_stmt_inplace (gsi);
2863 update_stmt (stmt);
2865 return true;
2868 /* Associate operands of a POINTER_PLUS_EXPR assignment at *GSI.  Returns
2869    true if anything changed, false otherwise. */
2871 static bool
2872 associate_pointerplus_diff (gimple_stmt_iterator *gsi)
2874 gimple stmt = gsi_stmt (*gsi);
2875 gimple def_stmt;
2876 tree ptr1, rhs;
2878 /* Pattern match
2879 tem1 = (long) ptr1;
2880 tem2 = (long) ptr2;
2881 tem3 = tem2 - tem1;
2882 tem4 = (unsigned long) tem3;
2883 tem5 = ptr1 + tem4;
2884 and produce
2885 tem5 = ptr2; */
2886 ptr1 = gimple_assign_rhs1 (stmt);
2887 rhs = gimple_assign_rhs2 (stmt);
2888 if (TREE_CODE (rhs) != SSA_NAME)
2889 return false;
2890 gimple minus = SSA_NAME_DEF_STMT (rhs);
2891 /* Conditionally look through a sign-changing conversion. */
2892 if (is_gimple_assign (minus)
2893 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (minus))
2894 && (TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (minus)))
2895 == TYPE_PRECISION (TREE_TYPE (rhs)))
2896 && TREE_CODE (gimple_assign_rhs1 (minus)) == SSA_NAME)
2897 minus = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (minus));
2898 if (!is_gimple_assign (minus))
2899 return false;
2900 if (gimple_assign_rhs_code (minus) != MINUS_EXPR)
2901 return false;
2902 rhs = gimple_assign_rhs2 (minus);
2903 if (TREE_CODE (rhs) != SSA_NAME)
2904 return false;
2905 def_stmt = SSA_NAME_DEF_STMT (rhs);
2906 if (!is_gimple_assign (def_stmt)
2907 || ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
2908 || gimple_assign_rhs1 (def_stmt) != ptr1)
2909 return false;
2910 rhs = gimple_assign_rhs1 (minus);
2911 if (TREE_CODE (rhs) != SSA_NAME)
2912 return false;
2913 def_stmt = SSA_NAME_DEF_STMT (rhs);
2914 if (!is_gimple_assign (def_stmt)
2915 || ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
2916 return false;
2917 rhs = gimple_assign_rhs1 (def_stmt);
2918 if (! useless_type_conversion_p (TREE_TYPE (ptr1), TREE_TYPE (rhs)))
2919 return false;
2921 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (rhs), rhs, NULL_TREE);
2922 update_stmt (stmt);
2924 return true;
2927 /* Associate operands of a POINTER_PLUS_EXPR assignment at *GSI.  Returns
2928    true if anything changed, false otherwise. */
2930 static bool
2931 associate_pointerplus (gimple_stmt_iterator *gsi)
2933 gimple stmt = gsi_stmt (*gsi);
2934 gimple def_stmt;
2935 tree ptr, off1, off2;
2937 if (associate_pointerplus_align (gsi)
2938 || associate_pointerplus_diff (gsi))
2939 return true;
2941 /* Associate (p +p off1) +p off2 as (p +p (off1 + off2)). */
2942 ptr = gimple_assign_rhs1 (stmt);
2943 off1 = gimple_assign_rhs2 (stmt);
2944 if (TREE_CODE (ptr) != SSA_NAME
2945 || !has_single_use (ptr))
2946 return false;
2947 def_stmt = SSA_NAME_DEF_STMT (ptr);
2948 if (!is_gimple_assign (def_stmt)
2949 || gimple_assign_rhs_code (def_stmt) != POINTER_PLUS_EXPR
2950 || !can_propagate_from (def_stmt))
2951 return false;
2952 ptr = gimple_assign_rhs1 (def_stmt);
2953 off2 = gimple_assign_rhs2 (def_stmt);
2954 if (!types_compatible_p (TREE_TYPE (off1), TREE_TYPE (off2)))
2955 return false;
2957 tree off = make_ssa_name (TREE_TYPE (off1), NULL);
2958 gimple ostmt = gimple_build_assign_with_ops (PLUS_EXPR, off, off1, off2);
2959 gsi_insert_before (gsi, ostmt, GSI_SAME_STMT);
2961 gimple_assign_set_rhs_with_ops (gsi, POINTER_PLUS_EXPR, ptr, off);
2962 update_stmt (stmt);
2964 return true;
2967 /* Combine two conversions in a row for the second conversion at *GSI.
2968 Returns 1 if there were any changes made, 2 if cfg-cleanup needs to
2969 run. Else it returns 0. */
2971 static int
2972 combine_conversions (gimple_stmt_iterator *gsi)
2974 gimple stmt = gsi_stmt (*gsi);
2975 gimple def_stmt;
2976 tree op0, lhs;
2977 enum tree_code code = gimple_assign_rhs_code (stmt);
2978 enum tree_code code2;
2980 gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
2981 || code == FLOAT_EXPR
2982 || code == FIX_TRUNC_EXPR);
2984 lhs = gimple_assign_lhs (stmt);
2985 op0 = gimple_assign_rhs1 (stmt);
2986 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
2988 gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
2989 return 1;
2992 if (TREE_CODE (op0) != SSA_NAME)
2993 return 0;
2995 def_stmt = SSA_NAME_DEF_STMT (op0);
2996 if (!is_gimple_assign (def_stmt))
2997 return 0;
2999 code2 = gimple_assign_rhs_code (def_stmt);
3001 if (CONVERT_EXPR_CODE_P (code2) || code2 == FLOAT_EXPR)
3003 tree defop0 = gimple_assign_rhs1 (def_stmt);
3004 tree type = TREE_TYPE (lhs);
3005 tree inside_type = TREE_TYPE (defop0);
3006 tree inter_type = TREE_TYPE (op0);
3007 int inside_int = INTEGRAL_TYPE_P (inside_type);
3008 int inside_ptr = POINTER_TYPE_P (inside_type);
3009 int inside_float = FLOAT_TYPE_P (inside_type);
3010 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
3011 unsigned int inside_prec = TYPE_PRECISION (inside_type);
3012 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
3013 int inter_int = INTEGRAL_TYPE_P (inter_type);
3014 int inter_ptr = POINTER_TYPE_P (inter_type);
3015 int inter_float = FLOAT_TYPE_P (inter_type);
3016 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
3017 unsigned int inter_prec = TYPE_PRECISION (inter_type);
3018 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
3019 int final_int = INTEGRAL_TYPE_P (type);
3020 int final_ptr = POINTER_TYPE_P (type);
3021 int final_float = FLOAT_TYPE_P (type);
3022 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
3023 unsigned int final_prec = TYPE_PRECISION (type);
3024 int final_unsignedp = TYPE_UNSIGNED (type);
3026 /* Don't propagate ssa names that occur in abnormal phis. */
3027 if (TREE_CODE (defop0) == SSA_NAME
3028 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defop0))
3029 return 0;
3031 /* In addition to the cases of two conversions in a row
3032 handled below, if we are converting something to its own
3033 type via an object of identical or wider precision, neither
3034 conversion is needed. */
3035 if (useless_type_conversion_p (type, inside_type)
3036 && (((inter_int || inter_ptr) && final_int)
3037 || (inter_float && final_float))
3038 && inter_prec >= final_prec)
3040 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
3041 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
3042 update_stmt (stmt);
3043 return remove_prop_source_from_use (op0) ? 2 : 1;
3046 /* Likewise, if the intermediate and initial types are either both
3047 float or both integer, we don't need the middle conversion if the
3048 former is wider than the latter and doesn't change the signedness
3049 (for integers). Avoid this if the final type is a pointer since
3050 then we sometimes need the middle conversion. Likewise if the
3051 final type has a precision not equal to the size of its mode. */
3052 if (((inter_int && inside_int)
3053 || (inter_float && inside_float)
3054 || (inter_vec && inside_vec))
3055 && inter_prec >= inside_prec
3056 && (inter_float || inter_vec
3057 || inter_unsignedp == inside_unsignedp)
3058 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
3059 && TYPE_MODE (type) == TYPE_MODE (inter_type))
3060 && ! final_ptr
3061 && (! final_vec || inter_prec == inside_prec))
3063 gimple_assign_set_rhs1 (stmt, defop0);
3064 update_stmt (stmt);
3065 return remove_prop_source_from_use (op0) ? 2 : 1;
3068 /* If we have a sign-extension of a zero-extended value, we can
3069 replace that by a single zero-extension. Likewise if the
3070 final conversion does not change precision we can drop the
3071 intermediate conversion. */
3072 if (inside_int && inter_int && final_int
3073 && ((inside_prec < inter_prec && inter_prec < final_prec
3074 && inside_unsignedp && !inter_unsignedp)
3075 || final_prec == inter_prec))
3077 gimple_assign_set_rhs1 (stmt, defop0);
3078 update_stmt (stmt);
3079 return remove_prop_source_from_use (op0) ? 2 : 1;
3082 /* Two conversions in a row are not needed unless:
3083 - some conversion is floating-point (overstrict for now), or
3084 - some conversion is a vector (overstrict for now), or
3085 - the intermediate type is narrower than both initial and
3086 final, or
3087 - the intermediate type and innermost type differ in signedness,
3088 and the outermost type is wider than the intermediate, or
3089 - the initial type is a pointer type and the precisions of the
3090 intermediate and final types differ, or
3091 - the final type is a pointer type and the precisions of the
3092 initial and intermediate types differ. */
3093 if (! inside_float && ! inter_float && ! final_float
3094 && ! inside_vec && ! inter_vec && ! final_vec
3095 && (inter_prec >= inside_prec || inter_prec >= final_prec)
3096 && ! (inside_int && inter_int
3097 && inter_unsignedp != inside_unsignedp
3098 && inter_prec < final_prec)
3099 && ((inter_unsignedp && inter_prec > inside_prec)
3100 == (final_unsignedp && final_prec > inter_prec))
3101 && ! (inside_ptr && inter_prec != final_prec)
3102 && ! (final_ptr && inside_prec != inter_prec)
3103 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
3104 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
3106 gimple_assign_set_rhs1 (stmt, defop0);
3107 update_stmt (stmt);
3108 return remove_prop_source_from_use (op0) ? 2 : 1;
3111 /* A truncation to an unsigned type should be canonicalized as
3112 bitwise and of a mask. */
3113 if (final_int && inter_int && inside_int
3114 && final_prec == inside_prec
3115 && final_prec > inter_prec
3116 && inter_unsignedp)
3118 tree tem;
3119 tem = fold_build2 (BIT_AND_EXPR, inside_type,
3120 defop0,
3121 wide_int_to_tree
3122 (inside_type,
3123 wi::mask (inter_prec, false,
3124 TYPE_PRECISION (inside_type))));
3125 if (!useless_type_conversion_p (type, inside_type))
3127 tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
3128 GSI_SAME_STMT);
3129 gimple_assign_set_rhs1 (stmt, tem);
3131 else
3132 gimple_assign_set_rhs_from_tree (gsi, tem);
3133 update_stmt (gsi_stmt (*gsi));
3134 return 1;
3137 /* If we are converting an integer to a floating-point that can
3138 represent it exactly and back to an integer, we can skip the
3139 floating-point conversion. */
3140 if (inside_int && inter_float && final_int &&
3141 (unsigned) significand_size (TYPE_MODE (inter_type))
3142 >= inside_prec - !inside_unsignedp)
3144 if (useless_type_conversion_p (type, inside_type))
3146 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
3147 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
3148 update_stmt (stmt);
3149 return remove_prop_source_from_use (op0) ? 2 : 1;
3151 else
3153 gimple_assign_set_rhs1 (stmt, defop0);
3154 gimple_assign_set_rhs_code (stmt, CONVERT_EXPR);
3155 update_stmt (stmt);
3156 return remove_prop_source_from_use (op0) ? 2 : 1;
3161 return 0;
3164 /* Combine VIEW_CONVERT_EXPRs with their defining statement. */
3166 static bool
3167 simplify_vce (gimple_stmt_iterator *gsi)
3169 gimple stmt = gsi_stmt (*gsi);
3170 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
3172 /* Drop useless VIEW_CONVERT_EXPRs. */
3173 tree op = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3174 if (useless_type_conversion_p (type, TREE_TYPE (op)))
3176 gimple_assign_set_rhs1 (stmt, op);
3177 update_stmt (stmt);
3178 return true;
3181 if (TREE_CODE (op) != SSA_NAME)
3182 return false;
3184 gimple def_stmt = SSA_NAME_DEF_STMT (op);
3185 if (!is_gimple_assign (def_stmt))
3186 return false;
3188 tree def_op = gimple_assign_rhs1 (def_stmt);
3189 switch (gimple_assign_rhs_code (def_stmt))
3191 CASE_CONVERT:
3192 /* Strip integral conversions that do not change the precision. */
3193 if ((INTEGRAL_TYPE_P (TREE_TYPE (op))
3194 || POINTER_TYPE_P (TREE_TYPE (op)))
3195 && (INTEGRAL_TYPE_P (TREE_TYPE (def_op))
3196 || POINTER_TYPE_P (TREE_TYPE (def_op)))
3197 && (TYPE_PRECISION (TREE_TYPE (op))
3198 == TYPE_PRECISION (TREE_TYPE (def_op))))
3200 TREE_OPERAND (gimple_assign_rhs1 (stmt), 0) = def_op;
3201 update_stmt (stmt);
3202 return true;
3204 break;
3206 case VIEW_CONVERT_EXPR:
3207 /* Series of VIEW_CONVERT_EXPRs on register operands can
3208 be contracted. */
3209 if (TREE_CODE (TREE_OPERAND (def_op, 0)) == SSA_NAME)
3211 if (useless_type_conversion_p (type,
3212 TREE_TYPE (TREE_OPERAND (def_op, 0))))
3213 gimple_assign_set_rhs1 (stmt, TREE_OPERAND (def_op, 0));
3214 else
3215 TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)
3216 = TREE_OPERAND (def_op, 0);
3217 update_stmt (stmt);
3218 return true;
3221 default:;
3224 return false;
3227 /* Combine an element access with a shuffle. Returns true if there were
3228 any changes made, else it returns false. */
3230 static bool
3231 simplify_bitfield_ref (gimple_stmt_iterator *gsi)
3233 gimple stmt = gsi_stmt (*gsi);
3234 gimple def_stmt;
3235 tree op, op0, op1, op2;
3236 tree elem_type;
3237 unsigned idx, n, size;
3238 enum tree_code code;
3240 op = gimple_assign_rhs1 (stmt);
3241 gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);
3243 op0 = TREE_OPERAND (op, 0);
3244 if (TREE_CODE (op0) != SSA_NAME
3245 || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
3246 return false;
3248 def_stmt = get_prop_source_stmt (op0, false, NULL);
3249 if (!def_stmt || !can_propagate_from (def_stmt))
3250 return false;
3252 op1 = TREE_OPERAND (op, 1);
3253 op2 = TREE_OPERAND (op, 2);
3254 code = gimple_assign_rhs_code (def_stmt);
3256 if (code == CONSTRUCTOR)
3258 tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
3259 gimple_assign_rhs1 (def_stmt), op1, op2);
3260 if (!tem || !valid_gimple_rhs_p (tem))
3261 return false;
3262 gimple_assign_set_rhs_from_tree (gsi, tem);
3263 update_stmt (gsi_stmt (*gsi));
3264 return true;
3267 elem_type = TREE_TYPE (TREE_TYPE (op0));
3268 if (TREE_TYPE (op) != elem_type)
3269 return false;
3271 size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
3272 n = TREE_INT_CST_LOW (op1) / size;
3273 if (n != 1)
3274 return false;
3275 idx = TREE_INT_CST_LOW (op2) / size;
3277 if (code == VEC_PERM_EXPR)
3279 tree p, m, index, tem;
3280 unsigned nelts;
3281 m = gimple_assign_rhs3 (def_stmt);
3282 if (TREE_CODE (m) != VECTOR_CST)
3283 return false;
3284 nelts = VECTOR_CST_NELTS (m);
3285 idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
3286 idx %= 2 * nelts;
3287 if (idx < nelts)
3289 p = gimple_assign_rhs1 (def_stmt);
3291 else
3293 p = gimple_assign_rhs2 (def_stmt);
3294 idx -= nelts;
3296 index = build_int_cst (TREE_TYPE (TREE_TYPE (m)), idx * size);
3297 tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
3298 unshare_expr (p), op1, index);
3299 gimple_assign_set_rhs1 (stmt, tem);
3300 fold_stmt (gsi);
3301 update_stmt (gsi_stmt (*gsi));
3302 return true;
3305 return false;
3308 /* Determine whether applying the 2 permutations (mask1 then mask2)
3309 gives back one of the input. */
3311 static int
3312 is_combined_permutation_identity (tree mask1, tree mask2)
3314 tree mask;
3315 unsigned int nelts, i, j;
3316 bool maybe_identity1 = true;
3317 bool maybe_identity2 = true;
3319 gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
3320 && TREE_CODE (mask2) == VECTOR_CST);
3321 mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
3322 gcc_assert (TREE_CODE (mask) == VECTOR_CST);
3324 nelts = VECTOR_CST_NELTS (mask);
3325 for (i = 0; i < nelts; i++)
3327 tree val = VECTOR_CST_ELT (mask, i);
3328 gcc_assert (TREE_CODE (val) == INTEGER_CST);
3329 j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
3330 if (j == i)
3331 maybe_identity2 = false;
3332 else if (j == i + nelts)
3333 maybe_identity1 = false;
3334 else
3335 return 0;
3337 return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
3340 /* Combine a shuffle with its arguments. Returns 1 if there were any
3341 changes made, 2 if cfg-cleanup needs to run. Else it returns 0. */
3343 static int
3344 simplify_permutation (gimple_stmt_iterator *gsi)
3346 gimple stmt = gsi_stmt (*gsi);
3347 gimple def_stmt;
3348 tree op0, op1, op2, op3, arg0, arg1;
3349 enum tree_code code;
3350 bool single_use_op0 = false;
3352 gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
3354 op0 = gimple_assign_rhs1 (stmt);
3355 op1 = gimple_assign_rhs2 (stmt);
3356 op2 = gimple_assign_rhs3 (stmt);
3358 if (TREE_CODE (op2) != VECTOR_CST)
3359 return 0;
3361 if (TREE_CODE (op0) == VECTOR_CST)
3363 code = VECTOR_CST;
3364 arg0 = op0;
3366 else if (TREE_CODE (op0) == SSA_NAME)
3368 def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
3369 if (!def_stmt || !can_propagate_from (def_stmt))
3370 return 0;
3372 code = gimple_assign_rhs_code (def_stmt);
3373 arg0 = gimple_assign_rhs1 (def_stmt);
3375 else
3376 return 0;
3378 /* Two consecutive shuffles. */
3379 if (code == VEC_PERM_EXPR)
3381 tree orig;
3382 int ident;
3384 if (op0 != op1)
3385 return 0;
3386 op3 = gimple_assign_rhs3 (def_stmt);
3387 if (TREE_CODE (op3) != VECTOR_CST)
3388 return 0;
3389 ident = is_combined_permutation_identity (op3, op2);
3390 if (!ident)
3391 return 0;
3392 orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
3393 : gimple_assign_rhs2 (def_stmt);
3394 gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
3395 gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
3396 gimple_set_num_ops (stmt, 2);
3397 update_stmt (stmt);
3398 return remove_prop_source_from_use (op0) ? 2 : 1;
3401 /* Shuffle of a constructor. */
3402 else if (code == CONSTRUCTOR || code == VECTOR_CST)
3404 tree opt;
3405 bool ret = false;
3406 if (op0 != op1)
3408 if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
3409 return 0;
3411 if (TREE_CODE (op1) == VECTOR_CST)
3412 arg1 = op1;
3413 else if (TREE_CODE (op1) == SSA_NAME)
3415 enum tree_code code2;
3417 gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
3418 if (!def_stmt2 || !can_propagate_from (def_stmt2))
3419 return 0;
3421 code2 = gimple_assign_rhs_code (def_stmt2);
3422 if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
3423 return 0;
3424 arg1 = gimple_assign_rhs1 (def_stmt2);
3426 else
3427 return 0;
3429 else
3431 /* Already used twice in this statement. */
3432 if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
3433 return 0;
3434 arg1 = arg0;
3436 opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
3437 if (!opt
3438 || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
3439 return 0;
3440 gimple_assign_set_rhs_from_tree (gsi, opt);
3441 update_stmt (gsi_stmt (*gsi));
3442 if (TREE_CODE (op0) == SSA_NAME)
3443 ret = remove_prop_source_from_use (op0);
3444 if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
3445 ret |= remove_prop_source_from_use (op1);
3446 return ret ? 2 : 1;
3449 return 0;
3452 /* Recognize a VEC_PERM_EXPR. Returns true if there were any changes. */
3454 static bool
3455 simplify_vector_constructor (gimple_stmt_iterator *gsi)
3457 gimple stmt = gsi_stmt (*gsi);
3458 gimple def_stmt;
3459 tree op, op2, orig, type, elem_type;
3460 unsigned elem_size, nelts, i;
3461 enum tree_code code;
3462 constructor_elt *elt;
3463 unsigned char *sel;
3464 bool maybe_ident;
3466 gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);
3468 op = gimple_assign_rhs1 (stmt);
3469 type = TREE_TYPE (op);
3470 gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);
3472 nelts = TYPE_VECTOR_SUBPARTS (type);
3473 elem_type = TREE_TYPE (type);
3474 elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
3476 sel = XALLOCAVEC (unsigned char, nelts);
3477 orig = NULL;
3478 maybe_ident = true;
3479 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
3481 tree ref, op1;
3483 if (i >= nelts)
3484 return false;
3486 if (TREE_CODE (elt->value) != SSA_NAME)
3487 return false;
3488 def_stmt = get_prop_source_stmt (elt->value, false, NULL);
3489 if (!def_stmt)
3490 return false;
3491 code = gimple_assign_rhs_code (def_stmt);
3492 if (code != BIT_FIELD_REF)
3493 return false;
3494 op1 = gimple_assign_rhs1 (def_stmt);
3495 ref = TREE_OPERAND (op1, 0);
3496 if (orig)
3498 if (ref != orig)
3499 return false;
3501 else
3503 if (TREE_CODE (ref) != SSA_NAME)
3504 return false;
3505 if (!useless_type_conversion_p (type, TREE_TYPE (ref)))
3506 return false;
3507 orig = ref;
3509 if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
3510 return false;
3511 sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
3512 if (sel[i] != i) maybe_ident = false;
3514 if (i < nelts)
3515 return false;
3517 if (maybe_ident)
3518 gimple_assign_set_rhs_from_tree (gsi, orig);
3519 else
3521 tree mask_type, *mask_elts;
3523 if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
3524 return false;
3525 mask_type
3526 = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
3527 nelts);
3528 if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
3529 || GET_MODE_SIZE (TYPE_MODE (mask_type))
3530 != GET_MODE_SIZE (TYPE_MODE (type)))
3531 return false;
3532 mask_elts = XALLOCAVEC (tree, nelts);
3533 for (i = 0; i < nelts; i++)
3534 mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
3535 op2 = build_vector (mask_type, mask_elts);
3536 gimple_assign_set_rhs_with_ops_1 (gsi, VEC_PERM_EXPR, orig, orig, op2);
3538 update_stmt (gsi_stmt (*gsi));
3539 return true;
3542 /* Simplify multiplications.
3543 Return true if a transformation applied, otherwise return false. */
3545 static bool
3546 simplify_mult (gimple_stmt_iterator *gsi)
3548 gimple stmt = gsi_stmt (*gsi);
3549 tree arg1 = gimple_assign_rhs1 (stmt);
3550 tree arg2 = gimple_assign_rhs2 (stmt);
3552 if (TREE_CODE (arg1) != SSA_NAME)
3553 return false;
3555 gimple def_stmt = SSA_NAME_DEF_STMT (arg1);
3556 if (!is_gimple_assign (def_stmt))
3557 return false;
3559 /* Look through a sign-changing conversion. */
3560 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
3562 if (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (def_stmt)))
3563 != TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt)))
3564 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) != SSA_NAME)
3565 return false;
3566 def_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def_stmt));
3567 if (!is_gimple_assign (def_stmt))
3568 return false;
3571 if (gimple_assign_rhs_code (def_stmt) == EXACT_DIV_EXPR)
3573 if (operand_equal_p (gimple_assign_rhs2 (def_stmt), arg2, 0))
3575 tree res = gimple_assign_rhs1 (def_stmt);
3576 if (useless_type_conversion_p (TREE_TYPE (arg1), TREE_TYPE (res)))
3577 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (res), res,
3578 NULL_TREE);
3579 else
3580 gimple_assign_set_rhs_with_ops (gsi, NOP_EXPR, res, NULL_TREE);
3581 gcc_assert (gsi_stmt (*gsi) == stmt);
3582 update_stmt (stmt);
3583 return true;
3587 return false;
3589 /* Main entry point for the forward propagation and statement combine
3590 optimizer. */
3592 namespace {
3594 const pass_data pass_data_forwprop =
3596 GIMPLE_PASS, /* type */
3597 "forwprop", /* name */
3598 OPTGROUP_NONE, /* optinfo_flags */
3599 TV_TREE_FORWPROP, /* tv_id */
3600 ( PROP_cfg | PROP_ssa ), /* properties_required */
3601 0, /* properties_provided */
3602 0, /* properties_destroyed */
3603 0, /* todo_flags_start */
3604 TODO_update_ssa, /* todo_flags_finish */
3607 class pass_forwprop : public gimple_opt_pass
3609 public:
3610 pass_forwprop (gcc::context *ctxt)
3611 : gimple_opt_pass (pass_data_forwprop, ctxt)
3614 /* opt_pass methods: */
3615 opt_pass * clone () { return new pass_forwprop (m_ctxt); }
3616 virtual bool gate (function *) { return flag_tree_forwprop; }
3617 virtual unsigned int execute (function *);
3619 }; // class pass_forwprop
3621 unsigned int
3622 pass_forwprop::execute (function *fun)
3624 basic_block bb;
3625 unsigned int todoflags = 0;
3627 cfg_changed = false;
3629 FOR_EACH_BB_FN (bb, fun)
3631 gimple_stmt_iterator gsi;
3633 /* Apply forward propagation to all stmts in the basic-block.
3634 Note we update GSI within the loop as necessary. */
3635 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
3637 gimple stmt = gsi_stmt (gsi);
3638 tree lhs, rhs;
3639 enum tree_code code;
3641 if (!is_gimple_assign (stmt))
3643 gsi_next (&gsi);
3644 continue;
3647 lhs = gimple_assign_lhs (stmt);
3648 rhs = gimple_assign_rhs1 (stmt);
3649 code = gimple_assign_rhs_code (stmt);
3650 if (TREE_CODE (lhs) != SSA_NAME
3651 || has_zero_uses (lhs))
3653 gsi_next (&gsi);
3654 continue;
3657 /* If this statement sets an SSA_NAME to an address,
3658 try to propagate the address into the uses of the SSA_NAME. */
3659 if (code == ADDR_EXPR
3660 /* Handle pointer conversions on invariant addresses
3661 as well, as this is valid gimple. */
3662 || (CONVERT_EXPR_CODE_P (code)
3663 && TREE_CODE (rhs) == ADDR_EXPR
3664 && POINTER_TYPE_P (TREE_TYPE (lhs))))
3666 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3667 if ((!base
3668 || !DECL_P (base)
3669 || decl_address_invariant_p (base))
3670 && !stmt_references_abnormal_ssa_name (stmt)
3671 && forward_propagate_addr_expr (lhs, rhs, true))
3673 release_defs (stmt);
3674 gsi_remove (&gsi, true);
3676 else
3677 gsi_next (&gsi);
3679 else if (code == POINTER_PLUS_EXPR)
3681 tree off = gimple_assign_rhs2 (stmt);
3682 if (TREE_CODE (off) == INTEGER_CST
3683 && can_propagate_from (stmt)
3684 && !simple_iv_increment_p (stmt)
3685 /* ??? Better adjust the interface to that function
3686 instead of building new trees here. */
3687 && forward_propagate_addr_expr
3688 (lhs,
3689 build1_loc (gimple_location (stmt),
3690 ADDR_EXPR, TREE_TYPE (rhs),
3691 fold_build2 (MEM_REF,
3692 TREE_TYPE (TREE_TYPE (rhs)),
3693 rhs,
3694 fold_convert (ptr_type_node,
3695 off))), true))
3697 release_defs (stmt);
3698 gsi_remove (&gsi, true);
3700 else if (is_gimple_min_invariant (rhs))
3702 /* Make sure to fold &a[0] + off_1 here. */
3703 fold_stmt_inplace (&gsi);
3704 update_stmt (stmt);
3705 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
3706 gsi_next (&gsi);
3708 else
3709 gsi_next (&gsi);
3711 else if (TREE_CODE_CLASS (code) == tcc_comparison)
3713 if (forward_propagate_comparison (&gsi))
3714 cfg_changed = true;
3716 else
3717 gsi_next (&gsi);
3720 /* Combine stmts with the stmts defining their operands.
3721 Note we update GSI within the loop as necessary. */
3722 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
3724 gimple stmt = gsi_stmt (gsi);
3725 bool changed = false;
3727 /* Mark stmt as potentially needing revisiting. */
3728 gimple_set_plf (stmt, GF_PLF_1, false);
3730 switch (gimple_code (stmt))
3732 case GIMPLE_ASSIGN:
3734 tree rhs1 = gimple_assign_rhs1 (stmt);
3735 enum tree_code code = gimple_assign_rhs_code (stmt);
3737 if ((code == BIT_NOT_EXPR
3738 || code == NEGATE_EXPR)
3739 && TREE_CODE (rhs1) == SSA_NAME)
3740 changed = simplify_not_neg_expr (&gsi);
3741 else if (code == COND_EXPR
3742 || code == VEC_COND_EXPR)
3744 /* In this case the entire COND_EXPR is in rhs1. */
3745 if (forward_propagate_into_cond (&gsi)
3746 || combine_cond_exprs (&gsi))
3748 changed = true;
3749 stmt = gsi_stmt (gsi);
3752 else if (TREE_CODE_CLASS (code) == tcc_comparison)
3754 int did_something;
3755 did_something = forward_propagate_into_comparison (&gsi);
3756 if (did_something == 2)
3757 cfg_changed = true;
3758 changed = did_something != 0;
3760 else if ((code == PLUS_EXPR
3761 || code == BIT_IOR_EXPR
3762 || code == BIT_XOR_EXPR)
3763 && simplify_rotate (&gsi))
3764 changed = true;
3765 else if (code == BIT_AND_EXPR
3766 || code == BIT_IOR_EXPR
3767 || code == BIT_XOR_EXPR)
3768 changed = simplify_bitwise_binary (&gsi);
3769 else if (code == MULT_EXPR)
3771 changed = simplify_mult (&gsi);
3772 if (changed
3773 && maybe_clean_or_replace_eh_stmt (stmt, stmt)
3774 && gimple_purge_dead_eh_edges (bb))
3775 cfg_changed = true;
3777 else if (code == PLUS_EXPR
3778 || code == MINUS_EXPR)
3780 changed = associate_plusminus (&gsi);
3781 if (changed
3782 && maybe_clean_or_replace_eh_stmt (stmt, stmt)
3783 && gimple_purge_dead_eh_edges (bb))
3784 cfg_changed = true;
3786 else if (code == POINTER_PLUS_EXPR)
3787 changed = associate_pointerplus (&gsi);
3788 else if (CONVERT_EXPR_CODE_P (code)
3789 || code == FLOAT_EXPR
3790 || code == FIX_TRUNC_EXPR)
3792 int did_something = combine_conversions (&gsi);
3793 if (did_something == 2)
3794 cfg_changed = true;
3796 /* If we have a narrowing conversion to an integral
3797 type that is fed by a BIT_AND_EXPR, we might be
3798 able to remove the BIT_AND_EXPR if it merely
3799 masks off bits outside the final type (and nothing
3800 else. */
3801 if (! did_something)
3803 tree outer_type = TREE_TYPE (gimple_assign_lhs (stmt));
3804 tree inner_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
3805 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
3806 && INTEGRAL_TYPE_P (outer_type)
3807 && INTEGRAL_TYPE_P (inner_type)
3808 && (TYPE_PRECISION (outer_type)
3809 <= TYPE_PRECISION (inner_type)))
3810 did_something = simplify_conversion_from_bitmask (&gsi);
3813 changed = did_something != 0;
3815 else if (code == VIEW_CONVERT_EXPR)
3816 changed = simplify_vce (&gsi);
3817 else if (code == VEC_PERM_EXPR)
3819 int did_something = simplify_permutation (&gsi);
3820 if (did_something == 2)
3821 cfg_changed = true;
3822 changed = did_something != 0;
3824 else if (code == BIT_FIELD_REF)
3825 changed = simplify_bitfield_ref (&gsi);
3826 else if (code == CONSTRUCTOR
3827 && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
3828 changed = simplify_vector_constructor (&gsi);
3829 break;
3832 case GIMPLE_SWITCH:
3833 changed = simplify_gimple_switch (stmt);
3834 break;
3836 case GIMPLE_COND:
3838 int did_something;
3839 did_something = forward_propagate_into_gimple_cond (stmt);
3840 if (did_something == 2)
3841 cfg_changed = true;
3842 changed = did_something != 0;
3843 break;
3846 case GIMPLE_CALL:
3848 tree callee = gimple_call_fndecl (stmt);
3849 if (callee != NULL_TREE
3850 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
3851 changed = simplify_builtin_call (&gsi, callee);
3852 break;
3855 default:;
3858 if (changed)
3860 /* If the stmt changed then re-visit it and the statements
3861 inserted before it. */
3862 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
3863 if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
3864 break;
3865 if (gsi_end_p (gsi))
3866 gsi = gsi_start_bb (bb);
3867 else
3868 gsi_next (&gsi);
3870 else
3872 /* Stmt no longer needs to be revisited. */
3873 gimple_set_plf (stmt, GF_PLF_1, true);
3874 gsi_next (&gsi);
3879 if (cfg_changed)
3880 todoflags |= TODO_cleanup_cfg;
3882 return todoflags;
3885 } // anon namespace
3887 gimple_opt_pass *
3888 make_pass_forwprop (gcc::context *ctxt)
3890 return new pass_forwprop (ctxt);