/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "gimple.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "flags.h"
#include "expr.h"
#include "cfgloop.h"
#include "optabs.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-dom.h"
/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or
     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither have volatile on them):
     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or
     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */
static bool forward_propagate_addr_expr (tree, tree, bool);

/* Set to true if we delete dead edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple stmt);
/* Get the next statement we can propagate NAME's value into skipping
   trivial copies.  Returns the statement that is suitable as a
   propagation destination or NULL_TREE if there is no such one.
   This only returns destinations in a single-use chain.  FINAL_NAME_P
   if non-NULL is written to the ssa name that represents the use.  */

static gimple
get_prop_dest_stmt (tree name, tree *final_name_p)
{
  use_operand_p use;
  gimple use_stmt;

  do {
    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))
      return NULL;

    /* If this is not a trivial copy, we found it.  */
    if (!gimple_assign_ssa_name_copy_p (use_stmt)
        || gimple_assign_rhs1 (use_stmt) != name)
      break;

    /* Continue searching uses of the copy destination.  */
    name = gimple_assign_lhs (use_stmt);
  } while (1);

  if (final_name_p)
    *final_name_p = name;

  return use_stmt;
}
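
/* For illustration (hypothetical GIMPLE, names invented): with the
   single-use chain

     a_1 = x_2 < y_3;
     b_4 = a_1;
     if (b_4 != 0) goto ...;

   get_prop_dest_stmt (a_1, &final) skips the trivial copy and returns
   the GIMPLE_COND, writing b_4 to *final.  */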
/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL_TREE if there is no such one.
   If SINGLE_USE_ONLY is set considers only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
        single_use = false;
        if (single_use_only)
          return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is a simple copy, continue looking.  */
    if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
      name = gimple_assign_rhs1 (def_stmt);
    else
      {
        if (!single_use_only && single_use_p)
          *single_use_p = single_use;

        return def_stmt;
      }
  } while (1);
}
/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can be always propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we cannot apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}
/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (SSA_NAME_IN_FREE_LIST (name)
        || SSA_NAME_IS_DEFAULT_DEF (name)
        || !has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    if (gimple_code (stmt) == GIMPLE_PHI
        || gimple_has_side_effects (stmt))
      return cfg_changed;

    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    if (gsi_remove (&gsi, true))
      cfg_changed |= gimple_purge_dead_eh_edges (bb);
    release_defs (stmt);

    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}
/* Return the rhs of a gimple_assign STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt),
                            gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}
/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}
/* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
   of its operand.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (gimple stmt,
                                     enum tree_code code, tree type,
                                     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        rhs0, op1, !single_use0_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        op0, rhs1, !single_use1_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
                                  rhs0, rhs1,
                                  !(single_use0_p && single_use1_p));

  return tmp;
}
/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
                                             gimple_assign_rhs_code (stmt),
                                             type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}
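
/* For illustration (hypothetical GIMPLE, names invented): with

     x_1 = a_2 + 1;
     t_3 = x_1 == 5;

   folding the defining statement into the comparison yields

     t_3 = a_2 == 4;

   after which x_1's definition is dead and gets removed.  */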
/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gimple stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
                                             boolean_type_node,
                                             rhs1, rhs2);
  if (tmp)
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_gimple_expr (dump_file, stmt, 0, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
           && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
           && integer_zerop (rhs2))
          || (code == NE_EXPR
              && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}
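
/* For illustration (hypothetical GIMPLE): the canonicalization above turns

     if (b_1 == 0) goto L1; else goto L2;

   for a _Bool b_1 into

     if (b_1 != 0) goto L2; else goto L1;

   by flipping the EDGE_TRUE_VALUE/EDGE_FALSE_VALUE flags on the two
   successor edges instead of rewriting the destinations.  */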
/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  bool swap = false;

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
                                               TREE_TYPE (cond),
                                               TREE_OPERAND (cond, 0),
                                               TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      enum tree_code def_code;
      tree name = cond;
      gimple def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return false;

      def_code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (def_code) == tcc_comparison)
        tmp = fold_build2_loc (gimple_location (def_stmt),
                               def_code,
                               TREE_TYPE (cond),
                               gimple_assign_rhs1 (def_stmt),
                               gimple_assign_rhs2 (def_stmt));
      else if (code == COND_EXPR
               && ((def_code == BIT_NOT_EXPR
                    && TYPE_PRECISION (TREE_TYPE (cond)) == 1)
                   || (def_code == BIT_XOR_EXPR
                       && integer_onep (gimple_assign_rhs2 (def_stmt)))))
        {
          tmp = gimple_assign_rhs1 (def_stmt);
          swap = true;
        }
    }

  if (tmp
      && is_gimple_condexpr (tmp))
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, cond, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
                                  : integer_onep (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
      else if (integer_zerop (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
      else
        {
          gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
          if (swap)
            {
              tree t = gimple_assign_rhs2 (stmt);
              gimple_assign_set_rhs2 (stmt, gimple_assign_rhs3 (stmt));
              gimple_assign_set_rhs3 (stmt, t);
            }
        }
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return true;
    }

  return false;
}
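
/* For illustration (hypothetical GIMPLE): given

     t_1 = ~b_2;              (b_2 of 1-bit precision)
     r_3 = t_1 ? x_4 : y_5;

   the negation is absorbed by swapping the COND_EXPR arms:

     r_3 = b_2 ? y_5 : x_4;  */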
/* Propagate from the ssa name definition statements of COND_EXPR
   values in the rhs of statement STMT into the conditional arms
   if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
combine_cond_exprs (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree cond, val1, val2;
  bool changed = false;

  cond = gimple_assign_rhs1 (stmt);
  val1 = gimple_assign_rhs2 (stmt);
  if (TREE_CODE (val1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val1);
      if (is_gimple_assign (def_stmt)
          && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
          && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
        {
          val1 = unshare_expr (gimple_assign_rhs2 (def_stmt));
          gimple_assign_set_rhs2 (stmt, val1);
          changed = true;
        }
    }
  val2 = gimple_assign_rhs3 (stmt);
  if (TREE_CODE (val2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val2);
      if (is_gimple_assign (def_stmt)
          && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
          && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
        {
          val2 = unshare_expr (gimple_assign_rhs3 (def_stmt));
          gimple_assign_set_rhs3 (stmt, val2);
          changed = true;
        }
    }
  if (operand_equal_p (val1, val2, 0))
    {
      gimple_assign_set_rhs_from_tree (gsi_p, val1);
      stmt = gsi_stmt (*gsi_p);
      changed = true;
    }

  if (changed)
    update_stmt (stmt);

  return changed;
}
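
/* For illustration (hypothetical GIMPLE): with

     a_1 = c_2 ? x_3 : y_4;
     r_5 = c_2 ? a_1 : z_6;

   the true arm of r_5 is replaced by the matching arm of a_1's
   definition, giving r_5 = c_2 ? x_3 : z_6; if both arms then become
   equal, the COND_EXPR collapses to a plain copy.  */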
/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
    cfg_changed = true;

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}
/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation can
   be not totally successful, yet things may have been changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Do not perform copy-propagation but recurse through copy chains.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == SSA_NAME)
    return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

  /* The use statement could be a conversion.  Recurse to the uses of the
     lhs as copyprop does not copy through pointer to integer to pointer
     conversions and FRE does not catch all cases either.
     Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && CONVERT_EXPR_CODE_P (rhs_code))
    {
      /* If there is a point in a conversion chain where the types match
         so we can remove a conversion re-materialize the address here
         and stop.  */
      if (single_use_p
          && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
        {
          gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
          gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
          return true;
        }

      /* Else recurse if the conversion preserves the address value.  */
      if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
           || POINTER_TYPE_P (TREE_TYPE (lhs)))
          && (TYPE_PRECISION (TREE_TYPE (lhs))
              >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
        return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

      return false;
    }

  /* If this isn't a conversion chain from this on we only can propagate
     into compatible pointer contexts.  */
  if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
    return false;

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
         for further propagation.  Simply rely on fold building that
         and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
                                 def_rhs,
                                 fold_convert (ptr_type_node,
                                               gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
        return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
                                                    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
         bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
          && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
        return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
                                        new_def_rhs, NULL_TREE);
      else if (is_gimple_min_invariant (new_def_rhs))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
                                        new_def_rhs, NULL_TREE);
      else
        return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (lhs);
          tree new_ptr;
          off += double_int::from_shwi (def_rhs_offset);
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (lhs, 0) = new_ptr;
          TREE_OPERAND (lhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the only use.  */
          if (single_use_p)
            return true;
        }
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the LHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (lhs, 1))
               && ((gimple_assign_lhs (use_stmt) == lhs
                    && useless_type_conversion_p
                         (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
                          TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
                   || types_compatible_p (TREE_TYPE (lhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
               /* Don't forward anything into clobber stmts if it would result
                  in the lhs no longer being a MEM_REF.  */
               && (!gimple_clobber_p (use_stmt)
                   || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_lhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (lhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
          new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *lhsp = new_lhs;
          TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
          *def_rhs_basep = saved;
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the
             only use.  */
          if (single_use_p)
            return true;
        }
      else
        /* We can have a struct assignment dereferencing our name twice.
           Note that we didn't propagate into the lhs to not falsely
           claim we did when propagating into the rhs.  */
        res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
  if (TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  while (handled_component_p (*rhsp))
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (rhs);
          tree new_ptr;
          off += double_int::from_shwi (def_rhs_offset);
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (rhs, 0) = new_ptr;
          TREE_OPERAND (rhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the RHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (rhs, 1))
               && ((gimple_assign_rhs1 (use_stmt) == rhs
                    && useless_type_conversion_p
                         (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
                   || types_compatible_p (TREE_TYPE (rhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_rhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (rhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
          new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *rhsp = new_rhs;
          TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
          *def_rhs_basep = saved;
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
                                 ADDR_EXPR, TREE_TYPE (def_rhs),
                                 fold_build2 (MEM_REF,
                                              TREE_TYPE (TREE_TYPE (def_rhs)),
                                              unshare_expr (def_rhs),
                                              fold_convert (ptr_type_node,
                                                            rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  return false;
}
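
/* For illustration (hypothetical GIMPLE): given

     ptr_1 = &a;              (a of type int)
     x_2 = *ptr_1;

   the MEM_REF on the RHS is folded into a direct load:

     x_2 = a;  */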
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
   the single use in the previous invocation.  Pass true when calling
   this as toplevel.

   Returns true if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
{
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = parent_single_use_p && has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (!is_gimple_assign (use_stmt))
        {
          if (!is_gimple_debug (use_stmt))
            all = false;
          continue;
        }

      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
      result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                              single_use_p);
      /* If the use has moved to a different statement adjust
         the update machinery for the old statement too.  */
      if (use_stmt != gsi_stmt (gsi))
        {
          update_stmt (use_stmt);
          use_stmt = gsi_stmt (gsi);
        }
      update_stmt (use_stmt);
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  return all && has_zero_uses (name);
}
/* Forward propagate the comparison defined in *DEFGSI like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  Advance DEFGSI to the next
   statement.  */

static bool
forward_propagate_comparison (gimple_stmt_iterator *defgsi)
{
  gimple stmt = gsi_stmt (*defgsi);
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  tree tmp = NULL_TREE;
  gimple_stmt_iterator gsi;
  enum tree_code code;
  tree lhs;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    goto bailout;

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt
      || !is_gimple_assign (use_stmt))
    goto bailout;

  code = gimple_assign_rhs_code (use_stmt);
  lhs = gimple_assign_lhs (use_stmt);
  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
    goto bailout;

  /* We can propagate the condition into a statement that
     computes the logical negation of the comparison result.  */
  if ((code == BIT_NOT_EXPR
       && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
      || (code == BIT_XOR_EXPR
          && integer_onep (gimple_assign_rhs2 (use_stmt))))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
      bool nans = HONOR_NANS (TYPE_MODE (type));
      enum tree_code inv_code;
      inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
      if (inv_code == ERROR_MARK)
        goto bailout;

      tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
                    gimple_assign_rhs2 (stmt));
    }
  else
    goto bailout;

  gsi = gsi_for_stmt (use_stmt);
  gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
  use_stmt = gsi_stmt (gsi);
  update_stmt (use_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Replaced '");
      print_gimple_expr (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "' with '");
      print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
      fprintf (dump_file, "'\n");
    }

  /* When we remove stmt now the iterator defgsi goes off its current
     sequence, hence advance it now.  */
  gsi_next (defgsi);

  /* Remove defining statements.  */
  return remove_prop_source_from_use (name);

bailout:
  gsi_next (defgsi);
  return false;
}
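
/* For illustration (hypothetical GIMPLE): with

     cond_1 = x_2 < y_3;
     a_4 = ~cond_1;           (a_4 of 1-bit precision)

   the inverted comparison is substituted directly, giving

     a_4 = x_2 >= y_3;

   (assuming NaNs need not be honored for the operand type, so the
   comparison is invertible).  */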
/* GSI_P points to a statement which performs a narrowing integral
   conversion.

   Look for cases like:

     t = x & c;
     y = (T) t;

   Turn them into:

     t = x & c;
     y = (T) x;

   If T is narrower than X's type and C merely masks off bits outside
   of (T) and nothing else.

   Normally we'd let DCE remove the dead statement.  But no DCE runs
   after the last forwprop/combine pass, so we remove the obviously
   dead code ourselves.

   Return TRUE if a change was made, FALSE otherwise.  */

static bool
simplify_conversion_from_bitmask (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));

  /* See if the input for the conversion was set via a BIT_AND_EXPR and
     the only use of the BIT_AND_EXPR result is the conversion.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == BIT_AND_EXPR
      && has_single_use (gimple_assign_lhs (rhs_def_stmt)))
    {
      tree rhs_def_operand1 = gimple_assign_rhs1 (rhs_def_stmt);
      tree rhs_def_operand2 = gimple_assign_rhs2 (rhs_def_stmt);
      tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));

      /* Now verify suitability of the BIT_AND_EXPR's operands.
         The first must be an SSA_NAME that we can propagate and the
         second must be an integer constant that masks out all the
         bits outside the final result's type, but nothing else.  */
      if (TREE_CODE (rhs_def_operand1) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand1)
          && TREE_CODE (rhs_def_operand2) == INTEGER_CST
          && operand_equal_p (rhs_def_operand2,
                              build_low_bits_mask (TREE_TYPE (rhs_def_operand2),
                                                   TYPE_PRECISION (lhs_type)),
                              0))
        {
          /* This is an optimizable case.  Replace the source operand
             in the conversion with the first source operand of the
             BIT_AND_EXPR.  */
          gimple_assign_set_rhs1 (stmt, rhs_def_operand1);
          stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);

          /* There is no DCE after the last forwprop pass.  It's
             easy to clean up the first order effects here.  */
          gimple_stmt_iterator si;
          si = gsi_for_stmt (rhs_def_stmt);
          gsi_remove (&si, true);
          release_defs (rhs_def_stmt);
          return true;
        }
    }

  return false;
}
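
/* For illustration: with a 32-bit x,

     t_1 = x_2 & 255;
     y_3 = (unsigned char) t_1;

   the mask only clears bits that the narrowing conversion discards
   anyway, so this becomes y_3 = (unsigned char) x_2 and t_1's now
   dead definition is removed on the spot.  */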
/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
   If so, we can change STMT into lhs = y which can later be copy
   propagated.  Similarly for negation.

   This could trivially be formulated as a forward propagation
   to immediate uses.  However, we already had an implementation
   from DOM which used backward propagation via the use-def links.

   It turns out that backward propagation is actually faster as
   there's less work to do for each NOT/NEG expression we find.
   Backwards propagation needs to look at the statement in a single
   backlink.  Forward propagation needs to look at potentially more
   than one forward link.

   Returns true when the statement was changed.  */

static bool
simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree rhs = gimple_assign_rhs1 (stmt);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);

  /* See if the RHS_DEF_STMT has the same form as our statement.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
    {
      tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);

      /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
      if (TREE_CODE (rhs_def_operand) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
        {
          gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
          stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);
          return true;
        }
    }

  return false;
}
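
/* For illustration (hypothetical GIMPLE): the double negation

     x_1 = ~y_2;
     lhs_3 = ~x_1;

   collapses to lhs_3 = y_2; NEGATE_EXPR chains are handled the same
   way.  */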
/* Helper function for simplify_gimple_switch.  Remove case labels that
   have values outside the range of the new type.  */

static void
simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  vec<tree> labels;
  labels.create (branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  */
  for (i = 1; i < branch_num; i++)
    labels.quick_push (gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = labels.length ();
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
         out-of-range for INDEX_TYPE.  Push one label and let the
         CFG cleanups deal with this further.  */
      if (len == 0)
        {
          tree label, elt;

          label = CASE_LABEL (gimple_switch_default_label (stmt));
          elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
          labels.quick_push (elt);
          len = 1;
        }

      for (i = 0; i < labels.length (); i++)
        gimple_switch_set_label (stmt, i + 1, labels[i]);
      for (i++ ; i < branch_num; i++)
        gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
        {
          tree elt = gimple_switch_label (stmt, i);
          basic_block target = label_to_block (CASE_LABEL (elt));
          bitmap_set_bit (target_blocks, target->index);
        }
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
        {
          if (! bitmap_bit_p (target_blocks, e->dest->index))
            {
              remove_edge (e);
              cfg_changed = true;
              free_dominance_info (CDI_DOMINATORS);
            }
          else
            ei_next (&ei);
        }
      BITMAP_FREE (target_blocks);
    }

  labels.release ();
}
/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gimple stmt)
{
  tree cond = gimple_switch_index (stmt);
  tree def, to, ti;
  gimple def_stmt;

  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      def_stmt = SSA_NAME_DEF_STMT (cond);
      if (is_gimple_assign (def_stmt))
        {
          if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
            {
              int need_precision;
              bool fail;

              def = gimple_assign_rhs1 (def_stmt);

              to = TREE_TYPE (cond);
              ti = TREE_TYPE (def);

              /* If we have an extension that preserves value, then we
                 can copy the source value into the switch.  */

              need_precision = TYPE_PRECISION (ti);
              fail = false;
              if (! INTEGRAL_TYPE_P (ti))
                fail = true;
              else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
                fail = true;
              else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
                need_precision += 1;
              if (TYPE_PRECISION (to) < need_precision)
                fail = true;

              if (!fail)
                {
                  gimple_switch_set_index (stmt, def);
                  simplify_gimple_switch_label_vec (stmt, ti);
                  update_stmt (stmt);
                  return true;
                }
            }
        }
    }

  return false;
}
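
/* For illustration: for

     unsigned char c = ...;
     switch ((int) c) { case 1: ...  case 300: ... }

   the value-preserving extension is dropped so the switch runs on C
   directly; case 300 is then out of range for unsigned char and its
   label (and any edge it alone reached) is removed by
   simplify_gimple_switch_label_vec.  */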
/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
         twice, to handle ADDR_EXPR directly in p1/p2,
         SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
         on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
        {
          if (!POINTER_TYPE_P (TREE_TYPE (p)))
            break;
          if (TREE_CODE (p) == ADDR_EXPR)
            {
              tree q = TREE_OPERAND (p, 0);
              HOST_WIDE_INT offset;
              tree base = get_addr_base_and_unit_offset (q, &offset);
              if (base)
                {
                  q = base;
                  if (offset)
                    off = size_binop (PLUS_EXPR, off, size_int (offset));
                }
              if (TREE_CODE (q) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
                {
                  p = TREE_OPERAND (q, 0);
                  off = size_binop (PLUS_EXPR, off,
                                    double_int_to_tree (sizetype,
                                                        mem_ref_offset (q)));
                }
              else
                {
                  exps[i][j] = q;
                  offs[i][j++] = off;
                  break;
                }
            }
          if (TREE_CODE (p) != SSA_NAME)
            break;
          exps[i][j] = p;
          offs[i][j++] = off;
          if (j == CPD_ITERATIONS)
            break;
          stmt = SSA_NAME_DEF_STMT (p);
          if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
            break;
          code = gimple_assign_rhs_code (stmt);
          if (code == POINTER_PLUS_EXPR)
            {
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
                break;
              off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
              p = gimple_assign_rhs1 (stmt);
            }
          else if (code == ADDR_EXPR || code == NOP_EXPR)
            p = gimple_assign_rhs1 (stmt);
          else
            break;
        }
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
        return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
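
/* For illustration (hypothetical GIMPLE): for

     p1_1 = &buf[4];
     p2_2 = p1_1 + 12;

   both chains bottom out at the same base &buf with accumulated
   offsets 4 and 16, so constant_pointer_difference (p1_1, p2_2)
   returns 12.  */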
/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
   memcpy (p, "abcd", 4);
   memset (p + 4, ' ', 3);
   into
   memcpy (p, "abcd   ", 7);
   call if the latter can be stored by pieces during expansion.  */

static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
          || gimple_call_lhs (stmt2)
          || CHAR_BIT != 8
          || BITS_PER_UNIT != 8)
        break;
      else
        {
          tree callee1;
          tree ptr1, src1, str1, off1, len1, lhs1;
          tree ptr2 = gimple_call_arg (stmt2, 0);
          tree val2 = gimple_call_arg (stmt2, 1);
          tree len2 = gimple_call_arg (stmt2, 2);
          tree diff, vdef, new_str_cst;
          gimple use_stmt;
          unsigned int ptr1_align;
          unsigned HOST_WIDE_INT src_len;
          char *src_buf;
          use_operand_p use_p;

          if (!host_integerp (val2, 0)
              || !host_integerp (len2, 1))
            break;
          if (is_gimple_call (stmt1))
            {
              /* If first stmt is a call, it needs to be memcpy
                 or mempcpy, with string literal as second argument and
                 constant length.  */
              callee1 = gimple_call_fndecl (stmt1);
              if (callee1 == NULL_TREE
                  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
                  || gimple_call_num_args (stmt1) != 3)
                break;
              if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
                  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
                break;
              ptr1 = gimple_call_arg (stmt1, 0);
              src1 = gimple_call_arg (stmt1, 1);
              len1 = gimple_call_arg (stmt1, 2);
              lhs1 = gimple_call_lhs (stmt1);
              if (!host_integerp (len1, 1))
                break;
              str1 = string_constant (src1, &off1);
              if (str1 == NULL_TREE)
                break;
              if (!host_integerp (off1, 1)
                  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
                  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
                                             - tree_low_cst (off1, 1)) > 0
                  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
                  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
                     != TYPE_MODE (char_type_node))
                break;
            }
          else if (gimple_assign_single_p (stmt1))
            {
              /* Otherwise look for length 1 memcpy optimized into
                 assignment.  */
              ptr1 = gimple_assign_lhs (stmt1);
              src1 = gimple_assign_rhs1 (stmt1);
              if (TREE_CODE (ptr1) != MEM_REF
                  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
                  || !host_integerp (src1, 0))
                break;
              ptr1 = build_fold_addr_expr (ptr1);
              callee1 = NULL_TREE;
              len1 = size_one_node;
              lhs1 = NULL_TREE;
              off1 = size_zero_node;
              str1 = NULL_TREE;
            }
          else
            break;

          diff = constant_pointer_difference (ptr1, ptr2);
          if (diff == NULL && lhs1 != NULL)
            {
              diff = constant_pointer_difference (lhs1, ptr2);
              if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
                  && diff != NULL)
                diff = size_binop (PLUS_EXPR, diff,
                                   fold_convert (sizetype, len1));
            }
          /* If the difference between the second and first destination pointer
             is not constant, or is bigger than memcpy length, bail out.  */
          if (diff == NULL
              || !host_integerp (diff, 1)
              || tree_int_cst_lt (len1, diff))
            break;

          /* Use maximum of difference plus memset length and memcpy length
             as the new memcpy length, if it is too big, bail out.  */
          src_len = tree_low_cst (diff, 1);
          src_len += tree_low_cst (len2, 1);
          if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
            src_len = tree_low_cst (len1, 1);
          if (src_len > 1024)
            break;

          /* If mempcpy value is used elsewhere, bail out, as mempcpy
             with bigger length will return different result.  */
          if (lhs1 != NULL_TREE
              && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
              && (TREE_CODE (lhs1) != SSA_NAME
                  || !single_imm_use (lhs1, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          /* If anything reads memory in between memcpy and memset
             call, the modified memcpy call might change it.  */
          vdef = gimple_vdef (stmt1);
          if (vdef != NULL
              && (!single_imm_use (vdef, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          ptr1_align = get_pointer_alignment (ptr1);
          /* Construct the new source string literal.  */
          src_buf = XALLOCAVEC (char, src_len + 1);
          if (callee1)
            memcpy (src_buf,
                    TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
                    tree_low_cst (len1, 1));
          else
            src_buf[0] = tree_low_cst (src1, 0);
          memset (src_buf + tree_low_cst (diff, 1),
                  tree_low_cst (val2, 0), tree_low_cst (len2, 1));
          src_buf[src_len] = '\0';
          /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
             handle embedded '\0's.  */
          if (strlen (src_buf) != src_len)
            break;
          rtl_profile_for_bb (gimple_bb (stmt2));
          /* If the new memcpy wouldn't be emitted by storing the literal
             by pieces, this optimization might enlarge .rodata too much,
             as commonly used string literals couldn't be shared any
             longer.  */
          if (!can_store_by_pieces (src_len,
                                    builtin_strncpy_read_str,
                                    src_buf, ptr1_align, false))
            break;

          new_str_cst = build_string_literal (src_len, src_buf);
          if (callee1)
            {
              /* If STMT1 is a mem{,p}cpy call, adjust it and remove
                 memset call.  */
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                gimple_call_set_lhs (stmt1, NULL_TREE);
              gimple_call_set_arg (stmt1, 1, new_str_cst);
              gimple_call_set_arg (stmt1, 2,
                                   build_int_cst (TREE_TYPE (len1), src_len));
              update_stmt (stmt1);
              unlink_stmt_vdef (stmt2);
              gsi_remove (gsi_p, true);
              release_defs (stmt2);
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                release_ssa_name (lhs1);
              return true;
            }
          else
            {
              /* Otherwise, if STMT1 is length 1 memcpy optimized into
                 assignment, remove STMT1 and change memset call into
                 memcpy call.  */
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

              if (!is_gimple_val (ptr1))
                ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
                                                 true, GSI_SAME_STMT);
              gimple_call_set_fndecl (stmt2,
                                      builtin_decl_explicit (BUILT_IN_MEMCPY));
              gimple_call_set_arg (stmt2, 0, ptr1);
              gimple_call_set_arg (stmt2, 1, new_str_cst);
              gimple_call_set_arg (stmt2, 2,
                                   build_int_cst (TREE_TYPE (len2), src_len));
              unlink_stmt_vdef (stmt1);
              gsi_remove (&gsi, true);
              release_defs (stmt1);
              update_stmt (stmt2);
              return false;
            }
        }
      break;
    default:
      break;
    }
  return false;
}
/* Checks if expression has type of one-bit precision, or is a known
   truth-valued expression.  */
static bool
truth_valued_ssa_name (tree name)
{
  gimple def;
  tree type = TREE_TYPE (name);

  if (!INTEGRAL_TYPE_P (type))
    return false;
  /* Don't check here for BOOLEAN_TYPE as the precision isn't
     necessarily one and so ~X is not equal to !X.  */
  if (TYPE_PRECISION (type) == 1)
    return true;
  def = SSA_NAME_DEF_STMT (name);
  if (is_gimple_assign (def))
    return truth_value_p (gimple_assign_rhs_code (def));
  return false;
}
/* Helper routine for the simplify_bitwise_binary_1 function.
   Return for the SSA name NAME the expression X if it meets the condition
   NAME = !X.  Otherwise return NULL_TREE.
   Detected patterns for NAME = !X are:
     !X and X == 0 for X with integral type.
     X ^ 1, X != 1, or ~X for X with integral type with precision of one.  */
static tree
lookup_logical_inverted_value (tree name)
{
  tree op1, op2;
  enum tree_code code;
  gimple def;

  /* If name has a non-integral type, or isn't an SSA_NAME, then
     return.  */
  if (TREE_CODE (name) != SSA_NAME
      || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
    return NULL_TREE;
  def = SSA_NAME_DEF_STMT (name);
  if (!is_gimple_assign (def))
    return NULL_TREE;

  code = gimple_assign_rhs_code (def);
  op1 = gimple_assign_rhs1 (def);
  op2 = NULL_TREE;

  /* Get for EQ_EXPR or BIT_XOR_EXPR operation the second operand.
     If CODE isn't an EQ_EXPR, BIT_XOR_EXPR, or BIT_NOT_EXPR, then return.  */
  if (code == EQ_EXPR || code == NE_EXPR
      || code == BIT_XOR_EXPR)
    op2 = gimple_assign_rhs2 (def);

  switch (code)
    {
    case BIT_NOT_EXPR:
      if (truth_valued_ssa_name (name))
        return op1;
      break;
    case EQ_EXPR:
      /* Check if we have X == 0 and X has an integral type.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
        break;
      if (integer_zerop (op2))
        return op1;
      break;
    case NE_EXPR:
      /* Check if we have X != 1 and X is truth-valued.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
        break;
      if (integer_onep (op2) && truth_valued_ssa_name (op1))
        return op1;
      break;
    case BIT_XOR_EXPR:
      /* Check if we have X ^ 1 and X is truth valued.  */
      if (integer_onep (op2) && truth_valued_ssa_name (op1))
        return op1;
      break;
    default:
      break;
    }

  return NULL_TREE;
}
/* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
   operations CODE, if one operand has the logically inverted
   value of the other.  */
static tree
simplify_bitwise_binary_1 (enum tree_code code, tree type,
                           tree arg1, tree arg2)
{
  tree anot;

  /* If CODE isn't a bitwise binary operation, return NULL_TREE.  */
  if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
      && code != BIT_XOR_EXPR)
    return NULL_TREE;

  /* First check if operands ARG1 and ARG2 are equal.  If so
     return NULL_TREE as this optimization is handled by fold_stmt.  */
  if (arg1 == arg2)
    return NULL_TREE;
  /* See if we have in arguments logical-not patterns.  */
  if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
       || anot != arg2)
      && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
          || anot != arg1))
    return NULL_TREE;

  /* X & !X -> 0.  */
  if (code == BIT_AND_EXPR)
    return fold_convert (type, integer_zero_node);
  /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued.  */
  if (truth_valued_ssa_name (anot))
    return fold_convert (type, integer_one_node);

  /* ??? Otherwise result is (X != 0 ? X : 1).  not handled.  */
  return NULL_TREE;
}
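
/* For illustration (hypothetical GIMPLE): with

     n_1 = x_2 == 0;          (x_2 integral)
     r_3 = x_2 & n_1;

   n_1 is recognized as !x_2, so r_3 folds to 0; with BIT_IOR_EXPR or
   BIT_XOR_EXPR and a truth-valued x_2, the result would fold to 1
   instead.  */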
/* Given an SSA name NAME, see if it was defined by an assignment and
   set CODE to the code, ARG1 to the first operand on the rhs and ARG2
   to the second operand on the rhs (if ARG2 is non-NULL).  */
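/* For instance (illustrative), if NAME is a_1 defined by
   a_1 = b_2 & c_3, this sets *CODE to BIT_AND_EXPR, *ARG1 to b_2 and
   *ARG2 to c_3; if NAME is not an SSA name or cannot be looked
   through, *CODE is just TREE_CODE (NAME) and *ARG1 is NAME itself.  */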
1821 static inline void
1822 defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
1824 gimple def;
1825 enum tree_code code1;
1826 tree arg11;
1827 tree arg21;
1828 tree arg31;
1829 enum gimple_rhs_class grhs_class;
1831 code1 = TREE_CODE (name);
1832 arg11 = name;
1833 arg21 = NULL_TREE;
1834 grhs_class = get_gimple_rhs_class (code1);
1836 if (code1 == SSA_NAME)
1838 def = SSA_NAME_DEF_STMT (name);
1840 if (def && is_gimple_assign (def)
1841 && can_propagate_from (def))
1843 code1 = gimple_assign_rhs_code (def);
1844 arg11 = gimple_assign_rhs1 (def);
1845 arg21 = gimple_assign_rhs2 (def);
          arg31 = gimple_assign_rhs3 (def);
  else if (grhs_class == GIMPLE_TERNARY_RHS
           || grhs_class == GIMPLE_BINARY_RHS
           || grhs_class == GIMPLE_UNARY_RHS
           || grhs_class == GIMPLE_SINGLE_RHS)
1853 extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);
1855 *code = code1;
1856 *arg1 = arg11;
1857 if (arg2)
1858 *arg2 = arg21;
1859 /* Ignore arg3 currently. */
1862 /* Return true if a conversion of an operand from type FROM to type TO
1863 should be applied after performing the operation instead. */
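/* A sketch of the profitable case (illustrative types), with c of type
   unsigned char:
     tmp = (int) c;
     res = tmp & 15;
   is better performed as
     tmp2 = c & 15;
     res = (int) tmp2;
   because the bitwise operation then happens in the narrower type.  */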
1865 static bool
1866 hoist_conversion_for_bitop_p (tree to, tree from)
  /* That's a good idea if the conversion widens the operand; after
     hoisting the conversion, the operation is performed in the
     narrower type.  */
1870 if (TYPE_PRECISION (from) < TYPE_PRECISION (to))
1871 return true;
1873 /* It's also a good idea if the conversion is to a non-integer mode. */
1874 if (GET_MODE_CLASS (TYPE_MODE (to)) != MODE_INT)
1875 return true;
1877 /* Or if the precision of TO is not the same as the precision
1878 of its mode. */
1879 if (TYPE_PRECISION (to) != GET_MODE_PRECISION (TYPE_MODE (to)))
1880 return true;
1882 return false;
/* GSI points to a statement of the form

   result = OP0 CODE OP1

   where OP0 and OP1 are single-bit SSA_NAMEs and CODE is either
   BIT_AND_EXPR or BIT_IOR_EXPR.

   If OP0 is fed by a bitwise negation of another single-bit SSA_NAME,
   then we can simplify the two statements into a single LT_EXPR or LE_EXPR
   when CODE is BIT_AND_EXPR or BIT_IOR_EXPR, respectively (GT_EXPR or
   GE_EXPR for a signed single-bit type, whose values are 0 and -1).

   If a simplification is made, return TRUE, else return FALSE.  */
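/* E.g. (illustrative GIMPLE, 1-bit unsigned operands):
     t_1 = ~x_2;
     r_3 = t_1 & y_4;   -->   r_3 = x_2 < y_4
   since ~x & y is 1 exactly when x is 0 and y is 1.  */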
1897 static bool
1898 simplify_bitwise_binary_boolean (gimple_stmt_iterator *gsi,
1899 enum tree_code code,
1900 tree op0, tree op1)
1902 gimple op0_def_stmt = SSA_NAME_DEF_STMT (op0);
1904 if (!is_gimple_assign (op0_def_stmt)
1905 || (gimple_assign_rhs_code (op0_def_stmt) != BIT_NOT_EXPR))
1906 return false;
1908 tree x = gimple_assign_rhs1 (op0_def_stmt);
1909 if (TREE_CODE (x) == SSA_NAME
1910 && INTEGRAL_TYPE_P (TREE_TYPE (x))
1911 && TYPE_PRECISION (TREE_TYPE (x)) == 1
1912 && TYPE_UNSIGNED (TREE_TYPE (x)) == TYPE_UNSIGNED (TREE_TYPE (op1)))
1914 enum tree_code newcode;
1916 gimple stmt = gsi_stmt (*gsi);
1917 gimple_assign_set_rhs1 (stmt, x);
1918 gimple_assign_set_rhs2 (stmt, op1);
1919 if (code == BIT_AND_EXPR)
1920 newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LT_EXPR : GT_EXPR;
1921 else
1922 newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LE_EXPR : GE_EXPR;
1923 gimple_assign_set_rhs_code (stmt, newcode);
1924 update_stmt (stmt);
1925 return true;
1927 return false;
/* Simplify bitwise binary operations.
   Return true if a transformation was applied, otherwise return false.  */
1934 static bool
1935 simplify_bitwise_binary (gimple_stmt_iterator *gsi)
1937 gimple stmt = gsi_stmt (*gsi);
1938 tree arg1 = gimple_assign_rhs1 (stmt);
1939 tree arg2 = gimple_assign_rhs2 (stmt);
1940 enum tree_code code = gimple_assign_rhs_code (stmt);
1941 tree res;
1942 tree def1_arg1, def1_arg2, def2_arg1, def2_arg2;
1943 enum tree_code def1_code, def2_code;
1945 defcodefor_name (arg1, &def1_code, &def1_arg1, &def1_arg2);
1946 defcodefor_name (arg2, &def2_code, &def2_arg1, &def2_arg2);
  /* Try to fold (type) X op CST -> (type) (X op ((type-of-X) CST))
     when profitable.  */
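  /* E.g. (illustrative), with c of type unsigned char:
       tmp = (int) c;
       res = tmp & 255;
     becomes
       tmp2 = c & 255;
       res = (int) tmp2;  */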
1950 if (TREE_CODE (arg2) == INTEGER_CST
1951 && CONVERT_EXPR_CODE_P (def1_code)
1952 && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1))
1953 && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
1954 && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
1956 gimple newop;
1957 tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
1958 newop =
1959 gimple_build_assign_with_ops (code, tem, def1_arg1,
1960 fold_convert_loc (gimple_location (stmt),
1961 TREE_TYPE (def1_arg1),
1962 arg2));
1963 gimple_set_location (newop, gimple_location (stmt));
1964 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1965 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1966 tem, NULL_TREE, NULL_TREE);
1967 update_stmt (gsi_stmt (*gsi));
1968 return true;
  /* For bitwise binary operations apply operand conversions to the
     binary operation result instead of to the operands.  This allows
     combining successive conversions and bitwise binary operations.  */
1974 if (CONVERT_EXPR_CODE_P (def1_code)
1975 && CONVERT_EXPR_CODE_P (def2_code)
1976 && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
1977 && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1)))
1979 gimple newop;
1980 tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
1981 newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
1982 gimple_set_location (newop, gimple_location (stmt));
1983 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1984 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1985 tem, NULL_TREE, NULL_TREE);
1986 update_stmt (gsi_stmt (*gsi));
1987 return true;
1991 /* Simplify (A & B) OP0 (C & B) to (A OP0 C) & B. */
1992 if (def1_code == def2_code
1993 && def1_code == BIT_AND_EXPR
1994 && operand_equal_for_phi_arg_p (def1_arg2,
1995 def2_arg2))
1997 tree b = def1_arg2;
1998 tree a = def1_arg1;
1999 tree c = def2_arg1;
2000 tree inner = fold_build2 (code, TREE_TYPE (arg2), a, c);
      /* If A OP0 C folds to the constant 0 (this usually means C is
	 the same as A), use that result directly.  */
2003 if (integer_zerop (inner))
2005 gimple_assign_set_rhs_from_tree (gsi, inner);
2006 update_stmt (stmt);
2007 return true;
      /* If A OP0 C folds to an SSA name (this usually means C is the
	 same as A), combine it with B directly.  */
2011 else if (TREE_CODE (inner) == SSA_NAME)
2013 tree outer = fold_build2 (def1_code, TREE_TYPE (inner),
2014 inner, b);
2015 gimple_assign_set_rhs_from_tree (gsi, outer);
2016 update_stmt (stmt);
2017 return true;
2019 else
2021 gimple newop;
2022 tree tem;
2023 tem = make_ssa_name (TREE_TYPE (arg2), NULL);
2024 newop = gimple_build_assign_with_ops (code, tem, a, c);
2025 gimple_set_location (newop, gimple_location (stmt));
2026 /* Make sure to re-process the new stmt as it's walking upwards. */
2027 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
2028 gimple_assign_set_rhs1 (stmt, tem);
2029 gimple_assign_set_rhs2 (stmt, b);
2030 gimple_assign_set_rhs_code (stmt, def1_code);
2031 update_stmt (stmt);
2032 return true;
2036 /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2). */
2037 if (code == BIT_AND_EXPR
2038 && def1_code == BIT_IOR_EXPR
2039 && CONSTANT_CLASS_P (arg2)
2040 && CONSTANT_CLASS_P (def1_arg2))
2042 tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
2043 arg2, def1_arg2);
2044 tree tem;
2045 gimple newop;
2046 if (integer_zerop (cst))
2048 gimple_assign_set_rhs1 (stmt, def1_arg1);
2049 update_stmt (stmt);
2050 return true;
2052 tem = make_ssa_name (TREE_TYPE (arg2), NULL);
2053 newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
2054 tem, def1_arg1, arg2);
2055 gimple_set_location (newop, gimple_location (stmt));
2056 /* Make sure to re-process the new stmt as it's walking upwards. */
2057 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
2058 gimple_assign_set_rhs1 (stmt, tem);
2059 gimple_assign_set_rhs2 (stmt, cst);
2060 gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
2061 update_stmt (stmt);
2062 return true;
2065 /* Combine successive equal operations with constants. */
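  /* E.g. (illustrative): t = x & 0xff; r = t & 0x0f; becomes
     r = x & 0x0f, and t = x ^ 1; r = t ^ 2; becomes r = x ^ 3.  */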
2066 if ((code == BIT_AND_EXPR
2067 || code == BIT_IOR_EXPR
2068 || code == BIT_XOR_EXPR)
2069 && def1_code == code
2070 && CONSTANT_CLASS_P (arg2)
2071 && CONSTANT_CLASS_P (def1_arg2))
2073 tree cst = fold_build2 (code, TREE_TYPE (arg2),
2074 arg2, def1_arg2);
2075 gimple_assign_set_rhs1 (stmt, def1_arg1);
2076 gimple_assign_set_rhs2 (stmt, cst);
2077 update_stmt (stmt);
2078 return true;
2081 /* Canonicalize X ^ ~0 to ~X. */
2082 if (code == BIT_XOR_EXPR
2083 && integer_all_onesp (arg2))
2085 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
2086 gcc_assert (gsi_stmt (*gsi) == stmt);
2087 update_stmt (stmt);
2088 return true;
2091 /* Try simple folding for X op !X, and X op X. */
2092 res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
2093 if (res != NULL_TREE)
2095 gimple_assign_set_rhs_from_tree (gsi, res);
2096 update_stmt (gsi_stmt (*gsi));
2097 return true;
2100 if (code == BIT_AND_EXPR || code == BIT_IOR_EXPR)
2102 enum tree_code ocode = code == BIT_AND_EXPR ? BIT_IOR_EXPR : BIT_AND_EXPR;
2103 if (def1_code == ocode)
2105 tree x = arg2;
2106 enum tree_code coden;
2107 tree a1, a2;
2108 /* ( X | Y) & X -> X */
2109 /* ( X & Y) | X -> X */
2110 if (x == def1_arg1
2111 || x == def1_arg2)
2113 gimple_assign_set_rhs_from_tree (gsi, x);
2114 update_stmt (gsi_stmt (*gsi));
2115 return true;
2118 defcodefor_name (def1_arg1, &coden, &a1, &a2);
2119 /* (~X | Y) & X -> X & Y */
2120 /* (~X & Y) | X -> X | Y */
2121 if (coden == BIT_NOT_EXPR && a1 == x)
2123 gimple_assign_set_rhs_with_ops (gsi, code,
2124 x, def1_arg2);
2125 gcc_assert (gsi_stmt (*gsi) == stmt);
2126 update_stmt (stmt);
2127 return true;
2129 defcodefor_name (def1_arg2, &coden, &a1, &a2);
2130 /* (Y | ~X) & X -> X & Y */
2131 /* (Y & ~X) | X -> X | Y */
2132 if (coden == BIT_NOT_EXPR && a1 == x)
2134 gimple_assign_set_rhs_with_ops (gsi, code,
2135 x, def1_arg1);
2136 gcc_assert (gsi_stmt (*gsi) == stmt);
2137 update_stmt (stmt);
2138 return true;
2141 if (def2_code == ocode)
2143 enum tree_code coden;
2144 tree a1;
2145 tree x = arg1;
2146 /* X & ( X | Y) -> X */
2147 /* X | ( X & Y) -> X */
2148 if (x == def2_arg1
2149 || x == def2_arg2)
2151 gimple_assign_set_rhs_from_tree (gsi, x);
2152 update_stmt (gsi_stmt (*gsi));
2153 return true;
2155 defcodefor_name (def2_arg1, &coden, &a1, NULL);
	  /* X & (~X | Y) -> X & Y */
	  /* X | (~X & Y) -> X | Y */
2158 if (coden == BIT_NOT_EXPR && a1 == x)
2160 gimple_assign_set_rhs_with_ops (gsi, code,
2161 x, def2_arg2);
2162 gcc_assert (gsi_stmt (*gsi) == stmt);
2163 update_stmt (stmt);
2164 return true;
2166 defcodefor_name (def2_arg2, &coden, &a1, NULL);
	  /* X & (Y | ~X) -> X & Y */
	  /* X | (Y & ~X) -> X | Y */
2169 if (coden == BIT_NOT_EXPR && a1 == x)
2171 gimple_assign_set_rhs_with_ops (gsi, code,
2172 x, def2_arg1);
2173 gcc_assert (gsi_stmt (*gsi) == stmt);
2174 update_stmt (stmt);
2175 return true;
2179 /* If arg1 and arg2 are booleans (or any single bit type)
2180 then try to simplify:
2182 (~X & Y) -> X < Y
2183 (X & ~Y) -> Y < X
2184 (~X | Y) -> X <= Y
2185 (X | ~Y) -> Y <= X
2187 But only do this if our result feeds into a comparison as
2188 this transformation is not always a win, particularly on
2189 targets with and-not instructions. */
2190 if (TREE_CODE (arg1) == SSA_NAME
2191 && TREE_CODE (arg2) == SSA_NAME
2192 && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
2193 && TYPE_PRECISION (TREE_TYPE (arg1)) == 1
2194 && TYPE_PRECISION (TREE_TYPE (arg2)) == 1
2195 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2196 == TYPE_UNSIGNED (TREE_TYPE (arg2))))
2198 use_operand_p use_p;
2199 gimple use_stmt;
2201 if (single_imm_use (gimple_assign_lhs (stmt), &use_p, &use_stmt))
2203 if (gimple_code (use_stmt) == GIMPLE_COND
2204 && gimple_cond_lhs (use_stmt) == gimple_assign_lhs (stmt)
2205 && integer_zerop (gimple_cond_rhs (use_stmt))
2206 && gimple_cond_code (use_stmt) == NE_EXPR)
2208 if (simplify_bitwise_binary_boolean (gsi, code, arg1, arg2))
2209 return true;
2210 if (simplify_bitwise_binary_boolean (gsi, code, arg2, arg1))
2211 return true;
2216 return false;
/* Recognize rotation patterns.  Return true if a transformation
   was applied, otherwise return false.
2223 We are looking for X with unsigned type T with bitsize B, OP being
2224 +, | or ^, some type T2 wider than T and
2225 (X << CNT1) OP (X >> CNT2) iff CNT1 + CNT2 == B
2226 ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2)) iff CNT1 + CNT2 == B
2227 (X << Y) OP (X >> (B - Y))
2228 (X << (int) Y) OP (X >> (int) (B - Y))
2229 ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
2230 ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
2231 (X << Y) | (X >> ((-Y) & (B - 1)))
2232 (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
2233 ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
2234 ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
2236 and transform these into:
2237 X r<< CNT1
2238 X r<< Y
   Note, in the patterns with T2 type, the type of the OP operands
   might even be a signed type, but should have precision B.  */
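/* E.g., for unsigned int x with B == 32 (illustrative):
     r = (x << 5) | (x >> 27);            -->  r = x r<< 5
     r = (x << n) | (x >> ((-n) & 31));   -->  r = x r<< n
   The second form is the one that is safe for n == 0.  */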
2243 static bool
2244 simplify_rotate (gimple_stmt_iterator *gsi)
2246 gimple stmt = gsi_stmt (*gsi);
2247 tree arg[2], rtype, rotcnt = NULL_TREE;
2248 tree def_arg1[2], def_arg2[2];
2249 enum tree_code def_code[2];
2250 tree lhs;
2251 int i;
2252 bool swapped_p = false;
2253 gimple g;
2255 arg[0] = gimple_assign_rhs1 (stmt);
2256 arg[1] = gimple_assign_rhs2 (stmt);
2257 rtype = TREE_TYPE (arg[0]);
2259 /* Only create rotates in complete modes. Other cases are not
2260 expanded properly. */
2261 if (!INTEGRAL_TYPE_P (rtype)
2262 || TYPE_PRECISION (rtype) != GET_MODE_PRECISION (TYPE_MODE (rtype)))
2263 return false;
2265 for (i = 0; i < 2; i++)
2266 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
2268 /* Look through narrowing conversions. */
2269 if (CONVERT_EXPR_CODE_P (def_code[0])
2270 && CONVERT_EXPR_CODE_P (def_code[1])
2271 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
2272 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
2273 && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
2274 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
2275 && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
2276 && has_single_use (arg[0])
2277 && has_single_use (arg[1]))
2279 for (i = 0; i < 2; i++)
2281 arg[i] = def_arg1[i];
2282 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
2286 /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR. */
2287 for (i = 0; i < 2; i++)
2288 if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
2289 return false;
2290 else if (!has_single_use (arg[i]))
2291 return false;
2292 if (def_code[0] == def_code[1])
2293 return false;
2295 /* If we've looked through narrowing conversions before, look through
2296 widening conversions from unsigned type with the same precision
2297 as rtype here. */
2298 if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
2299 for (i = 0; i < 2; i++)
2301 tree tem;
2302 enum tree_code code;
2303 defcodefor_name (def_arg1[i], &code, &tem, NULL);
2304 if (!CONVERT_EXPR_CODE_P (code)
2305 || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
2306 || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
2307 return false;
2308 def_arg1[i] = tem;
2310 /* Both shifts have to use the same first operand. */
2311 if (TREE_CODE (def_arg1[0]) != SSA_NAME || def_arg1[0] != def_arg1[1])
2312 return false;
2313 if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
2314 return false;
2316 /* CNT1 + CNT2 == B case above. */
2317 if (host_integerp (def_arg2[0], 1)
2318 && host_integerp (def_arg2[1], 1)
2319 && (unsigned HOST_WIDE_INT) tree_low_cst (def_arg2[0], 1)
2320 + tree_low_cst (def_arg2[1], 1) == TYPE_PRECISION (rtype))
2321 rotcnt = def_arg2[0];
2322 else if (TREE_CODE (def_arg2[0]) != SSA_NAME
2323 || TREE_CODE (def_arg2[1]) != SSA_NAME)
2324 return false;
2325 else
2327 tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
2328 enum tree_code cdef_code[2];
      /* Look through conversions of the shift count argument.
	 The C/C++ FE casts any shift count argument to integer_type_node.
	 The only problem might be if the shift count type's maximum value
	 is equal to or smaller than the number of bits in rtype.  */
2333 for (i = 0; i < 2; i++)
2335 def_arg2_alt[i] = def_arg2[i];
2336 defcodefor_name (def_arg2[i], &cdef_code[i],
2337 &cdef_arg1[i], &cdef_arg2[i]);
2338 if (CONVERT_EXPR_CODE_P (cdef_code[i])
2339 && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
2340 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
2341 > floor_log2 (TYPE_PRECISION (rtype))
2342 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
2343 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (cdef_arg1[i]))))
2345 def_arg2_alt[i] = cdef_arg1[i];
2346 defcodefor_name (def_arg2_alt[i], &cdef_code[i],
2347 &cdef_arg1[i], &cdef_arg2[i]);
2350 for (i = 0; i < 2; i++)
2351 /* Check for one shift count being Y and the other B - Y,
2352 with optional casts. */
2353 if (cdef_code[i] == MINUS_EXPR
2354 && host_integerp (cdef_arg1[i], 0)
2355 && tree_low_cst (cdef_arg1[i], 0) == TYPE_PRECISION (rtype)
2356 && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
2358 tree tem;
2359 enum tree_code code;
2361 if (cdef_arg2[i] == def_arg2[1 - i]
2362 || cdef_arg2[i] == def_arg2_alt[1 - i])
2364 rotcnt = cdef_arg2[i];
2365 break;
2367 defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
2368 if (CONVERT_EXPR_CODE_P (code)
2369 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2370 && TYPE_PRECISION (TREE_TYPE (tem))
2371 > floor_log2 (TYPE_PRECISION (rtype))
2372 && TYPE_PRECISION (TREE_TYPE (tem))
2373 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
2374 && (tem == def_arg2[1 - i]
2375 || tem == def_arg2_alt[1 - i]))
2377 rotcnt = tem;
2378 break;
2381 /* The above sequence isn't safe for Y being 0,
2382 because then one of the shifts triggers undefined behavior.
2383 This alternative is safe even for rotation count of 0.
2384 One shift count is Y and the other (-Y) & (B - 1). */
2385 else if (cdef_code[i] == BIT_AND_EXPR
2386 && host_integerp (cdef_arg2[i], 0)
2387 && tree_low_cst (cdef_arg2[i], 0)
2388 == TYPE_PRECISION (rtype) - 1
2389 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
2390 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
2392 tree tem;
2393 enum tree_code code;
2395 defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
2396 if (CONVERT_EXPR_CODE_P (code)
2397 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2398 && TYPE_PRECISION (TREE_TYPE (tem))
2399 > floor_log2 (TYPE_PRECISION (rtype))
2400 && TYPE_PRECISION (TREE_TYPE (tem))
2401 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem))))
2402 defcodefor_name (tem, &code, &tem, NULL);
2404 if (code == NEGATE_EXPR)
2406 if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
2408 rotcnt = tem;
2409 break;
2411 defcodefor_name (tem, &code, &tem, NULL);
2412 if (CONVERT_EXPR_CODE_P (code)
2413 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2414 && TYPE_PRECISION (TREE_TYPE (tem))
2415 > floor_log2 (TYPE_PRECISION (rtype))
2416 && TYPE_PRECISION (TREE_TYPE (tem))
2417 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
2418 && (tem == def_arg2[1 - i]
2419 || tem == def_arg2_alt[1 - i]))
2421 rotcnt = tem;
2422 break;
2426 if (rotcnt == NULL_TREE)
2427 return false;
2428 swapped_p = i != 1;
2431 if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
2432 TREE_TYPE (rotcnt)))
2434 g = gimple_build_assign_with_ops (NOP_EXPR,
2435 make_ssa_name (TREE_TYPE (def_arg2[0]),
2436 NULL),
2437 rotcnt, NULL_TREE);
2438 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2439 rotcnt = gimple_assign_lhs (g);
2441 lhs = gimple_assign_lhs (stmt);
2442 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
2443 lhs = make_ssa_name (TREE_TYPE (def_arg1[0]), NULL);
2444 g = gimple_build_assign_with_ops (((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
2445 ? LROTATE_EXPR : RROTATE_EXPR,
2446 lhs, def_arg1[0], rotcnt);
2447 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
2449 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2450 g = gimple_build_assign_with_ops (NOP_EXPR, gimple_assign_lhs (stmt),
2451 lhs, NULL_TREE);
2453 gsi_replace (gsi, g, false);
2454 return true;
2457 /* Perform re-associations of the plus or minus statement STMT that are
2458 always permitted. Returns true if the CFG was changed. */
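/* For example (illustrative):
     t_1 = a_2 + b_3;  r_4 = t_1 - a_2;   -->  r_4 = b_3;
     t_5 = ~a_2;       r_6 = t_5 + 1;     -->  r_6 = -a_2;  */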
2460 static bool
2461 associate_plusminus (gimple_stmt_iterator *gsi)
2463 gimple stmt = gsi_stmt (*gsi);
2464 tree rhs1 = gimple_assign_rhs1 (stmt);
2465 tree rhs2 = gimple_assign_rhs2 (stmt);
2466 enum tree_code code = gimple_assign_rhs_code (stmt);
2467 bool changed;
2469 /* We can't reassociate at all for saturating types. */
2470 if (TYPE_SATURATING (TREE_TYPE (rhs1)))
2471 return false;
2473 /* First contract negates. */
2476 changed = false;
2478 /* A +- (-B) -> A -+ B. */
2479 if (TREE_CODE (rhs2) == SSA_NAME)
2481 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2482 if (is_gimple_assign (def_stmt)
2483 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2484 && can_propagate_from (def_stmt))
2486 code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
2487 gimple_assign_set_rhs_code (stmt, code);
2488 rhs2 = gimple_assign_rhs1 (def_stmt);
2489 gimple_assign_set_rhs2 (stmt, rhs2);
2490 gimple_set_modified (stmt, true);
2491 changed = true;
2495 /* (-A) + B -> B - A. */
2496 if (TREE_CODE (rhs1) == SSA_NAME
2497 && code == PLUS_EXPR)
2499 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
2500 if (is_gimple_assign (def_stmt)
2501 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2502 && can_propagate_from (def_stmt))
2504 code = MINUS_EXPR;
2505 gimple_assign_set_rhs_code (stmt, code);
2506 rhs1 = rhs2;
2507 gimple_assign_set_rhs1 (stmt, rhs1);
2508 rhs2 = gimple_assign_rhs1 (def_stmt);
2509 gimple_assign_set_rhs2 (stmt, rhs2);
2510 gimple_set_modified (stmt, true);
2511 changed = true;
2515 while (changed);
2517 /* We can't reassociate floating-point or fixed-point plus or minus
2518 because of saturation to +-Inf. */
2519 if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
2520 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
2521 goto out;
2523 /* Second match patterns that allow contracting a plus-minus pair
2524 irrespective of overflow issues.
2526 (A +- B) - A -> +- B
2527 (A +- B) -+ B -> A
2528 (CST +- A) +- CST -> CST +- A
2529 (A +- CST) +- CST -> A +- CST
2530 ~A + A -> -1
2531 ~A + 1 -> -A
2532 A - (A +- B) -> -+ B
2533 A +- (B +- A) -> +- B
2534 CST +- (CST +- A) -> CST +- A
2535 CST +- (A +- CST) -> CST +- A
2536 A + ~A -> -1
     via commuting the addition and contracting operations to zero
     by reassociation.  */
2541 if (TREE_CODE (rhs1) == SSA_NAME)
2543 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
2544 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2546 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2547 if (def_code == PLUS_EXPR
2548 || def_code == MINUS_EXPR)
2550 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2551 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2552 if (operand_equal_p (def_rhs1, rhs2, 0)
2553 && code == MINUS_EXPR)
2555 /* (A +- B) - A -> +- B. */
2556 code = ((def_code == PLUS_EXPR)
2557 ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
2558 rhs1 = def_rhs2;
2559 rhs2 = NULL_TREE;
2560 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2561 gcc_assert (gsi_stmt (*gsi) == stmt);
2562 gimple_set_modified (stmt, true);
2564 else if (operand_equal_p (def_rhs2, rhs2, 0)
2565 && code != def_code)
2567 /* (A +- B) -+ B -> A. */
2568 code = TREE_CODE (def_rhs1);
2569 rhs1 = def_rhs1;
2570 rhs2 = NULL_TREE;
2571 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2572 gcc_assert (gsi_stmt (*gsi) == stmt);
2573 gimple_set_modified (stmt, true);
2575 else if (CONSTANT_CLASS_P (rhs2)
2576 && CONSTANT_CLASS_P (def_rhs1))
2578 /* (CST +- A) +- CST -> CST +- A. */
2579 tree cst = fold_binary (code, TREE_TYPE (rhs1),
2580 def_rhs1, rhs2);
2581 if (cst && !TREE_OVERFLOW (cst))
2583 code = def_code;
2584 gimple_assign_set_rhs_code (stmt, code);
2585 rhs1 = cst;
2586 gimple_assign_set_rhs1 (stmt, rhs1);
2587 rhs2 = def_rhs2;
2588 gimple_assign_set_rhs2 (stmt, rhs2);
2589 gimple_set_modified (stmt, true);
2592 else if (CONSTANT_CLASS_P (rhs2)
2593 && CONSTANT_CLASS_P (def_rhs2))
2595 /* (A +- CST) +- CST -> A +- CST. */
2596 enum tree_code mix = (code == def_code)
2597 ? PLUS_EXPR : MINUS_EXPR;
2598 tree cst = fold_binary (mix, TREE_TYPE (rhs1),
2599 def_rhs2, rhs2);
2600 if (cst && !TREE_OVERFLOW (cst))
2602 code = def_code;
2603 gimple_assign_set_rhs_code (stmt, code);
2604 rhs1 = def_rhs1;
2605 gimple_assign_set_rhs1 (stmt, rhs1);
2606 rhs2 = cst;
2607 gimple_assign_set_rhs2 (stmt, rhs2);
2608 gimple_set_modified (stmt, true);
2612 else if (def_code == BIT_NOT_EXPR && code == PLUS_EXPR)
2614 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2615 if (operand_equal_p (def_rhs1, rhs2, 0))
2617 /* ~A + A -> -1. */
2618 rhs1 = build_all_ones_cst (TREE_TYPE (rhs2));
2619 rhs2 = NULL_TREE;
2620 code = TREE_CODE (rhs1);
2621 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2622 gcc_assert (gsi_stmt (*gsi) == stmt);
2623 gimple_set_modified (stmt, true);
2625 else if ((TREE_CODE (TREE_TYPE (rhs2)) != COMPLEX_TYPE
2626 && integer_onep (rhs2))
2627 || (TREE_CODE (rhs2) == COMPLEX_CST
2628 && integer_onep (TREE_REALPART (rhs2))
2629 && integer_onep (TREE_IMAGPART (rhs2))))
2631 /* ~A + 1 -> -A. */
2632 code = NEGATE_EXPR;
2633 rhs1 = def_rhs1;
2634 rhs2 = NULL_TREE;
2635 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2636 gcc_assert (gsi_stmt (*gsi) == stmt);
2637 gimple_set_modified (stmt, true);
2643 if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
2645 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2646 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2648 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2649 if (def_code == PLUS_EXPR
2650 || def_code == MINUS_EXPR)
2652 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2653 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2654 if (operand_equal_p (def_rhs1, rhs1, 0)
2655 && code == MINUS_EXPR)
2657 /* A - (A +- B) -> -+ B. */
2658 code = ((def_code == PLUS_EXPR)
2659 ? NEGATE_EXPR : TREE_CODE (def_rhs2));
2660 rhs1 = def_rhs2;
2661 rhs2 = NULL_TREE;
2662 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2663 gcc_assert (gsi_stmt (*gsi) == stmt);
2664 gimple_set_modified (stmt, true);
2666 else if (operand_equal_p (def_rhs2, rhs1, 0)
2667 && code != def_code)
2669 /* A +- (B +- A) -> +- B. */
2670 code = ((code == PLUS_EXPR)
2671 ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
2672 rhs1 = def_rhs1;
2673 rhs2 = NULL_TREE;
2674 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2675 gcc_assert (gsi_stmt (*gsi) == stmt);
2676 gimple_set_modified (stmt, true);
2678 else if (CONSTANT_CLASS_P (rhs1)
2679 && CONSTANT_CLASS_P (def_rhs1))
2681 /* CST +- (CST +- A) -> CST +- A. */
2682 tree cst = fold_binary (code, TREE_TYPE (rhs2),
2683 rhs1, def_rhs1);
2684 if (cst && !TREE_OVERFLOW (cst))
2686 code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
2687 gimple_assign_set_rhs_code (stmt, code);
2688 rhs1 = cst;
2689 gimple_assign_set_rhs1 (stmt, rhs1);
2690 rhs2 = def_rhs2;
2691 gimple_assign_set_rhs2 (stmt, rhs2);
2692 gimple_set_modified (stmt, true);
2695 else if (CONSTANT_CLASS_P (rhs1)
2696 && CONSTANT_CLASS_P (def_rhs2))
2698 /* CST +- (A +- CST) -> CST +- A. */
2699 tree cst = fold_binary (def_code == code
2700 ? PLUS_EXPR : MINUS_EXPR,
2701 TREE_TYPE (rhs2),
2702 rhs1, def_rhs2);
2703 if (cst && !TREE_OVERFLOW (cst))
2705 rhs1 = cst;
2706 gimple_assign_set_rhs1 (stmt, rhs1);
2707 rhs2 = def_rhs1;
2708 gimple_assign_set_rhs2 (stmt, rhs2);
2709 gimple_set_modified (stmt, true);
2713 else if (def_code == BIT_NOT_EXPR)
2715 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2716 if (code == PLUS_EXPR
2717 && operand_equal_p (def_rhs1, rhs1, 0))
2719 /* A + ~A -> -1. */
2720 rhs1 = build_all_ones_cst (TREE_TYPE (rhs1));
2721 rhs2 = NULL_TREE;
2722 code = TREE_CODE (rhs1);
2723 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2724 gcc_assert (gsi_stmt (*gsi) == stmt);
2725 gimple_set_modified (stmt, true);
2731 out:
2732 if (gimple_modified_p (stmt))
2734 fold_stmt_inplace (gsi);
2735 update_stmt (stmt);
2736 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
2737 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2738 return true;
2741 return false;
/* Associate operands of a POINTER_PLUS_EXPR assignment at *GSI.  Returns
   true if anything changed, false otherwise.  */
2747 static bool
2748 associate_pointerplus (gimple_stmt_iterator *gsi)
2750 gimple stmt = gsi_stmt (*gsi);
2751 gimple def_stmt;
2752 tree ptr, rhs, algn;
  /* Pattern match
       tem = (sizetype) ptr;
       tem = tem & algn;
       tem = -tem;
       ... = ptr p+ tem;
     and produce the simpler form, easier to analyze with respect to
     alignment:
       ... = ptr & ~algn;  */
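  /* For instance (illustrative), with algn == 7 this rounds ptr down
     to an 8-byte boundary: the p+ of the negated low bits becomes
     ... = ptr & ~7.  */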
2761 ptr = gimple_assign_rhs1 (stmt);
2762 rhs = gimple_assign_rhs2 (stmt);
2763 if (TREE_CODE (rhs) != SSA_NAME)
2764 return false;
2765 def_stmt = SSA_NAME_DEF_STMT (rhs);
2766 if (!is_gimple_assign (def_stmt)
2767 || gimple_assign_rhs_code (def_stmt) != NEGATE_EXPR)
2768 return false;
2769 rhs = gimple_assign_rhs1 (def_stmt);
2770 if (TREE_CODE (rhs) != SSA_NAME)
2771 return false;
2772 def_stmt = SSA_NAME_DEF_STMT (rhs);
2773 if (!is_gimple_assign (def_stmt)
2774 || gimple_assign_rhs_code (def_stmt) != BIT_AND_EXPR)
2775 return false;
2776 rhs = gimple_assign_rhs1 (def_stmt);
2777 algn = gimple_assign_rhs2 (def_stmt);
2778 if (TREE_CODE (rhs) != SSA_NAME
2779 || TREE_CODE (algn) != INTEGER_CST)
2780 return false;
2781 def_stmt = SSA_NAME_DEF_STMT (rhs);
2782 if (!is_gimple_assign (def_stmt)
2783 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
2784 return false;
2785 if (gimple_assign_rhs1 (def_stmt) != ptr)
2786 return false;
2788 algn = double_int_to_tree (TREE_TYPE (ptr), ~tree_to_double_int (algn));
2789 gimple_assign_set_rhs_with_ops (gsi, BIT_AND_EXPR, ptr, algn);
2790 fold_stmt_inplace (gsi);
2791 update_stmt (stmt);
2793 return true;
2796 /* Combine two conversions in a row for the second conversion at *GSI.
2797 Returns 1 if there were any changes made, 2 if cfg-cleanup needs to
2798 run. Else it returns 0. */
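/* E.g. (illustrative), with short s:
     t_1 = (int) s_2;  r_3 = (long) t_1;   -->  r_3 = (long) s_2;
   since the intermediate int conversion is not needed.  */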
2800 static int
2801 combine_conversions (gimple_stmt_iterator *gsi)
2803 gimple stmt = gsi_stmt (*gsi);
2804 gimple def_stmt;
2805 tree op0, lhs;
2806 enum tree_code code = gimple_assign_rhs_code (stmt);
2807 enum tree_code code2;
2809 gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
2810 || code == FLOAT_EXPR
2811 || code == FIX_TRUNC_EXPR);
2813 lhs = gimple_assign_lhs (stmt);
2814 op0 = gimple_assign_rhs1 (stmt);
2815 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
2817 gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
2818 return 1;
2821 if (TREE_CODE (op0) != SSA_NAME)
2822 return 0;
2824 def_stmt = SSA_NAME_DEF_STMT (op0);
2825 if (!is_gimple_assign (def_stmt))
2826 return 0;
2828 code2 = gimple_assign_rhs_code (def_stmt);
2830 if (CONVERT_EXPR_CODE_P (code2) || code2 == FLOAT_EXPR)
2832 tree defop0 = gimple_assign_rhs1 (def_stmt);
2833 tree type = TREE_TYPE (lhs);
2834 tree inside_type = TREE_TYPE (defop0);
2835 tree inter_type = TREE_TYPE (op0);
2836 int inside_int = INTEGRAL_TYPE_P (inside_type);
2837 int inside_ptr = POINTER_TYPE_P (inside_type);
2838 int inside_float = FLOAT_TYPE_P (inside_type);
2839 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
2840 unsigned int inside_prec = TYPE_PRECISION (inside_type);
2841 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
2842 int inter_int = INTEGRAL_TYPE_P (inter_type);
2843 int inter_ptr = POINTER_TYPE_P (inter_type);
2844 int inter_float = FLOAT_TYPE_P (inter_type);
2845 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
2846 unsigned int inter_prec = TYPE_PRECISION (inter_type);
2847 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
2848 int final_int = INTEGRAL_TYPE_P (type);
2849 int final_ptr = POINTER_TYPE_P (type);
2850 int final_float = FLOAT_TYPE_P (type);
2851 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
2852 unsigned int final_prec = TYPE_PRECISION (type);
2853 int final_unsignedp = TYPE_UNSIGNED (type);
2855 /* Don't propagate ssa names that occur in abnormal phis. */
2856 if (TREE_CODE (defop0) == SSA_NAME
2857 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defop0))
2858 return 0;
2860 /* In addition to the cases of two conversions in a row
2861 handled below, if we are converting something to its own
2862 type via an object of identical or wider precision, neither
2863 conversion is needed. */
2864 if (useless_type_conversion_p (type, inside_type)
2865 && (((inter_int || inter_ptr) && final_int)
2866 || (inter_float && final_float))
2867 && inter_prec >= final_prec)
2869 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2870 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2871 update_stmt (stmt);
2872 return remove_prop_source_from_use (op0) ? 2 : 1;
2875 /* Likewise, if the intermediate and initial types are either both
2876 float or both integer, we don't need the middle conversion if the
2877 former is wider than the latter and doesn't change the signedness
2878 (for integers). Avoid this if the final type is a pointer since
2879 then we sometimes need the middle conversion. Likewise if the
2880 final type has a precision not equal to the size of its mode. */
2881 if (((inter_int && inside_int)
2882 || (inter_float && inside_float)
2883 || (inter_vec && inside_vec))
2884 && inter_prec >= inside_prec
2885 && (inter_float || inter_vec
2886 || inter_unsignedp == inside_unsignedp)
2887 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
2888 && TYPE_MODE (type) == TYPE_MODE (inter_type))
2889 && ! final_ptr
2890 && (! final_vec || inter_prec == inside_prec))
2892 gimple_assign_set_rhs1 (stmt, defop0);
2893 update_stmt (stmt);
2894 return remove_prop_source_from_use (op0) ? 2 : 1;
2897 /* If we have a sign-extension of a zero-extended value, we can
2898 replace that by a single zero-extension. Likewise if the
2899 final conversion does not change precision we can drop the
2900 intermediate conversion. */
2901 if (inside_int && inter_int && final_int
2902 && ((inside_prec < inter_prec && inter_prec < final_prec
2903 && inside_unsignedp && !inter_unsignedp)
2904 || final_prec == inter_prec))
2906 gimple_assign_set_rhs1 (stmt, defop0);
2907 update_stmt (stmt);
2908 return remove_prop_source_from_use (op0) ? 2 : 1;
2911 /* Two conversions in a row are not needed unless:
2912 - some conversion is floating-point (overstrict for now), or
2913 - some conversion is a vector (overstrict for now), or
2914 - the intermediate type is narrower than both initial and
2915 final, or
2916 - the intermediate type and innermost type differ in signedness,
2917 and the outermost type is wider than the intermediate, or
2918 - the initial type is a pointer type and the precisions of the
2919 intermediate and final types differ, or
2920 - the final type is a pointer type and the precisions of the
2921 initial and intermediate types differ. */
2922 if (! inside_float && ! inter_float && ! final_float
2923 && ! inside_vec && ! inter_vec && ! final_vec
2924 && (inter_prec >= inside_prec || inter_prec >= final_prec)
2925 && ! (inside_int && inter_int
2926 && inter_unsignedp != inside_unsignedp
2927 && inter_prec < final_prec)
2928 && ((inter_unsignedp && inter_prec > inside_prec)
2929 == (final_unsignedp && final_prec > inter_prec))
2930 && ! (inside_ptr && inter_prec != final_prec)
2931 && ! (final_ptr && inside_prec != inter_prec)
2932 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
2933 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
2935 gimple_assign_set_rhs1 (stmt, defop0);
2936 update_stmt (stmt);
2937 return remove_prop_source_from_use (op0) ? 2 : 1;
2940 /* A truncation to an unsigned type should be canonicalized as
2941 bitwise and of a mask. */
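      /* E.g. (illustrative), with x of type unsigned int:
	   t_1 = (unsigned char) x_2;  r_3 = (unsigned int) t_1;
	 becomes r_3 = x_2 & 255.  */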
2942 if (final_int && inter_int && inside_int
2943 && final_prec == inside_prec
2944 && final_prec > inter_prec
2945 && inter_unsignedp)
2947 tree tem;
2948 tem = fold_build2 (BIT_AND_EXPR, inside_type,
2949 defop0,
2950 double_int_to_tree
2951 (inside_type, double_int::mask (inter_prec)));
2952 if (!useless_type_conversion_p (type, inside_type))
2954 tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
2955 GSI_SAME_STMT);
2956 gimple_assign_set_rhs1 (stmt, tem);
2958 else
2959 gimple_assign_set_rhs_from_tree (gsi, tem);
2960 update_stmt (gsi_stmt (*gsi));
2961 return 1;
2964 /* If we are converting an integer to a floating-point that can
2965 represent it exactly and back to an integer, we can skip the
2966 floating-point conversion. */
2967 if (inside_int && inter_float && final_int &&
2968 (unsigned) significand_size (TYPE_MODE (inter_type))
2969 >= inside_prec - !inside_unsignedp)
2971 if (useless_type_conversion_p (type, inside_type))
2973 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2974 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2975 update_stmt (stmt);
2976 return remove_prop_source_from_use (op0) ? 2 : 1;
2978 else
2980 gimple_assign_set_rhs1 (stmt, defop0);
2981 gimple_assign_set_rhs_code (stmt, CONVERT_EXPR);
2982 update_stmt (stmt);
2983 return remove_prop_source_from_use (op0) ? 2 : 1;
2988 return 0;
2991 /* Combine an element access with a shuffle. Returns true if there were
2992 any changes made, else it returns false. */
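/* E.g., with 4 x 32-bit vectors (illustrative GIMPLE):
     v_1 = VEC_PERM_EXPR <a_2, b_3, { 0, 5, 2, 7 }>;
     x_4 = BIT_FIELD_REF <v_1, 32, 32>;
   extracts element 1, and mask element 1 is 5 == 4 + 1, so this
   becomes x_4 = BIT_FIELD_REF <b_3, 32, 32>.  */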
2994 static bool
2995 simplify_bitfield_ref (gimple_stmt_iterator *gsi)
2997 gimple stmt = gsi_stmt (*gsi);
2998 gimple def_stmt;
2999 tree op, op0, op1, op2;
3000 tree elem_type;
3001 unsigned idx, n, size;
3002 enum tree_code code;
3004 op = gimple_assign_rhs1 (stmt);
3005 gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);
3007 op0 = TREE_OPERAND (op, 0);
3008 if (TREE_CODE (op0) != SSA_NAME
3009 || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
3010 return false;
3012 def_stmt = get_prop_source_stmt (op0, false, NULL);
3013 if (!def_stmt || !can_propagate_from (def_stmt))
3014 return false;
3016 op1 = TREE_OPERAND (op, 1);
3017 op2 = TREE_OPERAND (op, 2);
3018 code = gimple_assign_rhs_code (def_stmt);
3020 if (code == CONSTRUCTOR)
3022 tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
3023 gimple_assign_rhs1 (def_stmt), op1, op2);
3024 if (!tem || !valid_gimple_rhs_p (tem))
3025 return false;
3026 gimple_assign_set_rhs_from_tree (gsi, tem);
3027 update_stmt (gsi_stmt (*gsi));
3028 return true;
3031 elem_type = TREE_TYPE (TREE_TYPE (op0));
3032 if (TREE_TYPE (op) != elem_type)
3033 return false;
3035 size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
3036 n = TREE_INT_CST_LOW (op1) / size;
3037 if (n != 1)
3038 return false;
3039 idx = TREE_INT_CST_LOW (op2) / size;
3041 if (code == VEC_PERM_EXPR)
3043 tree p, m, index, tem;
3044 unsigned nelts;
3045 m = gimple_assign_rhs3 (def_stmt);
3046 if (TREE_CODE (m) != VECTOR_CST)
3047 return false;
3048 nelts = VECTOR_CST_NELTS (m);
3049 idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
3050 idx %= 2 * nelts;
3051 if (idx < nelts)
3053 p = gimple_assign_rhs1 (def_stmt);
3055 else
3057 p = gimple_assign_rhs2 (def_stmt);
3058 idx -= nelts;
3060 index = build_int_cst (TREE_TYPE (TREE_TYPE (m)), idx * size);
3061 tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
3062 unshare_expr (p), op1, index);
3063 gimple_assign_set_rhs1 (stmt, tem);
3064 fold_stmt (gsi);
3065 update_stmt (gsi_stmt (*gsi));
3066 return true;
3069 return false;
/* Determine whether applying the two permutations (MASK1 then MASK2)
   gives back one of the inputs: return 1 if the result selects the
   first input, 2 if it selects the second, and 0 otherwise.  */
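/* E.g., two reversals of a 4-element vector cancel out: with
   MASK1 == MASK2 == { 3, 2, 1, 0 } the combined mask is
   { 0, 1, 2, 3 }, so the function returns 1.  (Illustrative.)  */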
3075 static int
3076 is_combined_permutation_identity (tree mask1, tree mask2)
3078 tree mask;
3079 unsigned int nelts, i, j;
3080 bool maybe_identity1 = true;
3081 bool maybe_identity2 = true;
3083 gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
3084 && TREE_CODE (mask2) == VECTOR_CST);
3085 mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
3086 gcc_assert (TREE_CODE (mask) == VECTOR_CST);
3088 nelts = VECTOR_CST_NELTS (mask);
3089 for (i = 0; i < nelts; i++)
3091 tree val = VECTOR_CST_ELT (mask, i);
3092 gcc_assert (TREE_CODE (val) == INTEGER_CST);
3093 j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
3094 if (j == i)
3095 maybe_identity2 = false;
3096 else if (j == i + nelts)
3097 maybe_identity1 = false;
3098 else
3099 return 0;
3101 return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
3104 /* Combine a shuffle with its arguments. Returns 1 if there were any
3105 changes made, 2 if cfg-cleanup needs to run. Else it returns 0. */
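/* E.g. (illustrative GIMPLE):
     t_1 = VEC_PERM_EXPR <a_2, a_2, { 3, 2, 1, 0 }>;
     r_3 = VEC_PERM_EXPR <t_1, t_1, { 3, 2, 1, 0 }>;
   simplifies to r_3 = a_2.  */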
3107 static int
3108 simplify_permutation (gimple_stmt_iterator *gsi)
3110 gimple stmt = gsi_stmt (*gsi);
3111 gimple def_stmt;
3112 tree op0, op1, op2, op3, arg0, arg1;
3113 enum tree_code code;
3114 bool single_use_op0 = false;
3116 gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
3118 op0 = gimple_assign_rhs1 (stmt);
3119 op1 = gimple_assign_rhs2 (stmt);
3120 op2 = gimple_assign_rhs3 (stmt);
3122 if (TREE_CODE (op2) != VECTOR_CST)
3123 return 0;
3125 if (TREE_CODE (op0) == VECTOR_CST)
3127 code = VECTOR_CST;
3128 arg0 = op0;
3130 else if (TREE_CODE (op0) == SSA_NAME)
3132 def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
3133 if (!def_stmt || !can_propagate_from (def_stmt))
3134 return 0;
3136 code = gimple_assign_rhs_code (def_stmt);
3137 arg0 = gimple_assign_rhs1 (def_stmt);
3139 else
3140 return 0;
3142 /* Two consecutive shuffles. */
3143 if (code == VEC_PERM_EXPR)
3145 tree orig;
3146 int ident;
3148 if (op0 != op1)
3149 return 0;
3150 op3 = gimple_assign_rhs3 (def_stmt);
3151 if (TREE_CODE (op3) != VECTOR_CST)
3152 return 0;
3153 ident = is_combined_permutation_identity (op3, op2);
3154 if (!ident)
3155 return 0;
3156 orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
3157 : gimple_assign_rhs2 (def_stmt);
3158 gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
3159 gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
3160 gimple_set_num_ops (stmt, 2);
3161 update_stmt (stmt);
3162 return remove_prop_source_from_use (op0) ? 2 : 1;
3165 /* Shuffle of a constructor. */
3166 else if (code == CONSTRUCTOR || code == VECTOR_CST)
3168 tree opt;
3169 bool ret = false;
3170 if (op0 != op1)
3172 if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
3173 return 0;
3175 if (TREE_CODE (op1) == VECTOR_CST)
3176 arg1 = op1;
3177 else if (TREE_CODE (op1) == SSA_NAME)
3179 enum tree_code code2;
3181 gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
3182 if (!def_stmt2 || !can_propagate_from (def_stmt2))
3183 return 0;
3185 code2 = gimple_assign_rhs_code (def_stmt2);
3186 if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
3187 return 0;
3188 arg1 = gimple_assign_rhs1 (def_stmt2);
3190 else
3191 return 0;
3193 else
3195 /* Already used twice in this statement. */
3196 if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
3197 return 0;
3198 arg1 = arg0;
3200 opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
3201 if (!opt
3202 || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
3203 return 0;
3204 gimple_assign_set_rhs_from_tree (gsi, opt);
3205 update_stmt (gsi_stmt (*gsi));
3206 if (TREE_CODE (op0) == SSA_NAME)
3207 ret = remove_prop_source_from_use (op0);
3208 if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
3209 ret |= remove_prop_source_from_use (op1);
3210 return ret ? 2 : 1;
3213 return 0;
/* Recognize a CONSTRUCTOR of BIT_FIELD_REFs as a VEC_PERM_EXPR.  Returns
   true if there were any changes.  */
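/* E.g., a CONSTRUCTOR gathering BIT_FIELD_REFs of a single vector
   (illustrative GIMPLE, 2 x 32-bit elements):
     x_1 = BIT_FIELD_REF <v_2, 32, 32>;
     y_3 = BIT_FIELD_REF <v_2, 32, 0>;
     r_4 = {x_1, y_3};
   becomes r_4 = VEC_PERM_EXPR <v_2, v_2, { 1, 0 }>;  */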
3218 static bool
3219 simplify_vector_constructor (gimple_stmt_iterator *gsi)
3221 gimple stmt = gsi_stmt (*gsi);
3222 gimple def_stmt;
3223 tree op, op2, orig, type, elem_type;
3224 unsigned elem_size, nelts, i;
3225 enum tree_code code;
3226 constructor_elt *elt;
3227 unsigned char *sel;
3228 bool maybe_ident;
3230 gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);
3232 op = gimple_assign_rhs1 (stmt);
3233 type = TREE_TYPE (op);
3234 gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);
3236 nelts = TYPE_VECTOR_SUBPARTS (type);
3237 elem_type = TREE_TYPE (type);
3238 elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
3240 sel = XALLOCAVEC (unsigned char, nelts);
3241 orig = NULL;
3242 maybe_ident = true;
3243 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
3245 tree ref, op1;
3247 if (i >= nelts)
3248 return false;
3250 if (TREE_CODE (elt->value) != SSA_NAME)
3251 return false;
3252 def_stmt = get_prop_source_stmt (elt->value, false, NULL);
3253 if (!def_stmt)
3254 return false;
3255 code = gimple_assign_rhs_code (def_stmt);
3256 if (code != BIT_FIELD_REF)
3257 return false;
3258 op1 = gimple_assign_rhs1 (def_stmt);
3259 ref = TREE_OPERAND (op1, 0);
3260 if (orig)
3262 if (ref != orig)
3263 return false;
3265 else
3267 if (TREE_CODE (ref) != SSA_NAME)
3268 return false;
3269 if (!useless_type_conversion_p (type, TREE_TYPE (ref)))
3270 return false;
3271 orig = ref;
3273 if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
3274 return false;
3275 sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
      if (sel[i] != i)
	maybe_ident = false;
3278 if (i < nelts)
3279 return false;
3281 if (maybe_ident)
3282 gimple_assign_set_rhs_from_tree (gsi, orig);
3283 else
3285 tree mask_type, *mask_elts;
3287 if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
3288 return false;
3289 mask_type
3290 = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
3291 nelts);
3292 if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
3293 || GET_MODE_SIZE (TYPE_MODE (mask_type))
3294 != GET_MODE_SIZE (TYPE_MODE (type)))
3295 return false;
3296 mask_elts = XALLOCAVEC (tree, nelts);
3297 for (i = 0; i < nelts; i++)
3298 mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
3299 op2 = build_vector (mask_type, mask_elts);
3300 gimple_assign_set_rhs_with_ops_1 (gsi, VEC_PERM_EXPR, orig, orig, op2);
3302 update_stmt (gsi_stmt (*gsi));
3303 return true;
3306 /* Main entry point for the forward propagation and statement combine
3307 optimizer. */
3309 static unsigned int
3310 ssa_forward_propagate_and_combine (void)
3312 basic_block bb;
3313 unsigned int todoflags = 0;
3315 cfg_changed = false;
3317 FOR_EACH_BB (bb)
3319 gimple_stmt_iterator gsi;
3321 /* Apply forward propagation to all stmts in the basic-block.
3322 Note we update GSI within the loop as necessary. */
3323 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
3325 gimple stmt = gsi_stmt (gsi);
3326 tree lhs, rhs;
3327 enum tree_code code;
3329 if (!is_gimple_assign (stmt))
3331 gsi_next (&gsi);
3332 continue;
3335 lhs = gimple_assign_lhs (stmt);
3336 rhs = gimple_assign_rhs1 (stmt);
3337 code = gimple_assign_rhs_code (stmt);
3338 if (TREE_CODE (lhs) != SSA_NAME
3339 || has_zero_uses (lhs))
3341 gsi_next (&gsi);
3342 continue;
3345 /* If this statement sets an SSA_NAME to an address,
3346 try to propagate the address into the uses of the SSA_NAME. */
3347 if (code == ADDR_EXPR
3348 /* Handle pointer conversions on invariant addresses
3349 as well, as this is valid gimple. */
3350 || (CONVERT_EXPR_CODE_P (code)
3351 && TREE_CODE (rhs) == ADDR_EXPR
3352 && POINTER_TYPE_P (TREE_TYPE (lhs))))
3354 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3355 if ((!base
3356 || !DECL_P (base)
3357 || decl_address_invariant_p (base))
3358 && !stmt_references_abnormal_ssa_name (stmt)
3359 && forward_propagate_addr_expr (lhs, rhs, true))
3361 release_defs (stmt);
3362 gsi_remove (&gsi, true);
3364 else
3365 gsi_next (&gsi);
3367 else if (code == POINTER_PLUS_EXPR)
3369 tree off = gimple_assign_rhs2 (stmt);
3370 if (TREE_CODE (off) == INTEGER_CST
3371 && can_propagate_from (stmt)
3372 && !simple_iv_increment_p (stmt)
3373 /* ??? Better adjust the interface to that function
3374 instead of building new trees here. */
3375 && forward_propagate_addr_expr
3376 (lhs,
3377 build1_loc (gimple_location (stmt),
3378 ADDR_EXPR, TREE_TYPE (rhs),
3379 fold_build2 (MEM_REF,
3380 TREE_TYPE (TREE_TYPE (rhs)),
3381 rhs,
3382 fold_convert (ptr_type_node,
3383 off))), true))
3385 release_defs (stmt);
3386 gsi_remove (&gsi, true);
3388 else if (is_gimple_min_invariant (rhs))
3390 /* Make sure to fold &a[0] + off_1 here. */
3391 fold_stmt_inplace (&gsi);
3392 update_stmt (stmt);
3393 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
3394 gsi_next (&gsi);
3396 else
3397 gsi_next (&gsi);
3399 else if (TREE_CODE_CLASS (code) == tcc_comparison)
3401 if (forward_propagate_comparison (&gsi))
3402 cfg_changed = true;
3404 else
3405 gsi_next (&gsi);
3408 /* Combine stmts with the stmts defining their operands.
3409 Note we update GSI within the loop as necessary. */
3410 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
3412 gimple stmt = gsi_stmt (gsi);
3413 bool changed = false;
3415 /* Mark stmt as potentially needing revisiting. */
3416 gimple_set_plf (stmt, GF_PLF_1, false);
3418 switch (gimple_code (stmt))
3420 case GIMPLE_ASSIGN:
3422 tree rhs1 = gimple_assign_rhs1 (stmt);
3423 enum tree_code code = gimple_assign_rhs_code (stmt);
3425 if ((code == BIT_NOT_EXPR
3426 || code == NEGATE_EXPR)
3427 && TREE_CODE (rhs1) == SSA_NAME)
3428 changed = simplify_not_neg_expr (&gsi);
3429 else if (code == COND_EXPR
3430 || code == VEC_COND_EXPR)
3432 /* In this case the entire COND_EXPR is in rhs1. */
3433 if (forward_propagate_into_cond (&gsi)
3434 || combine_cond_exprs (&gsi))
3436 changed = true;
3437 stmt = gsi_stmt (gsi);
3440 else if (TREE_CODE_CLASS (code) == tcc_comparison)
3442 int did_something;
3443 did_something = forward_propagate_into_comparison (&gsi);
3444 if (did_something == 2)
3445 cfg_changed = true;
3446 changed = did_something != 0;
3448 else if ((code == PLUS_EXPR
3449 || code == BIT_IOR_EXPR
3450 || code == BIT_XOR_EXPR)
3451 && simplify_rotate (&gsi))
3452 changed = true;
3453 else if (code == BIT_AND_EXPR
3454 || code == BIT_IOR_EXPR
3455 || code == BIT_XOR_EXPR)
3456 changed = simplify_bitwise_binary (&gsi);
3457 else if (code == PLUS_EXPR
3458 || code == MINUS_EXPR)
3459 changed = associate_plusminus (&gsi);
3460 else if (code == POINTER_PLUS_EXPR)
3461 changed = associate_pointerplus (&gsi);
3462 else if (CONVERT_EXPR_CODE_P (code)
3463 || code == FLOAT_EXPR
3464 || code == FIX_TRUNC_EXPR)
3466 int did_something = combine_conversions (&gsi);
3467 if (did_something == 2)
3468 cfg_changed = true;
		    /* If we have a narrowing conversion to an integral
		       type that is fed by a BIT_AND_EXPR, we might be
		       able to remove the BIT_AND_EXPR if it merely
		       masks off bits outside the final type (and nothing
		       else).  */
3475 if (! did_something)
3477 tree outer_type = TREE_TYPE (gimple_assign_lhs (stmt));
3478 tree inner_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
3479 if (INTEGRAL_TYPE_P (outer_type)
3480 && INTEGRAL_TYPE_P (inner_type)
3481 && (TYPE_PRECISION (outer_type)
3482 <= TYPE_PRECISION (inner_type)))
3483 did_something = simplify_conversion_from_bitmask (&gsi);
3486 changed = did_something != 0;
3488 else if (code == VEC_PERM_EXPR)
3490 int did_something = simplify_permutation (&gsi);
3491 if (did_something == 2)
3492 cfg_changed = true;
3493 changed = did_something != 0;
3495 else if (code == BIT_FIELD_REF)
3496 changed = simplify_bitfield_ref (&gsi);
3497 else if (code == CONSTRUCTOR
3498 && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
3499 changed = simplify_vector_constructor (&gsi);
3500 break;
3503 case GIMPLE_SWITCH:
3504 changed = simplify_gimple_switch (stmt);
3505 break;
3507 case GIMPLE_COND:
3509 int did_something;
3510 did_something = forward_propagate_into_gimple_cond (stmt);
3511 if (did_something == 2)
3512 cfg_changed = true;
3513 changed = did_something != 0;
3514 break;
3517 case GIMPLE_CALL:
3519 tree callee = gimple_call_fndecl (stmt);
3520 if (callee != NULL_TREE
3521 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
3522 changed = simplify_builtin_call (&gsi, callee);
3523 break;
3526 default:;
3529 if (changed)
3531 /* If the stmt changed then re-visit it and the statements
3532 inserted before it. */
3533 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
3534 if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
3535 break;
3536 if (gsi_end_p (gsi))
3537 gsi = gsi_start_bb (bb);
3538 else
3539 gsi_next (&gsi);
3541 else
3543 /* Stmt no longer needs to be revisited. */
3544 gimple_set_plf (stmt, GF_PLF_1, true);
3545 gsi_next (&gsi);
3550 if (cfg_changed)
3551 todoflags |= TODO_cleanup_cfg;
3553 return todoflags;
3557 static bool
3558 gate_forwprop (void)
3560 return flag_tree_forwprop;
3563 namespace {
3565 const pass_data pass_data_forwprop =
3567 GIMPLE_PASS, /* type */
3568 "forwprop", /* name */
3569 OPTGROUP_NONE, /* optinfo_flags */
3570 true, /* has_gate */
3571 true, /* has_execute */
3572 TV_TREE_FORWPROP, /* tv_id */
3573 ( PROP_cfg | PROP_ssa ), /* properties_required */
3574 0, /* properties_provided */
3575 0, /* properties_destroyed */
3576 0, /* todo_flags_start */
3577 ( TODO_update_ssa | TODO_verify_ssa ), /* todo_flags_finish */
3580 class pass_forwprop : public gimple_opt_pass
3582 public:
3583 pass_forwprop (gcc::context *ctxt)
3584 : gimple_opt_pass (pass_data_forwprop, ctxt)
3587 /* opt_pass methods: */
3588 opt_pass * clone () { return new pass_forwprop (m_ctxt); }
3589 bool gate () { return gate_forwprop (); }
3590 unsigned int execute () { return ssa_forward_propagate_and_combine (); }
3592 }; // class pass_forwprop
3594 } // anon namespace
3596 gimple_opt_pass *
3597 make_pass_forwprop (gcc::context *ctxt)
3599 return new pass_forwprop (ctxt);