/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "flags.h"
#include "expr.h"
#include "cfgloop.h"
#include "optabs.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-dom.h"

/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
   integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or

     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither have volatile on them):

     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or

     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */
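
/* As a source-level illustration of the first class of transformations
   (a hypothetical example, not taken from GCC's testsuite):

     int f (int a, int b)
     {
       _Bool x = a < b;
       if (x)
         return 1;
       return 0;
     }

   After forward propagation the GIMPLE condition tests a < b directly,
   the temporary X becomes dead, and the simplified condition is
   available for later jump threading.  */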

static bool forward_propagate_addr_expr (tree, tree, bool);

/* Set to true if we delete dead edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple stmt);

/* Get the next statement we can propagate NAME's value into skipping
   trivial copies.  Returns the statement that is suitable as a
   propagation destination or NULL_TREE if there is no such one.
   This only returns destinations in a single-use chain.  If
   FINAL_NAME_P is non-NULL, it receives the ssa name that represents
   the use.  */
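
/* For instance (illustrative SSA names): given the single-use chain

     b_2 = a_1;
     c_3 = b_2;
     if (c_3 != 0) ...

   get_prop_dest_stmt (a_1, &n) skips the two trivial copies, returns
   the GIMPLE_COND and stores c_3 to n.  */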

static gimple
get_prop_dest_stmt (tree name, tree *final_name_p)
{
  use_operand_p use;
  gimple use_stmt;

  do {
    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))
      return NULL;

    /* If this is not a trivial copy, we found it.  */
    if (!gimple_assign_ssa_name_copy_p (use_stmt)
        || gimple_assign_rhs1 (use_stmt) != name)
      break;

    /* Continue searching uses of the copy destination.  */
    name = gimple_assign_lhs (use_stmt);
  } while (1);

  if (final_name_p)
    *final_name_p = name;

  return use_stmt;
}

/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL_TREE if there is no such one.
   If SINGLE_USE_ONLY is set, consider only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
        single_use = false;
        if (single_use_only)
          return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is a simple copy, continue looking.  */
    if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
      name = gimple_assign_rhs1 (def_stmt);
    else
      {
        if (!single_use_only && single_use_p)
          *single_use_p = single_use;

        return def_stmt;
      }
  } while (1);
}

/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can always be propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we cannot apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}

/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (SSA_NAME_IN_FREE_LIST (name)
        || SSA_NAME_IS_DEFAULT_DEF (name)
        || !has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    if (gimple_code (stmt) == GIMPLE_PHI
        || gimple_has_side_effects (stmt))
      return cfg_changed;

    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    if (gsi_remove (&gsi, true))
      cfg_changed |= gimple_purge_dead_eh_edges (bb);
    release_defs (stmt);

    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}

/* Return the rhs of a gimple_assign STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt),
                            gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}

/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}

/* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
   of its operand.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (gimple stmt,
                                     enum tree_code code, tree type,
                                     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        rhs0, op1, !single_use0_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        op0, rhs1, !single_use1_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
                                  rhs0, rhs1,
                                  !(single_use0_p && single_use1_p));

  return tmp;
}

/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
                                             gimple_assign_rhs_code (stmt),
                                             type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}

/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gimple stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
                                             boolean_type_node,
                                             rhs1, rhs2);
  if (tmp)
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_gimple_expr (dump_file, stmt, 0, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
           && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
           && integer_zerop (rhs2))
          || (code == NE_EXPR
              && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}

/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  bool swap = false;

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
                                               TREE_TYPE (cond),
                                               TREE_OPERAND (cond, 0),
                                               TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      enum tree_code def_code;
      tree name = cond;
      gimple def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return false;

      def_code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (def_code) == tcc_comparison)
        tmp = fold_build2_loc (gimple_location (def_stmt),
                               def_code,
                               TREE_TYPE (cond),
                               gimple_assign_rhs1 (def_stmt),
                               gimple_assign_rhs2 (def_stmt));
      else if (code == COND_EXPR
               && ((def_code == BIT_NOT_EXPR
                    && TYPE_PRECISION (TREE_TYPE (cond)) == 1)
                   || (def_code == BIT_XOR_EXPR
                       && integer_onep (gimple_assign_rhs2 (def_stmt)))))
        {
          tmp = gimple_assign_rhs1 (def_stmt);
          swap = true;
        }
    }

  if (tmp
      && is_gimple_condexpr (tmp))
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, cond, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
                                  : integer_onep (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
      else if (integer_zerop (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
      else
        {
          gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
          if (swap)
            {
              tree t = gimple_assign_rhs2 (stmt);
              gimple_assign_set_rhs2 (stmt, gimple_assign_rhs3 (stmt));
              gimple_assign_set_rhs3 (stmt, t);
            }
        }
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return true;
    }

  return false;
}

/* Propagate from the ssa name definition statements of COND_EXPR
   values in the rhs of statement STMT into the conditional arms
   if that simplifies it.
   Returns true if the stmt was changed.  */
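
/* For instance (illustrative SSA names): in

     x_1 = c_2 ? a_3 : b_4;
     y_5 = c_2 ? x_1 : d_6;

   the second COND_EXPR can use a_3 in its true arm directly:

     y_5 = c_2 ? a_3 : d_6;  */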

static bool
combine_cond_exprs (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree cond, val1, val2;
  bool changed = false;

  cond = gimple_assign_rhs1 (stmt);
  val1 = gimple_assign_rhs2 (stmt);
  if (TREE_CODE (val1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val1);
      if (is_gimple_assign (def_stmt)
          && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
          && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
        {
          val1 = unshare_expr (gimple_assign_rhs2 (def_stmt));
          gimple_assign_set_rhs2 (stmt, val1);
          changed = true;
        }
    }
  val2 = gimple_assign_rhs3 (stmt);
  if (TREE_CODE (val2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val2);
      if (is_gimple_assign (def_stmt)
          && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
          && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
        {
          val2 = unshare_expr (gimple_assign_rhs3 (def_stmt));
          gimple_assign_set_rhs3 (stmt, val2);
          changed = true;
        }
    }
  if (operand_equal_p (val1, val2, 0))
    {
      gimple_assign_set_rhs_from_tree (gsi_p, val1);
      stmt = gsi_stmt (*gsi_p);
      changed = true;
    }

  if (changed)
    update_stmt (stmt);

  return changed;
}

/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
    cfg_changed = true;

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}

/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation can
   be not totally successful, yet things may have been changed).  */
static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Do not perform copy-propagation but recurse through copy chains.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == SSA_NAME)
    return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

  /* The use statement could be a conversion.  Recurse to the uses of the
     lhs as copyprop does not copy through pointer to integer to pointer
     conversions and FRE does not catch all cases either.
     Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && CONVERT_EXPR_CODE_P (rhs_code))
    {
      /* If there is a point in a conversion chain where the types match
         so we can remove a conversion re-materialize the address here
         and stop.  */
      if (single_use_p
          && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
        {
          gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
          gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
          return true;
        }

      /* Else recurse if the conversion preserves the address value.  */
      if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
           || POINTER_TYPE_P (TREE_TYPE (lhs)))
          && (TYPE_PRECISION (TREE_TYPE (lhs))
              >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
        return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

      return false;
    }

  /* If this isn't a conversion chain from this on we only can propagate
     into compatible pointer contexts.  */
  if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
    return false;

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
         for further propagation.  Simply rely on fold building that
         and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
                                 def_rhs,
                                 fold_convert (ptr_type_node,
                                               gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
        return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
                                                    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
         bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
          && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
        return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
                                        new_def_rhs, NULL_TREE);
      else if (is_gimple_min_invariant (new_def_rhs))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
                                        new_def_rhs, NULL_TREE);
      else
        return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (lhs);
          tree new_ptr;
          off += double_int::from_shwi (def_rhs_offset);
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (lhs, 0) = new_ptr;
          TREE_OPERAND (lhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the only use.  */
          if (single_use_p)
            return true;
        }
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the LHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (lhs, 1))
               && ((gimple_assign_lhs (use_stmt) == lhs
                    && useless_type_conversion_p
                         (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
                          TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
                   || types_compatible_p (TREE_TYPE (lhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
               /* Don't forward anything into clobber stmts if it would result
                  in the lhs no longer being a MEM_REF.  */
               && (!gimple_clobber_p (use_stmt)
                   || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_lhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (lhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
          new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *lhsp = new_lhs;
          TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
          *def_rhs_basep = saved;
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the
             only use.  */
          if (single_use_p)
            return true;
        }
      else
        /* We can have a struct assignment dereferencing our name twice.
           Note that we didn't propagate into the lhs to not falsely
           claim we did when propagating into the rhs.  */
        res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
  if (TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  while (handled_component_p (*rhsp))
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (rhs);
          tree new_ptr;
          off += double_int::from_shwi (def_rhs_offset);
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (rhs, 0) = new_ptr;
          TREE_OPERAND (rhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the RHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (rhs, 1))
               && ((gimple_assign_rhs1 (use_stmt) == rhs
                    && useless_type_conversion_p
                         (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
                   || types_compatible_p (TREE_TYPE (rhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_rhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (rhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
          new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *rhsp = new_rhs;
          TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
          *def_rhs_basep = saved;
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
                                 ADDR_EXPR, TREE_TYPE (def_rhs),
                                 fold_build2 (MEM_REF,
                                              TREE_TYPE (TREE_TYPE (def_rhs)),
                                              unshare_expr (def_rhs),
                                              fold_convert (ptr_type_node,
                                                            rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  return false;
}

/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
   the single use in the previous invocation.  Pass true when calling
   this as toplevel.

   Returns true, if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
{
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = parent_single_use_p && has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (!is_gimple_assign (use_stmt))
        {
          if (!is_gimple_debug (use_stmt))
            all = false;
          continue;
        }

      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
      result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                              single_use_p);
      /* If the use has moved to a different statement adjust
         the update machinery for the old statement too.  */
      if (use_stmt != gsi_stmt (gsi))
        {
          update_stmt (use_stmt);
          use_stmt = gsi_stmt (gsi);
        }
      update_stmt (use_stmt);
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  return all && has_zero_uses (name);
}

/* Forward propagate the comparison defined in *DEFGSI like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  Advance DEFGSI to the next
   statement.  */

static bool
forward_propagate_comparison (gimple_stmt_iterator *defgsi)
{
  gimple stmt = gsi_stmt (*defgsi);
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  tree tmp = NULL_TREE;
  gimple_stmt_iterator gsi;
  enum tree_code code;
  tree lhs;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    goto bailout;

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt
      || !is_gimple_assign (use_stmt))
    goto bailout;

  code = gimple_assign_rhs_code (use_stmt);
  lhs = gimple_assign_lhs (use_stmt);
  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
    goto bailout;

  /* We can propagate the condition into a statement that
     computes the logical negation of the comparison result.  */
  if ((code == BIT_NOT_EXPR
       && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
      || (code == BIT_XOR_EXPR
          && integer_onep (gimple_assign_rhs2 (use_stmt))))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
      bool nans = HONOR_NANS (TYPE_MODE (type));
      enum tree_code inv_code;
      inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
      if (inv_code == ERROR_MARK)
        goto bailout;

      tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
                    gimple_assign_rhs2 (stmt));
    }
  else
    goto bailout;

  gsi = gsi_for_stmt (use_stmt);
  gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
  use_stmt = gsi_stmt (gsi);
  update_stmt (use_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Replaced '");
      print_gimple_expr (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "' with '");
      print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
      fprintf (dump_file, "'\n");
    }

  /* When we remove stmt now the iterator defgsi goes off its current
     sequence, hence advance it now.  */
  gsi_next (defgsi);

  /* Remove defining statements.  */
  return remove_prop_source_from_use (name);

bailout:
  gsi_next (defgsi);
  return false;
}

/* GSI_P points to a statement which performs a narrowing integral
   conversion.

   Look for cases like:

     t = x & c;
     y = (T) t;

   Turn them into:

     t = x & c;
     y = (T) x;

   If T is narrower than X's type and C merely masks off bits outside
   of (T) and nothing else.

   Normally we'd let DCE remove the dead statement.  But no DCE runs
   after the last forwprop/combine pass, so we remove the obviously
   dead code ourselves.

   Return TRUE if a change was made, FALSE otherwise.  */

static bool
simplify_conversion_from_bitmask (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));

  /* See if the input for the conversion was set via a BIT_AND_EXPR and
     the only use of the BIT_AND_EXPR result is the conversion.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == BIT_AND_EXPR
      && has_single_use (gimple_assign_lhs (rhs_def_stmt)))
    {
      tree rhs_def_operand1 = gimple_assign_rhs1 (rhs_def_stmt);
      tree rhs_def_operand2 = gimple_assign_rhs2 (rhs_def_stmt);
      tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));

      /* Now verify suitability of the BIT_AND_EXPR's operands.
         The first must be an SSA_NAME that we can propagate and the
         second must be an integer constant that masks out all the
         bits outside the final result's type, but nothing else.  */
      if (TREE_CODE (rhs_def_operand1) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand1)
          && TREE_CODE (rhs_def_operand2) == INTEGER_CST
          && operand_equal_p (rhs_def_operand2,
                              build_low_bits_mask (TREE_TYPE (rhs_def_operand2),
                                                   TYPE_PRECISION (lhs_type)),
                              0))
        {
          /* This is an optimizable case.  Replace the source operand
             in the conversion with the first source operand of the
             BIT_AND_EXPR.  */
          gimple_assign_set_rhs1 (stmt, rhs_def_operand1);
          stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);

          /* There is no DCE after the last forwprop pass.  It's
             easy to clean up the first order effects here.  */
          gimple_stmt_iterator si;
          si = gsi_for_stmt (rhs_def_stmt);
          gsi_remove (&si, true);
          release_defs (rhs_def_stmt);
          return true;
        }
    }

  return false;
}

/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
   If so, we can change STMT into lhs = y which can later be copy
   propagated.  Similarly for negation.

   This could trivially be formulated as a forward propagation
   to immediate uses.  However, we already had an implementation
   from DOM which used backward propagation via the use-def links.

   It turns out that backward propagation is actually faster as
   there's less work to do for each NOT/NEG expression we find.
   Backwards propagation needs to look at the statement in a single
   backlink.  Forward propagation needs to look at potentially more
   than one forward link.

   Returns true when the statement was changed.  */

static bool
simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree rhs = gimple_assign_rhs1 (stmt);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);

  /* See if the RHS_DEF_STMT has the same form as our statement.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
    {
      tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);

      /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
      if (TREE_CODE (rhs_def_operand) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
        {
          gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
          stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);
          return true;
        }
    }

  return false;
}

/* Helper function for simplify_gimple_switch.  Remove case labels that
   have values outside the range of the new type.  */

static void
simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  vec<tree> labels;
  labels.create (branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  */
  for (i = 1; i < branch_num; i++)
    labels.quick_push (gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = labels.length ();
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
         out-of-range for INDEX_TYPE.  Push one label and let the
         CFG cleanups deal with this further.  */
      if (len == 0)
        {
          tree label, elt;

          label = CASE_LABEL (gimple_switch_default_label (stmt));
          elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
          labels.quick_push (elt);
          len = 1;
        }

      for (i = 0; i < labels.length (); i++)
        gimple_switch_set_label (stmt, i + 1, labels[i]);
      for (i++ ; i < branch_num; i++)
        gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
        {
          tree elt = gimple_switch_label (stmt, i);
          basic_block target = label_to_block (CASE_LABEL (elt));
          bitmap_set_bit (target_blocks, target->index);
        }
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
        {
          if (! bitmap_bit_p (target_blocks, e->dest->index))
            {
              remove_edge (e);
              cfg_changed = true;
              free_dominance_info (CDI_DOMINATORS);
            }
          else
            ei_next (&ei);
        }
      BITMAP_FREE (target_blocks);
    }

  labels.release ();
}

/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */
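
/* For example (an illustrative case, assuming 8-bit unsigned char):
   given

     unsigned char c = ...;
     switch ((int) c) { ... }

   the widening cast preserves the value, so the switch can index on C
   directly once case labels outside [0, 255] have been pruned by
   simplify_gimple_switch_label_vec.  */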

static bool
simplify_gimple_switch (gimple stmt)
{
  tree cond = gimple_switch_index (stmt);
  tree def, to, ti;
  gimple def_stmt;

  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      def_stmt = SSA_NAME_DEF_STMT (cond);
      if (is_gimple_assign (def_stmt))
        {
          if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
            {
              int need_precision;
              bool fail;

              def = gimple_assign_rhs1 (def_stmt);

              to = TREE_TYPE (cond);
              ti = TREE_TYPE (def);

              /* If we have an extension that preserves value, then we
                 can copy the source value into the switch.  */

              need_precision = TYPE_PRECISION (ti);
              fail = false;
              if (! INTEGRAL_TYPE_P (ti))
                fail = true;
              else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
                fail = true;
              else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
                need_precision += 1;
              if (TYPE_PRECISION (to) < need_precision)
                fail = true;

              if (!fail)
                {
                  gimple_switch_set_index (stmt, def);
                  simplify_gimple_switch_label_vec (stmt, ti);
                  update_stmt (stmt);
                  return true;
                }
            }
        }
    }

  return false;
}

/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */
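
/* For instance (hypothetical operands, assuming 4-byte int): for
   p1 = &a[1] and p2 = &a[4] into an int array A, both walks reach the
   common base &a, and the function returns 12.  If no common base is
   found within CPD_ITERATIONS steps, NULL_TREE is returned.  */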

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
         twice, to handle ADDR_EXPR directly in p1/p2,
         SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
         on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
        {
          if (!POINTER_TYPE_P (TREE_TYPE (p)))
            break;
          if (TREE_CODE (p) == ADDR_EXPR)
            {
              tree q = TREE_OPERAND (p, 0);
              HOST_WIDE_INT offset;
              tree base = get_addr_base_and_unit_offset (q, &offset);
              if (base)
                {
                  q = base;
                  if (offset)
                    off = size_binop (PLUS_EXPR, off, size_int (offset));
                }
              if (TREE_CODE (q) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
                {
                  p = TREE_OPERAND (q, 0);
                  off = size_binop (PLUS_EXPR, off,
                                    double_int_to_tree (sizetype,
                                                        mem_ref_offset (q)));
                }
              else
                {
                  exps[i][j] = q;
                  offs[i][j++] = off;
                  break;
                }
            }
          if (TREE_CODE (p) != SSA_NAME)
            break;
          exps[i][j] = p;
          offs[i][j++] = off;
          if (j == CPD_ITERATIONS)
            break;
          stmt = SSA_NAME_DEF_STMT (p);
          if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
            break;
          code = gimple_assign_rhs_code (stmt);
          if (code == POINTER_PLUS_EXPR)
            {
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
                break;
              off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
              p = gimple_assign_rhs1 (stmt);
            }
          else if (code == ADDR_EXPR || code == NOP_EXPR)
            p = gimple_assign_rhs1 (stmt);
          else
            break;
        }
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
        return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}

/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
     memcpy (p, "abcd", 4);
     memset (p + 4, ' ', 3);
   into
     memcpy (p, "abcd   ", 7);
   if the latter can be stored by pieces during expansion.  */
static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
          || gimple_call_lhs (stmt2)
          || CHAR_BIT != 8
          || BITS_PER_UNIT != 8)
        break;
      else
        {
          tree callee1;
          tree ptr1, src1, str1, off1, len1, lhs1;
          tree ptr2 = gimple_call_arg (stmt2, 0);
          tree val2 = gimple_call_arg (stmt2, 1);
          tree len2 = gimple_call_arg (stmt2, 2);
          tree diff, vdef, new_str_cst;
          gimple use_stmt;
          unsigned int ptr1_align;
          unsigned HOST_WIDE_INT src_len;
          char *src_buf;
          use_operand_p use_p;

          if (!tree_fits_shwi_p (val2)
              || !tree_fits_uhwi_p (len2))
            break;
          if (is_gimple_call (stmt1))
            {
              /* If first stmt is a call, it needs to be memcpy
                 or mempcpy, with string literal as second argument and
                 constant length.  */
              callee1 = gimple_call_fndecl (stmt1);
              if (callee1 == NULL_TREE
                  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
                  || gimple_call_num_args (stmt1) != 3)
                break;
              if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
                  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
                break;
              ptr1 = gimple_call_arg (stmt1, 0);
              src1 = gimple_call_arg (stmt1, 1);
              len1 = gimple_call_arg (stmt1, 2);
              lhs1 = gimple_call_lhs (stmt1);
              if (!tree_fits_uhwi_p (len1))
                break;
              str1 = string_constant (src1, &off1);
              if (str1 == NULL_TREE)
                break;
              if (!tree_fits_uhwi_p (off1)
                  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
                  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
                                             - tree_to_uhwi (off1)) > 0
                  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
                  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
                     != TYPE_MODE (char_type_node))
                break;
            }
          else if (gimple_assign_single_p (stmt1))
            {
              /* Otherwise look for length 1 memcpy optimized into
                 assignment.  */
              ptr1 = gimple_assign_lhs (stmt1);
              src1 = gimple_assign_rhs1 (stmt1);
              if (TREE_CODE (ptr1) != MEM_REF
                  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
                  || !tree_fits_shwi_p (src1))
                break;
              ptr1 = build_fold_addr_expr (ptr1);
              callee1 = NULL_TREE;
              len1 = size_one_node;
              lhs1 = NULL_TREE;
              off1 = size_zero_node;
              str1 = NULL_TREE;
            }
          else
            break;

          diff = constant_pointer_difference (ptr1, ptr2);
          if (diff == NULL && lhs1 != NULL)
            {
              diff = constant_pointer_difference (lhs1, ptr2);
              if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
                  && diff != NULL)
                diff = size_binop (PLUS_EXPR, diff,
                                   fold_convert (sizetype, len1));
            }
          /* If the difference between the second and first destination pointer
             is not constant, or is bigger than memcpy length, bail out.  */
          if (diff == NULL
              || !tree_fits_uhwi_p (diff)
              || tree_int_cst_lt (len1, diff))
            break;

          /* Use maximum of difference plus memset length and memcpy length
             as the new memcpy length, if it is too big, bail out.  */
          src_len = tree_to_uhwi (diff);
          src_len += tree_to_uhwi (len2);
          if (src_len < tree_to_uhwi (len1))
            src_len = tree_to_uhwi (len1);
          if (src_len > 1024)
            break;

          /* If mempcpy value is used elsewhere, bail out, as mempcpy
             with bigger length will return different result.  */
          if (lhs1 != NULL_TREE
              && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
              && (TREE_CODE (lhs1) != SSA_NAME
                  || !single_imm_use (lhs1, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          /* If anything reads memory in between memcpy and memset
             call, the modified memcpy call might change it.  */
          vdef = gimple_vdef (stmt1);
          if (vdef != NULL
              && (!single_imm_use (vdef, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          ptr1_align = get_pointer_alignment (ptr1);
          /* Construct the new source string literal.  */
          src_buf = XALLOCAVEC (char, src_len + 1);
          if (callee1)
            memcpy (src_buf,
                    TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
                    tree_to_uhwi (len1));
          else
            src_buf[0] = tree_to_shwi (src1);
          memset (src_buf + tree_to_uhwi (diff),
                  tree_to_shwi (val2), tree_to_uhwi (len2));
          src_buf[src_len] = '\0';
          /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
             handle embedded '\0's.  */
          if (strlen (src_buf) != src_len)
            break;
          rtl_profile_for_bb (gimple_bb (stmt2));
          /* If the new memcpy wouldn't be emitted by storing the literal
             by pieces, this optimization might enlarge .rodata too much,
             as commonly used string literals couldn't be shared any
             longer.  */
          if (!can_store_by_pieces (src_len,
                                    builtin_strncpy_read_str,
                                    src_buf, ptr1_align, false))
            break;

          new_str_cst = build_string_literal (src_len, src_buf);
          if (callee1)
            {
              /* If STMT1 is a mem{,p}cpy call, adjust it and remove
                 memset call.  */
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                gimple_call_set_lhs (stmt1, NULL_TREE);
              gimple_call_set_arg (stmt1, 1, new_str_cst);
              gimple_call_set_arg (stmt1, 2,
                                   build_int_cst (TREE_TYPE (len1), src_len));
              update_stmt (stmt1);
              unlink_stmt_vdef (stmt2);
              gsi_remove (gsi_p, true);
              release_defs (stmt2);
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                release_ssa_name (lhs1);
              return true;
            }
          else
            {
              /* Otherwise, if STMT1 is length 1 memcpy optimized into
                 assignment, remove STMT1 and change memset call into
                 memcpy call.  */
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

              if (!is_gimple_val (ptr1))
                ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
                                                 true, GSI_SAME_STMT);
              gimple_call_set_fndecl (stmt2,
                                      builtin_decl_explicit (BUILT_IN_MEMCPY));
              gimple_call_set_arg (stmt2, 0, ptr1);
              gimple_call_set_arg (stmt2, 1, new_str_cst);
              gimple_call_set_arg (stmt2, 2,
                                   build_int_cst (TREE_TYPE (len2), src_len));
              unlink_stmt_vdef (stmt1);
              gsi_remove (&gsi, true);
              release_defs (stmt1);
              update_stmt (stmt2);
              return false;
            }
        }
      break;
    default:
      break;
    }
  return false;
}

/* Checks if expression has type of one-bit precision, or is a known
   truth-valued expression.  */

static bool
truth_valued_ssa_name (tree name)
{
  gimple def;
  tree type = TREE_TYPE (name);

  if (!INTEGRAL_TYPE_P (type))
    return false;
  /* Don't check here for BOOLEAN_TYPE as the precision isn't
     necessarily one and so ~X is not equal to !X.  */
  if (TYPE_PRECISION (type) == 1)
    return true;
  def = SSA_NAME_DEF_STMT (name);
  if (is_gimple_assign (def))
    return truth_value_p (gimple_assign_rhs_code (def));
  return false;
}

/* Helper routine for simplify_bitwise_binary_1 function.
   Return for the SSA name NAME the expression X if it meets the
   condition NAME = !X.  Otherwise return NULL_TREE.
   Detected patterns for NAME = !X are:
     !X and X == 0 for X with integral type.
     X ^ 1, X != 1, or ~X for X with integral type with precision of one.  */

static tree
lookup_logical_inverted_value (tree name)
{
  tree op1, op2;
  enum tree_code code;
  gimple def;

  /* If NAME has a non-integral type, or isn't an SSA_NAME, then
     return.  */
  if (TREE_CODE (name) != SSA_NAME
      || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
    return NULL_TREE;
  def = SSA_NAME_DEF_STMT (name);
  if (!is_gimple_assign (def))
    return NULL_TREE;

  code = gimple_assign_rhs_code (def);
  op1 = gimple_assign_rhs1 (def);
  op2 = NULL_TREE;

  /* Get the second operand for an EQ_EXPR, NE_EXPR, or BIT_XOR_EXPR
     operation.  If CODE isn't one of those or BIT_NOT_EXPR, then return.  */
  if (code == EQ_EXPR || code == NE_EXPR
      || code == BIT_XOR_EXPR)
    op2 = gimple_assign_rhs2 (def);

  switch (code)
    {
    case BIT_NOT_EXPR:
      if (truth_valued_ssa_name (name))
        return op1;
      break;
    case EQ_EXPR:
      /* Check if we have X == 0 and X has an integral type.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
        break;
      if (integer_zerop (op2))
        return op1;
      break;
    case NE_EXPR:
      /* Check if we have X != 1 and X is truth-valued.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
        break;
      if (integer_onep (op2) && truth_valued_ssa_name (op1))
        return op1;
      break;
    case BIT_XOR_EXPR:
      /* Check if we have X ^ 1 and X is truth valued.  */
      if (integer_onep (op2) && truth_valued_ssa_name (op1))
        return op1;
      break;
    default:
      break;
    }

  return NULL_TREE;
}

/* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
   operations CODE, if one operand has the logically inverted
   value of the other.  */

static tree
simplify_bitwise_binary_1 (enum tree_code code, tree type,
                           tree arg1, tree arg2)
{
  tree anot;

  /* If CODE isn't a bitwise binary operation, return NULL_TREE.  */
  if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
      && code != BIT_XOR_EXPR)
    return NULL_TREE;

  /* First check if operands ARG1 and ARG2 are equal.  If so
     return NULL_TREE as this optimization is handled by fold_stmt.  */
  if (arg1 == arg2)
    return NULL_TREE;
  /* See if we have in arguments logical-not patterns.  */
  if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
       || anot != arg2)
      && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
          || anot != arg1))
    return NULL_TREE;

  /* X & !X -> 0.  */
  if (code == BIT_AND_EXPR)
    return fold_convert (type, integer_zero_node);
  /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued.  */
  if (truth_valued_ssa_name (anot))
    return fold_convert (type, integer_one_node);

  /* ??? Otherwise result is (X != 0 ? X : 1).  not handled.  */
  return NULL_TREE;
}
1820 /* Given an SSA name NAME, see if it was defined by an assignment and
1821 set CODE to the code of that assignment, ARG1 to its first rhs operand
1822 and ARG2 to the second operand, if any. */
1824 static inline void
1825 defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
1827 gimple def;
1828 enum tree_code code1;
1829 tree arg11;
1830 tree arg21;
1831 tree arg31;
1832 enum gimple_rhs_class grhs_class;
1834 code1 = TREE_CODE (name);
1835 arg11 = name;
1836 arg21 = NULL_TREE;
1837 grhs_class = get_gimple_rhs_class (code1);
1839 if (code1 == SSA_NAME)
1841 def = SSA_NAME_DEF_STMT (name);
1843 if (def && is_gimple_assign (def)
1844 && can_propagate_from (def))
1846 code1 = gimple_assign_rhs_code (def);
1847 arg11 = gimple_assign_rhs1 (def);
1848 arg21 = gimple_assign_rhs2 (def);
1849 arg31 = gimple_assign_rhs3 (def);
1852 else if (grhs_class == GIMPLE_TERNARY_RHS
1853 || grhs_class == GIMPLE_BINARY_RHS
1854 || grhs_class == GIMPLE_UNARY_RHS
1855 || grhs_class == GIMPLE_SINGLE_RHS)
1856 extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);
1858 *code = code1;
1859 *arg1 = arg11;
1860 if (arg2)
1861 *arg2 = arg21;
1862 /* Ignore arg3 currently. */
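/* Usage sketch with hypothetical names: given the definition
     t_1 = a_2 & b_3;
   the call defcodefor_name (t_1, &code, &arg1, &arg2) sets
   code = BIT_AND_EXPR, arg1 = a_2 and arg2 = b_3; for a name with no
   usable defining assignment it simply sets code = SSA_NAME and
   arg1 = the name itself.  */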
1865 /* Return true if a conversion of an operand from type FROM to type TO
1866 should be applied after performing the operation instead. */
1868 static bool
1869 hoist_conversion_for_bitop_p (tree to, tree from)
1871 /* That's a good idea if the conversion widens the operand; after
1872 hoisting it, the operation is carried out in the narrower type. */
1873 if (TYPE_PRECISION (from) < TYPE_PRECISION (to))
1874 return true;
1876 /* It's also a good idea if the conversion is to a non-integer mode. */
1877 if (GET_MODE_CLASS (TYPE_MODE (to)) != MODE_INT)
1878 return true;
1880 /* Or if the precision of TO is not the same as the precision
1881 of its mode. */
1882 if (TYPE_PRECISION (to) != GET_MODE_PRECISION (TYPE_MODE (to)))
1883 return true;
1885 return false;
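/* A sketch of the widening case this enables in the callers below,
   assuming the usual 8-bit unsigned char: for unsigned char c,
     t_1 = (int) c_2;  t_3 = t_1 & 15;
   can become
     t_4 = c_2 & 15;   t_3 = (int) t_4;
   so the bitwise operation is carried out in the narrower type.  */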
1888 /* GSI points to a statement of the form
1890 result = OP0 CODE OP1
1892 Where OP0 and OP1 are single-bit SSA_NAMEs and CODE is either
1893 BIT_AND_EXPR or BIT_IOR_EXPR.
1895 If OP0 is fed by a bitwise negation of another single-bit SSA_NAME,
1896 then we can simplify the two statements into a single LT_EXPR or LE_EXPR
1897 (GT_EXPR/GE_EXPR if signed) when CODE is BIT_AND_EXPR resp. BIT_IOR_EXPR.
1899 If a simplification is made, return TRUE, else return FALSE. */
1900 static bool
1901 simplify_bitwise_binary_boolean (gimple_stmt_iterator *gsi,
1902 enum tree_code code,
1903 tree op0, tree op1)
1905 gimple op0_def_stmt = SSA_NAME_DEF_STMT (op0);
1907 if (!is_gimple_assign (op0_def_stmt)
1908 || (gimple_assign_rhs_code (op0_def_stmt) != BIT_NOT_EXPR))
1909 return false;
1911 tree x = gimple_assign_rhs1 (op0_def_stmt);
1912 if (TREE_CODE (x) == SSA_NAME
1913 && INTEGRAL_TYPE_P (TREE_TYPE (x))
1914 && TYPE_PRECISION (TREE_TYPE (x)) == 1
1915 && TYPE_UNSIGNED (TREE_TYPE (x)) == TYPE_UNSIGNED (TREE_TYPE (op1)))
1917 enum tree_code newcode;
1919 gimple stmt = gsi_stmt (*gsi);
1920 gimple_assign_set_rhs1 (stmt, x);
1921 gimple_assign_set_rhs2 (stmt, op1);
1922 if (code == BIT_AND_EXPR)
1923 newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LT_EXPR : GT_EXPR;
1924 else
1925 newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LE_EXPR : GE_EXPR;
1926 gimple_assign_set_rhs_code (stmt, newcode);
1927 update_stmt (stmt);
1928 return true;
1930 return false;
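/* Truth-table sanity check of the BIT_AND_EXPR case, assuming 1-bit
   unsigned X and Y: ~X & Y is 1 iff X == 0 and Y == 1, i.e. iff
   X < Y, and likewise ~X | Y is 1 iff X <= Y. The signed variants
   use GT_EXPR/GE_EXPR because signed 1-bit values are 0 and -1.  */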
1934 /* Simplify bitwise binary operations.
1935 Return true if a transformation was applied, otherwise return false. */
1937 static bool
1938 simplify_bitwise_binary (gimple_stmt_iterator *gsi)
1940 gimple stmt = gsi_stmt (*gsi);
1941 tree arg1 = gimple_assign_rhs1 (stmt);
1942 tree arg2 = gimple_assign_rhs2 (stmt);
1943 enum tree_code code = gimple_assign_rhs_code (stmt);
1944 tree res;
1945 tree def1_arg1, def1_arg2, def2_arg1, def2_arg2;
1946 enum tree_code def1_code, def2_code;
1948 defcodefor_name (arg1, &def1_code, &def1_arg1, &def1_arg2);
1949 defcodefor_name (arg2, &def2_code, &def2_arg1, &def2_arg2);
1951 /* Try to fold (type) X op CST -> (type) (X op ((type of X) CST))
1952 when profitable. */
1953 if (TREE_CODE (arg2) == INTEGER_CST
1954 && CONVERT_EXPR_CODE_P (def1_code)
1955 && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1))
1956 && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
1957 && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
1959 gimple newop;
1960 tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
1961 newop =
1962 gimple_build_assign_with_ops (code, tem, def1_arg1,
1963 fold_convert_loc (gimple_location (stmt),
1964 TREE_TYPE (def1_arg1),
1965 arg2));
1966 gimple_set_location (newop, gimple_location (stmt));
1967 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1968 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1969 tem, NULL_TREE, NULL_TREE);
1970 update_stmt (gsi_stmt (*gsi));
1971 return true;
1974 /* For bitwise binary operations apply operand conversions to the
1975 binary operation result instead of to the operands. This allows
1976 combining successive conversions and bitwise binary operations. */
1977 if (CONVERT_EXPR_CODE_P (def1_code)
1978 && CONVERT_EXPR_CODE_P (def2_code)
1979 && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
1980 && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1)))
1982 gimple newop;
1983 tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
1984 newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
1985 gimple_set_location (newop, gimple_location (stmt));
1986 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1987 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1988 tem, NULL_TREE, NULL_TREE);
1989 update_stmt (gsi_stmt (*gsi));
1990 return true;
1994 /* Simplify (A & B) OP0 (C & B) to (A OP0 C) & B. */
1995 if (def1_code == def2_code
1996 && def1_code == BIT_AND_EXPR
1997 && operand_equal_for_phi_arg_p (def1_arg2,
1998 def2_arg2))
2000 tree b = def1_arg2;
2001 tree a = def1_arg1;
2002 tree c = def2_arg1;
2003 tree inner = fold_build2 (code, TREE_TYPE (arg2), a, c);
2004 /* If A OP0 C (this usually means C is the same as A) is 0
2005 then the whole statement folds to the constant 0. */
2006 if (integer_zerop (inner))
2008 gimple_assign_set_rhs_from_tree (gsi, inner);
2009 update_stmt (stmt);
2010 return true;
2012 /* If A OP0 C (this usually means C is the same as A) is an SSA_NAME
2013 then build the resulting (A OP0 C) & B expression directly. */
2014 else if (TREE_CODE (inner) == SSA_NAME)
2016 tree outer = fold_build2 (def1_code, TREE_TYPE (inner),
2017 inner, b);
2018 gimple_assign_set_rhs_from_tree (gsi, outer);
2019 update_stmt (stmt);
2020 return true;
2022 else
2024 gimple newop;
2025 tree tem;
2026 tem = make_ssa_name (TREE_TYPE (arg2), NULL);
2027 newop = gimple_build_assign_with_ops (code, tem, a, c);
2028 gimple_set_location (newop, gimple_location (stmt));
2029 /* Make sure to re-process the new stmt as the pass walks upwards. */
2030 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
2031 gimple_assign_set_rhs1 (stmt, tem);
2032 gimple_assign_set_rhs2 (stmt, b);
2033 gimple_assign_set_rhs_code (stmt, def1_code);
2034 update_stmt (stmt);
2035 return true;
2039 /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2). */
2040 if (code == BIT_AND_EXPR
2041 && def1_code == BIT_IOR_EXPR
2042 && CONSTANT_CLASS_P (arg2)
2043 && CONSTANT_CLASS_P (def1_arg2))
2045 tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
2046 arg2, def1_arg2);
2047 tree tem;
2048 gimple newop;
2049 if (integer_zerop (cst))
2051 gimple_assign_set_rhs1 (stmt, def1_arg1);
2052 update_stmt (stmt);
2053 return true;
2055 tem = make_ssa_name (TREE_TYPE (arg2), NULL);
2056 newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
2057 tem, def1_arg1, arg2);
2058 gimple_set_location (newop, gimple_location (stmt));
2059 /* Make sure to re-process the new stmt as the pass walks upwards. */
2060 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
2061 gimple_assign_set_rhs1 (stmt, tem);
2062 gimple_assign_set_rhs2 (stmt, cst);
2063 gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
2064 update_stmt (stmt);
2065 return true;
2068 /* Combine successive equal operations with constants. */
2069 if ((code == BIT_AND_EXPR
2070 || code == BIT_IOR_EXPR
2071 || code == BIT_XOR_EXPR)
2072 && def1_code == code
2073 && CONSTANT_CLASS_P (arg2)
2074 && CONSTANT_CLASS_P (def1_arg2))
2076 tree cst = fold_build2 (code, TREE_TYPE (arg2),
2077 arg2, def1_arg2);
2078 gimple_assign_set_rhs1 (stmt, def1_arg1);
2079 gimple_assign_set_rhs2 (stmt, cst);
2080 update_stmt (stmt);
2081 return true;
2084 /* Canonicalize X ^ ~0 to ~X. */
2085 if (code == BIT_XOR_EXPR
2086 && integer_all_onesp (arg2))
2088 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
2089 gcc_assert (gsi_stmt (*gsi) == stmt);
2090 update_stmt (stmt);
2091 return true;
2094 /* Try simple folding for X op !X, and X op X. */
2095 res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
2096 if (res != NULL_TREE)
2098 gimple_assign_set_rhs_from_tree (gsi, res);
2099 update_stmt (gsi_stmt (*gsi));
2100 return true;
2103 if (code == BIT_AND_EXPR || code == BIT_IOR_EXPR)
2105 enum tree_code ocode = code == BIT_AND_EXPR ? BIT_IOR_EXPR : BIT_AND_EXPR;
2106 if (def1_code == ocode)
2108 tree x = arg2;
2109 enum tree_code coden;
2110 tree a1, a2;
2111 /* ( X | Y) & X -> X */
2112 /* ( X & Y) | X -> X */
2113 if (x == def1_arg1
2114 || x == def1_arg2)
2116 gimple_assign_set_rhs_from_tree (gsi, x);
2117 update_stmt (gsi_stmt (*gsi));
2118 return true;
2121 defcodefor_name (def1_arg1, &coden, &a1, &a2);
2122 /* (~X | Y) & X -> X & Y */
2123 /* (~X & Y) | X -> X | Y */
2124 if (coden == BIT_NOT_EXPR && a1 == x)
2126 gimple_assign_set_rhs_with_ops (gsi, code,
2127 x, def1_arg2);
2128 gcc_assert (gsi_stmt (*gsi) == stmt);
2129 update_stmt (stmt);
2130 return true;
2132 defcodefor_name (def1_arg2, &coden, &a1, &a2);
2133 /* (Y | ~X) & X -> X & Y */
2134 /* (Y & ~X) | X -> X | Y */
2135 if (coden == BIT_NOT_EXPR && a1 == x)
2137 gimple_assign_set_rhs_with_ops (gsi, code,
2138 x, def1_arg1);
2139 gcc_assert (gsi_stmt (*gsi) == stmt);
2140 update_stmt (stmt);
2141 return true;
2144 if (def2_code == ocode)
2146 enum tree_code coden;
2147 tree a1;
2148 tree x = arg1;
2149 /* X & ( X | Y) -> X */
2150 /* X | ( X & Y) -> X */
2151 if (x == def2_arg1
2152 || x == def2_arg2)
2154 gimple_assign_set_rhs_from_tree (gsi, x);
2155 update_stmt (gsi_stmt (*gsi));
2156 return true;
2158 defcodefor_name (def2_arg1, &coden, &a1, NULL);
2159 /* (~X | Y) & X -> X & Y */
2160 /* (~X & Y) | X -> X | Y */
2161 if (coden == BIT_NOT_EXPR && a1 == x)
2163 gimple_assign_set_rhs_with_ops (gsi, code,
2164 x, def2_arg2);
2165 gcc_assert (gsi_stmt (*gsi) == stmt);
2166 update_stmt (stmt);
2167 return true;
2169 defcodefor_name (def2_arg2, &coden, &a1, NULL);
2170 /* (Y | ~X) & X -> X & Y */
2171 /* (Y & ~X) | X -> X | Y */
2172 if (coden == BIT_NOT_EXPR && a1 == x)
2174 gimple_assign_set_rhs_with_ops (gsi, code,
2175 x, def2_arg1);
2176 gcc_assert (gsi_stmt (*gsi) == stmt);
2177 update_stmt (stmt);
2178 return true;
2182 /* If arg1 and arg2 are booleans (or any single-bit type)
2183 then try to simplify:
2185 (~X & Y) -> X < Y
2186 (X & ~Y) -> Y < X
2187 (~X | Y) -> X <= Y
2188 (X | ~Y) -> Y <= X
2190 But only do this if our result feeds into a comparison as
2191 this transformation is not always a win, particularly on
2192 targets with and-not instructions. */
2193 if (TREE_CODE (arg1) == SSA_NAME
2194 && TREE_CODE (arg2) == SSA_NAME
2195 && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
2196 && TYPE_PRECISION (TREE_TYPE (arg1)) == 1
2197 && TYPE_PRECISION (TREE_TYPE (arg2)) == 1
2198 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2199 == TYPE_UNSIGNED (TREE_TYPE (arg2))))
2201 use_operand_p use_p;
2202 gimple use_stmt;
2204 if (single_imm_use (gimple_assign_lhs (stmt), &use_p, &use_stmt))
2206 if (gimple_code (use_stmt) == GIMPLE_COND
2207 && gimple_cond_lhs (use_stmt) == gimple_assign_lhs (stmt)
2208 && integer_zerop (gimple_cond_rhs (use_stmt))
2209 && gimple_cond_code (use_stmt) == NE_EXPR)
2211 if (simplify_bitwise_binary_boolean (gsi, code, arg1, arg2))
2212 return true;
2213 if (simplify_bitwise_binary_boolean (gsi, code, arg2, arg1))
2214 return true;
2219 return false;
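/* A worked instance of the (a | CST1) & CST2 rule above, with
   hypothetical constants:
     t_1 = a_2 | 0xf0;  t_3 = t_1 & 0x3c;
   becomes
     t_4 = a_2 & 0x3c;  t_3 = t_4 | 0x30;
   since 0xf0 & 0x3c == 0x30.  */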
2223 /* Recognize rotation patterns. Return true if a transformation
2224 was applied, otherwise return false.
2226 We are looking for X with unsigned type T with bitsize B, OP being
2227 +, | or ^, some type T2 wider than T and
2228 (X << CNT1) OP (X >> CNT2) iff CNT1 + CNT2 == B
2229 ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2)) iff CNT1 + CNT2 == B
2230 (X << Y) OP (X >> (B - Y))
2231 (X << (int) Y) OP (X >> (int) (B - Y))
2232 ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
2233 ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
2234 (X << Y) | (X >> ((-Y) & (B - 1)))
2235 (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
2236 ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
2237 ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
2239 and transform these into:
2240 X r<< CNT1
2241 X r<< Y
2243 Note, in the patterns with the T2 type, the type of the OP operands
2244 may even be a signed type, but it should have precision B. */
2246 static bool
2247 simplify_rotate (gimple_stmt_iterator *gsi)
2249 gimple stmt = gsi_stmt (*gsi);
2250 tree arg[2], rtype, rotcnt = NULL_TREE;
2251 tree def_arg1[2], def_arg2[2];
2252 enum tree_code def_code[2];
2253 tree lhs;
2254 int i;
2255 bool swapped_p = false;
2256 gimple g;
2258 arg[0] = gimple_assign_rhs1 (stmt);
2259 arg[1] = gimple_assign_rhs2 (stmt);
2260 rtype = TREE_TYPE (arg[0]);
2262 /* Only create rotates in complete modes. Other cases are not
2263 expanded properly. */
2264 if (!INTEGRAL_TYPE_P (rtype)
2265 || TYPE_PRECISION (rtype) != GET_MODE_PRECISION (TYPE_MODE (rtype)))
2266 return false;
2268 for (i = 0; i < 2; i++)
2269 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
2271 /* Look through narrowing conversions. */
2272 if (CONVERT_EXPR_CODE_P (def_code[0])
2273 && CONVERT_EXPR_CODE_P (def_code[1])
2274 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
2275 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
2276 && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
2277 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
2278 && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
2279 && has_single_use (arg[0])
2280 && has_single_use (arg[1]))
2282 for (i = 0; i < 2; i++)
2284 arg[i] = def_arg1[i];
2285 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
2289 /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR. */
2290 for (i = 0; i < 2; i++)
2291 if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
2292 return false;
2293 else if (!has_single_use (arg[i]))
2294 return false;
2295 if (def_code[0] == def_code[1])
2296 return false;
2298 /* If we've looked through narrowing conversions before, look through
2299 widening conversions from an unsigned type with the same precision
2300 as rtype here. */
2301 if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
2302 for (i = 0; i < 2; i++)
2304 tree tem;
2305 enum tree_code code;
2306 defcodefor_name (def_arg1[i], &code, &tem, NULL);
2307 if (!CONVERT_EXPR_CODE_P (code)
2308 || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
2309 || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
2310 return false;
2311 def_arg1[i] = tem;
2313 /* Both shifts have to use the same first operand. */
2314 if (TREE_CODE (def_arg1[0]) != SSA_NAME || def_arg1[0] != def_arg1[1])
2315 return false;
2316 if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
2317 return false;
2319 /* CNT1 + CNT2 == B case above. */
2320 if (tree_fits_uhwi_p (def_arg2[0])
2321 && tree_fits_uhwi_p (def_arg2[1])
2322 && tree_to_uhwi (def_arg2[0])
2323 + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
2324 rotcnt = def_arg2[0];
2325 else if (TREE_CODE (def_arg2[0]) != SSA_NAME
2326 || TREE_CODE (def_arg2[1]) != SSA_NAME)
2327 return false;
2328 else
2330 tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
2331 enum tree_code cdef_code[2];
2332 /* Look through conversions of the shift count argument.
2333 The C/C++ front ends cast any shift count argument to integer_type_node.
2334 The only problem might be if the shift count type's maximum value
2335 is equal to or smaller than the number of bits in rtype. */
2336 for (i = 0; i < 2; i++)
2338 def_arg2_alt[i] = def_arg2[i];
2339 defcodefor_name (def_arg2[i], &cdef_code[i],
2340 &cdef_arg1[i], &cdef_arg2[i]);
2341 if (CONVERT_EXPR_CODE_P (cdef_code[i])
2342 && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
2343 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
2344 > floor_log2 (TYPE_PRECISION (rtype))
2345 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
2346 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (cdef_arg1[i]))))
2348 def_arg2_alt[i] = cdef_arg1[i];
2349 defcodefor_name (def_arg2_alt[i], &cdef_code[i],
2350 &cdef_arg1[i], &cdef_arg2[i]);
2353 for (i = 0; i < 2; i++)
2354 /* Check for one shift count being Y and the other B - Y,
2355 with optional casts. */
2356 if (cdef_code[i] == MINUS_EXPR
2357 && tree_fits_shwi_p (cdef_arg1[i])
2358 && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
2359 && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
2361 tree tem;
2362 enum tree_code code;
2364 if (cdef_arg2[i] == def_arg2[1 - i]
2365 || cdef_arg2[i] == def_arg2_alt[1 - i])
2367 rotcnt = cdef_arg2[i];
2368 break;
2370 defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
2371 if (CONVERT_EXPR_CODE_P (code)
2372 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2373 && TYPE_PRECISION (TREE_TYPE (tem))
2374 > floor_log2 (TYPE_PRECISION (rtype))
2375 && TYPE_PRECISION (TREE_TYPE (tem))
2376 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
2377 && (tem == def_arg2[1 - i]
2378 || tem == def_arg2_alt[1 - i]))
2380 rotcnt = tem;
2381 break;
2384 /* The above sequence isn't safe when Y is 0,
2385 because then one of the shifts triggers undefined behavior.
2386 This alternative is safe even for a rotation count of 0.
2387 One shift count is Y and the other is (-Y) & (B - 1). */
2388 else if (cdef_code[i] == BIT_AND_EXPR
2389 && tree_fits_shwi_p (cdef_arg2[i])
2390 && tree_to_shwi (cdef_arg2[i])
2391 == TYPE_PRECISION (rtype) - 1
2392 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
2393 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
2395 tree tem;
2396 enum tree_code code;
2398 defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
2399 if (CONVERT_EXPR_CODE_P (code)
2400 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2401 && TYPE_PRECISION (TREE_TYPE (tem))
2402 > floor_log2 (TYPE_PRECISION (rtype))
2403 && TYPE_PRECISION (TREE_TYPE (tem))
2404 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem))))
2405 defcodefor_name (tem, &code, &tem, NULL);
2407 if (code == NEGATE_EXPR)
2409 if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
2411 rotcnt = tem;
2412 break;
2414 defcodefor_name (tem, &code, &tem, NULL);
2415 if (CONVERT_EXPR_CODE_P (code)
2416 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2417 && TYPE_PRECISION (TREE_TYPE (tem))
2418 > floor_log2 (TYPE_PRECISION (rtype))
2419 && TYPE_PRECISION (TREE_TYPE (tem))
2420 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
2421 && (tem == def_arg2[1 - i]
2422 || tem == def_arg2_alt[1 - i]))
2424 rotcnt = tem;
2425 break;
2429 if (rotcnt == NULL_TREE)
2430 return false;
2431 swapped_p = i != 1;
2434 if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
2435 TREE_TYPE (rotcnt)))
2437 g = gimple_build_assign_with_ops (NOP_EXPR,
2438 make_ssa_name (TREE_TYPE (def_arg2[0]),
2439 NULL),
2440 rotcnt, NULL_TREE);
2441 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2442 rotcnt = gimple_assign_lhs (g);
2444 lhs = gimple_assign_lhs (stmt);
2445 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
2446 lhs = make_ssa_name (TREE_TYPE (def_arg1[0]), NULL);
2447 g = gimple_build_assign_with_ops (((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
2448 ? LROTATE_EXPR : RROTATE_EXPR,
2449 lhs, def_arg1[0], rotcnt);
2450 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
2452 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2453 g = gimple_build_assign_with_ops (NOP_EXPR, gimple_assign_lhs (stmt),
2454 lhs, NULL_TREE);
2456 gsi_replace (gsi, g, false);
2457 return true;
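/* For reference, a C-level source of the zero-safe variant recognized
   above (a sketch assuming a 32-bit unsigned int):

     unsigned int
     rotl32 (unsigned int x, unsigned int y)
     {
       return (x << y) | (x >> ((-y) & 31));
     }

   which this function collapses into a single X r<< Y statement.  */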
2460 /* Perform re-associations of the plus or minus statement at *GSI that
2461 are always permitted. Returns true if the CFG was changed. */
2463 static bool
2464 associate_plusminus (gimple_stmt_iterator *gsi)
2466 gimple stmt = gsi_stmt (*gsi);
2467 tree rhs1 = gimple_assign_rhs1 (stmt);
2468 tree rhs2 = gimple_assign_rhs2 (stmt);
2469 enum tree_code code = gimple_assign_rhs_code (stmt);
2470 bool changed;
2472 /* We can't reassociate at all for saturating types. */
2473 if (TYPE_SATURATING (TREE_TYPE (rhs1)))
2474 return false;
2476 /* First contract negates. */
2479 changed = false;
2481 /* A +- (-B) -> A -+ B. */
2482 if (TREE_CODE (rhs2) == SSA_NAME)
2484 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2485 if (is_gimple_assign (def_stmt)
2486 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2487 && can_propagate_from (def_stmt))
2489 code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
2490 gimple_assign_set_rhs_code (stmt, code);
2491 rhs2 = gimple_assign_rhs1 (def_stmt);
2492 gimple_assign_set_rhs2 (stmt, rhs2);
2493 gimple_set_modified (stmt, true);
2494 changed = true;
2498 /* (-A) + B -> B - A. */
2499 if (TREE_CODE (rhs1) == SSA_NAME
2500 && code == PLUS_EXPR)
2502 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
2503 if (is_gimple_assign (def_stmt)
2504 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2505 && can_propagate_from (def_stmt))
2507 code = MINUS_EXPR;
2508 gimple_assign_set_rhs_code (stmt, code);
2509 rhs1 = rhs2;
2510 gimple_assign_set_rhs1 (stmt, rhs1);
2511 rhs2 = gimple_assign_rhs1 (def_stmt);
2512 gimple_assign_set_rhs2 (stmt, rhs2);
2513 gimple_set_modified (stmt, true);
2514 changed = true;
2518 while (changed);
2520 /* We can't reassociate floating-point or fixed-point plus or minus
2521 because of saturation to +-Inf. */
2522 if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
2523 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
2524 goto out;
2526 /* Second match patterns that allow contracting a plus-minus pair
2527 irrespective of overflow issues.
2529 (A +- B) - A -> +- B
2530 (A +- B) -+ B -> A
2531 (CST +- A) +- CST -> CST +- A
2532 (A +- CST) +- CST -> A +- CST
2533 ~A + A -> -1
2534 ~A + 1 -> -A
2535 A - (A +- B) -> -+ B
2536 A +- (B +- A) -> +- B
2537 CST +- (CST +- A) -> CST +- A
2538 CST +- (A +- CST) -> CST +- A
2539 A + ~A -> -1
2541 via commuting the addition and contracting operations to zero
2542 by reassociation. */
2544 if (TREE_CODE (rhs1) == SSA_NAME)
2546 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
2547 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2549 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2550 if (def_code == PLUS_EXPR
2551 || def_code == MINUS_EXPR)
2553 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2554 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2555 if (operand_equal_p (def_rhs1, rhs2, 0)
2556 && code == MINUS_EXPR)
2558 /* (A +- B) - A -> +- B. */
2559 code = ((def_code == PLUS_EXPR)
2560 ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
2561 rhs1 = def_rhs2;
2562 rhs2 = NULL_TREE;
2563 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2564 gcc_assert (gsi_stmt (*gsi) == stmt);
2565 gimple_set_modified (stmt, true);
2567 else if (operand_equal_p (def_rhs2, rhs2, 0)
2568 && code != def_code)
2570 /* (A +- B) -+ B -> A. */
2571 code = TREE_CODE (def_rhs1);
2572 rhs1 = def_rhs1;
2573 rhs2 = NULL_TREE;
2574 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2575 gcc_assert (gsi_stmt (*gsi) == stmt);
2576 gimple_set_modified (stmt, true);
2578 else if (CONSTANT_CLASS_P (rhs2)
2579 && CONSTANT_CLASS_P (def_rhs1))
2581 /* (CST +- A) +- CST -> CST +- A. */
2582 tree cst = fold_binary (code, TREE_TYPE (rhs1),
2583 def_rhs1, rhs2);
2584 if (cst && !TREE_OVERFLOW (cst))
2586 code = def_code;
2587 gimple_assign_set_rhs_code (stmt, code);
2588 rhs1 = cst;
2589 gimple_assign_set_rhs1 (stmt, rhs1);
2590 rhs2 = def_rhs2;
2591 gimple_assign_set_rhs2 (stmt, rhs2);
2592 gimple_set_modified (stmt, true);
2595 else if (CONSTANT_CLASS_P (rhs2)
2596 && CONSTANT_CLASS_P (def_rhs2))
2598 /* (A +- CST) +- CST -> A +- CST. */
2599 enum tree_code mix = (code == def_code)
2600 ? PLUS_EXPR : MINUS_EXPR;
2601 tree cst = fold_binary (mix, TREE_TYPE (rhs1),
2602 def_rhs2, rhs2);
2603 if (cst && !TREE_OVERFLOW (cst))
2605 code = def_code;
2606 gimple_assign_set_rhs_code (stmt, code);
2607 rhs1 = def_rhs1;
2608 gimple_assign_set_rhs1 (stmt, rhs1);
2609 rhs2 = cst;
2610 gimple_assign_set_rhs2 (stmt, rhs2);
2611 gimple_set_modified (stmt, true);
2615 else if (def_code == BIT_NOT_EXPR && code == PLUS_EXPR)
2617 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2618 if (operand_equal_p (def_rhs1, rhs2, 0))
2620 /* ~A + A -> -1. */
2621 rhs1 = build_all_ones_cst (TREE_TYPE (rhs2));
2622 rhs2 = NULL_TREE;
2623 code = TREE_CODE (rhs1);
2624 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2625 gcc_assert (gsi_stmt (*gsi) == stmt);
2626 gimple_set_modified (stmt, true);
2628 else if ((TREE_CODE (TREE_TYPE (rhs2)) != COMPLEX_TYPE
2629 && integer_onep (rhs2))
2630 || (TREE_CODE (rhs2) == COMPLEX_CST
2631 && integer_onep (TREE_REALPART (rhs2))
2632 && integer_onep (TREE_IMAGPART (rhs2))))
2634 /* ~A + 1 -> -A. */
2635 code = NEGATE_EXPR;
2636 rhs1 = def_rhs1;
2637 rhs2 = NULL_TREE;
2638 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2639 gcc_assert (gsi_stmt (*gsi) == stmt);
2640 gimple_set_modified (stmt, true);
2646 if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
2648 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2649 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2651 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2652 if (def_code == PLUS_EXPR
2653 || def_code == MINUS_EXPR)
2655 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2656 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2657 if (operand_equal_p (def_rhs1, rhs1, 0)
2658 && code == MINUS_EXPR)
2660 /* A - (A +- B) -> -+ B. */
2661 code = ((def_code == PLUS_EXPR)
2662 ? NEGATE_EXPR : TREE_CODE (def_rhs2));
2663 rhs1 = def_rhs2;
2664 rhs2 = NULL_TREE;
2665 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2666 gcc_assert (gsi_stmt (*gsi) == stmt);
2667 gimple_set_modified (stmt, true);
2669 else if (operand_equal_p (def_rhs2, rhs1, 0)
2670 && code != def_code)
2672 /* A +- (B +- A) -> +- B. */
2673 code = ((code == PLUS_EXPR)
2674 ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
2675 rhs1 = def_rhs1;
2676 rhs2 = NULL_TREE;
2677 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2678 gcc_assert (gsi_stmt (*gsi) == stmt);
2679 gimple_set_modified (stmt, true);
2681 else if (CONSTANT_CLASS_P (rhs1)
2682 && CONSTANT_CLASS_P (def_rhs1))
2684 /* CST +- (CST +- A) -> CST +- A. */
2685 tree cst = fold_binary (code, TREE_TYPE (rhs2),
2686 rhs1, def_rhs1);
2687 if (cst && !TREE_OVERFLOW (cst))
2689 code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
2690 gimple_assign_set_rhs_code (stmt, code);
2691 rhs1 = cst;
2692 gimple_assign_set_rhs1 (stmt, rhs1);
2693 rhs2 = def_rhs2;
2694 gimple_assign_set_rhs2 (stmt, rhs2);
2695 gimple_set_modified (stmt, true);
2698 else if (CONSTANT_CLASS_P (rhs1)
2699 && CONSTANT_CLASS_P (def_rhs2))
2701 /* CST +- (A +- CST) -> CST +- A. */
2702 tree cst = fold_binary (def_code == code
2703 ? PLUS_EXPR : MINUS_EXPR,
2704 TREE_TYPE (rhs2),
2705 rhs1, def_rhs2);
2706 if (cst && !TREE_OVERFLOW (cst))
2708 rhs1 = cst;
2709 gimple_assign_set_rhs1 (stmt, rhs1);
2710 rhs2 = def_rhs1;
2711 gimple_assign_set_rhs2 (stmt, rhs2);
2712 gimple_set_modified (stmt, true);
2716 else if (def_code == BIT_NOT_EXPR)
2718 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2719 if (code == PLUS_EXPR
2720 && operand_equal_p (def_rhs1, rhs1, 0))
2722 /* A + ~A -> -1. */
2723 rhs1 = build_all_ones_cst (TREE_TYPE (rhs1));
2724 rhs2 = NULL_TREE;
2725 code = TREE_CODE (rhs1);
2726 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2727 gcc_assert (gsi_stmt (*gsi) == stmt);
2728 gimple_set_modified (stmt, true);
2734 out:
2735 if (gimple_modified_p (stmt))
2737 fold_stmt_inplace (gsi);
2738 update_stmt (stmt);
2739 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
2740 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2741 return true;
2744 return false;
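/* Two small instances of the patterns listed above (sketches with
   hypothetical names):
     t_1 = a_2 + b_3;  t_4 = t_1 - a_2;   becomes   t_4 = b_3;
     t_1 = ~a_2;       t_4 = t_1 + 1;     becomes   t_4 = -a_2;
   both listed above as valid irrespective of overflow issues.  */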
2747 /* Associate operands of a POINTER_PLUS_EXPR assignment at *GSI. Returns
2748 true if anything changed, false otherwise. */
2750 static bool
2751 associate_pointerplus (gimple_stmt_iterator *gsi)
2753 gimple stmt = gsi_stmt (*gsi);
2754 gimple def_stmt;
2755 tree ptr, rhs, algn;
2757 /* Pattern match
2758 tem = (sizetype) ptr;
2759 tem = tem & algn;
2760 tem = -tem;
2761 ... = ptr p+ tem;
2762 and produce the simpler form, easier to analyze with respect to alignment,
2763 ... = ptr & ~algn; */
2764 ptr = gimple_assign_rhs1 (stmt);
2765 rhs = gimple_assign_rhs2 (stmt);
2766 if (TREE_CODE (rhs) != SSA_NAME)
2767 return false;
2768 def_stmt = SSA_NAME_DEF_STMT (rhs);
2769 if (!is_gimple_assign (def_stmt)
2770 || gimple_assign_rhs_code (def_stmt) != NEGATE_EXPR)
2771 return false;
2772 rhs = gimple_assign_rhs1 (def_stmt);
2773 if (TREE_CODE (rhs) != SSA_NAME)
2774 return false;
2775 def_stmt = SSA_NAME_DEF_STMT (rhs);
2776 if (!is_gimple_assign (def_stmt)
2777 || gimple_assign_rhs_code (def_stmt) != BIT_AND_EXPR)
2778 return false;
2779 rhs = gimple_assign_rhs1 (def_stmt);
2780 algn = gimple_assign_rhs2 (def_stmt);
2781 if (TREE_CODE (rhs) != SSA_NAME
2782 || TREE_CODE (algn) != INTEGER_CST)
2783 return false;
2784 def_stmt = SSA_NAME_DEF_STMT (rhs);
2785 if (!is_gimple_assign (def_stmt)
2786 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
2787 return false;
2788 if (gimple_assign_rhs1 (def_stmt) != ptr)
2789 return false;
2791 algn = double_int_to_tree (TREE_TYPE (ptr), ~tree_to_double_int (algn));
2792 gimple_assign_set_rhs_with_ops (gsi, BIT_AND_EXPR, ptr, algn);
2793 fold_stmt_inplace (gsi);
2794 update_stmt (stmt);
2796 return true;
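/* In C source the matched sequence can come from rounding a pointer
   down to an alignment boundary (a sketch, with mask being align - 1
   for a power-of-two align):
     p = q - ((uintptr_t) q & mask);
   which after the rewrite is the single statement q & ~mask, making
   the alignment of the result obvious to later analyses.  */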
2799 /* Combine two conversions in a row for the second conversion at *GSI.
2800 Returns 1 if there were any changes made, 2 if cfg-cleanup needs to
2801 run. Else it returns 0. */
2803 static int
2804 combine_conversions (gimple_stmt_iterator *gsi)
2806 gimple stmt = gsi_stmt (*gsi);
2807 gimple def_stmt;
2808 tree op0, lhs;
2809 enum tree_code code = gimple_assign_rhs_code (stmt);
2810 enum tree_code code2;
2812 gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
2813 || code == FLOAT_EXPR
2814 || code == FIX_TRUNC_EXPR);
2816 lhs = gimple_assign_lhs (stmt);
2817 op0 = gimple_assign_rhs1 (stmt);
2818 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
2820 gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
2821 return 1;
2824 if (TREE_CODE (op0) != SSA_NAME)
2825 return 0;
2827 def_stmt = SSA_NAME_DEF_STMT (op0);
2828 if (!is_gimple_assign (def_stmt))
2829 return 0;
2831 code2 = gimple_assign_rhs_code (def_stmt);
2833 if (CONVERT_EXPR_CODE_P (code2) || code2 == FLOAT_EXPR)
2835 tree defop0 = gimple_assign_rhs1 (def_stmt);
2836 tree type = TREE_TYPE (lhs);
2837 tree inside_type = TREE_TYPE (defop0);
2838 tree inter_type = TREE_TYPE (op0);
2839 int inside_int = INTEGRAL_TYPE_P (inside_type);
2840 int inside_ptr = POINTER_TYPE_P (inside_type);
2841 int inside_float = FLOAT_TYPE_P (inside_type);
2842 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
2843 unsigned int inside_prec = TYPE_PRECISION (inside_type);
2844 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
2845 int inter_int = INTEGRAL_TYPE_P (inter_type);
2846 int inter_ptr = POINTER_TYPE_P (inter_type);
2847 int inter_float = FLOAT_TYPE_P (inter_type);
2848 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
2849 unsigned int inter_prec = TYPE_PRECISION (inter_type);
2850 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
2851 int final_int = INTEGRAL_TYPE_P (type);
2852 int final_ptr = POINTER_TYPE_P (type);
2853 int final_float = FLOAT_TYPE_P (type);
2854 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
2855 unsigned int final_prec = TYPE_PRECISION (type);
2856 int final_unsignedp = TYPE_UNSIGNED (type);
2858 /* Don't propagate ssa names that occur in abnormal phis. */
2859 if (TREE_CODE (defop0) == SSA_NAME
2860 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defop0))
2861 return 0;
2863 /* In addition to the cases of two conversions in a row
2864 handled below, if we are converting something to its own
2865 type via an object of identical or wider precision, neither
2866 conversion is needed. */
2867 if (useless_type_conversion_p (type, inside_type)
2868 && (((inter_int || inter_ptr) && final_int)
2869 || (inter_float && final_float))
2870 && inter_prec >= final_prec)
2872 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2873 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2874 update_stmt (stmt);
2875 return remove_prop_source_from_use (op0) ? 2 : 1;
2878 /* Likewise, if the intermediate and initial types are either both
2879 float or both integer, we don't need the middle conversion if the
2880 former is wider than the latter and doesn't change the signedness
2881 (for integers). Avoid this if the final type is a pointer since
2882 then we sometimes need the middle conversion. Likewise if the
2883 final type has a precision not equal to the size of its mode. */
2884 if (((inter_int && inside_int)
2885 || (inter_float && inside_float)
2886 || (inter_vec && inside_vec))
2887 && inter_prec >= inside_prec
2888 && (inter_float || inter_vec
2889 || inter_unsignedp == inside_unsignedp)
2890 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
2891 && TYPE_MODE (type) == TYPE_MODE (inter_type))
2892 && ! final_ptr
2893 && (! final_vec || inter_prec == inside_prec))
2895 gimple_assign_set_rhs1 (stmt, defop0);
2896 update_stmt (stmt);
2897 return remove_prop_source_from_use (op0) ? 2 : 1;
2900 /* If we have a sign-extension of a zero-extended value, we can
2901 replace that by a single zero-extension. Likewise if the
2902 final conversion does not change precision we can drop the
2903 intermediate conversion. */
2904 if (inside_int && inter_int && final_int
2905 && ((inside_prec < inter_prec && inter_prec < final_prec
2906 && inside_unsignedp && !inter_unsignedp)
2907 || final_prec == inter_prec))
2909 gimple_assign_set_rhs1 (stmt, defop0);
2910 update_stmt (stmt);
2911 return remove_prop_source_from_use (op0) ? 2 : 1;
2914 /* Two conversions in a row are not needed unless:
2915 - some conversion is floating-point (overstrict for now), or
2916 - some conversion is a vector (overstrict for now), or
2917 - the intermediate type is narrower than both initial and
2918 final, or
2919 - the intermediate type and innermost type differ in signedness,
2920 and the outermost type is wider than the intermediate, or
2921 - the initial type is a pointer type and the precisions of the
2922 intermediate and final types differ, or
2923 - the final type is a pointer type and the precisions of the
2924 initial and intermediate types differ. */
2925 if (! inside_float && ! inter_float && ! final_float
2926 && ! inside_vec && ! inter_vec && ! final_vec
2927 && (inter_prec >= inside_prec || inter_prec >= final_prec)
2928 && ! (inside_int && inter_int
2929 && inter_unsignedp != inside_unsignedp
2930 && inter_prec < final_prec)
2931 && ((inter_unsignedp && inter_prec > inside_prec)
2932 == (final_unsignedp && final_prec > inter_prec))
2933 && ! (inside_ptr && inter_prec != final_prec)
2934 && ! (final_ptr && inside_prec != inter_prec)
2935 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
2936 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
2938 gimple_assign_set_rhs1 (stmt, defop0);
2939 update_stmt (stmt);
2940 return remove_prop_source_from_use (op0) ? 2 : 1;
2943 /* A truncation to an unsigned type should be canonicalized as
2944 a bitwise AND with a mask. */
2945 if (final_int && inter_int && inside_int
2946 && final_prec == inside_prec
2947 && final_prec > inter_prec
2948 && inter_unsignedp)
2950 tree tem;
2951 tem = fold_build2 (BIT_AND_EXPR, inside_type,
2952 defop0,
2953 double_int_to_tree
2954 (inside_type, double_int::mask (inter_prec)));
2955 if (!useless_type_conversion_p (type, inside_type))
2957 tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
2958 GSI_SAME_STMT);
2959 gimple_assign_set_rhs1 (stmt, tem);
2961 else
2962 gimple_assign_set_rhs_from_tree (gsi, tem);
2963 update_stmt (gsi_stmt (*gsi));
2964 return 1;
2967 /* If we are converting an integer to a floating-point type that can
2968 represent it exactly and back to an integer, we can skip the
2969 floating-point conversion. */
2970 if (inside_int && inter_float && final_int
2971 && (unsigned) significand_size (TYPE_MODE (inter_type))
2972 >= inside_prec - !inside_unsignedp)
2974 if (useless_type_conversion_p (type, inside_type))
2976 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2977 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2978 update_stmt (stmt);
2979 return remove_prop_source_from_use (op0) ? 2 : 1;
2981 else
2983 gimple_assign_set_rhs1 (stmt, defop0);
2984 gimple_assign_set_rhs_code (stmt, CONVERT_EXPR);
2985 update_stmt (stmt);
2986 return remove_prop_source_from_use (op0) ? 2 : 1;
2991 return 0;
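/* Example of the truncation-as-mask canonicalization above, assuming
   32-bit int and 16-bit unsigned short: for int x,
     t_1 = (unsigned short) x_2;  t_3 = (int) t_1;
   becomes
     t_3 = x_2 & 65535;
   the intermediate unsigned truncation is just a mask of the low
   bits.  */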
2994 /* Combine an element access with a shuffle. Returns true if there were
2995 any changes made, else it returns false. */
2997 static bool
2998 simplify_bitfield_ref (gimple_stmt_iterator *gsi)
3000 gimple stmt = gsi_stmt (*gsi);
3001 gimple def_stmt;
3002 tree op, op0, op1, op2;
3003 tree elem_type;
3004 unsigned idx, n, size;
3005 enum tree_code code;
3007 op = gimple_assign_rhs1 (stmt);
3008 gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);
3010 op0 = TREE_OPERAND (op, 0);
3011 if (TREE_CODE (op0) != SSA_NAME
3012 || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
3013 return false;
3015 def_stmt = get_prop_source_stmt (op0, false, NULL);
3016 if (!def_stmt || !can_propagate_from (def_stmt))
3017 return false;
3019 op1 = TREE_OPERAND (op, 1);
3020 op2 = TREE_OPERAND (op, 2);
3021 code = gimple_assign_rhs_code (def_stmt);
3023 if (code == CONSTRUCTOR)
3025 tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
3026 gimple_assign_rhs1 (def_stmt), op1, op2);
3027 if (!tem || !valid_gimple_rhs_p (tem))
3028 return false;
3029 gimple_assign_set_rhs_from_tree (gsi, tem);
3030 update_stmt (gsi_stmt (*gsi));
3031 return true;
3034 elem_type = TREE_TYPE (TREE_TYPE (op0));
3035 if (TREE_TYPE (op) != elem_type)
3036 return false;
3038 size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
3039 n = TREE_INT_CST_LOW (op1) / size;
3040 if (n != 1)
3041 return false;
3042 idx = TREE_INT_CST_LOW (op2) / size;
3044 if (code == VEC_PERM_EXPR)
3046 tree p, m, index, tem;
3047 unsigned nelts;
3048 m = gimple_assign_rhs3 (def_stmt);
3049 if (TREE_CODE (m) != VECTOR_CST)
3050 return false;
3051 nelts = VECTOR_CST_NELTS (m);
3052 idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
3053 idx %= 2 * nelts;
3054 if (idx < nelts)
3056 p = gimple_assign_rhs1 (def_stmt);
3058 else
3060 p = gimple_assign_rhs2 (def_stmt);
3061 idx -= nelts;
3063 index = build_int_cst (TREE_TYPE (TREE_TYPE (m)), idx * size);
3064 tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
3065 unshare_expr (p), op1, index);
3066 gimple_assign_set_rhs1 (stmt, tem);
3067 fold_stmt (gsi);
3068 update_stmt (gsi_stmt (*gsi));
3069 return true;
3072 return false;
3075 /* Determine whether applying the 2 permutations (MASK1 then MASK2)
3076 gives back one of the inputs: 1 for the first, 2 for the second, else 0. */
3078 static int
3079 is_combined_permutation_identity (tree mask1, tree mask2)
3081 tree mask;
3082 unsigned int nelts, i, j;
3083 bool maybe_identity1 = true;
3084 bool maybe_identity2 = true;
3086 gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
3087 && TREE_CODE (mask2) == VECTOR_CST);
3088 mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
3089 gcc_assert (TREE_CODE (mask) == VECTOR_CST);
3091 nelts = VECTOR_CST_NELTS (mask);
3092 for (i = 0; i < nelts; i++)
3094 tree val = VECTOR_CST_ELT (mask, i);
3095 gcc_assert (TREE_CODE (val) == INTEGER_CST);
3096 j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
3097 if (j == i)
3098 maybe_identity2 = false;
3099 else if (j == i + nelts)
3100 maybe_identity1 = false;
3101 else
3102 return 0;
3104 return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
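/* A small check of the above with two-element vectors (a sketch):
   MASK1 = { 1, 0 } followed by MASK2 = { 1, 0 } composes to { 0, 1 },
   the identity on the first input, so the function returns 1 and the
   caller can replace the double shuffle by that first input.  */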
3107 /* Combine a shuffle with its arguments. Returns 1 if there were any
3108 changes made, 2 if cfg-cleanup needs to run. Else it returns 0. */
3110 static int
3111 simplify_permutation (gimple_stmt_iterator *gsi)
3113 gimple stmt = gsi_stmt (*gsi);
3114 gimple def_stmt;
3115 tree op0, op1, op2, op3, arg0, arg1;
3116 enum tree_code code;
3117 bool single_use_op0 = false;
3119 gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
3121 op0 = gimple_assign_rhs1 (stmt);
3122 op1 = gimple_assign_rhs2 (stmt);
3123 op2 = gimple_assign_rhs3 (stmt);
3125 if (TREE_CODE (op2) != VECTOR_CST)
3126 return 0;
3128 if (TREE_CODE (op0) == VECTOR_CST)
3130 code = VECTOR_CST;
3131 arg0 = op0;
3133 else if (TREE_CODE (op0) == SSA_NAME)
3135 def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
3136 if (!def_stmt || !can_propagate_from (def_stmt))
3137 return 0;
3139 code = gimple_assign_rhs_code (def_stmt);
3140 arg0 = gimple_assign_rhs1 (def_stmt);
3142 else
3143 return 0;
3145 /* Two consecutive shuffles. */
3146 if (code == VEC_PERM_EXPR)
3148 tree orig;
3149 int ident;
3151 if (op0 != op1)
3152 return 0;
3153 op3 = gimple_assign_rhs3 (def_stmt);
3154 if (TREE_CODE (op3) != VECTOR_CST)
3155 return 0;
3156 ident = is_combined_permutation_identity (op3, op2);
3157 if (!ident)
3158 return 0;
3159 orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
3160 : gimple_assign_rhs2 (def_stmt);
3161 gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
3162 gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
3163 gimple_set_num_ops (stmt, 2);
3164 update_stmt (stmt);
3165 return remove_prop_source_from_use (op0) ? 2 : 1;
3168 /* Shuffle of a constructor. */
3169 else if (code == CONSTRUCTOR || code == VECTOR_CST)
3171 tree opt;
3172 bool ret = false;
3173 if (op0 != op1)
3175 if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
3176 return 0;
3178 if (TREE_CODE (op1) == VECTOR_CST)
3179 arg1 = op1;
3180 else if (TREE_CODE (op1) == SSA_NAME)
3182 enum tree_code code2;
3184 gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
3185 if (!def_stmt2 || !can_propagate_from (def_stmt2))
3186 return 0;
3188 code2 = gimple_assign_rhs_code (def_stmt2);
3189 if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
3190 return 0;
3191 arg1 = gimple_assign_rhs1 (def_stmt2);
3193 else
3194 return 0;
3196 else
3198 /* Already used twice in this statement. */
3199 if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
3200 return 0;
3201 arg1 = arg0;
3203 opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
3204 if (!opt
3205 || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
3206 return 0;
3207 gimple_assign_set_rhs_from_tree (gsi, opt);
3208 update_stmt (gsi_stmt (*gsi));
3209 if (TREE_CODE (op0) == SSA_NAME)
3210 ret = remove_prop_source_from_use (op0);
3211 if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
3212 ret |= remove_prop_source_from_use (op1);
3213 return ret ? 2 : 1;
3216 return 0;
3219 /* Try to rewrite a vector CONSTRUCTOR of element accesses as a
VEC_PERM_EXPR. Returns true if there were any changes. */
3221 static bool
3222 simplify_vector_constructor (gimple_stmt_iterator *gsi)
3224 gimple stmt = gsi_stmt (*gsi);
3225 gimple def_stmt;
3226 tree op, op2, orig, type, elem_type;
3227 unsigned elem_size, nelts, i;
3228 enum tree_code code;
3229 constructor_elt *elt;
3230 unsigned char *sel;
3231 bool maybe_ident;
3233 gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);
3235 op = gimple_assign_rhs1 (stmt);
3236 type = TREE_TYPE (op);
3237 gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);
3239 nelts = TYPE_VECTOR_SUBPARTS (type);
3240 elem_type = TREE_TYPE (type);
3241 elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
3243 sel = XALLOCAVEC (unsigned char, nelts);
3244 orig = NULL;
3245 maybe_ident = true;
3246 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
3248 tree ref, op1;
3250 if (i >= nelts)
3251 return false;
3253 if (TREE_CODE (elt->value) != SSA_NAME)
3254 return false;
3255 def_stmt = get_prop_source_stmt (elt->value, false, NULL);
3256 if (!def_stmt)
3257 return false;
3258 code = gimple_assign_rhs_code (def_stmt);
3259 if (code != BIT_FIELD_REF)
3260 return false;
3261 op1 = gimple_assign_rhs1 (def_stmt);
3262 ref = TREE_OPERAND (op1, 0);
3263 if (orig)
3265 if (ref != orig)
3266 return false;
3268 else
3270 if (TREE_CODE (ref) != SSA_NAME)
3271 return false;
3272 if (!useless_type_conversion_p (type, TREE_TYPE (ref)))
3273 return false;
3274 orig = ref;
3276 if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
3277 return false;
3278 sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
3279 if (sel[i] != i) maybe_ident = false;
3281 if (i < nelts)
3282 return false;
3284 if (maybe_ident)
3285 gimple_assign_set_rhs_from_tree (gsi, orig);
3286 else
3288 tree mask_type, *mask_elts;
3290 if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
3291 return false;
3292 mask_type
3293 = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
3294 nelts);
3295 if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
3296 || GET_MODE_SIZE (TYPE_MODE (mask_type))
3297 != GET_MODE_SIZE (TYPE_MODE (type)))
3298 return false;
3299 mask_elts = XALLOCAVEC (tree, nelts);
3300 for (i = 0; i < nelts; i++)
3301 mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
3302 op2 = build_vector (mask_type, mask_elts);
3303 gimple_assign_set_rhs_with_ops_1 (gsi, VEC_PERM_EXPR, orig, orig, op2);
3305 update_stmt (gsi_stmt (*gsi));
3306 return true;
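/* A sketch of the recognition above with four-element vectors: a
   CONSTRUCTOR gathering BIT_FIELD_REFs of the same vector V in the
   order { V[3], V[2], V[1], V[0] } is rewritten as
     VEC_PERM_EXPR <V, V, { 3, 2, 1, 0 }>
   while the in-order gather { V[0], V[1], V[2], V[3] } is replaced by
   V itself (the maybe_ident case).  */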
3309 /* Main entry point for the forward propagation and statement combine
3310 optimizer. */
3312 static unsigned int
3313 ssa_forward_propagate_and_combine (void)
3315 basic_block bb;
3316 unsigned int todoflags = 0;
3318 cfg_changed = false;
3320 FOR_EACH_BB (bb)
3322 gimple_stmt_iterator gsi;
3324 /* Apply forward propagation to all stmts in the basic-block.
3325 Note we update GSI within the loop as necessary. */
3326 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
3328 gimple stmt = gsi_stmt (gsi);
3329 tree lhs, rhs;
3330 enum tree_code code;
3332 if (!is_gimple_assign (stmt))
3334 gsi_next (&gsi);
3335 continue;
3338 lhs = gimple_assign_lhs (stmt);
3339 rhs = gimple_assign_rhs1 (stmt);
3340 code = gimple_assign_rhs_code (stmt);
3341 if (TREE_CODE (lhs) != SSA_NAME
3342 || has_zero_uses (lhs))
3344 gsi_next (&gsi);
3345 continue;
3348 /* If this statement sets an SSA_NAME to an address,
3349 try to propagate the address into the uses of the SSA_NAME. */
3350 if (code == ADDR_EXPR
3351 /* Handle pointer conversions on invariant addresses
3352 as well, as this is valid gimple. */
3353 || (CONVERT_EXPR_CODE_P (code)
3354 && TREE_CODE (rhs) == ADDR_EXPR
3355 && POINTER_TYPE_P (TREE_TYPE (lhs))))
3357 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3358 if ((!base
3359 || !DECL_P (base)
3360 || decl_address_invariant_p (base))
3361 && !stmt_references_abnormal_ssa_name (stmt)
3362 && forward_propagate_addr_expr (lhs, rhs, true))
3364 release_defs (stmt);
3365 gsi_remove (&gsi, true);
3367 else
3368 gsi_next (&gsi);
3370 else if (code == POINTER_PLUS_EXPR)
3372 tree off = gimple_assign_rhs2 (stmt);
3373 if (TREE_CODE (off) == INTEGER_CST
3374 && can_propagate_from (stmt)
3375 && !simple_iv_increment_p (stmt)
3376 /* ??? Better adjust the interface to that function
3377 instead of building new trees here. */
3378 && forward_propagate_addr_expr
3379 (lhs,
3380 build1_loc (gimple_location (stmt),
3381 ADDR_EXPR, TREE_TYPE (rhs),
3382 fold_build2 (MEM_REF,
3383 TREE_TYPE (TREE_TYPE (rhs)),
3384 rhs,
3385 fold_convert (ptr_type_node,
3386 off))), true))
3388 release_defs (stmt);
3389 gsi_remove (&gsi, true);
3391 else if (is_gimple_min_invariant (rhs))
3393 /* Make sure to fold &a[0] + off_1 here. */
3394 fold_stmt_inplace (&gsi);
3395 update_stmt (stmt);
3396 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
3397 gsi_next (&gsi);
3399 else
3400 gsi_next (&gsi);
3402 else if (TREE_CODE_CLASS (code) == tcc_comparison)
3404 if (forward_propagate_comparison (&gsi))
3405 cfg_changed = true;
3407 else
3408 gsi_next (&gsi);
3411 /* Combine stmts with the stmts defining their operands.
3412 Note we update GSI within the loop as necessary. */
3413 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
3415 gimple stmt = gsi_stmt (gsi);
3416 bool changed = false;
3418 /* Mark stmt as potentially needing revisiting. */
3419 gimple_set_plf (stmt, GF_PLF_1, false);
3421 switch (gimple_code (stmt))
3423 case GIMPLE_ASSIGN:
3425 tree rhs1 = gimple_assign_rhs1 (stmt);
3426 enum tree_code code = gimple_assign_rhs_code (stmt);
3428 if ((code == BIT_NOT_EXPR
3429 || code == NEGATE_EXPR)
3430 && TREE_CODE (rhs1) == SSA_NAME)
3431 changed = simplify_not_neg_expr (&gsi);
3432 else if (code == COND_EXPR
3433 || code == VEC_COND_EXPR)
3435 /* In this case the entire COND_EXPR is in rhs1. */
3436 if (forward_propagate_into_cond (&gsi)
3437 || combine_cond_exprs (&gsi))
3439 changed = true;
3440 stmt = gsi_stmt (gsi);
3443 else if (TREE_CODE_CLASS (code) == tcc_comparison)
3445 int did_something;
3446 did_something = forward_propagate_into_comparison (&gsi);
3447 if (did_something == 2)
3448 cfg_changed = true;
3449 changed = did_something != 0;
3451 else if ((code == PLUS_EXPR
3452 || code == BIT_IOR_EXPR
3453 || code == BIT_XOR_EXPR)
3454 && simplify_rotate (&gsi))
3455 changed = true;
3456 else if (code == BIT_AND_EXPR
3457 || code == BIT_IOR_EXPR
3458 || code == BIT_XOR_EXPR)
3459 changed = simplify_bitwise_binary (&gsi);
3460 else if (code == PLUS_EXPR
3461 || code == MINUS_EXPR)
3462 changed = associate_plusminus (&gsi);
3463 else if (code == POINTER_PLUS_EXPR)
3464 changed = associate_pointerplus (&gsi);
3465 else if (CONVERT_EXPR_CODE_P (code)
3466 || code == FLOAT_EXPR
3467 || code == FIX_TRUNC_EXPR)
3469 int did_something = combine_conversions (&gsi);
3470 if (did_something == 2)
3471 cfg_changed = true;
3473 /* If we have a narrowing conversion to an integral
3474 type that is fed by a BIT_AND_EXPR, we might be
3475 able to remove the BIT_AND_EXPR if it merely
3476 masks off bits outside the final type (and nothing
3477 else). */
3478 if (! did_something)
3480 tree outer_type = TREE_TYPE (gimple_assign_lhs (stmt));
3481 tree inner_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
3482 if (INTEGRAL_TYPE_P (outer_type)
3483 && INTEGRAL_TYPE_P (inner_type)
3484 && (TYPE_PRECISION (outer_type)
3485 <= TYPE_PRECISION (inner_type)))
3486 did_something = simplify_conversion_from_bitmask (&gsi);
3489 changed = did_something != 0;
3491 else if (code == VEC_PERM_EXPR)
3493 int did_something = simplify_permutation (&gsi);
3494 if (did_something == 2)
3495 cfg_changed = true;
3496 changed = did_something != 0;
3498 else if (code == BIT_FIELD_REF)
3499 changed = simplify_bitfield_ref (&gsi);
3500 else if (code == CONSTRUCTOR
3501 && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
3502 changed = simplify_vector_constructor (&gsi);
3503 break;
3506 case GIMPLE_SWITCH:
3507 changed = simplify_gimple_switch (stmt);
3508 break;
3510 case GIMPLE_COND:
3512 int did_something;
3513 did_something = forward_propagate_into_gimple_cond (stmt);
3514 if (did_something == 2)
3515 cfg_changed = true;
3516 changed = did_something != 0;
3517 break;
3520 case GIMPLE_CALL:
3522 tree callee = gimple_call_fndecl (stmt);
3523 if (callee != NULL_TREE
3524 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
3525 changed = simplify_builtin_call (&gsi, callee);
3526 break;
3529 default:;
3532 if (changed)
3534 /* If the stmt changed then re-visit it and the statements
3535 inserted before it. */
3536 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
3537 if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
3538 break;
3539 if (gsi_end_p (gsi))
3540 gsi = gsi_start_bb (bb);
3541 else
3542 gsi_next (&gsi);
3544 else
3546 /* Stmt no longer needs to be revisited. */
3547 gimple_set_plf (stmt, GF_PLF_1, true);
3548 gsi_next (&gsi);
3553 if (cfg_changed)
3554 todoflags |= TODO_cleanup_cfg;
3556 return todoflags;
3560 static bool
3561 gate_forwprop (void)
3563 return flag_tree_forwprop;
3566 namespace {
3568 const pass_data pass_data_forwprop =
3570 GIMPLE_PASS, /* type */
3571 "forwprop", /* name */
3572 OPTGROUP_NONE, /* optinfo_flags */
3573 true, /* has_gate */
3574 true, /* has_execute */
3575 TV_TREE_FORWPROP, /* tv_id */
3576 ( PROP_cfg | PROP_ssa ), /* properties_required */
3577 0, /* properties_provided */
3578 0, /* properties_destroyed */
3579 0, /* todo_flags_start */
3580 ( TODO_update_ssa | TODO_verify_ssa ), /* todo_flags_finish */
3583 class pass_forwprop : public gimple_opt_pass
3585 public:
3586 pass_forwprop (gcc::context *ctxt)
3587 : gimple_opt_pass (pass_data_forwprop, ctxt)
3590 /* opt_pass methods: */
3591 opt_pass * clone () { return new pass_forwprop (m_ctxt); }
3592 bool gate () { return gate_forwprop (); }
3593 unsigned int execute () { return ssa_forward_propagate_and_combine (); }
3595 }; // class pass_forwprop
3597 } // anon namespace
3599 gimple_opt_pass *
3600 make_pass_forwprop (gcc::context *ctxt)
3602 return new pass_forwprop (ctxt);