2013-11-21 Richard Biener <rguenther@suse.de>
[official-gcc.git] / gcc / tree-ssa-forwprop.c
blob0c9a79f6092fcb5d2c2cf7a28c9651d63dd5daa2
1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "stor-layout.h"
26 #include "tm_p.h"
27 #include "basic-block.h"
28 #include "gimple-pretty-print.h"
29 #include "gimple.h"
30 #include "gimplify.h"
31 #include "gimple-iterator.h"
32 #include "gimplify-me.h"
33 #include "gimple-ssa.h"
34 #include "tree-cfg.h"
35 #include "tree-phinodes.h"
36 #include "ssa-iterators.h"
37 #include "stringpool.h"
38 #include "tree-ssanames.h"
39 #include "expr.h"
40 #include "tree-dfa.h"
41 #include "tree-pass.h"
42 #include "langhooks.h"
43 #include "flags.h"
44 #include "expr.h"
45 #include "cfgloop.h"
46 #include "optabs.h"
47 #include "tree-ssa-propagate.h"
48 #include "tree-ssa-dom.h"
50 /* This pass propagates the RHS of assignment statements into use
51 sites of the LHS of the assignment. It's basically a specialized
52 form of tree combination. It is hoped all of this can disappear
53 when we have a generalized tree combiner.
55 One class of common cases we handle is forward propagating a single use
56 variable into a COND_EXPR.
58 bb0:
59 x = a COND b;
60 if (x) goto ... else goto ...
62 Will be transformed into:
64 bb0:
65 if (a COND b) goto ... else goto ...
67 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
69 Or (assuming c1 and c2 are constants):
71 bb0:
72 x = a + c1;
73 if (x EQ/NEQ c2) goto ... else goto ...
75 Will be transformed into:
77 bb0:
78 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
80 Similarly for x = a - c1.
84 bb0:
85 x = !a
86 if (x) goto ... else goto ...
88 Will be transformed into:
90 bb0:
91 if (a == 0) goto ... else goto ...
93 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
94 For these cases, we propagate A into all, possibly more than one,
95 COND_EXPRs that use X.
99 bb0:
100 x = (typecast) a
101 if (x) goto ... else goto ...
103 Will be transformed into:
105 bb0:
106 if (a != 0) goto ... else goto ...
108 (Assuming a is an integral type and x is a boolean or x is an
109 integral and a is a boolean.)
111 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
112 For these cases, we propagate A into all, possibly more than one,
113 COND_EXPRs that use X.
115 In addition to eliminating the variable and the statement which assigns
116 a value to the variable, we may be able to later thread the jump without
117 adding insane complexity in the dominator optimizer.
119 Also note these transformations can cascade. We handle this by having
120 a worklist of COND_EXPR statements to examine. As we make a change to
121 a statement, we put it back on the worklist to examine on the next
122 iteration of the main loop.
124 A second class of propagation opportunities arises for ADDR_EXPR
125 nodes.
127 ptr = &x->y->z;
128 res = *ptr;
130 Will get turned into
132 res = x->y->z;
135 ptr = (type1*)&type2var;
136 res = *ptr
138 Will get turned into (if type1 and type2 are the same size
139 and neither have volatile on them):
140 res = VIEW_CONVERT_EXPR<type1>(type2var)
144 ptr = &x[0];
145 ptr2 = ptr + <constant>;
147 Will get turned into
149 ptr2 = &x[constant/elementsize];
153 ptr = &x[0];
154 offset = index * element_size;
155 offset_p = (pointer) offset;
156 ptr2 = ptr + offset_p
158 Will get turned into:
160 ptr2 = &x[index];
163 ssa = (int) decl
164 res = ssa & 1
166 Provided that decl has known alignment >= 2, will get turned into
168 res = 0
170 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
171 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
172 {NOT_EXPR,NEG_EXPR}.
174 This will (of course) be extended as other needs arise. */
176 static bool forward_propagate_addr_expr (tree, tree, bool);
178 /* Set to true if we delete dead edges during the optimization. */
179 static bool cfg_changed;
181 static tree rhs_to_tree (tree type, gimple stmt);
183 /* Get the next statement we can propagate NAME's value into skipping
184 trivial copies. Returns the statement that is suitable as a
185 propagation destination or NULL_TREE if there is no such one.
186 This only returns destinations in a single-use chain. FINAL_NAME_P
187 if non-NULL is written to the ssa name that represents the use. */
189 static gimple
190 get_prop_dest_stmt (tree name, tree *final_name_p)
192 use_operand_p use;
193 gimple use_stmt;
195 do {
196 /* If name has multiple uses, bail out. */
197 if (!single_imm_use (name, &use, &use_stmt))
198 return NULL;
200 /* If this is not a trivial copy, we found it. */
201 if (!gimple_assign_ssa_name_copy_p (use_stmt)
202 || gimple_assign_rhs1 (use_stmt) != name)
203 break;
205 /* Continue searching uses of the copy destination. */
206 name = gimple_assign_lhs (use_stmt);
207 } while (1);
209 if (final_name_p)
210 *final_name_p = name;
212 return use_stmt;
215 /* Get the statement we can propagate from into NAME skipping
216 trivial copies. Returns the statement which defines the
217 propagation source or NULL_TREE if there is no such one.
218 If SINGLE_USE_ONLY is set considers only sources which have
219 a single use chain up to NAME. If SINGLE_USE_P is non-null,
220 it is set to whether the chain to NAME is a single use chain
221 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
223 static gimple
224 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
226 bool single_use = true;
228 do {
229 gimple def_stmt = SSA_NAME_DEF_STMT (name);
231 if (!has_single_use (name))
233 single_use = false;
234 if (single_use_only)
235 return NULL;
238 /* If name is defined by a PHI node or is the default def, bail out. */
239 if (!is_gimple_assign (def_stmt))
240 return NULL;
242 /* If def_stmt is a simple copy, continue looking. */
243 if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
244 name = gimple_assign_rhs1 (def_stmt);
245 else
247 if (!single_use_only && single_use_p)
248 *single_use_p = single_use;
250 return def_stmt;
252 } while (1);
255 /* Checks if the destination ssa name in DEF_STMT can be used as
256 propagation source. Returns true if so, otherwise false. */
258 static bool
259 can_propagate_from (gimple def_stmt)
261 gcc_assert (is_gimple_assign (def_stmt));
263 /* If the rhs has side-effects we cannot propagate from it. */
264 if (gimple_has_volatile_ops (def_stmt))
265 return false;
267 /* If the rhs is a load we cannot propagate from it. */
268 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
269 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
270 return false;
272 /* Constants can be always propagated. */
273 if (gimple_assign_single_p (def_stmt)
274 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
275 return true;
277 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
278 if (stmt_references_abnormal_ssa_name (def_stmt))
279 return false;
281 /* If the definition is a conversion of a pointer to a function type,
282 then we can not apply optimizations as some targets require
283 function pointers to be canonicalized and in this case this
284 optimization could eliminate a necessary canonicalization. */
285 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
287 tree rhs = gimple_assign_rhs1 (def_stmt);
288 if (POINTER_TYPE_P (TREE_TYPE (rhs))
289 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
290 return false;
293 return true;
296 /* Remove a chain of dead statements starting at the definition of
297 NAME. The chain is linked via the first operand of the defining statements.
298 If NAME was replaced in its only use then this function can be used
299 to clean up dead stmts. The function handles already released SSA
300 names gracefully.
301 Returns true if cleanup-cfg has to run. */
303 static bool
304 remove_prop_source_from_use (tree name)
306 gimple_stmt_iterator gsi;
307 gimple stmt;
308 bool cfg_changed = false;
310 do {
311 basic_block bb;
313 if (SSA_NAME_IN_FREE_LIST (name)
314 || SSA_NAME_IS_DEFAULT_DEF (name)
315 || !has_zero_uses (name))
316 return cfg_changed;
318 stmt = SSA_NAME_DEF_STMT (name);
319 if (gimple_code (stmt) == GIMPLE_PHI
320 || gimple_has_side_effects (stmt))
321 return cfg_changed;
323 bb = gimple_bb (stmt);
324 gsi = gsi_for_stmt (stmt);
325 unlink_stmt_vdef (stmt);
326 if (gsi_remove (&gsi, true))
327 cfg_changed |= gimple_purge_dead_eh_edges (bb);
328 release_defs (stmt);
330 name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
331 } while (name && TREE_CODE (name) == SSA_NAME);
333 return cfg_changed;
336 /* Return the rhs of a gimple_assign STMT in a form of a single tree,
337 converted to type TYPE.
339 This should disappear, but is needed so we can combine expressions and use
340 the fold() interfaces. Long term, we need to develop folding and combine
341 routines that deal with gimple exclusively . */
343 static tree
344 rhs_to_tree (tree type, gimple stmt)
346 location_t loc = gimple_location (stmt);
347 enum tree_code code = gimple_assign_rhs_code (stmt);
348 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
349 return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
350 gimple_assign_rhs2 (stmt),
351 gimple_assign_rhs3 (stmt));
352 else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
353 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
354 gimple_assign_rhs2 (stmt));
355 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
356 return build1 (code, type, gimple_assign_rhs1 (stmt));
357 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
358 return gimple_assign_rhs1 (stmt);
359 else
360 gcc_unreachable ();
363 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
364 the folded result in a form suitable for COND_EXPR_COND or
365 NULL_TREE, if there is no suitable simplified form. If
366 INVARIANT_ONLY is true only gimple_min_invariant results are
367 considered simplified. */
369 static tree
370 combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
371 tree op0, tree op1, bool invariant_only)
373 tree t;
375 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
377 fold_defer_overflow_warnings ();
378 t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
379 if (!t)
381 fold_undefer_overflow_warnings (false, NULL, 0);
382 return NULL_TREE;
385 /* Require that we got a boolean type out if we put one in. */
386 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
388 /* Canonicalize the combined condition for use in a COND_EXPR. */
389 t = canonicalize_cond_expr_cond (t);
391 /* Bail out if we required an invariant but didn't get one. */
392 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
394 fold_undefer_overflow_warnings (false, NULL, 0);
395 return NULL_TREE;
398 fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);
400 return t;
403 /* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
404 of its operand. Return a new comparison tree or NULL_TREE if there
405 were no simplifying combines. */
407 static tree
408 forward_propagate_into_comparison_1 (gimple stmt,
409 enum tree_code code, tree type,
410 tree op0, tree op1)
412 tree tmp = NULL_TREE;
413 tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
414 bool single_use0_p = false, single_use1_p = false;
416 /* For comparisons use the first operand, that is likely to
417 simplify comparisons against constants. */
418 if (TREE_CODE (op0) == SSA_NAME)
420 gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
421 if (def_stmt && can_propagate_from (def_stmt))
423 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
424 tmp = combine_cond_expr_cond (stmt, code, type,
425 rhs0, op1, !single_use0_p);
426 if (tmp)
427 return tmp;
431 /* If that wasn't successful, try the second operand. */
432 if (TREE_CODE (op1) == SSA_NAME)
434 gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
435 if (def_stmt && can_propagate_from (def_stmt))
437 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
438 tmp = combine_cond_expr_cond (stmt, code, type,
439 op0, rhs1, !single_use1_p);
440 if (tmp)
441 return tmp;
445 /* If that wasn't successful either, try both operands. */
446 if (rhs0 != NULL_TREE
447 && rhs1 != NULL_TREE)
448 tmp = combine_cond_expr_cond (stmt, code, type,
449 rhs0, rhs1,
450 !(single_use0_p && single_use1_p));
452 return tmp;
455 /* Propagate from the ssa name definition statements of the assignment
456 from a comparison at *GSI into the conditional if that simplifies it.
457 Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
458 otherwise returns 0. */
460 static int
461 forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
463 gimple stmt = gsi_stmt (*gsi);
464 tree tmp;
465 bool cfg_changed = false;
466 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
467 tree rhs1 = gimple_assign_rhs1 (stmt);
468 tree rhs2 = gimple_assign_rhs2 (stmt);
470 /* Combine the comparison with defining statements. */
471 tmp = forward_propagate_into_comparison_1 (stmt,
472 gimple_assign_rhs_code (stmt),
473 type, rhs1, rhs2);
474 if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
476 gimple_assign_set_rhs_from_tree (gsi, tmp);
477 fold_stmt (gsi);
478 update_stmt (gsi_stmt (*gsi));
480 if (TREE_CODE (rhs1) == SSA_NAME)
481 cfg_changed |= remove_prop_source_from_use (rhs1);
482 if (TREE_CODE (rhs2) == SSA_NAME)
483 cfg_changed |= remove_prop_source_from_use (rhs2);
484 return cfg_changed ? 2 : 1;
487 return 0;
490 /* Propagate from the ssa name definition statements of COND_EXPR
491 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
492 Returns zero if no statement was changed, one if there were
493 changes and two if cfg_cleanup needs to run.
495 This must be kept in sync with forward_propagate_into_cond. */
497 static int
498 forward_propagate_into_gimple_cond (gimple stmt)
500 tree tmp;
501 enum tree_code code = gimple_cond_code (stmt);
502 bool cfg_changed = false;
503 tree rhs1 = gimple_cond_lhs (stmt);
504 tree rhs2 = gimple_cond_rhs (stmt);
506 /* We can do tree combining on SSA_NAME and comparison expressions. */
507 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
508 return 0;
510 tmp = forward_propagate_into_comparison_1 (stmt, code,
511 boolean_type_node,
512 rhs1, rhs2);
513 if (tmp)
515 if (dump_file && tmp)
517 fprintf (dump_file, " Replaced '");
518 print_gimple_expr (dump_file, stmt, 0, 0);
519 fprintf (dump_file, "' with '");
520 print_generic_expr (dump_file, tmp, 0);
521 fprintf (dump_file, "'\n");
524 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
525 update_stmt (stmt);
527 if (TREE_CODE (rhs1) == SSA_NAME)
528 cfg_changed |= remove_prop_source_from_use (rhs1);
529 if (TREE_CODE (rhs2) == SSA_NAME)
530 cfg_changed |= remove_prop_source_from_use (rhs2);
531 return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
534 /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges. */
535 if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
536 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
537 && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
538 && ((code == EQ_EXPR
539 && integer_zerop (rhs2))
540 || (code == NE_EXPR
541 && integer_onep (rhs2))))
543 basic_block bb = gimple_bb (stmt);
544 gimple_cond_set_code (stmt, NE_EXPR);
545 gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
546 EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
547 EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
548 return 1;
551 return 0;
555 /* Propagate from the ssa name definition statements of COND_EXPR
556 in the rhs of statement STMT into the conditional if that simplifies it.
557 Returns true zero if the stmt was changed. */
559 static bool
560 forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
562 gimple stmt = gsi_stmt (*gsi_p);
563 tree tmp = NULL_TREE;
564 tree cond = gimple_assign_rhs1 (stmt);
565 enum tree_code code = gimple_assign_rhs_code (stmt);
566 bool swap = false;
568 /* We can do tree combining on SSA_NAME and comparison expressions. */
569 if (COMPARISON_CLASS_P (cond))
570 tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
571 TREE_TYPE (cond),
572 TREE_OPERAND (cond, 0),
573 TREE_OPERAND (cond, 1));
574 else if (TREE_CODE (cond) == SSA_NAME)
576 enum tree_code def_code;
577 tree name = cond;
578 gimple def_stmt = get_prop_source_stmt (name, true, NULL);
579 if (!def_stmt || !can_propagate_from (def_stmt))
580 return 0;
582 def_code = gimple_assign_rhs_code (def_stmt);
583 if (TREE_CODE_CLASS (def_code) == tcc_comparison)
584 tmp = fold_build2_loc (gimple_location (def_stmt),
585 def_code,
586 TREE_TYPE (cond),
587 gimple_assign_rhs1 (def_stmt),
588 gimple_assign_rhs2 (def_stmt));
589 else if (code == COND_EXPR
590 && ((def_code == BIT_NOT_EXPR
591 && TYPE_PRECISION (TREE_TYPE (cond)) == 1)
592 || (def_code == BIT_XOR_EXPR
593 && integer_onep (gimple_assign_rhs2 (def_stmt)))))
595 tmp = gimple_assign_rhs1 (def_stmt);
596 swap = true;
600 if (tmp
601 && is_gimple_condexpr (tmp))
603 if (dump_file && tmp)
605 fprintf (dump_file, " Replaced '");
606 print_generic_expr (dump_file, cond, 0);
607 fprintf (dump_file, "' with '");
608 print_generic_expr (dump_file, tmp, 0);
609 fprintf (dump_file, "'\n");
612 if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
613 : integer_onep (tmp))
614 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
615 else if (integer_zerop (tmp))
616 gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
617 else
619 gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
620 if (swap)
622 tree t = gimple_assign_rhs2 (stmt);
623 gimple_assign_set_rhs2 (stmt, gimple_assign_rhs3 (stmt));
624 gimple_assign_set_rhs3 (stmt, t);
627 stmt = gsi_stmt (*gsi_p);
628 update_stmt (stmt);
630 return true;
633 return 0;
636 /* Propagate from the ssa name definition statements of COND_EXPR
637 values in the rhs of statement STMT into the conditional arms
638 if that simplifies it.
639 Returns true if the stmt was changed. */
641 static bool
642 combine_cond_exprs (gimple_stmt_iterator *gsi_p)
644 gimple stmt = gsi_stmt (*gsi_p);
645 tree cond, val1, val2;
646 bool changed = false;
648 cond = gimple_assign_rhs1 (stmt);
649 val1 = gimple_assign_rhs2 (stmt);
650 if (TREE_CODE (val1) == SSA_NAME)
652 gimple def_stmt = SSA_NAME_DEF_STMT (val1);
653 if (is_gimple_assign (def_stmt)
654 && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
655 && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
657 val1 = unshare_expr (gimple_assign_rhs2 (def_stmt));
658 gimple_assign_set_rhs2 (stmt, val1);
659 changed = true;
662 val2 = gimple_assign_rhs3 (stmt);
663 if (TREE_CODE (val2) == SSA_NAME)
665 gimple def_stmt = SSA_NAME_DEF_STMT (val2);
666 if (is_gimple_assign (def_stmt)
667 && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
668 && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
670 val2 = unshare_expr (gimple_assign_rhs3 (def_stmt));
671 gimple_assign_set_rhs3 (stmt, val2);
672 changed = true;
675 if (operand_equal_p (val1, val2, 0))
677 gimple_assign_set_rhs_from_tree (gsi_p, val1);
678 stmt = gsi_stmt (*gsi_p);
679 changed = true;
682 if (changed)
683 update_stmt (stmt);
685 return changed;
688 /* We've just substituted an ADDR_EXPR into stmt. Update all the
689 relevant data structures to match. */
691 static void
692 tidy_after_forward_propagate_addr (gimple stmt)
694 /* We may have turned a trapping insn into a non-trapping insn. */
695 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
696 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
697 cfg_changed = true;
699 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
700 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation can
   be not totally successful, yet things may have been changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
			       gimple_stmt_iterator *use_stmt_gsi,
			       bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Do not perform copy-propagation but recurse through copy chains.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == SSA_NAME)
    return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

  /* The use statement could be a conversion.  Recurse to the uses of the
     lhs as copyprop does not copy through pointer to integer to pointer
     conversions and FRE does not catch all cases either.
     Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && CONVERT_EXPR_CODE_P (rhs_code))
    {
      /* If there is a point in a conversion chain where the types match
	 so we can remove a conversion re-materialize the address here
	 and stop.  */
      if (single_use_p
	  && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
	{
	  gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
	  gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
	  return true;
	}

      /* Else recurse if the conversion preserves the address value.
	 NOTE(review): a widening-or-equal integral/pointer conversion is
	 taken to preserve the address bits here.  */
      if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
	   || POINTER_TYPE_P (TREE_TYPE (lhs)))
	  && (TYPE_PRECISION (TREE_TYPE (lhs))
	      >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
	return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

      return false;
    }

  /* If this isn't a conversion chain from this on we only can propagate
     into compatible pointer contexts.  */
  if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
    return false;

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
	 to make sure we can build a valid constant offsetted address
	 for further propagation.  Simply rely on fold building that
	 and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
				 def_rhs,
				 fold_convert (ptr_type_node,
					       gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
	  && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
	return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
						    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
	 bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
	  && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
	return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
	gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
					new_def_rhs, NULL_TREE);
      else if (is_gimple_min_invariant (new_def_rhs))
	gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
					new_def_rhs, NULL_TREE);
      else
	return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
							 &def_rhs_offset)))
	{
	  /* Accumulate the total offset: the MEM_REF's own offset plus
	     the offset of the propagated address.  */
	  double_int off = mem_ref_offset (lhs);
	  tree new_ptr;
	  off += double_int::from_shwi (def_rhs_offset);
	  if (TREE_CODE (def_rhs_base) == MEM_REF)
	    {
	      off += mem_ref_offset (def_rhs_base);
	      new_ptr = TREE_OPERAND (def_rhs_base, 0);
	    }
	  else
	    new_ptr = build_fold_addr_expr (def_rhs_base);
	  TREE_OPERAND (lhs, 0) = new_ptr;
	  TREE_OPERAND (lhs, 1)
	    = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
	  tidy_after_forward_propagate_addr (use_stmt);
	  /* Continue propagating into the RHS if this was not the only use.  */
	  if (single_use_p)
	    return true;
	}
      /* If the LHS is a plain dereference and the value type is the same as
	 that of the pointed-to type of the address we can put the
	 dereferenced address on the LHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (lhs, 1))
	       && ((gimple_assign_lhs (use_stmt) == lhs
		    && useless_type_conversion_p
		         (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
			  TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
		   || types_compatible_p (TREE_TYPE (lhs),
					  TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
	       /* Don't forward anything into clobber stmts if it would result
		  in the lhs no longer being a MEM_REF.  */
	       && (!gimple_clobber_p (use_stmt)
		   || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
	{
	  tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
	  tree new_offset, new_base, saved, new_lhs;
	  while (handled_component_p (*def_rhs_basep))
	    def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
	  /* Temporarily rewrite the base of DEF_RHS in place so the
	     unshare below copies the rewritten form; restored after.  */
	  saved = *def_rhs_basep;
	  if (TREE_CODE (*def_rhs_basep) == MEM_REF)
	    {
	      new_base = TREE_OPERAND (*def_rhs_basep, 0);
	      new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
					 TREE_OPERAND (*def_rhs_basep, 1));
	    }
	  else
	    {
	      new_base = build_fold_addr_expr (*def_rhs_basep);
	      new_offset = TREE_OPERAND (lhs, 1);
	    }
	  *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
				   new_base, new_offset);
	  TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
	  TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
	  TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
	  new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
	  *lhsp = new_lhs;
	  TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
	  TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
	  *def_rhs_basep = saved;
	  tidy_after_forward_propagate_addr (use_stmt);
	  /* Continue propagating into the RHS if this was not the
	     only use.  */
	  if (single_use_p)
	    return true;
	}
      else
	/* We can have a struct assignment dereferencing our name twice.
	   Note that we didn't propagate into the lhs to not falsely
	   claim we did when propagating into the rhs.  */
	res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
  if (TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  while (handled_component_p (*rhsp))
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.
     This mirrors the LHS handling above.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
							 &def_rhs_offset)))
	{
	  double_int off = mem_ref_offset (rhs);
	  tree new_ptr;
	  off += double_int::from_shwi (def_rhs_offset);
	  if (TREE_CODE (def_rhs_base) == MEM_REF)
	    {
	      off += mem_ref_offset (def_rhs_base);
	      new_ptr = TREE_OPERAND (def_rhs_base, 0);
	    }
	  else
	    new_ptr = build_fold_addr_expr (def_rhs_base);
	  TREE_OPERAND (rhs, 0) = new_ptr;
	  TREE_OPERAND (rhs, 1)
	    = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
	  fold_stmt_inplace (use_stmt_gsi);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return res;
	}
      /* If the RHS is a plain dereference and the value type is the same as
	 that of the pointed-to type of the address we can put the
	 dereferenced address on the RHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (rhs, 1))
	       && ((gimple_assign_rhs1 (use_stmt) == rhs
		    && useless_type_conversion_p
		         (TREE_TYPE (gimple_assign_lhs (use_stmt)),
			  TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
		   || types_compatible_p (TREE_TYPE (rhs),
					  TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
	{
	  tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
	  tree new_offset, new_base, saved, new_rhs;
	  while (handled_component_p (*def_rhs_basep))
	    def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
	  /* Same temporary in-place rewrite/restore trick as on the
	     LHS path above.  */
	  saved = *def_rhs_basep;
	  if (TREE_CODE (*def_rhs_basep) == MEM_REF)
	    {
	      new_base = TREE_OPERAND (*def_rhs_basep, 0);
	      new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
					 TREE_OPERAND (*def_rhs_basep, 1));
	    }
	  else
	    {
	      new_base = build_fold_addr_expr (*def_rhs_basep);
	      new_offset = TREE_OPERAND (rhs, 1);
	    }
	  *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
				   new_base, new_offset);
	  TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
	  TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
	  TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
	  new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
	  *rhsp = new_rhs;
	  TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
	  TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
	  *def_rhs_basep = saved;
	  fold_stmt_inplace (use_stmt_gsi);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return res;
	}
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
				 ADDR_EXPR, TREE_TYPE (def_rhs),
				 fold_build2 (MEM_REF,
					      TREE_TYPE (TREE_TYPE (def_rhs)),
					      unshare_expr (def_rhs),
					      fold_convert (ptr_type_node,
							    rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  return false;
}
1015 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
1017 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
1018 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
1019 node or for recovery of array indexing from pointer arithmetic.
1021 PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
1022 the single use in the previous invocation. Pass true when calling
1023 this as toplevel.
1025 Returns true, if all uses have been propagated into. */
1027 static bool
1028 forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
1030 imm_use_iterator iter;
1031 gimple use_stmt;
1032 bool all = true;
1033 bool single_use_p = parent_single_use_p && has_single_use (name);
1035 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
1037 bool result;
1038 tree use_rhs;
1040 /* If the use is not in a simple assignment statement, then
1041 there is nothing we can do. */
1042 if (!is_gimple_assign (use_stmt))
1044 if (!is_gimple_debug (use_stmt))
1045 all = false;
1046 continue;
1049 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1050 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
1051 single_use_p);
1052 /* If the use has moved to a different statement adjust
1053 the update machinery for the old statement too. */
1054 if (use_stmt != gsi_stmt (gsi))
1056 update_stmt (use_stmt);
1057 use_stmt = gsi_stmt (gsi);
1059 update_stmt (use_stmt);
1060 all &= result;
1062 /* Remove intermediate now unused copy and conversion chains. */
1063 use_rhs = gimple_assign_rhs1 (use_stmt);
1064 if (result
1065 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
1066 && TREE_CODE (use_rhs) == SSA_NAME
1067 && has_zero_uses (gimple_assign_lhs (use_stmt)))
1069 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1070 release_defs (use_stmt);
1071 gsi_remove (&gsi, true);
1075 return all && has_zero_uses (name);
1079 /* Forward propagate the comparison defined in *DEFGSI like
1080 cond_1 = x CMP y to uses of the form
1081 a_1 = (T')cond_1
1082 a_1 = !cond_1
1083 a_1 = cond_1 != 0
1084 Returns true if stmt is now unused. Advance DEFGSI to the next
1085 statement. */
1087 static bool
1088 forward_propagate_comparison (gimple_stmt_iterator *defgsi)
1090 gimple stmt = gsi_stmt (*defgsi);
1091 tree name = gimple_assign_lhs (stmt);
1092 gimple use_stmt;
1093 tree tmp = NULL_TREE;
1094 gimple_stmt_iterator gsi;
1095 enum tree_code code;
1096 tree lhs;
1098 /* Don't propagate ssa names that occur in abnormal phis. */
1099 if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1100 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
1101 || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
1102 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
1103 goto bailout;
1105 /* Do not un-cse comparisons. But propagate through copies. */
1106 use_stmt = get_prop_dest_stmt (name, &name);
1107 if (!use_stmt
1108 || !is_gimple_assign (use_stmt))
1109 goto bailout;
1111 code = gimple_assign_rhs_code (use_stmt);
1112 lhs = gimple_assign_lhs (use_stmt);
1113 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
1114 goto bailout;
1116 /* We can propagate the condition into a statement that
1117 computes the logical negation of the comparison result. */
1118 if ((code == BIT_NOT_EXPR
1119 && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
1120 || (code == BIT_XOR_EXPR
1121 && integer_onep (gimple_assign_rhs2 (use_stmt))))
1123 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
1124 bool nans = HONOR_NANS (TYPE_MODE (type));
1125 enum tree_code inv_code;
1126 inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
1127 if (inv_code == ERROR_MARK)
1128 goto bailout;
1130 tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
1131 gimple_assign_rhs2 (stmt));
1133 else
1134 goto bailout;
1136 gsi = gsi_for_stmt (use_stmt);
1137 gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
1138 use_stmt = gsi_stmt (gsi);
1139 update_stmt (use_stmt);
1141 if (dump_file && (dump_flags & TDF_DETAILS))
1143 fprintf (dump_file, " Replaced '");
1144 print_gimple_expr (dump_file, stmt, 0, dump_flags);
1145 fprintf (dump_file, "' with '");
1146 print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
1147 fprintf (dump_file, "'\n");
1150 /* When we remove stmt now the iterator defgsi goes off it's current
1151 sequence, hence advance it now. */
1152 gsi_next (defgsi);
1154 /* Remove defining statements. */
1155 return remove_prop_source_from_use (name);
1157 bailout:
1158 gsi_next (defgsi);
1159 return false;
1163 /* GSI_P points to a statement which performs a narrowing integral
1164 conversion.
1166 Look for cases like:
1168 t = x & c;
1169 y = (T) t;
1171 Turn them into:
1173 t = x & c;
1174 y = (T) x;
1176 If T is narrower than X's type and C merely masks off bits outside
1177 of (T) and nothing else.
1179 Normally we'd let DCE remove the dead statement. But no DCE runs
1180 after the last forwprop/combine pass, so we remove the obviously
1181 dead code ourselves.
1183 Return TRUE if a change was made, FALSE otherwise. */
1185 static bool
1186 simplify_conversion_from_bitmask (gimple_stmt_iterator *gsi_p)
1188 gimple stmt = gsi_stmt (*gsi_p);
1189 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
1191 /* See if the input for the conversion was set via a BIT_AND_EXPR and
1192 the only use of the BIT_AND_EXPR result is the conversion. */
1193 if (is_gimple_assign (rhs_def_stmt)
1194 && gimple_assign_rhs_code (rhs_def_stmt) == BIT_AND_EXPR
1195 && has_single_use (gimple_assign_lhs (rhs_def_stmt)))
1197 tree rhs_def_operand1 = gimple_assign_rhs1 (rhs_def_stmt);
1198 tree rhs_def_operand2 = gimple_assign_rhs2 (rhs_def_stmt);
1199 tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
1201 /* Now verify suitability of the BIT_AND_EXPR's operands.
1202 The first must be an SSA_NAME that we can propagate and the
1203 second must be an integer constant that masks out all the
1204 bits outside the final result's type, but nothing else. */
1205 if (TREE_CODE (rhs_def_operand1) == SSA_NAME
1206 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand1)
1207 && TREE_CODE (rhs_def_operand2) == INTEGER_CST
1208 && operand_equal_p (rhs_def_operand2,
1209 build_low_bits_mask (TREE_TYPE (rhs_def_operand2),
1210 TYPE_PRECISION (lhs_type)),
1213 /* This is an optimizable case. Replace the source operand
1214 in the conversion with the first source operand of the
1215 BIT_AND_EXPR. */
1216 gimple_assign_set_rhs1 (stmt, rhs_def_operand1);
1217 stmt = gsi_stmt (*gsi_p);
1218 update_stmt (stmt);
1220 /* There is no DCE after the last forwprop pass. It's
1221 easy to clean up the first order effects here. */
1222 gimple_stmt_iterator si;
1223 si = gsi_for_stmt (rhs_def_stmt);
1224 gsi_remove (&si, true);
1225 release_defs (rhs_def_stmt);
1226 return true;
1230 return false;
1234 /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1235 If so, we can change STMT into lhs = y which can later be copy
1236 propagated. Similarly for negation.
1238 This could trivially be formulated as a forward propagation
1239 to immediate uses. However, we already had an implementation
1240 from DOM which used backward propagation via the use-def links.
1242 It turns out that backward propagation is actually faster as
1243 there's less work to do for each NOT/NEG expression we find.
1244 Backwards propagation needs to look at the statement in a single
1245 backlink. Forward propagation needs to look at potentially more
1246 than one forward link.
1248 Returns true when the statement was changed. */
1250 static bool
1251 simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
1253 gimple stmt = gsi_stmt (*gsi_p);
1254 tree rhs = gimple_assign_rhs1 (stmt);
1255 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
1257 /* See if the RHS_DEF_STMT has the same form as our statement. */
1258 if (is_gimple_assign (rhs_def_stmt)
1259 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
1261 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
1263 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1264 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1265 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
1267 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
1268 stmt = gsi_stmt (*gsi_p);
1269 update_stmt (stmt);
1270 return true;
1274 return false;
1277 /* Helper function for simplify_gimple_switch. Remove case labels that
1278 have values outside the range of the new type. */
1280 static void
1281 simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
1283 unsigned int branch_num = gimple_switch_num_labels (stmt);
1284 vec<tree> labels;
1285 labels.create (branch_num);
1286 unsigned int i, len;
1288 /* Collect the existing case labels in a VEC, and preprocess it as if
1289 we are gimplifying a GENERIC SWITCH_EXPR. */
1290 for (i = 1; i < branch_num; i++)
1291 labels.quick_push (gimple_switch_label (stmt, i));
1292 preprocess_case_label_vec_for_gimple (labels, index_type, NULL);
1294 /* If any labels were removed, replace the existing case labels
1295 in the GIMPLE_SWITCH statement with the correct ones.
1296 Note that the type updates were done in-place on the case labels,
1297 so we only have to replace the case labels in the GIMPLE_SWITCH
1298 if the number of labels changed. */
1299 len = labels.length ();
1300 if (len < branch_num - 1)
1302 bitmap target_blocks;
1303 edge_iterator ei;
1304 edge e;
1306 /* Corner case: *all* case labels have been removed as being
1307 out-of-range for INDEX_TYPE. Push one label and let the
1308 CFG cleanups deal with this further. */
1309 if (len == 0)
1311 tree label, elt;
1313 label = CASE_LABEL (gimple_switch_default_label (stmt));
1314 elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
1315 labels.quick_push (elt);
1316 len = 1;
1319 for (i = 0; i < labels.length (); i++)
1320 gimple_switch_set_label (stmt, i + 1, labels[i]);
1321 for (i++ ; i < branch_num; i++)
1322 gimple_switch_set_label (stmt, i, NULL_TREE);
1323 gimple_switch_set_num_labels (stmt, len + 1);
1325 /* Cleanup any edges that are now dead. */
1326 target_blocks = BITMAP_ALLOC (NULL);
1327 for (i = 0; i < gimple_switch_num_labels (stmt); i++)
1329 tree elt = gimple_switch_label (stmt, i);
1330 basic_block target = label_to_block (CASE_LABEL (elt));
1331 bitmap_set_bit (target_blocks, target->index);
1333 for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
1335 if (! bitmap_bit_p (target_blocks, e->dest->index))
1337 remove_edge (e);
1338 cfg_changed = true;
1339 free_dominance_info (CDI_DOMINATORS);
1341 else
1342 ei_next (&ei);
1344 BITMAP_FREE (target_blocks);
1347 labels.release ();
1350 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1351 the condition which we may be able to optimize better. */
1353 static bool
1354 simplify_gimple_switch (gimple stmt)
1356 tree cond = gimple_switch_index (stmt);
1357 tree def, to, ti;
1358 gimple def_stmt;
1360 /* The optimization that we really care about is removing unnecessary
1361 casts. That will let us do much better in propagating the inferred
1362 constant at the switch target. */
1363 if (TREE_CODE (cond) == SSA_NAME)
1365 def_stmt = SSA_NAME_DEF_STMT (cond);
1366 if (is_gimple_assign (def_stmt))
1368 if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
1370 int need_precision;
1371 bool fail;
1373 def = gimple_assign_rhs1 (def_stmt);
1375 to = TREE_TYPE (cond);
1376 ti = TREE_TYPE (def);
1378 /* If we have an extension that preserves value, then we
1379 can copy the source value into the switch. */
1381 need_precision = TYPE_PRECISION (ti);
1382 fail = false;
1383 if (! INTEGRAL_TYPE_P (ti))
1384 fail = true;
1385 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
1386 fail = true;
1387 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
1388 need_precision += 1;
1389 if (TYPE_PRECISION (to) < need_precision)
1390 fail = true;
1392 if (!fail)
1394 gimple_switch_set_index (stmt, def);
1395 simplify_gimple_switch_label_vec (stmt, ti);
1396 update_stmt (stmt);
1397 return true;
1403 return false;
1406 /* For pointers p2 and p1 return p2 - p1 if the
1407 difference is known and constant, otherwise return NULL. */
1409 static tree
1410 constant_pointer_difference (tree p1, tree p2)
1412 int i, j;
1413 #define CPD_ITERATIONS 5
1414 tree exps[2][CPD_ITERATIONS];
1415 tree offs[2][CPD_ITERATIONS];
1416 int cnt[2];
1418 for (i = 0; i < 2; i++)
1420 tree p = i ? p1 : p2;
1421 tree off = size_zero_node;
1422 gimple stmt;
1423 enum tree_code code;
1425 /* For each of p1 and p2 we need to iterate at least
1426 twice, to handle ADDR_EXPR directly in p1/p2,
1427 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1428 on definition's stmt RHS. Iterate a few extra times. */
1429 j = 0;
1432 if (!POINTER_TYPE_P (TREE_TYPE (p)))
1433 break;
1434 if (TREE_CODE (p) == ADDR_EXPR)
1436 tree q = TREE_OPERAND (p, 0);
1437 HOST_WIDE_INT offset;
1438 tree base = get_addr_base_and_unit_offset (q, &offset);
1439 if (base)
1441 q = base;
1442 if (offset)
1443 off = size_binop (PLUS_EXPR, off, size_int (offset));
1445 if (TREE_CODE (q) == MEM_REF
1446 && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
1448 p = TREE_OPERAND (q, 0);
1449 off = size_binop (PLUS_EXPR, off,
1450 double_int_to_tree (sizetype,
1451 mem_ref_offset (q)));
1453 else
1455 exps[i][j] = q;
1456 offs[i][j++] = off;
1457 break;
1460 if (TREE_CODE (p) != SSA_NAME)
1461 break;
1462 exps[i][j] = p;
1463 offs[i][j++] = off;
1464 if (j == CPD_ITERATIONS)
1465 break;
1466 stmt = SSA_NAME_DEF_STMT (p);
1467 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
1468 break;
1469 code = gimple_assign_rhs_code (stmt);
1470 if (code == POINTER_PLUS_EXPR)
1472 if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
1473 break;
1474 off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
1475 p = gimple_assign_rhs1 (stmt);
1477 else if (code == ADDR_EXPR || code == NOP_EXPR)
1478 p = gimple_assign_rhs1 (stmt);
1479 else
1480 break;
1482 while (1);
1483 cnt[i] = j;
1486 for (i = 0; i < cnt[0]; i++)
1487 for (j = 0; j < cnt[1]; j++)
1488 if (exps[0][i] == exps[1][j])
1489 return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);
1491 return NULL_TREE;
1494 /* *GSI_P is a GIMPLE_CALL to a builtin function.
1495 Optimize
1496 memcpy (p, "abcd", 4);
1497 memset (p + 4, ' ', 3);
1498 into
1499 memcpy (p, "abcd ", 7);
1500 call if the latter can be stored by pieces during expansion. */
1502 static bool
1503 simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
1505 gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
1506 tree vuse = gimple_vuse (stmt2);
1507 if (vuse == NULL)
1508 return false;
1509 stmt1 = SSA_NAME_DEF_STMT (vuse);
1511 switch (DECL_FUNCTION_CODE (callee2))
1513 case BUILT_IN_MEMSET:
1514 if (gimple_call_num_args (stmt2) != 3
1515 || gimple_call_lhs (stmt2)
1516 || CHAR_BIT != 8
1517 || BITS_PER_UNIT != 8)
1518 break;
1519 else
1521 tree callee1;
1522 tree ptr1, src1, str1, off1, len1, lhs1;
1523 tree ptr2 = gimple_call_arg (stmt2, 0);
1524 tree val2 = gimple_call_arg (stmt2, 1);
1525 tree len2 = gimple_call_arg (stmt2, 2);
1526 tree diff, vdef, new_str_cst;
1527 gimple use_stmt;
1528 unsigned int ptr1_align;
1529 unsigned HOST_WIDE_INT src_len;
1530 char *src_buf;
1531 use_operand_p use_p;
1533 if (!tree_fits_shwi_p (val2)
1534 || !tree_fits_uhwi_p (len2))
1535 break;
1536 if (is_gimple_call (stmt1))
1538 /* If first stmt is a call, it needs to be memcpy
1539 or mempcpy, with string literal as second argument and
1540 constant length. */
1541 callee1 = gimple_call_fndecl (stmt1);
1542 if (callee1 == NULL_TREE
1543 || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
1544 || gimple_call_num_args (stmt1) != 3)
1545 break;
1546 if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
1547 && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
1548 break;
1549 ptr1 = gimple_call_arg (stmt1, 0);
1550 src1 = gimple_call_arg (stmt1, 1);
1551 len1 = gimple_call_arg (stmt1, 2);
1552 lhs1 = gimple_call_lhs (stmt1);
1553 if (!tree_fits_uhwi_p (len1))
1554 break;
1555 str1 = string_constant (src1, &off1);
1556 if (str1 == NULL_TREE)
1557 break;
1558 if (!tree_fits_uhwi_p (off1)
1559 || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
1560 || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
1561 - tree_to_uhwi (off1)) > 0
1562 || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
1563 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
1564 != TYPE_MODE (char_type_node))
1565 break;
1567 else if (gimple_assign_single_p (stmt1))
1569 /* Otherwise look for length 1 memcpy optimized into
1570 assignment. */
1571 ptr1 = gimple_assign_lhs (stmt1);
1572 src1 = gimple_assign_rhs1 (stmt1);
1573 if (TREE_CODE (ptr1) != MEM_REF
1574 || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
1575 || !tree_fits_shwi_p (src1))
1576 break;
1577 ptr1 = build_fold_addr_expr (ptr1);
1578 callee1 = NULL_TREE;
1579 len1 = size_one_node;
1580 lhs1 = NULL_TREE;
1581 off1 = size_zero_node;
1582 str1 = NULL_TREE;
1584 else
1585 break;
1587 diff = constant_pointer_difference (ptr1, ptr2);
1588 if (diff == NULL && lhs1 != NULL)
1590 diff = constant_pointer_difference (lhs1, ptr2);
1591 if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1592 && diff != NULL)
1593 diff = size_binop (PLUS_EXPR, diff,
1594 fold_convert (sizetype, len1));
1596 /* If the difference between the second and first destination pointer
1597 is not constant, or is bigger than memcpy length, bail out. */
1598 if (diff == NULL
1599 || !tree_fits_uhwi_p (diff)
1600 || tree_int_cst_lt (len1, diff))
1601 break;
1603 /* Use maximum of difference plus memset length and memcpy length
1604 as the new memcpy length, if it is too big, bail out. */
1605 src_len = tree_to_uhwi (diff);
1606 src_len += tree_to_uhwi (len2);
1607 if (src_len < tree_to_uhwi (len1))
1608 src_len = tree_to_uhwi (len1);
1609 if (src_len > 1024)
1610 break;
1612 /* If mempcpy value is used elsewhere, bail out, as mempcpy
1613 with bigger length will return different result. */
1614 if (lhs1 != NULL_TREE
1615 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1616 && (TREE_CODE (lhs1) != SSA_NAME
1617 || !single_imm_use (lhs1, &use_p, &use_stmt)
1618 || use_stmt != stmt2))
1619 break;
1621 /* If anything reads memory in between memcpy and memset
1622 call, the modified memcpy call might change it. */
1623 vdef = gimple_vdef (stmt1);
1624 if (vdef != NULL
1625 && (!single_imm_use (vdef, &use_p, &use_stmt)
1626 || use_stmt != stmt2))
1627 break;
1629 ptr1_align = get_pointer_alignment (ptr1);
1630 /* Construct the new source string literal. */
1631 src_buf = XALLOCAVEC (char, src_len + 1);
1632 if (callee1)
1633 memcpy (src_buf,
1634 TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
1635 tree_to_uhwi (len1));
1636 else
1637 src_buf[0] = tree_to_shwi (src1);
1638 memset (src_buf + tree_to_uhwi (diff),
1639 tree_to_shwi (val2), tree_to_uhwi (len2));
1640 src_buf[src_len] = '\0';
1641 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1642 handle embedded '\0's. */
1643 if (strlen (src_buf) != src_len)
1644 break;
1645 rtl_profile_for_bb (gimple_bb (stmt2));
1646 /* If the new memcpy wouldn't be emitted by storing the literal
1647 by pieces, this optimization might enlarge .rodata too much,
1648 as commonly used string literals couldn't be shared any
1649 longer. */
1650 if (!can_store_by_pieces (src_len,
1651 builtin_strncpy_read_str,
1652 src_buf, ptr1_align, false))
1653 break;
1655 new_str_cst = build_string_literal (src_len, src_buf);
1656 if (callee1)
1658 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1659 memset call. */
1660 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1661 gimple_call_set_lhs (stmt1, NULL_TREE);
1662 gimple_call_set_arg (stmt1, 1, new_str_cst);
1663 gimple_call_set_arg (stmt1, 2,
1664 build_int_cst (TREE_TYPE (len1), src_len));
1665 update_stmt (stmt1);
1666 unlink_stmt_vdef (stmt2);
1667 gsi_remove (gsi_p, true);
1668 release_defs (stmt2);
1669 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1670 release_ssa_name (lhs1);
1671 return true;
1673 else
1675 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1676 assignment, remove STMT1 and change memset call into
1677 memcpy call. */
1678 gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
1680 if (!is_gimple_val (ptr1))
1681 ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
1682 true, GSI_SAME_STMT);
1683 gimple_call_set_fndecl (stmt2,
1684 builtin_decl_explicit (BUILT_IN_MEMCPY));
1685 gimple_call_set_arg (stmt2, 0, ptr1);
1686 gimple_call_set_arg (stmt2, 1, new_str_cst);
1687 gimple_call_set_arg (stmt2, 2,
1688 build_int_cst (TREE_TYPE (len2), src_len));
1689 unlink_stmt_vdef (stmt1);
1690 gsi_remove (&gsi, true);
1691 release_defs (stmt1);
1692 update_stmt (stmt2);
1693 return false;
1696 break;
1697 default:
1698 break;
1700 return false;
1703 /* Checks if expression has type of one-bit precision, or is a known
1704 truth-valued expression. */
1705 static bool
1706 truth_valued_ssa_name (tree name)
1708 gimple def;
1709 tree type = TREE_TYPE (name);
1711 if (!INTEGRAL_TYPE_P (type))
1712 return false;
1713 /* Don't check here for BOOLEAN_TYPE as the precision isn't
1714 necessarily one and so ~X is not equal to !X. */
1715 if (TYPE_PRECISION (type) == 1)
1716 return true;
1717 def = SSA_NAME_DEF_STMT (name);
1718 if (is_gimple_assign (def))
1719 return truth_value_p (gimple_assign_rhs_code (def));
1720 return false;
1723 /* Helper routine for simplify_bitwise_binary_1 function.
1724 Return for the SSA name NAME the expression X if it mets condition
1725 NAME = !X. Otherwise return NULL_TREE.
1726 Detected patterns for NAME = !X are:
1727 !X and X == 0 for X with integral type.
1728 X ^ 1, X != 1,or ~X for X with integral type with precision of one. */
1729 static tree
1730 lookup_logical_inverted_value (tree name)
1732 tree op1, op2;
1733 enum tree_code code;
1734 gimple def;
1736 /* If name has none-intergal type, or isn't a SSA_NAME, then
1737 return. */
1738 if (TREE_CODE (name) != SSA_NAME
1739 || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
1740 return NULL_TREE;
1741 def = SSA_NAME_DEF_STMT (name);
1742 if (!is_gimple_assign (def))
1743 return NULL_TREE;
1745 code = gimple_assign_rhs_code (def);
1746 op1 = gimple_assign_rhs1 (def);
1747 op2 = NULL_TREE;
1749 /* Get for EQ_EXPR or BIT_XOR_EXPR operation the second operand.
1750 If CODE isn't an EQ_EXPR, BIT_XOR_EXPR, or BIT_NOT_EXPR, then return. */
1751 if (code == EQ_EXPR || code == NE_EXPR
1752 || code == BIT_XOR_EXPR)
1753 op2 = gimple_assign_rhs2 (def);
1755 switch (code)
1757 case BIT_NOT_EXPR:
1758 if (truth_valued_ssa_name (name))
1759 return op1;
1760 break;
1761 case EQ_EXPR:
1762 /* Check if we have X == 0 and X has an integral type. */
1763 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1764 break;
1765 if (integer_zerop (op2))
1766 return op1;
1767 break;
1768 case NE_EXPR:
1769 /* Check if we have X != 1 and X is a truth-valued. */
1770 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1771 break;
1772 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1773 return op1;
1774 break;
1775 case BIT_XOR_EXPR:
1776 /* Check if we have X ^ 1 and X is truth valued. */
1777 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1778 return op1;
1779 break;
1780 default:
1781 break;
1784 return NULL_TREE;
1787 /* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
1788 operations CODE, if one operand has the logically inverted
1789 value of the other. */
1790 static tree
1791 simplify_bitwise_binary_1 (enum tree_code code, tree type,
1792 tree arg1, tree arg2)
1794 tree anot;
1796 /* If CODE isn't a bitwise binary operation, return NULL_TREE. */
1797 if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
1798 && code != BIT_XOR_EXPR)
1799 return NULL_TREE;
1801 /* First check if operands ARG1 and ARG2 are equal. If so
1802 return NULL_TREE as this optimization is handled fold_stmt. */
1803 if (arg1 == arg2)
1804 return NULL_TREE;
1805 /* See if we have in arguments logical-not patterns. */
1806 if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
1807 || anot != arg2)
1808 && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
1809 || anot != arg1))
1810 return NULL_TREE;
1812 /* X & !X -> 0. */
1813 if (code == BIT_AND_EXPR)
1814 return fold_convert (type, integer_zero_node);
1815 /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued. */
1816 if (truth_valued_ssa_name (anot))
1817 return fold_convert (type, integer_one_node);
1819 /* ??? Otherwise result is (X != 0 ? X : 1). not handled. */
1820 return NULL_TREE;
1823 /* Given a ssa_name in NAME see if it was defined by an assignment and
1824 set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
1825 to the second operand on the rhs. */
1827 static inline void
1828 defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
1830 gimple def;
1831 enum tree_code code1;
1832 tree arg11;
1833 tree arg21;
1834 tree arg31;
1835 enum gimple_rhs_class grhs_class;
1837 code1 = TREE_CODE (name);
1838 arg11 = name;
1839 arg21 = NULL_TREE;
1840 grhs_class = get_gimple_rhs_class (code1);
1842 if (code1 == SSA_NAME)
1844 def = SSA_NAME_DEF_STMT (name);
1846 if (def && is_gimple_assign (def)
1847 && can_propagate_from (def))
1849 code1 = gimple_assign_rhs_code (def);
1850 arg11 = gimple_assign_rhs1 (def);
1851 arg21 = gimple_assign_rhs2 (def);
1852 arg31 = gimple_assign_rhs2 (def);
1855 else if (grhs_class == GIMPLE_TERNARY_RHS
1856 || GIMPLE_BINARY_RHS
1857 || GIMPLE_UNARY_RHS
1858 || GIMPLE_SINGLE_RHS)
1859 extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);
1861 *code = code1;
1862 *arg1 = arg11;
1863 if (arg2)
1864 *arg2 = arg21;
1865 /* Ignore arg3 currently. */
1868 /* Return true if a conversion of an operand from type FROM to type TO
1869 should be applied after performing the operation instead. */
1871 static bool
1872 hoist_conversion_for_bitop_p (tree to, tree from)
1874 /* That's a good idea if the conversion widens the operand, thus
1875 after hoisting the conversion the operation will be narrower. */
1876 if (TYPE_PRECISION (from) < TYPE_PRECISION (to))
1877 return true;
1879 /* It's also a good idea if the conversion is to a non-integer mode. */
1880 if (GET_MODE_CLASS (TYPE_MODE (to)) != MODE_INT)
1881 return true;
1883 /* Or if the precision of TO is not the same as the precision
1884 of its mode. */
1885 if (TYPE_PRECISION (to) != GET_MODE_PRECISION (TYPE_MODE (to)))
1886 return true;
1888 return false;
1891 /* GSI points to a statement of the form
1893 result = OP0 CODE OP1
1895 Where OP0 and OP1 are single bit SSA_NAMEs and CODE is either
1896 BIT_AND_EXPR or BIT_IOR_EXPR.
1898 If OP0 is fed by a bitwise negation of another single bit SSA_NAME,
1899 then we can simplify the two statements into a single LT_EXPR or LE_EXPR
1900 when code is BIT_AND_EXPR and BIT_IOR_EXPR respectively.
1902 If a simplification is made, return TRUE, else return FALSE. */
1903 static bool
1904 simplify_bitwise_binary_boolean (gimple_stmt_iterator *gsi,
1905 enum tree_code code,
1906 tree op0, tree op1)
1908 gimple op0_def_stmt = SSA_NAME_DEF_STMT (op0);
1910 if (!is_gimple_assign (op0_def_stmt)
1911 || (gimple_assign_rhs_code (op0_def_stmt) != BIT_NOT_EXPR))
1912 return false;
1914 tree x = gimple_assign_rhs1 (op0_def_stmt);
1915 if (TREE_CODE (x) == SSA_NAME
1916 && INTEGRAL_TYPE_P (TREE_TYPE (x))
1917 && TYPE_PRECISION (TREE_TYPE (x)) == 1
1918 && TYPE_UNSIGNED (TREE_TYPE (x)) == TYPE_UNSIGNED (TREE_TYPE (op1)))
1920 enum tree_code newcode;
1922 gimple stmt = gsi_stmt (*gsi);
1923 gimple_assign_set_rhs1 (stmt, x);
1924 gimple_assign_set_rhs2 (stmt, op1);
1925 if (code == BIT_AND_EXPR)
1926 newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LT_EXPR : GT_EXPR;
1927 else
1928 newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LE_EXPR : GE_EXPR;
1929 gimple_assign_set_rhs_code (stmt, newcode);
1930 update_stmt (stmt);
1931 return true;
1933 return false;
1937 /* Simplify bitwise binary operations.
1938 Return true if a transformation applied, otherwise return false. */
1940 static bool
1941 simplify_bitwise_binary (gimple_stmt_iterator *gsi)
1943 gimple stmt = gsi_stmt (*gsi);
1944 tree arg1 = gimple_assign_rhs1 (stmt);
1945 tree arg2 = gimple_assign_rhs2 (stmt);
1946 enum tree_code code = gimple_assign_rhs_code (stmt);
1947 tree res;
1948 tree def1_arg1, def1_arg2, def2_arg1, def2_arg2;
1949 enum tree_code def1_code, def2_code;
1951 defcodefor_name (arg1, &def1_code, &def1_arg1, &def1_arg2);
1952 defcodefor_name (arg2, &def2_code, &def2_arg1, &def2_arg2);
1954 /* Try to fold (type) X op CST -> (type) (X op ((type-x) CST))
1955 when profitable. */
1956 if (TREE_CODE (arg2) == INTEGER_CST
1957 && CONVERT_EXPR_CODE_P (def1_code)
1958 && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1))
1959 && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
1960 && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
1962 gimple newop;
1963 tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
1964 newop =
1965 gimple_build_assign_with_ops (code, tem, def1_arg1,
1966 fold_convert_loc (gimple_location (stmt),
1967 TREE_TYPE (def1_arg1),
1968 arg2));
1969 gimple_set_location (newop, gimple_location (stmt));
1970 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1971 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1972 tem, NULL_TREE, NULL_TREE);
1973 update_stmt (gsi_stmt (*gsi));
1974 return true;
1977 /* For bitwise binary operations apply operand conversions to the
1978 binary operation result instead of to the operands. This allows
1979 to combine successive conversions and bitwise binary operations. */
1980 if (CONVERT_EXPR_CODE_P (def1_code)
1981 && CONVERT_EXPR_CODE_P (def2_code)
1982 && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
1983 && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1)))
1985 gimple newop;
1986 tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
1987 newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
1988 gimple_set_location (newop, gimple_location (stmt));
1989 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1990 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1991 tem, NULL_TREE, NULL_TREE);
1992 update_stmt (gsi_stmt (*gsi));
1993 return true;
1997 /* Simplify (A & B) OP0 (C & B) to (A OP0 C) & B. */
1998 if (def1_code == def2_code
1999 && def1_code == BIT_AND_EXPR
2000 && operand_equal_for_phi_arg_p (def1_arg2,
2001 def2_arg2))
2003 tree b = def1_arg2;
2004 tree a = def1_arg1;
2005 tree c = def2_arg1;
2006 tree inner = fold_build2 (code, TREE_TYPE (arg2), a, c);
2007 /* If A OP0 C (this usually means C is the same as A) is 0
2008 then fold it down correctly. */
2009 if (integer_zerop (inner))
2011 gimple_assign_set_rhs_from_tree (gsi, inner);
2012 update_stmt (stmt);
2013 return true;
2015 /* If A OP0 C (this usually means C is the same as A) is a ssa_name
2016 then fold it down correctly. */
2017 else if (TREE_CODE (inner) == SSA_NAME)
2019 tree outer = fold_build2 (def1_code, TREE_TYPE (inner),
2020 inner, b);
2021 gimple_assign_set_rhs_from_tree (gsi, outer);
2022 update_stmt (stmt);
2023 return true;
2025 else
2027 gimple newop;
2028 tree tem;
2029 tem = make_ssa_name (TREE_TYPE (arg2), NULL);
2030 newop = gimple_build_assign_with_ops (code, tem, a, c);
2031 gimple_set_location (newop, gimple_location (stmt));
2032 /* Make sure to re-process the new stmt as it's walking upwards. */
2033 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
2034 gimple_assign_set_rhs1 (stmt, tem);
2035 gimple_assign_set_rhs2 (stmt, b);
2036 gimple_assign_set_rhs_code (stmt, def1_code);
2037 update_stmt (stmt);
2038 return true;
2042 /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2). */
2043 if (code == BIT_AND_EXPR
2044 && def1_code == BIT_IOR_EXPR
2045 && CONSTANT_CLASS_P (arg2)
2046 && CONSTANT_CLASS_P (def1_arg2))
2048 tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
2049 arg2, def1_arg2);
2050 tree tem;
2051 gimple newop;
2052 if (integer_zerop (cst))
2054 gimple_assign_set_rhs1 (stmt, def1_arg1);
2055 update_stmt (stmt);
2056 return true;
2058 tem = make_ssa_name (TREE_TYPE (arg2), NULL);
2059 newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
2060 tem, def1_arg1, arg2);
2061 gimple_set_location (newop, gimple_location (stmt));
2062 /* Make sure to re-process the new stmt as it's walking upwards. */
2063 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
2064 gimple_assign_set_rhs1 (stmt, tem);
2065 gimple_assign_set_rhs2 (stmt, cst);
2066 gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
2067 update_stmt (stmt);
2068 return true;
2071 /* Combine successive equal operations with constants. */
2072 if ((code == BIT_AND_EXPR
2073 || code == BIT_IOR_EXPR
2074 || code == BIT_XOR_EXPR)
2075 && def1_code == code
2076 && CONSTANT_CLASS_P (arg2)
2077 && CONSTANT_CLASS_P (def1_arg2))
2079 tree cst = fold_build2 (code, TREE_TYPE (arg2),
2080 arg2, def1_arg2);
2081 gimple_assign_set_rhs1 (stmt, def1_arg1);
2082 gimple_assign_set_rhs2 (stmt, cst);
2083 update_stmt (stmt);
2084 return true;
2087 /* Canonicalize X ^ ~0 to ~X. */
2088 if (code == BIT_XOR_EXPR
2089 && integer_all_onesp (arg2))
2091 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
2092 gcc_assert (gsi_stmt (*gsi) == stmt);
2093 update_stmt (stmt);
2094 return true;
2097 /* Try simple folding for X op !X, and X op X. */
2098 res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
2099 if (res != NULL_TREE)
2101 gimple_assign_set_rhs_from_tree (gsi, res);
2102 update_stmt (gsi_stmt (*gsi));
2103 return true;
2106 if (code == BIT_AND_EXPR || code == BIT_IOR_EXPR)
2108 enum tree_code ocode = code == BIT_AND_EXPR ? BIT_IOR_EXPR : BIT_AND_EXPR;
2109 if (def1_code == ocode)
2111 tree x = arg2;
2112 enum tree_code coden;
2113 tree a1, a2;
2114 /* ( X | Y) & X -> X */
2115 /* ( X & Y) | X -> X */
2116 if (x == def1_arg1
2117 || x == def1_arg2)
2119 gimple_assign_set_rhs_from_tree (gsi, x);
2120 update_stmt (gsi_stmt (*gsi));
2121 return true;
2124 defcodefor_name (def1_arg1, &coden, &a1, &a2);
2125 /* (~X | Y) & X -> X & Y */
2126 /* (~X & Y) | X -> X | Y */
2127 if (coden == BIT_NOT_EXPR && a1 == x)
2129 gimple_assign_set_rhs_with_ops (gsi, code,
2130 x, def1_arg2);
2131 gcc_assert (gsi_stmt (*gsi) == stmt);
2132 update_stmt (stmt);
2133 return true;
2135 defcodefor_name (def1_arg2, &coden, &a1, &a2);
2136 /* (Y | ~X) & X -> X & Y */
2137 /* (Y & ~X) | X -> X | Y */
2138 if (coden == BIT_NOT_EXPR && a1 == x)
2140 gimple_assign_set_rhs_with_ops (gsi, code,
2141 x, def1_arg1);
2142 gcc_assert (gsi_stmt (*gsi) == stmt);
2143 update_stmt (stmt);
2144 return true;
2147 if (def2_code == ocode)
2149 enum tree_code coden;
2150 tree a1;
2151 tree x = arg1;
2152 /* X & ( X | Y) -> X */
2153 /* X | ( X & Y) -> X */
2154 if (x == def2_arg1
2155 || x == def2_arg2)
2157 gimple_assign_set_rhs_from_tree (gsi, x);
2158 update_stmt (gsi_stmt (*gsi));
2159 return true;
2161 defcodefor_name (def2_arg1, &coden, &a1, NULL);
2162 /* (~X | Y) & X -> X & Y */
2163 /* (~X & Y) | X -> X | Y */
2164 if (coden == BIT_NOT_EXPR && a1 == x)
2166 gimple_assign_set_rhs_with_ops (gsi, code,
2167 x, def2_arg2);
2168 gcc_assert (gsi_stmt (*gsi) == stmt);
2169 update_stmt (stmt);
2170 return true;
2172 defcodefor_name (def2_arg2, &coden, &a1, NULL);
2173 /* (Y | ~X) & X -> X & Y */
2174 /* (Y & ~X) | X -> X | Y */
2175 if (coden == BIT_NOT_EXPR && a1 == x)
2177 gimple_assign_set_rhs_with_ops (gsi, code,
2178 x, def2_arg1);
2179 gcc_assert (gsi_stmt (*gsi) == stmt);
2180 update_stmt (stmt);
2181 return true;
2185 /* If arg1 and arg2 are booleans (or any single bit type)
2186 then try to simplify:
2188 (~X & Y) -> X < Y
2189 (X & ~Y) -> Y < X
2190 (~X | Y) -> X <= Y
2191 (X | ~Y) -> Y <= X
2193 But only do this if our result feeds into a comparison as
2194 this transformation is not always a win, particularly on
2195 targets with and-not instructions. */
2196 if (TREE_CODE (arg1) == SSA_NAME
2197 && TREE_CODE (arg2) == SSA_NAME
2198 && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
2199 && TYPE_PRECISION (TREE_TYPE (arg1)) == 1
2200 && TYPE_PRECISION (TREE_TYPE (arg2)) == 1
2201 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2202 == TYPE_UNSIGNED (TREE_TYPE (arg2))))
2204 use_operand_p use_p;
2205 gimple use_stmt;
2207 if (single_imm_use (gimple_assign_lhs (stmt), &use_p, &use_stmt))
2209 if (gimple_code (use_stmt) == GIMPLE_COND
2210 && gimple_cond_lhs (use_stmt) == gimple_assign_lhs (stmt)
2211 && integer_zerop (gimple_cond_rhs (use_stmt))
2212 && gimple_cond_code (use_stmt) == NE_EXPR)
2214 if (simplify_bitwise_binary_boolean (gsi, code, arg1, arg2))
2215 return true;
2216 if (simplify_bitwise_binary_boolean (gsi, code, arg2, arg1))
2217 return true;
2222 return false;
2226 /* Recognize rotation patterns. Return true if a transformation
2227 applied, otherwise return false.
2229 We are looking for X with unsigned type T with bitsize B, OP being
2230 +, | or ^, some type T2 wider than T and
2231 (X << CNT1) OP (X >> CNT2) iff CNT1 + CNT2 == B
2232 ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2)) iff CNT1 + CNT2 == B
2233 (X << Y) OP (X >> (B - Y))
2234 (X << (int) Y) OP (X >> (int) (B - Y))
2235 ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
2236 ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
2237 (X << Y) | (X >> ((-Y) & (B - 1)))
2238 (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
2239 ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
2240 ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
2242 and transform these into:
2243 X r<< CNT1
2244 X r<< Y
2246 Note, in the patterns with T2 type, the type of OP operands
2247 might be even a signed type, but should have precision B. */
2249 static bool
2250 simplify_rotate (gimple_stmt_iterator *gsi)
2252 gimple stmt = gsi_stmt (*gsi);
2253 tree arg[2], rtype, rotcnt = NULL_TREE;
2254 tree def_arg1[2], def_arg2[2];
2255 enum tree_code def_code[2];
2256 tree lhs;
2257 int i;
2258 bool swapped_p = false;
2259 gimple g;
2261 arg[0] = gimple_assign_rhs1 (stmt);
2262 arg[1] = gimple_assign_rhs2 (stmt);
2263 rtype = TREE_TYPE (arg[0]);
2265 /* Only create rotates in complete modes. Other cases are not
2266 expanded properly. */
2267 if (!INTEGRAL_TYPE_P (rtype)
2268 || TYPE_PRECISION (rtype) != GET_MODE_PRECISION (TYPE_MODE (rtype)))
2269 return false;
2271 for (i = 0; i < 2; i++)
2272 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
2274 /* Look through narrowing conversions. */
2275 if (CONVERT_EXPR_CODE_P (def_code[0])
2276 && CONVERT_EXPR_CODE_P (def_code[1])
2277 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
2278 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
2279 && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
2280 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
2281 && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
2282 && has_single_use (arg[0])
2283 && has_single_use (arg[1]))
2285 for (i = 0; i < 2; i++)
2287 arg[i] = def_arg1[i];
2288 defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
2292 /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR. */
2293 for (i = 0; i < 2; i++)
2294 if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
2295 return false;
2296 else if (!has_single_use (arg[i]))
2297 return false;
2298 if (def_code[0] == def_code[1])
2299 return false;
2301 /* If we've looked through narrowing conversions before, look through
2302 widening conversions from unsigned type with the same precision
2303 as rtype here. */
2304 if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
2305 for (i = 0; i < 2; i++)
2307 tree tem;
2308 enum tree_code code;
2309 defcodefor_name (def_arg1[i], &code, &tem, NULL);
2310 if (!CONVERT_EXPR_CODE_P (code)
2311 || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
2312 || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
2313 return false;
2314 def_arg1[i] = tem;
2316 /* Both shifts have to use the same first operand. */
2317 if (TREE_CODE (def_arg1[0]) != SSA_NAME || def_arg1[0] != def_arg1[1])
2318 return false;
2319 if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
2320 return false;
2322 /* CNT1 + CNT2 == B case above. */
2323 if (tree_fits_uhwi_p (def_arg2[0])
2324 && tree_fits_uhwi_p (def_arg2[1])
2325 && tree_to_uhwi (def_arg2[0])
2326 + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
2327 rotcnt = def_arg2[0];
2328 else if (TREE_CODE (def_arg2[0]) != SSA_NAME
2329 || TREE_CODE (def_arg2[1]) != SSA_NAME)
2330 return false;
2331 else
2333 tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
2334 enum tree_code cdef_code[2];
2335 /* Look through conversion of the shift count argument.
2336 The C/C++ FE casts any shift count argument to integer_type_node.
2337 The only problem might be if the shift count type maximum value
2338 is equal or smaller than number of bits in rtype. */
2339 for (i = 0; i < 2; i++)
2341 def_arg2_alt[i] = def_arg2[i];
2342 defcodefor_name (def_arg2[i], &cdef_code[i],
2343 &cdef_arg1[i], &cdef_arg2[i]);
2344 if (CONVERT_EXPR_CODE_P (cdef_code[i])
2345 && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
2346 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
2347 > floor_log2 (TYPE_PRECISION (rtype))
2348 && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
2349 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (cdef_arg1[i]))))
2351 def_arg2_alt[i] = cdef_arg1[i];
2352 defcodefor_name (def_arg2_alt[i], &cdef_code[i],
2353 &cdef_arg1[i], &cdef_arg2[i]);
2356 for (i = 0; i < 2; i++)
2357 /* Check for one shift count being Y and the other B - Y,
2358 with optional casts. */
2359 if (cdef_code[i] == MINUS_EXPR
2360 && tree_fits_shwi_p (cdef_arg1[i])
2361 && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
2362 && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
2364 tree tem;
2365 enum tree_code code;
2367 if (cdef_arg2[i] == def_arg2[1 - i]
2368 || cdef_arg2[i] == def_arg2_alt[1 - i])
2370 rotcnt = cdef_arg2[i];
2371 break;
2373 defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
2374 if (CONVERT_EXPR_CODE_P (code)
2375 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2376 && TYPE_PRECISION (TREE_TYPE (tem))
2377 > floor_log2 (TYPE_PRECISION (rtype))
2378 && TYPE_PRECISION (TREE_TYPE (tem))
2379 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
2380 && (tem == def_arg2[1 - i]
2381 || tem == def_arg2_alt[1 - i]))
2383 rotcnt = tem;
2384 break;
2387 /* The above sequence isn't safe for Y being 0,
2388 because then one of the shifts triggers undefined behavior.
2389 This alternative is safe even for rotation count of 0.
2390 One shift count is Y and the other (-Y) & (B - 1). */
2391 else if (cdef_code[i] == BIT_AND_EXPR
2392 && tree_fits_shwi_p (cdef_arg2[i])
2393 && tree_to_shwi (cdef_arg2[i])
2394 == TYPE_PRECISION (rtype) - 1
2395 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
2396 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
2398 tree tem;
2399 enum tree_code code;
2401 defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
2402 if (CONVERT_EXPR_CODE_P (code)
2403 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2404 && TYPE_PRECISION (TREE_TYPE (tem))
2405 > floor_log2 (TYPE_PRECISION (rtype))
2406 && TYPE_PRECISION (TREE_TYPE (tem))
2407 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem))))
2408 defcodefor_name (tem, &code, &tem, NULL);
2410 if (code == NEGATE_EXPR)
2412 if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
2414 rotcnt = tem;
2415 break;
2417 defcodefor_name (tem, &code, &tem, NULL);
2418 if (CONVERT_EXPR_CODE_P (code)
2419 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
2420 && TYPE_PRECISION (TREE_TYPE (tem))
2421 > floor_log2 (TYPE_PRECISION (rtype))
2422 && TYPE_PRECISION (TREE_TYPE (tem))
2423 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
2424 && (tem == def_arg2[1 - i]
2425 || tem == def_arg2_alt[1 - i]))
2427 rotcnt = tem;
2428 break;
2432 if (rotcnt == NULL_TREE)
2433 return false;
/* If the Y / B - Y (or (-Y) & (B - 1)) match above happened on
   operand 0 (i == 0 after the break), the roles of the two shifts
   are swapped and the rotate direction chosen below must flip.  */
2434 swapped_p = i != 1;
/* Give the rotate count the same type as the original shift counts,
   inserting a conversion statement if necessary.  */
2437 if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
2438 TREE_TYPE (rotcnt)))
2440 g = gimple_build_assign_with_ops (NOP_EXPR,
2441 make_ssa_name (TREE_TYPE (def_arg2[0]),
2442 NULL),
2443 rotcnt, NULL_TREE);
2444 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2445 rotcnt = gimple_assign_lhs (g);
/* If we looked through narrowing conversions, build the rotate in the
   wider type of DEF_ARG1[0] and convert the result back to RTYPE;
   otherwise assign directly to the original statement's LHS.  */
2447 lhs = gimple_assign_lhs (stmt);
2448 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
2449 lhs = make_ssa_name (TREE_TYPE (def_arg1[0]), NULL);
2450 g = gimple_build_assign_with_ops (((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
2451 ? LROTATE_EXPR : RROTATE_EXPR,
2452 lhs, def_arg1[0], rotcnt);
2453 if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
2455 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2456 g = gimple_build_assign_with_ops (NOP_EXPR, gimple_assign_lhs (stmt),
2457 lhs, NULL_TREE);
2459 gsi_replace (gsi, g, false);
2460 return true;
2463 /* Perform re-associations of the plus or minus statement STMT that are
2464 always permitted. Returns true if the CFG was changed. */
2466 static bool
2467 associate_plusminus (gimple_stmt_iterator *gsi)
2469 gimple stmt = gsi_stmt (*gsi);
2470 tree rhs1 = gimple_assign_rhs1 (stmt);
2471 tree rhs2 = gimple_assign_rhs2 (stmt);
2472 enum tree_code code = gimple_assign_rhs_code (stmt);
2473 bool changed;
2475 /* We can't reassociate at all for saturating types. */
2476 if (TYPE_SATURATING (TREE_TYPE (rhs1)))
2477 return false;
2479 /* First contract negates. */
/* Iterate as long as a negate was contracted (see "while (changed)"
   below) -- contracting one negate can expose another.  */
2482 changed = false;
2484 /* A +- (-B) -> A -+ B. */
2485 if (TREE_CODE (rhs2) == SSA_NAME)
2487 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2488 if (is_gimple_assign (def_stmt)
2489 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2490 && can_propagate_from (def_stmt))
2492 code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
2493 gimple_assign_set_rhs_code (stmt, code);
2494 rhs2 = gimple_assign_rhs1 (def_stmt);
2495 gimple_assign_set_rhs2 (stmt, rhs2);
2496 gimple_set_modified (stmt, true);
2497 changed = true;
2501 /* (-A) + B -> B - A. */
2502 if (TREE_CODE (rhs1) == SSA_NAME
2503 && code == PLUS_EXPR)
2505 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
2506 if (is_gimple_assign (def_stmt)
2507 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
2508 && can_propagate_from (def_stmt))
2510 code = MINUS_EXPR;
2511 gimple_assign_set_rhs_code (stmt, code);
2512 rhs1 = rhs2;
2513 gimple_assign_set_rhs1 (stmt, rhs1);
2514 rhs2 = gimple_assign_rhs1 (def_stmt);
2515 gimple_assign_set_rhs2 (stmt, rhs2);
2516 gimple_set_modified (stmt, true);
2517 changed = true;
2521 while (changed);
2523 /* We can't reassociate floating-point or fixed-point plus or minus
2524 because of saturation to +-Inf. */
2525 if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
2526 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
2527 goto out;
2529 /* Second match patterns that allow contracting a plus-minus pair
2530 irrespective of overflow issues.
2532 (A +- B) - A -> +- B
2533 (A +- B) -+ B -> A
2534 (CST +- A) +- CST -> CST +- A
2535 (A +- CST) +- CST -> A +- CST
2536 ~A + A -> -1
2537 ~A + 1 -> -A
2538 A - (A +- B) -> -+ B
2539 A +- (B +- A) -> +- B
2540 CST +- (CST +- A) -> CST +- A
2541 CST +- (A +- CST) -> CST +- A
2542 A + ~A -> -1
2544 via commutating the addition and contracting operations to zero
2545 by reassociation. */
2547 if (TREE_CODE (rhs1) == SSA_NAME)
2549 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
2550 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2552 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2553 if (def_code == PLUS_EXPR
2554 || def_code == MINUS_EXPR)
2556 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2557 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2558 if (operand_equal_p (def_rhs1, rhs2, 0)
2559 && code == MINUS_EXPR)
2561 /* (A +- B) - A -> +- B. */
2562 code = ((def_code == PLUS_EXPR)
2563 ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
2564 rhs1 = def_rhs2;
2565 rhs2 = NULL_TREE;
2566 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2567 gcc_assert (gsi_stmt (*gsi) == stmt);
2568 gimple_set_modified (stmt, true);
2570 else if (operand_equal_p (def_rhs2, rhs2, 0)
2571 && code != def_code)
2573 /* (A +- B) -+ B -> A. */
2574 code = TREE_CODE (def_rhs1);
2575 rhs1 = def_rhs1;
2576 rhs2 = NULL_TREE;
2577 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2578 gcc_assert (gsi_stmt (*gsi) == stmt);
2579 gimple_set_modified (stmt, true);
2581 else if (CONSTANT_CLASS_P (rhs2)
2582 && CONSTANT_CLASS_P (def_rhs1))
2584 /* (CST +- A) +- CST -> CST +- A. */
2585 tree cst = fold_binary (code, TREE_TYPE (rhs1),
2586 def_rhs1, rhs2);
2587 if (cst && !TREE_OVERFLOW (cst))
2589 code = def_code;
2590 gimple_assign_set_rhs_code (stmt, code);
2591 rhs1 = cst;
2592 gimple_assign_set_rhs1 (stmt, rhs1);
2593 rhs2 = def_rhs2;
2594 gimple_assign_set_rhs2 (stmt, rhs2);
2595 gimple_set_modified (stmt, true);
2598 else if (CONSTANT_CLASS_P (rhs2)
2599 && CONSTANT_CLASS_P (def_rhs2))
2601 /* (A +- CST) +- CST -> A +- CST. */
2602 enum tree_code mix = (code == def_code)
2603 ? PLUS_EXPR : MINUS_EXPR;
2604 tree cst = fold_binary (mix, TREE_TYPE (rhs1),
2605 def_rhs2, rhs2);
2606 if (cst && !TREE_OVERFLOW (cst))
2608 code = def_code;
2609 gimple_assign_set_rhs_code (stmt, code);
2610 rhs1 = def_rhs1;
2611 gimple_assign_set_rhs1 (stmt, rhs1);
2612 rhs2 = cst;
2613 gimple_assign_set_rhs2 (stmt, rhs2);
2614 gimple_set_modified (stmt, true);
2618 else if (def_code == BIT_NOT_EXPR && code == PLUS_EXPR)
2620 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2621 if (operand_equal_p (def_rhs1, rhs2, 0))
2623 /* ~A + A -> -1. */
2624 rhs1 = build_all_ones_cst (TREE_TYPE (rhs2));
2625 rhs2 = NULL_TREE;
2626 code = TREE_CODE (rhs1);
2627 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2628 gcc_assert (gsi_stmt (*gsi) == stmt);
2629 gimple_set_modified (stmt, true);
2631 else if ((TREE_CODE (TREE_TYPE (rhs2)) != COMPLEX_TYPE
2632 && integer_onep (rhs2))
2633 || (TREE_CODE (rhs2) == COMPLEX_CST
2634 && integer_onep (TREE_REALPART (rhs2))
2635 && integer_onep (TREE_IMAGPART (rhs2))))
2637 /* ~A + 1 -> -A. */
2638 code = NEGATE_EXPR;
2639 rhs1 = def_rhs1;
2640 rhs2 = NULL_TREE;
2641 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2642 gcc_assert (gsi_stmt (*gsi) == stmt);
2643 gimple_set_modified (stmt, true);
2649 if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
2651 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2652 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2654 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2655 if (def_code == PLUS_EXPR
2656 || def_code == MINUS_EXPR)
2658 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2659 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2660 if (operand_equal_p (def_rhs1, rhs1, 0)
2661 && code == MINUS_EXPR)
2663 /* A - (A +- B) -> -+ B. */
2664 code = ((def_code == PLUS_EXPR)
2665 ? NEGATE_EXPR : TREE_CODE (def_rhs2));
2666 rhs1 = def_rhs2;
2667 rhs2 = NULL_TREE;
2668 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2669 gcc_assert (gsi_stmt (*gsi) == stmt);
2670 gimple_set_modified (stmt, true);
2672 else if (operand_equal_p (def_rhs2, rhs1, 0)
2673 && code != def_code)
2675 /* A +- (B +- A) -> +- B. */
2676 code = ((code == PLUS_EXPR)
2677 ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
2678 rhs1 = def_rhs1;
2679 rhs2 = NULL_TREE;
2680 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2681 gcc_assert (gsi_stmt (*gsi) == stmt);
2682 gimple_set_modified (stmt, true);
2684 else if (CONSTANT_CLASS_P (rhs1)
2685 && CONSTANT_CLASS_P (def_rhs1))
2687 /* CST +- (CST +- A) -> CST +- A. */
2688 tree cst = fold_binary (code, TREE_TYPE (rhs2),
2689 rhs1, def_rhs1);
2690 if (cst && !TREE_OVERFLOW (cst))
2692 code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
2693 gimple_assign_set_rhs_code (stmt, code);
2694 rhs1 = cst;
2695 gimple_assign_set_rhs1 (stmt, rhs1);
2696 rhs2 = def_rhs2;
2697 gimple_assign_set_rhs2 (stmt, rhs2);
2698 gimple_set_modified (stmt, true);
2701 else if (CONSTANT_CLASS_P (rhs1)
2702 && CONSTANT_CLASS_P (def_rhs2))
2704 /* CST +- (A +- CST) -> CST +- A. */
2705 tree cst = fold_binary (def_code == code
2706 ? PLUS_EXPR : MINUS_EXPR,
2707 TREE_TYPE (rhs2),
2708 rhs1, def_rhs2);
2709 if (cst && !TREE_OVERFLOW (cst))
2711 rhs1 = cst;
2712 gimple_assign_set_rhs1 (stmt, rhs1);
2713 rhs2 = def_rhs1;
2714 gimple_assign_set_rhs2 (stmt, rhs2);
2715 gimple_set_modified (stmt, true);
2719 else if (def_code == BIT_NOT_EXPR)
2721 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2722 if (code == PLUS_EXPR
2723 && operand_equal_p (def_rhs1, rhs1, 0))
2725 /* A + ~A -> -1. */
2726 rhs1 = build_all_ones_cst (TREE_TYPE (rhs1));
2727 rhs2 = NULL_TREE;
2728 code = TREE_CODE (rhs1);
2729 gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
2730 gcc_assert (gsi_stmt (*gsi) == stmt);
2731 gimple_set_modified (stmt, true);
2737 out:
/* If any pattern above modified the statement, re-fold it in place
   and update it; if that removed an EH side-effect and dead EH edges
   were purged, the CFG changed and we must report true.  */
2738 if (gimple_modified_p (stmt))
2740 fold_stmt_inplace (gsi);
2741 update_stmt (stmt);
2742 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
2743 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2744 return true;
2747 return false;
2750 /* Associate operands of a POINTER_PLUS_EXPR assignment at *GSI. Returns
2751 true if anything changed, false otherwise. */
2753 static bool
2754 associate_pointerplus (gimple_stmt_iterator *gsi)
2756 gimple stmt = gsi_stmt (*gsi);
2757 gimple def_stmt;
2758 tree ptr, rhs, algn;
2760 /* Pattern match
2761 tem = (sizetype) ptr;
2762 tem = tem & algn;
2763 tem = -tem;
2764 ... = ptr p+ tem;
2765 and produce the simpler and easier to analyze with respect to alignment
2766 ... = ptr & ~algn; */
2767 ptr = gimple_assign_rhs1 (stmt);
2768 rhs = gimple_assign_rhs2 (stmt);
2769 if (TREE_CODE (rhs) != SSA_NAME)
2770 return false;
/* Walk the use-def chain backwards through the negate, the masking
   BIT_AND_EXPR and the conversion, verifying each step of the pattern.  */
2771 def_stmt = SSA_NAME_DEF_STMT (rhs);
2772 if (!is_gimple_assign (def_stmt)
2773 || gimple_assign_rhs_code (def_stmt) != NEGATE_EXPR)
2774 return false;
2775 rhs = gimple_assign_rhs1 (def_stmt);
2776 if (TREE_CODE (rhs) != SSA_NAME)
2777 return false;
2778 def_stmt = SSA_NAME_DEF_STMT (rhs);
2779 if (!is_gimple_assign (def_stmt)
2780 || gimple_assign_rhs_code (def_stmt) != BIT_AND_EXPR)
2781 return false;
2782 rhs = gimple_assign_rhs1 (def_stmt);
2783 algn = gimple_assign_rhs2 (def_stmt);
2784 if (TREE_CODE (rhs) != SSA_NAME
2785 || TREE_CODE (algn) != INTEGER_CST)
2786 return false;
2787 def_stmt = SSA_NAME_DEF_STMT (rhs);
2788 if (!is_gimple_assign (def_stmt)
2789 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
2790 return false;
2791 if (gimple_assign_rhs1 (def_stmt) != ptr)
2792 return false;
/* The whole pattern matched: rewrite the statement as ptr & ~algn.  */
2794 algn = double_int_to_tree (TREE_TYPE (ptr), ~tree_to_double_int (algn));
2795 gimple_assign_set_rhs_with_ops (gsi, BIT_AND_EXPR, ptr, algn);
2796 fold_stmt_inplace (gsi);
2797 update_stmt (stmt);
2799 return true;
2802 /* Combine two conversions in a row for the second conversion at *GSI.
2803 Returns 1 if there were any changes made, 2 if cfg-cleanup needs to
2804 run. Else it returns 0. */
2806 static int
2807 combine_conversions (gimple_stmt_iterator *gsi)
2809 gimple stmt = gsi_stmt (*gsi);
2810 gimple def_stmt;
2811 tree op0, lhs;
2812 enum tree_code code = gimple_assign_rhs_code (stmt);
2813 enum tree_code code2;
2815 gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
2816 || code == FLOAT_EXPR
2817 || code == FIX_TRUNC_EXPR);
2819 lhs = gimple_assign_lhs (stmt);
2820 op0 = gimple_assign_rhs1 (stmt);
/* A conversion to a type the operand effectively already has degenerates
   to a plain copy.  */
2821 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
2823 gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
2824 return 1;
2827 if (TREE_CODE (op0) != SSA_NAME)
2828 return 0;
2830 def_stmt = SSA_NAME_DEF_STMT (op0);
2831 if (!is_gimple_assign (def_stmt))
2832 return 0;
2834 code2 = gimple_assign_rhs_code (def_stmt);
2836 if (CONVERT_EXPR_CODE_P (code2) || code2 == FLOAT_EXPR)
2838 tree defop0 = gimple_assign_rhs1 (def_stmt);
2839 tree type = TREE_TYPE (lhs);
2840 tree inside_type = TREE_TYPE (defop0);
2841 tree inter_type = TREE_TYPE (op0);
2842 int inside_int = INTEGRAL_TYPE_P (inside_type);
2843 int inside_ptr = POINTER_TYPE_P (inside_type);
2844 int inside_float = FLOAT_TYPE_P (inside_type);
2845 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
2846 unsigned int inside_prec = TYPE_PRECISION (inside_type);
2847 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
2848 int inter_int = INTEGRAL_TYPE_P (inter_type);
2849 int inter_ptr = POINTER_TYPE_P (inter_type);
2850 int inter_float = FLOAT_TYPE_P (inter_type);
2851 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
2852 unsigned int inter_prec = TYPE_PRECISION (inter_type);
2853 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
2854 int final_int = INTEGRAL_TYPE_P (type);
2855 int final_ptr = POINTER_TYPE_P (type);
2856 int final_float = FLOAT_TYPE_P (type);
2857 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
2858 unsigned int final_prec = TYPE_PRECISION (type);
2859 int final_unsignedp = TYPE_UNSIGNED (type);
2861 /* Don't propagate ssa names that occur in abnormal phis. */
2862 if (TREE_CODE (defop0) == SSA_NAME
2863 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defop0))
2864 return 0;
2866 /* In addition to the cases of two conversions in a row
2867 handled below, if we are converting something to its own
2868 type via an object of identical or wider precision, neither
2869 conversion is needed. */
2870 if (useless_type_conversion_p (type, inside_type)
2871 && (((inter_int || inter_ptr) && final_int)
2872 || (inter_float && final_float))
2873 && inter_prec >= final_prec)
2875 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2876 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2877 update_stmt (stmt);
2878 return remove_prop_source_from_use (op0) ? 2 : 1;
2881 /* Likewise, if the intermediate and initial types are either both
2882 float or both integer, we don't need the middle conversion if the
2883 former is wider than the latter and doesn't change the signedness
2884 (for integers). Avoid this if the final type is a pointer since
2885 then we sometimes need the middle conversion. Likewise if the
2886 final type has a precision not equal to the size of its mode. */
2887 if (((inter_int && inside_int)
2888 || (inter_float && inside_float)
2889 || (inter_vec && inside_vec))
2890 && inter_prec >= inside_prec
2891 && (inter_float || inter_vec
2892 || inter_unsignedp == inside_unsignedp)
2893 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
2894 && TYPE_MODE (type) == TYPE_MODE (inter_type))
2895 && ! final_ptr
2896 && (! final_vec || inter_prec == inside_prec))
2898 gimple_assign_set_rhs1 (stmt, defop0);
2899 update_stmt (stmt);
2900 return remove_prop_source_from_use (op0) ? 2 : 1;
2903 /* If we have a sign-extension of a zero-extended value, we can
2904 replace that by a single zero-extension. Likewise if the
2905 final conversion does not change precision we can drop the
2906 intermediate conversion. */
2907 if (inside_int && inter_int && final_int
2908 && ((inside_prec < inter_prec && inter_prec < final_prec
2909 && inside_unsignedp && !inter_unsignedp)
2910 || final_prec == inter_prec))
2912 gimple_assign_set_rhs1 (stmt, defop0);
2913 update_stmt (stmt);
2914 return remove_prop_source_from_use (op0) ? 2 : 1;
2917 /* Two conversions in a row are not needed unless:
2918 - some conversion is floating-point (overstrict for now), or
2919 - some conversion is a vector (overstrict for now), or
2920 - the intermediate type is narrower than both initial and
2921 final, or
2922 - the intermediate type and innermost type differ in signedness,
2923 and the outermost type is wider than the intermediate, or
2924 - the initial type is a pointer type and the precisions of the
2925 intermediate and final types differ, or
2926 - the final type is a pointer type and the precisions of the
2927 initial and intermediate types differ. */
2928 if (! inside_float && ! inter_float && ! final_float
2929 && ! inside_vec && ! inter_vec && ! final_vec
2930 && (inter_prec >= inside_prec || inter_prec >= final_prec)
2931 && ! (inside_int && inter_int
2932 && inter_unsignedp != inside_unsignedp
2933 && inter_prec < final_prec)
2934 && ((inter_unsignedp && inter_prec > inside_prec)
2935 == (final_unsignedp && final_prec > inter_prec))
2936 && ! (inside_ptr && inter_prec != final_prec)
2937 && ! (final_ptr && inside_prec != inter_prec)
2938 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
2939 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
2941 gimple_assign_set_rhs1 (stmt, defop0);
2942 update_stmt (stmt);
2943 return remove_prop_source_from_use (op0) ? 2 : 1;
2946 /* A truncation to an unsigned type should be canonicalized as
2947 bitwise and of a mask. */
2948 if (final_int && inter_int && inside_int
2949 && final_prec == inside_prec
2950 && final_prec > inter_prec
2951 && inter_unsignedp)
2953 tree tem;
2954 tem = fold_build2 (BIT_AND_EXPR, inside_type,
2955 defop0,
2956 double_int_to_tree
2957 (inside_type, double_int::mask (inter_prec)));
2958 if (!useless_type_conversion_p (type, inside_type))
2960 tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
2961 GSI_SAME_STMT);
2962 gimple_assign_set_rhs1 (stmt, tem);
2964 else
2965 gimple_assign_set_rhs_from_tree (gsi, tem);
2966 update_stmt (gsi_stmt (*gsi));
2967 return 1;
2970 /* If we are converting an integer to a floating-point that can
2971 represent it exactly and back to an integer, we can skip the
2972 floating-point conversion. */
2973 if (inside_int && inter_float && final_int &&
2974 (unsigned) significand_size (TYPE_MODE (inter_type))
2975 >= inside_prec - !inside_unsignedp
2977 if (useless_type_conversion_p (type, inside_type))
2979 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2980 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2981 update_stmt (stmt);
2982 return remove_prop_source_from_use (op0) ? 2 : 1;
2984 else
2986 gimple_assign_set_rhs1 (stmt, defop0);
2987 gimple_assign_set_rhs_code (stmt, CONVERT_EXPR);
2988 update_stmt (stmt);
2989 return remove_prop_source_from_use (op0) ? 2 : 1;
2994 return 0;
2997 /* Combine VIEW_CONVERT_EXPRs with their defining statement. */
/* Returns true if the statement at *GSI was changed.  */
2999 static bool
3000 simplify_vce (gimple_stmt_iterator *gsi)
3002 gimple stmt = gsi_stmt (*gsi);
3003 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
3005 /* Drop useless VIEW_CONVERT_EXPRs. */
3006 tree op = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3007 if (useless_type_conversion_p (type, TREE_TYPE (op)))
3009 gimple_assign_set_rhs1 (stmt, op);
3010 update_stmt (stmt);
3011 return true;
3014 if (TREE_CODE (op) != SSA_NAME)
3015 return false;
3017 gimple def_stmt = SSA_NAME_DEF_STMT (op);
3018 if (!is_gimple_assign (def_stmt))
3019 return false;
3021 tree def_op = gimple_assign_rhs1 (def_stmt);
3022 switch (gimple_assign_rhs_code (def_stmt))
3024 CASE_CONVERT:
3025 /* Strip integral conversions that do not change the precision. */
3026 if ((INTEGRAL_TYPE_P (TREE_TYPE (op))
3027 || POINTER_TYPE_P (TREE_TYPE (op)))
3028 && (INTEGRAL_TYPE_P (TREE_TYPE (def_op))
3029 || POINTER_TYPE_P (TREE_TYPE (def_op)))
3030 && (TYPE_PRECISION (TREE_TYPE (op))
3031 == TYPE_PRECISION (TREE_TYPE (def_op))))
3033 TREE_OPERAND (gimple_assign_rhs1 (stmt), 0) = def_op;
3034 update_stmt (stmt);
3035 return true;
3037 break;
3039 case VIEW_CONVERT_EXPR:
3040 /* Series of VIEW_CONVERT_EXPRs on register operands can
3041 be contracted. */
3042 if (TREE_CODE (TREE_OPERAND (def_op, 0)) == SSA_NAME)
3044 if (useless_type_conversion_p (type,
3045 TREE_TYPE (TREE_OPERAND (def_op, 0))))
3046 gimple_assign_set_rhs1 (stmt, TREE_OPERAND (def_op, 0));
3047 else
3048 TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)
3049 = TREE_OPERAND (def_op, 0);
3050 update_stmt (stmt);
3051 return true;
3054 default:;
3057 return false;
3060 /* Combine an element access with a shuffle. Returns true if there were
3061 any changes made, else it returns false. */
3063 static bool
3064 simplify_bitfield_ref (gimple_stmt_iterator *gsi)
3066 gimple stmt = gsi_stmt (*gsi);
3067 gimple def_stmt;
3068 tree op, op0, op1, op2;
3069 tree elem_type;
3070 unsigned idx, n, size;
3071 enum tree_code code;
3073 op = gimple_assign_rhs1 (stmt);
3074 gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);
3076 op0 = TREE_OPERAND (op, 0);
3077 if (TREE_CODE (op0) != SSA_NAME
3078 || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
3079 return false;
3081 def_stmt = get_prop_source_stmt (op0, false, NULL);
3082 if (!def_stmt || !can_propagate_from (def_stmt))
3083 return false;
3085 op1 = TREE_OPERAND (op, 1);
3086 op2 = TREE_OPERAND (op, 2);
3087 code = gimple_assign_rhs_code (def_stmt);
3089 if (code == CONSTRUCTOR)
3091 tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
3092 gimple_assign_rhs1 (def_stmt), op1, op2);
3093 if (!tem || !valid_gimple_rhs_p (tem))
3094 return false;
3095 gimple_assign_set_rhs_from_tree (gsi, tem);
3096 update_stmt (gsi_stmt (*gsi));
3097 return true;
3100 elem_type = TREE_TYPE (TREE_TYPE (op0));
3101 if (TREE_TYPE (op) != elem_type)
3102 return false;
3104 size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
3105 n = TREE_INT_CST_LOW (op1) / size;
/* Only handle extraction of a single vector element.  */
3106 if (n != 1)
3107 return false;
3108 idx = TREE_INT_CST_LOW (op2) / size;
/* Look through a VEC_PERM_EXPR with a constant mask: map the extracted
   lane through the mask and read the element directly from whichever of
   the two input vectors it selects.  */
3110 if (code == VEC_PERM_EXPR)
3112 tree p, m, index, tem;
3113 unsigned nelts;
3114 m = gimple_assign_rhs3 (def_stmt);
3115 if (TREE_CODE (m) != VECTOR_CST)
3116 return false;
3117 nelts = VECTOR_CST_NELTS (m);
3118 idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
3119 idx %= 2 * nelts;
3120 if (idx < nelts)
3122 p = gimple_assign_rhs1 (def_stmt);
3124 else
3126 p = gimple_assign_rhs2 (def_stmt);
3127 idx -= nelts;
3129 index = build_int_cst (TREE_TYPE (TREE_TYPE (m)), idx * size);
3130 tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
3131 unshare_expr (p), op1, index);
3132 gimple_assign_set_rhs1 (stmt, tem);
3133 fold_stmt (gsi);
3134 update_stmt (gsi_stmt (*gsi));
3135 return true;
3138 return false;
3141 /* Determine whether applying the 2 permutations (mask1 then mask2)
3142 gives back one of the input. */
3144 static int
3145 is_combined_permutation_identity (tree mask1, tree mask2)
3147 tree mask;
3148 unsigned int nelts, i, j;
3149 bool maybe_identity1 = true;
3150 bool maybe_identity2 = true;
3152 gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
3153 && TREE_CODE (mask2) == VECTOR_CST);
3154 mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
3155 gcc_assert (TREE_CODE (mask) == VECTOR_CST);
3157 nelts = VECTOR_CST_NELTS (mask);
3158 for (i = 0; i < nelts; i++)
3160 tree val = VECTOR_CST_ELT (mask, i);
3161 gcc_assert (TREE_CODE (val) == INTEGER_CST);
3162 j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
3163 if (j == i)
3164 maybe_identity2 = false;
3165 else if (j == i + nelts)
3166 maybe_identity1 = false;
3167 else
3168 return 0;
3170 return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
/* Combine a shuffle with its arguments.  Returns 1 if there were any
   changes made, 2 if cfg-cleanup needs to run.  Else it returns 0.

   GSI points to a VEC_PERM_EXPR assignment.  Two cases are handled:
     - two consecutive shuffles whose combined mask is an identity
       (collapsed to a plain copy of one shuffle input),
     - a shuffle of CONSTRUCTOR/VECTOR_CST operands that fold can
       evaluate to a constant result.
   The mask (rhs3) must be a VECTOR_CST in either case.  */

static int
simplify_permutation (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op0, op1, op2, op3, arg0, arg1;
  enum tree_code code;
  bool single_use_op0 = false;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);

  op0 = gimple_assign_rhs1 (stmt);
  op1 = gimple_assign_rhs2 (stmt);
  op2 = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (op2) != VECTOR_CST)
    return 0;

  /* Classify the first input: either a constant vector or the result
     of a propagatable defining statement.  */
  if (TREE_CODE (op0) == VECTOR_CST)
    {
      code = VECTOR_CST;
      arg0 = op0;
    }
  else if (TREE_CODE (op0) == SSA_NAME)
    {
      def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return 0;

      code = gimple_assign_rhs_code (def_stmt);
      arg0 = gimple_assign_rhs1 (def_stmt);
    }
  else
    return 0;

  /* Two consecutive shuffles.  */
  if (code == VEC_PERM_EXPR)
    {
      tree orig;
      int ident;

      if (op0 != op1)
        return 0;
      op3 = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (op3) != VECTOR_CST)
        return 0;
      ident = is_combined_permutation_identity (op3, op2);
      if (!ident)
        return 0;
      /* IDENT tells which input of the inner shuffle survives.  */
      orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
                          : gimple_assign_rhs2 (def_stmt);
      gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
      gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
      gimple_set_num_ops (stmt, 2);
      update_stmt (stmt);
      /* Removing the now-dead inner shuffle may require cfg cleanup.  */
      return remove_prop_source_from_use (op0) ? 2 : 1;
    }

  /* Shuffle of a constructor.  */
  else if (code == CONSTRUCTOR || code == VECTOR_CST)
    {
      tree opt;
      bool ret = false;
      if (op0 != op1)
        {
          /* Propagating a multi-use operand would duplicate work.  */
          if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
            return 0;

          if (TREE_CODE (op1) == VECTOR_CST)
            arg1 = op1;
          else if (TREE_CODE (op1) == SSA_NAME)
            {
              enum tree_code code2;

              gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
              if (!def_stmt2 || !can_propagate_from (def_stmt2))
                return 0;

              code2 = gimple_assign_rhs_code (def_stmt2);
              if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
                return 0;
              arg1 = gimple_assign_rhs1 (def_stmt2);
            }
          else
            return 0;
        }
      else
        {
          /* Already used twice in this statement.  */
          if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
            return 0;
          arg1 = arg0;
        }
      opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
      if (!opt
          || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
        return 0;
      gimple_assign_set_rhs_from_tree (gsi, opt);
      update_stmt (gsi_stmt (*gsi));
      if (TREE_CODE (op0) == SSA_NAME)
        ret = remove_prop_source_from_use (op0);
      if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
        ret |= remove_prop_source_from_use (op1);
      return ret ? 2 : 1;
    }

  return 0;
}
/* Recognize a VEC_PERM_EXPR.  Returns true if there were any changes.

   GSI points to a CONSTRUCTOR of vector type.  If every element is a
   single-element BIT_FIELD_REF extract from the same vector SSA name
   ORIG (of a compatible type), the constructor is rewritten either as
   a plain copy of ORIG (identity selection) or as a VEC_PERM_EXPR of
   ORIG with itself under a constant mask, provided the target supports
   that permutation.  */

static bool
simplify_vector_constructor (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op, op2, orig, type, elem_type;
  unsigned elem_size, nelts, i;
  enum tree_code code;
  constructor_elt *elt;
  unsigned char *sel;
  bool maybe_ident;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);

  op = gimple_assign_rhs1 (stmt);
  type = TREE_TYPE (op);
  gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);

  nelts = TYPE_VECTOR_SUBPARTS (type);
  elem_type = TREE_TYPE (type);
  elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));

  /* SEL collects the selected source element index for each position;
     MAYBE_IDENT tracks whether the selection is the identity.  */
  sel = XALLOCAVEC (unsigned char, nelts);
  orig = NULL;
  maybe_ident = true;
  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
    {
      tree ref, op1;

      if (i >= nelts)
        return false;

      /* Each element must be a single-element BIT_FIELD_REF extract
         from one common source vector.  */
      if (TREE_CODE (elt->value) != SSA_NAME)
        return false;
      def_stmt = get_prop_source_stmt (elt->value, false, NULL);
      if (!def_stmt)
        return false;
      code = gimple_assign_rhs_code (def_stmt);
      if (code != BIT_FIELD_REF)
        return false;
      op1 = gimple_assign_rhs1 (def_stmt);
      ref = TREE_OPERAND (op1, 0);
      if (orig)
        {
          if (ref != orig)
            return false;
        }
      else
        {
          if (TREE_CODE (ref) != SSA_NAME)
            return false;
          if (!useless_type_conversion_p (type, TREE_TYPE (ref)))
            return false;
          orig = ref;
        }
      if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
        return false;
      sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
      if (sel[i] != i) maybe_ident = false;
    }
  /* A partially initialized constructor is not handled.  */
  if (i < nelts)
    return false;

  if (maybe_ident)
    gimple_assign_set_rhs_from_tree (gsi, orig);
  else
    {
      tree mask_type, *mask_elts;

      /* The target must support this permutation.  */
      if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
        return false;
      mask_type
        = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
                             nelts);
      if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
          || GET_MODE_SIZE (TYPE_MODE (mask_type))
             != GET_MODE_SIZE (TYPE_MODE (type)))
        return false;
      mask_elts = XALLOCAVEC (tree, nelts);
      for (i = 0; i < nelts; i++)
        mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
      op2 = build_vector (mask_type, mask_elts);
      gimple_assign_set_rhs_with_ops_1 (gsi, VEC_PERM_EXPR, orig, orig, op2);
    }
  update_stmt (gsi_stmt (*gsi));
  return true;
}
/* Main entry point for the forward propagation and statement combine
   optimizer.

   For each basic block two sweeps are made:
     1. a forward-propagation sweep that pushes addresses, pointer
        arithmetic and comparisons into their use sites (possibly
        removing the defining statement),
     2. a combine sweep that simplifies each statement against the
        statements defining its operands, re-visiting statements that
        changed (tracked via the GF_PLF_1 flag).
   Returns the TODO flags to run afterwards (cfg cleanup when any
   transformation changed the cfg).  */

static unsigned int
ssa_forward_propagate_and_combine (void)
{
  basic_block bb;
  unsigned int todoflags = 0;

  cfg_changed = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      /* Apply forward propagation to all stmts in the basic-block.
         Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
          gimple stmt = gsi_stmt (gsi);
          tree lhs, rhs;
          enum tree_code code;

          if (!is_gimple_assign (stmt))
            {
              gsi_next (&gsi);
              continue;
            }

          lhs = gimple_assign_lhs (stmt);
          rhs = gimple_assign_rhs1 (stmt);
          code = gimple_assign_rhs_code (stmt);
          if (TREE_CODE (lhs) != SSA_NAME
              || has_zero_uses (lhs))
            {
              gsi_next (&gsi);
              continue;
            }

          /* If this statement sets an SSA_NAME to an address,
             try to propagate the address into the uses of the SSA_NAME.  */
          if (code == ADDR_EXPR
              /* Handle pointer conversions on invariant addresses
                 as well, as this is valid gimple.  */
              || (CONVERT_EXPR_CODE_P (code)
                  && TREE_CODE (rhs) == ADDR_EXPR
                  && POINTER_TYPE_P (TREE_TYPE (lhs))))
            {
              tree base = get_base_address (TREE_OPERAND (rhs, 0));
              if ((!base
                   || !DECL_P (base)
                   || decl_address_invariant_p (base))
                  && !stmt_references_abnormal_ssa_name (stmt)
                  && forward_propagate_addr_expr (lhs, rhs, true))
                {
                  /* All uses were replaced; the definition is dead.  */
                  release_defs (stmt);
                  gsi_remove (&gsi, true);
                }
              else
                gsi_next (&gsi);
            }
          else if (code == POINTER_PLUS_EXPR)
            {
              tree off = gimple_assign_rhs2 (stmt);
              if (TREE_CODE (off) == INTEGER_CST
                  && can_propagate_from (stmt)
                  && !simple_iv_increment_p (stmt)
                  /* ??? Better adjust the interface to that function
                     instead of building new trees here.  */
                  && forward_propagate_addr_expr
                       (lhs,
                        build1_loc (gimple_location (stmt),
                                    ADDR_EXPR, TREE_TYPE (rhs),
                                    fold_build2 (MEM_REF,
                                                 TREE_TYPE (TREE_TYPE (rhs)),
                                                 rhs,
                                                 fold_convert (ptr_type_node,
                                                               off))), true))
                {
                  release_defs (stmt);
                  gsi_remove (&gsi, true);
                }
              else if (is_gimple_min_invariant (rhs))
                {
                  /* Make sure to fold &a[0] + off_1 here.  */
                  fold_stmt_inplace (&gsi);
                  update_stmt (stmt);
                  /* If folding changed the rhs code, re-visit this stmt.  */
                  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
                    gsi_next (&gsi);
                }
              else
                gsi_next (&gsi);
            }
          else if (TREE_CODE_CLASS (code) == tcc_comparison)
            {
              if (forward_propagate_comparison (&gsi))
                cfg_changed = true;
            }
          else
            gsi_next (&gsi);
        }

      /* Combine stmts with the stmts defining their operands.
         Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
        {
          gimple stmt = gsi_stmt (gsi);
          bool changed = false;

          /* Mark stmt as potentially needing revisiting.  */
          gimple_set_plf (stmt, GF_PLF_1, false);

          switch (gimple_code (stmt))
            {
            case GIMPLE_ASSIGN:
              {
                tree rhs1 = gimple_assign_rhs1 (stmt);
                enum tree_code code = gimple_assign_rhs_code (stmt);

                /* Dispatch to the matching simplifier for this rhs code.  */
                if ((code == BIT_NOT_EXPR
                     || code == NEGATE_EXPR)
                    && TREE_CODE (rhs1) == SSA_NAME)
                  changed = simplify_not_neg_expr (&gsi);
                else if (code == COND_EXPR
                         || code == VEC_COND_EXPR)
                  {
                    /* In this case the entire COND_EXPR is in rhs1. */
                    if (forward_propagate_into_cond (&gsi)
                        || combine_cond_exprs (&gsi))
                      {
                        changed = true;
                        stmt = gsi_stmt (gsi);
                      }
                  }
                else if (TREE_CODE_CLASS (code) == tcc_comparison)
                  {
                    int did_something;
                    did_something = forward_propagate_into_comparison (&gsi);
                    if (did_something == 2)
                      cfg_changed = true;
                    changed = did_something != 0;
                  }
                else if ((code == PLUS_EXPR
                          || code == BIT_IOR_EXPR
                          || code == BIT_XOR_EXPR)
                         && simplify_rotate (&gsi))
                  changed = true;
                else if (code == BIT_AND_EXPR
                         || code == BIT_IOR_EXPR
                         || code == BIT_XOR_EXPR)
                  changed = simplify_bitwise_binary (&gsi);
                else if (code == PLUS_EXPR
                         || code == MINUS_EXPR)
                  changed = associate_plusminus (&gsi);
                else if (code == POINTER_PLUS_EXPR)
                  changed = associate_pointerplus (&gsi);
                else if (CONVERT_EXPR_CODE_P (code)
                         || code == FLOAT_EXPR
                         || code == FIX_TRUNC_EXPR)
                  {
                    int did_something = combine_conversions (&gsi);
                    if (did_something == 2)
                      cfg_changed = true;

                    /* If we have a narrowing conversion to an integral
                       type that is fed by a BIT_AND_EXPR, we might be
                       able to remove the BIT_AND_EXPR if it merely
                       masks off bits outside the final type (and nothing
                       else).  */
                    if (! did_something)
                      {
                        tree outer_type = TREE_TYPE (gimple_assign_lhs (stmt));
                        tree inner_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
                        if (INTEGRAL_TYPE_P (outer_type)
                            && INTEGRAL_TYPE_P (inner_type)
                            && (TYPE_PRECISION (outer_type)
                                <= TYPE_PRECISION (inner_type)))
                          did_something = simplify_conversion_from_bitmask (&gsi);
                      }

                    changed = did_something != 0;
                  }
                else if (code == VIEW_CONVERT_EXPR)
                  changed = simplify_vce (&gsi);
                else if (code == VEC_PERM_EXPR)
                  {
                    int did_something = simplify_permutation (&gsi);
                    if (did_something == 2)
                      cfg_changed = true;
                    changed = did_something != 0;
                  }
                else if (code == BIT_FIELD_REF)
                  changed = simplify_bitfield_ref (&gsi);
                else if (code == CONSTRUCTOR
                         && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
                  changed = simplify_vector_constructor (&gsi);
                break;
              }

            case GIMPLE_SWITCH:
              changed = simplify_gimple_switch (stmt);
              break;

            case GIMPLE_COND:
              {
                int did_something;
                did_something = forward_propagate_into_gimple_cond (stmt);
                if (did_something == 2)
                  cfg_changed = true;
                changed = did_something != 0;
                break;
              }

            case GIMPLE_CALL:
              {
                tree callee = gimple_call_fndecl (stmt);
                if (callee != NULL_TREE
                    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
                  changed = simplify_builtin_call (&gsi, callee);
                break;
              }

            default:;
            }

          if (changed)
            {
              /* If the stmt changed then re-visit it and the statements
                 inserted before it.  */
              for (; !gsi_end_p (gsi); gsi_prev (&gsi))
                if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
                  break;
              if (gsi_end_p (gsi))
                gsi = gsi_start_bb (bb);
              else
                gsi_next (&gsi);
            }
          else
            {
              /* Stmt no longer needs to be revisited.  */
              gimple_set_plf (stmt, GF_PLF_1, true);
              gsi_next (&gsi);
            }
        }
    }

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
/* Gate function: the pass runs only when -ftree-forwprop is enabled.  */

static bool
gate_forwprop (void)
{
  return flag_tree_forwprop;
}
namespace {

/* Pass descriptor for the forwprop pass; consumed by the
   pass_forwprop class below.  */

const pass_data pass_data_forwprop =
{
  GIMPLE_PASS, /* type */
  "forwprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_FORWPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_update_ssa | TODO_verify_ssa ), /* todo_flags_finish */
};
/* Pass wrapper hooking the gate and execute functions into the
   pass manager.  */

class pass_forwprop : public gimple_opt_pass
{
public:
  pass_forwprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_forwprop, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_forwprop (m_ctxt); }
  bool gate () { return gate_forwprop (); }
  unsigned int execute () { return ssa_forward_propagate_and_combine (); }

}; // class pass_forwprop

} // anon namespace
/* Factory function used by the pass manager to instantiate the
   forwprop pass.  */

gimple_opt_pass *
make_pass_forwprop (gcc::context *ctxt)
{
  return new pass_forwprop (ctxt);
}