1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
25 #include "stor-layout.h"
27 #include "basic-block.h"
28 #include "gimple-pretty-print.h"
29 #include "tree-ssa-alias.h"
30 #include "internal-fn.h"
31 #include "gimple-fold.h"
33 #include "gimple-expr.h"
37 #include "gimple-iterator.h"
38 #include "gimplify-me.h"
39 #include "gimple-ssa.h"
41 #include "tree-phinodes.h"
42 #include "ssa-iterators.h"
43 #include "stringpool.h"
44 #include "tree-ssanames.h"
47 #include "tree-pass.h"
48 #include "langhooks.h"
53 #include "tree-ssa-propagate.h"
54 #include "tree-ssa-dom.h"
57 /* This pass propagates the RHS of assignment statements into use
58 sites of the LHS of the assignment. It's basically a specialized
59 form of tree combination. It is hoped all of this can disappear
60 when we have a generalized tree combiner.
62 One class of common cases we handle is forward propagating a single use
63 variable into a COND_EXPR.
67 if (x) goto ... else goto ...
69 Will be transformed into:
72 if (a COND b) goto ... else goto ...
74 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
76 Or (assuming c1 and c2 are constants):
80 if (x EQ/NEQ c2) goto ... else goto ...
82 Will be transformed into:
85 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
87 Similarly for x = a - c1.
93 if (x) goto ... else goto ...
95 Will be transformed into:
98 if (a == 0) goto ... else goto ...
100 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
101 For these cases, we propagate A into all, possibly more than one,
102 COND_EXPRs that use X.
108 if (x) goto ... else goto ...
110 Will be transformed into:
113 if (a != 0) goto ... else goto ...
115 (Assuming a is an integral type and x is a boolean or x is an
116 integral and a is a boolean.)
118 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
119 For these cases, we propagate A into all, possibly more than one,
120 COND_EXPRs that use X.
122 In addition to eliminating the variable and the statement which assigns
123 a value to the variable, we may be able to later thread the jump without
124 adding insane complexity in the dominator optimizer.
126 Also note these transformations can cascade. We handle this by having
127 a worklist of COND_EXPR statements to examine. As we make a change to
128 a statement, we put it back on the worklist to examine on the next
129 iteration of the main loop.
131 A second class of propagation opportunities arises for ADDR_EXPR
142 ptr = (type1*)&type2var;
145 Will get turned into (if type1 and type2 are the same size
146 and neither have volatile on them):
147 res = VIEW_CONVERT_EXPR<type1>(type2var)
152 ptr2 = ptr + <constant>;
156 ptr2 = &x[constant/elementsize];
161 offset = index * element_size;
162 offset_p = (pointer) offset;
163 ptr2 = ptr + offset_p
165 Will get turned into:
173 Provided that decl has known alignment >= 2, will get turned into
177 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
178 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
181 This will (of course) be extended as other needs arise. */
183 static bool forward_propagate_addr_expr (tree
, tree
, bool);
185 /* Set to true if we delete dead edges during the optimization. */
186 static bool cfg_changed
;
188 static tree
rhs_to_tree (tree type
, gimple stmt
);
190 /* Get the next statement we can propagate NAME's value into skipping
191 trivial copies. Returns the statement that is suitable as a
192 propagation destination or NULL_TREE if there is no such one.
193 This only returns destinations in a single-use chain. FINAL_NAME_P
194 if non-NULL is written to the ssa name that represents the use. */
197 get_prop_dest_stmt (tree name
, tree
*final_name_p
)
203 /* If name has multiple uses, bail out. */
204 if (!single_imm_use (name
, &use
, &use_stmt
))
207 /* If this is not a trivial copy, we found it. */
208 if (!gimple_assign_ssa_name_copy_p (use_stmt
)
209 || gimple_assign_rhs1 (use_stmt
) != name
)
212 /* Continue searching uses of the copy destination. */
213 name
= gimple_assign_lhs (use_stmt
);
217 *final_name_p
= name
;
222 /* Get the statement we can propagate from into NAME skipping
223 trivial copies. Returns the statement which defines the
224 propagation source or NULL_TREE if there is no such one.
225 If SINGLE_USE_ONLY is set considers only sources which have
226 a single use chain up to NAME. If SINGLE_USE_P is non-null,
227 it is set to whether the chain to NAME is a single use chain
228 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
231 get_prop_source_stmt (tree name
, bool single_use_only
, bool *single_use_p
)
233 bool single_use
= true;
236 gimple def_stmt
= SSA_NAME_DEF_STMT (name
);
238 if (!has_single_use (name
))
245 /* If name is defined by a PHI node or is the default def, bail out. */
246 if (!is_gimple_assign (def_stmt
))
249 /* If def_stmt is a simple copy, continue looking. */
250 if (gimple_assign_rhs_code (def_stmt
) == SSA_NAME
)
251 name
= gimple_assign_rhs1 (def_stmt
);
254 if (!single_use_only
&& single_use_p
)
255 *single_use_p
= single_use
;
262 /* Checks if the destination ssa name in DEF_STMT can be used as
263 propagation source. Returns true if so, otherwise false. */
266 can_propagate_from (gimple def_stmt
)
268 gcc_assert (is_gimple_assign (def_stmt
));
270 /* If the rhs has side-effects we cannot propagate from it. */
271 if (gimple_has_volatile_ops (def_stmt
))
274 /* If the rhs is a load we cannot propagate from it. */
275 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt
)) == tcc_reference
276 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt
)) == tcc_declaration
)
279 /* Constants can be always propagated. */
280 if (gimple_assign_single_p (def_stmt
)
281 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt
)))
284 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
285 if (stmt_references_abnormal_ssa_name (def_stmt
))
288 /* If the definition is a conversion of a pointer to a function type,
289 then we can not apply optimizations as some targets require
290 function pointers to be canonicalized and in this case this
291 optimization could eliminate a necessary canonicalization. */
292 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt
)))
294 tree rhs
= gimple_assign_rhs1 (def_stmt
);
295 if (POINTER_TYPE_P (TREE_TYPE (rhs
))
296 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs
))) == FUNCTION_TYPE
)
303 /* Remove a chain of dead statements starting at the definition of
304 NAME. The chain is linked via the first operand of the defining statements.
305 If NAME was replaced in its only use then this function can be used
306 to clean up dead stmts. The function handles already released SSA
308 Returns true if cleanup-cfg has to run. */
311 remove_prop_source_from_use (tree name
)
313 gimple_stmt_iterator gsi
;
315 bool cfg_changed
= false;
320 if (SSA_NAME_IN_FREE_LIST (name
)
321 || SSA_NAME_IS_DEFAULT_DEF (name
)
322 || !has_zero_uses (name
))
325 stmt
= SSA_NAME_DEF_STMT (name
);
326 if (gimple_code (stmt
) == GIMPLE_PHI
327 || gimple_has_side_effects (stmt
))
330 bb
= gimple_bb (stmt
);
331 gsi
= gsi_for_stmt (stmt
);
332 unlink_stmt_vdef (stmt
);
333 if (gsi_remove (&gsi
, true))
334 cfg_changed
|= gimple_purge_dead_eh_edges (bb
);
337 name
= is_gimple_assign (stmt
) ? gimple_assign_rhs1 (stmt
) : NULL_TREE
;
338 } while (name
&& TREE_CODE (name
) == SSA_NAME
);
343 /* Return the rhs of a gimple_assign STMT in a form of a single tree,
344 converted to type TYPE.
346 This should disappear, but is needed so we can combine expressions and use
347 the fold() interfaces. Long term, we need to develop folding and combine
348 routines that deal with gimple exclusively . */
351 rhs_to_tree (tree type
, gimple stmt
)
353 location_t loc
= gimple_location (stmt
);
354 enum tree_code code
= gimple_assign_rhs_code (stmt
);
355 if (get_gimple_rhs_class (code
) == GIMPLE_TERNARY_RHS
)
356 return fold_build3_loc (loc
, code
, type
, gimple_assign_rhs1 (stmt
),
357 gimple_assign_rhs2 (stmt
),
358 gimple_assign_rhs3 (stmt
));
359 else if (get_gimple_rhs_class (code
) == GIMPLE_BINARY_RHS
)
360 return fold_build2_loc (loc
, code
, type
, gimple_assign_rhs1 (stmt
),
361 gimple_assign_rhs2 (stmt
));
362 else if (get_gimple_rhs_class (code
) == GIMPLE_UNARY_RHS
)
363 return build1 (code
, type
, gimple_assign_rhs1 (stmt
));
364 else if (get_gimple_rhs_class (code
) == GIMPLE_SINGLE_RHS
)
365 return gimple_assign_rhs1 (stmt
);
370 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
371 the folded result in a form suitable for COND_EXPR_COND or
372 NULL_TREE, if there is no suitable simplified form. If
373 INVARIANT_ONLY is true only gimple_min_invariant results are
374 considered simplified. */
377 combine_cond_expr_cond (gimple stmt
, enum tree_code code
, tree type
,
378 tree op0
, tree op1
, bool invariant_only
)
382 gcc_assert (TREE_CODE_CLASS (code
) == tcc_comparison
);
384 fold_defer_overflow_warnings ();
385 t
= fold_binary_loc (gimple_location (stmt
), code
, type
, op0
, op1
);
388 fold_undefer_overflow_warnings (false, NULL
, 0);
392 /* Require that we got a boolean type out if we put one in. */
393 gcc_assert (TREE_CODE (TREE_TYPE (t
)) == TREE_CODE (type
));
395 /* Canonicalize the combined condition for use in a COND_EXPR. */
396 t
= canonicalize_cond_expr_cond (t
);
398 /* Bail out if we required an invariant but didn't get one. */
399 if (!t
|| (invariant_only
&& !is_gimple_min_invariant (t
)))
401 fold_undefer_overflow_warnings (false, NULL
, 0);
405 fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt
), stmt
, 0);
410 /* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
411 of its operand. Return a new comparison tree or NULL_TREE if there
412 were no simplifying combines. */
415 forward_propagate_into_comparison_1 (gimple stmt
,
416 enum tree_code code
, tree type
,
419 tree tmp
= NULL_TREE
;
420 tree rhs0
= NULL_TREE
, rhs1
= NULL_TREE
;
421 bool single_use0_p
= false, single_use1_p
= false;
423 /* For comparisons use the first operand, that is likely to
424 simplify comparisons against constants. */
425 if (TREE_CODE (op0
) == SSA_NAME
)
427 gimple def_stmt
= get_prop_source_stmt (op0
, false, &single_use0_p
);
428 if (def_stmt
&& can_propagate_from (def_stmt
))
430 rhs0
= rhs_to_tree (TREE_TYPE (op1
), def_stmt
);
431 tmp
= combine_cond_expr_cond (stmt
, code
, type
,
432 rhs0
, op1
, !single_use0_p
);
438 /* If that wasn't successful, try the second operand. */
439 if (TREE_CODE (op1
) == SSA_NAME
)
441 gimple def_stmt
= get_prop_source_stmt (op1
, false, &single_use1_p
);
442 if (def_stmt
&& can_propagate_from (def_stmt
))
444 rhs1
= rhs_to_tree (TREE_TYPE (op0
), def_stmt
);
445 tmp
= combine_cond_expr_cond (stmt
, code
, type
,
446 op0
, rhs1
, !single_use1_p
);
452 /* If that wasn't successful either, try both operands. */
453 if (rhs0
!= NULL_TREE
454 && rhs1
!= NULL_TREE
)
455 tmp
= combine_cond_expr_cond (stmt
, code
, type
,
457 !(single_use0_p
&& single_use1_p
));
462 /* Propagate from the ssa name definition statements of the assignment
463 from a comparison at *GSI into the conditional if that simplifies it.
464 Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
465 otherwise returns 0. */
468 forward_propagate_into_comparison (gimple_stmt_iterator
*gsi
)
470 gimple stmt
= gsi_stmt (*gsi
);
472 bool cfg_changed
= false;
473 tree type
= TREE_TYPE (gimple_assign_lhs (stmt
));
474 tree rhs1
= gimple_assign_rhs1 (stmt
);
475 tree rhs2
= gimple_assign_rhs2 (stmt
);
477 /* Combine the comparison with defining statements. */
478 tmp
= forward_propagate_into_comparison_1 (stmt
,
479 gimple_assign_rhs_code (stmt
),
481 if (tmp
&& useless_type_conversion_p (type
, TREE_TYPE (tmp
)))
483 gimple_assign_set_rhs_from_tree (gsi
, tmp
);
485 update_stmt (gsi_stmt (*gsi
));
487 if (TREE_CODE (rhs1
) == SSA_NAME
)
488 cfg_changed
|= remove_prop_source_from_use (rhs1
);
489 if (TREE_CODE (rhs2
) == SSA_NAME
)
490 cfg_changed
|= remove_prop_source_from_use (rhs2
);
491 return cfg_changed
? 2 : 1;
497 /* Propagate from the ssa name definition statements of COND_EXPR
498 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
499 Returns zero if no statement was changed, one if there were
500 changes and two if cfg_cleanup needs to run.
502 This must be kept in sync with forward_propagate_into_cond. */
505 forward_propagate_into_gimple_cond (gimple stmt
)
508 enum tree_code code
= gimple_cond_code (stmt
);
509 bool cfg_changed
= false;
510 tree rhs1
= gimple_cond_lhs (stmt
);
511 tree rhs2
= gimple_cond_rhs (stmt
);
513 /* We can do tree combining on SSA_NAME and comparison expressions. */
514 if (TREE_CODE_CLASS (gimple_cond_code (stmt
)) != tcc_comparison
)
517 tmp
= forward_propagate_into_comparison_1 (stmt
, code
,
522 if (dump_file
&& tmp
)
524 fprintf (dump_file
, " Replaced '");
525 print_gimple_expr (dump_file
, stmt
, 0, 0);
526 fprintf (dump_file
, "' with '");
527 print_generic_expr (dump_file
, tmp
, 0);
528 fprintf (dump_file
, "'\n");
531 gimple_cond_set_condition_from_tree (stmt
, unshare_expr (tmp
));
534 if (TREE_CODE (rhs1
) == SSA_NAME
)
535 cfg_changed
|= remove_prop_source_from_use (rhs1
);
536 if (TREE_CODE (rhs2
) == SSA_NAME
)
537 cfg_changed
|= remove_prop_source_from_use (rhs2
);
538 return (cfg_changed
|| is_gimple_min_invariant (tmp
)) ? 2 : 1;
541 /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges. */
542 if ((TREE_CODE (TREE_TYPE (rhs1
)) == BOOLEAN_TYPE
543 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1
))
544 && TYPE_PRECISION (TREE_TYPE (rhs1
)) == 1))
546 && integer_zerop (rhs2
))
548 && integer_onep (rhs2
))))
550 basic_block bb
= gimple_bb (stmt
);
551 gimple_cond_set_code (stmt
, NE_EXPR
);
552 gimple_cond_set_rhs (stmt
, build_zero_cst (TREE_TYPE (rhs1
)));
553 EDGE_SUCC (bb
, 0)->flags
^= (EDGE_TRUE_VALUE
|EDGE_FALSE_VALUE
);
554 EDGE_SUCC (bb
, 1)->flags
^= (EDGE_TRUE_VALUE
|EDGE_FALSE_VALUE
);
562 /* Propagate from the ssa name definition statements of COND_EXPR
563 in the rhs of statement STMT into the conditional if that simplifies it.
564 Returns true zero if the stmt was changed. */
567 forward_propagate_into_cond (gimple_stmt_iterator
*gsi_p
)
569 gimple stmt
= gsi_stmt (*gsi_p
);
570 tree tmp
= NULL_TREE
;
571 tree cond
= gimple_assign_rhs1 (stmt
);
572 enum tree_code code
= gimple_assign_rhs_code (stmt
);
575 /* We can do tree combining on SSA_NAME and comparison expressions. */
576 if (COMPARISON_CLASS_P (cond
))
577 tmp
= forward_propagate_into_comparison_1 (stmt
, TREE_CODE (cond
),
579 TREE_OPERAND (cond
, 0),
580 TREE_OPERAND (cond
, 1));
581 else if (TREE_CODE (cond
) == SSA_NAME
)
583 enum tree_code def_code
;
585 gimple def_stmt
= get_prop_source_stmt (name
, true, NULL
);
586 if (!def_stmt
|| !can_propagate_from (def_stmt
))
589 def_code
= gimple_assign_rhs_code (def_stmt
);
590 if (TREE_CODE_CLASS (def_code
) == tcc_comparison
)
591 tmp
= fold_build2_loc (gimple_location (def_stmt
),
594 gimple_assign_rhs1 (def_stmt
),
595 gimple_assign_rhs2 (def_stmt
));
596 else if (code
== COND_EXPR
597 && ((def_code
== BIT_NOT_EXPR
598 && TYPE_PRECISION (TREE_TYPE (cond
)) == 1)
599 || (def_code
== BIT_XOR_EXPR
600 && integer_onep (gimple_assign_rhs2 (def_stmt
)))))
602 tmp
= gimple_assign_rhs1 (def_stmt
);
608 && is_gimple_condexpr (tmp
))
610 if (dump_file
&& tmp
)
612 fprintf (dump_file
, " Replaced '");
613 print_generic_expr (dump_file
, cond
, 0);
614 fprintf (dump_file
, "' with '");
615 print_generic_expr (dump_file
, tmp
, 0);
616 fprintf (dump_file
, "'\n");
619 if ((code
== VEC_COND_EXPR
) ? integer_all_onesp (tmp
)
620 : integer_onep (tmp
))
621 gimple_assign_set_rhs_from_tree (gsi_p
, gimple_assign_rhs2 (stmt
));
622 else if (integer_zerop (tmp
))
623 gimple_assign_set_rhs_from_tree (gsi_p
, gimple_assign_rhs3 (stmt
));
626 gimple_assign_set_rhs1 (stmt
, unshare_expr (tmp
));
629 tree t
= gimple_assign_rhs2 (stmt
);
630 gimple_assign_set_rhs2 (stmt
, gimple_assign_rhs3 (stmt
));
631 gimple_assign_set_rhs3 (stmt
, t
);
634 stmt
= gsi_stmt (*gsi_p
);
643 /* Propagate from the ssa name definition statements of COND_EXPR
644 values in the rhs of statement STMT into the conditional arms
645 if that simplifies it.
646 Returns true if the stmt was changed. */
649 combine_cond_exprs (gimple_stmt_iterator
*gsi_p
)
651 gimple stmt
= gsi_stmt (*gsi_p
);
652 tree cond
, val1
, val2
;
653 bool changed
= false;
655 cond
= gimple_assign_rhs1 (stmt
);
656 val1
= gimple_assign_rhs2 (stmt
);
657 if (TREE_CODE (val1
) == SSA_NAME
)
659 gimple def_stmt
= SSA_NAME_DEF_STMT (val1
);
660 if (is_gimple_assign (def_stmt
)
661 && gimple_assign_rhs_code (def_stmt
) == gimple_assign_rhs_code (stmt
)
662 && operand_equal_p (gimple_assign_rhs1 (def_stmt
), cond
, 0))
664 val1
= unshare_expr (gimple_assign_rhs2 (def_stmt
));
665 gimple_assign_set_rhs2 (stmt
, val1
);
669 val2
= gimple_assign_rhs3 (stmt
);
670 if (TREE_CODE (val2
) == SSA_NAME
)
672 gimple def_stmt
= SSA_NAME_DEF_STMT (val2
);
673 if (is_gimple_assign (def_stmt
)
674 && gimple_assign_rhs_code (def_stmt
) == gimple_assign_rhs_code (stmt
)
675 && operand_equal_p (gimple_assign_rhs1 (def_stmt
), cond
, 0))
677 val2
= unshare_expr (gimple_assign_rhs3 (def_stmt
));
678 gimple_assign_set_rhs3 (stmt
, val2
);
682 if (operand_equal_p (val1
, val2
, 0))
684 gimple_assign_set_rhs_from_tree (gsi_p
, val1
);
685 stmt
= gsi_stmt (*gsi_p
);
695 /* We've just substituted an ADDR_EXPR into stmt. Update all the
696 relevant data structures to match. */
699 tidy_after_forward_propagate_addr (gimple stmt
)
701 /* We may have turned a trapping insn into a non-trapping insn. */
702 if (maybe_clean_or_replace_eh_stmt (stmt
, stmt
)
703 && gimple_purge_dead_eh_edges (gimple_bb (stmt
)))
706 if (TREE_CODE (gimple_assign_rhs1 (stmt
)) == ADDR_EXPR
)
707 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt
));
710 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
711 ADDR_EXPR <whatever>.
713 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
714 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
715 node or for recovery of array indexing from pointer arithmetic.
717 Return true if the propagation was successful (the propagation can
718 be not totally successful, yet things may have been changed). */
721 forward_propagate_addr_expr_1 (tree name
, tree def_rhs
,
722 gimple_stmt_iterator
*use_stmt_gsi
,
725 tree lhs
, rhs
, rhs2
, array_ref
;
726 gimple use_stmt
= gsi_stmt (*use_stmt_gsi
);
727 enum tree_code rhs_code
;
730 gcc_assert (TREE_CODE (def_rhs
) == ADDR_EXPR
);
732 lhs
= gimple_assign_lhs (use_stmt
);
733 rhs_code
= gimple_assign_rhs_code (use_stmt
);
734 rhs
= gimple_assign_rhs1 (use_stmt
);
736 /* Do not perform copy-propagation but recurse through copy chains. */
737 if (TREE_CODE (lhs
) == SSA_NAME
738 && rhs_code
== SSA_NAME
)
739 return forward_propagate_addr_expr (lhs
, def_rhs
, single_use_p
);
741 /* The use statement could be a conversion. Recurse to the uses of the
742 lhs as copyprop does not copy through pointer to integer to pointer
743 conversions and FRE does not catch all cases either.
744 Treat the case of a single-use name and
745 a conversion to def_rhs type separate, though. */
746 if (TREE_CODE (lhs
) == SSA_NAME
747 && CONVERT_EXPR_CODE_P (rhs_code
))
749 /* If there is a point in a conversion chain where the types match
750 so we can remove a conversion re-materialize the address here
753 && useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (def_rhs
)))
755 gimple_assign_set_rhs1 (use_stmt
, unshare_expr (def_rhs
));
756 gimple_assign_set_rhs_code (use_stmt
, TREE_CODE (def_rhs
));
760 /* Else recurse if the conversion preserves the address value. */
761 if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs
))
762 || POINTER_TYPE_P (TREE_TYPE (lhs
)))
763 && (TYPE_PRECISION (TREE_TYPE (lhs
))
764 >= TYPE_PRECISION (TREE_TYPE (def_rhs
))))
765 return forward_propagate_addr_expr (lhs
, def_rhs
, single_use_p
);
770 /* If this isn't a conversion chain from this on we only can propagate
771 into compatible pointer contexts. */
772 if (!types_compatible_p (TREE_TYPE (name
), TREE_TYPE (def_rhs
)))
775 /* Propagate through constant pointer adjustments. */
776 if (TREE_CODE (lhs
) == SSA_NAME
777 && rhs_code
== POINTER_PLUS_EXPR
779 && TREE_CODE (gimple_assign_rhs2 (use_stmt
)) == INTEGER_CST
)
782 /* As we come here with non-invariant addresses in def_rhs we need
783 to make sure we can build a valid constant offsetted address
784 for further propagation. Simply rely on fold building that
785 and check after the fact. */
786 new_def_rhs
= fold_build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (rhs
)),
788 fold_convert (ptr_type_node
,
789 gimple_assign_rhs2 (use_stmt
)));
790 if (TREE_CODE (new_def_rhs
) == MEM_REF
791 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs
, 0)))
793 new_def_rhs
= build_fold_addr_expr_with_type (new_def_rhs
,
796 /* Recurse. If we could propagate into all uses of lhs do not
797 bother to replace into the current use but just pretend we did. */
798 if (TREE_CODE (new_def_rhs
) == ADDR_EXPR
799 && forward_propagate_addr_expr (lhs
, new_def_rhs
, single_use_p
))
802 if (useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (new_def_rhs
)))
803 gimple_assign_set_rhs_with_ops (use_stmt_gsi
, TREE_CODE (new_def_rhs
),
804 new_def_rhs
, NULL_TREE
);
805 else if (is_gimple_min_invariant (new_def_rhs
))
806 gimple_assign_set_rhs_with_ops (use_stmt_gsi
, NOP_EXPR
,
807 new_def_rhs
, NULL_TREE
);
810 gcc_assert (gsi_stmt (*use_stmt_gsi
) == use_stmt
);
811 update_stmt (use_stmt
);
815 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
816 ADDR_EXPR will not appear on the LHS. */
817 tree
*lhsp
= gimple_assign_lhs_ptr (use_stmt
);
818 while (handled_component_p (*lhsp
))
819 lhsp
= &TREE_OPERAND (*lhsp
, 0);
822 /* Now see if the LHS node is a MEM_REF using NAME. If so,
823 propagate the ADDR_EXPR into the use of NAME and fold the result. */
824 if (TREE_CODE (lhs
) == MEM_REF
825 && TREE_OPERAND (lhs
, 0) == name
)
828 HOST_WIDE_INT def_rhs_offset
;
829 /* If the address is invariant we can always fold it. */
830 if ((def_rhs_base
= get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs
, 0),
833 offset_int off
= mem_ref_offset (lhs
);
835 off
+= def_rhs_offset
;
836 if (TREE_CODE (def_rhs_base
) == MEM_REF
)
838 off
+= mem_ref_offset (def_rhs_base
);
839 new_ptr
= TREE_OPERAND (def_rhs_base
, 0);
842 new_ptr
= build_fold_addr_expr (def_rhs_base
);
843 TREE_OPERAND (lhs
, 0) = new_ptr
;
844 TREE_OPERAND (lhs
, 1)
845 = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs
, 1)), off
);
846 tidy_after_forward_propagate_addr (use_stmt
);
847 /* Continue propagating into the RHS if this was not the only use. */
851 /* If the LHS is a plain dereference and the value type is the same as
852 that of the pointed-to type of the address we can put the
853 dereferenced address on the LHS preserving the original alias-type. */
854 else if (integer_zerop (TREE_OPERAND (lhs
, 1))
855 && ((gimple_assign_lhs (use_stmt
) == lhs
856 && useless_type_conversion_p
857 (TREE_TYPE (TREE_OPERAND (def_rhs
, 0)),
858 TREE_TYPE (gimple_assign_rhs1 (use_stmt
))))
859 || types_compatible_p (TREE_TYPE (lhs
),
860 TREE_TYPE (TREE_OPERAND (def_rhs
, 0))))
861 /* Don't forward anything into clobber stmts if it would result
862 in the lhs no longer being a MEM_REF. */
863 && (!gimple_clobber_p (use_stmt
)
864 || TREE_CODE (TREE_OPERAND (def_rhs
, 0)) == MEM_REF
))
866 tree
*def_rhs_basep
= &TREE_OPERAND (def_rhs
, 0);
867 tree new_offset
, new_base
, saved
, new_lhs
;
868 while (handled_component_p (*def_rhs_basep
))
869 def_rhs_basep
= &TREE_OPERAND (*def_rhs_basep
, 0);
870 saved
= *def_rhs_basep
;
871 if (TREE_CODE (*def_rhs_basep
) == MEM_REF
)
873 new_base
= TREE_OPERAND (*def_rhs_basep
, 0);
874 new_offset
= fold_convert (TREE_TYPE (TREE_OPERAND (lhs
, 1)),
875 TREE_OPERAND (*def_rhs_basep
, 1));
879 new_base
= build_fold_addr_expr (*def_rhs_basep
);
880 new_offset
= TREE_OPERAND (lhs
, 1);
882 *def_rhs_basep
= build2 (MEM_REF
, TREE_TYPE (*def_rhs_basep
),
883 new_base
, new_offset
);
884 TREE_THIS_VOLATILE (*def_rhs_basep
) = TREE_THIS_VOLATILE (lhs
);
885 TREE_SIDE_EFFECTS (*def_rhs_basep
) = TREE_SIDE_EFFECTS (lhs
);
886 TREE_THIS_NOTRAP (*def_rhs_basep
) = TREE_THIS_NOTRAP (lhs
);
887 new_lhs
= unshare_expr (TREE_OPERAND (def_rhs
, 0));
889 TREE_THIS_VOLATILE (new_lhs
) = TREE_THIS_VOLATILE (lhs
);
890 TREE_SIDE_EFFECTS (new_lhs
) = TREE_SIDE_EFFECTS (lhs
);
891 *def_rhs_basep
= saved
;
892 tidy_after_forward_propagate_addr (use_stmt
);
893 /* Continue propagating into the RHS if this was not the
899 /* We can have a struct assignment dereferencing our name twice.
900 Note that we didn't propagate into the lhs to not falsely
901 claim we did when propagating into the rhs. */
905 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
906 nodes from the RHS. */
907 tree
*rhsp
= gimple_assign_rhs1_ptr (use_stmt
);
908 if (TREE_CODE (*rhsp
) == ADDR_EXPR
)
909 rhsp
= &TREE_OPERAND (*rhsp
, 0);
910 while (handled_component_p (*rhsp
))
911 rhsp
= &TREE_OPERAND (*rhsp
, 0);
914 /* Now see if the RHS node is a MEM_REF using NAME. If so,
915 propagate the ADDR_EXPR into the use of NAME and fold the result. */
916 if (TREE_CODE (rhs
) == MEM_REF
917 && TREE_OPERAND (rhs
, 0) == name
)
920 HOST_WIDE_INT def_rhs_offset
;
921 if ((def_rhs_base
= get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs
, 0),
924 offset_int off
= mem_ref_offset (rhs
);
926 off
+= def_rhs_offset
;
927 if (TREE_CODE (def_rhs_base
) == MEM_REF
)
929 off
+= mem_ref_offset (def_rhs_base
);
930 new_ptr
= TREE_OPERAND (def_rhs_base
, 0);
933 new_ptr
= build_fold_addr_expr (def_rhs_base
);
934 TREE_OPERAND (rhs
, 0) = new_ptr
;
935 TREE_OPERAND (rhs
, 1)
936 = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs
, 1)), off
);
937 fold_stmt_inplace (use_stmt_gsi
);
938 tidy_after_forward_propagate_addr (use_stmt
);
941 /* If the RHS is a plain dereference and the value type is the same as
942 that of the pointed-to type of the address we can put the
943 dereferenced address on the RHS preserving the original alias-type. */
944 else if (integer_zerop (TREE_OPERAND (rhs
, 1))
945 && ((gimple_assign_rhs1 (use_stmt
) == rhs
946 && useless_type_conversion_p
947 (TREE_TYPE (gimple_assign_lhs (use_stmt
)),
948 TREE_TYPE (TREE_OPERAND (def_rhs
, 0))))
949 || types_compatible_p (TREE_TYPE (rhs
),
950 TREE_TYPE (TREE_OPERAND (def_rhs
, 0)))))
952 tree
*def_rhs_basep
= &TREE_OPERAND (def_rhs
, 0);
953 tree new_offset
, new_base
, saved
, new_rhs
;
954 while (handled_component_p (*def_rhs_basep
))
955 def_rhs_basep
= &TREE_OPERAND (*def_rhs_basep
, 0);
956 saved
= *def_rhs_basep
;
957 if (TREE_CODE (*def_rhs_basep
) == MEM_REF
)
959 new_base
= TREE_OPERAND (*def_rhs_basep
, 0);
960 new_offset
= fold_convert (TREE_TYPE (TREE_OPERAND (rhs
, 1)),
961 TREE_OPERAND (*def_rhs_basep
, 1));
965 new_base
= build_fold_addr_expr (*def_rhs_basep
);
966 new_offset
= TREE_OPERAND (rhs
, 1);
968 *def_rhs_basep
= build2 (MEM_REF
, TREE_TYPE (*def_rhs_basep
),
969 new_base
, new_offset
);
970 TREE_THIS_VOLATILE (*def_rhs_basep
) = TREE_THIS_VOLATILE (rhs
);
971 TREE_SIDE_EFFECTS (*def_rhs_basep
) = TREE_SIDE_EFFECTS (rhs
);
972 TREE_THIS_NOTRAP (*def_rhs_basep
) = TREE_THIS_NOTRAP (rhs
);
973 new_rhs
= unshare_expr (TREE_OPERAND (def_rhs
, 0));
975 TREE_THIS_VOLATILE (new_rhs
) = TREE_THIS_VOLATILE (rhs
);
976 TREE_SIDE_EFFECTS (new_rhs
) = TREE_SIDE_EFFECTS (rhs
);
977 *def_rhs_basep
= saved
;
978 fold_stmt_inplace (use_stmt_gsi
);
979 tidy_after_forward_propagate_addr (use_stmt
);
984 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
986 if (gimple_assign_rhs_code (use_stmt
) != POINTER_PLUS_EXPR
987 || gimple_assign_rhs1 (use_stmt
) != name
)
990 /* The remaining cases are all for turning pointer arithmetic into
991 array indexing. They only apply when we have the address of
992 element zero in an array. If that is not the case then there
994 array_ref
= TREE_OPERAND (def_rhs
, 0);
995 if ((TREE_CODE (array_ref
) != ARRAY_REF
996 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref
, 0))) != ARRAY_TYPE
997 || TREE_CODE (TREE_OPERAND (array_ref
, 1)) != INTEGER_CST
)
998 && TREE_CODE (TREE_TYPE (array_ref
)) != ARRAY_TYPE
)
1001 rhs2
= gimple_assign_rhs2 (use_stmt
);
1002 /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2. */
1003 if (TREE_CODE (rhs2
) == INTEGER_CST
)
1005 tree new_rhs
= build1_loc (gimple_location (use_stmt
),
1006 ADDR_EXPR
, TREE_TYPE (def_rhs
),
1007 fold_build2 (MEM_REF
,
1008 TREE_TYPE (TREE_TYPE (def_rhs
)),
1009 unshare_expr (def_rhs
),
1010 fold_convert (ptr_type_node
,
1012 gimple_assign_set_rhs_from_tree (use_stmt_gsi
, new_rhs
);
1013 use_stmt
= gsi_stmt (*use_stmt_gsi
);
1014 update_stmt (use_stmt
);
1015 tidy_after_forward_propagate_addr (use_stmt
);
1022 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
1024 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
1025 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
1026 node or for recovery of array indexing from pointer arithmetic.
1028 PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
1029 the single use in the previous invocation. Pass true when calling
1032 Returns true, if all uses have been propagated into. */
1035 forward_propagate_addr_expr (tree name
, tree rhs
, bool parent_single_use_p
)
1037 imm_use_iterator iter
;
1040 bool single_use_p
= parent_single_use_p
&& has_single_use (name
);
1042 FOR_EACH_IMM_USE_STMT (use_stmt
, iter
, name
)
1047 /* If the use is not in a simple assignment statement, then
1048 there is nothing we can do. */
1049 if (!is_gimple_assign (use_stmt
))
1051 if (!is_gimple_debug (use_stmt
))
1056 gimple_stmt_iterator gsi
= gsi_for_stmt (use_stmt
);
1057 result
= forward_propagate_addr_expr_1 (name
, rhs
, &gsi
,
1059 /* If the use has moved to a different statement adjust
1060 the update machinery for the old statement too. */
1061 if (use_stmt
!= gsi_stmt (gsi
))
1063 update_stmt (use_stmt
);
1064 use_stmt
= gsi_stmt (gsi
);
1066 update_stmt (use_stmt
);
1069 /* Remove intermediate now unused copy and conversion chains. */
1070 use_rhs
= gimple_assign_rhs1 (use_stmt
);
1072 && TREE_CODE (gimple_assign_lhs (use_stmt
)) == SSA_NAME
1073 && TREE_CODE (use_rhs
) == SSA_NAME
1074 && has_zero_uses (gimple_assign_lhs (use_stmt
)))
1076 gimple_stmt_iterator gsi
= gsi_for_stmt (use_stmt
);
1077 release_defs (use_stmt
);
1078 gsi_remove (&gsi
, true);
1082 return all
&& has_zero_uses (name
);
1086 /* Forward propagate the comparison defined in *DEFGSI like
1087 cond_1 = x CMP y to uses of the form
1091 Returns true if stmt is now unused. Advance DEFGSI to the next
1095 forward_propagate_comparison (gimple_stmt_iterator
*defgsi
)
1097 gimple stmt
= gsi_stmt (*defgsi
);
1098 tree name
= gimple_assign_lhs (stmt
);
1100 tree tmp
= NULL_TREE
;
1101 gimple_stmt_iterator gsi
;
1102 enum tree_code code
;
1105 /* Don't propagate ssa names that occur in abnormal phis. */
1106 if ((TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
1107 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt
)))
1108 || (TREE_CODE (gimple_assign_rhs2 (stmt
)) == SSA_NAME
1109 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt
))))
1112 /* Do not un-cse comparisons. But propagate through copies. */
1113 use_stmt
= get_prop_dest_stmt (name
, &name
);
1115 || !is_gimple_assign (use_stmt
))
1118 code
= gimple_assign_rhs_code (use_stmt
);
1119 lhs
= gimple_assign_lhs (use_stmt
);
1120 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs
)))
1123 /* We can propagate the condition into a statement that
1124 computes the logical negation of the comparison result. */
1125 if ((code
== BIT_NOT_EXPR
1126 && TYPE_PRECISION (TREE_TYPE (lhs
)) == 1)
1127 || (code
== BIT_XOR_EXPR
1128 && integer_onep (gimple_assign_rhs2 (use_stmt
))))
1130 tree type
= TREE_TYPE (gimple_assign_rhs1 (stmt
));
1131 bool nans
= HONOR_NANS (TYPE_MODE (type
));
1132 enum tree_code inv_code
;
1133 inv_code
= invert_tree_comparison (gimple_assign_rhs_code (stmt
), nans
);
1134 if (inv_code
== ERROR_MARK
)
1137 tmp
= build2 (inv_code
, TREE_TYPE (lhs
), gimple_assign_rhs1 (stmt
),
1138 gimple_assign_rhs2 (stmt
));
1143 gsi
= gsi_for_stmt (use_stmt
);
1144 gimple_assign_set_rhs_from_tree (&gsi
, unshare_expr (tmp
));
1145 use_stmt
= gsi_stmt (gsi
);
1146 update_stmt (use_stmt
);
1148 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1150 fprintf (dump_file
, " Replaced '");
1151 print_gimple_expr (dump_file
, stmt
, 0, dump_flags
);
1152 fprintf (dump_file
, "' with '");
1153 print_gimple_expr (dump_file
, use_stmt
, 0, dump_flags
);
1154 fprintf (dump_file
, "'\n");
1157 /* When we remove stmt now the iterator defgsi goes off it's current
1158 sequence, hence advance it now. */
1161 /* Remove defining statements. */
1162 return remove_prop_source_from_use (name
);
1170 /* GSI_P points to a statement which performs a narrowing integral
1173 Look for cases like:
1183 If T is narrower than X's type and C merely masks off bits outside
1184 of (T) and nothing else.
1186 Normally we'd let DCE remove the dead statement. But no DCE runs
1187 after the last forwprop/combine pass, so we remove the obviously
1188 dead code ourselves.
1190 Return TRUE if a change was made, FALSE otherwise. */
1193 simplify_conversion_from_bitmask (gimple_stmt_iterator
*gsi_p
)
1195 gimple stmt
= gsi_stmt (*gsi_p
);
1196 gimple rhs_def_stmt
= SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt
));
1198 /* See if the input for the conversion was set via a BIT_AND_EXPR and
1199 the only use of the BIT_AND_EXPR result is the conversion. */
1200 if (is_gimple_assign (rhs_def_stmt
)
1201 && gimple_assign_rhs_code (rhs_def_stmt
) == BIT_AND_EXPR
1202 && has_single_use (gimple_assign_lhs (rhs_def_stmt
)))
1204 tree rhs_def_operand1
= gimple_assign_rhs1 (rhs_def_stmt
);
1205 tree rhs_def_operand2
= gimple_assign_rhs2 (rhs_def_stmt
);
1206 tree lhs_type
= TREE_TYPE (gimple_assign_lhs (stmt
));
1208 /* Now verify suitability of the BIT_AND_EXPR's operands.
1209 The first must be an SSA_NAME that we can propagate and the
1210 second must be an integer constant that masks out all the
1211 bits outside the final result's type, but nothing else. */
1212 if (TREE_CODE (rhs_def_operand1
) == SSA_NAME
1213 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand1
)
1214 && TREE_CODE (rhs_def_operand2
) == INTEGER_CST
1215 && operand_equal_p (rhs_def_operand2
,
1216 build_low_bits_mask (TREE_TYPE (rhs_def_operand2
),
1217 TYPE_PRECISION (lhs_type
)),
1220 /* This is an optimizable case. Replace the source operand
1221 in the conversion with the first source operand of the
1223 gimple_assign_set_rhs1 (stmt
, rhs_def_operand1
);
1224 stmt
= gsi_stmt (*gsi_p
);
1227 /* There is no DCE after the last forwprop pass. It's
1228 easy to clean up the first order effects here. */
1229 gimple_stmt_iterator si
;
1230 si
= gsi_for_stmt (rhs_def_stmt
);
1231 gsi_remove (&si
, true);
1232 release_defs (rhs_def_stmt
);
1241 /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1242 If so, we can change STMT into lhs = y which can later be copy
1243 propagated. Similarly for negation.
1245 This could trivially be formulated as a forward propagation
1246 to immediate uses. However, we already had an implementation
1247 from DOM which used backward propagation via the use-def links.
1249 It turns out that backward propagation is actually faster as
1250 there's less work to do for each NOT/NEG expression we find.
1251 Backwards propagation needs to look at the statement in a single
1252 backlink. Forward propagation needs to look at potentially more
1253 than one forward link.
1255 Returns true when the statement was changed. */
1258 simplify_not_neg_expr (gimple_stmt_iterator
*gsi_p
)
1260 gimple stmt
= gsi_stmt (*gsi_p
);
1261 tree rhs
= gimple_assign_rhs1 (stmt
);
1262 gimple rhs_def_stmt
= SSA_NAME_DEF_STMT (rhs
);
1264 /* See if the RHS_DEF_STMT has the same form as our statement. */
1265 if (is_gimple_assign (rhs_def_stmt
)
1266 && gimple_assign_rhs_code (rhs_def_stmt
) == gimple_assign_rhs_code (stmt
))
1268 tree rhs_def_operand
= gimple_assign_rhs1 (rhs_def_stmt
);
1270 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1271 if (TREE_CODE (rhs_def_operand
) == SSA_NAME
1272 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand
))
1274 gimple_assign_set_rhs_from_tree (gsi_p
, rhs_def_operand
);
1275 stmt
= gsi_stmt (*gsi_p
);
1284 /* Helper function for simplify_gimple_switch. Remove case labels that
1285 have values outside the range of the new type. */
1288 simplify_gimple_switch_label_vec (gimple stmt
, tree index_type
)
1290 unsigned int branch_num
= gimple_switch_num_labels (stmt
);
1291 auto_vec
<tree
> labels (branch_num
);
1292 unsigned int i
, len
;
1294 /* Collect the existing case labels in a VEC, and preprocess it as if
1295 we are gimplifying a GENERIC SWITCH_EXPR. */
1296 for (i
= 1; i
< branch_num
; i
++)
1297 labels
.quick_push (gimple_switch_label (stmt
, i
));
1298 preprocess_case_label_vec_for_gimple (labels
, index_type
, NULL
);
1300 /* If any labels were removed, replace the existing case labels
1301 in the GIMPLE_SWITCH statement with the correct ones.
1302 Note that the type updates were done in-place on the case labels,
1303 so we only have to replace the case labels in the GIMPLE_SWITCH
1304 if the number of labels changed. */
1305 len
= labels
.length ();
1306 if (len
< branch_num
- 1)
1308 bitmap target_blocks
;
1312 /* Corner case: *all* case labels have been removed as being
1313 out-of-range for INDEX_TYPE. Push one label and let the
1314 CFG cleanups deal with this further. */
1319 label
= CASE_LABEL (gimple_switch_default_label (stmt
));
1320 elt
= build_case_label (build_int_cst (index_type
, 0), NULL
, label
);
1321 labels
.quick_push (elt
);
1325 for (i
= 0; i
< labels
.length (); i
++)
1326 gimple_switch_set_label (stmt
, i
+ 1, labels
[i
]);
1327 for (i
++ ; i
< branch_num
; i
++)
1328 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1329 gimple_switch_set_num_labels (stmt
, len
+ 1);
1331 /* Cleanup any edges that are now dead. */
1332 target_blocks
= BITMAP_ALLOC (NULL
);
1333 for (i
= 0; i
< gimple_switch_num_labels (stmt
); i
++)
1335 tree elt
= gimple_switch_label (stmt
, i
);
1336 basic_block target
= label_to_block (CASE_LABEL (elt
));
1337 bitmap_set_bit (target_blocks
, target
->index
);
1339 for (ei
= ei_start (gimple_bb (stmt
)->succs
); (e
= ei_safe_edge (ei
)); )
1341 if (! bitmap_bit_p (target_blocks
, e
->dest
->index
))
1345 free_dominance_info (CDI_DOMINATORS
);
1350 BITMAP_FREE (target_blocks
);
1354 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1355 the condition which we may be able to optimize better. */
1358 simplify_gimple_switch (gimple stmt
)
1360 /* The optimization that we really care about is removing unnecessary
1361 casts. That will let us do much better in propagating the inferred
1362 constant at the switch target. */
1363 tree cond
= gimple_switch_index (stmt
);
1364 if (TREE_CODE (cond
) == SSA_NAME
)
1366 gimple def_stmt
= SSA_NAME_DEF_STMT (cond
);
1367 if (gimple_assign_cast_p (def_stmt
))
1369 tree def
= gimple_assign_rhs1 (def_stmt
);
1370 if (TREE_CODE (def
) != SSA_NAME
)
1373 /* If we have an extension or sign-change that preserves the
1374 values we check against then we can copy the source value into
1376 tree ti
= TREE_TYPE (def
);
1377 if (INTEGRAL_TYPE_P (ti
)
1378 && TYPE_PRECISION (ti
) <= TYPE_PRECISION (TREE_TYPE (cond
)))
1380 size_t n
= gimple_switch_num_labels (stmt
);
1381 tree min
= NULL_TREE
, max
= NULL_TREE
;
1384 min
= CASE_LOW (gimple_switch_label (stmt
, 1));
1385 if (CASE_HIGH (gimple_switch_label (stmt
, n
- 1)))
1386 max
= CASE_HIGH (gimple_switch_label (stmt
, n
- 1));
1388 max
= CASE_LOW (gimple_switch_label (stmt
, n
- 1));
1390 if ((!min
|| int_fits_type_p (min
, ti
))
1391 && (!max
|| int_fits_type_p (max
, ti
)))
1393 gimple_switch_set_index (stmt
, def
);
1394 simplify_gimple_switch_label_vec (stmt
, ti
);
1405 /* For pointers p2 and p1 return p2 - p1 if the
1406 difference is known and constant, otherwise return NULL. */
1409 constant_pointer_difference (tree p1
, tree p2
)
1412 #define CPD_ITERATIONS 5
1413 tree exps
[2][CPD_ITERATIONS
];
1414 tree offs
[2][CPD_ITERATIONS
];
1417 for (i
= 0; i
< 2; i
++)
1419 tree p
= i
? p1
: p2
;
1420 tree off
= size_zero_node
;
1422 enum tree_code code
;
1424 /* For each of p1 and p2 we need to iterate at least
1425 twice, to handle ADDR_EXPR directly in p1/p2,
1426 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1427 on definition's stmt RHS. Iterate a few extra times. */
1431 if (!POINTER_TYPE_P (TREE_TYPE (p
)))
1433 if (TREE_CODE (p
) == ADDR_EXPR
)
1435 tree q
= TREE_OPERAND (p
, 0);
1436 HOST_WIDE_INT offset
;
1437 tree base
= get_addr_base_and_unit_offset (q
, &offset
);
1442 off
= size_binop (PLUS_EXPR
, off
, size_int (offset
));
1444 if (TREE_CODE (q
) == MEM_REF
1445 && TREE_CODE (TREE_OPERAND (q
, 0)) == SSA_NAME
)
1447 p
= TREE_OPERAND (q
, 0);
1448 off
= size_binop (PLUS_EXPR
, off
,
1449 wide_int_to_tree (sizetype
,
1450 mem_ref_offset (q
)));
1459 if (TREE_CODE (p
) != SSA_NAME
)
1463 if (j
== CPD_ITERATIONS
)
1465 stmt
= SSA_NAME_DEF_STMT (p
);
1466 if (!is_gimple_assign (stmt
) || gimple_assign_lhs (stmt
) != p
)
1468 code
= gimple_assign_rhs_code (stmt
);
1469 if (code
== POINTER_PLUS_EXPR
)
1471 if (TREE_CODE (gimple_assign_rhs2 (stmt
)) != INTEGER_CST
)
1473 off
= size_binop (PLUS_EXPR
, off
, gimple_assign_rhs2 (stmt
));
1474 p
= gimple_assign_rhs1 (stmt
);
1476 else if (code
== ADDR_EXPR
|| code
== NOP_EXPR
)
1477 p
= gimple_assign_rhs1 (stmt
);
1485 for (i
= 0; i
< cnt
[0]; i
++)
1486 for (j
= 0; j
< cnt
[1]; j
++)
1487 if (exps
[0][i
] == exps
[1][j
])
1488 return size_binop (MINUS_EXPR
, offs
[0][i
], offs
[1][j
]);
1493 /* *GSI_P is a GIMPLE_CALL to a builtin function.
1495 memcpy (p, "abcd", 4);
1496 memset (p + 4, ' ', 3);
1498 memcpy (p, "abcd ", 7);
1499 call if the latter can be stored by pieces during expansion. */
1502 simplify_builtin_call (gimple_stmt_iterator
*gsi_p
, tree callee2
)
1504 gimple stmt1
, stmt2
= gsi_stmt (*gsi_p
);
1505 tree vuse
= gimple_vuse (stmt2
);
1508 stmt1
= SSA_NAME_DEF_STMT (vuse
);
1510 switch (DECL_FUNCTION_CODE (callee2
))
1512 case BUILT_IN_MEMSET
:
1513 if (gimple_call_num_args (stmt2
) != 3
1514 || gimple_call_lhs (stmt2
)
1516 || BITS_PER_UNIT
!= 8)
1521 tree ptr1
, src1
, str1
, off1
, len1
, lhs1
;
1522 tree ptr2
= gimple_call_arg (stmt2
, 0);
1523 tree val2
= gimple_call_arg (stmt2
, 1);
1524 tree len2
= gimple_call_arg (stmt2
, 2);
1525 tree diff
, vdef
, new_str_cst
;
1527 unsigned int ptr1_align
;
1528 unsigned HOST_WIDE_INT src_len
;
1530 use_operand_p use_p
;
1532 if (!tree_fits_shwi_p (val2
)
1533 || !tree_fits_uhwi_p (len2
))
1535 if (is_gimple_call (stmt1
))
1537 /* If first stmt is a call, it needs to be memcpy
1538 or mempcpy, with string literal as second argument and
1540 callee1
= gimple_call_fndecl (stmt1
);
1541 if (callee1
== NULL_TREE
1542 || DECL_BUILT_IN_CLASS (callee1
) != BUILT_IN_NORMAL
1543 || gimple_call_num_args (stmt1
) != 3)
1545 if (DECL_FUNCTION_CODE (callee1
) != BUILT_IN_MEMCPY
1546 && DECL_FUNCTION_CODE (callee1
) != BUILT_IN_MEMPCPY
)
1548 ptr1
= gimple_call_arg (stmt1
, 0);
1549 src1
= gimple_call_arg (stmt1
, 1);
1550 len1
= gimple_call_arg (stmt1
, 2);
1551 lhs1
= gimple_call_lhs (stmt1
);
1552 if (!tree_fits_uhwi_p (len1
))
1554 str1
= string_constant (src1
, &off1
);
1555 if (str1
== NULL_TREE
)
1557 if (!tree_fits_uhwi_p (off1
)
1558 || compare_tree_int (off1
, TREE_STRING_LENGTH (str1
) - 1) > 0
1559 || compare_tree_int (len1
, TREE_STRING_LENGTH (str1
)
1560 - tree_to_uhwi (off1
)) > 0
1561 || TREE_CODE (TREE_TYPE (str1
)) != ARRAY_TYPE
1562 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1
)))
1563 != TYPE_MODE (char_type_node
))
1566 else if (gimple_assign_single_p (stmt1
))
1568 /* Otherwise look for length 1 memcpy optimized into
1570 ptr1
= gimple_assign_lhs (stmt1
);
1571 src1
= gimple_assign_rhs1 (stmt1
);
1572 if (TREE_CODE (ptr1
) != MEM_REF
1573 || TYPE_MODE (TREE_TYPE (ptr1
)) != TYPE_MODE (char_type_node
)
1574 || !tree_fits_shwi_p (src1
))
1576 ptr1
= build_fold_addr_expr (ptr1
);
1577 callee1
= NULL_TREE
;
1578 len1
= size_one_node
;
1580 off1
= size_zero_node
;
1586 diff
= constant_pointer_difference (ptr1
, ptr2
);
1587 if (diff
== NULL
&& lhs1
!= NULL
)
1589 diff
= constant_pointer_difference (lhs1
, ptr2
);
1590 if (DECL_FUNCTION_CODE (callee1
) == BUILT_IN_MEMPCPY
1592 diff
= size_binop (PLUS_EXPR
, diff
,
1593 fold_convert (sizetype
, len1
));
1595 /* If the difference between the second and first destination pointer
1596 is not constant, or is bigger than memcpy length, bail out. */
1598 || !tree_fits_uhwi_p (diff
)
1599 || tree_int_cst_lt (len1
, diff
))
1602 /* Use maximum of difference plus memset length and memcpy length
1603 as the new memcpy length, if it is too big, bail out. */
1604 src_len
= tree_to_uhwi (diff
);
1605 src_len
+= tree_to_uhwi (len2
);
1606 if (src_len
< tree_to_uhwi (len1
))
1607 src_len
= tree_to_uhwi (len1
);
1611 /* If mempcpy value is used elsewhere, bail out, as mempcpy
1612 with bigger length will return different result. */
1613 if (lhs1
!= NULL_TREE
1614 && DECL_FUNCTION_CODE (callee1
) == BUILT_IN_MEMPCPY
1615 && (TREE_CODE (lhs1
) != SSA_NAME
1616 || !single_imm_use (lhs1
, &use_p
, &use_stmt
)
1617 || use_stmt
!= stmt2
))
1620 /* If anything reads memory in between memcpy and memset
1621 call, the modified memcpy call might change it. */
1622 vdef
= gimple_vdef (stmt1
);
1624 && (!single_imm_use (vdef
, &use_p
, &use_stmt
)
1625 || use_stmt
!= stmt2
))
1628 ptr1_align
= get_pointer_alignment (ptr1
);
1629 /* Construct the new source string literal. */
1630 src_buf
= XALLOCAVEC (char, src_len
+ 1);
1633 TREE_STRING_POINTER (str1
) + tree_to_uhwi (off1
),
1634 tree_to_uhwi (len1
));
1636 src_buf
[0] = tree_to_shwi (src1
);
1637 memset (src_buf
+ tree_to_uhwi (diff
),
1638 tree_to_shwi (val2
), tree_to_uhwi (len2
));
1639 src_buf
[src_len
] = '\0';
1640 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1641 handle embedded '\0's. */
1642 if (strlen (src_buf
) != src_len
)
1644 rtl_profile_for_bb (gimple_bb (stmt2
));
1645 /* If the new memcpy wouldn't be emitted by storing the literal
1646 by pieces, this optimization might enlarge .rodata too much,
1647 as commonly used string literals couldn't be shared any
1649 if (!can_store_by_pieces (src_len
,
1650 builtin_strncpy_read_str
,
1651 src_buf
, ptr1_align
, false))
1654 new_str_cst
= build_string_literal (src_len
, src_buf
);
1657 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1659 if (lhs1
&& DECL_FUNCTION_CODE (callee1
) == BUILT_IN_MEMPCPY
)
1660 gimple_call_set_lhs (stmt1
, NULL_TREE
);
1661 gimple_call_set_arg (stmt1
, 1, new_str_cst
);
1662 gimple_call_set_arg (stmt1
, 2,
1663 build_int_cst (TREE_TYPE (len1
), src_len
));
1664 update_stmt (stmt1
);
1665 unlink_stmt_vdef (stmt2
);
1666 gsi_remove (gsi_p
, true);
1667 release_defs (stmt2
);
1668 if (lhs1
&& DECL_FUNCTION_CODE (callee1
) == BUILT_IN_MEMPCPY
)
1669 release_ssa_name (lhs1
);
1674 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1675 assignment, remove STMT1 and change memset call into
1677 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt1
);
1679 if (!is_gimple_val (ptr1
))
1680 ptr1
= force_gimple_operand_gsi (gsi_p
, ptr1
, true, NULL_TREE
,
1681 true, GSI_SAME_STMT
);
1682 gimple_call_set_fndecl (stmt2
,
1683 builtin_decl_explicit (BUILT_IN_MEMCPY
));
1684 gimple_call_set_arg (stmt2
, 0, ptr1
);
1685 gimple_call_set_arg (stmt2
, 1, new_str_cst
);
1686 gimple_call_set_arg (stmt2
, 2,
1687 build_int_cst (TREE_TYPE (len2
), src_len
));
1688 unlink_stmt_vdef (stmt1
);
1689 gsi_remove (&gsi
, true);
1690 release_defs (stmt1
);
1691 update_stmt (stmt2
);
1702 /* Checks if expression has type of one-bit precision, or is a known
1703 truth-valued expression. */
1705 truth_valued_ssa_name (tree name
)
1708 tree type
= TREE_TYPE (name
);
1710 if (!INTEGRAL_TYPE_P (type
))
1712 /* Don't check here for BOOLEAN_TYPE as the precision isn't
1713 necessarily one and so ~X is not equal to !X. */
1714 if (TYPE_PRECISION (type
) == 1)
1716 def
= SSA_NAME_DEF_STMT (name
);
1717 if (is_gimple_assign (def
))
1718 return truth_value_p (gimple_assign_rhs_code (def
));
1722 /* Helper routine for simplify_bitwise_binary_1 function.
1723 Return for the SSA name NAME the expression X if it mets condition
1724 NAME = !X. Otherwise return NULL_TREE.
1725 Detected patterns for NAME = !X are:
1726 !X and X == 0 for X with integral type.
1727 X ^ 1, X != 1,or ~X for X with integral type with precision of one. */
1729 lookup_logical_inverted_value (tree name
)
1732 enum tree_code code
;
1735 /* If name has none-intergal type, or isn't a SSA_NAME, then
1737 if (TREE_CODE (name
) != SSA_NAME
1738 || !INTEGRAL_TYPE_P (TREE_TYPE (name
)))
1740 def
= SSA_NAME_DEF_STMT (name
);
1741 if (!is_gimple_assign (def
))
1744 code
= gimple_assign_rhs_code (def
);
1745 op1
= gimple_assign_rhs1 (def
);
1748 /* Get for EQ_EXPR or BIT_XOR_EXPR operation the second operand.
1749 If CODE isn't an EQ_EXPR, BIT_XOR_EXPR, or BIT_NOT_EXPR, then return. */
1750 if (code
== EQ_EXPR
|| code
== NE_EXPR
1751 || code
== BIT_XOR_EXPR
)
1752 op2
= gimple_assign_rhs2 (def
);
1757 if (truth_valued_ssa_name (name
))
1761 /* Check if we have X == 0 and X has an integral type. */
1762 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1
)))
1764 if (integer_zerop (op2
))
1768 /* Check if we have X != 1 and X is a truth-valued. */
1769 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1
)))
1771 if (integer_onep (op2
) && truth_valued_ssa_name (op1
))
1775 /* Check if we have X ^ 1 and X is truth valued. */
1776 if (integer_onep (op2
) && truth_valued_ssa_name (op1
))
1786 /* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
1787 operations CODE, if one operand has the logically inverted
1788 value of the other. */
1790 simplify_bitwise_binary_1 (enum tree_code code
, tree type
,
1791 tree arg1
, tree arg2
)
1795 /* If CODE isn't a bitwise binary operation, return NULL_TREE. */
1796 if (code
!= BIT_AND_EXPR
&& code
!= BIT_IOR_EXPR
1797 && code
!= BIT_XOR_EXPR
)
1800 /* First check if operands ARG1 and ARG2 are equal. If so
1801 return NULL_TREE as this optimization is handled fold_stmt. */
1804 /* See if we have in arguments logical-not patterns. */
1805 if (((anot
= lookup_logical_inverted_value (arg1
)) == NULL_TREE
1807 && ((anot
= lookup_logical_inverted_value (arg2
)) == NULL_TREE
1812 if (code
== BIT_AND_EXPR
)
1813 return fold_convert (type
, integer_zero_node
);
1814 /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued. */
1815 if (truth_valued_ssa_name (anot
))
1816 return fold_convert (type
, integer_one_node
);
1818 /* ??? Otherwise result is (X != 0 ? X : 1). not handled. */
1822 /* Given a ssa_name in NAME see if it was defined by an assignment and
1823 set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
1824 to the second operand on the rhs. */
1827 defcodefor_name (tree name
, enum tree_code
*code
, tree
*arg1
, tree
*arg2
)
1830 enum tree_code code1
;
1834 enum gimple_rhs_class grhs_class
;
1836 code1
= TREE_CODE (name
);
1839 grhs_class
= get_gimple_rhs_class (code1
);
1841 if (code1
== SSA_NAME
)
1843 def
= SSA_NAME_DEF_STMT (name
);
1845 if (def
&& is_gimple_assign (def
)
1846 && can_propagate_from (def
))
1848 code1
= gimple_assign_rhs_code (def
);
1849 arg11
= gimple_assign_rhs1 (def
);
1850 arg21
= gimple_assign_rhs2 (def
);
1851 arg31
= gimple_assign_rhs2 (def
);
1854 else if (grhs_class
== GIMPLE_TERNARY_RHS
1855 || GIMPLE_BINARY_RHS
1857 || GIMPLE_SINGLE_RHS
)
1858 extract_ops_from_tree_1 (name
, &code1
, &arg11
, &arg21
, &arg31
);
1864 /* Ignore arg3 currently. */
1867 /* Return true if a conversion of an operand from type FROM to type TO
1868 should be applied after performing the operation instead. */
1871 hoist_conversion_for_bitop_p (tree to
, tree from
)
1873 /* That's a good idea if the conversion widens the operand, thus
1874 after hoisting the conversion the operation will be narrower. */
1875 if (TYPE_PRECISION (from
) < TYPE_PRECISION (to
))
1878 /* It's also a good idea if the conversion is to a non-integer mode. */
1879 if (GET_MODE_CLASS (TYPE_MODE (to
)) != MODE_INT
)
1882 /* Or if the precision of TO is not the same as the precision
1884 if (TYPE_PRECISION (to
) != GET_MODE_PRECISION (TYPE_MODE (to
)))
1890 /* GSI points to a statement of the form
1892 result = OP0 CODE OP1
1894 Where OP0 and OP1 are single bit SSA_NAMEs and CODE is either
1895 BIT_AND_EXPR or BIT_IOR_EXPR.
1897 If OP0 is fed by a bitwise negation of another single bit SSA_NAME,
1898 then we can simplify the two statements into a single LT_EXPR or LE_EXPR
1899 when code is BIT_AND_EXPR and BIT_IOR_EXPR respectively.
1901 If a simplification is made, return TRUE, else return FALSE. */
1903 simplify_bitwise_binary_boolean (gimple_stmt_iterator
*gsi
,
1904 enum tree_code code
,
1907 gimple op0_def_stmt
= SSA_NAME_DEF_STMT (op0
);
1909 if (!is_gimple_assign (op0_def_stmt
)
1910 || (gimple_assign_rhs_code (op0_def_stmt
) != BIT_NOT_EXPR
))
1913 tree x
= gimple_assign_rhs1 (op0_def_stmt
);
1914 if (TREE_CODE (x
) == SSA_NAME
1915 && INTEGRAL_TYPE_P (TREE_TYPE (x
))
1916 && TYPE_PRECISION (TREE_TYPE (x
)) == 1
1917 && TYPE_UNSIGNED (TREE_TYPE (x
)) == TYPE_UNSIGNED (TREE_TYPE (op1
)))
1919 enum tree_code newcode
;
1921 gimple stmt
= gsi_stmt (*gsi
);
1922 gimple_assign_set_rhs1 (stmt
, x
);
1923 gimple_assign_set_rhs2 (stmt
, op1
);
1924 if (code
== BIT_AND_EXPR
)
1925 newcode
= TYPE_UNSIGNED (TREE_TYPE (x
)) ? LT_EXPR
: GT_EXPR
;
1927 newcode
= TYPE_UNSIGNED (TREE_TYPE (x
)) ? LE_EXPR
: GE_EXPR
;
1928 gimple_assign_set_rhs_code (stmt
, newcode
);
1936 /* Simplify bitwise binary operations.
1937 Return true if a transformation applied, otherwise return false. */
1940 simplify_bitwise_binary (gimple_stmt_iterator
*gsi
)
1942 gimple stmt
= gsi_stmt (*gsi
);
1943 tree arg1
= gimple_assign_rhs1 (stmt
);
1944 tree arg2
= gimple_assign_rhs2 (stmt
);
1945 enum tree_code code
= gimple_assign_rhs_code (stmt
);
1947 tree def1_arg1
, def1_arg2
, def2_arg1
, def2_arg2
;
1948 enum tree_code def1_code
, def2_code
;
1950 defcodefor_name (arg1
, &def1_code
, &def1_arg1
, &def1_arg2
);
1951 defcodefor_name (arg2
, &def2_code
, &def2_arg1
, &def2_arg2
);
1953 /* Try to fold (type) X op CST -> (type) (X op ((type-x) CST))
1955 if (TREE_CODE (arg2
) == INTEGER_CST
1956 && CONVERT_EXPR_CODE_P (def1_code
)
1957 && hoist_conversion_for_bitop_p (TREE_TYPE (arg1
), TREE_TYPE (def1_arg1
))
1958 && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1
))
1959 && int_fits_type_p (arg2
, TREE_TYPE (def1_arg1
)))
1962 tree tem
= make_ssa_name (TREE_TYPE (def1_arg1
), NULL
);
1964 gimple_build_assign_with_ops (code
, tem
, def1_arg1
,
1965 fold_convert_loc (gimple_location (stmt
),
1966 TREE_TYPE (def1_arg1
),
1968 gimple_set_location (newop
, gimple_location (stmt
));
1969 gsi_insert_before (gsi
, newop
, GSI_SAME_STMT
);
1970 gimple_assign_set_rhs_with_ops_1 (gsi
, NOP_EXPR
,
1971 tem
, NULL_TREE
, NULL_TREE
);
1972 update_stmt (gsi_stmt (*gsi
));
1976 /* For bitwise binary operations apply operand conversions to the
1977 binary operation result instead of to the operands. This allows
1978 to combine successive conversions and bitwise binary operations. */
1979 if (CONVERT_EXPR_CODE_P (def1_code
)
1980 && CONVERT_EXPR_CODE_P (def2_code
)
1981 && types_compatible_p (TREE_TYPE (def1_arg1
), TREE_TYPE (def2_arg1
))
1982 && hoist_conversion_for_bitop_p (TREE_TYPE (arg1
), TREE_TYPE (def1_arg1
)))
1985 tree tem
= make_ssa_name (TREE_TYPE (def1_arg1
), NULL
);
1986 newop
= gimple_build_assign_with_ops (code
, tem
, def1_arg1
, def2_arg1
);
1987 gimple_set_location (newop
, gimple_location (stmt
));
1988 gsi_insert_before (gsi
, newop
, GSI_SAME_STMT
);
1989 gimple_assign_set_rhs_with_ops_1 (gsi
, NOP_EXPR
,
1990 tem
, NULL_TREE
, NULL_TREE
);
1991 update_stmt (gsi_stmt (*gsi
));
1996 /* Simplify (A & B) OP0 (C & B) to (A OP0 C) & B. */
1997 if (def1_code
== def2_code
1998 && def1_code
== BIT_AND_EXPR
1999 && operand_equal_for_phi_arg_p (def1_arg2
,
2005 tree inner
= fold_build2 (code
, TREE_TYPE (arg2
), a
, c
);
2006 /* If A OP0 C (this usually means C is the same as A) is 0
2007 then fold it down correctly. */
2008 if (integer_zerop (inner
))
2010 gimple_assign_set_rhs_from_tree (gsi
, inner
);
2014 /* If A OP0 C (this usually means C is the same as A) is a ssa_name
2015 then fold it down correctly. */
2016 else if (TREE_CODE (inner
) == SSA_NAME
)
2018 tree outer
= fold_build2 (def1_code
, TREE_TYPE (inner
),
2020 gimple_assign_set_rhs_from_tree (gsi
, outer
);
2028 tem
= make_ssa_name (TREE_TYPE (arg2
), NULL
);
2029 newop
= gimple_build_assign_with_ops (code
, tem
, a
, c
);
2030 gimple_set_location (newop
, gimple_location (stmt
));
2031 /* Make sure to re-process the new stmt as it's walking upwards. */
2032 gsi_insert_before (gsi
, newop
, GSI_NEW_STMT
);
2033 gimple_assign_set_rhs1 (stmt
, tem
);
2034 gimple_assign_set_rhs2 (stmt
, b
);
2035 gimple_assign_set_rhs_code (stmt
, def1_code
);
2041 /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2). */
2042 if (code
== BIT_AND_EXPR
2043 && def1_code
== BIT_IOR_EXPR
2044 && CONSTANT_CLASS_P (arg2
)
2045 && CONSTANT_CLASS_P (def1_arg2
))
2047 tree cst
= fold_build2 (BIT_AND_EXPR
, TREE_TYPE (arg2
),
2051 if (integer_zerop (cst
))
2053 gimple_assign_set_rhs1 (stmt
, def1_arg1
);
2057 tem
= make_ssa_name (TREE_TYPE (arg2
), NULL
);
2058 newop
= gimple_build_assign_with_ops (BIT_AND_EXPR
,
2059 tem
, def1_arg1
, arg2
);
2060 gimple_set_location (newop
, gimple_location (stmt
));
2061 /* Make sure to re-process the new stmt as it's walking upwards. */
2062 gsi_insert_before (gsi
, newop
, GSI_NEW_STMT
);
2063 gimple_assign_set_rhs1 (stmt
, tem
);
2064 gimple_assign_set_rhs2 (stmt
, cst
);
2065 gimple_assign_set_rhs_code (stmt
, BIT_IOR_EXPR
);
2070 /* Combine successive equal operations with constants. */
2071 if ((code
== BIT_AND_EXPR
2072 || code
== BIT_IOR_EXPR
2073 || code
== BIT_XOR_EXPR
)
2074 && def1_code
== code
2075 && CONSTANT_CLASS_P (arg2
)
2076 && CONSTANT_CLASS_P (def1_arg2
))
2078 tree cst
= fold_build2 (code
, TREE_TYPE (arg2
),
2080 gimple_assign_set_rhs1 (stmt
, def1_arg1
);
2081 gimple_assign_set_rhs2 (stmt
, cst
);
2086 /* Canonicalize X ^ ~0 to ~X. */
2087 if (code
== BIT_XOR_EXPR
2088 && integer_all_onesp (arg2
))
2090 gimple_assign_set_rhs_with_ops (gsi
, BIT_NOT_EXPR
, arg1
, NULL_TREE
);
2091 gcc_assert (gsi_stmt (*gsi
) == stmt
);
2096 /* Try simple folding for X op !X, and X op X. */
2097 res
= simplify_bitwise_binary_1 (code
, TREE_TYPE (arg1
), arg1
, arg2
);
2098 if (res
!= NULL_TREE
)
2100 gimple_assign_set_rhs_from_tree (gsi
, res
);
2101 update_stmt (gsi_stmt (*gsi
));
2105 if (code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
)
2107 enum tree_code ocode
= code
== BIT_AND_EXPR
? BIT_IOR_EXPR
: BIT_AND_EXPR
;
2108 if (def1_code
== ocode
)
2111 enum tree_code coden
;
2113 /* ( X | Y) & X -> X */
2114 /* ( X & Y) | X -> X */
2118 gimple_assign_set_rhs_from_tree (gsi
, x
);
2119 update_stmt (gsi_stmt (*gsi
));
2123 defcodefor_name (def1_arg1
, &coden
, &a1
, &a2
);
2124 /* (~X | Y) & X -> X & Y */
2125 /* (~X & Y) | X -> X | Y */
2126 if (coden
== BIT_NOT_EXPR
&& a1
== x
)
2128 gimple_assign_set_rhs_with_ops (gsi
, code
,
2130 gcc_assert (gsi_stmt (*gsi
) == stmt
);
2134 defcodefor_name (def1_arg2
, &coden
, &a1
, &a2
);
2135 /* (Y | ~X) & X -> X & Y */
2136 /* (Y & ~X) | X -> X | Y */
2137 if (coden
== BIT_NOT_EXPR
&& a1
== x
)
2139 gimple_assign_set_rhs_with_ops (gsi
, code
,
2141 gcc_assert (gsi_stmt (*gsi
) == stmt
);
2146 if (def2_code
== ocode
)
2148 enum tree_code coden
;
2151 /* X & ( X | Y) -> X */
2152 /* X | ( X & Y) -> X */
2156 gimple_assign_set_rhs_from_tree (gsi
, x
);
2157 update_stmt (gsi_stmt (*gsi
));
2160 defcodefor_name (def2_arg1
, &coden
, &a1
, NULL
);
2161 /* (~X | Y) & X -> X & Y */
2162 /* (~X & Y) | X -> X | Y */
2163 if (coden
== BIT_NOT_EXPR
&& a1
== x
)
2165 gimple_assign_set_rhs_with_ops (gsi
, code
,
2167 gcc_assert (gsi_stmt (*gsi
) == stmt
);
2171 defcodefor_name (def2_arg2
, &coden
, &a1
, NULL
);
2172 /* (Y | ~X) & X -> X & Y */
2173 /* (Y & ~X) | X -> X | Y */
2174 if (coden
== BIT_NOT_EXPR
&& a1
== x
)
2176 gimple_assign_set_rhs_with_ops (gsi
, code
,
2178 gcc_assert (gsi_stmt (*gsi
) == stmt
);
2184 /* If arg1 and arg2 are booleans (or any single bit type)
2185 then try to simplify:
2192 But only do this if our result feeds into a comparison as
2193 this transformation is not always a win, particularly on
2194 targets with and-not instructions. */
2195 if (TREE_CODE (arg1
) == SSA_NAME
2196 && TREE_CODE (arg2
) == SSA_NAME
2197 && INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
2198 && TYPE_PRECISION (TREE_TYPE (arg1
)) == 1
2199 && TYPE_PRECISION (TREE_TYPE (arg2
)) == 1
2200 && (TYPE_UNSIGNED (TREE_TYPE (arg1
))
2201 == TYPE_UNSIGNED (TREE_TYPE (arg2
))))
2203 use_operand_p use_p
;
2206 if (single_imm_use (gimple_assign_lhs (stmt
), &use_p
, &use_stmt
))
2208 if (gimple_code (use_stmt
) == GIMPLE_COND
2209 && gimple_cond_lhs (use_stmt
) == gimple_assign_lhs (stmt
)
2210 && integer_zerop (gimple_cond_rhs (use_stmt
))
2211 && gimple_cond_code (use_stmt
) == NE_EXPR
)
2213 if (simplify_bitwise_binary_boolean (gsi
, code
, arg1
, arg2
))
2215 if (simplify_bitwise_binary_boolean (gsi
, code
, arg2
, arg1
))
2225 /* Recognize rotation patterns. Return true if a transformation
2226 applied, otherwise return false.
2228 We are looking for X with unsigned type T with bitsize B, OP being
2229 +, | or ^, some type T2 wider than T and
2230 (X << CNT1) OP (X >> CNT2) iff CNT1 + CNT2 == B
2231 ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2)) iff CNT1 + CNT2 == B
2232 (X << Y) OP (X >> (B - Y))
2233 (X << (int) Y) OP (X >> (int) (B - Y))
2234 ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
2235 ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
2236 (X << Y) | (X >> ((-Y) & (B - 1)))
2237 (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
2238 ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
2239 ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
2241 and transform these into:
2245 Note, in the patterns with T2 type, the type of OP operands
2246 might be even a signed type, but should have precision B. */
2249 simplify_rotate (gimple_stmt_iterator
*gsi
)
2251 gimple stmt
= gsi_stmt (*gsi
);
2252 tree arg
[2], rtype
, rotcnt
= NULL_TREE
;
2253 tree def_arg1
[2], def_arg2
[2];
2254 enum tree_code def_code
[2];
2257 bool swapped_p
= false;
2260 arg
[0] = gimple_assign_rhs1 (stmt
);
2261 arg
[1] = gimple_assign_rhs2 (stmt
);
2262 rtype
= TREE_TYPE (arg
[0]);
2264 /* Only create rotates in complete modes. Other cases are not
2265 expanded properly. */
2266 if (!INTEGRAL_TYPE_P (rtype
)
2267 || TYPE_PRECISION (rtype
) != GET_MODE_PRECISION (TYPE_MODE (rtype
)))
2270 for (i
= 0; i
< 2; i
++)
2271 defcodefor_name (arg
[i
], &def_code
[i
], &def_arg1
[i
], &def_arg2
[i
]);
2273 /* Look through narrowing conversions. */
2274 if (CONVERT_EXPR_CODE_P (def_code
[0])
2275 && CONVERT_EXPR_CODE_P (def_code
[1])
2276 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1
[0]))
2277 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1
[1]))
2278 && TYPE_PRECISION (TREE_TYPE (def_arg1
[0]))
2279 == TYPE_PRECISION (TREE_TYPE (def_arg1
[1]))
2280 && TYPE_PRECISION (TREE_TYPE (def_arg1
[0])) > TYPE_PRECISION (rtype
)
2281 && has_single_use (arg
[0])
2282 && has_single_use (arg
[1]))
2284 for (i
= 0; i
< 2; i
++)
2286 arg
[i
] = def_arg1
[i
];
2287 defcodefor_name (arg
[i
], &def_code
[i
], &def_arg1
[i
], &def_arg2
[i
]);
2291 /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR. */
2292 for (i
= 0; i
< 2; i
++)
2293 if (def_code
[i
] != LSHIFT_EXPR
&& def_code
[i
] != RSHIFT_EXPR
)
2295 else if (!has_single_use (arg
[i
]))
2297 if (def_code
[0] == def_code
[1])
2300 /* If we've looked through narrowing conversions before, look through
2301 widening conversions from unsigned type with the same precision
2303 if (TYPE_PRECISION (TREE_TYPE (def_arg1
[0])) != TYPE_PRECISION (rtype
))
2304 for (i
= 0; i
< 2; i
++)
2307 enum tree_code code
;
2308 defcodefor_name (def_arg1
[i
], &code
, &tem
, NULL
);
2309 if (!CONVERT_EXPR_CODE_P (code
)
2310 || !INTEGRAL_TYPE_P (TREE_TYPE (tem
))
2311 || TYPE_PRECISION (TREE_TYPE (tem
)) != TYPE_PRECISION (rtype
))
2315 /* Both shifts have to use the same first operand. */
2316 if (TREE_CODE (def_arg1
[0]) != SSA_NAME
|| def_arg1
[0] != def_arg1
[1])
2318 if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1
[0])))
2321 /* CNT1 + CNT2 == B case above. */
2322 if (tree_fits_uhwi_p (def_arg2
[0])
2323 && tree_fits_uhwi_p (def_arg2
[1])
2324 && tree_to_uhwi (def_arg2
[0])
2325 + tree_to_uhwi (def_arg2
[1]) == TYPE_PRECISION (rtype
))
2326 rotcnt
= def_arg2
[0];
2327 else if (TREE_CODE (def_arg2
[0]) != SSA_NAME
2328 || TREE_CODE (def_arg2
[1]) != SSA_NAME
)
2332 tree cdef_arg1
[2], cdef_arg2
[2], def_arg2_alt
[2];
2333 enum tree_code cdef_code
[2];
2334 /* Look through conversion of the shift count argument.
2335 The C/C++ FE cast any shift count argument to integer_type_node.
2336 The only problem might be if the shift count type maximum value
2337 is equal or smaller than number of bits in rtype. */
2338 for (i
= 0; i
< 2; i
++)
2340 def_arg2_alt
[i
] = def_arg2
[i
];
2341 defcodefor_name (def_arg2
[i
], &cdef_code
[i
],
2342 &cdef_arg1
[i
], &cdef_arg2
[i
]);
2343 if (CONVERT_EXPR_CODE_P (cdef_code
[i
])
2344 && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1
[i
]))
2345 && TYPE_PRECISION (TREE_TYPE (cdef_arg1
[i
]))
2346 > floor_log2 (TYPE_PRECISION (rtype
))
2347 && TYPE_PRECISION (TREE_TYPE (cdef_arg1
[i
]))
2348 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (cdef_arg1
[i
]))))
2350 def_arg2_alt
[i
] = cdef_arg1
[i
];
2351 defcodefor_name (def_arg2_alt
[i
], &cdef_code
[i
],
2352 &cdef_arg1
[i
], &cdef_arg2
[i
]);
2355 for (i
= 0; i
< 2; i
++)
2356 /* Check for one shift count being Y and the other B - Y,
2357 with optional casts. */
2358 if (cdef_code
[i
] == MINUS_EXPR
2359 && tree_fits_shwi_p (cdef_arg1
[i
])
2360 && tree_to_shwi (cdef_arg1
[i
]) == TYPE_PRECISION (rtype
)
2361 && TREE_CODE (cdef_arg2
[i
]) == SSA_NAME
)
2364 enum tree_code code
;
2366 if (cdef_arg2
[i
] == def_arg2
[1 - i
]
2367 || cdef_arg2
[i
] == def_arg2_alt
[1 - i
])
2369 rotcnt
= cdef_arg2
[i
];
2372 defcodefor_name (cdef_arg2
[i
], &code
, &tem
, NULL
);
2373 if (CONVERT_EXPR_CODE_P (code
)
2374 && INTEGRAL_TYPE_P (TREE_TYPE (tem
))
2375 && TYPE_PRECISION (TREE_TYPE (tem
))
2376 > floor_log2 (TYPE_PRECISION (rtype
))
2377 && TYPE_PRECISION (TREE_TYPE (tem
))
2378 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem
)))
2379 && (tem
== def_arg2
[1 - i
]
2380 || tem
== def_arg2_alt
[1 - i
]))
2386 /* The above sequence isn't safe for Y being 0,
2387 because then one of the shifts triggers undefined behavior.
2388 This alternative is safe even for rotation count of 0.
2389 One shift count is Y and the other (-Y) & (B - 1). */
2390 else if (cdef_code
[i
] == BIT_AND_EXPR
2391 && tree_fits_shwi_p (cdef_arg2
[i
])
2392 && tree_to_shwi (cdef_arg2
[i
])
2393 == TYPE_PRECISION (rtype
) - 1
2394 && TREE_CODE (cdef_arg1
[i
]) == SSA_NAME
2395 && gimple_assign_rhs_code (stmt
) == BIT_IOR_EXPR
)
2398 enum tree_code code
;
2400 defcodefor_name (cdef_arg1
[i
], &code
, &tem
, NULL
);
2401 if (CONVERT_EXPR_CODE_P (code
)
2402 && INTEGRAL_TYPE_P (TREE_TYPE (tem
))
2403 && TYPE_PRECISION (TREE_TYPE (tem
))
2404 > floor_log2 (TYPE_PRECISION (rtype
))
2405 && TYPE_PRECISION (TREE_TYPE (tem
))
2406 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem
))))
2407 defcodefor_name (tem
, &code
, &tem
, NULL
);
2409 if (code
== NEGATE_EXPR
)
2411 if (tem
== def_arg2
[1 - i
] || tem
== def_arg2_alt
[1 - i
])
2416 defcodefor_name (tem
, &code
, &tem
, NULL
);
2417 if (CONVERT_EXPR_CODE_P (code
)
2418 && INTEGRAL_TYPE_P (TREE_TYPE (tem
))
2419 && TYPE_PRECISION (TREE_TYPE (tem
))
2420 > floor_log2 (TYPE_PRECISION (rtype
))
2421 && TYPE_PRECISION (TREE_TYPE (tem
))
2422 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem
)))
2423 && (tem
== def_arg2
[1 - i
]
2424 || tem
== def_arg2_alt
[1 - i
]))
2431 if (rotcnt
== NULL_TREE
)
2436 if (!useless_type_conversion_p (TREE_TYPE (def_arg2
[0]),
2437 TREE_TYPE (rotcnt
)))
2439 g
= gimple_build_assign_with_ops (NOP_EXPR
,
2440 make_ssa_name (TREE_TYPE (def_arg2
[0]),
2443 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
2444 rotcnt
= gimple_assign_lhs (g
);
2446 lhs
= gimple_assign_lhs (stmt
);
2447 if (!useless_type_conversion_p (rtype
, TREE_TYPE (def_arg1
[0])))
2448 lhs
= make_ssa_name (TREE_TYPE (def_arg1
[0]), NULL
);
2449 g
= gimple_build_assign_with_ops (((def_code
[0] == LSHIFT_EXPR
) ^ swapped_p
)
2450 ? LROTATE_EXPR
: RROTATE_EXPR
,
2451 lhs
, def_arg1
[0], rotcnt
);
2452 if (!useless_type_conversion_p (rtype
, TREE_TYPE (def_arg1
[0])))
2454 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
2455 g
= gimple_build_assign_with_ops (NOP_EXPR
, gimple_assign_lhs (stmt
),
2458 gsi_replace (gsi
, g
, false);
2462 /* Perform re-associations of the plus or minus statement STMT that are
2463 always permitted. Returns true if the CFG was changed. */
2466 associate_plusminus (gimple_stmt_iterator
*gsi
)
2468 gimple stmt
= gsi_stmt (*gsi
);
2469 tree rhs1
= gimple_assign_rhs1 (stmt
);
2470 tree rhs2
= gimple_assign_rhs2 (stmt
);
2471 enum tree_code code
= gimple_assign_rhs_code (stmt
);
2474 /* We can't reassociate at all for saturating types. */
2475 if (TYPE_SATURATING (TREE_TYPE (rhs1
)))
2478 /* First contract negates. */
2483 /* A +- (-B) -> A -+ B. */
2484 if (TREE_CODE (rhs2
) == SSA_NAME
)
2486 gimple def_stmt
= SSA_NAME_DEF_STMT (rhs2
);
2487 if (is_gimple_assign (def_stmt
)
2488 && gimple_assign_rhs_code (def_stmt
) == NEGATE_EXPR
2489 && can_propagate_from (def_stmt
))
2491 code
= (code
== MINUS_EXPR
) ? PLUS_EXPR
: MINUS_EXPR
;
2492 gimple_assign_set_rhs_code (stmt
, code
);
2493 rhs2
= gimple_assign_rhs1 (def_stmt
);
2494 gimple_assign_set_rhs2 (stmt
, rhs2
);
2495 gimple_set_modified (stmt
, true);
2500 /* (-A) + B -> B - A. */
2501 if (TREE_CODE (rhs1
) == SSA_NAME
2502 && code
== PLUS_EXPR
)
2504 gimple def_stmt
= SSA_NAME_DEF_STMT (rhs1
);
2505 if (is_gimple_assign (def_stmt
)
2506 && gimple_assign_rhs_code (def_stmt
) == NEGATE_EXPR
2507 && can_propagate_from (def_stmt
))
2510 gimple_assign_set_rhs_code (stmt
, code
);
2512 gimple_assign_set_rhs1 (stmt
, rhs1
);
2513 rhs2
= gimple_assign_rhs1 (def_stmt
);
2514 gimple_assign_set_rhs2 (stmt
, rhs2
);
2515 gimple_set_modified (stmt
, true);
2522 /* We can't reassociate floating-point or fixed-point plus or minus
2523 because of saturation to +-Inf. */
2524 if (FLOAT_TYPE_P (TREE_TYPE (rhs1
))
2525 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1
)))
2528 /* Second match patterns that allow contracting a plus-minus pair
2529 irrespective of overflow issues.
2531 (A +- B) - A -> +- B
2533 (CST +- A) +- CST -> CST +- A
2534 (A +- CST) +- CST -> A +- CST
2537 A - (A +- B) -> -+ B
2538 A +- (B +- A) -> +- B
2539 CST +- (CST +- A) -> CST +- A
2540 CST +- (A +- CST) -> CST +- A
2542 (T)(P + A) - (T)P -> (T)A
2544 via commutating the addition and contracting operations to zero
2545 by reassociation. */
2547 if (TREE_CODE (rhs1
) == SSA_NAME
)
2549 gimple def_stmt
= SSA_NAME_DEF_STMT (rhs1
);
2550 if (is_gimple_assign (def_stmt
) && can_propagate_from (def_stmt
))
2552 enum tree_code def_code
= gimple_assign_rhs_code (def_stmt
);
2553 if (def_code
== PLUS_EXPR
2554 || def_code
== MINUS_EXPR
)
2556 tree def_rhs1
= gimple_assign_rhs1 (def_stmt
);
2557 tree def_rhs2
= gimple_assign_rhs2 (def_stmt
);
2558 if (operand_equal_p (def_rhs1
, rhs2
, 0)
2559 && code
== MINUS_EXPR
)
2561 /* (A +- B) - A -> +- B. */
2562 code
= ((def_code
== PLUS_EXPR
)
2563 ? TREE_CODE (def_rhs2
) : NEGATE_EXPR
);
2566 gimple_assign_set_rhs_with_ops (gsi
, code
, rhs1
, NULL_TREE
);
2567 gcc_assert (gsi_stmt (*gsi
) == stmt
);
2568 gimple_set_modified (stmt
, true);
2570 else if (operand_equal_p (def_rhs2
, rhs2
, 0)
2571 && code
!= def_code
)
2573 /* (A +- B) -+ B -> A. */
2574 code
= TREE_CODE (def_rhs1
);
2577 gimple_assign_set_rhs_with_ops (gsi
, code
, rhs1
, NULL_TREE
);
2578 gcc_assert (gsi_stmt (*gsi
) == stmt
);
2579 gimple_set_modified (stmt
, true);
2581 else if (CONSTANT_CLASS_P (rhs2
)
2582 && CONSTANT_CLASS_P (def_rhs1
))
2584 /* (CST +- A) +- CST -> CST +- A. */
2585 tree cst
= fold_binary (code
, TREE_TYPE (rhs1
),
2587 if (cst
&& !TREE_OVERFLOW (cst
))
2590 gimple_assign_set_rhs_code (stmt
, code
);
2592 gimple_assign_set_rhs1 (stmt
, rhs1
);
2594 gimple_assign_set_rhs2 (stmt
, rhs2
);
2595 gimple_set_modified (stmt
, true);
2598 else if (CONSTANT_CLASS_P (rhs2
)
2599 && CONSTANT_CLASS_P (def_rhs2
))
2601 /* (A +- CST) +- CST -> A +- CST. */
2602 enum tree_code mix
= (code
== def_code
)
2603 ? PLUS_EXPR
: MINUS_EXPR
;
2604 tree cst
= fold_binary (mix
, TREE_TYPE (rhs1
),
2606 if (cst
&& !TREE_OVERFLOW (cst
))
2609 gimple_assign_set_rhs_code (stmt
, code
);
2611 gimple_assign_set_rhs1 (stmt
, rhs1
);
2613 gimple_assign_set_rhs2 (stmt
, rhs2
);
2614 gimple_set_modified (stmt
, true);
2618 else if (def_code
== BIT_NOT_EXPR
&& code
== PLUS_EXPR
)
2620 tree def_rhs1
= gimple_assign_rhs1 (def_stmt
);
2621 if (operand_equal_p (def_rhs1
, rhs2
, 0))
2624 rhs1
= build_all_ones_cst (TREE_TYPE (rhs2
));
2626 code
= TREE_CODE (rhs1
);
2627 gimple_assign_set_rhs_with_ops (gsi
, code
, rhs1
, NULL_TREE
);
2628 gcc_assert (gsi_stmt (*gsi
) == stmt
);
2629 gimple_set_modified (stmt
, true);
2631 else if ((TREE_CODE (TREE_TYPE (rhs2
)) != COMPLEX_TYPE
2632 && integer_onep (rhs2
))
2633 || (TREE_CODE (rhs2
) == COMPLEX_CST
2634 && integer_onep (TREE_REALPART (rhs2
))
2635 && integer_onep (TREE_IMAGPART (rhs2
))))
2641 gimple_assign_set_rhs_with_ops (gsi
, code
, rhs1
, NULL_TREE
);
2642 gcc_assert (gsi_stmt (*gsi
) == stmt
);
2643 gimple_set_modified (stmt
, true);
2646 else if (code
== MINUS_EXPR
2647 && CONVERT_EXPR_CODE_P (def_code
)
2648 && TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == SSA_NAME
2649 && TREE_CODE (rhs2
) == SSA_NAME
)
2651 /* (T)(P + A) - (T)P -> (T)A. */
2652 gimple def_stmt2
= SSA_NAME_DEF_STMT (rhs2
);
2653 if (is_gimple_assign (def_stmt2
)
2654 && can_propagate_from (def_stmt2
)
2655 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt2
))
2656 && TREE_CODE (gimple_assign_rhs1 (def_stmt2
)) == SSA_NAME
)
2658 /* Now we have (T)X - (T)P. */
2659 tree p
= gimple_assign_rhs1 (def_stmt2
);
2660 def_stmt2
= SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def_stmt
));
2661 if (is_gimple_assign (def_stmt2
)
2662 && can_propagate_from (def_stmt2
)
2663 && (gimple_assign_rhs_code (def_stmt2
) == POINTER_PLUS_EXPR
2664 || gimple_assign_rhs_code (def_stmt2
) == PLUS_EXPR
)
2665 && gimple_assign_rhs1 (def_stmt2
) == p
)
2667 /* And finally (T)(P + A) - (T)P. */
2668 tree a
= gimple_assign_rhs2 (def_stmt2
);
2669 /* For pointer types, if the conversion of A to the final
2670 type requires a sign- or zero-extension, then we have
2671 to punt - it is not defined which one is correct. */
2672 if (!POINTER_TYPE_P (TREE_TYPE (rhs1
))
2673 || TYPE_PRECISION (TREE_TYPE (rhs1
))
2674 <= TYPE_PRECISION (TREE_TYPE (a
))
2675 || (TREE_CODE (a
) == INTEGER_CST
2676 && tree_int_cst_sign_bit (a
) == 0))
2678 if (useless_type_conversion_p (TREE_TYPE (rhs1
),
2680 code
= TREE_CODE (a
);
2685 gimple_assign_set_rhs_with_ops (gsi
, code
, rhs1
,
2687 gcc_assert (gsi_stmt (*gsi
) == stmt
);
2688 gimple_set_modified (stmt
, true);
2696 if (rhs2
&& TREE_CODE (rhs2
) == SSA_NAME
)
2698 gimple def_stmt
= SSA_NAME_DEF_STMT (rhs2
);
2699 if (is_gimple_assign (def_stmt
) && can_propagate_from (def_stmt
))
2701 enum tree_code def_code
= gimple_assign_rhs_code (def_stmt
);
2702 if (def_code
== PLUS_EXPR
2703 || def_code
== MINUS_EXPR
)
2705 tree def_rhs1
= gimple_assign_rhs1 (def_stmt
);
2706 tree def_rhs2
= gimple_assign_rhs2 (def_stmt
);
2707 if (operand_equal_p (def_rhs1
, rhs1
, 0)
2708 && code
== MINUS_EXPR
)
2710 /* A - (A +- B) -> -+ B. */
2711 code
= ((def_code
== PLUS_EXPR
)
2712 ? NEGATE_EXPR
: TREE_CODE (def_rhs2
));
2715 gimple_assign_set_rhs_with_ops (gsi
, code
, rhs1
, NULL_TREE
);
2716 gcc_assert (gsi_stmt (*gsi
) == stmt
);
2717 gimple_set_modified (stmt
, true);
2719 else if (operand_equal_p (def_rhs2
, rhs1
, 0)
2720 && code
!= def_code
)
2722 /* A +- (B +- A) -> +- B. */
2723 code
= ((code
== PLUS_EXPR
)
2724 ? TREE_CODE (def_rhs1
) : NEGATE_EXPR
);
2727 gimple_assign_set_rhs_with_ops (gsi
, code
, rhs1
, NULL_TREE
);
2728 gcc_assert (gsi_stmt (*gsi
) == stmt
);
2729 gimple_set_modified (stmt
, true);
2731 else if (CONSTANT_CLASS_P (rhs1
)
2732 && CONSTANT_CLASS_P (def_rhs1
))
2734 /* CST +- (CST +- A) -> CST +- A. */
2735 tree cst
= fold_binary (code
, TREE_TYPE (rhs2
),
2737 if (cst
&& !TREE_OVERFLOW (cst
))
2739 code
= (code
== def_code
? PLUS_EXPR
: MINUS_EXPR
);
2740 gimple_assign_set_rhs_code (stmt
, code
);
2742 gimple_assign_set_rhs1 (stmt
, rhs1
);
2744 gimple_assign_set_rhs2 (stmt
, rhs2
);
2745 gimple_set_modified (stmt
, true);
2748 else if (CONSTANT_CLASS_P (rhs1
)
2749 && CONSTANT_CLASS_P (def_rhs2
))
2751 /* CST +- (A +- CST) -> CST +- A. */
2752 tree cst
= fold_binary (def_code
== code
2753 ? PLUS_EXPR
: MINUS_EXPR
,
2756 if (cst
&& !TREE_OVERFLOW (cst
))
2759 gimple_assign_set_rhs1 (stmt
, rhs1
);
2761 gimple_assign_set_rhs2 (stmt
, rhs2
);
2762 gimple_set_modified (stmt
, true);
2766 else if (def_code
== BIT_NOT_EXPR
)
2768 tree def_rhs1
= gimple_assign_rhs1 (def_stmt
);
2769 if (code
== PLUS_EXPR
2770 && operand_equal_p (def_rhs1
, rhs1
, 0))
2773 rhs1
= build_all_ones_cst (TREE_TYPE (rhs1
));
2775 code
= TREE_CODE (rhs1
);
2776 gimple_assign_set_rhs_with_ops (gsi
, code
, rhs1
, NULL_TREE
);
2777 gcc_assert (gsi_stmt (*gsi
) == stmt
);
2778 gimple_set_modified (stmt
, true);
2785 if (gimple_modified_p (stmt
))
2787 fold_stmt_inplace (gsi
);
2795 /* Associate operands of a POINTER_PLUS_EXPR assignmen at *GSI. Returns
2796 true if anything changed, false otherwise. */
2799 associate_pointerplus_align (gimple_stmt_iterator
*gsi
)
2801 gimple stmt
= gsi_stmt (*gsi
);
2803 tree ptr
, rhs
, algn
;
2806 tem = (sizetype) ptr;
2810 and produce the simpler and easier to analyze with respect to alignment
2811 ... = ptr & ~algn; */
2812 ptr
= gimple_assign_rhs1 (stmt
);
2813 rhs
= gimple_assign_rhs2 (stmt
);
2814 if (TREE_CODE (rhs
) != SSA_NAME
)
2816 def_stmt
= SSA_NAME_DEF_STMT (rhs
);
2817 if (!is_gimple_assign (def_stmt
)
2818 || gimple_assign_rhs_code (def_stmt
) != NEGATE_EXPR
)
2820 rhs
= gimple_assign_rhs1 (def_stmt
);
2821 if (TREE_CODE (rhs
) != SSA_NAME
)
2823 def_stmt
= SSA_NAME_DEF_STMT (rhs
);
2824 if (!is_gimple_assign (def_stmt
)
2825 || gimple_assign_rhs_code (def_stmt
) != BIT_AND_EXPR
)
2827 rhs
= gimple_assign_rhs1 (def_stmt
);
2828 algn
= gimple_assign_rhs2 (def_stmt
);
2829 if (TREE_CODE (rhs
) != SSA_NAME
2830 || TREE_CODE (algn
) != INTEGER_CST
)
2832 def_stmt
= SSA_NAME_DEF_STMT (rhs
);
2833 if (!is_gimple_assign (def_stmt
)
2834 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt
)))
2836 if (gimple_assign_rhs1 (def_stmt
) != ptr
)
2839 algn
= wide_int_to_tree (TREE_TYPE (ptr
), wi::bit_not (algn
));
2840 gimple_assign_set_rhs_with_ops (gsi
, BIT_AND_EXPR
, ptr
, algn
);
2841 fold_stmt_inplace (gsi
);
2847 /* Associate operands of a POINTER_PLUS_EXPR assignmen at *GSI. Returns
2848 true if anything changed, false otherwise. */
2851 associate_pointerplus_diff (gimple_stmt_iterator
*gsi
)
2853 gimple stmt
= gsi_stmt (*gsi
);
2861 tem4 = (unsigned long) tem3;
2865 ptr1
= gimple_assign_rhs1 (stmt
);
2866 rhs
= gimple_assign_rhs2 (stmt
);
2867 if (TREE_CODE (rhs
) != SSA_NAME
)
2869 gimple minus
= SSA_NAME_DEF_STMT (rhs
);
2870 /* Conditionally look through a sign-changing conversion. */
2871 if (is_gimple_assign (minus
)
2872 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (minus
))
2873 && (TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (minus
)))
2874 == TYPE_PRECISION (TREE_TYPE (rhs
)))
2875 && TREE_CODE (gimple_assign_rhs1 (minus
)) == SSA_NAME
)
2876 minus
= SSA_NAME_DEF_STMT (gimple_assign_rhs1 (minus
));
2877 if (!is_gimple_assign (minus
))
2879 if (gimple_assign_rhs_code (minus
) != MINUS_EXPR
)
2881 rhs
= gimple_assign_rhs2 (minus
);
2882 if (TREE_CODE (rhs
) != SSA_NAME
)
2884 def_stmt
= SSA_NAME_DEF_STMT (rhs
);
2885 if (!is_gimple_assign (def_stmt
)
2886 || ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt
))
2887 || gimple_assign_rhs1 (def_stmt
) != ptr1
)
2889 rhs
= gimple_assign_rhs1 (minus
);
2890 if (TREE_CODE (rhs
) != SSA_NAME
)
2892 def_stmt
= SSA_NAME_DEF_STMT (rhs
);
2893 if (!is_gimple_assign (def_stmt
)
2894 || ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt
)))
2896 rhs
= gimple_assign_rhs1 (def_stmt
);
2897 if (! useless_type_conversion_p (TREE_TYPE (ptr1
), TREE_TYPE (rhs
)))
2900 gimple_assign_set_rhs_with_ops (gsi
, TREE_CODE (rhs
), rhs
, NULL_TREE
);
2906 /* Associate operands of a POINTER_PLUS_EXPR assignmen at *GSI. Returns
2907 true if anything changed, false otherwise. */
2910 associate_pointerplus (gimple_stmt_iterator
*gsi
)
2912 gimple stmt
= gsi_stmt (*gsi
);
2914 tree ptr
, off1
, off2
;
2916 if (associate_pointerplus_align (gsi
)
2917 || associate_pointerplus_diff (gsi
))
2920 /* Associate (p +p off1) +p off2 as (p +p (off1 + off2)). */
2921 ptr
= gimple_assign_rhs1 (stmt
);
2922 off1
= gimple_assign_rhs2 (stmt
);
2923 if (TREE_CODE (ptr
) != SSA_NAME
2924 || !has_single_use (ptr
))
2926 def_stmt
= SSA_NAME_DEF_STMT (ptr
);
2927 if (!is_gimple_assign (def_stmt
)
2928 || gimple_assign_rhs_code (def_stmt
) != POINTER_PLUS_EXPR
2929 || !can_propagate_from (def_stmt
))
2931 ptr
= gimple_assign_rhs1 (def_stmt
);
2932 off2
= gimple_assign_rhs2 (def_stmt
);
2933 if (!types_compatible_p (TREE_TYPE (off1
), TREE_TYPE (off2
)))
2936 tree off
= make_ssa_name (TREE_TYPE (off1
), NULL
);
2937 gimple ostmt
= gimple_build_assign_with_ops (PLUS_EXPR
, off
, off1
, off2
);
2938 gsi_insert_before (gsi
, ostmt
, GSI_SAME_STMT
);
2940 gimple_assign_set_rhs_with_ops (gsi
, POINTER_PLUS_EXPR
, ptr
, off
);
2946 /* Combine two conversions in a row for the second conversion at *GSI.
2947 Returns 1 if there were any changes made, 2 if cfg-cleanup needs to
2948 run. Else it returns 0. */
2951 combine_conversions (gimple_stmt_iterator
*gsi
)
2953 gimple stmt
= gsi_stmt (*gsi
);
2956 enum tree_code code
= gimple_assign_rhs_code (stmt
);
2957 enum tree_code code2
;
2959 gcc_checking_assert (CONVERT_EXPR_CODE_P (code
)
2960 || code
== FLOAT_EXPR
2961 || code
== FIX_TRUNC_EXPR
);
2963 lhs
= gimple_assign_lhs (stmt
);
2964 op0
= gimple_assign_rhs1 (stmt
);
2965 if (useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (op0
)))
2967 gimple_assign_set_rhs_code (stmt
, TREE_CODE (op0
));
2971 if (TREE_CODE (op0
) != SSA_NAME
)
2974 def_stmt
= SSA_NAME_DEF_STMT (op0
);
2975 if (!is_gimple_assign (def_stmt
))
2978 code2
= gimple_assign_rhs_code (def_stmt
);
2980 if (CONVERT_EXPR_CODE_P (code2
) || code2
== FLOAT_EXPR
)
2982 tree defop0
= gimple_assign_rhs1 (def_stmt
);
2983 tree type
= TREE_TYPE (lhs
);
2984 tree inside_type
= TREE_TYPE (defop0
);
2985 tree inter_type
= TREE_TYPE (op0
);
2986 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
2987 int inside_ptr
= POINTER_TYPE_P (inside_type
);
2988 int inside_float
= FLOAT_TYPE_P (inside_type
);
2989 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
2990 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
2991 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
2992 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
2993 int inter_ptr
= POINTER_TYPE_P (inter_type
);
2994 int inter_float
= FLOAT_TYPE_P (inter_type
);
2995 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
2996 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
2997 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
2998 int final_int
= INTEGRAL_TYPE_P (type
);
2999 int final_ptr
= POINTER_TYPE_P (type
);
3000 int final_float
= FLOAT_TYPE_P (type
);
3001 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
3002 unsigned int final_prec
= TYPE_PRECISION (type
);
3003 int final_unsignedp
= TYPE_UNSIGNED (type
);
3005 /* Don't propagate ssa names that occur in abnormal phis. */
3006 if (TREE_CODE (defop0
) == SSA_NAME
3007 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defop0
))
3010 /* In addition to the cases of two conversions in a row
3011 handled below, if we are converting something to its own
3012 type via an object of identical or wider precision, neither
3013 conversion is needed. */
3014 if (useless_type_conversion_p (type
, inside_type
)
3015 && (((inter_int
|| inter_ptr
) && final_int
)
3016 || (inter_float
&& final_float
))
3017 && inter_prec
>= final_prec
)
3019 gimple_assign_set_rhs1 (stmt
, unshare_expr (defop0
));
3020 gimple_assign_set_rhs_code (stmt
, TREE_CODE (defop0
));
3022 return remove_prop_source_from_use (op0
) ? 2 : 1;
3025 /* Likewise, if the intermediate and initial types are either both
3026 float or both integer, we don't need the middle conversion if the
3027 former is wider than the latter and doesn't change the signedness
3028 (for integers). Avoid this if the final type is a pointer since
3029 then we sometimes need the middle conversion. Likewise if the
3030 final type has a precision not equal to the size of its mode. */
3031 if (((inter_int
&& inside_int
)
3032 || (inter_float
&& inside_float
)
3033 || (inter_vec
&& inside_vec
))
3034 && inter_prec
>= inside_prec
3035 && (inter_float
|| inter_vec
3036 || inter_unsignedp
== inside_unsignedp
)
3037 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
3038 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
3040 && (! final_vec
|| inter_prec
== inside_prec
))
3042 gimple_assign_set_rhs1 (stmt
, defop0
);
3044 return remove_prop_source_from_use (op0
) ? 2 : 1;
3047 /* If we have a sign-extension of a zero-extended value, we can
3048 replace that by a single zero-extension. Likewise if the
3049 final conversion does not change precision we can drop the
3050 intermediate conversion. */
3051 if (inside_int
&& inter_int
&& final_int
3052 && ((inside_prec
< inter_prec
&& inter_prec
< final_prec
3053 && inside_unsignedp
&& !inter_unsignedp
)
3054 || final_prec
== inter_prec
))
3056 gimple_assign_set_rhs1 (stmt
, defop0
);
3058 return remove_prop_source_from_use (op0
) ? 2 : 1;
3061 /* Two conversions in a row are not needed unless:
3062 - some conversion is floating-point (overstrict for now), or
3063 - some conversion is a vector (overstrict for now), or
3064 - the intermediate type is narrower than both initial and
3066 - the intermediate type and innermost type differ in signedness,
3067 and the outermost type is wider than the intermediate, or
3068 - the initial type is a pointer type and the precisions of the
3069 intermediate and final types differ, or
3070 - the final type is a pointer type and the precisions of the
3071 initial and intermediate types differ. */
3072 if (! inside_float
&& ! inter_float
&& ! final_float
3073 && ! inside_vec
&& ! inter_vec
&& ! final_vec
3074 && (inter_prec
>= inside_prec
|| inter_prec
>= final_prec
)
3075 && ! (inside_int
&& inter_int
3076 && inter_unsignedp
!= inside_unsignedp
3077 && inter_prec
< final_prec
)
3078 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
3079 == (final_unsignedp
&& final_prec
> inter_prec
))
3080 && ! (inside_ptr
&& inter_prec
!= final_prec
)
3081 && ! (final_ptr
&& inside_prec
!= inter_prec
)
3082 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
3083 && TYPE_MODE (type
) == TYPE_MODE (inter_type
)))
3085 gimple_assign_set_rhs1 (stmt
, defop0
);
3087 return remove_prop_source_from_use (op0
) ? 2 : 1;
3090 /* A truncation to an unsigned type should be canonicalized as
3091 bitwise and of a mask. */
3092 if (final_int
&& inter_int
&& inside_int
3093 && final_prec
== inside_prec
3094 && final_prec
> inter_prec
3098 tem
= fold_build2 (BIT_AND_EXPR
, inside_type
,
3102 wi::mask (inter_prec
, false,
3103 TYPE_PRECISION (inside_type
))));
3104 if (!useless_type_conversion_p (type
, inside_type
))
3106 tem
= force_gimple_operand_gsi (gsi
, tem
, true, NULL_TREE
, true,
3108 gimple_assign_set_rhs1 (stmt
, tem
);
3111 gimple_assign_set_rhs_from_tree (gsi
, tem
);
3112 update_stmt (gsi_stmt (*gsi
));
3116 /* If we are converting an integer to a floating-point that can
3117 represent it exactly and back to an integer, we can skip the
3118 floating-point conversion. */
3119 if (inside_int
&& inter_float
&& final_int
&&
3120 (unsigned) significand_size (TYPE_MODE (inter_type
))
3121 >= inside_prec
- !inside_unsignedp
)
3123 if (useless_type_conversion_p (type
, inside_type
))
3125 gimple_assign_set_rhs1 (stmt
, unshare_expr (defop0
));
3126 gimple_assign_set_rhs_code (stmt
, TREE_CODE (defop0
));
3128 return remove_prop_source_from_use (op0
) ? 2 : 1;
3132 gimple_assign_set_rhs1 (stmt
, defop0
);
3133 gimple_assign_set_rhs_code (stmt
, CONVERT_EXPR
);
3135 return remove_prop_source_from_use (op0
) ? 2 : 1;
3143 /* Combine VIEW_CONVERT_EXPRs with their defining statement. */
3146 simplify_vce (gimple_stmt_iterator
*gsi
)
3148 gimple stmt
= gsi_stmt (*gsi
);
3149 tree type
= TREE_TYPE (gimple_assign_lhs (stmt
));
3151 /* Drop useless VIEW_CONVERT_EXPRs. */
3152 tree op
= TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0);
3153 if (useless_type_conversion_p (type
, TREE_TYPE (op
)))
3155 gimple_assign_set_rhs1 (stmt
, op
);
3160 if (TREE_CODE (op
) != SSA_NAME
)
3163 gimple def_stmt
= SSA_NAME_DEF_STMT (op
);
3164 if (!is_gimple_assign (def_stmt
))
3167 tree def_op
= gimple_assign_rhs1 (def_stmt
);
3168 switch (gimple_assign_rhs_code (def_stmt
))
3171 /* Strip integral conversions that do not change the precision. */
3172 if ((INTEGRAL_TYPE_P (TREE_TYPE (op
))
3173 || POINTER_TYPE_P (TREE_TYPE (op
)))
3174 && (INTEGRAL_TYPE_P (TREE_TYPE (def_op
))
3175 || POINTER_TYPE_P (TREE_TYPE (def_op
)))
3176 && (TYPE_PRECISION (TREE_TYPE (op
))
3177 == TYPE_PRECISION (TREE_TYPE (def_op
))))
3179 TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0) = def_op
;
3185 case VIEW_CONVERT_EXPR
:
3186 /* Series of VIEW_CONVERT_EXPRs on register operands can
3188 if (TREE_CODE (TREE_OPERAND (def_op
, 0)) == SSA_NAME
)
3190 if (useless_type_conversion_p (type
,
3191 TREE_TYPE (TREE_OPERAND (def_op
, 0))))
3192 gimple_assign_set_rhs1 (stmt
, TREE_OPERAND (def_op
, 0));
3194 TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0)
3195 = TREE_OPERAND (def_op
, 0);
3206 /* Combine an element access with a shuffle. Returns true if there were
3207 any changes made, else it returns false. */
3210 simplify_bitfield_ref (gimple_stmt_iterator
*gsi
)
3212 gimple stmt
= gsi_stmt (*gsi
);
3214 tree op
, op0
, op1
, op2
;
3216 unsigned idx
, n
, size
;
3217 enum tree_code code
;
3219 op
= gimple_assign_rhs1 (stmt
);
3220 gcc_checking_assert (TREE_CODE (op
) == BIT_FIELD_REF
);
3222 op0
= TREE_OPERAND (op
, 0);
3223 if (TREE_CODE (op0
) != SSA_NAME
3224 || TREE_CODE (TREE_TYPE (op0
)) != VECTOR_TYPE
)
3227 def_stmt
= get_prop_source_stmt (op0
, false, NULL
);
3228 if (!def_stmt
|| !can_propagate_from (def_stmt
))
3231 op1
= TREE_OPERAND (op
, 1);
3232 op2
= TREE_OPERAND (op
, 2);
3233 code
= gimple_assign_rhs_code (def_stmt
);
3235 if (code
== CONSTRUCTOR
)
3237 tree tem
= fold_ternary (BIT_FIELD_REF
, TREE_TYPE (op
),
3238 gimple_assign_rhs1 (def_stmt
), op1
, op2
);
3239 if (!tem
|| !valid_gimple_rhs_p (tem
))
3241 gimple_assign_set_rhs_from_tree (gsi
, tem
);
3242 update_stmt (gsi_stmt (*gsi
));
3246 elem_type
= TREE_TYPE (TREE_TYPE (op0
));
3247 if (TREE_TYPE (op
) != elem_type
)
3250 size
= TREE_INT_CST_LOW (TYPE_SIZE (elem_type
));
3251 n
= TREE_INT_CST_LOW (op1
) / size
;
3254 idx
= TREE_INT_CST_LOW (op2
) / size
;
3256 if (code
== VEC_PERM_EXPR
)
3258 tree p
, m
, index
, tem
;
3260 m
= gimple_assign_rhs3 (def_stmt
);
3261 if (TREE_CODE (m
) != VECTOR_CST
)
3263 nelts
= VECTOR_CST_NELTS (m
);
3264 idx
= TREE_INT_CST_LOW (VECTOR_CST_ELT (m
, idx
));
3268 p
= gimple_assign_rhs1 (def_stmt
);
3272 p
= gimple_assign_rhs2 (def_stmt
);
3275 index
= build_int_cst (TREE_TYPE (TREE_TYPE (m
)), idx
* size
);
3276 tem
= build3 (BIT_FIELD_REF
, TREE_TYPE (op
),
3277 unshare_expr (p
), op1
, index
);
3278 gimple_assign_set_rhs1 (stmt
, tem
);
3280 update_stmt (gsi_stmt (*gsi
));
3287 /* Determine whether applying the 2 permutations (mask1 then mask2)
3288 gives back one of the input. */
3291 is_combined_permutation_identity (tree mask1
, tree mask2
)
3294 unsigned int nelts
, i
, j
;
3295 bool maybe_identity1
= true;
3296 bool maybe_identity2
= true;
3298 gcc_checking_assert (TREE_CODE (mask1
) == VECTOR_CST
3299 && TREE_CODE (mask2
) == VECTOR_CST
);
3300 mask
= fold_ternary (VEC_PERM_EXPR
, TREE_TYPE (mask1
), mask1
, mask1
, mask2
);
3301 gcc_assert (TREE_CODE (mask
) == VECTOR_CST
);
3303 nelts
= VECTOR_CST_NELTS (mask
);
3304 for (i
= 0; i
< nelts
; i
++)
3306 tree val
= VECTOR_CST_ELT (mask
, i
);
3307 gcc_assert (TREE_CODE (val
) == INTEGER_CST
);
3308 j
= TREE_INT_CST_LOW (val
) & (2 * nelts
- 1);
3310 maybe_identity2
= false;
3311 else if (j
== i
+ nelts
)
3312 maybe_identity1
= false;
3316 return maybe_identity1
? 1 : maybe_identity2
? 2 : 0;
/* Combine a shuffle with its arguments.  Returns 1 if there were any
   changes made, 2 if cfg-cleanup needs to run.  Else it returns 0.

   NOTE(review): this chunk lost interior lines in extraction (the
   function's return type, braces and several early-exit "return 0"
   statements are not visible).  Only comments have been added here;
   no code tokens were changed.  */
simplify_permutation (gimple_stmt_iterator *gsi)
  gimple stmt = gsi_stmt (*gsi);
  tree op0, op1, op2, op3, arg0, arg1;
  enum tree_code code;
  bool single_use_op0 = false;

  /* STMT must be a VEC_PERM_EXPR: rhs3 is the selector applied to
     rhs1/rhs2.  */
  gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);

  op0 = gimple_assign_rhs1 (stmt);
  op1 = gimple_assign_rhs2 (stmt);
  op2 = gimple_assign_rhs3 (stmt);

  /* Only constant permutation masks can be analyzed.  */
  if (TREE_CODE (op2) != VECTOR_CST)

  if (TREE_CODE (op0) == VECTOR_CST)
  else if (TREE_CODE (op0) == SSA_NAME)
      /* Find the statement defining OP0 we may propagate from, and
	 record whether OP0 has a single use.  */
      def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
      if (!def_stmt || !can_propagate_from (def_stmt))
      code = gimple_assign_rhs_code (def_stmt);
      arg0 = gimple_assign_rhs1 (def_stmt);

  /* Two consecutive shuffles.  */
  if (code == VEC_PERM_EXPR)
      op3 = gimple_assign_rhs3 (def_stmt);
      /* The inner shuffle's mask must also be constant.  */
      if (TREE_CODE (op3) != VECTOR_CST)
      /* IDENT is 1 or 2 when applying OP2 after OP3 reduces to taking
	 the inner shuffle's first or second operand unchanged.  */
      ident = is_combined_permutation_identity (op3, op2);
      orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
			  : gimple_assign_rhs2 (def_stmt);
      /* Rewrite STMT into a plain copy of the selected operand.  */
      gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
      gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
      gimple_set_num_ops (stmt, 2);
      /* Returning 2 requests cfg-cleanup when removing the propagation
	 source changed the CFG.  */
      return remove_prop_source_from_use (op0) ? 2 : 1;

  /* Shuffle of a constructor.  */
  else if (code == CONSTRUCTOR || code == VECTOR_CST)
      /* Shuffles of multi-use constructors would duplicate work.  */
      if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
      if (TREE_CODE (op1) == VECTOR_CST)
      else if (TREE_CODE (op1) == SSA_NAME)
	  enum tree_code code2;

	  gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
	  if (!def_stmt2 || !can_propagate_from (def_stmt2))
	  code2 = gimple_assign_rhs_code (def_stmt2);
	  /* The second operand must likewise come from a constructor
	     or vector constant.  */
	  if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
	  arg1 = gimple_assign_rhs1 (def_stmt2);

      /* Already used twice in this statement.  */
      if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)

      /* Try to fold the whole permutation into a single
	 constructor/constant; bail out if folding produced anything
	 else.  */
      opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
	  || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
      gimple_assign_set_rhs_from_tree (gsi, opt);
      update_stmt (gsi_stmt (*gsi));
      /* Remove the now-unused defining statements of the operands.  */
      if (TREE_CODE (op0) == SSA_NAME)
	ret = remove_prop_source_from_use (op0);
      if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
	ret |= remove_prop_source_from_use (op1);
/* Recognize a VEC_PERM_EXPR.  Returns true if there were any changes.

   NOTE(review): interior lines were lost in extraction (return type,
   braces and several early "return false" exits are missing, as are
   some local declarations such as SEL/ORIG/MAYBE_IDENT).  Only
   comments have been added; no code tokens were changed.  */
simplify_vector_constructor (gimple_stmt_iterator *gsi)
  gimple stmt = gsi_stmt (*gsi);
  tree op, op2, orig, type, elem_type;
  unsigned elem_size, nelts, i;
  enum tree_code code;
  constructor_elt *elt;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);

  op = gimple_assign_rhs1 (stmt);
  type = TREE_TYPE (op);
  gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);

  nelts = TYPE_VECTOR_SUBPARTS (type);
  elem_type = TREE_TYPE (type);
  elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));

  /* SEL records, for each constructor element, which lane of the
     source vector it was extracted from.  */
  sel = XALLOCAVEC (unsigned char, nelts);

  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
      /* Every element must be an SSA name defined by a BIT_FIELD_REF
	 out of a single SSA source vector of the same type.  */
      if (TREE_CODE (elt->value) != SSA_NAME)
      def_stmt = get_prop_source_stmt (elt->value, false, NULL);
      code = gimple_assign_rhs_code (def_stmt);
      if (code != BIT_FIELD_REF)
      op1 = gimple_assign_rhs1 (def_stmt);
      ref = TREE_OPERAND (op1, 0);
      if (TREE_CODE (ref) != SSA_NAME)
      if (!useless_type_conversion_p (type, TREE_TYPE (ref)))
      /* The extracted piece must be exactly one element wide.  */
      if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
      /* Bit offset divided by element size gives the source lane.  */
      sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
      if (sel[i] != i) maybe_ident = false;

  /* When every lane kept its position the constructor is simply a copy
     of the source vector.  */
  gimple_assign_set_rhs_from_tree (gsi, orig);

      tree mask_type, *mask_elts;

      /* The target must be able to perform this permutation.  */
      if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
	= build_vector_type (build_nonstandard_integer_type (elem_size, 1),
      /* The selector must be an integer vector mode of the same
	 overall size as the data vector.  */
      if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
	  || GET_MODE_SIZE (TYPE_MODE (mask_type))
	     != GET_MODE_SIZE (TYPE_MODE (type)))
      /* Materialize SEL as a constant mask vector OP2.  */
      mask_elts = XALLOCAVEC (tree, nelts);
      for (i = 0; i < nelts; i++)
	mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
      op2 = build_vector (mask_type, mask_elts);
      /* Replace the CONSTRUCTOR with ORIG <VEC_PERM_EXPR> ORIG, OP2.  */
      gimple_assign_set_rhs_with_ops_1 (gsi, VEC_PERM_EXPR, orig, orig, op2);
  update_stmt (gsi_stmt (*gsi));
/* Simplify multiplications.
   Return true if a transformation applied, otherwise return false.

   NOTE(review): this chunk lost interior lines in extraction (the
   return type, braces, "return false" exits, the NULL_TREE argument
   continuation and the final update/return are missing).  Only
   comments have been added; no code tokens were changed.  */
simplify_mult (gimple_stmt_iterator *gsi)
  /* STMT is the multiplication ARG1 * ARG2 being simplified.  */
  gimple stmt = gsi_stmt (*gsi);
  tree arg1 = gimple_assign_rhs1 (stmt);
  tree arg2 = gimple_assign_rhs2 (stmt);

  /* Only an SSA name has a defining statement to look through.  */
  if (TREE_CODE (arg1) != SSA_NAME)

  gimple def_stmt = SSA_NAME_DEF_STMT (arg1);
  if (!is_gimple_assign (def_stmt))

  /* Look through a sign-changing conversion.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
      /* Only conversions that preserve precision (pure sign changes)
	 of another SSA name are transparent here.  */
      if (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (def_stmt)))
	  != TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt)))
	  || TREE_CODE (gimple_assign_rhs1 (def_stmt)) != SSA_NAME)
      def_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def_stmt));
      if (!is_gimple_assign (def_stmt))

  /* (X exact/ C) * C can be simplified to plain X: EXACT_DIV_EXPR
     guarantees the division had no remainder.  */
  if (gimple_assign_rhs_code (def_stmt) == EXACT_DIV_EXPR)
      if (operand_equal_p (gimple_assign_rhs2 (def_stmt), arg2, 0))
	  tree res = gimple_assign_rhs1 (def_stmt);

	  /* Use RES directly when types agree, otherwise insert a
	     conversion to the multiplication's type.  */
	  if (useless_type_conversion_p (TREE_TYPE (arg1), TREE_TYPE (res)))
	    gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (res), res,
	    gimple_assign_set_rhs_with_ops (gsi, NOP_EXPR, res, NULL_TREE);
	  /* The statement must have been modified in place, not
	     replaced.  */
	  gcc_assert (gsi_stmt (*gsi) == stmt);
/* Main entry point for the forward propagation and statement combine
   pass.  */

/* Static descriptor for the forwprop pass, consumed by the pass
   manager.  NOTE(review): extraction lost the surrounding braces (and
   possibly a has_gate field line); the visible field values are
   unchanged.  */
const pass_data pass_data_forwprop =
  GIMPLE_PASS, /* type */
  "forwprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_TREE_FORWPROP, /* tv_id */
  /* Requires a CFG and SSA form to operate on.  */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
/* Pass-manager wrapper class for the forward-propagation machinery
   above.  NOTE(review): extraction lost the class braces and access
   specifiers; only comments have been added.  */
class pass_forwprop : public gimple_opt_pass
  /* Construct the pass from the descriptor above.  */
  pass_forwprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_forwprop, ctxt)

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_forwprop (m_ctxt); }
  /* The pass runs only when -ftree-forwprop is in effect.  */
  virtual bool gate (function *) { return flag_tree_forwprop; }
  virtual unsigned int execute (function *);

}; // class pass_forwprop
/* Run forward propagation and statement combining over every basic
   block of FUN.  Returns the TODO flags for the pass manager.

   NOTE(review): many interior lines were lost in extraction (braces,
   switch case labels, declarations of BB/LHS/RHS/DID_SOMETHING/RET,
   several conditions and the final return).  Only comments have been
   added; no code tokens were changed.  */
pass_forwprop::execute (function *fun)
  unsigned int todoflags = 0;

  cfg_changed = false;

  FOR_EACH_BB_FN (bb, fun)
      gimple_stmt_iterator gsi;

      /* Apply forward propagation to all stmts in the basic-block.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	  gimple stmt = gsi_stmt (gsi);
	  enum tree_code code;

	  /* Only assignments whose lhs is a live SSA name are
	     candidates for propagation.  */
	  if (!is_gimple_assign (stmt))
	  lhs = gimple_assign_lhs (stmt);
	  rhs = gimple_assign_rhs1 (stmt);
	  code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE (lhs) != SSA_NAME
	      || has_zero_uses (lhs))

	  /* If this statement sets an SSA_NAME to an address,
	     try to propagate the address into the uses of the SSA_NAME.  */
	  if (code == ADDR_EXPR
	      /* Handle pointer conversions on invariant addresses
		 as well, as this is valid gimple.  */
	      || (CONVERT_EXPR_CODE_P (code)
		  && TREE_CODE (rhs) == ADDR_EXPR
		  && POINTER_TYPE_P (TREE_TYPE (lhs))))
	      tree base = get_base_address (TREE_OPERAND (rhs, 0));
	      /* Propagate only addresses whose base cannot move and
		 that touch no abnormal SSA names.  */
		   || decl_address_invariant_p (base))
		  && !stmt_references_abnormal_ssa_name (stmt)
		  && forward_propagate_addr_expr (lhs, rhs, true))
		  /* All uses were replaced; the definition is dead.  */
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
	  else if (code == POINTER_PLUS_EXPR)
	      tree off = gimple_assign_rhs2 (stmt);
	      if (TREE_CODE (off) == INTEGER_CST
		  && can_propagate_from (stmt)
		  && !simple_iv_increment_p (stmt)
		  /* ??? Better adjust the interface to that function
		     instead of building new trees here.  */
		  && forward_propagate_addr_expr
	      /* Rebuild PTR p+ CST as the address &MEM[PTR + CST] and
		 propagate that instead.  */
		       build1_loc (gimple_location (stmt),
				   ADDR_EXPR, TREE_TYPE (rhs),
				   fold_build2 (MEM_REF,
						TREE_TYPE (TREE_TYPE (rhs)),
						fold_convert (ptr_type_node,
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
	      else if (is_gimple_min_invariant (rhs))
		  /* Make sure to fold &a[0] + off_1 here.  */
		  fold_stmt_inplace (&gsi);
		  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	  else if (TREE_CODE_CLASS (code) == tcc_comparison)
	      if (forward_propagate_comparison (&gsi))

      /* Combine stmts with the stmts defining their operands.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	  gimple stmt = gsi_stmt (gsi);
	  bool changed = false;

	  /* Mark stmt as potentially needing revisiting.  */
	  gimple_set_plf (stmt, GF_PLF_1, false);

	  /* Dispatch on statement kind; each simplifier sets CHANGED
	     when it rewrote the statement.  The case labels were lost
	     in extraction — the first arm is presumably
	     GIMPLE_ASSIGN; confirm against the original file.  */
	  switch (gimple_code (stmt))
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      enum tree_code code = gimple_assign_rhs_code (stmt);

	      if ((code == BIT_NOT_EXPR
		   || code == NEGATE_EXPR)
		  && TREE_CODE (rhs1) == SSA_NAME)
		changed = simplify_not_neg_expr (&gsi);
	      else if (code == COND_EXPR
		       || code == VEC_COND_EXPR)
		  /* In this case the entire COND_EXPR is in rhs1. */
		  if (forward_propagate_into_cond (&gsi)
		      || combine_cond_exprs (&gsi))
		      /* Re-fetch STMT: the simplifiers may have
			 replaced it.  */
		      stmt = gsi_stmt (gsi);
	      else if (TREE_CODE_CLASS (code) == tcc_comparison)
		  /* A result of 2 signals that cfg-cleanup is needed.  */
		  did_something = forward_propagate_into_comparison (&gsi);
		  if (did_something == 2)
		  changed = did_something != 0;
	      else if ((code == PLUS_EXPR
			|| code == BIT_IOR_EXPR
			|| code == BIT_XOR_EXPR)
		       && simplify_rotate (&gsi))
	      else if (code == BIT_AND_EXPR
		       || code == BIT_IOR_EXPR
		       || code == BIT_XOR_EXPR)
		changed = simplify_bitwise_binary (&gsi);
	      else if (code == MULT_EXPR)
		  changed = simplify_mult (&gsi);
		  /* Simplification may have removed EH side effects;
		     purge any EH edges that became dead.  */
		      && maybe_clean_or_replace_eh_stmt (stmt, stmt)
		      && gimple_purge_dead_eh_edges (bb))
	      else if (code == PLUS_EXPR
		       || code == MINUS_EXPR)
		  changed = associate_plusminus (&gsi);
		      && maybe_clean_or_replace_eh_stmt (stmt, stmt)
		      && gimple_purge_dead_eh_edges (bb))
	      else if (code == POINTER_PLUS_EXPR)
		changed = associate_pointerplus (&gsi);
	      else if (CONVERT_EXPR_CODE_P (code)
		       || code == FLOAT_EXPR
		       || code == FIX_TRUNC_EXPR)
		  int did_something = combine_conversions (&gsi);
		  if (did_something == 2)
		  /* If we have a narrowing conversion to an integral
		     type that is fed by a BIT_AND_EXPR, we might be
		     able to remove the BIT_AND_EXPR if it merely
		     masks off bits outside the final type (and nothing
		     else).  */
		  if (! did_something)
		      tree outer_type = TREE_TYPE (gimple_assign_lhs (stmt));
		      tree inner_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
		      if (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
			  && INTEGRAL_TYPE_P (outer_type)
			  && INTEGRAL_TYPE_P (inner_type)
			  && (TYPE_PRECISION (outer_type)
			      <= TYPE_PRECISION (inner_type)))
			did_something = simplify_conversion_from_bitmask (&gsi);
		  changed = did_something != 0;
	      else if (code == VIEW_CONVERT_EXPR)
		changed = simplify_vce (&gsi);
	      else if (code == VEC_PERM_EXPR)
		  int did_something = simplify_permutation (&gsi);
		  if (did_something == 2)
		  changed = did_something != 0;
	      else if (code == BIT_FIELD_REF)
		changed = simplify_bitfield_ref (&gsi);
	      else if (code == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
		changed = simplify_vector_constructor (&gsi);

	      /* Presumably the GIMPLE_SWITCH arm — label lost in
		 extraction.  */
	      changed = simplify_gimple_switch (stmt);

	      /* Presumably the GIMPLE_COND arm — label lost in
		 extraction.  */
	      did_something = forward_propagate_into_gimple_cond (stmt);
	      if (did_something == 2)
	      changed = did_something != 0;

	      /* Presumably the GIMPLE_CALL arm: simplify calls to
		 known built-in functions.  */
	      tree callee = gimple_call_fndecl (stmt);
	      if (callee != NULL_TREE
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
		changed = simplify_builtin_call (&gsi, callee);

	  /* If the stmt changed then re-visit it and the statements
	     inserted before it.  */
	  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
	    if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
	  if (gsi_end_p (gsi))
	    gsi = gsi_start_bb (bb);

	  /* Stmt no longer needs to be revisited.  */
	  gimple_set_plf (stmt, GF_PLF_1, true);

  /* CFG changes require a cleanup pass to run afterwards.  */
  todoflags |= TODO_cleanup_cfg;
/* Factory function invoked by the pass manager to instantiate the
   forwprop pass.  NOTE(review): the return-type line (presumably
   gimple_opt_pass *) and the function braces were lost in
   extraction.  */
make_pass_forwprop (gcc::context *ctxt)
  return new pass_forwprop (ctxt);