/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "double-int.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "statistics.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "insn-codes.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-dom.h"
#include "tree-cfgcleanup.h"
#include "tree-into-ssa.h"
/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or
     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither have volatile on them):
     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or
     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0;

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */
static bool forward_propagate_addr_expr (tree, tree, bool);

/* Set to true if we delete dead edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple stmt);

static bitmap to_purge;

/* Const-and-copy lattice.  */
static vec<tree> lattice;
/* Set the lattice entry for NAME to VAL.  */

static void
fwprop_set_lattice_val (tree name, tree val)
{
  if (TREE_CODE (name) == SSA_NAME)
    {
      if (SSA_NAME_VERSION (name) >= lattice.length ())
        {
          lattice.reserve (num_ssa_names - lattice.length ());
          lattice.quick_grow_cleared (num_ssa_names);
        }
      lattice[SSA_NAME_VERSION (name)] = val;
    }
}
/* Invalidate the lattice entry for NAME, done when releasing SSA names.  */

static void
fwprop_invalidate_lattice (tree name)
{
  if (name
      && TREE_CODE (name) == SSA_NAME
      && SSA_NAME_VERSION (name) < lattice.length ())
    lattice[SSA_NAME_VERSION (name)] = NULL_TREE;
}
/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL if there is no such one.
   If SINGLE_USE_ONLY is set considers only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
        single_use = false;
        if (single_use_only)
          return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is a simple copy, continue looking.  */
    if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
      name = gimple_assign_rhs1 (def_stmt);
    else
      {
        if (!single_use_only && single_use_p)
          *single_use_p = single_use;

        return def_stmt;
      }
  } while (1);
}
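
/* Illustrative sketch (hypothetical GIMPLE, not from the original
   sources): given the chain

     b_2 = c_3 + d_4;
     a_1 = b_2;          <- trivial copy, skipped
     ... single use of a_1 ...

   get_prop_source_stmt (a_1, false, &single_use_p) walks through the
   copy and returns the statement b_2 = c_3 + d_4 as the propagation
   source, with single_use_p recording whether every link in the chain
   had exactly one use.  */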
/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can be always propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we can not apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}
/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (SSA_NAME_IN_FREE_LIST (name)
        || SSA_NAME_IS_DEFAULT_DEF (name)
        || !has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    if (gimple_code (stmt) == GIMPLE_PHI
        || gimple_has_side_effects (stmt))
      return cfg_changed;

    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    if (gsi_remove (&gsi, true))
      bitmap_set_bit (to_purge, bb->index);
    fwprop_invalidate_lattice (gimple_get_lhs (stmt));
    release_defs (stmt);

    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}
/* Return the rhs of a gassign *STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt),
                            gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}
/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}
/* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
   of its operand.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (gimple stmt,
                                     enum tree_code code, tree type,
                                     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
          bool invariant_only_p = !single_use0_p;

          rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);

          /* Always combine comparisons or conversions from booleans.  */
          if (TREE_CODE (op1) == INTEGER_CST
              && ((CONVERT_EXPR_CODE_P (def_code)
                   && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs0, 0)))
                      == BOOLEAN_TYPE)
                  || TREE_CODE_CLASS (def_code) == tcc_comparison))
            invariant_only_p = false;

          tmp = combine_cond_expr_cond (stmt, code, type,
                                        rhs0, op1, invariant_only_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        op0, rhs1, !single_use1_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
                                  rhs0, rhs1,
                                  !(single_use0_p && single_use1_p));

  return tmp;
}
/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
                                             gimple_assign_rhs_code (stmt),
                                             type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}
/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gcond *stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
                                             boolean_type_node,
                                             rhs1, rhs2);
  if (tmp)
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_gimple_expr (dump_file, stmt, 0, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
           && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
           && integer_zerop (rhs2))
          || (code == NE_EXPR
              && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}
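
/* Illustrative sketch of the canonicalization above (hypothetical
   GIMPLE, with _Bool b_1):

     if (b_1 == 0) goto <true dest>; else goto <false dest>;

   becomes

     if (b_1 != 0) goto <false dest>; else goto <true dest>;

   i.e. the comparison is rewritten as NE_EXPR against zero and the
   true/false edge flags are exchanged, so no new condition needs to
   be computed.  */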
/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
                                               TREE_TYPE (cond),
                                               TREE_OPERAND (cond, 0),
                                               TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      enum tree_code def_code;
      tree name = cond;
      gimple def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return false;

      def_code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (def_code) == tcc_comparison)
        tmp = fold_build2_loc (gimple_location (def_stmt),
                               def_code,
                               TREE_TYPE (cond),
                               gimple_assign_rhs1 (def_stmt),
                               gimple_assign_rhs2 (def_stmt));
    }

  if (tmp
      && is_gimple_condexpr (tmp))
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, cond, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
                                  : integer_onep (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
      else if (integer_zerop (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
      else
        gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return true;
    }

  return false;
}
/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
    bitmap_set_bit (to_purge, gimple_bb (stmt)->index);

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}
/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation can
   be not totally successful, yet things may have been changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Do not perform copy-propagation but recurse through copy chains.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == SSA_NAME)
    return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

  /* The use statement could be a conversion.  Recurse to the uses of the
     lhs as copyprop does not copy through pointer to integer to pointer
     conversions and FRE does not catch all cases either.
     Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && CONVERT_EXPR_CODE_P (rhs_code))
    {
      /* If there is a point in a conversion chain where the types match
         so we can remove a conversion re-materialize the address here
         and stop.  */
      if (single_use_p
          && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
        {
          gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
          gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
          return true;
        }

      /* Else recurse if the conversion preserves the address value.  */
      if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
           || POINTER_TYPE_P (TREE_TYPE (lhs)))
          && (TYPE_PRECISION (TREE_TYPE (lhs))
              >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
        return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

      return false;
    }

  /* If this isn't a conversion chain from this on we only can propagate
     into compatible pointer contexts.  */
  if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
    return false;

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
         for further propagation.  Simply rely on fold building that
         and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
                                 def_rhs,
                                 fold_convert (ptr_type_node,
                                               gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
        return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
                                                    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
         bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
          && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
        return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
                                        new_def_rhs);
      else if (is_gimple_min_invariant (new_def_rhs))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR, new_def_rhs);
      else
        return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          offset_int off = mem_ref_offset (lhs);
          tree new_ptr;
          off += def_rhs_offset;
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (lhs, 0) = new_ptr;
          TREE_OPERAND (lhs, 1)
            = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the only use.  */
          if (single_use_p)
            return true;
        }
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the LHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (lhs, 1))
               && ((gimple_assign_lhs (use_stmt) == lhs
                    && useless_type_conversion_p
                         (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
                          TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
                   || types_compatible_p (TREE_TYPE (lhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
               /* Don't forward anything into clobber stmts if it would result
                  in the lhs no longer being a MEM_REF.  */
               && (!gimple_clobber_p (use_stmt)
                   || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_lhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (lhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
          new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *lhsp = new_lhs;
          TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
          *def_rhs_basep = saved;
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the
             only use.  */
          if (single_use_p)
            return true;
        }
      else
        /* We can have a struct assignment dereferencing our name twice.
           Note that we didn't propagate into the lhs to not falsely
           claim we did when propagating into the rhs.  */
        res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
  if (TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  while (handled_component_p (*rhsp))
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          offset_int off = mem_ref_offset (rhs);
          tree new_ptr;
          off += def_rhs_offset;
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (rhs, 0) = new_ptr;
          TREE_OPERAND (rhs, 1)
            = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the RHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (rhs, 1))
               && ((gimple_assign_rhs1 (use_stmt) == rhs
                    && useless_type_conversion_p
                         (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
                   || types_compatible_p (TREE_TYPE (rhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_rhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (rhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
          new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *rhsp = new_rhs;
          TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
          *def_rhs_basep = saved;
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
                                 ADDR_EXPR, TREE_TYPE (def_rhs),
                                 fold_build2 (MEM_REF,
                                              TREE_TYPE (TREE_TYPE (def_rhs)),
                                              unshare_expr (def_rhs),
                                              fold_convert (ptr_type_node,
                                                            rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  return false;
}
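
/* Illustrative sketch for the &x[C1] p+ C2 case above (hypothetical
   source):

     int x[10];
     int *p = &x[2];
     int *q = p + 8;     <- POINTER_PLUS_EXPR with constant offset 8

   Propagating &x[2] into the use yields q = &MEM[(void *)&x + 16B],
   i.e. &x[4] with 4-byte elements, recovering array indexing from the
   pointer arithmetic.  */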
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
   the single use in the previous invocation.  Pass true when calling
   this as toplevel.

   Returns true, if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
{
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = parent_single_use_p && has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (!is_gimple_assign (use_stmt))
        {
          if (!is_gimple_debug (use_stmt))
            all = false;
          continue;
        }

      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
      result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                              single_use_p);
      /* If the use has moved to a different statement adjust
         the update machinery for the old statement too.  */
      if (use_stmt != gsi_stmt (gsi))
        {
          update_stmt (use_stmt);
          use_stmt = gsi_stmt (gsi);
        }
      update_stmt (use_stmt);
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          fwprop_invalidate_lattice (gimple_get_lhs (use_stmt));
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  return all && has_zero_uses (name);
}
/* Helper function for simplify_gimple_switch.  Remove case labels that
   have values outside the range of the new type.  */

static void
simplify_gimple_switch_label_vec (gswitch *stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  auto_vec<tree> labels (branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  */
  for (i = 1; i < branch_num; i++)
    labels.quick_push (gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = labels.length ();
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
         out-of-range for INDEX_TYPE.  Push one label and let the
         CFG cleanups deal with this further.  */
      if (len == 0)
        {
          tree label, elt;

          label = CASE_LABEL (gimple_switch_default_label (stmt));
          elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
          labels.quick_push (elt);
          len = 1;
        }

      for (i = 0; i < labels.length (); i++)
        gimple_switch_set_label (stmt, i + 1, labels[i]);
      for (i++ ; i < branch_num; i++)
        gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
        {
          tree elt = gimple_switch_label (stmt, i);
          basic_block target = label_to_block (CASE_LABEL (elt));
          bitmap_set_bit (target_blocks, target->index);
        }
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
        {
          if (! bitmap_bit_p (target_blocks, e->dest->index))
            {
              remove_edge (e);
              cfg_changed = true;
              free_dominance_info (CDI_DOMINATORS);
            }
          else
            ei_next (&ei);
        }
      BITMAP_FREE (target_blocks);
    }
}
/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gswitch *stmt)
{
  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  tree cond = gimple_switch_index (stmt);
  if (TREE_CODE (cond) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (cond);
      if (gimple_assign_cast_p (def_stmt))
        {
          tree def = gimple_assign_rhs1 (def_stmt);
          if (TREE_CODE (def) != SSA_NAME)
            return false;

          /* If we have an extension or sign-change that preserves the
             values we check against then we can copy the source value into
             the switch.  */
          tree ti = TREE_TYPE (def);
          if (INTEGRAL_TYPE_P (ti)
              && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
            {
              size_t n = gimple_switch_num_labels (stmt);
              tree min = NULL_TREE, max = NULL_TREE;
              if (n > 1)
                {
                  min = CASE_LOW (gimple_switch_label (stmt, 1));
                  if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
                    max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
                  else
                    max = CASE_LOW (gimple_switch_label (stmt, n - 1));
                }
              if ((!min || int_fits_type_p (min, ti))
                  && (!max || int_fits_type_p (max, ti)))
                {
                  gimple_switch_set_index (stmt, def);
                  simplify_gimple_switch_label_vec (stmt, ti);
                  update_stmt (stmt);
                  return true;
                }
            }
        }
    }

  return false;
}
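
/* Illustrative sketch (hypothetical source):

     char c = ...;
     switch ((int) c)
       {
       case 1: ...
       case 2: ...
       }

   Because all case values fit in the type of C, the cast is removed
   and the switch is retargeted to C itself, after pruning any case
   labels that would be out of range for the narrower type.  */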
/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
         twice, to handle ADDR_EXPR directly in p1/p2,
         SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
         on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
        {
          if (!POINTER_TYPE_P (TREE_TYPE (p)))
            break;
          if (TREE_CODE (p) == ADDR_EXPR)
            {
              tree q = TREE_OPERAND (p, 0);
              HOST_WIDE_INT offset;
              tree base = get_addr_base_and_unit_offset (q, &offset);
              if (base)
                {
                  q = base;
                  if (offset)
                    off = size_binop (PLUS_EXPR, off, size_int (offset));
                }
              if (TREE_CODE (q) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
                {
                  p = TREE_OPERAND (q, 0);
                  off = size_binop (PLUS_EXPR, off,
                                    wide_int_to_tree (sizetype,
                                                      mem_ref_offset (q)));
                }
              else
                {
                  exps[i][j] = q;
                  offs[i][j++] = off;
                  break;
                }
            }
          if (TREE_CODE (p) != SSA_NAME)
            break;
          exps[i][j] = p;
          offs[i][j++] = off;
          if (j == CPD_ITERATIONS)
            break;
          stmt = SSA_NAME_DEF_STMT (p);
          if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
            break;
          code = gimple_assign_rhs_code (stmt);
          if (code == POINTER_PLUS_EXPR)
            {
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
                break;
              off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
              p = gimple_assign_rhs1 (stmt);
            }
          else if (code == ADDR_EXPR || CONVERT_EXPR_CODE_P (code))
            p = gimple_assign_rhs1 (stmt);
          else
            break;
        }
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
        return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
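
/* Illustrative sketch (hypothetical GIMPLE, char buf[]): for

     p1_1 = &buf[2];
     p2_2 = p1_1 + 5;

   both chains reach the common base &buf with offsets 2 and 7, so
   constant_pointer_difference (p1_1, p2_2) returns 5.  If no common
   base expression is found within CPD_ITERATIONS steps, NULL_TREE is
   returned.  */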
/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
   memcpy (p, "abcd", 4);
   memset (p + 4, ' ', 3);
   into
   memcpy (p, "abcd   ", 7);
   call if the latter can be stored by pieces during expansion.  */

static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
          || gimple_call_lhs (stmt2)
          || CHAR_BIT != 8
          || BITS_PER_UNIT != 8)
        break;
      else
        {
          tree callee1;
          tree ptr1, src1, str1, off1, len1, lhs1;
          tree ptr2 = gimple_call_arg (stmt2, 0);
          tree val2 = gimple_call_arg (stmt2, 1);
          tree len2 = gimple_call_arg (stmt2, 2);
          tree diff, vdef, new_str_cst;
          gimple use_stmt;
          unsigned int ptr1_align;
          unsigned HOST_WIDE_INT src_len;
          char *src_buf;
          use_operand_p use_p;

          if (!tree_fits_shwi_p (val2)
              || !tree_fits_uhwi_p (len2)
              || compare_tree_int (len2, 1024) == 1)
            break;
          if (is_gimple_call (stmt1))
            {
              /* If first stmt is a call, it needs to be memcpy
                 or mempcpy, with string literal as second argument and
                 constant length.  */
              callee1 = gimple_call_fndecl (stmt1);
              if (callee1 == NULL_TREE
                  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
                  || gimple_call_num_args (stmt1) != 3)
                break;
              if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
                  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
                break;
              ptr1 = gimple_call_arg (stmt1, 0);
              src1 = gimple_call_arg (stmt1, 1);
              len1 = gimple_call_arg (stmt1, 2);
              lhs1 = gimple_call_lhs (stmt1);
              if (!tree_fits_uhwi_p (len1))
                break;
              str1 = string_constant (src1, &off1);
              if (str1 == NULL_TREE)
                break;
              if (!tree_fits_uhwi_p (off1)
                  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
                  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
                                             - tree_to_uhwi (off1)) > 0
                  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
                  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
                     != TYPE_MODE (char_type_node))
                break;
            }
          else if (gimple_assign_single_p (stmt1))
            {
              /* Otherwise look for length 1 memcpy optimized into
                 assignment.  */
              ptr1 = gimple_assign_lhs (stmt1);
              src1 = gimple_assign_rhs1 (stmt1);
              if (TREE_CODE (ptr1) != MEM_REF
                  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
                  || !tree_fits_shwi_p (src1))
                break;
              ptr1 = build_fold_addr_expr (ptr1);
              callee1 = NULL_TREE;
              len1 = size_one_node;
              lhs1 = NULL_TREE;
              off1 = size_zero_node;
              str1 = NULL_TREE;
            }
          else
            break;

          diff = constant_pointer_difference (ptr1, ptr2);
          if (diff == NULL && lhs1 != NULL)
            {
              diff = constant_pointer_difference (lhs1, ptr2);
              if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
                  && diff != NULL)
                diff = size_binop (PLUS_EXPR, diff,
                                   fold_convert (sizetype, len1));
            }
          /* If the difference between the second and first destination pointer
             is not constant, or is bigger than memcpy length, bail out.  */
          if (diff == NULL
              || !tree_fits_uhwi_p (diff)
              || tree_int_cst_lt (len1, diff)
              || compare_tree_int (diff, 1024) == 1)
            break;

          /* Use maximum of difference plus memset length and memcpy length
             as the new memcpy length, if it is too big, bail out.  */
          src_len = tree_to_uhwi (diff);
          src_len += tree_to_uhwi (len2);
          if (src_len < tree_to_uhwi (len1))
            src_len = tree_to_uhwi (len1);
          if (src_len > 1024)
            break;

          /* If mempcpy value is used elsewhere, bail out, as mempcpy
             with bigger length will return different result.  */
          if (lhs1 != NULL_TREE
              && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
              && (TREE_CODE (lhs1) != SSA_NAME
                  || !single_imm_use (lhs1, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          /* If anything reads memory in between memcpy and memset
             call, the modified memcpy call might change it.  */
          vdef = gimple_vdef (stmt1);
          if (vdef != NULL
              && (!single_imm_use (vdef, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          ptr1_align = get_pointer_alignment (ptr1);
          /* Construct the new source string literal.  */
          src_buf = XALLOCAVEC (char, src_len + 1);
          if (callee1)
            memcpy (src_buf,
                    TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
                    tree_to_uhwi (len1));
          else
            src_buf[0] = tree_to_shwi (src1);
          memset (src_buf + tree_to_uhwi (diff),
                  tree_to_shwi (val2), tree_to_uhwi (len2));
          src_buf[src_len] = '\0';
          /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
             handle embedded '\0's.  */
          if (strlen (src_buf) != src_len)
            break;
          rtl_profile_for_bb (gimple_bb (stmt2));
          /* If the new memcpy wouldn't be emitted by storing the literal
             by pieces, this optimization might enlarge .rodata too much,
             as commonly used string literals couldn't be shared any
             longer.  */
          if (!can_store_by_pieces (src_len,
                                    builtin_strncpy_read_str,
                                    src_buf, ptr1_align, false))
            break;

          new_str_cst = build_string_literal (src_len, src_buf);
          if (callee1)
            {
              /* If STMT1 is a mem{,p}cpy call, adjust it and remove
                 memset call.  */
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                gimple_call_set_lhs (stmt1, NULL_TREE);
              gimple_call_set_arg (stmt1, 1, new_str_cst);
              gimple_call_set_arg (stmt1, 2,
                                   build_int_cst (TREE_TYPE (len1), src_len));
              update_stmt (stmt1);
              unlink_stmt_vdef (stmt2);
              gsi_remove (gsi_p, true);
              fwprop_invalidate_lattice (gimple_get_lhs (stmt2));
              release_defs (stmt2);
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                {
                  fwprop_invalidate_lattice (lhs1);
                  release_ssa_name (lhs1);
                }
              return true;
            }
          else
            {
              /* Otherwise, if STMT1 is length 1 memcpy optimized into
                 assignment, remove STMT1 and change memset call into
                 memcpy call.  */
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

              if (!is_gimple_val (ptr1))
                ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
                                                 true, GSI_SAME_STMT);
              gimple_call_set_fndecl (stmt2,
                                      builtin_decl_explicit (BUILT_IN_MEMCPY));
              gimple_call_set_arg (stmt2, 0, ptr1);
              gimple_call_set_arg (stmt2, 1, new_str_cst);
              gimple_call_set_arg (stmt2, 2,
                                   build_int_cst (TREE_TYPE (len2), src_len));
              unlink_stmt_vdef (stmt1);
              gsi_remove (&gsi, true);
              fwprop_invalidate_lattice (gimple_get_lhs (stmt1));
              release_defs (stmt1);
              update_stmt (stmt2);
              return true;
            }
        }
      break;
    default:
      break;
    }
  return false;
}
/* Given a ssa_name in NAME see if it was defined by an assignment and
   set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
   to the second operand on the rhs.  */

static inline void
defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
{
  gimple def;
  enum tree_code code1;
  tree arg11;
  tree arg21;
  tree arg31;
  enum gimple_rhs_class grhs_class;

  code1 = TREE_CODE (name);
  arg11 = name;
  arg21 = NULL_TREE;
  arg31 = NULL_TREE;
  grhs_class = get_gimple_rhs_class (code1);

  if (code1 == SSA_NAME)
    {
      def = SSA_NAME_DEF_STMT (name);

      if (def && is_gimple_assign (def)
          && can_propagate_from (def))
        {
          code1 = gimple_assign_rhs_code (def);
          arg11 = gimple_assign_rhs1 (def);
          arg21 = gimple_assign_rhs2 (def);
          arg31 = gimple_assign_rhs3 (def);
        }
    }
  else if (grhs_class == GIMPLE_TERNARY_RHS
           || grhs_class == GIMPLE_BINARY_RHS
           || grhs_class == GIMPLE_UNARY_RHS
           || grhs_class == GIMPLE_SINGLE_RHS)
    extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);

  *code = code1;
  *arg1 = arg11;
  if (arg2)
    *arg2 = arg21;
  /* Ignore arg3 currently.  */
}
/* Recognize rotation patterns.  Return true if a transformation
   applied, otherwise return false.

   We are looking for X with unsigned type T with bitsize B, OP being
   +, | or ^, some type T2 wider than T and
   (X << CNT1) OP (X >> CNT2)                          iff CNT1 + CNT2 == B
   ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2))    iff CNT1 + CNT2 == B
   (X << Y) OP (X >> (B - Y))
   (X << (int) Y) OP (X >> (int) (B - Y))
   ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
   ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
   (X << Y) | (X >> ((-Y) & (B - 1)))
   (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
   ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
   ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))

   and transform these into:
   X r<< CNT1
   X r<< Y

   Note, in the patterns with T2 type, the type of OP operands
   might be even a signed type, but should have precision B.  */

static bool
simplify_rotate (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree arg[2], rtype, rotcnt = NULL_TREE;
  tree def_arg1[2], def_arg2[2];
  enum tree_code def_code[2];
  tree lhs;
  int i;
  bool swapped_p = false;
  gimple g;

  arg[0] = gimple_assign_rhs1 (stmt);
  arg[1] = gimple_assign_rhs2 (stmt);
  rtype = TREE_TYPE (arg[0]);

  /* Only create rotates in complete modes.  Other cases are not
     expanded properly.  */
  if (!INTEGRAL_TYPE_P (rtype)
      || TYPE_PRECISION (rtype) != GET_MODE_PRECISION (TYPE_MODE (rtype)))
    return false;

  for (i = 0; i < 2; i++)
    defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);

  /* Look through narrowing conversions.  */
  if (CONVERT_EXPR_CODE_P (def_code[0])
      && CONVERT_EXPR_CODE_P (def_code[1])
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
         == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
      && has_single_use (arg[0])
      && has_single_use (arg[1]))
    {
      for (i = 0; i < 2; i++)
        {
          arg[i] = def_arg1[i];
          defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
        }
    }

  /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR.  */
  for (i = 0; i < 2; i++)
    if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
      return false;
    else if (!has_single_use (arg[i]))
      return false;
  if (def_code[0] == def_code[1])
    return false;

  /* If we've looked through narrowing conversions before, look through
     widening conversions from unsigned type with the same precision
     as rtype here.  */
  if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
    for (i = 0; i < 2; i++)
      {
        tree tem;
        enum tree_code code;
        defcodefor_name (def_arg1[i], &code, &tem, NULL);
        if (!CONVERT_EXPR_CODE_P (code)
            || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
            || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
          return false;
        def_arg1[i] = tem;
      }
  /* Both shifts have to use the same first operand.  */
  if (TREE_CODE (def_arg1[0]) != SSA_NAME || def_arg1[0] != def_arg1[1])
    return false;
  if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
    return false;

  /* CNT1 + CNT2 == B case above.  */
  if (tree_fits_uhwi_p (def_arg2[0])
      && tree_fits_uhwi_p (def_arg2[1])
      && tree_to_uhwi (def_arg2[0])
         + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
    rotcnt = def_arg2[0];
  else if (TREE_CODE (def_arg2[0]) != SSA_NAME
           || TREE_CODE (def_arg2[1]) != SSA_NAME)
    return false;
  else
    {
      tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
      enum tree_code cdef_code[2];
      /* Look through conversion of the shift count argument.
         The C/C++ FE cast any shift count argument to integer_type_node.
         The only problem might be if the shift count type maximum value
         is equal or smaller than number of bits in rtype.  */
      for (i = 0; i < 2; i++)
        {
          def_arg2_alt[i] = def_arg2[i];
          defcodefor_name (def_arg2[i], &cdef_code[i],
                           &cdef_arg1[i], &cdef_arg2[i]);
          if (CONVERT_EXPR_CODE_P (cdef_code[i])
              && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
              && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
                 > floor_log2 (TYPE_PRECISION (rtype))
              && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
                 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (cdef_arg1[i]))))
            {
              def_arg2_alt[i] = cdef_arg1[i];
              defcodefor_name (def_arg2_alt[i], &cdef_code[i],
                               &cdef_arg1[i], &cdef_arg2[i]);
            }
        }
      for (i = 0; i < 2; i++)
        /* Check for one shift count being Y and the other B - Y,
           with optional casts.  */
        if (cdef_code[i] == MINUS_EXPR
            && tree_fits_shwi_p (cdef_arg1[i])
            && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
            && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
          {
            tree tem;
            enum tree_code code;

            if (cdef_arg2[i] == def_arg2[1 - i]
                || cdef_arg2[i] == def_arg2_alt[1 - i])
              {
                rotcnt = cdef_arg2[i];
                break;
              }
            defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
            if (CONVERT_EXPR_CODE_P (code)
                && INTEGRAL_TYPE_P (TREE_TYPE (tem))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   > floor_log2 (TYPE_PRECISION (rtype))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
                && (tem == def_arg2[1 - i]
                    || tem == def_arg2_alt[1 - i]))
              {
                rotcnt = tem;
                break;
              }
          }
        /* The above sequence isn't safe for Y being 0,
           because then one of the shifts triggers undefined behavior.
           This alternative is safe even for rotation count of 0.
           One shift count is Y and the other (-Y) & (B - 1).  */
        else if (cdef_code[i] == BIT_AND_EXPR
                 && tree_fits_shwi_p (cdef_arg2[i])
                 && tree_to_shwi (cdef_arg2[i])
                    == TYPE_PRECISION (rtype) - 1
                 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
                 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
          {
            tree tem;
            enum tree_code code;

            defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
            if (CONVERT_EXPR_CODE_P (code)
                && INTEGRAL_TYPE_P (TREE_TYPE (tem))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   > floor_log2 (TYPE_PRECISION (rtype))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem))))
              defcodefor_name (tem, &code, &tem, NULL);

            if (code == NEGATE_EXPR)
              {
                if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
                  {
                    rotcnt = tem;
                    break;
                  }
                defcodefor_name (tem, &code, &tem, NULL);
                if (CONVERT_EXPR_CODE_P (code)
                    && INTEGRAL_TYPE_P (TREE_TYPE (tem))
                    && TYPE_PRECISION (TREE_TYPE (tem))
                       > floor_log2 (TYPE_PRECISION (rtype))
                    && TYPE_PRECISION (TREE_TYPE (tem))
                       == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
                    && (tem == def_arg2[1 - i]
                        || tem == def_arg2_alt[1 - i]))
                  {
                    rotcnt = tem;
                    break;
                  }
              }
          }
      if (rotcnt == NULL_TREE)
        return false;
      swapped_p = i != 1;
    }

  if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
                                  TREE_TYPE (rotcnt)))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (def_arg2[0])),
                               NOP_EXPR, rotcnt);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      rotcnt = gimple_assign_lhs (g);
    }
  lhs = gimple_assign_lhs (stmt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    lhs = make_ssa_name (TREE_TYPE (def_arg1[0]));
  g = gimple_build_assign (lhs,
                           ((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
                           ? LROTATE_EXPR : RROTATE_EXPR, def_arg1[0], rotcnt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    {
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, lhs);
    }
  gsi_replace (gsi, g, false);
  return true;
}
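
/* Illustrative sketch (hypothetical source, B == 32):

     unsigned int r = (x << n) | (x >> ((-n) & 31));

   matches the BIT_AND_EXPR alternative above, which is well defined
   even for n == 0, and is rewritten into a single rotate:

     r = x r<< n;  */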
/* Combine an element access with a shuffle.  Returns true if there were
   any changes made, else it returns false.  */

static bool
simplify_bitfield_ref (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op, op0, op1, op2;
  tree elem_type;
  unsigned idx, n, size;
  enum tree_code code;

  op = gimple_assign_rhs1 (stmt);
  gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);

  op0 = TREE_OPERAND (op, 0);
  if (TREE_CODE (op0) != SSA_NAME
      || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
    return false;

  def_stmt = get_prop_source_stmt (op0, false, NULL);
  if (!def_stmt || !can_propagate_from (def_stmt))
    return false;

  op1 = TREE_OPERAND (op, 1);
  op2 = TREE_OPERAND (op, 2);
  code = gimple_assign_rhs_code (def_stmt);

  if (code == CONSTRUCTOR)
    {
      tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
                               gimple_assign_rhs1 (def_stmt), op1, op2);
      if (!tem || !valid_gimple_rhs_p (tem))
        return false;
      gimple_assign_set_rhs_from_tree (gsi, tem);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  elem_type = TREE_TYPE (TREE_TYPE (op0));
  if (TREE_TYPE (op) != elem_type)
    return false;

  size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
  n = TREE_INT_CST_LOW (op1) / size;
  if (n != 1)
    return false;
  idx = TREE_INT_CST_LOW (op2) / size;

  if (code == VEC_PERM_EXPR)
    {
      tree p, m, index, tem;
      unsigned nelts;
      m = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (m) != VECTOR_CST)
        return false;
      nelts = VECTOR_CST_NELTS (m);
      idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
      idx %= 2 * nelts;
      if (idx < nelts)
        p = gimple_assign_rhs1 (def_stmt);
      else
        {
          p = gimple_assign_rhs2 (def_stmt);
          idx -= nelts;
        }
      index = build_int_cst (TREE_TYPE (TREE_TYPE (m)), idx * size);
      tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
                    unshare_expr (p), op1, index);
      gimple_assign_set_rhs1 (stmt, tem);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  return false;
}
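
/* Illustrative sketch (hypothetical GIMPLE) for the VEC_PERM_EXPR
   case above, with four 32-bit elements:

     v_1 = VEC_PERM_EXPR <a_2, b_3, { 2, 3, 6, 7 }>;
     x_4 = BIT_FIELD_REF <v_1, 32, 64>;   <- element 2 of v_1

   The mask selects element 6, i.e. element 2 of b_3, so the access is
   rewritten to x_4 = BIT_FIELD_REF <b_3, 32, 64> and the shuffle may
   become dead.  */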
/* Determine whether applying the 2 permutations (mask1 then mask2)
   gives back one of the input.  */

static int
is_combined_permutation_identity (tree mask1, tree mask2)
{
  tree mask;
  unsigned int nelts, i, j;
  bool maybe_identity1 = true;
  bool maybe_identity2 = true;

  gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
                       && TREE_CODE (mask2) == VECTOR_CST);
  mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
  gcc_assert (TREE_CODE (mask) == VECTOR_CST);

  nelts = VECTOR_CST_NELTS (mask);
  for (i = 0; i < nelts; i++)
    {
      tree val = VECTOR_CST_ELT (mask, i);
      gcc_assert (TREE_CODE (val) == INTEGER_CST);
      j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
      if (j == i)
        maybe_identity2 = false;
      else if (j == i + nelts)
        maybe_identity1 = false;
      else
        return 0;
    }
  return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
}
/* Combine a shuffle with its arguments.  Returns 1 if there were any
   changes made, 2 if cfg-cleanup needs to run.  Else it returns 0.  */

static int
simplify_permutation (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op0, op1, op2, op3, arg0, arg1;
  enum tree_code code;
  bool single_use_op0 = false;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);

  op0 = gimple_assign_rhs1 (stmt);
  op1 = gimple_assign_rhs2 (stmt);
  op2 = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (op2) != VECTOR_CST)
    return 0;

  if (TREE_CODE (op0) == VECTOR_CST)
    {
      code = VECTOR_CST;
      arg0 = op0;
    }
  else if (TREE_CODE (op0) == SSA_NAME)
    {
      def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return 0;

      code = gimple_assign_rhs_code (def_stmt);
      arg0 = gimple_assign_rhs1 (def_stmt);
    }
  else
    return 0;

  /* Two consecutive shuffles.  */
  if (code == VEC_PERM_EXPR)
    {
      tree orig;
      int ident;

      if (op0 != op1)
        return 0;
      op3 = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (op3) != VECTOR_CST)
        return 0;
      ident = is_combined_permutation_identity (op3, op2);
      if (!ident)
        return 0;
      orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
                          : gimple_assign_rhs2 (def_stmt);
      gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
      gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
      gimple_set_num_ops (stmt, 2);
      update_stmt (stmt);
      return remove_prop_source_from_use (op0) ? 2 : 1;
    }

  /* Shuffle of a constructor.  */
  else if (code == CONSTRUCTOR || code == VECTOR_CST)
    {
      tree opt;
      bool ret = false;
      if (op0 != op1)
        {
          if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
            return 0;

          if (TREE_CODE (op1) == VECTOR_CST)
            arg1 = op1;
          else if (TREE_CODE (op1) == SSA_NAME)
            {
              enum tree_code code2;

              gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
              if (!def_stmt2 || !can_propagate_from (def_stmt2))
                return 0;

              code2 = gimple_assign_rhs_code (def_stmt2);
              if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
                return 0;
              arg1 = gimple_assign_rhs1 (def_stmt2);
            }
          else
            return 0;
        }
      else
        {
          /* Already used twice in this statement.  */
          if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
            return 0;
          arg1 = arg0;
        }
      opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
      if (!opt
          || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
        return 0;
      gimple_assign_set_rhs_from_tree (gsi, opt);
      update_stmt (gsi_stmt (*gsi));
      if (TREE_CODE (op0) == SSA_NAME)
        ret = remove_prop_source_from_use (op0);
      if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
        ret |= remove_prop_source_from_use (op1);
      return ret ? 2 : 1;
    }

  return 0;
}
/* Recognize a VEC_PERM_EXPR.  Returns true if there were any changes.  */

static bool
simplify_vector_constructor (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op, op2, orig, type, elem_type;
  unsigned elem_size, nelts, i;
  enum tree_code code;
  constructor_elt *elt;
  unsigned char *sel;
  bool maybe_ident;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);

  op = gimple_assign_rhs1 (stmt);
  type = TREE_TYPE (op);
  gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);

  nelts = TYPE_VECTOR_SUBPARTS (type);
  elem_type = TREE_TYPE (type);
  elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));

  sel = XALLOCAVEC (unsigned char, nelts);
  orig = NULL;
  maybe_ident = true;
  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
    {
      tree ref, op1;

      if (i >= nelts)
        return false;

      if (TREE_CODE (elt->value) != SSA_NAME)
        return false;
      def_stmt = get_prop_source_stmt (elt->value, false, NULL);
      if (!def_stmt)
        return false;
      code = gimple_assign_rhs_code (def_stmt);
      if (code != BIT_FIELD_REF)
        return false;
      op1 = gimple_assign_rhs1 (def_stmt);
      ref = TREE_OPERAND (op1, 0);
      if (orig)
        {
          if (ref != orig)
            return false;
        }
      else
        {
          if (TREE_CODE (ref) != SSA_NAME)
            return false;
          if (!useless_type_conversion_p (type, TREE_TYPE (ref)))
            return false;
          orig = ref;
        }
      if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
        return false;
      sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
      if (sel[i] != i) maybe_ident = false;
    }
  if (i < nelts)
    return false;

  if (maybe_ident)
    gimple_assign_set_rhs_from_tree (gsi, orig);
  else
    {
      tree mask_type, *mask_elts;

      if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
        return false;
      mask_type
        = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
                             nelts);
      if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
          || GET_MODE_SIZE (TYPE_MODE (mask_type))
             != GET_MODE_SIZE (TYPE_MODE (type)))
        return false;
      mask_elts = XALLOCAVEC (tree, nelts);
      for (i = 0; i < nelts; i++)
        mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
      op2 = build_vector (mask_type, mask_elts);
      gimple_assign_set_rhs_with_ops (gsi, VEC_PERM_EXPR, orig, orig, op2);
    }
  update_stmt (gsi_stmt (*gsi));
  return true;
}
/* Primitive "lattice" function for gimple_simplify.  */
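/* For example (illustrative SSA names): after the copy  x_2 = x_1;  the
   lattice records x_1 as the value of x_2, so valueizing x_2 during
   gimple_simplify matching sees through the copy and answers x_1.  */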
static tree
fwprop_ssa_val (tree name)
{
  /* First valueize NAME.  */
  if (TREE_CODE (name) == SSA_NAME
      && SSA_NAME_VERSION (name) < lattice.length ())
    {
      tree val = lattice[SSA_NAME_VERSION (name)];
      if (val)
	name = val;
    }
  /* We continue matching along SSA use-def edges for SSA names
     that are not single-use.  Currently there are no patterns
     that would cause any issues with that.  */
  return name;
}
/* Main entry point for the forward propagation and statement combine
   optimizer.  */

namespace {

const pass_data pass_data_forwprop =
{
  GIMPLE_PASS, /* type */
  "forwprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FORWPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_forwprop : public gimple_opt_pass
{
public:
  pass_forwprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_forwprop, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_forwprop (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_forwprop; }
  virtual unsigned int execute (function *);

}; // class pass_forwprop
unsigned int
pass_forwprop::execute (function *fun)
{
  unsigned int todoflags = 0;

  cfg_changed = false;

  /* Combine stmts with the stmts defining their operands.  Do that
     in an order that guarantees visiting SSA defs before SSA uses.  */
  lattice.create (num_ssa_names);
  lattice.quick_grow_cleared (num_ssa_names);
  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
  int postorder_num = inverted_post_order_compute (postorder);
  to_purge = BITMAP_ALLOC (NULL);
  for (int i = 0; i < postorder_num; ++i)
    {
      gimple_stmt_iterator gsi;
      basic_block bb = BASIC_BLOCK_FOR_FN (fun, postorder[i]);

      /* Apply forward propagation to all stmts in the basic-block.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree lhs, rhs;
	  enum tree_code code;

	  if (!is_gimple_assign (stmt))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  lhs = gimple_assign_lhs (stmt);
	  rhs = gimple_assign_rhs1 (stmt);
	  code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE (lhs) != SSA_NAME
	      || has_zero_uses (lhs))
	    {
	      gsi_next (&gsi);
	      continue;
	    }
	  /* If this statement sets an SSA_NAME to an address,
	     try to propagate the address into the uses of the SSA_NAME.  */
	  if (code == ADDR_EXPR
	      /* Handle pointer conversions on invariant addresses
		 as well, as this is valid gimple.  */
	      || (CONVERT_EXPR_CODE_P (code)
		  && TREE_CODE (rhs) == ADDR_EXPR
		  && POINTER_TYPE_P (TREE_TYPE (lhs))))
	    {
	      tree base = get_base_address (TREE_OPERAND (rhs, 0));
	      if ((!base
		   || !DECL_P (base)
		   || decl_address_invariant_p (base))
		  && !stmt_references_abnormal_ssa_name (stmt)
		  && forward_propagate_addr_expr (lhs, rhs, true))
		{
		  fwprop_invalidate_lattice (gimple_get_lhs (stmt));
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (code == POINTER_PLUS_EXPR)
	    {
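	      /* Illustrative sketch (hypothetical SSA names):
		   ptr_2 = ptr_1 + 4;
		 is re-expressed below as the equivalent address
		   ptr_2 = &MEM[ptr_1 + 4];
		 so that forward_propagate_addr_expr can propagate it
		 into the uses of ptr_2.  */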
	      tree off = gimple_assign_rhs2 (stmt);
	      if (TREE_CODE (off) == INTEGER_CST
		  && can_propagate_from (stmt)
		  && !simple_iv_increment_p (stmt)
		  /* ???  Better adjust the interface to that function
		     instead of building new trees here.  */
		  && forward_propagate_addr_expr
		       (lhs,
			build1_loc (gimple_location (stmt),
				    ADDR_EXPR, TREE_TYPE (rhs),
				    fold_build2 (MEM_REF,
						 TREE_TYPE (TREE_TYPE (rhs)),
						 rhs,
						 fold_convert (ptr_type_node,
							       off))), true))
		{
		  fwprop_invalidate_lattice (gimple_get_lhs (stmt));
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else if (is_gimple_min_invariant (rhs))
		{
		  /* Make sure to fold &a[0] + off_1 here.  */
		  fold_stmt_inplace (&gsi);
		  update_stmt (stmt);
		  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
		    gsi_next (&gsi);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
		   && gimple_assign_load_p (stmt)
		   && !gimple_has_volatile_ops (stmt)
		   && (TREE_CODE (gimple_assign_rhs1 (stmt))
		       != TARGET_MEM_REF)
		   && !stmt_can_throw_internal (stmt))
	    {
	      /* Rewrite loads used only in real/imagpart extractions to
		 component-wise loads.  */
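	      /* Sketch (SSA names illustrative):

		   tmp_1 = mem;
		   r_2 = REALPART_EXPR <tmp_1>;
		   i_3 = IMAGPART_EXPR <tmp_1>;

		 becomes two component loads

		   r_2 = REALPART_EXPR <mem>;
		   i_3 = IMAGPART_EXPR <mem>;

		 which avoids loading the whole complex value.  */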
	      use_operand_p use_p;
	      imm_use_iterator iter;
	      bool rewrite = true;
	      FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
		{
		  gimple use_stmt = USE_STMT (use_p);
		  if (is_gimple_debug (use_stmt))
		    continue;
		  if (!is_gimple_assign (use_stmt)
		      || (gimple_assign_rhs_code (use_stmt) != REALPART_EXPR
			  && gimple_assign_rhs_code (use_stmt) != IMAGPART_EXPR))
		    {
		      rewrite = false;
		      break;
		    }
		}
	      if (rewrite)
		{
		  gimple use_stmt;
		  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
		    {
		      if (is_gimple_debug (use_stmt))
			{
			  if (gimple_debug_bind_p (use_stmt))
			    {
			      gimple_debug_bind_reset_value (use_stmt);
			      update_stmt (use_stmt);
			    }
			  continue;
			}

		      tree new_rhs = build1 (gimple_assign_rhs_code (use_stmt),
					     TREE_TYPE (TREE_TYPE (rhs)),
					     unshare_expr (rhs));
		      gimple new_stmt
			= gimple_build_assign (gimple_assign_lhs (use_stmt),
					       new_rhs);

		      gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
		      unlink_stmt_vdef (use_stmt);
		      gsi_remove (&gsi2, true);

		      gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
		    }

		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (code == COMPLEX_EXPR)
	    {
	      /* Rewrite stores of a single-use complex build expression
		 to component-wise stores.  */
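	      /* Sketch (SSA names illustrative):

		   t_1 = COMPLEX_EXPR <a_2, b_3>;
		   mem = t_1;

		 becomes two component stores

		   REALPART_EXPR <mem> = a_2;
		   IMAGPART_EXPR <mem> = b_3;

		 with the virtual operand chain adjusted accordingly.  */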
	      use_operand_p use_p;
	      gimple use_stmt;
	      if (single_imm_use (lhs, &use_p, &use_stmt)
		  && gimple_store_p (use_stmt)
		  && !gimple_has_volatile_ops (use_stmt)
		  && is_gimple_assign (use_stmt)
		  && (TREE_CODE (gimple_assign_lhs (use_stmt))
		      != TARGET_MEM_REF))
		{
		  tree use_lhs = gimple_assign_lhs (use_stmt);
		  tree new_lhs = build1 (REALPART_EXPR,
					 TREE_TYPE (TREE_TYPE (use_lhs)),
					 unshare_expr (use_lhs));
		  gimple new_stmt = gimple_build_assign (new_lhs, rhs);
		  gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
		  gimple_set_vdef (new_stmt, make_ssa_name (gimple_vop (cfun)));
		  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		  gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
		  gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
		  gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);

		  new_lhs = build1 (IMAGPART_EXPR,
				    TREE_TYPE (TREE_TYPE (use_lhs)),
				    unshare_expr (use_lhs));
		  gimple_assign_set_lhs (use_stmt, new_lhs);
		  gimple_assign_set_rhs1 (use_stmt, gimple_assign_rhs2 (stmt));
		  update_stmt (use_stmt);

		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else
	    gsi_next (&gsi);
	}
      /* Combine stmts with the stmts defining their operands.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple stmt = gsi_stmt (gsi);
	  gimple orig_stmt = stmt;
	  bool changed = false;

	  /* Mark stmt as potentially needing revisiting.  */
	  gimple_set_plf (stmt, GF_PLF_1, false);

	  if (fold_stmt (&gsi, fwprop_ssa_val))
	    {
	      changed = true;
	      stmt = gsi_stmt (gsi);
	      if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
		bitmap_set_bit (to_purge, bb->index);
	      /* Cleanup the CFG if we simplified a condition to
		 true or false.  */
	      if (gcond *cond = dyn_cast <gcond *> (stmt))
		if (gimple_cond_true_p (cond)
		    || gimple_cond_false_p (cond))
		  cfg_changed = true;

	      update_stmt (stmt);
	    }
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_ASSIGN:
	      {
		tree rhs1 = gimple_assign_rhs1 (stmt);
		enum tree_code code = gimple_assign_rhs_code (stmt);

		if (code == COND_EXPR
		    || code == VEC_COND_EXPR)
		  {
		    /* In this case the entire COND_EXPR is in rhs1. */
		    if (forward_propagate_into_cond (&gsi))
		      {
			changed = true;
			stmt = gsi_stmt (gsi);
		      }
		  }
		else if (TREE_CODE_CLASS (code) == tcc_comparison)
		  {
		    int did_something;
		    did_something = forward_propagate_into_comparison (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
		else if ((code == PLUS_EXPR
			  || code == BIT_IOR_EXPR
			  || code == BIT_XOR_EXPR)
			 && simplify_rotate (&gsi))
		  changed = true;
		else if (code == VEC_PERM_EXPR)
		  {
		    int did_something = simplify_permutation (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
		else if (code == BIT_FIELD_REF)
		  changed = simplify_bitfield_ref (&gsi);
		else if (code == CONSTRUCTOR
			 && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
		  changed = simplify_vector_constructor (&gsi);
		break;
	      }
	    case GIMPLE_SWITCH:
	      changed = simplify_gimple_switch (as_a <gswitch *> (stmt));
	      break;

	    case GIMPLE_COND:
	      {
		int did_something
		  = forward_propagate_into_gimple_cond (as_a <gcond *> (stmt));
		if (did_something == 2)
		  cfg_changed = true;
		changed = did_something != 0;
		break;
	      }

	    case GIMPLE_CALL:
	      {
		tree callee = gimple_call_fndecl (stmt);
		if (callee != NULL_TREE
		    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
		  changed = simplify_builtin_call (&gsi, callee);
		break;
	      }

	    default:;
	    }
	  if (changed)
	    {
	      /* If the stmt changed then re-visit it and the statements
		 inserted before it.  */
	      for (; !gsi_end_p (gsi); gsi_prev (&gsi))
		if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
		  break;
	      if (gsi_end_p (gsi))
		gsi = gsi_start_bb (bb);
	      else
		gsi_next (&gsi);
	    }
	  else
	    {
	      /* Stmt no longer needs to be revisited.  */
	      gimple_set_plf (stmt, GF_PLF_1, true);
	      /* Fill up the lattice.  */
	      if (gimple_assign_single_p (stmt))
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  tree rhs = gimple_assign_rhs1 (stmt);
		  if (TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree val = lhs;
		      if (TREE_CODE (rhs) == SSA_NAME)
			val = fwprop_ssa_val (rhs);
		      else if (is_gimple_min_invariant (rhs))
			val = rhs;
		      fwprop_set_lattice_val (lhs, val);
		    }
		}

	      gsi_next (&gsi);
	    }
	}
    }
  free (postorder);
  lattice.release ();

  cfg_changed |= gimple_purge_all_dead_eh_edges (to_purge);
  BITMAP_FREE (to_purge);
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}

} // anon namespace

gimple_opt_pass *
make_pass_forwprop (gcc::context *ctxt)
{
  return new pass_forwprop (ctxt);
}