/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "optabs-query.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-dom.h"
#include "tree-cfgcleanup.h"
#include "optabs-tree.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"
/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.   It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
        if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
        if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
        if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or
     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither have volatile on them):
     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or
     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */
static bool forward_propagate_addr_expr (tree, tree, bool);

/* Set to true if we delete dead edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple *stmt);

static bitmap to_purge;

/* Const-and-copy lattice.  */
static vec<tree> lattice;
/* Set the lattice entry for NAME to VAL.  */

static void
fwprop_set_lattice_val (tree name, tree val)
{
  if (TREE_CODE (name) == SSA_NAME)
    {
      if (SSA_NAME_VERSION (name) >= lattice.length ())
        {
          lattice.reserve (num_ssa_names - lattice.length ());
          lattice.quick_grow_cleared (num_ssa_names);
        }
      lattice[SSA_NAME_VERSION (name)] = val;
    }
}
/* Invalidate the lattice entry for NAME, done when releasing SSA names.  */

static void
fwprop_invalidate_lattice (tree name)
{
  if (name
      && TREE_CODE (name) == SSA_NAME
      && SSA_NAME_VERSION (name) < lattice.length ())
    lattice[SSA_NAME_VERSION (name)] = NULL_TREE;
}
/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL_TREE if there is no such one.
   If SINGLE_USE_ONLY is set considers only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple *
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (name);

      if (!has_single_use (name))
        {
          single_use = false;
          if (single_use_only)
            return NULL;
        }

      /* If name is defined by a PHI node or is the default def, bail out.  */
      if (!is_gimple_assign (def_stmt))
        return NULL;

      /* If def_stmt is a simple copy, continue looking.  */
      if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
        name = gimple_assign_rhs1 (def_stmt);
      else
        {
          if (!single_use_only && single_use_p)
            *single_use_p = single_use;

          return def_stmt;
        }
    }
  while (1);
}
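
/* For illustration only (a hypothetical GIMPLE sequence, not taken from
   the sources above): given

     x_2 = a_1 < b_3;
     y_4 = x_2;

   get_prop_source_stmt (y_4, false, &single_use_p) skips the trivial
   copy and returns the statement defining x_2, i.e. the comparison.  */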
/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple *def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can be always propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we can not apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}
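
/* For illustration only (hypothetical GIMPLE): can_propagate_from
   refuses a definition like

     fnptr_2 = (void (*) (void)) addr_1;

   because some targets require function pointers to be canonicalized;
   propagating the conversion away could drop that canonicalization.  */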
/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple *stmt;
  bool cfg_changed = false;

  do
    {
      basic_block bb;

      if (SSA_NAME_IN_FREE_LIST (name)
          || SSA_NAME_IS_DEFAULT_DEF (name)
          || !has_zero_uses (name))
        return cfg_changed;

      stmt = SSA_NAME_DEF_STMT (name);
      if (gimple_code (stmt) == GIMPLE_PHI
          || gimple_has_side_effects (stmt))
        return cfg_changed;

      bb = gimple_bb (stmt);
      gsi = gsi_for_stmt (stmt);
      unlink_stmt_vdef (stmt);
      if (gsi_remove (&gsi, true))
        bitmap_set_bit (to_purge, bb->index);
      fwprop_invalidate_lattice (gimple_get_lhs (stmt));
      release_defs (stmt);

      name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
    }
  while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}
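
/* For illustration only (hypothetical GIMPLE): once the single use of
   x_3 in

     y_4 = a_1 < b_2;
     x_3 = y_4;

   has been replaced by the comparison itself, calling
   remove_prop_source_from_use (x_3) deletes the now-dead copy, then
   follows its first operand to y_4 and deletes the dead comparison
   as well.  */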
/* Return the rhs of a gassign *STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple *stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt),
                            gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}
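
/* For illustration only: given the statement 'x_1 = a_2 + b_3' and TYPE
   int, rhs_to_tree returns the GENERIC tree PLUS_EXPR <a_2, b_3> built
   by fold_build2_loc, which callers can then hand to the fold()
   machinery for combining with other expressions.  */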
/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple *stmt, enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}
/* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
   of its operand.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (gimple *stmt,
                                     enum tree_code code, tree type,
                                     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple *def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
          bool invariant_only_p = !single_use0_p;

          rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);

          /* Always combine comparisons or conversions from booleans.  */
          if (TREE_CODE (op1) == INTEGER_CST
              && ((CONVERT_EXPR_CODE_P (def_code)
                   && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs0, 0)))
                      == BOOLEAN_TYPE)
                  || TREE_CODE_CLASS (def_code) == tcc_comparison))
            invariant_only_p = false;

          tmp = combine_cond_expr_cond (stmt, code, type,
                                        rhs0, op1, invariant_only_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple *def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        op0, rhs1, !single_use1_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
                                  rhs0, rhs1,
                                  !(single_use0_p && single_use1_p));

  return tmp;
}
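
/* For illustration only (hypothetical GIMPLE): for

     x_1 = a_2 < b_3;
     y_4 = x_1 != 0;

   combining the comparison in y_4 with the defining statement of x_1
   simplifies the condition to 'a_2 < b_3'; with a single-use x_1 the
   intermediate statement can then be removed by the callers.  */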
/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
                                             gimple_assign_rhs_code (stmt),
                                             type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}
/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gcond *stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
                                             boolean_type_node,
                                             rhs1, rhs2);
  if (tmp)
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_gimple_expr (dump_file, stmt, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp);
          fprintf (dump_file, "'\n");
        }

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
           && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
           && integer_zerop (rhs2))
          || (code == NE_EXPR
              && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}
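
/* For illustration only (hypothetical GIMPLE): the canonicalization
   above turns

     if (flag_1 == 0) goto bb_2; else goto bb_3;

   for a boolean flag_1 into

     if (flag_1 != 0) goto bb_3; else goto bb_2;

   by flipping EDGE_TRUE_VALUE/EDGE_FALSE_VALUE on both successor edges
   instead of materializing a negation.  */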
/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
                                               TREE_TYPE (cond),
                                               TREE_OPERAND (cond, 0),
                                               TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      enum tree_code def_code;
      tree name = cond;
      gimple *def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return false;

      def_code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (def_code) == tcc_comparison)
        tmp = fold_build2_loc (gimple_location (def_stmt),
                               def_code,
                               TREE_TYPE (cond),
                               gimple_assign_rhs1 (def_stmt),
                               gimple_assign_rhs2 (def_stmt));
    }

  if (tmp
      && is_gimple_condexpr (tmp))
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, cond);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp);
          fprintf (dump_file, "'\n");
        }

      if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
                                  : integer_onep (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
      else if (integer_zerop (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
      else
        gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return true;
    }

  return false;
}
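
/* For illustration only (hypothetical GIMPLE): for

     c_1 = a_2 < b_3;
     x_4 = c_1 ? d_5 : e_6;

   the comparison is rebuilt as a GENERIC tree and substituted into the
   COND_EXPR, giving 'x_4 = a_2 < b_3 ? d_5 : e_6'; had the condition
   folded to a constant, one arm would have been selected outright.  */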
/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple *stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
    bitmap_set_bit (to_purge, gimple_bb (stmt)->index);

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}
/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation can
   be not totally successful, yet things may have been changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple *use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Do not perform copy-propagation but recurse through copy chains.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == SSA_NAME)
    return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
  /* The use statement could be a conversion.  Recurse to the uses of the
     lhs as copyprop does not copy through pointer to integer to pointer
     conversions and FRE does not catch all cases either.
     Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && CONVERT_EXPR_CODE_P (rhs_code))
    {
      /* If there is a point in a conversion chain where the types match
         so we can remove a conversion re-materialize the address here
         and stop.  */
      if (single_use_p
          && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
        {
          gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
          gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
          return true;
        }

      /* Else recurse if the conversion preserves the address value.  */
      if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
           || POINTER_TYPE_P (TREE_TYPE (lhs)))
          && (TYPE_PRECISION (TREE_TYPE (lhs))
              >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
        return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

      return false;
    }

  /* If this isn't a conversion chain from this on we only can propagate
     into compatible pointer contexts.  */
  if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
    return false;
  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
         for further propagation.  Simply rely on fold building that
         and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
                                 def_rhs,
                                 fold_convert (ptr_type_node,
                                               gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
        return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
                                                    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
         bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
          && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
        return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
                                        new_def_rhs);
      else if (is_gimple_min_invariant (new_def_rhs))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR, new_def_rhs);
      else
        return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }
  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;
  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      poly_int64 def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          poly_offset_int off = mem_ref_offset (lhs);
          tree new_ptr;
          off += def_rhs_offset;
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (lhs, 0) = new_ptr;
          TREE_OPERAND (lhs, 1)
            = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the only use.  */
          if (single_use_p)
            return true;
        }
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the LHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (lhs, 1))
               && ((gimple_assign_lhs (use_stmt) == lhs
                    && useless_type_conversion_p
                         (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
                          TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
                   || types_compatible_p (TREE_TYPE (lhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
               /* Don't forward anything into clobber stmts if it would result
                  in the lhs no longer being a MEM_REF.  */
               && (!gimple_clobber_p (use_stmt)
                   || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_lhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (lhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
          new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *lhsp = new_lhs;
          TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
          *def_rhs_basep = saved;
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the
             only use.  */
          if (single_use_p)
            return true;
        }
      else
        /* We can have a struct assignment dereferencing our name twice.
           Note that we didn't propagate into the lhs to not falsely
           claim we did when propagating into the rhs.  */
        res = false;
    }
  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
  if (TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  while (handled_component_p (*rhsp))
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      poly_int64 def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          poly_offset_int off = mem_ref_offset (rhs);
          tree new_ptr;
          off += def_rhs_offset;
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (rhs, 0) = new_ptr;
          TREE_OPERAND (rhs, 1)
            = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the RHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (rhs, 1))
               && ((gimple_assign_rhs1 (use_stmt) == rhs
                    && useless_type_conversion_p
                         (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
                   || types_compatible_p (TREE_TYPE (rhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_rhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (rhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
          new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *rhsp = new_rhs;
          TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
          *def_rhs_basep = saved;
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
    }
  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
                                 ADDR_EXPR, TREE_TYPE (def_rhs),
                                 fold_build2 (MEM_REF,
                                              TREE_TYPE (TREE_TYPE (def_rhs)),
                                              unshare_expr (def_rhs),
                                              fold_convert (ptr_type_node,
                                                            rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  return false;
}
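
/* For illustration only (hypothetical GIMPLE, assuming 'int a[8]'): the
   INTEGER_CST case above rewrites

     ptr_1 = &a[0];
     ptr2_2 = ptr_1 p+ 4;

   into

     ptr2_2 = &MEM[(void *)&a + 4B];

   recovering an invariant address from the pointer arithmetic.  */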
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
   the single use in the previous invocation.  Pass true when calling
   this as toplevel.

   Returns true, if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
{
  imm_use_iterator iter;
  gimple *use_stmt;
  bool all = true;
  bool single_use_p = parent_single_use_p && has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (!is_gimple_assign (use_stmt))
        {
          if (!is_gimple_debug (use_stmt))
            all = false;
          continue;
        }

      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
      result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                              single_use_p);
      /* If the use has moved to a different statement adjust
         the update machinery for the old statement too.  */
      if (use_stmt != gsi_stmt (gsi))
        {
          update_stmt (use_stmt);
          use_stmt = gsi_stmt (gsi);
        }
      update_stmt (use_stmt);
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          fwprop_invalidate_lattice (gimple_get_lhs (use_stmt));
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  return all && has_zero_uses (name);
}
/* Helper function for simplify_gimple_switch.  Remove case labels that
   have values outside the range of the new type.  */

static void
simplify_gimple_switch_label_vec (gswitch *stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  auto_vec<tree> labels (branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  */
  for (i = 1; i < branch_num; i++)
    labels.quick_push (gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = labels.length ();
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
         out-of-range for INDEX_TYPE.  Push one label and let the
         CFG cleanups deal with this further.  */
      if (len == 0)
        {
          tree label, elt;

          label = CASE_LABEL (gimple_switch_default_label (stmt));
          elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
          labels.quick_push (elt);
          len = 1;
        }

      for (i = 0; i < labels.length (); i++)
        gimple_switch_set_label (stmt, i + 1, labels[i]);
      for (i++ ; i < branch_num; i++)
        gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
        {
          tree elt = gimple_switch_label (stmt, i);
          basic_block target = label_to_block (cfun, CASE_LABEL (elt));
          bitmap_set_bit (target_blocks, target->index);
        }
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
        {
          if (! bitmap_bit_p (target_blocks, e->dest->index))
            {
              remove_edge (e);
              cfg_changed = true;
              free_dominance_info (CDI_DOMINATORS);
            }
          else
            ei_next (&ei);
        }
      BITMAP_FREE (target_blocks);
    }
}
/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gswitch *stmt)
{
  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  tree cond = gimple_switch_index (stmt);
  if (TREE_CODE (cond) == SSA_NAME)
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (cond);
      if (gimple_assign_cast_p (def_stmt))
        {
          tree def = gimple_assign_rhs1 (def_stmt);
          if (TREE_CODE (def) != SSA_NAME)
            return false;

          /* If we have an extension or sign-change that preserves the
             values we check against then we can copy the source value into
             the switch.  */
          tree ti = TREE_TYPE (def);
          if (INTEGRAL_TYPE_P (ti)
              && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
            {
              size_t n = gimple_switch_num_labels (stmt);
              tree min = NULL_TREE, max = NULL_TREE;
              if (n > 1)
                {
                  min = CASE_LOW (gimple_switch_label (stmt, 1));
                  if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
                    max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
                  else
                    max = CASE_LOW (gimple_switch_label (stmt, n - 1));
                }
              if ((!min || int_fits_type_p (min, ti))
                  && (!max || int_fits_type_p (max, ti)))
                {
                  gimple_switch_set_index (stmt, def);
                  simplify_gimple_switch_label_vec (stmt, ti);
                  update_stmt (stmt);
                  return true;
                }
            }
        }
    }

  return false;
}
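
/* For illustration only (hypothetical GIMPLE, assuming 'char c_1'):

     _2 = (int) c_1;
     switch (_2) <default: ..., case 0: ..., case 9: ...>

   Every case value fits the precision of char, so the cast is removed
   and the switch indexes c_1 directly; any labels out of range for the
   narrower type would be pruned by simplify_gimple_switch_label_vec.  */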
/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple *stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
         twice, to handle ADDR_EXPR directly in p1/p2,
         SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
         on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
        {
          if (!POINTER_TYPE_P (TREE_TYPE (p)))
            break;
          if (TREE_CODE (p) == ADDR_EXPR)
            {
              tree q = TREE_OPERAND (p, 0);
              poly_int64 offset;
              tree base = get_addr_base_and_unit_offset (q, &offset);
              if (base)
                {
                  q = base;
                  if (maybe_ne (offset, 0))
                    off = size_binop (PLUS_EXPR, off, size_int (offset));
                }
              if (TREE_CODE (q) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
                {
                  p = TREE_OPERAND (q, 0);
                  off = size_binop (PLUS_EXPR, off,
                                    wide_int_to_tree (sizetype,
                                                      mem_ref_offset (q)));
                }
              else
                {
                  exps[i][j] = q;
                  offs[i][j++] = off;
                  break;
                }
            }
          if (TREE_CODE (p) != SSA_NAME)
            break;
          exps[i][j] = p;
          offs[i][j++] = off;
          if (j == CPD_ITERATIONS)
            break;
          stmt = SSA_NAME_DEF_STMT (p);
          if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
            break;
          code = gimple_assign_rhs_code (stmt);
          if (code == POINTER_PLUS_EXPR)
            {
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
                break;
              off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
              p = gimple_assign_rhs1 (stmt);
            }
          else if (code == ADDR_EXPR || CONVERT_EXPR_CODE_P (code))
            p = gimple_assign_rhs1 (stmt);
          else
            break;
        }
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
        return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
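
/* For illustration only (hypothetical GIMPLE, assuming 'char buf[32]'):
   with p1_1 = &buf[4] and p2_2 = p1_1 p+ 12, both pointers trace back
   to the common base buf with accumulated offsets 4 and 16, so
   constant_pointer_difference (p1_1, p2_2) returns 12.  */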
/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
   memcpy (p, "abcd", 4);
   memset (p + 4, ' ', 3);
   into
   memcpy (p, "abcd   ", 7);
   call if the latter can be stored by pieces during expansion.  */

static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple *stmt1, *stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
          || gimple_call_lhs (stmt2)
          || CHAR_BIT != 8
          || BITS_PER_UNIT != 8)
        break;
      else
        {
          tree callee1;
          tree ptr1, src1, str1, off1, len1, lhs1;
          tree ptr2 = gimple_call_arg (stmt2, 0);
          tree val2 = gimple_call_arg (stmt2, 1);
          tree len2 = gimple_call_arg (stmt2, 2);
          tree diff, vdef, new_str_cst;
          gimple *use_stmt;
          unsigned int ptr1_align;
          unsigned HOST_WIDE_INT src_len;
          char *src_buf;
          use_operand_p use_p;

          if (!tree_fits_shwi_p (val2)
              || !tree_fits_uhwi_p (len2)
              || compare_tree_int (len2, 1024) == 1)
            break;
          if (is_gimple_call (stmt1))
            {
              /* If first stmt is a call, it needs to be memcpy
                 or mempcpy, with string literal as second argument and
                 constant length.  */
              callee1 = gimple_call_fndecl (stmt1);
              if (callee1 == NULL_TREE
                  || !fndecl_built_in_p (callee1, BUILT_IN_NORMAL)
                  || gimple_call_num_args (stmt1) != 3)
                break;
              if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
                  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
                break;
              ptr1 = gimple_call_arg (stmt1, 0);
              src1 = gimple_call_arg (stmt1, 1);
              len1 = gimple_call_arg (stmt1, 2);
              lhs1 = gimple_call_lhs (stmt1);
              if (!tree_fits_uhwi_p (len1))
                break;
              str1 = string_constant (src1, &off1, NULL, NULL);
              if (str1 == NULL_TREE)
                break;
              if (!tree_fits_uhwi_p (off1)
                  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
                  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
                                             - tree_to_uhwi (off1)) > 0
                  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
                  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
                     != TYPE_MODE (char_type_node))
                break;
            }
          else if (gimple_assign_single_p (stmt1))
            {
              /* Otherwise look for length 1 memcpy optimized into
                 assignment.  */
              ptr1 = gimple_assign_lhs (stmt1);
              src1 = gimple_assign_rhs1 (stmt1);
              if (TREE_CODE (ptr1) != MEM_REF
                  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
                  || !tree_fits_shwi_p (src1))
                break;
              ptr1 = build_fold_addr_expr (ptr1);
              callee1 = NULL_TREE;
              len1 = size_one_node;
              lhs1 = NULL_TREE;
              off1 = size_zero_node;
              str1 = NULL_TREE;
            }
          else
            break;

          diff = constant_pointer_difference (ptr1, ptr2);
          if (diff == NULL && lhs1 != NULL)
            {
              diff = constant_pointer_difference (lhs1, ptr2);
              if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
                  && diff != NULL)
                diff = size_binop (PLUS_EXPR, diff,
                                   fold_convert (sizetype, len1));
            }
          /* If the difference between the second and first destination pointer
             is not constant, or is bigger than memcpy length, bail out.  */
          if (diff == NULL
              || !tree_fits_uhwi_p (diff)
              || tree_int_cst_lt (len1, diff)
              || compare_tree_int (diff, 1024) == 1)
            break;

          /* Use maximum of difference plus memset length and memcpy length
             as the new memcpy length, if it is too big, bail out.  */
          src_len = tree_to_uhwi (diff);
          src_len += tree_to_uhwi (len2);
          if (src_len < tree_to_uhwi (len1))
            src_len = tree_to_uhwi (len1);
          if (src_len > 1024)
            break;

          /* If mempcpy value is used elsewhere, bail out, as mempcpy
             with bigger length will return different result.  */
          if (lhs1 != NULL_TREE
              && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
              && (TREE_CODE (lhs1) != SSA_NAME
                  || !single_imm_use (lhs1, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          /* If anything reads memory in between memcpy and memset
             call, the modified memcpy call might change it.  */
          vdef = gimple_vdef (stmt1);
          if (vdef != NULL
              && (!single_imm_use (vdef, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          ptr1_align = get_pointer_alignment (ptr1);
          /* Construct the new source string literal.  */
          src_buf = XALLOCAVEC (char, src_len + 1);
          if (callee1)
            memcpy (src_buf,
                    TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
                    tree_to_uhwi (len1));
          else
            src_buf[0] = tree_to_shwi (src1);
          memset (src_buf + tree_to_uhwi (diff),
                  tree_to_shwi (val2), tree_to_uhwi (len2));
          src_buf[src_len] = '\0';
          /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
             handle embedded '\0's.  */
          if (strlen (src_buf) != src_len)
            break;
          rtl_profile_for_bb (gimple_bb (stmt2));
          /* If the new memcpy wouldn't be emitted by storing the literal
             by pieces, this optimization might enlarge .rodata too much,
             as commonly used string literals couldn't be shared any
             longer.  */
          if (!can_store_by_pieces (src_len,
                                    builtin_strncpy_read_str,
                                    src_buf, ptr1_align, false))
            break;

          new_str_cst = build_string_literal (src_len + 1, src_buf);
          if (callee1)
            {
              /* If STMT1 is a mem{,p}cpy call, adjust it and remove
                 memset call.  */
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                gimple_call_set_lhs (stmt1, NULL_TREE);
              gimple_call_set_arg (stmt1, 1, new_str_cst);
              gimple_call_set_arg (stmt1, 2,
                                   build_int_cst (TREE_TYPE (len1), src_len));
              update_stmt (stmt1);
              unlink_stmt_vdef (stmt2);
              gsi_remove (gsi_p, true);
              fwprop_invalidate_lattice (gimple_get_lhs (stmt2));
              release_defs (stmt2);
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                {
                  fwprop_invalidate_lattice (lhs1);
                  release_ssa_name (lhs1);
                }
              return true;
            }
          else
            {
              /* Otherwise, if STMT1 is length 1 memcpy optimized into
                 assignment, remove STMT1 and change memset call into
                 memcpy call.  */
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

              if (!is_gimple_val (ptr1))
                ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
                                                 true, GSI_SAME_STMT);
              gimple_call_set_fndecl (stmt2,
                                      builtin_decl_explicit (BUILT_IN_MEMCPY));
              gimple_call_set_arg (stmt2, 0, ptr1);
              gimple_call_set_arg (stmt2, 1, new_str_cst);
              gimple_call_set_arg (stmt2, 2,
                                   build_int_cst (TREE_TYPE (len2), src_len));
              unlink_stmt_vdef (stmt1);
              gsi_remove (&gsi, true);
              fwprop_invalidate_lattice (gimple_get_lhs (stmt1));
              release_defs (stmt1);
              update_stmt (stmt2);
              return true;
            }
        }
      break;
    default:
      break;
    }
  return false;
}
/* Given a ssa_name in NAME see if it was defined by an assignment and
   set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
   to the second operand on the rhs.  */

static void
defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
{
  gimple *def;
  enum tree_code code1;
  tree arg11;
  tree arg21;
  tree arg31;
  enum gimple_rhs_class grhs_class;

  code1 = TREE_CODE (name);
  arg11 = name;
  arg21 = NULL_TREE;
  arg31 = NULL_TREE;
  grhs_class = get_gimple_rhs_class (code1);

  if (code1 == SSA_NAME)
    {
      def = SSA_NAME_DEF_STMT (name);

      if (def && is_gimple_assign (def)
          && can_propagate_from (def))
        {
          code1 = gimple_assign_rhs_code (def);
          arg11 = gimple_assign_rhs1 (def);
          arg21 = gimple_assign_rhs2 (def);
          arg31 = gimple_assign_rhs3 (def);
        }
    }
  else if (grhs_class != GIMPLE_SINGLE_RHS)
    code1 = ERROR_MARK;

  *code = code1;
  *arg1 = arg11;
  if (arg2)
    *arg2 = arg21;
  if (arg31)
    *arg2 = arg31;
}
/* Recognize rotation patterns.  Return true if a transformation
   applied, otherwise return false.

   We are looking for X with unsigned type T with bitsize B, OP being
   +, | or ^, some type T2 wider than T.  For:
   (X << CNT1) OP (X >> CNT2)                          iff CNT1 + CNT2 == B
   ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2))    iff CNT1 + CNT2 == B

   transform these into:
   X r<< CNT1

   Or for:
   (X << Y) OP (X >> (B - Y))
   (X << (int) Y) OP (X >> (int) (B - Y))
   ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
   ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
   (X << Y) | (X >> ((-Y) & (B - 1)))
   (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
   ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
   ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))

   transform these into:
   X r<< Y

   Or for:
   (X << (Y & (B - 1))) | (X >> ((-Y) & (B - 1)))
   (X << (int) (Y & (B - 1))) | (X >> (int) ((-Y) & (B - 1)))
   ((T) ((T2) X << (Y & (B - 1)))) | ((T) ((T2) X >> ((-Y) & (B - 1))))
   ((T) ((T2) X << (int) (Y & (B - 1)))) \
     | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))

   transform these into:
   X r<< (Y & (B - 1))

   Note, in the patterns with T2 type, the type of OP operands
   might be even a signed type, but should have precision B.
   Expressions with & (B - 1) should be recognized only if B is
   a power of 2.  */

static bool
simplify_rotate (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg[2], rtype, rotcnt = NULL_TREE;
  tree def_arg1[2], def_arg2[2];
  enum tree_code def_code[2];
  tree lhs;
  int i;
  bool swapped_p = false;
  gimple *g;

  arg[0] = gimple_assign_rhs1 (stmt);
  arg[1] = gimple_assign_rhs2 (stmt);
  rtype = TREE_TYPE (arg[0]);

  /* Only create rotates in complete modes.  Other cases are not
     expanded properly.  */
  if (!INTEGRAL_TYPE_P (rtype)
      || !type_has_mode_precision_p (rtype))
    return false;

  for (i = 0; i < 2; i++)
    defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);

  /* Look through narrowing conversions.  */
  if (CONVERT_EXPR_CODE_P (def_code[0])
      && CONVERT_EXPR_CODE_P (def_code[1])
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
         == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
      && has_single_use (arg[0])
      && has_single_use (arg[1]))
    {
      for (i = 0; i < 2; i++)
        {
          arg[i] = def_arg1[i];
          defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
        }
    }

  /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR.  */
  for (i = 0; i < 2; i++)
    if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
      return false;
    else if (!has_single_use (arg[i]))
      return false;
  if (def_code[0] == def_code[1])
    return false;

  /* If we've looked through narrowing conversions before, look through
     widening conversions from unsigned type with the same precision
     as rtype here.  */
  if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
    for (i = 0; i < 2; i++)
      {
        tree tem;
        enum tree_code code;
        defcodefor_name (def_arg1[i], &code, &tem, NULL);
        if (!CONVERT_EXPR_CODE_P (code)
            || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
            || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
          return false;
        def_arg1[i] = tem;
      }
  /* Both shifts have to use the same first operand.  */
  if (!operand_equal_for_phi_arg_p (def_arg1[0], def_arg1[1])
      || !types_compatible_p (TREE_TYPE (def_arg1[0]),
                              TREE_TYPE (def_arg1[1])))
    return false;
  if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
    return false;

  /* CNT1 + CNT2 == B case above.  */
  if (tree_fits_uhwi_p (def_arg2[0])
      && tree_fits_uhwi_p (def_arg2[1])
      && tree_to_uhwi (def_arg2[0])
         + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
    rotcnt = def_arg2[0];
  else if (TREE_CODE (def_arg2[0]) != SSA_NAME
           || TREE_CODE (def_arg2[1]) != SSA_NAME)
    return false;
  else
    {
      tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
      enum tree_code cdef_code[2];
      /* Look through conversion of the shift count argument.
         The C/C++ FE cast any shift count argument to integer_type_node.
         The only problem might be if the shift count type maximum value
         is equal or smaller than number of bits in rtype.  */
      for (i = 0; i < 2; i++)
        {
          def_arg2_alt[i] = def_arg2[i];
          defcodefor_name (def_arg2[i], &cdef_code[i],
                           &cdef_arg1[i], &cdef_arg2[i]);
          if (CONVERT_EXPR_CODE_P (cdef_code[i])
              && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
              && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
                 > floor_log2 (TYPE_PRECISION (rtype))
              && type_has_mode_precision_p (TREE_TYPE (cdef_arg1[i])))
            {
              def_arg2_alt[i] = cdef_arg1[i];
              defcodefor_name (def_arg2_alt[i], &cdef_code[i],
                               &cdef_arg1[i], &cdef_arg2[i]);
            }
        }
      for (i = 0; i < 2; i++)
        /* Check for one shift count being Y and the other B - Y,
           with optional casts.  */
        if (cdef_code[i] == MINUS_EXPR
            && tree_fits_shwi_p (cdef_arg1[i])
            && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
            && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
          {
            tree tem;
            enum tree_code code;

            if (cdef_arg2[i] == def_arg2[1 - i]
                || cdef_arg2[i] == def_arg2_alt[1 - i])
              {
                rotcnt = cdef_arg2[i];
                break;
              }
            defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
            if (CONVERT_EXPR_CODE_P (code)
                && INTEGRAL_TYPE_P (TREE_TYPE (tem))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   > floor_log2 (TYPE_PRECISION (rtype))
                && type_has_mode_precision_p (TREE_TYPE (tem))
                && (tem == def_arg2[1 - i]
                    || tem == def_arg2_alt[1 - i]))
              {
                rotcnt = tem;
                break;
              }
          }
        /* The above sequence isn't safe for Y being 0,
           because then one of the shifts triggers undefined behavior.
           This alternative is safe even for rotation count of 0.
           One shift count is Y and the other (-Y) & (B - 1).
           Or one shift count is Y & (B - 1) and the other (-Y) & (B - 1).  */
        else if (cdef_code[i] == BIT_AND_EXPR
                 && pow2p_hwi (TYPE_PRECISION (rtype))
                 && tree_fits_shwi_p (cdef_arg2[i])
                 && tree_to_shwi (cdef_arg2[i])
                    == TYPE_PRECISION (rtype) - 1
                 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
                 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
          {
            tree tem;
            enum tree_code code;

            defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
            if (CONVERT_EXPR_CODE_P (code)
                && INTEGRAL_TYPE_P (TREE_TYPE (tem))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   > floor_log2 (TYPE_PRECISION (rtype))
                && type_has_mode_precision_p (TREE_TYPE (tem)))
              defcodefor_name (tem, &code, &tem, NULL);

            if (code == NEGATE_EXPR)
              {
                if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
                  {
                    rotcnt = tem;
                    break;
                  }
                tree tem2;
                defcodefor_name (tem, &code, &tem2, NULL);
                if (CONVERT_EXPR_CODE_P (code)
                    && INTEGRAL_TYPE_P (TREE_TYPE (tem2))
                    && TYPE_PRECISION (TREE_TYPE (tem2))
                       > floor_log2 (TYPE_PRECISION (rtype))
                    && type_has_mode_precision_p (TREE_TYPE (tem2)))
                  {
                    if (tem2 == def_arg2[1 - i]
                        || tem2 == def_arg2_alt[1 - i])
                      {
                        rotcnt = tem2;
                        break;
                      }
                  }
                else
                  tem2 = NULL_TREE;

                if (cdef_code[1 - i] == BIT_AND_EXPR
                    && tree_fits_shwi_p (cdef_arg2[1 - i])
                    && tree_to_shwi (cdef_arg2[1 - i])
                       == TYPE_PRECISION (rtype) - 1
                    && TREE_CODE (cdef_arg1[1 - i]) == SSA_NAME)
                  {
                    if (tem == cdef_arg1[1 - i]
                        || tem2 == cdef_arg1[1 - i])
                      {
                        rotcnt = def_arg2[1 - i];
                        break;
                      }
                    tree tem3;
                    defcodefor_name (cdef_arg1[1 - i], &code, &tem3, NULL);
                    if (CONVERT_EXPR_CODE_P (code)
                        && INTEGRAL_TYPE_P (TREE_TYPE (tem3))
                        && TYPE_PRECISION (TREE_TYPE (tem3))
                           > floor_log2 (TYPE_PRECISION (rtype))
                        && type_has_mode_precision_p (TREE_TYPE (tem3)))
                      {
                        if (tem == tem3 || tem2 == tem3)
                          {
                            rotcnt = def_arg2[1 - i];
                            break;
                          }
                      }
                  }
              }
          }
      if (rotcnt == NULL_TREE)
        return false;
      swapped_p = i != 1;
    }

  if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
                                  TREE_TYPE (rotcnt)))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (def_arg2[0])),
                               NOP_EXPR, rotcnt);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      rotcnt = gimple_assign_lhs (g);
    }
  lhs = gimple_assign_lhs (stmt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    lhs = make_ssa_name (TREE_TYPE (def_arg1[0]));
  g = gimple_build_assign (lhs,
                           ((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
                           ? LROTATE_EXPR : RROTATE_EXPR, def_arg1[0], rotcnt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    {
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, lhs);
    }
  gsi_replace (gsi, g, false);
  return true;
}
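
/* For illustration only (hypothetical source, 'unsigned int x' with
   B == 32): '(x << 3) | (x >> 29)' matches the CNT1 + CNT2 == B case
   and becomes 'x r<< 3' (LROTATE_EXPR), while
   '(x << n) | (x >> ((-n) & 31))' matches the BIT_AND_EXPR alternative
   that is well defined even for n == 0.  */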
/* Combine an element access with a shuffle.  Returns true if there were
   any changes made, else it returns false.  */

static bool
simplify_bitfield_ref (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple *def_stmt;
  tree op, op0, op1, op2;
  tree elem_type;
  unsigned HOST_WIDE_INT size, idx;
  enum tree_code code;

  op = gimple_assign_rhs1 (stmt);
  gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);

  op0 = TREE_OPERAND (op, 0);
  if (TREE_CODE (op0) != SSA_NAME
      || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
    return false;

  def_stmt = get_prop_source_stmt (op0, false, NULL);
  if (!def_stmt || !can_propagate_from (def_stmt))
    return false;

  op1 = TREE_OPERAND (op, 1);
  op2 = TREE_OPERAND (op, 2);
  code = gimple_assign_rhs_code (def_stmt);

  if (code == CONSTRUCTOR)
    {
      tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
                               gimple_assign_rhs1 (def_stmt), op1, op2);
      if (!tem || !valid_gimple_rhs_p (tem))
        return false;
      gimple_assign_set_rhs_from_tree (gsi, tem);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  elem_type = TREE_TYPE (TREE_TYPE (op0));
  if (TREE_TYPE (op) != elem_type)
    return false;

  size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
  if (maybe_ne (bit_field_size (op), size))
    return false;

  if (code == VEC_PERM_EXPR
      && constant_multiple_p (bit_field_offset (op), size, &idx))
    {
      tree p, m, tem;
      unsigned HOST_WIDE_INT nelts;
      m = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (m) != VECTOR_CST
          || !VECTOR_CST_NELTS (m).is_constant (&nelts))
        return false;
      idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
      idx %= 2 * nelts;
      if (idx < nelts)
        p = gimple_assign_rhs1 (def_stmt);
      else
        {
          p = gimple_assign_rhs2 (def_stmt);
          idx -= nelts;
        }
      tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
                    unshare_expr (p), op1, bitsize_int (idx * size));
      gimple_assign_set_rhs1 (stmt, tem);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  return false;
}
/* Determine whether applying the 2 permutations (mask1 then mask2)
   gives back one of the input.  */

static int
is_combined_permutation_identity (tree mask1, tree mask2)
{
  tree mask;
  unsigned HOST_WIDE_INT nelts, i, j;
  bool maybe_identity1 = true;
  bool maybe_identity2 = true;

  gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
                       && TREE_CODE (mask2) == VECTOR_CST);
  mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
  if (mask == NULL_TREE || TREE_CODE (mask) != VECTOR_CST)
    return 0;

  if (!VECTOR_CST_NELTS (mask).is_constant (&nelts))
    return 0;
  for (i = 0; i < nelts; i++)
    {
      tree val = VECTOR_CST_ELT (mask, i);
      gcc_assert (TREE_CODE (val) == INTEGER_CST);
      j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
      if (j == i)
        maybe_identity2 = false;
      else if (j == i + nelts)
        maybe_identity1 = false;
      else
        return 0;
    }
  return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
}
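
/* For illustration only: with nelts == 4, if the combined mask computed
   above selects elements { 0, 1, 2, 3 }, the result is the first input
   and the function returns 1; a combined selection of { 4, 5, 6, 7 }
   would return 2 (second input), and any other mix returns 0.  */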
/* Combine a shuffle with its arguments.  Returns 1 if there were any
   changes made, 2 if cfg-cleanup needs to run.  Else it returns 0.  */

static int
simplify_permutation (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple *def_stmt;
  tree op0, op1, op2, op3, arg0, arg1;
  enum tree_code code;
  bool single_use_op0 = false;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);

  op0 = gimple_assign_rhs1 (stmt);
  op1 = gimple_assign_rhs2 (stmt);
  op2 = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (op2) != VECTOR_CST)
    return 0;

  if (TREE_CODE (op0) == VECTOR_CST)
    {
      code = VECTOR_CST;
      arg0 = op0;
    }
  else if (TREE_CODE (op0) == SSA_NAME)
    {
      def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return 0;

      code = gimple_assign_rhs_code (def_stmt);
      arg0 = gimple_assign_rhs1 (def_stmt);
    }
  else
    return 0;

  /* Two consecutive shuffles.  */
  if (code == VEC_PERM_EXPR)
    {
      tree orig;
      int ident;

      if (op0 != op1)
        return 0;
      op3 = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (op3) != VECTOR_CST)
        return 0;
      ident = is_combined_permutation_identity (op3, op2);
      if (!ident)
        return 0;
      orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
                          : gimple_assign_rhs2 (def_stmt);
      gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
      gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
      gimple_set_num_ops (stmt, 2);
      update_stmt (stmt);
      return remove_prop_source_from_use (op0) ? 2 : 1;
    }

  /* Shuffle of a constructor.  */
  else if (code == CONSTRUCTOR || code == VECTOR_CST)
    {
      tree opt;
      bool ret = false;
      if (op0 != op1)
        {
          if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
            return 0;

          if (TREE_CODE (op1) == VECTOR_CST)
            arg1 = op1;
          else if (TREE_CODE (op1) == SSA_NAME)
            {
              enum tree_code code2;

              gimple *def_stmt2 = get_prop_source_stmt (op1, true, NULL);
              if (!def_stmt2 || !can_propagate_from (def_stmt2))
                return 0;

              code2 = gimple_assign_rhs_code (def_stmt2);
              if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
                return 0;
              arg1 = gimple_assign_rhs1 (def_stmt2);
            }
          else
            return 0;
        }
      else
        {
          /* Already used twice in this statement.  */
          if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
            return 0;
          arg1 = arg0;
        }
      opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
      if (!opt
          || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
        return 0;
      gimple_assign_set_rhs_from_tree (gsi, opt);
      update_stmt (gsi_stmt (*gsi));
      if (TREE_CODE (op0) == SSA_NAME)
        ret = remove_prop_source_from_use (op0);
      if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
        ret |= remove_prop_source_from_use (op1);
      return ret ? 2 : 1;
    }

  return 0;
}
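
/* For illustration only (hypothetical GIMPLE): in

     v_1 = VEC_PERM_EXPR <a_2, a_2, mask1>;
     v_3 = VEC_PERM_EXPR <v_1, v_1, mask2>;

   if the constant masks combine to an identity, v_3 is rewritten as a
   plain copy of a_2 and the intermediate shuffle becomes dead.  */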
/* Recognize a VEC_PERM_EXPR.  Returns true if there were any changes.  */

static bool
simplify_vector_constructor (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple *def_stmt;
  tree op, op2, orig[2], type, elem_type;
  unsigned elem_size, i;
  unsigned HOST_WIDE_INT nelts;
  enum tree_code code, conv_code;
  constructor_elt *elt;
  bool maybe_ident;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);

  op = gimple_assign_rhs1 (stmt);
  type = TREE_TYPE (op);
  gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);

  if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
    return false;
  elem_type = TREE_TYPE (type);
  elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));

  vec_perm_builder sel (nelts, nelts, 1);
  orig[0] = NULL;
  orig[1] = NULL;
  conv_code = ERROR_MARK;
  maybe_ident = true;
  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
    {
      tree ref, op1;

      if (i >= nelts)
        return false;

      if (TREE_CODE (elt->value) != SSA_NAME)
        return false;
      def_stmt = get_prop_source_stmt (elt->value, false, NULL);
      if (!def_stmt)
        return false;
      code = gimple_assign_rhs_code (def_stmt);
      if (code == FLOAT_EXPR
          || code == FIX_TRUNC_EXPR)
        {
          op1 = gimple_assign_rhs1 (def_stmt);
          if (conv_code == ERROR_MARK)
            {
              if (maybe_ne (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (elt->value))),
                            GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op1)))))
                return false;
              conv_code = code;
            }
          else if (conv_code != code)
            return false;
          if (TREE_CODE (op1) != SSA_NAME)
            return false;
          def_stmt = SSA_NAME_DEF_STMT (op1);
          if (! is_gimple_assign (def_stmt))
            return false;
          code = gimple_assign_rhs_code (def_stmt);
        }
      if (code != BIT_FIELD_REF)
        return false;
      op1 = gimple_assign_rhs1 (def_stmt);
      ref = TREE_OPERAND (op1, 0);
      unsigned int j;
      for (j = 0; j < 2; ++j)
        {
          if (!orig[j])
            {
              if (TREE_CODE (ref) != SSA_NAME)
                return false;
              if (! VECTOR_TYPE_P (TREE_TYPE (ref))
                  || ! useless_type_conversion_p (TREE_TYPE (op1),
                                                  TREE_TYPE (TREE_TYPE (ref))))
                return false;
              if (j && !useless_type_conversion_p (TREE_TYPE (orig[0]),
                                                   TREE_TYPE (ref)))
                return false;
              break;
            }
          else if (ref == orig[j])
            break;
        }
      if (j == 2)
        return false;

      unsigned int elt;
      if (maybe_ne (bit_field_size (op1), elem_size)
          || !constant_multiple_p (bit_field_offset (op1), elem_size, &elt))
        return false;
      if (j)
        elt += nelts;
      if (elt != i)
        maybe_ident = false;
      sel.quick_push (elt);
      orig[j] = ref;
    }
  if (i < nelts)
    return false;

  if (! VECTOR_TYPE_P (TREE_TYPE (orig[0]))
      || maybe_ne (TYPE_VECTOR_SUBPARTS (type),
                   TYPE_VECTOR_SUBPARTS (TREE_TYPE (orig[0]))))
    return false;

  tree tem;
  if (conv_code != ERROR_MARK
      && (! supportable_convert_operation (conv_code, type,
                                           TREE_TYPE (orig[0]),
                                           &tem, &conv_code)
          || conv_code == CALL_EXPR))
    return false;

  if (maybe_ident)
    {
      if (conv_code == ERROR_MARK)
        gimple_assign_set_rhs_from_tree (gsi, orig[0]);
      else
        gimple_assign_set_rhs_with_ops (gsi, conv_code, orig[0],
                                        NULL_TREE, NULL_TREE);
    }
  else
    {
      tree mask_type;

      vec_perm_indices indices (sel, orig[1] ? 2 : 1, nelts);
      if (!can_vec_perm_const_p (TYPE_MODE (type), indices))
        return false;
      mask_type
        = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
                             nelts);
      if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
          || maybe_ne (GET_MODE_SIZE (TYPE_MODE (mask_type)),
                       GET_MODE_SIZE (TYPE_MODE (type))))
        return false;
      op2 = vec_perm_indices_to_tree (mask_type, indices);
      if (!orig[1])
        orig[1] = orig[0];
      if (conv_code == ERROR_MARK)
        gimple_assign_set_rhs_with_ops (gsi, VEC_PERM_EXPR, orig[0],
                                        orig[1], op2);
      else
        {
          gimple *perm
            = gimple_build_assign (make_ssa_name (TREE_TYPE (orig[0])),
                                   VEC_PERM_EXPR, orig[0], orig[1], op2);
          orig[0] = gimple_assign_lhs (perm);
          gsi_insert_before (gsi, perm, GSI_SAME_STMT);
          gimple_assign_set_rhs_with_ops (gsi, conv_code, orig[0],
                                          NULL_TREE, NULL_TREE);
        }
    }
  update_stmt (gsi_stmt (*gsi));
  return true;
}
/* Primitive "lattice" function for gimple_simplify.  */

static tree
fwprop_ssa_val (tree name)
{
  /* First valueize NAME.  */
  if (TREE_CODE (name) == SSA_NAME
      && SSA_NAME_VERSION (name) < lattice.length ())
    {
      tree val = lattice[SSA_NAME_VERSION (name)];
      if (val)
	name = val;
    }
  /* We continue matching along SSA use-def edges for SSA names
     that are not single-use.  Currently there are no patterns
     that would cause any issues with that.  */
  return name;
}
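/* A sketch of the effect: once the lattice records that a_1 equals
   b_2, folding a later statement such as

     c_3 = a_1 + 1;

   with fold_stmt (&gsi, fwprop_ssa_val) matches simplification
   patterns as if the statement were c_3 = b_2 + 1.  */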
/* Main entry point for the forward propagation and statement combine
   optimizer.  */

namespace {

const pass_data pass_data_forwprop =
{
  GIMPLE_PASS, /* type */
  "forwprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FORWPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_forwprop : public gimple_opt_pass
{
public:
  pass_forwprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_forwprop, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_forwprop (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_forwprop; }
  virtual unsigned int execute (function *);

}; // class pass_forwprop
unsigned int
pass_forwprop::execute (function *fun)
{
  unsigned int todoflags = 0;

  cfg_changed = false;

  /* Combine stmts with the stmts defining their operands.  Do that
     in an order that guarantees visiting SSA defs before SSA uses.  */
  lattice.create (num_ssa_names);
  lattice.quick_grow_cleared (num_ssa_names);
  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
  int postorder_num = pre_and_rev_post_order_compute_fn (cfun, NULL,
							 postorder, false);
  auto_vec<gimple *, 4> to_fixup;
  to_purge = BITMAP_ALLOC (NULL);
  for (int i = 0; i < postorder_num; ++i)
    {
      gimple_stmt_iterator gsi;
      basic_block bb = BASIC_BLOCK_FOR_FN (fun, postorder[i]);
      /* Propagate into PHIs and record degenerate ones in the lattice.  */
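      /* E.g. (a sketch): the degenerate
	   x_3 = PHI <y_1(2), y_1(4)>
	 has all arguments equal after valueization, so y_1 is
	 recorded as the lattice value of x_3.  */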
      for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
	   gsi_next (&si))
	{
	  gphi *phi = si.phi ();
	  tree res = gimple_phi_result (phi);
	  if (virtual_operand_p (res))
	    continue;

	  use_operand_p use_p;
	  ssa_op_iter it;
	  tree first = NULL_TREE;
	  bool all_same = true;
	  FOR_EACH_PHI_ARG (use_p, phi, it, SSA_OP_USE)
	    {
	      tree use = USE_FROM_PTR (use_p);
	      tree tem = fwprop_ssa_val (use);
	      if (! first)
		first = tem;
	      else if (! operand_equal_p (first, tem, 0))
		all_same = false;
	      if (tem != use
		  && may_propagate_copy (use, tem))
		propagate_value (use_p, tem);
	    }
	  if (all_same)
	    fwprop_set_lattice_val (res, first);
	}
      /* Apply forward propagation to all stmts in the basic-block.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree lhs, rhs;
	  enum tree_code code;

	  if (!is_gimple_assign (stmt))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  lhs = gimple_assign_lhs (stmt);
	  rhs = gimple_assign_rhs1 (stmt);
	  code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE (lhs) != SSA_NAME
	      || has_zero_uses (lhs))
	    {
	      gsi_next (&gsi);
	      continue;
	    }
	  /* If this statement sets an SSA_NAME to an address,
	     try to propagate the address into the uses of the SSA_NAME.  */
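	  /* E.g. (a sketch): given ptr_1 = &a_2[4], a use like *ptr_1
	     can be rewritten to a_2[4], after which ptr_1's definition
	     is dead and is removed just below.  */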
	  if (code == ADDR_EXPR
	      /* Handle pointer conversions on invariant addresses
		 as well, as this is valid gimple.  */
	      || (CONVERT_EXPR_CODE_P (code)
		  && TREE_CODE (rhs) == ADDR_EXPR
		  && POINTER_TYPE_P (TREE_TYPE (lhs))))
	    {
	      tree base = get_base_address (TREE_OPERAND (rhs, 0));
	      if ((!base
		   || !DECL_P (base)
		   || decl_address_invariant_p (base))
		  && !stmt_references_abnormal_ssa_name (stmt)
		  && forward_propagate_addr_expr (lhs, rhs, true))
		{
		  fwprop_invalidate_lattice (gimple_get_lhs (stmt));
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (code == POINTER_PLUS_EXPR)
	    {
	      tree off = gimple_assign_rhs2 (stmt);
	      if (TREE_CODE (off) == INTEGER_CST
		  && can_propagate_from (stmt)
		  && !simple_iv_increment_p (stmt)
		  /* ??? Better adjust the interface to that function
		     instead of building new trees here.  */
		  && forward_propagate_addr_expr
		       (lhs,
			build1_loc (gimple_location (stmt),
				    ADDR_EXPR, TREE_TYPE (rhs),
				    fold_build2 (MEM_REF,
						 TREE_TYPE (TREE_TYPE (rhs)),
						 rhs,
						 fold_convert (ptr_type_node,
							       off))),
			true))
		{
		  fwprop_invalidate_lattice (gimple_get_lhs (stmt));
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else if (is_gimple_min_invariant (rhs))
		{
		  /* Make sure to fold &a[0] + off_1 here.  */
		  fold_stmt_inplace (&gsi);
		  update_stmt (stmt);
		  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
		    gsi_next (&gsi);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
		   && gimple_assign_load_p (stmt)
		   && !gimple_has_volatile_ops (stmt)
		   && (TREE_CODE (gimple_assign_rhs1 (stmt))
		       != TARGET_MEM_REF)
		   && !stmt_can_throw_internal (stmt))
	    {
	      /* Rewrite loads used only in real/imagpart extractions to
		 component-wise loads.  */
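	      /* I.e. (a sketch):
		   t_1 = *p_2;
		   r_3 = REALPART_EXPR <t_1>;
		 becomes r_3 = REALPART_EXPR <*p_2>, so only the used
		 component is loaded.  */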
	      use_operand_p use_p;
	      imm_use_iterator iter;
	      bool rewrite = true;
	      FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
		{
		  gimple *use_stmt = USE_STMT (use_p);
		  if (is_gimple_debug (use_stmt))
		    continue;
		  if (!is_gimple_assign (use_stmt)
		      || (gimple_assign_rhs_code (use_stmt) != REALPART_EXPR
			  && gimple_assign_rhs_code (use_stmt)
			     != IMAGPART_EXPR))
		    {
		      rewrite = false;
		      break;
		    }
		}
	      if (rewrite)
		{
		  gimple *use_stmt;
		  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
		    {
		      if (is_gimple_debug (use_stmt))
			{
			  if (gimple_debug_bind_p (use_stmt))
			    {
			      gimple_debug_bind_reset_value (use_stmt);
			      update_stmt (use_stmt);
			    }
			  continue;
			}

		      tree new_rhs = build1 (gimple_assign_rhs_code (use_stmt),
					     TREE_TYPE (TREE_TYPE (rhs)),
					     unshare_expr (rhs));
		      gimple *new_stmt
			= gimple_build_assign (gimple_assign_lhs (use_stmt),
					       new_rhs);

		      location_t loc = gimple_location (use_stmt);
		      gimple_set_location (new_stmt, loc);
		      gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
		      unlink_stmt_vdef (use_stmt);
		      gsi_remove (&gsi2, true);

		      gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
		    }

		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (code == COMPLEX_EXPR)
	    {
	      /* Rewrite stores of a single-use complex build expression
		 to component-wise stores.  */
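	      /* I.e. (a sketch):
		   t_1 = COMPLEX_EXPR <r_2, i_3>;
		   *p_4 = t_1;
		 becomes
		   REALPART_EXPR <*p_4> = r_2;
		   IMAGPART_EXPR <*p_4> = i_3;  */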
	      use_operand_p use_p;
	      gimple *use_stmt;
	      if (single_imm_use (lhs, &use_p, &use_stmt)
		  && gimple_store_p (use_stmt)
		  && !gimple_has_volatile_ops (use_stmt)
		  && is_gimple_assign (use_stmt)
		  && (TREE_CODE (gimple_assign_lhs (use_stmt))
		      != TARGET_MEM_REF))
		{
		  tree use_lhs = gimple_assign_lhs (use_stmt);
		  tree new_lhs = build1 (REALPART_EXPR,
					 TREE_TYPE (TREE_TYPE (use_lhs)),
					 unshare_expr (use_lhs));
		  gimple *new_stmt = gimple_build_assign (new_lhs, rhs);
		  location_t loc = gimple_location (use_stmt);
		  gimple_set_location (new_stmt, loc);
		  gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
		  gimple_set_vdef (new_stmt,
				   make_ssa_name (gimple_vop (cfun)));
		  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		  gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
		  gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
		  gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);

		  new_lhs = build1 (IMAGPART_EXPR,
				    TREE_TYPE (TREE_TYPE (use_lhs)),
				    unshare_expr (use_lhs));
		  gimple_assign_set_lhs (use_stmt, new_lhs);
		  gimple_assign_set_rhs1 (use_stmt, gimple_assign_rhs2 (stmt));
		  update_stmt (use_stmt);

		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else
	    gsi_next (&gsi);
	}
      /* Combine stmts with the stmts defining their operands.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple *stmt = gsi_stmt (gsi);
	  gimple *orig_stmt = stmt;
	  bool changed = false;
	  bool was_noreturn = (is_gimple_call (stmt)
			       && gimple_call_noreturn_p (stmt));

	  /* Mark stmt as potentially needing revisiting.  */
	  gimple_set_plf (stmt, GF_PLF_1, false);

	  if (fold_stmt (&gsi, fwprop_ssa_val))
	    {
	      changed = true;
	      stmt = gsi_stmt (gsi);
	      if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
		bitmap_set_bit (to_purge, bb->index);
	      if (!was_noreturn
		  && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
		to_fixup.safe_push (stmt);
	      /* Cleanup the CFG if we simplified a condition to
		 true or false.  */
	      if (gcond *cond = dyn_cast <gcond *> (stmt))
		if (gimple_cond_true_p (cond)
		    || gimple_cond_false_p (cond))
		  cfg_changed = true;
	    }
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_ASSIGN:
	      {
		tree rhs1 = gimple_assign_rhs1 (stmt);
		enum tree_code code = gimple_assign_rhs_code (stmt);

		if (code == COND_EXPR
		    || code == VEC_COND_EXPR)
		  {
		    /* In this case the entire COND_EXPR is in rhs1.  */
		    if (forward_propagate_into_cond (&gsi))
		      {
			changed = true;
			stmt = gsi_stmt (gsi);
		      }
		  }
		else if (TREE_CODE_CLASS (code) == tcc_comparison)
		  {
		    int did_something;
		    did_something = forward_propagate_into_comparison (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
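		/* simplify_rotate recognizes open-coded rotates such as
		   (a sketch, for 32-bit unsigned x_1):
		     t_2 = x_1 << 4;
		     t_3 = x_1 >> 28;
		     r_4 = t_2 | t_3;
		   and turns r_4 into a single rotate of x_1.  */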
		else if ((code == PLUS_EXPR
			  || code == BIT_IOR_EXPR
			  || code == BIT_XOR_EXPR)
			 && simplify_rotate (&gsi))
		  changed = true;
		else if (code == VEC_PERM_EXPR)
		  {
		    int did_something = simplify_permutation (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
		else if (code == BIT_FIELD_REF)
		  changed = simplify_bitfield_ref (&gsi);
		else if (code == CONSTRUCTOR
			 && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
		  changed = simplify_vector_constructor (&gsi);
		break;
	      }

	    case GIMPLE_SWITCH:
	      changed = simplify_gimple_switch (as_a <gswitch *> (stmt));
	      break;

	    case GIMPLE_COND:
	      {
		int did_something
		  = forward_propagate_into_gimple_cond (as_a <gcond *> (stmt));
		if (did_something == 2)
		  cfg_changed = true;
		changed = did_something != 0;
		break;
	      }

	    case GIMPLE_CALL:
	      {
		tree callee = gimple_call_fndecl (stmt);
		if (callee != NULL_TREE
		    && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
		  changed = simplify_builtin_call (&gsi, callee);
		break;
	      }

	    default:;
	    }
	  if (changed)
	    {
	      /* If the stmt changed then re-visit it and the statements
		 inserted before it.  */
	      for (; !gsi_end_p (gsi); gsi_prev (&gsi))
		if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
		  break;
	      if (gsi_end_p (gsi))
		gsi = gsi_start_bb (bb);
	      else
		gsi_next (&gsi);
	    }
	  else
	    {
	      /* Stmt no longer needs to be revisited.  */
	      gimple_set_plf (stmt, GF_PLF_1, true);

	      /* Fill up the lattice.  */
	      if (gimple_assign_single_p (stmt))
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  tree rhs = gimple_assign_rhs1 (stmt);
		  if (TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree val = lhs;
		      if (TREE_CODE (rhs) == SSA_NAME)
			val = fwprop_ssa_val (rhs);
		      else if (is_gimple_min_invariant (rhs))
			val = rhs;
		      fwprop_set_lattice_val (lhs, val);
		    }
		}

	      gsi_next (&gsi);
	    }
	}
    }
  free (postorder);
  lattice.release ();
  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!to_fixup.is_empty ())
    {
      gimple *stmt = to_fixup.pop ();
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}
      cfg_changed |= fixup_noreturn_call (stmt);
    }

  cfg_changed |= gimple_purge_all_dead_eh_edges (to_purge);
  BITMAP_FREE (to_purge);

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
} // anon namespace

gimple_opt_pass *
make_pass_forwprop (gcc::context *ctxt)
{
  return new pass_forwprop (ctxt);
}