/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "optabs-query.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-dom.h"
#include "builtins.h"
#include "tree-cfgcleanup.h"
#include "cfganal.h"
#include "optabs-tree.h"

/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or
     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither have volatile on them):
     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or
     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0;

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */
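
/* As a small end-to-end illustration (an informal sketch, assuming
   a is an array of 4-byte ints):

     p = &a[0];
     q = p + 8;
     res = *q;

   first has the ADDR_EXPR propagated into the POINTER_PLUS_EXPR,
   rewriting q as &a[2], after which the dereference folds to a[2]
   and the now-dead pointer temporaries are removed.  */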

static bool forward_propagate_addr_expr (tree, tree, bool);

/* Set to true if we delete dead edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple *stmt);

static bitmap to_purge;

/* Const-and-copy lattice.  */
static vec<tree> lattice;

/* Set the lattice entry for NAME to VAL.  */

static void
fwprop_set_lattice_val (tree name, tree val)
{
  if (TREE_CODE (name) == SSA_NAME)
    {
      if (SSA_NAME_VERSION (name) >= lattice.length ())
        {
          lattice.reserve (num_ssa_names - lattice.length ());
          lattice.quick_grow_cleared (num_ssa_names);
        }
      lattice[SSA_NAME_VERSION (name)] = val;
    }
}

/* Invalidate the lattice entry for NAME, done when releasing SSA names.  */

static void
fwprop_invalidate_lattice (tree name)
{
  if (name
      && TREE_CODE (name) == SSA_NAME
      && SSA_NAME_VERSION (name) < lattice.length ())
    lattice[SSA_NAME_VERSION (name)] = NULL_TREE;
}

/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL if there is no such one.
   If SINGLE_USE_ONLY is set considers only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple *
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (name);

      if (!has_single_use (name))
        {
          single_use = false;
          if (single_use_only)
            return NULL;
        }

      /* If name is defined by a PHI node or is the default def, bail out.  */
      if (!is_gimple_assign (def_stmt))
        return NULL;

      /* If def_stmt is a simple copy, continue looking.  */
      if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
        name = gimple_assign_rhs1 (def_stmt);
      else
        {
          if (!single_use_only && single_use_p)
            *single_use_p = single_use;

          return def_stmt;
        }
    }
  while (1);
}
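
/* For instance, given the copy chain

     a_1 = b_2 + 1;
     c_3 = a_1;
     d_4 = c_3;

   get_prop_source_stmt (d_4, false, &p) skips the two trivial copies
   and returns the statement a_1 = b_2 + 1, with *p telling whether
   every link in the chain had a single use.  */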

/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple *def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can be always propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we can not apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}

/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining
   statements.  If NAME was replaced in its only use then this function
   can be used to clean up dead stmts.  The function handles already
   released SSA names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple *stmt;
  bool cfg_changed = false;

  do
    {
      basic_block bb;

      if (SSA_NAME_IN_FREE_LIST (name)
          || SSA_NAME_IS_DEFAULT_DEF (name)
          || !has_zero_uses (name))
        return cfg_changed;

      stmt = SSA_NAME_DEF_STMT (name);
      if (gimple_code (stmt) == GIMPLE_PHI
          || gimple_has_side_effects (stmt))
        return cfg_changed;

      bb = gimple_bb (stmt);
      gsi = gsi_for_stmt (stmt);
      unlink_stmt_vdef (stmt);
      if (gsi_remove (&gsi, true))
        bitmap_set_bit (to_purge, bb->index);
      fwprop_invalidate_lattice (gimple_get_lhs (stmt));
      release_defs (stmt);

      name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
    }
  while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}

/* Return the rhs of a gassign *STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple *stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt),
                            gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}

/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple *stmt, enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}

/* Combine the comparison OP0 CODE OP1 with the defining statements
   of its operands.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (gimple *stmt,
                                     enum tree_code code, tree type,
                                     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple *def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
          bool invariant_only_p = !single_use0_p;

          rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);

          /* Always combine comparisons or conversions from booleans.  */
          if (TREE_CODE (op1) == INTEGER_CST
              && ((CONVERT_EXPR_CODE_P (def_code)
                   && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs0, 0)))
                      == BOOLEAN_TYPE)
                  || TREE_CODE_CLASS (def_code) == tcc_comparison))
            invariant_only_p = false;

          tmp = combine_cond_expr_cond (stmt, code, type,
                                        rhs0, op1, invariant_only_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple *def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        op0, rhs1, !single_use1_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
                                  rhs0, rhs1,
                                  !(single_use0_p && single_use1_p));

  return tmp;
}

/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
                                             gimple_assign_rhs_code (stmt),
                                             type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}

/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gcond *stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
                                             boolean_type_node,
                                             rhs1, rhs2);
  if (tmp)
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_gimple_expr (dump_file, stmt, 0, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
           && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
           && integer_zerop (rhs2))
          || (code == NE_EXPR
              && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}
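
/* For example, a boolean test

     if (b_1 == 0) goto bb2; else goto bb3;

   is canonicalized by the code above into

     if (b_1 != 0) goto bb3; else goto bb2;

   by flipping the true/false successor edges, so later passes only
   have to deal with the single != 0 form of boolean conditions.  */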

/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns nonzero if the stmt was changed.  */

static int
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
                                               TREE_TYPE (cond),
                                               TREE_OPERAND (cond, 0),
                                               TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      enum tree_code def_code;
      tree name = cond;
      gimple *def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return 0;

      def_code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (def_code) == tcc_comparison)
        tmp = fold_build2_loc (gimple_location (def_stmt),
                               def_code,
                               TREE_TYPE (cond),
                               gimple_assign_rhs1 (def_stmt),
                               gimple_assign_rhs2 (def_stmt));
    }

  if (tmp
      && is_gimple_condexpr (tmp))
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, cond, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
                                  : integer_onep (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
      else if (integer_zerop (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
      else
        gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return 1;
    }

  return 0;
}

/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple *stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
    bitmap_set_bit (to_purge, gimple_bb (stmt)->index);

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}

/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation can
   be not totally successful, yet things may have been changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple *use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Do not perform copy-propagation but recurse through copy chains.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == SSA_NAME)
    return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

  /* The use statement could be a conversion.  Recurse to the uses of the
     lhs as copyprop does not copy through pointer to integer to pointer
     conversions and FRE does not catch all cases either.
     Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && CONVERT_EXPR_CODE_P (rhs_code))
    {
      /* If there is a point in a conversion chain where the types match
         so we can remove a conversion re-materialize the address here
         and stop.  */
      if (single_use_p
          && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
        {
          gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
          gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
          return true;
        }

      /* Else recurse if the conversion preserves the address value.  */
      if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
           || POINTER_TYPE_P (TREE_TYPE (lhs)))
          && (TYPE_PRECISION (TREE_TYPE (lhs))
              >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
        return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

      return false;
    }

  /* If this isn't a conversion chain from this on we only can propagate
     into compatible pointer contexts.  */
  if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
    return false;

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
         for further propagation.  Simply rely on fold building that
         and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
                                 def_rhs,
                                 fold_convert (ptr_type_node,
                                               gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
        return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
                                                    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
         bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
          && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
        return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
                                        new_def_rhs);
      else if (is_gimple_min_invariant (new_def_rhs))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR, new_def_rhs);
      else
        return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          offset_int off = mem_ref_offset (lhs);
          tree new_ptr;
          off += def_rhs_offset;
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (lhs, 0) = new_ptr;
          TREE_OPERAND (lhs, 1)
            = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the only use.  */
          if (single_use_p)
            return true;
        }
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the LHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (lhs, 1))
               && ((gimple_assign_lhs (use_stmt) == lhs
                    && useless_type_conversion_p
                         (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
                          TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
                   || types_compatible_p (TREE_TYPE (lhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
               /* Don't forward anything into clobber stmts if it would result
                  in the lhs no longer being a MEM_REF.  */
               && (!gimple_clobber_p (use_stmt)
                   || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_lhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (lhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
          new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *lhsp = new_lhs;
          TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
          *def_rhs_basep = saved;
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the
             only use.  */
          if (single_use_p)
            return true;
        }
      else
        /* We can have a struct assignment dereferencing our name twice.
           Note that we didn't propagate into the lhs to not falsely
           claim we did when propagating into the rhs.  */
        res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
  if (TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  while (handled_component_p (*rhsp))
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          offset_int off = mem_ref_offset (rhs);
          tree new_ptr;
          off += def_rhs_offset;
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (rhs, 0) = new_ptr;
          TREE_OPERAND (rhs, 1)
            = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the RHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (rhs, 1))
               && ((gimple_assign_rhs1 (use_stmt) == rhs
                    && useless_type_conversion_p
                         (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
                   || types_compatible_p (TREE_TYPE (rhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_rhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (rhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
          new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *rhsp = new_rhs;
          TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
          *def_rhs_basep = saved;
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
                                 ADDR_EXPR, TREE_TYPE (def_rhs),
                                 fold_build2 (MEM_REF,
                                              TREE_TYPE (TREE_TYPE (def_rhs)),
                                              unshare_expr (def_rhs),
                                              fold_convert (ptr_type_node,
                                                            rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  return false;
}

/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
   the single use in the previous invocation.  Pass true when calling
   this as toplevel.

   Returns true, if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
{
  imm_use_iterator iter;
  gimple *use_stmt;
  bool all = true;
  bool single_use_p = parent_single_use_p && has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (!is_gimple_assign (use_stmt))
        {
          if (!is_gimple_debug (use_stmt))
            all = false;
          continue;
        }

      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
      result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                              single_use_p);
      /* If the use has moved to a different statement adjust
         the update machinery for the old statement too.  */
      if (use_stmt != gsi_stmt (gsi))
        {
          update_stmt (use_stmt);
          use_stmt = gsi_stmt (gsi);
        }
      update_stmt (use_stmt);
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          fwprop_invalidate_lattice (gimple_get_lhs (use_stmt));
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  return all && has_zero_uses (name);
}

/* Helper function for simplify_gimple_switch.  Remove case labels that
   have values outside the range of the new type.  */

static void
simplify_gimple_switch_label_vec (gswitch *stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  auto_vec<tree> labels (branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  */
  for (i = 1; i < branch_num; i++)
    labels.quick_push (gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = labels.length ();
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
         out-of-range for INDEX_TYPE.  Push one label and let the
         CFG cleanups deal with this further.  */
      if (len == 0)
        {
          tree label, elt;

          label = CASE_LABEL (gimple_switch_default_label (stmt));
          elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
          labels.quick_push (elt);
          len = 1;
        }

      for (i = 0; i < labels.length (); i++)
        gimple_switch_set_label (stmt, i + 1, labels[i]);
      for (i++ ; i < branch_num; i++)
        gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
        {
          tree elt = gimple_switch_label (stmt, i);
          basic_block target = label_to_block (CASE_LABEL (elt));
          bitmap_set_bit (target_blocks, target->index);
        }
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
        {
          if (! bitmap_bit_p (target_blocks, e->dest->index))
            {
              remove_edge (e);
              cfg_changed = true;
              free_dominance_info (CDI_DOMINATORS);
            }
          else
            ei_next (&ei);
        }
      BITMAP_FREE (target_blocks);
    }
}

/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gswitch *stmt)
{
  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  tree cond = gimple_switch_index (stmt);
  if (TREE_CODE (cond) == SSA_NAME)
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (cond);
      if (gimple_assign_cast_p (def_stmt))
        {
          tree def = gimple_assign_rhs1 (def_stmt);
          if (TREE_CODE (def) != SSA_NAME)
            return false;

          /* If we have an extension or sign-change that preserves the
             values we check against then we can copy the source value into
             the switch.  */
          tree ti = TREE_TYPE (def);
          if (INTEGRAL_TYPE_P (ti)
              && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
            {
              size_t n = gimple_switch_num_labels (stmt);
              tree min = NULL_TREE, max = NULL_TREE;
              if (n > 1)
                {
                  min = CASE_LOW (gimple_switch_label (stmt, 1));
                  if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
                    max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
                  else
                    max = CASE_LOW (gimple_switch_label (stmt, n - 1));
                }
              if ((!min || int_fits_type_p (min, ti))
                  && (!max || int_fits_type_p (max, ti)))
                {
                  gimple_switch_set_index (stmt, def);
                  simplify_gimple_switch_label_vec (stmt, ti);
                  update_stmt (stmt);
                  return true;
                }
            }
        }
    }

  return false;
}
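
/* For instance, for

     short s_1 = ...;
     i_2 = (int) s_1;
     switch (i_2) { case 1: ... case 100: ... }

   all case values fit in the narrower type of s_1, so the cast is
   bypassed and the switch tests s_1 directly; labels that cannot fit
   the narrower type would have been removed by the helper above.  */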

/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple *stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
         twice, to handle ADDR_EXPR directly in p1/p2,
         SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
         on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
        {
          if (!POINTER_TYPE_P (TREE_TYPE (p)))
            break;
          if (TREE_CODE (p) == ADDR_EXPR)
            {
              tree q = TREE_OPERAND (p, 0);
              HOST_WIDE_INT offset;
              tree base = get_addr_base_and_unit_offset (q, &offset);
              if (base)
                {
                  q = base;
                  if (offset)
                    off = size_binop (PLUS_EXPR, off, size_int (offset));
                }
              if (TREE_CODE (q) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
                {
                  p = TREE_OPERAND (q, 0);
                  off = size_binop (PLUS_EXPR, off,
                                    wide_int_to_tree (sizetype,
                                                      mem_ref_offset (q)));
                }
              else
                {
                  exps[i][j] = q;
                  offs[i][j++] = off;
                  break;
                }
            }
          if (TREE_CODE (p) != SSA_NAME)
            break;
          exps[i][j] = p;
          offs[i][j++] = off;
          if (j == CPD_ITERATIONS)
            break;
          stmt = SSA_NAME_DEF_STMT (p);
          if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
            break;
          code = gimple_assign_rhs_code (stmt);
          if (code == POINTER_PLUS_EXPR)
            {
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
                break;
              off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
              p = gimple_assign_rhs1 (stmt);
            }
          else if (code == ADDR_EXPR || CONVERT_EXPR_CODE_P (code))
            p = gimple_assign_rhs1 (stmt);
          else
            break;
        }
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
        return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
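
/* For example, assuming char buf[16] and

     p1_1 = &buf[2];
     p2_2 = p1_1 + 5;

   both walks reach the common base &buf with offsets 2 and 7, so
   constant_pointer_difference (p1_1, p2_2) returns 5.  If no common
   base is found within CPD_ITERATIONS steps, the result is
   NULL_TREE.  */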

/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
   memcpy (p, "abcd", 4);
   memset (p + 4, ' ', 3);
   into
   memcpy (p, "abcd   ", 7);
   call if the latter can be stored by pieces during expansion.  */

static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple *stmt1, *stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
          || gimple_call_lhs (stmt2)
          || CHAR_BIT != 8
          || BITS_PER_UNIT != 8)
        break;
      else
        {
          tree callee1;
          tree ptr1, src1, str1, off1, len1, lhs1;
          tree ptr2 = gimple_call_arg (stmt2, 0);
          tree val2 = gimple_call_arg (stmt2, 1);
          tree len2 = gimple_call_arg (stmt2, 2);
          tree diff, vdef, new_str_cst;
          gimple *use_stmt;
          unsigned int ptr1_align;
          unsigned HOST_WIDE_INT src_len;
          char *src_buf;
          use_operand_p use_p;

          if (!tree_fits_shwi_p (val2)
              || !tree_fits_uhwi_p (len2)
              || compare_tree_int (len2, 1024) == 1)
            break;
          if (is_gimple_call (stmt1))
            {
              /* If first stmt is a call, it needs to be memcpy
                 or mempcpy, with string literal as second argument and
                 constant length.  */
              callee1 = gimple_call_fndecl (stmt1);
              if (callee1 == NULL_TREE
                  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
                  || gimple_call_num_args (stmt1) != 3)
                break;
              if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
                  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
                break;
              ptr1 = gimple_call_arg (stmt1, 0);
              src1 = gimple_call_arg (stmt1, 1);
              len1 = gimple_call_arg (stmt1, 2);
              lhs1 = gimple_call_lhs (stmt1);
              if (!tree_fits_uhwi_p (len1))
                break;
              str1 = string_constant (src1, &off1);
              if (str1 == NULL_TREE)
                break;
              if (!tree_fits_uhwi_p (off1)
                  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
                  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
                                             - tree_to_uhwi (off1)) > 0
                  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
                  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
                     != TYPE_MODE (char_type_node))
                break;
            }
          else if (gimple_assign_single_p (stmt1))
            {
              /* Otherwise look for length 1 memcpy optimized into
                 assignment.  */
              ptr1 = gimple_assign_lhs (stmt1);
              src1 = gimple_assign_rhs1 (stmt1);
              if (TREE_CODE (ptr1) != MEM_REF
                  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
                  || !tree_fits_shwi_p (src1))
                break;
              ptr1 = build_fold_addr_expr (ptr1);
              callee1 = NULL_TREE;
              len1 = size_one_node;
              lhs1 = NULL_TREE;
              off1 = size_zero_node;
              str1 = NULL_TREE;
            }
          else
            break;

          diff = constant_pointer_difference (ptr1, ptr2);
          if (diff == NULL && lhs1 != NULL)
            {
              diff = constant_pointer_difference (lhs1, ptr2);
              if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
                  && diff != NULL)
                diff = size_binop (PLUS_EXPR, diff,
                                   fold_convert (sizetype, len1));
            }
          /* If the difference between the second and first destination pointer
             is not constant, or is bigger than memcpy length, bail out.  */
          if (diff == NULL
              || !tree_fits_uhwi_p (diff)
              || tree_int_cst_lt (len1, diff)
              || compare_tree_int (diff, 1024) == 1)
            break;

          /* Use maximum of difference plus memset length and memcpy length
             as the new memcpy length, if it is too big, bail out.  */
          src_len = tree_to_uhwi (diff);
          src_len += tree_to_uhwi (len2);
          if (src_len < tree_to_uhwi (len1))
            src_len = tree_to_uhwi (len1);
          if (src_len > 1024)
            break;

          /* If mempcpy value is used elsewhere, bail out, as mempcpy
             with bigger length will return different result.  */
          if (lhs1 != NULL_TREE
              && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
              && (TREE_CODE (lhs1) != SSA_NAME
                  || !single_imm_use (lhs1, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          /* If anything reads memory in between memcpy and memset
             call, the modified memcpy call might change it.  */
          vdef = gimple_vdef (stmt1);
          if (vdef != NULL
              && (!single_imm_use (vdef, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          ptr1_align = get_pointer_alignment (ptr1);
          /* Construct the new source string literal.  */
          src_buf = XALLOCAVEC (char, src_len + 1);
          if (callee1)
            memcpy (src_buf,
                    TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
                    tree_to_uhwi (len1));
          else
            src_buf[0] = tree_to_shwi (src1);
          memset (src_buf + tree_to_uhwi (diff),
                  tree_to_shwi (val2), tree_to_uhwi (len2));
          src_buf[src_len] = '\0';
          /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
             handle embedded '\0's.  */
          if (strlen (src_buf) != src_len)
            break;
          rtl_profile_for_bb (gimple_bb (stmt2));
          /* If the new memcpy wouldn't be emitted by storing the literal
             by pieces, this optimization might enlarge .rodata too much,
             as commonly used string literals couldn't be shared any
             longer.  */
          if (!can_store_by_pieces (src_len,
                                    builtin_strncpy_read_str,
                                    src_buf, ptr1_align, false))
            break;

          new_str_cst = build_string_literal (src_len, src_buf);
          if (callee1)
            {
              /* If STMT1 is a mem{,p}cpy call, adjust it and remove
                 memset call.  */
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                gimple_call_set_lhs (stmt1, NULL_TREE);
              gimple_call_set_arg (stmt1, 1, new_str_cst);
              gimple_call_set_arg (stmt1, 2,
                                   build_int_cst (TREE_TYPE (len1), src_len));
              update_stmt (stmt1);
              unlink_stmt_vdef (stmt2);
              gsi_remove (gsi_p, true);
              fwprop_invalidate_lattice (gimple_get_lhs (stmt2));
              release_defs (stmt2);
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                {
                  fwprop_invalidate_lattice (lhs1);
                  release_ssa_name (lhs1);
                }
              return true;
            }
          else
            {
              /* Otherwise, if STMT1 is length 1 memcpy optimized into
                 assignment, remove STMT1 and change memset call into
                 memcpy call.  */
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

              if (!is_gimple_val (ptr1))
                ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
                                                 true, GSI_SAME_STMT);
              gimple_call_set_fndecl (stmt2,
                                      builtin_decl_explicit (BUILT_IN_MEMCPY));
              gimple_call_set_arg (stmt2, 0, ptr1);
              gimple_call_set_arg (stmt2, 1, new_str_cst);
              gimple_call_set_arg (stmt2, 2,
                                   build_int_cst (TREE_TYPE (len2), src_len));
              unlink_stmt_vdef (stmt1);
              gsi_remove (&gsi, true);
              fwprop_invalidate_lattice (gimple_get_lhs (stmt1));
              release_defs (stmt1);
              update_stmt (stmt2);
              return true;
            }
        }
      break;
    default:
      break;
    }
  return false;
}
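
/* The gimple_assign_single_p path above also catches a length 1
   memcpy that was already turned into a plain character store, e.g.

     *p_1 = 97;
     memset (p_1 + 1, 98, 3);

   which is rewritten into the single call memcpy (p_1, "abbb", 4),
   provided the combined literal can be stored by pieces.  */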

/* Given a ssa_name in NAME see if it was defined by an assignment and
   set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
   to the second operand on the rhs.  */

static void
defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
{
  gimple *def;
  enum tree_code code1;
  tree arg11;
  tree arg21;
  tree arg31;
  enum gimple_rhs_class grhs_class;

  code1 = TREE_CODE (name);
  arg11 = name;
  arg21 = NULL_TREE;
  arg31 = NULL_TREE;
  grhs_class = get_gimple_rhs_class (code1);

  if (code1 == SSA_NAME)
    {
      def = SSA_NAME_DEF_STMT (name);

      if (def && is_gimple_assign (def)
          && can_propagate_from (def))
        {
          code1 = gimple_assign_rhs_code (def);
          arg11 = gimple_assign_rhs1 (def);
          arg21 = gimple_assign_rhs2 (def);
          arg31 = gimple_assign_rhs3 (def);
        }
    }
  else if (grhs_class != GIMPLE_SINGLE_RHS)
    code1 = ERROR_MARK;

  *code = code1;
  *arg1 = arg11;
  if (arg2)
    *arg2 = arg21;
  /* Ignore arg3 currently.  */
}
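
/* For instance, if x_1 is defined by

     x_1 = y_2 << 3;

   then defcodefor_name (x_1, &code, &arg1, &arg2) yields
   code == LSHIFT_EXPR, arg1 == y_2 and arg2 == 3.  For a name whose
   definition cannot be used (see can_propagate_from), code is simply
   TREE_CODE (name) and arg1 the name itself.  */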

/* Recognize rotation patterns.  Return true if a transformation
   applied, otherwise return false.

   We are looking for X with unsigned type T with bitsize B, OP being
   +, | or ^, some type T2 wider than T and
   (X << CNT1) OP (X >> CNT2)                           iff CNT1 + CNT2 == B
   ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2))     iff CNT1 + CNT2 == B
   (X << Y) OP (X >> (B - Y))
   (X << (int) Y) OP (X >> (int) (B - Y))
   ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
   ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
   (X << Y) | (X >> ((-Y) & (B - 1)))
   (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
   ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
   ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))

   and transform these into:
   X r<< CNT1
   X r<< Y

   Note, in the patterns with T2 type, the type of OP operands
   might be even a signed type, but should have precision B.  */

static bool
simplify_rotate (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg[2], rtype, rotcnt = NULL_TREE;
  tree def_arg1[2], def_arg2[2];
  enum tree_code def_code[2];
  tree lhs;
  int i;
  bool swapped_p = false;
  gimple *g;

  arg[0] = gimple_assign_rhs1 (stmt);
  arg[1] = gimple_assign_rhs2 (stmt);
  rtype = TREE_TYPE (arg[0]);

  /* Only create rotates in complete modes.  Other cases are not
     expanded properly.  */
  if (!INTEGRAL_TYPE_P (rtype)
      || TYPE_PRECISION (rtype) != GET_MODE_PRECISION (TYPE_MODE (rtype)))
    return false;

  for (i = 0; i < 2; i++)
    defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);

  /* Look through narrowing conversions.  */
  if (CONVERT_EXPR_CODE_P (def_code[0])
      && CONVERT_EXPR_CODE_P (def_code[1])
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
         == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
      && has_single_use (arg[0])
      && has_single_use (arg[1]))
    {
      for (i = 0; i < 2; i++)
        {
          arg[i] = def_arg1[i];
          defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
        }
    }

  /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR.  */
  for (i = 0; i < 2; i++)
    if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
      return false;
    else if (!has_single_use (arg[i]))
      return false;
  if (def_code[0] == def_code[1])
    return false;

  /* If we've looked through narrowing conversions before, look through
     widening conversions from unsigned type with the same precision
     as rtype here.  */
  if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
    for (i = 0; i < 2; i++)
      {
        tree tem;
        enum tree_code code;
        defcodefor_name (def_arg1[i], &code, &tem, NULL);
        if (!CONVERT_EXPR_CODE_P (code)
            || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
            || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
          return false;
        def_arg1[i] = tem;
      }
  /* Both shifts have to use the same first operand.  */
  if (TREE_CODE (def_arg1[0]) != SSA_NAME || def_arg1[0] != def_arg1[1])
    return false;
  if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
    return false;

  /* CNT1 + CNT2 == B case above.  */
  if (tree_fits_uhwi_p (def_arg2[0])
      && tree_fits_uhwi_p (def_arg2[1])
      && tree_to_uhwi (def_arg2[0])
         + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
    rotcnt = def_arg2[0];
  else if (TREE_CODE (def_arg2[0]) != SSA_NAME
           || TREE_CODE (def_arg2[1]) != SSA_NAME)
    return false;
  else
    {
      tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
      enum tree_code cdef_code[2];
      /* Look through conversion of the shift count argument.
         The C/C++ FE cast any shift count argument to integer_type_node.
         The only problem might be if the shift count type maximum value
         is equal or smaller than number of bits in rtype.  */
      for (i = 0; i < 2; i++)
        {
          def_arg2_alt[i] = def_arg2[i];
          defcodefor_name (def_arg2[i], &cdef_code[i],
                           &cdef_arg1[i], &cdef_arg2[i]);
          if (CONVERT_EXPR_CODE_P (cdef_code[i])
              && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
              && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
                 > floor_log2 (TYPE_PRECISION (rtype))
              && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
                 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (cdef_arg1[i]))))
            {
              def_arg2_alt[i] = cdef_arg1[i];
              defcodefor_name (def_arg2_alt[i], &cdef_code[i],
                               &cdef_arg1[i], &cdef_arg2[i]);
            }
        }
      for (i = 0; i < 2; i++)
        /* Check for one shift count being Y and the other B - Y,
           with optional casts.  */
        if (cdef_code[i] == MINUS_EXPR
            && tree_fits_shwi_p (cdef_arg1[i])
            && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
            && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
          {
            tree tem;
            enum tree_code code;

            if (cdef_arg2[i] == def_arg2[1 - i]
                || cdef_arg2[i] == def_arg2_alt[1 - i])
              {
                rotcnt = cdef_arg2[i];
                break;
              }
            defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
            if (CONVERT_EXPR_CODE_P (code)
                && INTEGRAL_TYPE_P (TREE_TYPE (tem))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   > floor_log2 (TYPE_PRECISION (rtype))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
                && (tem == def_arg2[1 - i]
                    || tem == def_arg2_alt[1 - i]))
              {
                rotcnt = tem;
                break;
              }
          }
        /* The above sequence isn't safe for Y being 0,
           because then one of the shifts triggers undefined behavior.
           This alternative is safe even for rotation count of 0.
           One shift count is Y and the other (-Y) & (B - 1).  */
        else if (cdef_code[i] == BIT_AND_EXPR
                 && tree_fits_shwi_p (cdef_arg2[i])
                 && tree_to_shwi (cdef_arg2[i])
                    == TYPE_PRECISION (rtype) - 1
                 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
                 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
          {
            tree tem;
            enum tree_code code;

            defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
            if (CONVERT_EXPR_CODE_P (code)
                && INTEGRAL_TYPE_P (TREE_TYPE (tem))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   > floor_log2 (TYPE_PRECISION (rtype))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem))))
              defcodefor_name (tem, &code, &tem, NULL);

            if (code == NEGATE_EXPR)
              {
                if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
                  {
                    rotcnt = tem;
                    break;
                  }
                defcodefor_name (tem, &code, &tem, NULL);
                if (CONVERT_EXPR_CODE_P (code)
                    && INTEGRAL_TYPE_P (TREE_TYPE (tem))
                    && TYPE_PRECISION (TREE_TYPE (tem))
                       > floor_log2 (TYPE_PRECISION (rtype))
                    && TYPE_PRECISION (TREE_TYPE (tem))
                       == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
                    && (tem == def_arg2[1 - i]
                        || tem == def_arg2_alt[1 - i]))
                  {
                    rotcnt = tem;
                    break;
                  }
              }
          }
      if (rotcnt == NULL_TREE)
        return false;
      swapped_p = i != 1;
    }

  if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
                                  TREE_TYPE (rotcnt)))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (def_arg2[0])),
                               NOP_EXPR, rotcnt);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      rotcnt = gimple_assign_lhs (g);
    }
  lhs = gimple_assign_lhs (stmt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    lhs = make_ssa_name (TREE_TYPE (def_arg1[0]));
  g = gimple_build_assign (lhs,
                           ((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
                           ? LROTATE_EXPR : RROTATE_EXPR, def_arg1[0], rotcnt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    {
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, lhs);
    }
  gsi_replace (gsi, g, false);
  return true;
}

/* Combine an element access with a shuffle.  Returns true if there were
   any changes made, else it returns false.  */

static bool
simplify_bitfield_ref (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple *def_stmt;
  tree op, op0, op1, op2;
  tree elem_type;
  unsigned idx, n, size;
  enum tree_code code;

  op = gimple_assign_rhs1 (stmt);
  gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);

  op0 = TREE_OPERAND (op, 0);
  if (TREE_CODE (op0) != SSA_NAME
      || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
    return false;

  def_stmt = get_prop_source_stmt (op0, false, NULL);
  if (!def_stmt || !can_propagate_from (def_stmt))
    return false;

  op1 = TREE_OPERAND (op, 1);
  op2 = TREE_OPERAND (op, 2);
  code = gimple_assign_rhs_code (def_stmt);

  if (code == CONSTRUCTOR)
    {
      tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
                               gimple_assign_rhs1 (def_stmt), op1, op2);
      if (!tem || !valid_gimple_rhs_p (tem))
        return false;
      gimple_assign_set_rhs_from_tree (gsi, tem);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  elem_type = TREE_TYPE (TREE_TYPE (op0));
  if (TREE_TYPE (op) != elem_type)
    return false;

  size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
  n = TREE_INT_CST_LOW (op1) / size;
  if (n != 1)
    return false;
  idx = TREE_INT_CST_LOW (op2) / size;

  if (code == VEC_PERM_EXPR)
    {
      tree p, m, tem;
      unsigned nelts;
      m = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (m) != VECTOR_CST)
        return false;
      nelts = VECTOR_CST_NELTS (m);
      idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
      idx %= 2 * nelts;
      if (idx < nelts)
        p = gimple_assign_rhs1 (def_stmt);
      else
        {
          p = gimple_assign_rhs2 (def_stmt);
          idx -= nelts;
        }
      tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
                    unshare_expr (p), op1, bitsize_int (idx * size));
      gimple_assign_set_rhs1 (stmt, tem);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  return false;
}
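
/* For example (assuming 4 elements of 32 bits),

     v_2 = VEC_PERM_EXPR <a_1, b_1, { 5, 0, 2, 7 }>;
     x_3 = BIT_FIELD_REF <v_2, 32, 0>;

   selects element 5, i.e. element 1 of b_1, so the code above
   redirects the access to x_3 = BIT_FIELD_REF <b_1, 32, 32> and the
   shuffle may become dead.  */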

/* Determine whether applying the 2 permutations (mask1 then mask2)
   gives back one of the input.  */

static int
is_combined_permutation_identity (tree mask1, tree mask2)
{
  tree mask;
  unsigned int nelts, i, j;
  bool maybe_identity1 = true;
  bool maybe_identity2 = true;

  gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
                       && TREE_CODE (mask2) == VECTOR_CST);
  mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
  gcc_assert (TREE_CODE (mask) == VECTOR_CST);

  nelts = VECTOR_CST_NELTS (mask);
  for (i = 0; i < nelts; i++)
    {
      tree val = VECTOR_CST_ELT (mask, i);
      gcc_assert (TREE_CODE (val) == INTEGER_CST);
      j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
      if (j == i)
        maybe_identity2 = false;
      else if (j == i + nelts)
        maybe_identity1 = false;
      else
        return 0;
    }
  return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
}
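
/* For example, with four elements, mask1 = { 1, 0, 3, 2 } followed by
   mask2 = { 1, 0, 3, 2 } composes to { 0, 1, 2, 3 }, the identity on
   the first input, so the function returns 1.  A composition that
   returns the second input untouched yields 2, anything else 0.  */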

/* Combine a shuffle with its arguments.  Returns 1 if there were any
   changes made, 2 if cfg-cleanup needs to run.  Else it returns 0.  */

static int
simplify_permutation (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple *def_stmt;
  tree op0, op1, op2, op3, arg0, arg1;
  enum tree_code code;
  bool single_use_op0 = false;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);

  op0 = gimple_assign_rhs1 (stmt);
  op1 = gimple_assign_rhs2 (stmt);
  op2 = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (op2) != VECTOR_CST)
    return 0;

  if (TREE_CODE (op0) == VECTOR_CST)
    {
      code = VECTOR_CST;
      arg0 = op0;
    }
  else if (TREE_CODE (op0) == SSA_NAME)
    {
      def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return 0;

      code = gimple_assign_rhs_code (def_stmt);
      arg0 = gimple_assign_rhs1 (def_stmt);
    }
  else
    return 0;

  /* Two consecutive shuffles.  */
  if (code == VEC_PERM_EXPR)
    {
      tree orig;
      int ident;

      if (op0 != op1)
        return 0;
      op3 = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (op3) != VECTOR_CST)
        return 0;
      ident = is_combined_permutation_identity (op3, op2);
      if (!ident)
        return 0;
      orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
                          : gimple_assign_rhs2 (def_stmt);
      gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
      gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
      gimple_set_num_ops (stmt, 2);
      update_stmt (stmt);
      return remove_prop_source_from_use (op0) ? 2 : 1;
    }

  /* Shuffle of a constructor.  */
  else if (code == CONSTRUCTOR || code == VECTOR_CST)
    {
      tree opt;
      bool ret = false;
      if (op0 != op1)
        {
          if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
            return 0;

          if (TREE_CODE (op1) == VECTOR_CST)
            arg1 = op1;
          else if (TREE_CODE (op1) == SSA_NAME)
            {
              enum tree_code code2;

              gimple *def_stmt2 = get_prop_source_stmt (op1, true, NULL);
              if (!def_stmt2 || !can_propagate_from (def_stmt2))
                return 0;

              code2 = gimple_assign_rhs_code (def_stmt2);
              if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
                return 0;
              arg1 = gimple_assign_rhs1 (def_stmt2);
            }
          else
            return 0;
        }
      else
        {
          /* Already used twice in this statement.  */
          if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
            return 0;
          arg1 = arg0;
        }
      opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
      if (!opt
          || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
        return 0;
      gimple_assign_set_rhs_from_tree (gsi, opt);
      update_stmt (gsi_stmt (*gsi));
      if (TREE_CODE (op0) == SSA_NAME)
        ret = remove_prop_source_from_use (op0);
      if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
        ret |= remove_prop_source_from_use (op1);
      return ret ? 2 : 1;
    }

  return 0;
}
1948 /* Recognize a VEC_PERM_EXPR. Returns true if there were any changes. */
1951 simplify_vector_constructor (gimple_stmt_iterator
*gsi
)
1953 gimple
*stmt
= gsi_stmt (*gsi
);
1955 tree op
, op2
, orig
, type
, elem_type
;
1956 unsigned elem_size
, nelts
, i
;
1957 enum tree_code code
, conv_code
;
1958 constructor_elt
*elt
;
1962 gcc_checking_assert (gimple_assign_rhs_code (stmt
) == CONSTRUCTOR
);
1964 op
= gimple_assign_rhs1 (stmt
);
1965 type
= TREE_TYPE (op
);
1966 gcc_checking_assert (TREE_CODE (type
) == VECTOR_TYPE
);
1968 nelts
= TYPE_VECTOR_SUBPARTS (type
);
1969 elem_type
= TREE_TYPE (type
);
1970 elem_size
= TREE_INT_CST_LOW (TYPE_SIZE (elem_type
));
1972 sel
= XALLOCAVEC (unsigned char, nelts
);
1974 conv_code
= ERROR_MARK
;
  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
    {
      tree ref, op1;

      if (i >= nelts)
        return false;

      if (TREE_CODE (elt->value) != SSA_NAME)
        return false;
      def_stmt = get_prop_source_stmt (elt->value, false, NULL);
      if (!def_stmt)
        return false;
      code = gimple_assign_rhs_code (def_stmt);
      if (code == FLOAT_EXPR
          || code == FIX_TRUNC_EXPR)
        {
          op1 = gimple_assign_rhs1 (def_stmt);
          if (conv_code == ERROR_MARK)
            {
              if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (elt->value)))
                  != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op1))))
                return false;
              conv_code = code;
            }
          else if (conv_code != code)
            return false;
          if (TREE_CODE (op1) != SSA_NAME)
            return false;
          def_stmt = SSA_NAME_DEF_STMT (op1);
          if (! is_gimple_assign (def_stmt))
            return false;
          code = gimple_assign_rhs_code (def_stmt);
        }
      if (code != BIT_FIELD_REF)
        return false;
      op1 = gimple_assign_rhs1 (def_stmt);
      ref = TREE_OPERAND (op1, 0);
      if (orig)
        {
          if (ref != orig)
            return false;
        }
      else
        {
          if (TREE_CODE (ref) != SSA_NAME)
            return false;
          if (! VECTOR_TYPE_P (TREE_TYPE (ref))
              || ! useless_type_conversion_p (TREE_TYPE (op1),
                                              TREE_TYPE (TREE_TYPE (ref))))
            return false;
          orig = ref;
        }
      if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
        return false;
      sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
      if (sel[i] != i)
        maybe_ident = false;
    }
  if (i < nelts)
    return false;

  if (! VECTOR_TYPE_P (TREE_TYPE (orig))
      || (TYPE_VECTOR_SUBPARTS (type)
          != TYPE_VECTOR_SUBPARTS (TREE_TYPE (orig))))
    return false;

  tree decl = NULL_TREE;
  if (conv_code != ERROR_MARK
      && (! supportable_convert_operation (conv_code, type, TREE_TYPE (orig),
                                           &decl, &conv_code)
          || conv_code == CALL_EXPR))
    return false;

  if (maybe_ident)
    {
      if (conv_code == ERROR_MARK)
        gimple_assign_set_rhs_from_tree (gsi, orig);
      else
        gimple_assign_set_rhs_with_ops (gsi, conv_code, orig,
                                        NULL_TREE, NULL_TREE);
    }
  else
    {
      tree mask_type, *mask_elts;

      if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
        return false;
      mask_type
        = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
                             nelts);
      if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
          || GET_MODE_SIZE (TYPE_MODE (mask_type))
             != GET_MODE_SIZE (TYPE_MODE (type)))
        return false;
      mask_elts = XALLOCAVEC (tree, nelts);
      for (i = 0; i < nelts; i++)
        mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
      op2 = build_vector (mask_type, mask_elts);
      if (conv_code == ERROR_MARK)
        gimple_assign_set_rhs_with_ops (gsi, VEC_PERM_EXPR, orig, orig, op2);
      else
        {
          gimple *perm
            = gimple_build_assign (make_ssa_name (TREE_TYPE (orig)),
                                   VEC_PERM_EXPR, orig, orig, op2);
          orig = gimple_assign_lhs (perm);
          gsi_insert_before (gsi, perm, GSI_SAME_STMT);
          gimple_assign_set_rhs_with_ops (gsi, conv_code, orig,
                                          NULL_TREE, NULL_TREE);
        }
    }
  update_stmt (gsi_stmt (*gsi));
  return true;
}
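
/* Illustrative sketch (hypothetical SSA names): a constructor whose
   elements are BIT_FIELD_REFs of a single source vector, e.g. for
   32-bit elements

     x_1 = BIT_FIELD_REF <v_2, 32, 96>;
     x_3 = BIT_FIELD_REF <v_2, 32, 64>;
     x_4 = BIT_FIELD_REF <v_2, 32, 32>;
     x_5 = BIT_FIELD_REF <v_2, 32, 0>;
     r_6 = {x_1, x_3, x_4, x_5};

   yields sel = { 3, 2, 1, 0 } and is rewritten to

     r_6 = VEC_PERM_EXPR <v_2, v_2, { 3, 2, 1, 0 }>;

   whereas a constructor reading the elements in order (sel[i] == i)
   takes the maybe_ident path and becomes a plain copy of v_2.  */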
/* Primitive "lattice" function for gimple_simplify.  */

static tree
fwprop_ssa_val (tree name)
{
  /* First valueize NAME.  */
  if (TREE_CODE (name) == SSA_NAME
      && SSA_NAME_VERSION (name) < lattice.length ())
    {
      tree val = lattice[SSA_NAME_VERSION (name)];
      if (val)
        name = val;
    }
  /* We continue matching along SSA use-def edges for SSA names
     that are not single-use.  Currently there are no patterns
     that would cause any issues with that.  */
  return name;
}
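
/* Illustrative sketch (hypothetical SSA names): if the lattice recorded
   b_2 -> a_1 for the copy "b_2 = a_1;", then fwprop_ssa_val (b_2)
   returns a_1, so gimple_simplify matches a later "c_3 = b_2 + 1" as
   if it were "c_3 = a_1 + 1".  */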
/* Main entry point for the forward propagation and statement combine
   optimizer.  */

namespace {

const pass_data pass_data_forwprop =
{
  GIMPLE_PASS, /* type */
  "forwprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FORWPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_forwprop : public gimple_opt_pass
{
public:
  pass_forwprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_forwprop, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_forwprop (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_forwprop; }
  virtual unsigned int execute (function *);

}; // class pass_forwprop
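
/* Note: clone () is what lets the pass manager schedule forwprop at
   several points in the pipeline; each instance shares
   pass_data_forwprop but is gated and executed independently.  */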
unsigned int
pass_forwprop::execute (function *fun)
{
  unsigned int todoflags = 0;

  cfg_changed = false;

  /* Combine stmts with the stmts defining their operands.  Do that
     in an order that guarantees visiting SSA defs before SSA uses.  */
  lattice.create (num_ssa_names);
  lattice.quick_grow_cleared (num_ssa_names);
  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
  int postorder_num = pre_and_rev_post_order_compute_fn (cfun, NULL,
                                                         postorder, false);
  auto_vec<gimple *, 4> to_fixup;
  to_purge = BITMAP_ALLOC (NULL);
  for (int i = 0; i < postorder_num; ++i)
    {
      gimple_stmt_iterator gsi;
      basic_block bb = BASIC_BLOCK_FOR_FN (fun, postorder[i]);
      /* Propagate into PHIs and record degenerate ones in the lattice.  */
      for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
           gsi_next (&si))
        {
          gphi *phi = si.phi ();
          tree res = gimple_phi_result (phi);
          if (virtual_operand_p (res))
            continue;

          use_operand_p use_p;
          ssa_op_iter it;
          tree first = NULL_TREE;
          bool all_same = true;
          FOR_EACH_PHI_ARG (use_p, phi, it, SSA_OP_USE)
            {
              tree use = USE_FROM_PTR (use_p);
              tree tem = fwprop_ssa_val (use);
              if (! first)
                first = tem;
              else if (! operand_equal_p (first, tem, 0))
                all_same = false;
              if (tem != use
                  && may_propagate_copy (use, tem))
                propagate_value (use_p, tem);
            }
          if (all_same)
            fwprop_set_lattice_val (res, first);
        }
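
      /* Illustrative sketch (hypothetical SSA names): a PHI whose
         arguments all valueize to the same value,

           x_3 = PHI <a_1 (2), a_1 (3)>

         is degenerate; recording x_3 -> a_1 in the lattice lets the
         statement walk below see through x_3.  */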
      /* Apply forward propagation to all stmts in the basic-block.
         Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
          gimple *stmt = gsi_stmt (gsi);
          tree lhs, rhs;
          enum tree_code code;

          if (!is_gimple_assign (stmt))
            {
              gsi_next (&gsi);
              continue;
            }

          lhs = gimple_assign_lhs (stmt);
          rhs = gimple_assign_rhs1 (stmt);
          code = gimple_assign_rhs_code (stmt);
          if (TREE_CODE (lhs) != SSA_NAME
              || has_zero_uses (lhs))
            {
              gsi_next (&gsi);
              continue;
            }

          /* If this statement sets an SSA_NAME to an address,
             try to propagate the address into the uses of the SSA_NAME.  */
          if (code == ADDR_EXPR
              /* Handle pointer conversions on invariant addresses
                 as well, as this is valid gimple.  */
              || (CONVERT_EXPR_CODE_P (code)
                  && TREE_CODE (rhs) == ADDR_EXPR
                  && POINTER_TYPE_P (TREE_TYPE (lhs))))
            {
              tree base = get_base_address (TREE_OPERAND (rhs, 0));
              if ((!base
                   || !DECL_P (base)
                   || decl_address_invariant_p (base))
                  && !stmt_references_abnormal_ssa_name (stmt)
                  && forward_propagate_addr_expr (lhs, rhs, true))
                {
                  fwprop_invalidate_lattice (gimple_get_lhs (stmt));
                  release_defs (stmt);
                  gsi_remove (&gsi, true);
                }
              else
                gsi_next (&gsi);
            }
          else if (code == POINTER_PLUS_EXPR)
            {
              tree off = gimple_assign_rhs2 (stmt);
              if (TREE_CODE (off) == INTEGER_CST
                  && can_propagate_from (stmt)
                  && !simple_iv_increment_p (stmt)
                  /* ??? Better adjust the interface to that function
                     instead of building new trees here.  */
                  && forward_propagate_addr_expr
                       (lhs,
                        build1_loc (gimple_location (stmt),
                                    ADDR_EXPR, TREE_TYPE (rhs),
                                    fold_build2 (MEM_REF,
                                                 TREE_TYPE (TREE_TYPE (rhs)),
                                                 rhs,
                                                 fold_convert (ptr_type_node,
                                                               off))), true))
                {
                  fwprop_invalidate_lattice (gimple_get_lhs (stmt));
                  release_defs (stmt);
                  gsi_remove (&gsi, true);
                }
              else if (is_gimple_min_invariant (rhs))
                {
                  /* Make sure to fold &a[0] + off_1 here.  */
                  fold_stmt_inplace (&gsi);
                  update_stmt (stmt);
                  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
                    gsi_next (&gsi);
                }
              else
                gsi_next (&gsi);
            }
          else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
                   && gimple_assign_load_p (stmt)
                   && !gimple_has_volatile_ops (stmt)
                   && (TREE_CODE (gimple_assign_rhs1 (stmt))
                       != TARGET_MEM_REF)
                   && !stmt_can_throw_internal (stmt))
            {
              /* Rewrite loads used only in real/imagpart extractions to
                 component-wise loads.  */
              use_operand_p use_p;
              imm_use_iterator iter;
              bool rewrite = true;
              FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
                {
                  gimple *use_stmt = USE_STMT (use_p);
                  if (is_gimple_debug (use_stmt))
                    continue;
                  if (!is_gimple_assign (use_stmt)
                      || (gimple_assign_rhs_code (use_stmt) != REALPART_EXPR
                          && gimple_assign_rhs_code (use_stmt) != IMAGPART_EXPR))
                    {
                      rewrite = false;
                      break;
                    }
                }
              if (rewrite)
                {
                  gimple *use_stmt;
                  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
                    {
                      if (is_gimple_debug (use_stmt))
                        {
                          if (gimple_debug_bind_p (use_stmt))
                            {
                              gimple_debug_bind_reset_value (use_stmt);
                              update_stmt (use_stmt);
                            }
                          continue;
                        }

                      tree new_rhs = build1 (gimple_assign_rhs_code (use_stmt),
                                             TREE_TYPE (TREE_TYPE (rhs)),
                                             unshare_expr (rhs));
                      gimple *new_stmt
                        = gimple_build_assign (gimple_assign_lhs (use_stmt),
                                               new_rhs);

                      location_t loc = gimple_location (use_stmt);
                      gimple_set_location (new_stmt, loc);
                      gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
                      unlink_stmt_vdef (use_stmt);
                      gsi_remove (&gsi2, true);

                      gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
                    }

                  release_defs (stmt);
                  gsi_remove (&gsi, true);
                }
              else
                gsi_next (&gsi);
            }
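
          /* Illustrative sketch of the rewrite above (hypothetical
             SSA names): a complex load

               x_1 = *p_2;
               r_3 = REALPART_EXPR <x_1>;
               i_4 = IMAGPART_EXPR <x_1>;

             used only in component extractions becomes

               r_3 = REALPART_EXPR <*p_2>;
               i_4 = IMAGPART_EXPR <*p_2>;

             and the whole-complex temporary and its load disappear.  */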
          else if (code == COMPLEX_EXPR)
            {
              /* Rewrite stores of a single-use complex build expression
                 to component-wise stores.  */
              use_operand_p use_p;
              gimple *use_stmt;
              if (single_imm_use (lhs, &use_p, &use_stmt)
                  && gimple_store_p (use_stmt)
                  && !gimple_has_volatile_ops (use_stmt)
                  && is_gimple_assign (use_stmt)
                  && (TREE_CODE (gimple_assign_lhs (use_stmt))
                      != TARGET_MEM_REF))
                {
                  tree use_lhs = gimple_assign_lhs (use_stmt);
                  tree new_lhs = build1 (REALPART_EXPR,
                                         TREE_TYPE (TREE_TYPE (use_lhs)),
                                         unshare_expr (use_lhs));
                  gimple *new_stmt = gimple_build_assign (new_lhs, rhs);
                  location_t loc = gimple_location (use_stmt);
                  gimple_set_location (new_stmt, loc);
                  gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
                  gimple_set_vdef (new_stmt, make_ssa_name (gimple_vop (cfun)));
                  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
                  gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
                  gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
                  gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);

                  new_lhs = build1 (IMAGPART_EXPR,
                                    TREE_TYPE (TREE_TYPE (use_lhs)),
                                    unshare_expr (use_lhs));
                  gimple_assign_set_lhs (use_stmt, new_lhs);
                  gimple_assign_set_rhs1 (use_stmt, gimple_assign_rhs2 (stmt));
                  update_stmt (use_stmt);

                  release_defs (stmt);
                  gsi_remove (&gsi, true);
                }
              else
                gsi_next (&gsi);
            }
          else
            gsi_next (&gsi);
        }
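
      /* Illustrative sketch of the complex-store rewrite above
         (hypothetical SSA names): a complex value built once and
         only stored,

           x_1 = COMPLEX_EXPR <a_2, b_3>;
           *p_4 = x_1;

         becomes the component-wise stores

           REALPART_EXPR <*p_4> = a_2;
           IMAGPART_EXPR <*p_4> = b_3;

         with the virtual operands rewired so the two stores chain.  */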
      /* Combine stmts with the stmts defining their operands.
         Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
        {
          gimple *stmt = gsi_stmt (gsi);
          gimple *orig_stmt = stmt;
          bool changed = false;
          bool was_noreturn = (is_gimple_call (stmt)
                               && gimple_call_noreturn_p (stmt));

          /* Mark stmt as potentially needing revisiting.  */
          gimple_set_plf (stmt, GF_PLF_1, false);

          if (fold_stmt (&gsi, fwprop_ssa_val))
            {
              changed = true;
              stmt = gsi_stmt (gsi);
              if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
                bitmap_set_bit (to_purge, bb->index);
              if (!was_noreturn
                  && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
                to_fixup.safe_push (stmt);
              /* Cleanup the CFG if we simplified a condition to
                 true or false.  */
              if (gcond *cond = dyn_cast <gcond *> (stmt))
                if (gimple_cond_true_p (cond)
                    || gimple_cond_false_p (cond))
                  cfg_changed = true;
            }

          switch (gimple_code (stmt))
            {
            case GIMPLE_ASSIGN:
              {
                tree rhs1 = gimple_assign_rhs1 (stmt);
                enum tree_code code = gimple_assign_rhs_code (stmt);

                if (code == COND_EXPR
                    || code == VEC_COND_EXPR)
                  {
                    /* In this case the entire COND_EXPR is in rhs1.  */
                    if (forward_propagate_into_cond (&gsi))
                      {
                        changed = true;
                        stmt = gsi_stmt (gsi);
                      }
                  }
                else if (TREE_CODE_CLASS (code) == tcc_comparison)
                  {
                    int did_something;
                    did_something = forward_propagate_into_comparison (&gsi);
                    if (did_something == 2)
                      cfg_changed = true;
                    changed = did_something != 0;
                  }
                else if ((code == PLUS_EXPR
                          || code == BIT_IOR_EXPR
                          || code == BIT_XOR_EXPR)
                         && simplify_rotate (&gsi))
                  changed = true;
                else if (code == VEC_PERM_EXPR)
                  {
                    int did_something = simplify_permutation (&gsi);
                    if (did_something == 2)
                      cfg_changed = true;
                    changed = did_something != 0;
                  }
                else if (code == BIT_FIELD_REF)
                  changed = simplify_bitfield_ref (&gsi);
                else if (code == CONSTRUCTOR
                         && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
                  changed = simplify_vector_constructor (&gsi);
                break;
              }

            case GIMPLE_SWITCH:
              changed = simplify_gimple_switch (as_a <gswitch *> (stmt));
              break;

            case GIMPLE_COND:
              {
                int did_something
                  = forward_propagate_into_gimple_cond (as_a <gcond *> (stmt));
                if (did_something == 2)
                  cfg_changed = true;
                changed = did_something != 0;
                break;
              }

            case GIMPLE_CALL:
              {
                tree callee = gimple_call_fndecl (stmt);
                if (callee != NULL_TREE
                    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
                  changed = simplify_builtin_call (&gsi, callee);
                break;
              }

            default:;
            }
          if (changed)
            {
              /* If the stmt changed then re-visit it and the statements
                 inserted before it.  */
              for (; !gsi_end_p (gsi); gsi_prev (&gsi))
                if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
                  break;
              if (gsi_end_p (gsi))
                gsi = gsi_start_bb (bb);
              else
                gsi_next (&gsi);
            }
          else
            {
              /* Stmt no longer needs to be revisited.  */
              gimple_set_plf (stmt, GF_PLF_1, true);

              /* Fill up the lattice.  */
              if (gimple_assign_single_p (stmt))
                {
                  tree lhs = gimple_assign_lhs (stmt);
                  tree rhs = gimple_assign_rhs1 (stmt);
                  if (TREE_CODE (lhs) == SSA_NAME)
                    {
                      tree val = lhs;
                      if (TREE_CODE (rhs) == SSA_NAME)
                        val = fwprop_ssa_val (rhs);
                      else if (is_gimple_min_invariant (rhs))
                        val = rhs;
                      fwprop_set_lattice_val (lhs, val);
                    }
                }
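
              /* Illustrative sketch (hypothetical SSA names): after
                 "c_2 = 42;" the lattice maps c_2 -> 42, and a later
                 copy "d_3 = c_2;" maps d_3 -> 42 via fwprop_ssa_val,
                 so constants flow through copy chains.  */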

              gsi_next (&gsi);
            }
        }
    }

  free (postorder);
  lattice.release ();

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!to_fixup.is_empty ())
    {
      gimple *stmt = to_fixup.pop ();
      if (dump_file && dump_flags & TDF_DETAILS)
        {
          fprintf (dump_file, "Fixing up noreturn call ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
          fprintf (dump_file, "\n");
        }
      cfg_changed |= fixup_noreturn_call (stmt);
    }

  cfg_changed |= gimple_purge_all_dead_eh_edges (to_purge);
  BITMAP_FREE (to_purge);

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
} // anon namespace

gimple_opt_pass *
make_pass_forwprop (gcc::context *ctxt)
{
  return new pass_forwprop (ctxt);
}