/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "optabs-query.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "gimplify-me.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-dom.h"
#include "tree-ssa-strlen.h"
#include "tree-cfgcleanup.h"
#include "optabs-tree.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"
#include "internal-fn.h"
/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     x = a COND b;
     if (x) goto ... else goto ...

   Will be transformed into:

     if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     x = a + c1;
     if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     x = !a;
     if (x) goto ... else goto ...

   Will be transformed into:

     if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     x = (typecast) a;
     if (x) goto ... else goto ...

   Will be transformed into:

     if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = (type1*)&type2var;
     res = *ptr;

   Will get turned into (if type1 and type2 are the same size
   and neither have volatile on them):

     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into:

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p;

   Will get turned into:

     ptr2 = &x[index];

   Or

     ssa = (int) decl;
     res = ssa & 1;

   Provided that decl has known alignment >= 2, will get turned into

     res = 0;

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */
static bool forward_propagate_addr_expr (tree, tree, bool);

/* Set to true if we delete dead edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple *stmt);

static bitmap to_purge;

/* Const-and-copy lattice.  */
static vec<tree> lattice;
/* Set the lattice entry for NAME to VAL.  */

static void
fwprop_set_lattice_val (tree name, tree val)
{
  if (TREE_CODE (name) == SSA_NAME)
    {
      if (SSA_NAME_VERSION (name) >= lattice.length ())
        {
          lattice.reserve (num_ssa_names - lattice.length ());
          lattice.quick_grow_cleared (num_ssa_names);
        }
      lattice[SSA_NAME_VERSION (name)] = val;
    }
}
/* Invalidate the lattice entry for NAME, done when releasing SSA names.  */

static void
fwprop_invalidate_lattice (tree name)
{
  if (name
      && TREE_CODE (name) == SSA_NAME
      && SSA_NAME_VERSION (name) < lattice.length ())
    lattice[SSA_NAME_VERSION (name)] = NULL_TREE;
}
/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL_TREE if there is no such one.
   If SINGLE_USE_ONLY is set considers only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple *
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (name);

      if (!has_single_use (name))
        {
          single_use = false;
          if (single_use_only)
            return NULL;
        }

      /* If name is defined by a PHI node or is the default def, bail out.  */
      if (!is_gimple_assign (def_stmt))
        return NULL;

      /* If def_stmt is a simple copy, continue looking.  */
      if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
        name = gimple_assign_rhs1 (def_stmt);
      else
        {
          if (!single_use_only && single_use_p)
            *single_use_p = single_use;

          return def_stmt;
        }
    }
  while (1);
}
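/* Illustrative sketch (added for exposition; the SSA names are made up):
   given the copy chain

     b_2 = a_1 < c_3;
     d_4 = b_2;

   get_prop_source_stmt (d_4, false, &p) skips the trivial copy and
   returns the comparison statement as the propagation source, with P
   recording whether every link on the way had a single use.  */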
/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple *def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can be always propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we cannot apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}
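/* Illustrative sketch (added for exposition): a load such as
   x_1 = *p_2 (tcc_reference) or a volatile access is rejected as a
   propagation source above, while a constant source such as x_1 = 42
   is always accepted.  */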
/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple *stmt;
  bool cfg_changed = false;

  do
    {
      basic_block bb;

      if (SSA_NAME_IN_FREE_LIST (name)
          || SSA_NAME_IS_DEFAULT_DEF (name)
          || !has_zero_uses (name))
        return cfg_changed;

      stmt = SSA_NAME_DEF_STMT (name);
      if (gimple_code (stmt) == GIMPLE_PHI
          || gimple_has_side_effects (stmt))
        return cfg_changed;

      bb = gimple_bb (stmt);
      gsi = gsi_for_stmt (stmt);
      unlink_stmt_vdef (stmt);
      if (gsi_remove (&gsi, true))
        bitmap_set_bit (to_purge, bb->index);
      fwprop_invalidate_lattice (gimple_get_lhs (stmt));
      release_defs (stmt);

      name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
    }
  while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}
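/* Illustrative sketch (added for exposition): with

     t_2 = a_1 + 1;
     u_3 = t_2;

   after the only use of u_3 has been replaced, calling this function on
   u_3 removes the u_3 definition and then the t_2 definition in turn,
   once each reaches zero uses.  */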
/* Return the rhs of a gassign *STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple *stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_TERNARY_RHS:
      return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                              gimple_assign_rhs2 (stmt),
                              gimple_assign_rhs3 (stmt));
    case GIMPLE_BINARY_RHS:
      return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                              gimple_assign_rhs2 (stmt));
    case GIMPLE_UNARY_RHS:
      return build1 (code, type, gimple_assign_rhs1 (stmt));
    case GIMPLE_SINGLE_RHS:
      return gimple_assign_rhs1 (stmt);
    default:
      gcc_unreachable ();
    }
}
/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple *stmt, enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  bool nowarn = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
  fold_undefer_overflow_warnings (!nowarn, stmt, 0);

  return t;
}
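/* Illustrative sketch (added for exposition): for CODE == LT_EXPR with
   constant operands 3 and 4 the fold yields boolean_true_node, which is
   invariant and therefore acceptable even when INVARIANT_ONLY is set;
   a non-invariant simplification survives only if INVARIANT_ONLY is
   false.  */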
/* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
   of its operand.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (gimple *stmt,
                                     enum tree_code code, tree type,
                                     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple *def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
          bool invariant_only_p = !single_use0_p;

          rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);

          /* Always combine comparisons or conversions from booleans.  */
          if (TREE_CODE (op1) == INTEGER_CST
              && ((CONVERT_EXPR_CODE_P (def_code)
                   && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs0, 0)))
                      == BOOLEAN_TYPE)
                  || TREE_CODE_CLASS (def_code) == tcc_comparison))
            invariant_only_p = false;

          tmp = combine_cond_expr_cond (stmt, code, type,
                                        rhs0, op1, invariant_only_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple *def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        op0, rhs1, !single_use1_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
                                  rhs0, rhs1,
                                  !(single_use0_p && single_use1_p));

  return tmp;
}
/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
                                             gimple_assign_rhs_code (stmt),
                                             type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}
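/* Illustrative sketch (added for exposition): with

     t_1 = a_2 < b_3;
     res_4 = t_1 != 0;

   propagating the definition of t_1 into the comparison simplifies
   res_4 to a_2 < b_3, and the now dead t_1 definition is cleaned up by
   remove_prop_source_from_use.  */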
/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.  */

static int
forward_propagate_into_gimple_cond (gcond *stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
                                             boolean_type_node,
                                             rhs1, rhs2);
  if (tmp
      && is_gimple_condexpr_for_cond (tmp))
    {
      if (dump_file)
        {
          fprintf (dump_file, "  Replaced '");
          print_gimple_expr (dump_file, stmt, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp);
          fprintf (dump_file, "'\n");
        }

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
           && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
           && integer_zerop (rhs2))
          || (code == NE_EXPR
              && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}
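/* Illustrative sketch (added for exposition): the canonicalization
   above rewrites, for a _Bool b_1,

     if (b_1 == 0) goto L1; else goto L2;

   into  if (b_1 != 0) ...  with the EDGE_TRUE_VALUE/EDGE_FALSE_VALUE
   flags of both successor edges flipped, which preserves the semantics
   without introducing a negation.  */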
/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple *stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
    bitmap_set_bit (to_purge, gimple_bb (stmt)->index);

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}
/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation can
   be not totally successful, yet things may have been changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple *use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Do not perform copy-propagation but recurse through copy chains.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == SSA_NAME)
    return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

  /* The use statement could be a conversion.  Recurse to the uses of the
     lhs as copyprop does not copy through pointer to integer to pointer
     conversions and FRE does not catch all cases either.
     Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && CONVERT_EXPR_CODE_P (rhs_code))
    {
      /* If there is a point in a conversion chain where the types match
         so we can remove a conversion re-materialize the address here
         and stop.  */
      if (single_use_p
          && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
        {
          gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
          gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
          return true;
        }

      /* Else recurse if the conversion preserves the address value.  */
      if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
           || POINTER_TYPE_P (TREE_TYPE (lhs)))
          && (TYPE_PRECISION (TREE_TYPE (lhs))
              >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
        return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

      return false;
    }

  /* If this isn't a conversion chain from this on we only can propagate
     into compatible pointer contexts.  */
  if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
    return false;

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
         for further propagation.  Simply rely on fold building that
         and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
                                 def_rhs,
                                 fold_convert (ptr_type_node,
                                               gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
        return false;
      new_def_rhs = build1 (ADDR_EXPR, TREE_TYPE (rhs), new_def_rhs);

      /* Recurse.  If we could propagate into all uses of lhs do not
         bother to replace into the current use but just pretend we did.  */
      if (forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
        return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs),
                                     TREE_TYPE (new_def_rhs)))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
                                        new_def_rhs);
      else if (is_gimple_min_invariant (new_def_rhs))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR, new_def_rhs);
      else
        return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      poly_int64 def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          poly_offset_int off = mem_ref_offset (lhs);
          tree new_ptr;
          off += def_rhs_offset;
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (lhs, 0) = new_ptr;
          TREE_OPERAND (lhs, 1)
            = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the only use.  */
          if (single_use_p)
            return true;
        }
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the LHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (lhs, 1))
               && ((gimple_assign_lhs (use_stmt) == lhs
                    && useless_type_conversion_p
                         (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
                          TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
                   || types_compatible_p (TREE_TYPE (lhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
               /* Don't forward anything into clobber stmts if it would result
                  in the lhs no longer being a MEM_REF.  */
               && (!gimple_clobber_p (use_stmt)
                   || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_lhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (lhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
          new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *lhsp = new_lhs;
          TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
          *def_rhs_basep = saved;
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the
             only use.  */
          if (single_use_p)
            return true;
        }
      else
        /* We can have a struct assignment dereferencing our name twice.
           Note that we didn't propagate into the lhs to not falsely
           claim we did when propagating into the rhs.  */
        res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
  if (TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  while (handled_component_p (*rhsp))
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      poly_int64 def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          poly_offset_int off = mem_ref_offset (rhs);
          tree new_ptr;
          off += def_rhs_offset;
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (rhs, 0) = new_ptr;
          TREE_OPERAND (rhs, 1)
            = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the RHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (rhs, 1))
               && ((gimple_assign_rhs1 (use_stmt) == rhs
                    && useless_type_conversion_p
                         (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
                   || types_compatible_p (TREE_TYPE (rhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_rhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (rhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
          new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *rhsp = new_rhs;
          TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
          *def_rhs_basep = saved;
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
                                 ADDR_EXPR, TREE_TYPE (def_rhs),
                                 fold_build2 (MEM_REF,
                                              TREE_TYPE (TREE_TYPE (def_rhs)),
                                              unshare_expr (def_rhs),
                                              fold_convert (ptr_type_node,
                                                            rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  return false;
}
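/* Illustrative sketch (added for exposition) for the &x[C1] p+ C2 case
   above: with int x[10], the GIMPLE

     ptr_1 = &x[1];
     ptr2_2 = ptr_1 p+ 8;

   folds to the equivalent of ptr2_2 = &x[3], i.e. offset
   C3 = 1 * sizeof (int) + 8 == 12 from &x.  */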
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
   the single use in the previous invocation.  Pass true when calling
   this as toplevel.

   Returns true, if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
{
  imm_use_iterator iter;
  gimple *use_stmt;
  bool all = true;
  bool single_use_p = parent_single_use_p && has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (!is_gimple_assign (use_stmt))
        {
          if (!is_gimple_debug (use_stmt))
            all = false;
          continue;
        }

      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
      result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                              single_use_p);
      /* If the use has moved to a different statement adjust
         the update machinery for the old statement too.  */
      if (use_stmt != gsi_stmt (gsi))
        {
          update_stmt (use_stmt);
          use_stmt = gsi_stmt (gsi);
        }
      update_stmt (use_stmt);
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          fwprop_invalidate_lattice (gimple_get_lhs (use_stmt));
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  return all && has_zero_uses (name);
}
/* Helper function for simplify_gimple_switch.  Remove case labels that
   have values outside the range of the new type.  */

static void
simplify_gimple_switch_label_vec (gswitch *stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  auto_vec<tree> labels (branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  */
  for (i = 1; i < branch_num; i++)
    labels.quick_push (gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = labels.length ();
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
         out-of-range for INDEX_TYPE.  Push one label and let the
         CFG cleanups deal with this further.  */
      if (len == 0)
        {
          tree label, elt;

          label = CASE_LABEL (gimple_switch_default_label (stmt));
          elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
          labels.quick_push (elt);
          len = 1;
        }

      for (i = 0; i < labels.length (); i++)
        gimple_switch_set_label (stmt, i + 1, labels[i]);
      for (i++ ; i < branch_num; i++)
        gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
        {
          tree elt = gimple_switch_label (stmt, i);
          basic_block target = label_to_block (cfun, CASE_LABEL (elt));
          bitmap_set_bit (target_blocks, target->index);
        }
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
        {
          if (! bitmap_bit_p (target_blocks, e->dest->index))
            {
              remove_edge (e);
              cfg_changed = true;
              free_dominance_info (CDI_DOMINATORS);
            }
          else
            ei_next (&ei);
        }
      BITMAP_FREE (target_blocks);
    }
}
/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gswitch *stmt)
{
  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  tree cond = gimple_switch_index (stmt);
  if (TREE_CODE (cond) == SSA_NAME)
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (cond);
      if (gimple_assign_cast_p (def_stmt))
        {
          tree def = gimple_assign_rhs1 (def_stmt);
          if (TREE_CODE (def) != SSA_NAME)
            return false;

          /* If we have an extension or sign-change that preserves the
             values we check against then we can copy the source value into
             the switch.  */
          tree ti = TREE_TYPE (def);
          if (INTEGRAL_TYPE_P (ti)
              && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
            {
              size_t n = gimple_switch_num_labels (stmt);
              tree min = NULL_TREE, max = NULL_TREE;
              if (n > 1)
                {
                  min = CASE_LOW (gimple_switch_label (stmt, 1));
                  if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
                    max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
                  else
                    max = CASE_LOW (gimple_switch_label (stmt, n - 1));
                }
              if ((!min || int_fits_type_p (min, ti))
                  && (!max || int_fits_type_p (max, ti)))
                {
                  gimple_switch_set_index (stmt, def);
                  simplify_gimple_switch_label_vec (stmt, ti);
                  update_stmt (stmt);
                  return true;
                }
            }
        }
    }

  return false;
}
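/* Illustrative sketch (added for exposition): for

     _1 = (int) c_2;   // c_2 has type char
     switch (_1) { case 65: ...  case 66: ... }

   all case values fit the type of c_2, so the cast is bypassed and the
   switch uses c_2 directly, with the labels rechecked against the
   narrower type.  */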
/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple *stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
         twice, to handle ADDR_EXPR directly in p1/p2,
         SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
         on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
        {
          if (!POINTER_TYPE_P (TREE_TYPE (p)))
            break;
          if (TREE_CODE (p) == ADDR_EXPR)
            {
              tree q = TREE_OPERAND (p, 0);
              poly_int64 offset;
              tree base = get_addr_base_and_unit_offset (q, &offset);
              if (base)
                {
                  q = base;
                  if (maybe_ne (offset, 0))
                    off = size_binop (PLUS_EXPR, off, size_int (offset));
                }
              if (TREE_CODE (q) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
                {
                  p = TREE_OPERAND (q, 0);
                  off = size_binop (PLUS_EXPR, off,
                                    wide_int_to_tree (sizetype,
                                                      mem_ref_offset (q)));
                }
              else
                {
                  exps[i][j] = q;
                  offs[i][j++] = off;
                  break;
                }
            }
          if (TREE_CODE (p) != SSA_NAME)
            break;
          exps[i][j] = p;
          offs[i][j++] = off;
          if (j == CPD_ITERATIONS)
            break;
          stmt = SSA_NAME_DEF_STMT (p);
          if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
            break;
          code = gimple_assign_rhs_code (stmt);
          if (code == POINTER_PLUS_EXPR)
            {
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
                break;
              off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
              p = gimple_assign_rhs1 (stmt);
            }
          else if (code == ADDR_EXPR || CONVERT_EXPR_CODE_P (code))
            p = gimple_assign_rhs1 (stmt);
          else
            break;
        }
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
        return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
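/* Illustrative sketch (added for exposition): for char buf[16] with

     p1_1 = &buf[2];
     p2_2 = p1_1 p+ 5;

   both chains reach the common base &buf, so
   constant_pointer_difference (p1_1, p2_2) returns 5.  */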
/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
   memcpy (p, "abcd", 4);
   memset (p + 4, ' ', 3);
   into
   memcpy (p, "abcd   ", 7);
   call if the latter can be stored by pieces during expansion.

   Or optimize
   memchr ("abcd", a, 4) == 0;
   or
   memchr ("abcd", a, 4) != 0;
   to
   (a == 'a' || a == 'b' || a == 'c' || a == 'd') == 0
   or
   (a == 'a' || a == 'b' || a == 'c' || a == 'd') != 0

   Also canonicalize __atomic_fetch_op (p, x, y) op x
   to __atomic_op_fetch (p, x, y) or
   __atomic_op_fetch (p, x, y) iop x
   to __atomic_fetch_op (p, x, y) when possible (also __sync).  */
static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple *stmt1, *stmt2 = gsi_stmt (*gsi_p);
  enum built_in_function other_atomic = END_BUILTINS;
  enum tree_code atomic_op = ERROR_MARK;
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  tree res;

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMCHR:
      if (gimple_call_num_args (stmt2) == 3
          && (res = gimple_call_lhs (stmt2)) != nullptr
          && use_in_zero_equality (res) != nullptr
          && CHAR_BIT == 8
          && BITS_PER_UNIT == 8)
        {
          tree ptr = gimple_call_arg (stmt2, 0);
          if (TREE_CODE (ptr) != ADDR_EXPR
              || TREE_CODE (TREE_OPERAND (ptr, 0)) != STRING_CST)
            break;
          unsigned HOST_WIDE_INT slen
            = TREE_STRING_LENGTH (TREE_OPERAND (ptr, 0));
          /* It must be a non-empty string constant.  */
          if (slen < 2)
            break;
          /* For -Os, only simplify strings with a single character.  */
          if (!optimize_bb_for_speed_p (gimple_bb (stmt2))
              && slen > 2)
            break;
          tree size = gimple_call_arg (stmt2, 2);
          /* Size must be a constant which is <= UNITS_PER_WORD and
             <= the string length.  */
          if (TREE_CODE (size) != INTEGER_CST || integer_zerop (size))
            break;

          if (!tree_fits_uhwi_p (size))
            break;

          unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
          if (sz > UNITS_PER_WORD || sz >= slen)
            break;

          tree ch = gimple_call_arg (stmt2, 1);
          location_t loc = gimple_location (stmt2);
          if (!useless_type_conversion_p (char_type_node,
                                          TREE_TYPE (ch)))
            ch = fold_convert_loc (loc, char_type_node, ch);
          const char *p = TREE_STRING_POINTER (TREE_OPERAND (ptr, 0));
          unsigned int isize = sz;
          tree *op = XALLOCAVEC (tree, isize);
          for (unsigned int i = 0; i < isize; i++)
            {
              op[i] = build_int_cst (char_type_node, p[i]);
              op[i] = fold_build2_loc (loc, EQ_EXPR, boolean_type_node,
                                       op[i], ch);
            }
          for (unsigned int i = isize - 1; i >= 1; i--)
            op[i - 1] = fold_convert_loc (loc, boolean_type_node,
                                          fold_build2_loc (loc,
                                                           BIT_IOR_EXPR,
                                                           boolean_type_node,
                                                           op[i - 1],
                                                           op[i]));
          res = fold_convert_loc (loc, TREE_TYPE (res), op[0]);
          gimplify_and_update_call_from_tree (gsi_p, res);
          return true;
        }
      break;

    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
          || gimple_call_lhs (stmt2)
          || CHAR_BIT != 8
          || BITS_PER_UNIT != 8)
        break;
      else
        {
          tree callee1;
          tree ptr1, src1, str1, off1, len1, lhs1;
          tree ptr2 = gimple_call_arg (stmt2, 0);
          tree val2 = gimple_call_arg (stmt2, 1);
          tree len2 = gimple_call_arg (stmt2, 2);
          tree diff, vdef, new_str_cst;
          gimple *use_stmt;
          unsigned int ptr1_align;
          unsigned HOST_WIDE_INT src_len;
          char *src_buf;
          use_operand_p use_p;

          if (!tree_fits_shwi_p (val2)
              || !tree_fits_uhwi_p (len2)
              || compare_tree_int (len2, 1024) == 1)
            break;
          if (is_gimple_call (stmt1))
            {
              /* If first stmt is a call, it needs to be memcpy
                 or mempcpy, with string literal as second argument and
                 constant length.  */
              callee1 = gimple_call_fndecl (stmt1);
              if (callee1 == NULL_TREE
                  || !fndecl_built_in_p (callee1, BUILT_IN_NORMAL)
                  || gimple_call_num_args (stmt1) != 3)
                break;
              if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
                  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
                break;
              ptr1 = gimple_call_arg (stmt1, 0);
              src1 = gimple_call_arg (stmt1, 1);
              len1 = gimple_call_arg (stmt1, 2);
              lhs1 = gimple_call_lhs (stmt1);
              if (!tree_fits_uhwi_p (len1))
                break;
              str1 = string_constant (src1, &off1, NULL, NULL);
              if (str1 == NULL_TREE)
                break;
              if (!tree_fits_uhwi_p (off1)
                  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
                  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
                                             - tree_to_uhwi (off1)) > 0
                  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
                  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
                     != TYPE_MODE (char_type_node))
                break;
            }
          else if (gimple_assign_single_p (stmt1))
            {
              /* Otherwise look for length 1 memcpy optimized into
                 assignment.  */
              ptr1 = gimple_assign_lhs (stmt1);
              src1 = gimple_assign_rhs1 (stmt1);
              if (TREE_CODE (ptr1) != MEM_REF
                  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
                  || !tree_fits_shwi_p (src1))
                break;
              ptr1 = build_fold_addr_expr (ptr1);
              STRIP_USELESS_TYPE_CONVERSION (ptr1);
              callee1 = NULL_TREE;
              len1 = size_one_node;
              lhs1 = NULL_TREE;
              off1 = size_zero_node;
              str1 = NULL_TREE;
            }
          else
            break;

          diff = constant_pointer_difference (ptr1, ptr2);
          if (diff == NULL && lhs1 != NULL)
            {
              diff = constant_pointer_difference (lhs1, ptr2);
              if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
                  && diff != NULL)
                diff = size_binop (PLUS_EXPR, diff,
                                   fold_convert (sizetype, len1));
            }
          /* If the difference between the second and first destination pointer
             is not constant, or is bigger than memcpy length, bail out.  */
          if (diff == NULL
              || !tree_fits_uhwi_p (diff)
              || tree_int_cst_lt (len1, diff)
              || compare_tree_int (diff, 1024) == 1)
            break;

          /* Use maximum of difference plus memset length and memcpy length
             as the new memcpy length, if it is too big, bail out.  */
          src_len = tree_to_uhwi (diff);
          src_len += tree_to_uhwi (len2);
          if (src_len < tree_to_uhwi (len1))
            src_len = tree_to_uhwi (len1);
          if (src_len > 1024)
            break;

          /* If mempcpy value is used elsewhere, bail out, as mempcpy
             with bigger length will return different result.  */
          if (lhs1 != NULL_TREE
              && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
              && (TREE_CODE (lhs1) != SSA_NAME
                  || !single_imm_use (lhs1, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          /* If anything reads memory in between memcpy and memset
             call, the modified memcpy call might change it.  */
          vdef = gimple_vdef (stmt1);
          if (vdef != NULL
              && (!single_imm_use (vdef, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          ptr1_align = get_pointer_alignment (ptr1);
          /* Construct the new source string literal.  */
          src_buf = XALLOCAVEC (char, src_len + 1);
          if (callee1)
            memcpy (src_buf,
                    TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
                    tree_to_uhwi (len1));
          else
            src_buf[0] = tree_to_shwi (src1);
          memset (src_buf + tree_to_uhwi (diff),
                  tree_to_shwi (val2), tree_to_uhwi (len2));
          src_buf[src_len] = '\0';
          /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
             handle embedded '\0's.  */
          if (strlen (src_buf) != src_len)
            break;
          rtl_profile_for_bb (gimple_bb (stmt2));
          /* If the new memcpy wouldn't be emitted by storing the literal
             by pieces, this optimization might enlarge .rodata too much,
             as commonly used string literals couldn't be shared any
             longer.  */
          if (!can_store_by_pieces (src_len,
                                    builtin_strncpy_read_str,
                                    src_buf, ptr1_align, false))
            break;

          new_str_cst = build_string_literal (src_len, src_buf);
          if (callee1)
            {
              /* If STMT1 is a mem{,p}cpy call, adjust it and remove
                 memset call.  */
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                gimple_call_set_lhs (stmt1, NULL_TREE);
              gimple_call_set_arg (stmt1, 1, new_str_cst);
              gimple_call_set_arg (stmt1, 2,
                                   build_int_cst (TREE_TYPE (len1), src_len));
              update_stmt (stmt1);
              unlink_stmt_vdef (stmt2);
              gsi_replace (gsi_p, gimple_build_nop (), false);
              fwprop_invalidate_lattice (gimple_get_lhs (stmt2));
              release_defs (stmt2);
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                {
                  fwprop_invalidate_lattice (lhs1);
                  release_ssa_name (lhs1);
                }
              return true;
            }
          else
            {
              /* Otherwise, if STMT1 is length 1 memcpy optimized into
                 assignment, remove STMT1 and change memset call into
                 memcpy call.  */
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

              if (!is_gimple_val (ptr1))
                ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
                                                 true, GSI_SAME_STMT);
              tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCPY);
              gimple_call_set_fndecl (stmt2, fndecl);
              gimple_call_set_fntype (as_a <gcall *> (stmt2),
                                      TREE_TYPE (fndecl));
              gimple_call_set_arg (stmt2, 0, ptr1);
              gimple_call_set_arg (stmt2, 1, new_str_cst);
              gimple_call_set_arg (stmt2, 2,
                                   build_int_cst (TREE_TYPE (len2), src_len));
              unlink_stmt_vdef (stmt1);
              gsi_remove (&gsi, true);
              fwprop_invalidate_lattice (gimple_get_lhs (stmt1));
              release_defs (stmt1);
              update_stmt (stmt2);
              return false;
            }
        }
      break;
#define CASE_ATOMIC(NAME, OTHER, OP) \
    case BUILT_IN_##NAME##_1: \
    case BUILT_IN_##NAME##_2: \
    case BUILT_IN_##NAME##_4: \
    case BUILT_IN_##NAME##_8: \
    case BUILT_IN_##NAME##_16: \
      atomic_op = OP; \
      other_atomic \
        = (enum built_in_function) (BUILT_IN_##OTHER##_1 \
                                    + (DECL_FUNCTION_CODE (callee2) \
                                       - BUILT_IN_##NAME##_1)); \
      goto handle_atomic_fetch_op;

    CASE_ATOMIC (ATOMIC_FETCH_ADD, ATOMIC_ADD_FETCH, PLUS_EXPR)
    CASE_ATOMIC (ATOMIC_FETCH_SUB, ATOMIC_SUB_FETCH, MINUS_EXPR)
    CASE_ATOMIC (ATOMIC_FETCH_AND, ATOMIC_AND_FETCH, BIT_AND_EXPR)
    CASE_ATOMIC (ATOMIC_FETCH_XOR, ATOMIC_XOR_FETCH, BIT_XOR_EXPR)
    CASE_ATOMIC (ATOMIC_FETCH_OR, ATOMIC_OR_FETCH, BIT_IOR_EXPR)

    CASE_ATOMIC (SYNC_FETCH_AND_ADD, SYNC_ADD_AND_FETCH, PLUS_EXPR)
    CASE_ATOMIC (SYNC_FETCH_AND_SUB, SYNC_SUB_AND_FETCH, MINUS_EXPR)
    CASE_ATOMIC (SYNC_FETCH_AND_AND, SYNC_AND_AND_FETCH, BIT_AND_EXPR)
    CASE_ATOMIC (SYNC_FETCH_AND_XOR, SYNC_XOR_AND_FETCH, BIT_XOR_EXPR)
    CASE_ATOMIC (SYNC_FETCH_AND_OR, SYNC_OR_AND_FETCH, BIT_IOR_EXPR)

    CASE_ATOMIC (ATOMIC_ADD_FETCH, ATOMIC_FETCH_ADD, MINUS_EXPR)
    CASE_ATOMIC (ATOMIC_SUB_FETCH, ATOMIC_FETCH_SUB, PLUS_EXPR)
    CASE_ATOMIC (ATOMIC_XOR_FETCH, ATOMIC_FETCH_XOR, BIT_XOR_EXPR)

    CASE_ATOMIC (SYNC_ADD_AND_FETCH, SYNC_FETCH_AND_ADD, MINUS_EXPR)
    CASE_ATOMIC (SYNC_SUB_AND_FETCH, SYNC_FETCH_AND_SUB, PLUS_EXPR)
    CASE_ATOMIC (SYNC_XOR_AND_FETCH, SYNC_FETCH_AND_XOR, BIT_XOR_EXPR)

#undef CASE_ATOMIC

    handle_atomic_fetch_op:
      if (gimple_call_num_args (stmt2) >= 2 && gimple_call_lhs (stmt2))
        {
          tree lhs2 = gimple_call_lhs (stmt2), lhsc = lhs2;
          tree arg = gimple_call_arg (stmt2, 1);
          gimple *use_stmt, *cast_stmt = NULL;
          use_operand_p use_p;
          tree ndecl = builtin_decl_explicit (other_atomic);

          if (ndecl == NULL_TREE || !single_imm_use (lhs2, &use_p, &use_stmt))
            break;

          if (gimple_assign_cast_p (use_stmt))
            {
              cast_stmt = use_stmt;
              lhsc = gimple_assign_lhs (cast_stmt);
              if (lhsc == NULL_TREE
                  || !INTEGRAL_TYPE_P (TREE_TYPE (lhsc))
                  || (TYPE_PRECISION (TREE_TYPE (lhsc))
                      != TYPE_PRECISION (TREE_TYPE (lhs2)))
                  || !single_imm_use (lhsc, &use_p, &use_stmt))
                {
                  use_stmt = cast_stmt;
                  cast_stmt = NULL;
                  lhsc = lhs2;
                }
            }

          bool ok = false;
          tree oarg = NULL_TREE;
          enum tree_code ccode = ERROR_MARK;
          tree crhs1 = NULL_TREE, crhs2 = NULL_TREE;
          if (is_gimple_assign (use_stmt)
              && gimple_assign_rhs_code (use_stmt) == atomic_op)
            {
              if (gimple_assign_rhs1 (use_stmt) == lhsc)
                oarg = gimple_assign_rhs2 (use_stmt);
              else if (atomic_op != MINUS_EXPR)
                oarg = gimple_assign_rhs1 (use_stmt);
            }
          else if (atomic_op == MINUS_EXPR
                   && is_gimple_assign (use_stmt)
                   && gimple_assign_rhs_code (use_stmt) == PLUS_EXPR
                   && TREE_CODE (arg) == INTEGER_CST
                   && (TREE_CODE (gimple_assign_rhs2 (use_stmt))
                       == INTEGER_CST))
            {
              tree a = fold_convert (TREE_TYPE (lhs2), arg);
              tree o = fold_convert (TREE_TYPE (lhs2),
                                     gimple_assign_rhs2 (use_stmt));
              if (wi::to_wide (a) == wi::neg (wi::to_wide (o)))
                ok = true;
            }
          else if (atomic_op == BIT_AND_EXPR || atomic_op == BIT_IOR_EXPR)
            ;
          else if (gimple_code (use_stmt) == GIMPLE_COND)
            {
              ccode = gimple_cond_code (use_stmt);
              crhs1 = gimple_cond_lhs (use_stmt);
              crhs2 = gimple_cond_rhs (use_stmt);
            }
          else if (is_gimple_assign (use_stmt))
            {
              if (gimple_assign_rhs_class (use_stmt) == GIMPLE_BINARY_RHS)
                {
                  ccode = gimple_assign_rhs_code (use_stmt);
                  crhs1 = gimple_assign_rhs1 (use_stmt);
                  crhs2 = gimple_assign_rhs2 (use_stmt);
                }
              else if (gimple_assign_rhs_code (use_stmt) == COND_EXPR)
                {
                  tree cond = gimple_assign_rhs1 (use_stmt);
                  if (COMPARISON_CLASS_P (cond))
                    {
                      ccode = TREE_CODE (cond);
                      crhs1 = TREE_OPERAND (cond, 0);
                      crhs2 = TREE_OPERAND (cond, 1);
                    }
                }
            }
          if (ccode == EQ_EXPR || ccode == NE_EXPR)
            {
              /* Deal with x - y == 0 or x ^ y == 0
                 being optimized into x == y and x + cst == 0
                 into x == -cst.  */
              tree o = NULL_TREE;
              if (crhs1 == lhsc)
                o = crhs2;
              else if (crhs2 == lhsc)
                o = crhs1;
              if (o && atomic_op != PLUS_EXPR)
                oarg = o;
              else if (o
                       && TREE_CODE (o) == INTEGER_CST
                       && TREE_CODE (arg) == INTEGER_CST)
                {
                  tree a = fold_convert (TREE_TYPE (lhs2), arg);
                  o = fold_convert (TREE_TYPE (lhs2), o);
                  if (wi::to_wide (a) == wi::neg (wi::to_wide (o)))
                    ok = true;
                }
            }
          if (oarg && !ok)
            {
              if (operand_equal_p (arg, oarg, 0))
                ok = true;
              else if (TREE_CODE (arg) == SSA_NAME
                       && TREE_CODE (oarg) == SSA_NAME)
                {
                  tree oarg2 = oarg;
                  if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (oarg)))
                    {
                      gimple *g = SSA_NAME_DEF_STMT (oarg);
                      oarg2 = gimple_assign_rhs1 (g);
                      if (TREE_CODE (oarg2) != SSA_NAME
                          || !INTEGRAL_TYPE_P (TREE_TYPE (oarg2))
                          || (TYPE_PRECISION (TREE_TYPE (oarg2))
                              != TYPE_PRECISION (TREE_TYPE (oarg))))
                        oarg2 = oarg;
                    }
                  if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (arg)))
                    {
                      gimple *g = SSA_NAME_DEF_STMT (arg);
                      tree rhs1 = gimple_assign_rhs1 (g);
                      /* Handle e.g.
                         x.0_1 = (long unsigned int) x_4(D);
                         _2 = __atomic_fetch_add_8 (&vlong, x.0_1, 0);
                         _3 = (long int) _2;
                         _7 = x_4(D) + _3;  */
                      if (rhs1 == oarg || rhs1 == oarg2)
                        ok = true;
                      /* Handle e.g.
                         x.18_1 = (short unsigned int) x_5(D);
                         _2 = (int) x.18_1;
                         _3 = __atomic_fetch_xor_2 (&vshort, _2, 0);
                         _4 = (short int) _3;
                         _7 = x_5(D) ^ _4;
                         This happens only for char/short.  */
                      else if (TREE_CODE (rhs1) == SSA_NAME
                               && INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
                               && (TYPE_PRECISION (TREE_TYPE (rhs1))
                                   == TYPE_PRECISION (TREE_TYPE (lhs2))))
                        {
                          g = SSA_NAME_DEF_STMT (rhs1);
                          if (gimple_assign_cast_p (g)
                              && (gimple_assign_rhs1 (g) == oarg
                                  || gimple_assign_rhs1 (g) == oarg2))
                            ok = true;
                        }
                    }
                  if (!ok && arg == oarg2)
                    /* Handle e.g.
                       _1 = __sync_fetch_and_add_4 (&v, x_5(D));
                       _2 = (int) _1;
                       x.0_3 = (int) x_5(D);
                       _7 = _2 + x.0_3;  */
                    ok = true;
                }
            }

          if (ok)
            {
              tree new_lhs = make_ssa_name (TREE_TYPE (lhs2));
              gimple_call_set_lhs (stmt2, new_lhs);
              gimple_call_set_fndecl (stmt2, ndecl);
              gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
              if (ccode == ERROR_MARK)
                gimple_assign_set_rhs_with_ops (&gsi, cast_stmt
                                                      ? NOP_EXPR : SSA_NAME,
                                                new_lhs);
              else
                {
                  crhs1 = new_lhs;
                  crhs2 = build_zero_cst (TREE_TYPE (lhs2));
                  if (gimple_code (use_stmt) == GIMPLE_COND)
                    {
                      gcond *cond_stmt = as_a <gcond *> (use_stmt);
                      gimple_cond_set_lhs (cond_stmt, crhs1);
                      gimple_cond_set_rhs (cond_stmt, crhs2);
                    }
                  else if (gimple_assign_rhs_class (use_stmt)
                           == GIMPLE_BINARY_RHS)
                    {
                      gimple_assign_set_rhs1 (use_stmt, crhs1);
                      gimple_assign_set_rhs2 (use_stmt, crhs2);
                    }
                  else
                    {
                      gcc_checking_assert (gimple_assign_rhs_code (use_stmt)
                                           == COND_EXPR);
                      tree cond = build2 (ccode, boolean_type_node,
                                          crhs1, crhs2);
                      gimple_assign_set_rhs1 (use_stmt, cond);
                    }
                }
              update_stmt (use_stmt);
              if (atomic_op != BIT_AND_EXPR
                  && atomic_op != BIT_IOR_EXPR
                  && !stmt_ends_bb_p (stmt2))
                {
                  /* For the benefit of debug stmts, emit stmt(s) to set
                     lhs2 to the value it had from the new builtin.
                     E.g. if it was previously:
                     lhs2 = __atomic_fetch_add_8 (ptr, arg, 0);
                     emit:
                     new_lhs = __atomic_add_fetch_8 (ptr, arg, 0);
                     lhs2 = new_lhs - arg;
                     We also keep cast_stmt if any in the IL for
                     the same reasons.
                     These stmts will be DCEd later and proper debug info
                     will be emitted.
                     This is only possible for reversible operations
                     (+/-/^) and without -fnon-call-exceptions.  */
                  gsi = gsi_for_stmt (stmt2);
                  tree type = TREE_TYPE (lhs2);
                  if (TREE_CODE (arg) == INTEGER_CST)
                    arg = fold_convert (type, arg);
                  else if (!useless_type_conversion_p (type, TREE_TYPE (arg)))
                    {
                      tree narg = make_ssa_name (type);
                      gimple *g = gimple_build_assign (narg, NOP_EXPR, arg);
                      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
                      arg = narg;
                    }
                  enum tree_code rcode;
                  switch (atomic_op)
                    {
                    case PLUS_EXPR: rcode = MINUS_EXPR; break;
                    case MINUS_EXPR: rcode = PLUS_EXPR; break;
                    case BIT_XOR_EXPR: rcode = atomic_op; break;
                    default: gcc_unreachable ();
                    }
                  gimple *g = gimple_build_assign (lhs2, rcode, new_lhs, arg);
                  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
                  update_stmt (stmt2);
                }
              else
                {
                  /* For e.g.
                     lhs2 = __atomic_fetch_or_8 (ptr, arg, 0);
                     after we change it to
                     new_lhs = __atomic_or_fetch_8 (ptr, arg, 0);
                     there is no way to find out the lhs2 value (i.e.
                     what the atomic memory contained before the operation),
                     values of some bits are lost.  We have checked earlier
                     that we don't have any non-debug users except for what
                     we are already changing, so we need to reset the
                     debug stmts and remove the cast_stmt if any.  */
                  imm_use_iterator iter;
                  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs2)
                    if (use_stmt != cast_stmt)
                      {
                        gcc_assert (is_gimple_debug (use_stmt));
                        gimple_debug_bind_reset_value (use_stmt);
                        update_stmt (use_stmt);
                      }
                  if (cast_stmt)
                    {
                      gsi = gsi_for_stmt (cast_stmt);
                      gsi_remove (&gsi, true);
                    }
                  update_stmt (stmt2);
                  release_ssa_name (lhs2);
                }
            }
        }
      break;

    default:
      break;
    }
  return false;
}
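/* Illustrative sketch (added for exposition) of the atomic
   canonicalization above: the pair

     _1 = __atomic_fetch_add_4 (&v, x_2, 0);
     _3 = _1 + x_2;

   is rewritten to use _3 = __atomic_add_fetch_4 (&v, x_2, 0); the old
   fetch value is rematerialized as _1 = _3 - x_2 so that debug info
   survives until the dead statement is removed.  */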
/* Given a ssa_name in NAME see if it was defined by an assignment and
   set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
   to the second operand on the rhs.  */

static inline void
defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
{
  gimple *def;
  enum tree_code code1;
  tree arg11;
  tree arg21;
  tree arg31;
  enum gimple_rhs_class grhs_class;

  code1 = TREE_CODE (name);
  arg11 = name;
  arg21 = NULL_TREE;
  arg31 = NULL_TREE;
  grhs_class = get_gimple_rhs_class (code1);

  if (code1 == SSA_NAME)
    {
      def = SSA_NAME_DEF_STMT (name);

      if (def && is_gimple_assign (def)
          && can_propagate_from (def))
        {
          code1 = gimple_assign_rhs_code (def);
          arg11 = gimple_assign_rhs1 (def);
          arg21 = gimple_assign_rhs2 (def);
          arg31 = gimple_assign_rhs3 (def);
        }
    }
  else if (grhs_class != GIMPLE_SINGLE_RHS)
    code1 = ERROR_MARK;

  *code = code1;
  *arg1 = arg11;
  if (arg2)
    *arg2 = arg21;
  if (arg31)
    *code = ERROR_MARK;
}
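/* Illustrative sketch (added for exposition): for t_1 defined by
   t_1 = a_2 << 3, defcodefor_name sets *CODE to LSHIFT_EXPR, *ARG1 to
   a_2 and *ARG2 to 3; for an SSA name with no usable definition *CODE
   stays SSA_NAME and *ARG1 is the name itself.  */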
/* Recognize rotation patterns.  Return true if a transformation
   applied, otherwise return false.

   We are looking for X with unsigned type T with bitsize B, OP being
   +, | or ^, some type T2 wider than T.  For:
   (X << CNT1) OP (X >> CNT2)				iff CNT1 + CNT2 == B
   ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2))	iff CNT1 + CNT2 == B

   transform these into:
   X r<< CNT1

   Or for:
   (X << Y) OP (X >> (B - Y))
   (X << (int) Y) OP (X >> (int) (B - Y))
   ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
   ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
   (X << Y) | (X >> ((-Y) & (B - 1)))
   (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
   ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
   ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))

   transform these into:
   X r<< Y

   Or for:
   (X << (Y & (B - 1))) | (X >> ((-Y) & (B - 1)))
   (X << (int) (Y & (B - 1))) | (X >> (int) ((-Y) & (B - 1)))
   ((T) ((T2) X << (Y & (B - 1)))) | ((T) ((T2) X >> ((-Y) & (B - 1))))
   ((T) ((T2) X << (int) (Y & (B - 1)))) \
     | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))

   transform these into:
   X r<< (Y & (B - 1))

   Note, in the patterns with T2 type, the type of OP operands
   might be even a signed type, but should have precision B.
   Expressions with & (B - 1) should be recognized only if B is
   a power of 2.  */
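/* Illustrative sketch (added for exposition): source code such as

     unsigned int
     rotl (unsigned int x, unsigned int y)
     {
       return (x << (y & 31)) | (x >> ((-y) & 31));
     }

   matches the last pattern above for B == 32 and is turned into a
   single rotate, which is well defined even for y == 0.  */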
static bool
simplify_rotate (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg[2], rtype, rotcnt = NULL_TREE;
  tree def_arg1[2], def_arg2[2];
  enum tree_code def_code[2];
  tree lhs;
  int i;
  bool swapped_p = false;
  gimple *g;

  arg[0] = gimple_assign_rhs1 (stmt);
  arg[1] = gimple_assign_rhs2 (stmt);
  rtype = TREE_TYPE (arg[0]);

  /* Only create rotates in complete modes.  Other cases are not
     expanded properly.  */
  if (!INTEGRAL_TYPE_P (rtype)
      || !type_has_mode_precision_p (rtype))
    return false;

  for (i = 0; i < 2; i++)
    defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);

  /* Look through narrowing (or same precision) conversions.  */
  if (CONVERT_EXPR_CODE_P (def_code[0])
      && CONVERT_EXPR_CODE_P (def_code[1])
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
         == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) >= TYPE_PRECISION (rtype)
      && has_single_use (arg[0])
      && has_single_use (arg[1]))
    {
      for (i = 0; i < 2; i++)
        {
          arg[i] = def_arg1[i];
          defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
        }
    }
  else
    {
      /* Handle signed rotate; the RSHIFT_EXPR has to be done
         in unsigned type but LSHIFT_EXPR could be signed.  */
      i = (def_code[0] == LSHIFT_EXPR || def_code[0] == RSHIFT_EXPR);
      if (CONVERT_EXPR_CODE_P (def_code[i])
          && (def_code[1 - i] == LSHIFT_EXPR || def_code[1 - i] == RSHIFT_EXPR)
          && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[i]))
          && TYPE_PRECISION (rtype) == TYPE_PRECISION (TREE_TYPE (def_arg1[i]))
          && has_single_use (arg[i]))
        {
          arg[i] = def_arg1[i];
          defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
        }
    }

  /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR.  */
  for (i = 0; i < 2; i++)
    if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
      return false;
    else if (!has_single_use (arg[i]))
      return false;
  if (def_code[0] == def_code[1])
    return false;

  /* If we've looked through narrowing conversions before, look through
     widening conversions from unsigned type with the same precision
     as rtype here.  */
  if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
    for (i = 0; i < 2; i++)
      {
        tree tem;
        enum tree_code code;
        defcodefor_name (def_arg1[i], &code, &tem, NULL);
        if (!CONVERT_EXPR_CODE_P (code)
            || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
            || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
          return false;
        def_arg1[i] = tem;
      }
  /* Both shifts have to use the same first operand.  */
  if (!operand_equal_for_phi_arg_p (def_arg1[0], def_arg1[1])
      || !types_compatible_p (TREE_TYPE (def_arg1[0]),
                              TREE_TYPE (def_arg1[1])))
    {
      if ((TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
           != TYPE_PRECISION (TREE_TYPE (def_arg1[1])))
          || (TYPE_UNSIGNED (TREE_TYPE (def_arg1[0]))
              == TYPE_UNSIGNED (TREE_TYPE (def_arg1[1]))))
        return false;

      /* Handle signed rotate; the RSHIFT_EXPR has to be done
         in unsigned type but LSHIFT_EXPR could be signed.  */
      i = def_code[0] != RSHIFT_EXPR;
      if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[i])))
        return false;

      tree tem;
      enum tree_code code;
      defcodefor_name (def_arg1[i], &code, &tem, NULL);
      if (!CONVERT_EXPR_CODE_P (code)
          || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
          || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
        return false;
      def_arg1[i] = tem;
      if (!operand_equal_for_phi_arg_p (def_arg1[0], def_arg1[1])
          || !types_compatible_p (TREE_TYPE (def_arg1[0]),
                                  TREE_TYPE (def_arg1[1])))
        return false;
    }
  else if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
    return false;

  /* CNT1 + CNT2 == B case above.  */
  if (tree_fits_uhwi_p (def_arg2[0])
      && tree_fits_uhwi_p (def_arg2[1])
      && tree_to_uhwi (def_arg2[0])
         + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
    rotcnt = def_arg2[0];
  else if (TREE_CODE (def_arg2[0]) != SSA_NAME
           || TREE_CODE (def_arg2[1]) != SSA_NAME)
    return false;
  else
    {
      tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
      enum tree_code cdef_code[2];
      /* Look through conversion of the shift count argument.
         The C/C++ FE cast any shift count argument to integer_type_node.
         The only problem might be if the shift count type maximum value
         is equal or smaller than number of bits in rtype.  */
      for (i = 0; i < 2; i++)
        {
          def_arg2_alt[i] = def_arg2[i];
          defcodefor_name (def_arg2[i], &cdef_code[i],
                           &cdef_arg1[i], &cdef_arg2[i]);
          if (CONVERT_EXPR_CODE_P (cdef_code[i])
              && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
              && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
                 > floor_log2 (TYPE_PRECISION (rtype))
              && type_has_mode_precision_p (TREE_TYPE (cdef_arg1[i])))
            {
              def_arg2_alt[i] = cdef_arg1[i];
              defcodefor_name (def_arg2_alt[i], &cdef_code[i],
                               &cdef_arg1[i], &cdef_arg2[i]);
            }
        }
      for (i = 0; i < 2; i++)
        /* Check for one shift count being Y and the other B - Y,
           with optional casts.  */
        if (cdef_code[i] == MINUS_EXPR
            && tree_fits_shwi_p (cdef_arg1[i])
            && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
            && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
          {
            tree tem;
            enum tree_code code;

            if (cdef_arg2[i] == def_arg2[1 - i]
                || cdef_arg2[i] == def_arg2_alt[1 - i])
              {
                rotcnt = cdef_arg2[i];
                break;
              }
            defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
            if (CONVERT_EXPR_CODE_P (code)
                && INTEGRAL_TYPE_P (TREE_TYPE (tem))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   > floor_log2 (TYPE_PRECISION (rtype))
                && type_has_mode_precision_p (TREE_TYPE (tem))
                && (tem == def_arg2[1 - i]
                    || tem == def_arg2_alt[1 - i]))
              {
                rotcnt = tem;
                break;
              }
          }
        /* The above sequence isn't safe for Y being 0,
           because then one of the shifts triggers undefined behavior.
           This alternative is safe even for rotation count of 0.
           One shift count is Y and the other (-Y) & (B - 1).
           Or one shift count is Y & (B - 1) and the other (-Y) & (B - 1).  */
        else if (cdef_code[i] == BIT_AND_EXPR
                 && pow2p_hwi (TYPE_PRECISION (rtype))
                 && tree_fits_shwi_p (cdef_arg2[i])
                 && tree_to_shwi (cdef_arg2[i])
                    == TYPE_PRECISION (rtype) - 1
                 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
                 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
          {
            tree tem;
            enum tree_code code;

            defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
            if (CONVERT_EXPR_CODE_P (code)
                && INTEGRAL_TYPE_P (TREE_TYPE (tem))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   > floor_log2 (TYPE_PRECISION (rtype))
                && type_has_mode_precision_p (TREE_TYPE (tem)))
              defcodefor_name (tem, &code, &tem, NULL);

            if (code == NEGATE_EXPR)
              {
                if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
                  {
                    rotcnt = tem;
                    break;
                  }
                tree tem2;
                defcodefor_name (tem, &code, &tem2, NULL);
                if (CONVERT_EXPR_CODE_P (code)
                    && INTEGRAL_TYPE_P (TREE_TYPE (tem2))
                    && TYPE_PRECISION (TREE_TYPE (tem2))
                       > floor_log2 (TYPE_PRECISION (rtype))
                    && type_has_mode_precision_p (TREE_TYPE (tem2)))
                  {
                    if (tem2 == def_arg2[1 - i]
                        || tem2 == def_arg2_alt[1 - i])
                      {
                        rotcnt = tem2;
                        break;
                      }
                  }

                if (cdef_code[1 - i] == BIT_AND_EXPR
                    && tree_fits_shwi_p (cdef_arg2[1 - i])
                    && tree_to_shwi (cdef_arg2[1 - i])
                       == TYPE_PRECISION (rtype) - 1
                    && TREE_CODE (cdef_arg1[1 - i]) == SSA_NAME)
                  {
                    if (tem == cdef_arg1[1 - i]
                        || tem2 == cdef_arg1
[1 - i
])
2094 rotcnt
= def_arg2
[1 - i
];
2098 defcodefor_name (cdef_arg1
[1 - i
], &code
, &tem3
, NULL
);
2099 if (CONVERT_EXPR_CODE_P (code
)
2100 && INTEGRAL_TYPE_P (TREE_TYPE (tem3
))
2101 && TYPE_PRECISION (TREE_TYPE (tem3
))
2102 > floor_log2 (TYPE_PRECISION (rtype
))
2103 && type_has_mode_precision_p (TREE_TYPE (tem3
)))
2105 if (tem
== tem3
|| tem2
== tem3
)
2107 rotcnt
= def_arg2
[1 - i
];
2114 if (rotcnt
== NULL_TREE
)
2119 if (!useless_type_conversion_p (TREE_TYPE (def_arg2
[0]),
2120 TREE_TYPE (rotcnt
)))
2122 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (def_arg2
[0])),
2124 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
2125 rotcnt
= gimple_assign_lhs (g
);
2127 lhs
= gimple_assign_lhs (stmt
);
2128 if (!useless_type_conversion_p (rtype
, TREE_TYPE (def_arg1
[0])))
2129 lhs
= make_ssa_name (TREE_TYPE (def_arg1
[0]));
2130 g
= gimple_build_assign (lhs
,
2131 ((def_code
[0] == LSHIFT_EXPR
) ^ swapped_p
)
2132 ? LROTATE_EXPR
: RROTATE_EXPR
, def_arg1
[0], rotcnt
);
2133 if (!useless_type_conversion_p (rtype
, TREE_TYPE (def_arg1
[0])))
2135 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
2136 g
= gimple_build_assign (gimple_assign_lhs (stmt
), NOP_EXPR
, lhs
);
2138 gsi_replace (gsi
, g
, false);
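/* As an illustration (hypothetical user code, not taken from the sources
   or testsuite), the zero-safe rotation idiom covered by the BIT_AND_EXPR /
   NEGATE_EXPR cases above is

     unsigned int
     rotl32 (unsigned int x, unsigned int n)
     {
       return (x << (n & 31)) | (x >> ((-n) & 31));
     }

   which, assuming a 32-bit unsigned int, is rewritten into a single
   LROTATE_EXPR of x by n.  */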
/* Check whether an array contains a valid ctz table.  */
static bool
check_ctz_array (tree ctor, unsigned HOST_WIDE_INT mulc,
		 HOST_WIDE_INT &zero_val, unsigned shift, unsigned bits)
{
  tree elt, idx;
  unsigned HOST_WIDE_INT i, mask;
  unsigned matched = 0;

  mask = ((HOST_WIDE_INT_1U << (bits - shift)) - 1) << shift;

  zero_val = 0;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), i, idx, elt)
    {
      if (TREE_CODE (idx) != INTEGER_CST || TREE_CODE (elt) != INTEGER_CST)
	return false;
      if (i > bits * 2)
	return false;

      unsigned HOST_WIDE_INT index = tree_to_shwi (idx);
      HOST_WIDE_INT val = tree_to_shwi (elt);

      if (index == 0)
	{
	  zero_val = val;
	  matched++;
	}

      if (val >= 0 && val < bits && (((mulc << val) & mask) >> shift) == index)
	matched++;

      if (matched > bits)
	return true;
    }

  return false;
}
/* Check whether a string contains a valid ctz table.  */
static bool
check_ctz_string (tree string, unsigned HOST_WIDE_INT mulc,
		  HOST_WIDE_INT &zero_val, unsigned shift, unsigned bits)
{
  unsigned HOST_WIDE_INT len = TREE_STRING_LENGTH (string);
  unsigned HOST_WIDE_INT mask;
  unsigned matched = 0;
  const unsigned char *p = (const unsigned char *) TREE_STRING_POINTER (string);

  if (len < bits || len > bits * 2)
    return false;

  mask = ((HOST_WIDE_INT_1U << (bits - shift)) - 1) << shift;

  zero_val = p[0];

  for (unsigned i = 0; i < len; i++)
    if (p[i] < bits && (((mulc << p[i]) & mask) >> shift) == i)
      matched++;

  return matched == bits;
}
/* Recognize count trailing zeroes idiom.
   The canonical form is array[((x & -x) * C) >> SHIFT] where C is a magic
   constant which when multiplied by a power of 2 creates a unique value
   in the top 5 or 6 bits.  This is then indexed into a table which maps it
   to the number of trailing zeroes.  Array[0] is returned so the caller can
   emit an appropriate sequence depending on whether ctz (0) is defined on
   the target.  */
static bool
optimize_count_trailing_zeroes (tree array_ref, tree x, tree mulc,
				tree tshift, HOST_WIDE_INT &zero_val)
{
  tree type = TREE_TYPE (array_ref);
  tree array = TREE_OPERAND (array_ref, 0);

  gcc_assert (TREE_CODE (mulc) == INTEGER_CST);
  gcc_assert (TREE_CODE (tshift) == INTEGER_CST);

  tree input_type = TREE_TYPE (x);
  unsigned input_bits = tree_to_shwi (TYPE_SIZE (input_type));

  /* Check the array element type is not wider than 32 bits and the input is
     an unsigned 32-bit or 64-bit type.  */
  if (TYPE_PRECISION (type) > 32 || !TYPE_UNSIGNED (input_type))
    return false;
  if (input_bits != 32 && input_bits != 64)
    return false;

  if (!direct_internal_fn_supported_p (IFN_CTZ, input_type, OPTIMIZE_FOR_BOTH))
    return false;

  /* Check the lower bound of the array is zero.  */
  tree low = array_ref_low_bound (array_ref);
  if (!low || !integer_zerop (low))
    return false;

  unsigned shiftval = tree_to_shwi (tshift);

  /* Check the shift extracts the top 5..7 bits.  */
  if (shiftval < input_bits - 7 || shiftval > input_bits - 5)
    return false;

  tree ctor = ctor_for_folding (array);
  if (!ctor)
    return false;

  unsigned HOST_WIDE_INT val = tree_to_uhwi (mulc);

  if (TREE_CODE (ctor) == CONSTRUCTOR)
    return check_ctz_array (ctor, val, zero_val, shiftval, input_bits);

  if (TREE_CODE (ctor) == STRING_CST
      && TYPE_PRECISION (type) == CHAR_TYPE_SIZE)
    return check_ctz_string (ctor, val, zero_val, shiftval, input_bits);

  return false;
}
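/* For illustration (a hypothetical source idiom, not from the sources or
   testsuite), the classic 32-bit de Bruijn variant of the idiom matched
   above looks like

     static const char table[32] = { 0, 1, 28, 2, 29, 14, 24, 3, ... };

     int ctz32 (unsigned int x)
     {
       return table[((x & -x) * 0x077CB531u) >> 27];
     }

   Here the magic constant C is 0x077CB531 and SHIFT is 27, so the top
   5 bits of the product index the 32-entry table.  */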
/* Match.pd function to match the ctz expression.  */
extern bool gimple_ctz_table_index (tree, tree *, tree (*)(tree));

static bool
simplify_count_trailing_zeroes (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree array_ref = gimple_assign_rhs1 (stmt);
  tree res_ops[3];
  HOST_WIDE_INT zero_val;

  gcc_checking_assert (TREE_CODE (array_ref) == ARRAY_REF);

  if (!gimple_ctz_table_index (TREE_OPERAND (array_ref, 1), &res_ops[0], NULL))
    return false;

  if (optimize_count_trailing_zeroes (array_ref, res_ops[0],
				      res_ops[1], res_ops[2], zero_val))
    {
      tree type = TREE_TYPE (res_ops[0]);
      HOST_WIDE_INT ctz_val = 0;
      HOST_WIDE_INT type_size = tree_to_shwi (TYPE_SIZE (type));
      bool zero_ok
	= CTZ_DEFINED_VALUE_AT_ZERO (SCALAR_INT_TYPE_MODE (type), ctz_val) == 2;

      /* If the input value can't be zero, don't special case ctz (0).  */
      if (tree_expr_nonzero_p (res_ops[0]))
	{
	  zero_ok = true;
	  zero_val = 0;
	  ctz_val = 0;
	}

      /* Skip if there is no value defined at zero, or if we can't easily
	 return the correct value for zero.  */
      if (!zero_ok)
	return false;
      if (zero_val != ctz_val && !(zero_val == 0 && ctz_val == type_size))
	return false;

      gimple_seq seq = NULL;
      gimple *g;
      gcall *call = gimple_build_call_internal (IFN_CTZ, 1, res_ops[0]);
      gimple_set_location (call, gimple_location (stmt));
      gimple_set_lhs (call, make_ssa_name (integer_type_node));
      gimple_seq_add_stmt (&seq, call);

      tree prev_lhs = gimple_call_lhs (call);

      /* Emit ctz (x) & 31 if ctz (0) is 32 but we need to return 0.  */
      if (zero_val == 0 && ctz_val == type_size)
	{
	  g = gimple_build_assign (make_ssa_name (integer_type_node),
				   BIT_AND_EXPR, prev_lhs,
				   build_int_cst (integer_type_node,
						  ctz_val - 1));
	  gimple_set_location (g, gimple_location (stmt));
	  gimple_seq_add_stmt (&seq, g);
	  prev_lhs = gimple_assign_lhs (g);
	}

      g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, prev_lhs);
      gimple_seq_add_stmt (&seq, g);
      gsi_replace_with_seq (gsi, seq, true);
      return true;
    }

  return false;
}
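/* Sketch of the replacement built above (illustrative GIMPLE with invented
   SSA names): for a 32-bit input whose table maps index 0 to 0 while the
   target defines ctz (0) as 32, the emitted sequence is roughly

     _1 = .CTZ (x_2);
     _3 = _1 & 31;
     lhs_4 = (lhs_type) _3;

   where the BIT_AND_EXPR folds the out-of-range ctz (0) == 32 back to the
   table's zero_val of 0.  */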
/* Combine an element access with a shuffle.  Returns true if there were
   any changes made, else it returns false.  */

static bool
simplify_bitfield_ref (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple *def_stmt;
  tree op, op0, op1;
  tree elem_type, type;
  tree p, m, tem;
  unsigned HOST_WIDE_INT nelts, idx;
  poly_uint64 size, elem_size;
  enum tree_code code;

  op = gimple_assign_rhs1 (stmt);
  gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);

  op0 = TREE_OPERAND (op, 0);
  if (TREE_CODE (op0) != SSA_NAME
      || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
    return false;

  def_stmt = get_prop_source_stmt (op0, false, NULL);
  if (!def_stmt || !can_propagate_from (def_stmt))
    return false;

  op1 = TREE_OPERAND (op, 1);
  code = gimple_assign_rhs_code (def_stmt);
  elem_type = TREE_TYPE (TREE_TYPE (op0));
  type = TREE_TYPE (op);
  /* Also handle vector type.
     i.e.
     _7 = VEC_PERM_EXPR <_1, _1, { 2, 3, 2, 3 }>;
     _11 = BIT_FIELD_REF <_7, 64, 0>;

     to

     _11 = BIT_FIELD_REF <_1, 64, 64>.  */

  size = tree_to_poly_uint64 (TYPE_SIZE (type));
  if (maybe_ne (bit_field_size (op), size))
    return false;

  elem_size = tree_to_poly_uint64 (TYPE_SIZE (elem_type));
  if (code != VEC_PERM_EXPR
      || !constant_multiple_p (bit_field_offset (op), elem_size, &idx))
    return false;

  m = gimple_assign_rhs3 (def_stmt);
  if (TREE_CODE (m) != VECTOR_CST
      || !VECTOR_CST_NELTS (m).is_constant (&nelts))
    return false;

  if (known_eq (size, elem_size))
    idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx)) % (2 * nelts);
  else
    {
      unsigned HOST_WIDE_INT nelts_op;
      if (!constant_multiple_p (size, elem_size, &nelts_op)
	  || !pow2p_hwi (nelts_op))
	return false;
      /* Clamp vec_perm_expr index.  */
      unsigned start = TREE_INT_CST_LOW (vector_cst_elt (m, idx)) % (2 * nelts);
      unsigned end = TREE_INT_CST_LOW (vector_cst_elt (m, idx + nelts_op - 1))
		     % (2 * nelts);
      /* Be in the same vector.  */
      if ((start < nelts) != (end < nelts))
	return false;
      for (unsigned HOST_WIDE_INT i = 1; i != nelts_op; i++)
	{
	  /* Continuous area.  */
	  if (TREE_INT_CST_LOW (vector_cst_elt (m, idx + i)) % (2 * nelts) - 1
	      != TREE_INT_CST_LOW (vector_cst_elt (m, idx + i - 1))
		 % (2 * nelts))
	    return false;
	}
      /* Alignment not worse than before.  */
      if (start % nelts_op)
	return false;
      idx = start;
    }

  if (idx < nelts)
    p = gimple_assign_rhs1 (def_stmt);
  else
    {
      p = gimple_assign_rhs2 (def_stmt);
      idx -= nelts;
    }

  tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
		p, op1, bitsize_int (idx * elem_size));
  gimple_assign_set_rhs1 (stmt, tem);
  fold_stmt (gsi);
  update_stmt (gsi_stmt (*gsi));
  return true;
}
/* Determine whether applying the 2 permutations (mask1 then mask2)
   gives back one of the inputs.  */

static int
is_combined_permutation_identity (tree mask1, tree mask2)
{
  tree mask;
  unsigned HOST_WIDE_INT nelts, i, j;
  bool maybe_identity1 = true;
  bool maybe_identity2 = true;

  gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
		       && TREE_CODE (mask2) == VECTOR_CST);
  mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
  if (mask == NULL_TREE || TREE_CODE (mask) != VECTOR_CST)
    return 0;

  if (!VECTOR_CST_NELTS (mask).is_constant (&nelts))
    return 0;
  for (i = 0; i < nelts; i++)
    {
      tree val = VECTOR_CST_ELT (mask, i);
      gcc_assert (TREE_CODE (val) == INTEGER_CST);
      j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
      if (j == i)
	maybe_identity2 = false;
      else if (j == i + nelts)
	maybe_identity1 = false;
      else
	return 0;
    }
  return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
}
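/* Illustrative example (hypothetical masks): with four elements and
   mask1 = mask2 = { 1, 0, 3, 2 }, the composition mask1[mask2[i]] is
   { 0, 1, 2, 3 }, i.e. the identity on the first input, so the function
   returns 1 and the caller can replace the outer shuffle by the first
   operand of the inner one.  */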
/* Combine a shuffle with its arguments.  Returns 1 if there were any
   changes made, 2 if cfg-cleanup needs to run.  Else it returns 0.  */

static int
simplify_permutation (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple *def_stmt = NULL;
  tree op0, op1, op2, op3, arg0, arg1;
  enum tree_code code, code2 = ERROR_MARK;
  bool single_use_op0 = false;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);

  op0 = gimple_assign_rhs1 (stmt);
  op1 = gimple_assign_rhs2 (stmt);
  op2 = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (op2) != VECTOR_CST)
    return 0;

  if (TREE_CODE (op0) == VECTOR_CST)
    {
      code = VECTOR_CST;
      arg0 = op0;
    }
  else if (TREE_CODE (op0) == SSA_NAME)
    {
      def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
      if (!def_stmt)
	return 0;
      code = gimple_assign_rhs_code (def_stmt);
      if (code == VIEW_CONVERT_EXPR)
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  tree name = TREE_OPERAND (rhs, 0);
	  if (TREE_CODE (name) != SSA_NAME)
	    return 0;
	  if (!has_single_use (name))
	    single_use_op0 = false;
	  /* Here we update the def_stmt through this VIEW_CONVERT_EXPR,
	     but still keep the code to indicate it comes from
	     VIEW_CONVERT_EXPR.  */
	  def_stmt = SSA_NAME_DEF_STMT (name);
	  if (!def_stmt || !is_gimple_assign (def_stmt))
	    return 0;
	  if (gimple_assign_rhs_code (def_stmt) != CONSTRUCTOR)
	    return 0;
	}
      if (!can_propagate_from (def_stmt))
	return 0;
      arg0 = gimple_assign_rhs1 (def_stmt);
    }
  else
    return 0;

  /* Two consecutive shuffles.  */
  if (code == VEC_PERM_EXPR)
    {
      tree orig;
      int ident;

      if (op0 != op1)
	return 0;
      op3 = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (op3) != VECTOR_CST)
	return 0;
      ident = is_combined_permutation_identity (op3, op2);
      if (!ident)
	return 0;
      orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
			  : gimple_assign_rhs2 (def_stmt);
      gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
      gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
      gimple_set_num_ops (stmt, 2);
      update_stmt (stmt);
      return remove_prop_source_from_use (op0) ? 2 : 1;
    }
  else if (code == CONSTRUCTOR
	   || code == VECTOR_CST
	   || code == VIEW_CONVERT_EXPR)
    {
      if (op0 != op1)
	{
	  if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
	    return 0;

	  if (TREE_CODE (op1) == VECTOR_CST)
	    arg1 = op1;
	  else if (TREE_CODE (op1) == SSA_NAME)
	    {
	      gimple *def_stmt2 = get_prop_source_stmt (op1, true, NULL);
	      if (!def_stmt2)
		return 0;
	      code2 = gimple_assign_rhs_code (def_stmt2);
	      if (code2 == VIEW_CONVERT_EXPR)
		{
		  tree rhs = gimple_assign_rhs1 (def_stmt2);
		  tree name = TREE_OPERAND (rhs, 0);
		  if (TREE_CODE (name) != SSA_NAME)
		    return 0;
		  if (!has_single_use (name))
		    return 0;
		  def_stmt2 = SSA_NAME_DEF_STMT (name);
		  if (!def_stmt2 || !is_gimple_assign (def_stmt2))
		    return 0;
		  if (gimple_assign_rhs_code (def_stmt2) != CONSTRUCTOR)
		    return 0;
		}
	      else if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
		return 0;
	      if (!can_propagate_from (def_stmt2))
		return 0;
	      arg1 = gimple_assign_rhs1 (def_stmt2);
	    }
	  else
	    return 0;
	}
      else
	{
	  /* Already used twice in this statement.  */
	  if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
	    return 0;
	  arg1 = arg0;
	}

      /* If there are any VIEW_CONVERT_EXPRs found when finding the
	 permutation operands' source, check whether it's valid to transform
	 and prepare the required new operands.  */
      if (code == VIEW_CONVERT_EXPR || code2 == VIEW_CONVERT_EXPR)
	{
	  /* Figure out the target vector type to which operands should be
	     converted.  If both are CONSTRUCTOR, the types should be the
	     same, otherwise, use the one of CONSTRUCTOR.  */
	  tree tgt_type = NULL_TREE;
	  if (code == VIEW_CONVERT_EXPR)
	    {
	      gcc_assert (gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR);
	      code = CONSTRUCTOR;
	      tgt_type = TREE_TYPE (arg0);
	    }
	  if (code2 == VIEW_CONVERT_EXPR)
	    {
	      tree arg1_type = TREE_TYPE (arg1);
	      if (tgt_type == NULL_TREE)
		tgt_type = arg1_type;
	      else if (tgt_type != arg1_type)
		return 0;
	    }

	  if (!VECTOR_TYPE_P (tgt_type))
	    return 0;
	  tree op2_type = TREE_TYPE (op2);

	  /* Figure out the shrunk factor.  */
	  poly_uint64 tgt_units = TYPE_VECTOR_SUBPARTS (tgt_type);
	  poly_uint64 op2_units = TYPE_VECTOR_SUBPARTS (op2_type);
	  if (maybe_gt (tgt_units, op2_units))
	    return 0;
	  unsigned int factor;
	  if (!constant_multiple_p (op2_units, tgt_units, &factor))
	    return 0;

	  /* Build the new permutation control vector as target vector.  */
	  vec_perm_builder builder;
	  if (!tree_to_vec_perm_builder (&builder, op2))
	    return 0;
	  vec_perm_indices indices (builder, 2, op2_units);
	  vec_perm_indices new_indices;
	  if (new_indices.new_shrunk_vector (indices, factor))
	    {
	      tree mask_type = tgt_type;
	      if (!VECTOR_INTEGER_TYPE_P (mask_type))
		{
		  tree elem_type = TREE_TYPE (mask_type);
		  unsigned elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
		  tree int_type = build_nonstandard_integer_type (elem_size, 0);
		  mask_type = build_vector_type (int_type, tgt_units);
		}
	      op2 = vec_perm_indices_to_tree (mask_type, new_indices);
	    }
	  else
	    return 0;

	  /* Convert the VECTOR_CST to the appropriate vector type.  */
	  if (tgt_type != TREE_TYPE (arg0))
	    arg0 = fold_build1 (VIEW_CONVERT_EXPR, tgt_type, arg0);
	  else if (tgt_type != TREE_TYPE (arg1))
	    arg1 = fold_build1 (VIEW_CONVERT_EXPR, tgt_type, arg1);
	}

      /* VIEW_CONVERT_EXPR should be updated to CONSTRUCTOR before.  */
      gcc_assert (code == CONSTRUCTOR || code == VECTOR_CST);

      /* Shuffle of a constructor.  */
      bool ret = false;
      tree res_type = TREE_TYPE (gimple_assign_lhs (stmt));
      tree opt = fold_ternary (VEC_PERM_EXPR, res_type, arg0, arg1, op2);
      if (!opt
	  || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
	return 0;
      /* Found VIEW_CONVERT_EXPR before, need one explicit conversion.  */
      if (res_type != TREE_TYPE (op0))
	{
	  tree name = make_ssa_name (TREE_TYPE (opt));
	  gimple *ass_stmt = gimple_build_assign (name, opt);
	  gsi_insert_before (gsi, ass_stmt, GSI_SAME_STMT);
	  opt = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op0), name);
	}
      gimple_assign_set_rhs_from_tree (gsi, opt);
      update_stmt (gsi_stmt (*gsi));
      if (TREE_CODE (op0) == SSA_NAME)
	ret = remove_prop_source_from_use (op0);
      if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
	ret |= remove_prop_source_from_use (op1);
      return ret ? 2 : 1;
    }

  return 0;
}
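/* A sketch of the "two consecutive shuffles" case (illustrative GIMPLE,
   invented SSA names):

     _4 = VEC_PERM_EXPR <a_1, b_2, { 4, 5, 6, 7 }>;
     _5 = VEC_PERM_EXPR <_4, _4, { 0, 1, 2, 3 }>;

   The combined permutation selects exactly the second input, so
   is_combined_permutation_identity returns 2 and _5 is replaced by b_2,
   leaving the intermediate shuffle dead.  */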
/* Get the BIT_FIELD_REF definition of VAL, if any, looking through
   conversions with code CONV_CODE or update it if still ERROR_MARK.
   Return NULL_TREE if no such matching def was found.  */

static tree
get_bit_field_ref_def (tree val, enum tree_code &conv_code)
{
  if (TREE_CODE (val) != SSA_NAME)
    return NULL_TREE;
  gimple *def_stmt = get_prop_source_stmt (val, false, NULL);
  if (!def_stmt)
    return NULL_TREE;
  enum tree_code code = gimple_assign_rhs_code (def_stmt);
  if (code == FLOAT_EXPR
      || code == FIX_TRUNC_EXPR
      || CONVERT_EXPR_CODE_P (code))
    {
      tree op1 = gimple_assign_rhs1 (def_stmt);
      if (conv_code == ERROR_MARK)
	conv_code = code;
      else if (conv_code != code)
	return NULL_TREE;
      if (TREE_CODE (op1) != SSA_NAME)
	return NULL_TREE;
      def_stmt = SSA_NAME_DEF_STMT (op1);
      if (! is_gimple_assign (def_stmt))
	return NULL_TREE;
      code = gimple_assign_rhs_code (def_stmt);
    }
  if (code != BIT_FIELD_REF)
    return NULL_TREE;
  return gimple_assign_rhs1 (def_stmt);
}
/* Recognize a VEC_PERM_EXPR.  Returns true if there were any changes.  */

static bool
simplify_vector_constructor (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree op, orig[2], type, elem_type;
  unsigned elem_size, i;
  unsigned HOST_WIDE_INT nelts;
  unsigned HOST_WIDE_INT refnelts;
  enum tree_code conv_code;
  constructor_elt *elt;

  op = gimple_assign_rhs1 (stmt);
  type = TREE_TYPE (op);
  gcc_checking_assert (TREE_CODE (op) == CONSTRUCTOR
		       && TREE_CODE (type) == VECTOR_TYPE);

  if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
    return false;
  elem_type = TREE_TYPE (type);
  elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));

  orig[0] = NULL;
  orig[1] = NULL;
  conv_code = ERROR_MARK;
  bool maybe_ident = true;
  bool maybe_blend[2] = { true, true };
  tree one_constant = NULL_TREE;
  tree one_nonconstant = NULL_TREE;
  auto_vec<tree> constants;
  constants.safe_grow_cleared (nelts, true);
  auto_vec<std::pair<unsigned, unsigned>, 64> elts;
  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
    {
      tree ref, op1;
      unsigned int elem;

      if (i >= nelts)
	return false;

      /* Look for elements extracted and possibly converted from
	 another vector.  */
      op1 = get_bit_field_ref_def (elt->value, conv_code);
      if (op1
	  && TREE_CODE ((ref = TREE_OPERAND (op1, 0))) == SSA_NAME
	  && VECTOR_TYPE_P (TREE_TYPE (ref))
	  && useless_type_conversion_p (TREE_TYPE (op1),
					TREE_TYPE (TREE_TYPE (ref)))
	  && constant_multiple_p (bit_field_offset (op1),
				  bit_field_size (op1), &elem)
	  && TYPE_VECTOR_SUBPARTS (TREE_TYPE (ref)).is_constant (&refnelts))
	{
	  unsigned int j;
	  for (j = 0; j < 2; ++j)
	    {
	      if (!orig[j])
		{
		  if (j == 0
		      || useless_type_conversion_p (TREE_TYPE (orig[0]),
						    TREE_TYPE (ref)))
		    break;
		}
	      else if (ref == orig[j])
		break;
	    }
	  /* Found a suitable vector element.  */
	  if (j < 2)
	    {
	      orig[j] = ref;
	      if (elem != i || j != 0)
		maybe_ident = false;
	      if (elem != i)
		maybe_blend[j] = false;
	      elts.safe_push (std::make_pair (j, elem));
	      continue;
	    }
	  /* Else fallthru.  */
	}
      /* Handle elements not extracted from a vector.
	  1. constants by permuting with constant vector
	  2. a unique non-constant element by permuting with a splat vector  */
      if (orig[1]
	  && orig[1] != error_mark_node)
	return false;
      orig[1] = error_mark_node;
      if (CONSTANT_CLASS_P (elt->value))
	{
	  if (one_nonconstant)
	    return false;
	  if (!one_constant)
	    one_constant = elt->value;
	  constants[i] = elt->value;
	}
      else
	{
	  if (one_constant)
	    return false;
	  if (!one_nonconstant)
	    one_nonconstant = elt->value;
	  else if (!operand_equal_p (one_nonconstant, elt->value, 0))
	    return false;
	}
      elts.safe_push (std::make_pair (1, i));
      maybe_ident = false;
    }
  if (i < nelts)
    return false;

  if (! orig[0]
      || ! VECTOR_TYPE_P (TREE_TYPE (orig[0])))
    return false;
  refnelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (orig[0])).to_constant ();
  /* We currently do not handle larger destination vectors.  */
  if (refnelts < nelts)
    return false;

  if (maybe_ident)
    {
      tree conv_src_type
	= (nelts != refnelts
	   ? (conv_code != ERROR_MARK
	      ? build_vector_type (TREE_TYPE (TREE_TYPE (orig[0])), nelts)
	      : type)
	   : TREE_TYPE (orig[0]));
      if (conv_code != ERROR_MARK
	  && !supportable_convert_operation (conv_code, type, conv_src_type,
					     &conv_code))
	{
	  /* Only few targets implement direct conversion patterns so try
	     some simple special cases via VEC_[UN]PACK[_FLOAT]_LO_EXPR.  */
	  optab optab;
	  tree halfvectype, dblvectype;
	  enum tree_code unpack_op;

	  if (!BYTES_BIG_ENDIAN)
	    unpack_op = (FLOAT_TYPE_P (TREE_TYPE (type))
			 ? VEC_UNPACK_FLOAT_LO_EXPR
			 : VEC_UNPACK_LO_EXPR);
	  else
	    unpack_op = (FLOAT_TYPE_P (TREE_TYPE (type))
			 ? VEC_UNPACK_FLOAT_HI_EXPR
			 : VEC_UNPACK_HI_EXPR);

	  /* Conversions between DFP and FP have no special tree code
	     but we cannot handle those since all relevant vector conversion
	     optabs only have a single mode.  */
	  if (CONVERT_EXPR_CODE_P (conv_code)
	      && FLOAT_TYPE_P (TREE_TYPE (type))
	      && (DECIMAL_FLOAT_TYPE_P (TREE_TYPE (type))
		  != DECIMAL_FLOAT_TYPE_P (TREE_TYPE (conv_src_type))))
	    return false;

	  if (CONVERT_EXPR_CODE_P (conv_code)
	      && (2 * TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig[0])))
		  == TYPE_PRECISION (TREE_TYPE (type)))
	      && mode_for_vector (as_a <scalar_mode>
				  (TYPE_MODE (TREE_TYPE (TREE_TYPE (orig[0])))),
				  nelts * 2).exists ()
	      && (dblvectype
		  = build_vector_type (TREE_TYPE (TREE_TYPE (orig[0])),
				       nelts * 2))
	      /* Only use it for vector modes or for vector booleans
		 represented as scalar bitmasks.  See PR95528.  */
	      && (VECTOR_MODE_P (TYPE_MODE (dblvectype))
		  || VECTOR_BOOLEAN_TYPE_P (dblvectype))
	      && (optab = optab_for_tree_code (unpack_op,
					       dblvectype,
					       optab_default))
	      && (optab_handler (optab, TYPE_MODE (dblvectype))
		  != CODE_FOR_nothing))
	    {
	      gimple_seq stmts = NULL;
	      tree dbl;
	      if (refnelts == nelts)
		{
		  /* ???  Paradoxical subregs don't exist, so insert into
		     the lower half of a wider zero vector.  */
		  dbl = gimple_build (&stmts, BIT_INSERT_EXPR, dblvectype,
				      build_zero_cst (dblvectype), orig[0],
				      bitsize_zero_node);
		}
	      else if (refnelts == 2 * nelts)
		dbl = orig[0];
	      else
		dbl = gimple_build (&stmts, BIT_FIELD_REF, dblvectype,
				    orig[0], TYPE_SIZE (dblvectype),
				    bitsize_zero_node);
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple_assign_set_rhs_with_ops (gsi, unpack_op, dbl);
	    }
	  else if (CONVERT_EXPR_CODE_P (conv_code)
		   && (TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig[0])))
		       == 2 * TYPE_PRECISION (TREE_TYPE (type)))
		   && mode_for_vector (as_a <scalar_mode>
				       (TYPE_MODE
					 (TREE_TYPE (TREE_TYPE (orig[0])))),
				       nelts / 2).exists ()
		   && (halfvectype
		       = build_vector_type (TREE_TYPE (TREE_TYPE (orig[0])),
					    nelts / 2))
		   /* Only use it for vector modes or for vector booleans
		      represented as scalar bitmasks.  See PR95528.  */
		   && (VECTOR_MODE_P (TYPE_MODE (halfvectype))
		       || VECTOR_BOOLEAN_TYPE_P (halfvectype))
		   && (optab = optab_for_tree_code (VEC_PACK_TRUNC_EXPR,
						    halfvectype,
						    optab_default))
		   && (optab_handler (optab, TYPE_MODE (halfvectype))
		       != CODE_FOR_nothing))
	    {
	      gimple_seq stmts = NULL;
	      tree low = gimple_build (&stmts, BIT_FIELD_REF, halfvectype,
				       orig[0], TYPE_SIZE (halfvectype),
				       bitsize_zero_node);
	      tree hig = gimple_build (&stmts, BIT_FIELD_REF, halfvectype,
				       orig[0], TYPE_SIZE (halfvectype),
				       TYPE_SIZE (halfvectype));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple_assign_set_rhs_with_ops (gsi, VEC_PACK_TRUNC_EXPR,
					      low, hig);
	    }
	  else
	    return false;
	  update_stmt (gsi_stmt (*gsi));
	  return true;
	}
      if (nelts != refnelts)
	{
	  gassign *lowpart
	    = gimple_build_assign (make_ssa_name (conv_src_type),
				   build3 (BIT_FIELD_REF, conv_src_type,
					   orig[0], TYPE_SIZE (conv_src_type),
					   bitsize_zero_node));
	  gsi_insert_before (gsi, lowpart, GSI_SAME_STMT);
	  orig[0] = gimple_assign_lhs (lowpart);
	}
      if (conv_code == ERROR_MARK)
	{
	  tree src_type = TREE_TYPE (orig[0]);
	  if (!useless_type_conversion_p (type, src_type))
	    {
	      gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type),
				    TYPE_VECTOR_SUBPARTS (src_type))
			  && useless_type_conversion_p (TREE_TYPE (type),
							TREE_TYPE (src_type)));
	      tree rhs = build1 (VIEW_CONVERT_EXPR, type, orig[0]);
	      orig[0] = make_ssa_name (type);
	      gassign *assign = gimple_build_assign (orig[0], rhs);
	      gsi_insert_before (gsi, assign, GSI_SAME_STMT);
	    }
	  gimple_assign_set_rhs_from_tree (gsi, orig[0]);
	}
      else
	gimple_assign_set_rhs_with_ops (gsi, conv_code, orig[0],
					NULL_TREE, NULL_TREE);
    }
  else
    {
      /* If we combine a vector with a non-vector avoid cases where
	 we'll obviously end up with more GIMPLE stmts which is when
	 we'll later not fold this to a single insert into the vector
	 and we had a single extract originally.  See PR92819.  */
      if (nelts == 2
	  && refnelts > 2
	  && orig[1] == error_mark_node
	  && !maybe_blend[0])
	return false;
      tree mask_type, perm_type, conv_src_type;
      perm_type = TREE_TYPE (orig[0]);
      conv_src_type = (nelts == refnelts
		       ? perm_type
		       : build_vector_type (TREE_TYPE (perm_type), nelts));
      if (conv_code != ERROR_MARK
	  && !supportable_convert_operation (conv_code, type, conv_src_type,
					     &conv_code))
	return false;

      /* Now that we know the number of elements of the source build the
	 permute vector.
	 ???  When the second vector has constant values we can shuffle
	 it and its source indexes to make the permutation supported.
	 For now it mimics a blend.  */
      vec_perm_builder sel (refnelts, refnelts, 1);
      bool all_same_p = true;
      for (i = 0; i < elts.length (); ++i)
	{
	  sel.quick_push (elts[i].second + elts[i].first * refnelts);
	  all_same_p &= known_eq (sel[i], sel[0]);
	}
      /* And fill the tail with "something".  It's really a don't-care,
	 and ideally we'd allow VEC_PERM to have a smaller destination
	 vector.  As a heuristic:

	 (a) if what we have so far duplicates a single element, make the
	     tail do the same

	 (b) otherwise preserve a uniform orig[0].  This facilitates
	     later pattern-matching of VEC_PERM_EXPR to a BIT_INSERT_EXPR.  */
      for (; i < refnelts; ++i)
	sel.quick_push (all_same_p
			? sel[0]
			: (elts[0].second == 0 && elts[0].first == 0
			   ? 0 : refnelts) + i);
      vec_perm_indices indices (sel, orig[1] ? 2 : 1, refnelts);
      machine_mode vmode = TYPE_MODE (perm_type);
      if (!can_vec_perm_const_p (vmode, vmode, indices))
	return false;
      mask_type
	= build_vector_type (build_nonstandard_integer_type (elem_size, 1),
			     refnelts);
      if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
	  || maybe_ne (GET_MODE_SIZE (TYPE_MODE (mask_type)),
		       GET_MODE_SIZE (TYPE_MODE (perm_type))))
	return false;
      tree op2 = vec_perm_indices_to_tree (mask_type, indices);
      bool converted_orig1 = false;
      gimple_seq stmts = NULL;
      if (!orig[1])
	orig[1] = orig[0];
      else if (orig[1] == error_mark_node
	       && one_nonconstant)
	{
	  /* ???  We can see if we can safely convert to the original
	     element type.  */
	  converted_orig1 = conv_code != ERROR_MARK;
	  orig[1] = gimple_build_vector_from_val (&stmts, UNKNOWN_LOCATION,
						  converted_orig1
						  ? type : perm_type,
						  one_nonconstant);
	}
      else if (orig[1] == error_mark_node)
	{
	  /* ???  See if we can convert the vector to the original type.  */
	  converted_orig1 = conv_code != ERROR_MARK;
	  unsigned n = converted_orig1 ? nelts : refnelts;
	  tree_vector_builder vec (converted_orig1
				   ? type : perm_type, n, 1);
	  for (unsigned i = 0; i < n; ++i)
	    if (i < nelts && constants[i])
	      vec.quick_push (constants[i]);
	    else
	      /* ??? Push a don't-care value.  */
	      vec.quick_push (one_constant);
	  orig[1] = vec.build ();
	}
      tree blend_op2 = NULL_TREE;
      if (converted_orig1)
	{
	  /* Make sure we can do a blend in the target type.  */
	  vec_perm_builder sel (nelts, nelts, 1);
	  for (i = 0; i < elts.length (); ++i)
	    sel.quick_push (elts[i].first
			    ? elts[i].second + nelts : i);
	  vec_perm_indices indices (sel, 2, nelts);
	  machine_mode vmode = TYPE_MODE (type);
	  if (!can_vec_perm_const_p (vmode, vmode, indices))
	    return false;
	  mask_type
	    = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
				 nelts);
	  if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
	      || maybe_ne (GET_MODE_SIZE (TYPE_MODE (mask_type)),
			   GET_MODE_SIZE (TYPE_MODE (type))))
	    return false;
	  blend_op2 = vec_perm_indices_to_tree (mask_type, indices);
	}
      tree orig1_for_perm
	= converted_orig1 ? build_zero_cst (perm_type) : orig[1];
      tree res = gimple_build (&stmts, VEC_PERM_EXPR, perm_type,
			       orig[0], orig1_for_perm, op2);
      if (nelts != refnelts)
	res = gimple_build (&stmts, BIT_FIELD_REF,
			    conv_code != ERROR_MARK ? conv_src_type : type,
			    res, TYPE_SIZE (type), bitsize_zero_node);
      if (conv_code != ERROR_MARK)
	res = gimple_build (&stmts, conv_code, type, res);
      else if (!useless_type_conversion_p (type, TREE_TYPE (res)))
	{
	  gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type),
				TYPE_VECTOR_SUBPARTS (perm_type))
		      && useless_type_conversion_p (TREE_TYPE (type),
						    TREE_TYPE (perm_type)));
	  res = gimple_build (&stmts, VIEW_CONVERT_EXPR, type, res);
	}
      /* Blend in the actual constant.  */
      if (converted_orig1)
	res = gimple_build (&stmts, VEC_PERM_EXPR, type,
			    res, orig[1], blend_op2);
      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
      gimple_assign_set_rhs_with_ops (gsi, SSA_NAME, res);
    }
  update_stmt (gsi_stmt (*gsi));
  return true;
}
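/* For illustration (invented SSA names): a constructor of extracts from a
   single source vector such as

     _1 = BIT_FIELD_REF <v_5, 32, 32>;
     _2 = BIT_FIELD_REF <v_5, 32, 0>;
     x_6 = {_1, _2, _1, _2};

   is recognized above and, assuming the constant permutation is supported
   by the target, rewritten to

     x_6 = VEC_PERM_EXPR <v_5, v_5, { 1, 0, 1, 0 }>;  */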
/* Rewrite the vector load at *GSI to component-wise loads if the load
   is only used in BIT_FIELD_REF extractions with eventual intermediate
   widening.  */

static void
optimize_vector_load (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_assign_lhs (stmt);
  tree rhs = gimple_assign_rhs1 (stmt);

  /* Gather BIT_FIELD_REFs to rewrite, looking through
     VEC_UNPACK_{LO,HI}_EXPR.  */
  use_operand_p use_p;
  imm_use_iterator iter;
  bool rewrite = true;
  auto_vec<gimple *, 8> bf_stmts;
  auto_vec<tree, 8> worklist;
  worklist.quick_push (lhs);
  do
    {
      tree def = worklist.pop ();
      unsigned HOST_WIDE_INT def_eltsize
	= TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (def))));
      FOR_EACH_IMM_USE_FAST (use_p, iter, def)
	{
	  gimple *use_stmt = USE_STMT (use_p);
	  if (is_gimple_debug (use_stmt))
	    continue;
	  if (!is_gimple_assign (use_stmt))
	    {
	      rewrite = false;
	      break;
	    }
	  enum tree_code use_code = gimple_assign_rhs_code (use_stmt);
	  tree use_rhs = gimple_assign_rhs1 (use_stmt);
	  if (use_code == BIT_FIELD_REF
	      && TREE_OPERAND (use_rhs, 0) == def
	      /* If it's on the VEC_UNPACK_{HI,LO}_EXPR def we need to
		 verify it is element aligned.  */
	      && (def == lhs
		  || (known_eq (bit_field_size (use_rhs), def_eltsize)
		      && constant_multiple_p (bit_field_offset (use_rhs),
					      def_eltsize))))
	    {
	      bf_stmts.safe_push (use_stmt);
	      continue;
	    }
	  /* Walk through one level of VEC_UNPACK_{LO,HI}_EXPR.  */
	  if (def == lhs
	      && (use_code == VEC_UNPACK_HI_EXPR
		  || use_code == VEC_UNPACK_LO_EXPR)
	      && use_rhs == lhs)
	    {
	      worklist.safe_push (gimple_assign_lhs (use_stmt));
	      continue;
	    }
	  rewrite = false;
	  break;
	}
      if (!rewrite)
	break;
    }
  while (!worklist.is_empty ());

  if (!rewrite)
    {
      gsi_next (gsi);
      return;
    }
  /* We now have all ultimate uses of the load to rewrite in bf_stmts.  */

  /* Prepare the original ref to be wrapped in adjusted BIT_FIELD_REFs.
     For TARGET_MEM_REFs we have to separate the LEA from the reference.  */
  tree load_rhs = rhs;
  if (TREE_CODE (load_rhs) == TARGET_MEM_REF)
    {
      if (TREE_CODE (TREE_OPERAND (load_rhs, 0)) == ADDR_EXPR)
	mark_addressable (TREE_OPERAND (TREE_OPERAND (load_rhs, 0), 0));
      tree tem = make_ssa_name (TREE_TYPE (TREE_OPERAND (load_rhs, 0)));
      gimple *new_stmt
	= gimple_build_assign (tem, build1 (ADDR_EXPR, TREE_TYPE (tem),
					    unshare_expr (load_rhs)));
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
      load_rhs = build2_loc (EXPR_LOCATION (load_rhs),
			     MEM_REF, TREE_TYPE (load_rhs), tem,
			     build_int_cst
			       (TREE_TYPE (TREE_OPERAND (load_rhs, 1)), 0));
    }

  /* Rewrite the BIT_FIELD_REFs to be actual loads, re-emitting them at
     the place of the original load.  */
  for (gimple *use_stmt : bf_stmts)
    {
      tree bfr = gimple_assign_rhs1 (use_stmt);
      tree new_rhs = unshare_expr (load_rhs);
      if (TREE_OPERAND (bfr, 0) != lhs)
	{
	  /* When the BIT_FIELD_REF is on the promoted vector we have to
	     adjust it and emit a conversion afterwards.  */
	  gimple *def_stmt
	      = SSA_NAME_DEF_STMT (TREE_OPERAND (bfr, 0));
	  enum tree_code def_code
	      = gimple_assign_rhs_code (def_stmt);

	  /* The adjusted BIT_FIELD_REF is of the promotion source
	     vector size and at half of the offset...  */
	  new_rhs = fold_build3 (BIT_FIELD_REF,
				 TREE_TYPE (TREE_TYPE (lhs)),
				 new_rhs,
				 TYPE_SIZE (TREE_TYPE (TREE_TYPE (lhs))),
				 size_binop (EXACT_DIV_EXPR,
					     TREE_OPERAND (bfr, 2),
					     size_int (2)));
	  /* ... and offsetted by half of the vector if VEC_UNPACK_HI_EXPR.  */
	  if (def_code == (!BYTES_BIG_ENDIAN
			   ? VEC_UNPACK_HI_EXPR : VEC_UNPACK_LO_EXPR))
	    TREE_OPERAND (new_rhs, 2)
	      = size_binop (PLUS_EXPR, TREE_OPERAND (new_rhs, 2),
			    size_binop (EXACT_DIV_EXPR,
					TYPE_SIZE (TREE_TYPE (lhs)),
					size_int (2)));
	  tree tem = make_ssa_name (TREE_TYPE (TREE_TYPE (lhs)));
	  gimple *new_stmt = gimple_build_assign (tem, new_rhs);
	  location_t loc = gimple_location (use_stmt);
	  gimple_set_location (new_stmt, loc);
	  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	  /* Perform scalar promotion.  */
	  new_stmt = gimple_build_assign (gimple_assign_lhs (use_stmt),
					  NOP_EXPR, tem);
	  gimple_set_location (new_stmt, loc);
	  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	}
      else
	{
	  /* When the BIT_FIELD_REF is on the original load result
	     we can just wrap that.  */
	  tree new_rhs = fold_build3 (BIT_FIELD_REF, TREE_TYPE (bfr),
				      unshare_expr (load_rhs),
				      TREE_OPERAND (bfr, 1),
				      TREE_OPERAND (bfr, 2));
	  gimple *new_stmt = gimple_build_assign (gimple_assign_lhs (use_stmt),
						  new_rhs);
	  location_t loc = gimple_location (use_stmt);
	  gimple_set_location (new_stmt, loc);
	  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	}
      gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
      unlink_stmt_vdef (use_stmt);
      gsi_remove (&gsi2, true);
    }

  /* Finally get rid of the intermediate stmts.  */
  gimple *use_stmt;
  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
    {
      if (is_gimple_debug (use_stmt))
	{
	  if (gimple_debug_bind_p (use_stmt))
	    {
	      gimple_debug_bind_reset_value (use_stmt);
	      update_stmt (use_stmt);
	    }
	  continue;
	}
      gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
      unlink_stmt_vdef (use_stmt);
      release_defs (use_stmt);
      gsi_remove (&gsi2, true);
    }
  /* And the original load.  */
  release_defs (stmt);
  gsi_remove (gsi, true);
}
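/* A sketch of the effect (illustrative GIMPLE, invented names): for

     v_1 = MEM <vector(4) int> [(int *)p_2];
     _3 = BIT_FIELD_REF <v_1, 32, 0>;
     _4 = BIT_FIELD_REF <v_1, 32, 96>;

   the BIT_FIELD_REFs are re-emitted as component-wise loads wrapping the
   original memory reference

     _3 = BIT_FIELD_REF <MEM <vector(4) int> [(int *)p_2], 32, 0>;
     _4 = BIT_FIELD_REF <MEM <vector(4) int> [(int *)p_2], 32, 96>;

   and the vector load itself is removed.  */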
/* Primitive "lattice" function for gimple_simplify.  */

static tree
fwprop_ssa_val (tree name)
{
  /* First valueize NAME.  */
  if (TREE_CODE (name) == SSA_NAME
      && SSA_NAME_VERSION (name) < lattice.length ())
    {
      tree val = lattice[SSA_NAME_VERSION (name)];
      if (val)
	name = val;
    }
  /* We continue matching along SSA use-def edges for SSA names
     that are not single-use.  Currently there are no patterns
     that would cause any issues with that.  */
  return name;
}
/* Main entry point for the forward propagation and statement combine
   optimizer.  */

namespace {

const pass_data pass_data_forwprop =
{
  GIMPLE_PASS, /* type */
  "forwprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FORWPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_forwprop : public gimple_opt_pass
{
public:
  pass_forwprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_forwprop, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_forwprop (m_ctxt); }
  bool gate (function *) final override { return flag_tree_forwprop; }
  unsigned int execute (function *) final override;

}; // class pass_forwprop
unsigned int
pass_forwprop::execute (function *fun)
{
  unsigned int todoflags = 0;

  cfg_changed = false;

  /* Combine stmts with the stmts defining their operands.  Do that
     in an order that guarantees visiting SSA defs before SSA uses.  */
  lattice.create (num_ssa_names);
  lattice.quick_grow_cleared (num_ssa_names);
  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
  int postorder_num = pre_and_rev_post_order_compute_fn (cfun, NULL,
							 postorder, false);
  auto_vec<gimple *, 4> to_fixup;
  auto_vec<gimple *, 32> to_remove;
  to_purge = BITMAP_ALLOC (NULL);
  for (int i = 0; i < postorder_num; ++i)
    {
      gimple_stmt_iterator gsi;
      basic_block bb = BASIC_BLOCK_FOR_FN (fun, postorder[i]);

      /* Record degenerate PHIs in the lattice.  */
      for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
	   gsi_next (&si))
	{
	  gphi *phi = si.phi ();
	  tree res = gimple_phi_result (phi);
	  if (virtual_operand_p (res))
	    continue;

	  use_operand_p use_p;
	  ssa_op_iter it;
	  tree first = NULL_TREE;
	  bool all_same = true;
	  FOR_EACH_PHI_ARG (use_p, phi, it, SSA_OP_USE)
	    {
	      tree use = USE_FROM_PTR (use_p);
	      if (! first)
		first = use;
	      else if (! operand_equal_p (first, use, 0))
		{
		  all_same = false;
		  break;
		}
	    }
	  if (all_same)
	    {
	      if (may_propagate_copy (res, first))
		to_remove.safe_push (phi);
	      fwprop_set_lattice_val (res, first);
	    }
	}

      /* Apply forward propagation to all stmts in the basic-block.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree lhs, rhs;
	  enum tree_code code;

	  if (!is_gimple_assign (stmt))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  lhs = gimple_assign_lhs (stmt);
	  rhs = gimple_assign_rhs1 (stmt);
	  code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE (lhs) != SSA_NAME
	      || has_zero_uses (lhs))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  /* If this statement sets an SSA_NAME to an address,
	     try to propagate the address into the uses of the SSA_NAME.  */
	  if ((code == ADDR_EXPR
	       /* Handle pointer conversions on invariant addresses
		  as well, as this is valid gimple.  */
	       || (CONVERT_EXPR_CODE_P (code)
		   && TREE_CODE (rhs) == ADDR_EXPR
		   && POINTER_TYPE_P (TREE_TYPE (lhs))))
	      && TREE_CODE (TREE_OPERAND (rhs, 0)) != TARGET_MEM_REF)
	    {
	      tree base = get_base_address (TREE_OPERAND (rhs, 0));
	      if ((!base
		   || !DECL_P (base)
		   || decl_address_invariant_p (base))
		  && !stmt_references_abnormal_ssa_name (stmt)
		  && forward_propagate_addr_expr (lhs, rhs, true))
		{
		  fwprop_invalidate_lattice (gimple_get_lhs (stmt));
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (code == POINTER_PLUS_EXPR)
	    {
	      tree off = gimple_assign_rhs2 (stmt);
	      if (TREE_CODE (off) == INTEGER_CST
		  && can_propagate_from (stmt)
		  && !simple_iv_increment_p (stmt)
		  /* ???  Better adjust the interface to that function
		     instead of building new trees here.  */
		  && forward_propagate_addr_expr
		       (lhs,
			build1_loc (gimple_location (stmt),
				    ADDR_EXPR, TREE_TYPE (rhs),
				    fold_build2 (MEM_REF,
						 TREE_TYPE (TREE_TYPE (rhs)),
						 rhs,
						 fold_convert (ptr_type_node,
							       off))), true))
		{
		  fwprop_invalidate_lattice (gimple_get_lhs (stmt));
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else if (is_gimple_min_invariant (rhs))
		{
		  /* Make sure to fold &a[0] + off_1 here.  */
		  fold_stmt_inplace (&gsi);
		  update_stmt (stmt);
		  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
		    gsi_next (&gsi);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
		   && gimple_assign_load_p (stmt)
		   && !gimple_has_volatile_ops (stmt)
		   && (TREE_CODE (gimple_assign_rhs1 (stmt))
		       != TARGET_MEM_REF)
		   && !stmt_can_throw_internal (cfun, stmt))
	    {
	      /* Rewrite loads used only in real/imagpart extractions to
		 component-wise loads.  */
	      use_operand_p use_p;
	      imm_use_iterator iter;
	      bool rewrite = true;
	      FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
		{
		  gimple *use_stmt = USE_STMT (use_p);
		  if (is_gimple_debug (use_stmt))
		    continue;
		  if (!is_gimple_assign (use_stmt)
		      || (gimple_assign_rhs_code (use_stmt) != REALPART_EXPR
			  && gimple_assign_rhs_code (use_stmt) != IMAGPART_EXPR)
		      || TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) != lhs)
		    {
		      rewrite = false;
		      break;
		    }
		}
	      if (rewrite)
		{
		  gimple *use_stmt;
		  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
		    {
		      if (is_gimple_debug (use_stmt))
			{
			  if (gimple_debug_bind_p (use_stmt))
			    {
			      gimple_debug_bind_reset_value (use_stmt);
			      update_stmt (use_stmt);
			    }
			  continue;
			}

		      tree new_rhs = build1 (gimple_assign_rhs_code (use_stmt),
					     TREE_TYPE (TREE_TYPE (rhs)),
					     unshare_expr (rhs));
		      gimple *new_stmt
			= gimple_build_assign (gimple_assign_lhs (use_stmt),
					       new_rhs);

		      location_t loc = gimple_location (use_stmt);
		      gimple_set_location (new_stmt, loc);
		      gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
		      unlink_stmt_vdef (use_stmt);
		      gsi_remove (&gsi2, true);

		      gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
		    }

		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE
		   && (TYPE_MODE (TREE_TYPE (lhs)) == BLKmode
		       /* After vector lowering rewrite all loads, but
			  initially do not since this conflicts with
			  vector CONSTRUCTOR to shuffle optimization.  */
		       || (fun->curr_properties & PROP_gimple_lvec))
		   && gimple_assign_load_p (stmt)
		   && !gimple_has_volatile_ops (stmt)
		   && !stmt_can_throw_internal (cfun, stmt)
		   && (!VAR_P (rhs) || !DECL_HARD_REGISTER (rhs)))
	    optimize_vector_load (&gsi);

	  else if (code == COMPLEX_EXPR)
	    {
	      /* Rewrite stores of a single-use complex build expression
		 to component-wise stores.  */
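	      /* For illustration (invented SSA names, not from a dump):

		   _1 = COMPLEX_EXPR <r_2, i_3>;
		   *p_4 = _1;

		 becomes

		   REALPART_EXPR <*p_4> = r_2;
		   IMAGPART_EXPR <*p_4> = i_3;  */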
	      use_operand_p use_p;
	      gimple *use_stmt;
	      if (single_imm_use (lhs, &use_p, &use_stmt)
		  && gimple_store_p (use_stmt)
		  && !gimple_has_volatile_ops (use_stmt)
		  && is_gimple_assign (use_stmt)
		  && (TREE_CODE (gimple_assign_lhs (use_stmt))
		      != TARGET_MEM_REF))
		{
		  tree use_lhs = gimple_assign_lhs (use_stmt);
		  if (auto_var_p (use_lhs))
		    DECL_NOT_GIMPLE_REG_P (use_lhs) = 1;
		  tree new_lhs = build1 (REALPART_EXPR,
					 TREE_TYPE (TREE_TYPE (use_lhs)),
					 unshare_expr (use_lhs));
		  gimple *new_stmt = gimple_build_assign (new_lhs, rhs);
		  location_t loc = gimple_location (use_stmt);
		  gimple_set_location (new_stmt, loc);
		  gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
		  gimple_set_vdef (new_stmt, make_ssa_name (gimple_vop (cfun)));
		  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		  gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
		  gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
		  gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);

		  new_lhs = build1 (IMAGPART_EXPR,
				    TREE_TYPE (TREE_TYPE (use_lhs)),
				    unshare_expr (use_lhs));
		  gimple_assign_set_lhs (use_stmt, new_lhs);
		  gimple_assign_set_rhs1 (use_stmt, gimple_assign_rhs2 (stmt));
		  update_stmt (use_stmt);

		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (code == CONSTRUCTOR
		   && VECTOR_TYPE_P (TREE_TYPE (rhs))
		   && TYPE_MODE (TREE_TYPE (rhs)) == BLKmode
		   && CONSTRUCTOR_NELTS (rhs) > 0
		   && (!VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value))
		       || (TYPE_MODE (TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value))
			   != BLKmode)))
	    {
	      /* Rewrite stores of a single-use vector constructors
		 to component-wise stores if the mode isn't supported.  */
	      use_operand_p use_p;
	      gimple *use_stmt;
	      if (single_imm_use (lhs, &use_p, &use_stmt)
		  && gimple_store_p (use_stmt)
		  && !gimple_has_volatile_ops (use_stmt)
		  && !stmt_can_throw_internal (cfun, use_stmt)
		  && is_gimple_assign (use_stmt)
		  && (TREE_CODE (gimple_assign_lhs (use_stmt))
		      != TARGET_MEM_REF))
		{
		  tree elt_t = TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value);
		  unsigned HOST_WIDE_INT elt_w
		    = tree_to_uhwi (TYPE_SIZE (elt_t));
		  unsigned HOST_WIDE_INT n
		    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs)));
		  tree use_lhs = gimple_assign_lhs (use_stmt);
		  if (auto_var_p (use_lhs))
		    DECL_NOT_GIMPLE_REG_P (use_lhs) = 1;
		  for (unsigned HOST_WIDE_INT bi = 0; bi < n; bi += elt_w)
		    {
		      unsigned HOST_WIDE_INT ci = bi / elt_w;
		      tree new_rhs;
		      if (ci < CONSTRUCTOR_NELTS (rhs))
			new_rhs = CONSTRUCTOR_ELT (rhs, ci)->value;
		      else
			new_rhs = build_zero_cst (elt_t);
		      tree new_lhs = build3 (BIT_FIELD_REF,
					     elt_t,
					     unshare_expr (use_lhs),
					     bitsize_int (elt_w),
					     bitsize_int (bi));
		      gimple *new_stmt = gimple_build_assign (new_lhs, new_rhs);
		      location_t loc = gimple_location (use_stmt);
		      gimple_set_location (new_stmt, loc);
		      gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
		      gimple_set_vdef (new_stmt,
				       make_ssa_name (gimple_vop (cfun)));
		      SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		      gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
		      gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
		      gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
		    }
		  gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
		  unlink_stmt_vdef (use_stmt);
		  release_defs (use_stmt);
		  gsi_remove (&gsi2, true);
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else
	    gsi_next (&gsi);
	}

      /* Combine stmts with the stmts defining their operands.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);

	  /* Mark stmt as potentially needing revisiting.  */
	  gimple_set_plf (stmt, GF_PLF_1, false);

	  /* Substitute from our lattice.  We need to do so only once.  */
	  bool substituted_p = false;
	  use_operand_p usep;
	  ssa_op_iter iter;
	  FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_USE)
	    {
	      tree use = USE_FROM_PTR (usep);
	      tree val = fwprop_ssa_val (use);
	      if (val && val != use && may_propagate_copy (use, val))
		{
		  propagate_value (usep, val);
		  substituted_p = true;
		}
	    }
	  if (substituted_p
	      && is_gimple_assign (stmt)
	      && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));

	  bool changed;
	  do
	    {
	      gimple *orig_stmt = stmt = gsi_stmt (gsi);
	      bool was_noreturn = (is_gimple_call (stmt)
				   && gimple_call_noreturn_p (stmt));
	      changed = false;

	      if (fold_stmt (&gsi, fwprop_ssa_val))
		{
		  changed = true;
		  stmt = gsi_stmt (gsi);
		  /* Cleanup the CFG if we simplified a condition to
		     true or false.  */
		  if (gcond *cond = dyn_cast <gcond *> (stmt))
		    if (gimple_cond_true_p (cond)
			|| gimple_cond_false_p (cond))
		      cfg_changed = true;
		}

	      if (changed || substituted_p)
		{
		  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
		    bitmap_set_bit (to_purge, bb->index);
		  if (!was_noreturn
		      && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
		    to_fixup.safe_push (stmt);
		  update_stmt (stmt);
		  substituted_p = false;
		}

	      switch (gimple_code (stmt))
		{
		case GIMPLE_ASSIGN:
		  {
		    tree rhs1 = gimple_assign_rhs1 (stmt);
		    enum tree_code code = gimple_assign_rhs_code (stmt);

		    if (TREE_CODE_CLASS (code) == tcc_comparison)
		      {
			int did_something;
			did_something = forward_propagate_into_comparison (&gsi);
			if (maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (gsi)))
			  bitmap_set_bit (to_purge, bb->index);
			if (did_something == 2)
			  cfg_changed = true;
			changed = did_something != 0;
		      }
		    else if ((code == PLUS_EXPR
			      || code == BIT_IOR_EXPR
			      || code == BIT_XOR_EXPR)
			     && simplify_rotate (&gsi))
		      changed = true;
		    else if (code == VEC_PERM_EXPR)
		      {
			int did_something = simplify_permutation (&gsi);
			if (did_something == 2)
			  cfg_changed = true;
			changed = did_something != 0;
		      }
		    else if (code == BIT_FIELD_REF)
		      changed = simplify_bitfield_ref (&gsi);
		    else if (code == CONSTRUCTOR
			     && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
		      changed = simplify_vector_constructor (&gsi);
		    else if (code == ARRAY_REF)
		      changed = simplify_count_trailing_zeroes (&gsi);
		    break;
		  }

		case GIMPLE_SWITCH:
		  changed = simplify_gimple_switch (as_a <gswitch *> (stmt));
		  break;

		case GIMPLE_COND:
		  {
		    int did_something = forward_propagate_into_gimple_cond
							(as_a <gcond *> (stmt));
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		    break;
		  }

		case GIMPLE_CALL:
		  {
		    tree callee = gimple_call_fndecl (stmt);
		    if (callee != NULL_TREE
			&& fndecl_built_in_p (callee, BUILT_IN_NORMAL))
		      changed = simplify_builtin_call (&gsi, callee);
		    break;
		  }

		default:;
		}

	      if (changed)
		{
		  /* If the stmt changed then re-visit it and the statements
		     inserted before it.  */
		  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
		    if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
		      break;
		  if (gsi_end_p (gsi))
		    gsi = gsi_start_bb (bb);
		  else
		    gsi_next (&gsi);
		}
	    }
	  while (changed);

	  /* Stmt no longer needs to be revisited.  */
	  stmt = gsi_stmt (gsi);
	  gcc_checking_assert (!gimple_plf (stmt, GF_PLF_1));
	  gimple_set_plf (stmt, GF_PLF_1, true);

	  /* Fill up the lattice.  */
	  if (gimple_assign_single_p (stmt))
	    {
	      tree lhs = gimple_assign_lhs (stmt);
	      tree rhs = gimple_assign_rhs1 (stmt);
	      if (TREE_CODE (lhs) == SSA_NAME)
		{
		  tree val = lhs;
		  if (TREE_CODE (rhs) == SSA_NAME)
		    val = fwprop_ssa_val (rhs);
		  else if (is_gimple_min_invariant (rhs))
		    val = rhs;
		  /* If we can propagate the lattice-value mark the
		     stmt for removal.  */
		  if (val != lhs
		      && may_propagate_copy (lhs, val))
		    to_remove.safe_push (stmt);
		  fwprop_set_lattice_val (lhs, val);
		}
	    }
	  else if (gimple_nop_p (stmt))
	    to_remove.safe_push (stmt);
	}

      /* Substitute in destination PHI arguments.  */
      edge_iterator ei;
      edge e;
      FOR_EACH_EDGE (e, ei, bb->succs)
	for (gphi_iterator gsi = gsi_start_phis (e->dest);
	     !gsi_end_p (gsi); gsi_next (&gsi))
	  {
	    gphi *phi = gsi.phi ();
	    use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
	    tree arg = USE_FROM_PTR (use_p);
	    if (TREE_CODE (arg) != SSA_NAME
		|| virtual_operand_p (arg))
	      continue;
	    tree val = fwprop_ssa_val (arg);
	    if (val != arg
		&& may_propagate_copy (arg, val))
	      propagate_value (use_p, val);
	  }
    }
  free (postorder);
  lattice.release ();

  /* Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!to_remove.is_empty())
    {
      gimple *stmt = to_remove.pop ();
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Removing dead stmt ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
	remove_phi_node (&gsi, true);
      else
	{
	  unlink_stmt_vdef (stmt);
	  gsi_remove (&gsi, true);
	  release_defs (stmt);
	}
    }

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!to_fixup.is_empty ())
    {
      gimple *stmt = to_fixup.pop ();
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}
      cfg_changed |= fixup_noreturn_call (stmt);
    }

  cfg_changed |= gimple_purge_all_dead_eh_edges (to_purge);
  BITMAP_FREE (to_purge);

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}

} // anon namespace

gimple_opt_pass *
make_pass_forwprop (gcc::context *ctxt)
{
  return new pass_forwprop (ctxt);
}