1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2022 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.cc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-access.h"
34 #include "gimple-ssa-warn-restrict.h"
35 #include "fold-const.h"
38 #include "stor-layout.h"
40 #include "gimple-fold.h"
42 #include "gimple-iterator.h"
43 #include "tree-into-ssa.h"
45 #include "tree-object-size.h"
47 #include "tree-ssa-propagate.h"
48 #include "ipa-utils.h"
49 #include "tree-ssa-address.h"
50 #include "langhooks.h"
51 #include "gimplify-me.h"
55 #include "gimple-match.h"
56 #include "gomp-constants.h"
57 #include "optabs-query.h"
58 #include "omp-general.h"
60 #include "fold-const-call.h"
61 #include "stringpool.h"
64 #include "diagnostic-core.h"
67 #include "tree-vector-builder.h"
68 #include "tree-ssa-strlen.h"
70 #include "internal-fn.h"
72 enum strlen_range_kind
{
73 /* Compute the exact constant string length. */
75 /* Compute the maximum constant string length. */
77 /* Compute a range of string lengths bounded by object sizes. When
78 the length of a string cannot be determined, consider as the upper
79 bound the size of the enclosing object the string may be a member
80 or element of. Also determine the size of the largest character
81 array the string may refer to. */
83 /* Determine the integer value of the argument (not string length). */
88 get_range_strlen (tree
, bitmap
, strlen_range_kind
, c_strlen_data
*, unsigned);
90 /* Return true when DECL can be referenced from current unit.
91 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
92 We can get declarations that are not possible to reference for various
95 1) When analyzing C++ virtual tables.
96 C++ virtual tables do have known constructors even
97 when they are keyed to other compilation unit.
98 Those tables can contain pointers to methods and vars
99 in other units. Those methods have both STATIC and EXTERNAL
101 2) In WHOPR mode devirtualization might lead to reference
102 to method that was partitioned elsehwere.
103 In this case we have static VAR_DECL or FUNCTION_DECL
104 that has no corresponding callgraph/varpool node
106 3) COMDAT functions referred by external vtables that
107 we devirtualize only during final compilation stage.
108 At this time we already decided that we will not output
109 the function body and thus we can't reference the symbol
113 can_refer_decl_in_current_unit_p (tree decl
, tree from_decl
)
116 struct cgraph_node
*node
;
119 if (DECL_ABSTRACT_P (decl
))
122 /* We are concerned only about static/external vars and functions. */
123 if ((!TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
124 || !VAR_OR_FUNCTION_DECL_P (decl
))
127 /* Static objects can be referred only if they are defined and not optimized
129 if (!TREE_PUBLIC (decl
))
131 if (DECL_EXTERNAL (decl
))
133 /* Before we start optimizing unreachable code we can be sure all
134 static objects are defined. */
135 if (symtab
->function_flags_ready
)
137 snode
= symtab_node::get (decl
);
138 if (!snode
|| !snode
->definition
)
140 node
= dyn_cast
<cgraph_node
*> (snode
);
141 return !node
|| !node
->inlined_to
;
144 /* We will later output the initializer, so we can refer to it.
145 So we are concerned only when DECL comes from initializer of
146 external var or var that has been optimized out. */
148 || !VAR_P (from_decl
)
149 || (!DECL_EXTERNAL (from_decl
)
150 && (vnode
= varpool_node::get (from_decl
)) != NULL
151 && vnode
->definition
)
153 && (vnode
= varpool_node::get (from_decl
)) != NULL
154 && vnode
->in_other_partition
))
156 /* We are folding reference from external vtable. The vtable may reffer
157 to a symbol keyed to other compilation unit. The other compilation
158 unit may be in separate DSO and the symbol may be hidden. */
159 if (DECL_VISIBILITY_SPECIFIED (decl
)
160 && DECL_EXTERNAL (decl
)
161 && DECL_VISIBILITY (decl
) != VISIBILITY_DEFAULT
162 && (!(snode
= symtab_node::get (decl
)) || !snode
->in_other_partition
))
164 /* When function is public, we always can introduce new reference.
165 Exception are the COMDAT functions where introducing a direct
166 reference imply need to include function body in the curren tunit. */
167 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
169 /* We have COMDAT. We are going to check if we still have definition
170 or if the definition is going to be output in other partition.
171 Bypass this when gimplifying; all needed functions will be produced.
173 As observed in PR20991 for already optimized out comdat virtual functions
174 it may be tempting to not necessarily give up because the copy will be
175 output elsewhere when corresponding vtable is output.
176 This is however not possible - ABI specify that COMDATs are output in
177 units where they are used and when the other unit was compiled with LTO
178 it is possible that vtable was kept public while the function itself
180 if (!symtab
->function_flags_ready
)
183 snode
= symtab_node::get (decl
);
185 || ((!snode
->definition
|| DECL_EXTERNAL (decl
))
186 && (!snode
->in_other_partition
187 || (!snode
->forced_by_abi
&& !snode
->force_output
))))
189 node
= dyn_cast
<cgraph_node
*> (snode
);
190 return !node
|| !node
->inlined_to
;
193 /* Create a temporary for TYPE for a statement STMT. If the current function
194 is in SSA form, a SSA name is created. Otherwise a temporary register
198 create_tmp_reg_or_ssa_name (tree type
, gimple
*stmt
)
200 if (gimple_in_ssa_p (cfun
))
201 return make_ssa_name (type
, stmt
);
203 return create_tmp_reg (type
);
206 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
207 acceptable form for is_gimple_min_invariant.
208 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
211 canonicalize_constructor_val (tree cval
, tree from_decl
)
213 if (CONSTANT_CLASS_P (cval
))
216 tree orig_cval
= cval
;
218 if (TREE_CODE (cval
) == POINTER_PLUS_EXPR
219 && TREE_CODE (TREE_OPERAND (cval
, 1)) == INTEGER_CST
)
221 tree ptr
= TREE_OPERAND (cval
, 0);
222 if (is_gimple_min_invariant (ptr
))
223 cval
= build1_loc (EXPR_LOCATION (cval
),
224 ADDR_EXPR
, TREE_TYPE (ptr
),
225 fold_build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (ptr
)),
227 fold_convert (ptr_type_node
,
228 TREE_OPERAND (cval
, 1))));
230 if (TREE_CODE (cval
) == ADDR_EXPR
)
232 tree base
= NULL_TREE
;
233 if (TREE_CODE (TREE_OPERAND (cval
, 0)) == COMPOUND_LITERAL_EXPR
)
235 base
= COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval
, 0));
237 TREE_OPERAND (cval
, 0) = base
;
240 base
= get_base_address (TREE_OPERAND (cval
, 0));
244 if (VAR_OR_FUNCTION_DECL_P (base
)
245 && !can_refer_decl_in_current_unit_p (base
, from_decl
))
247 if (TREE_TYPE (base
) == error_mark_node
)
250 /* ??? We should be able to assert that TREE_ADDRESSABLE is set,
251 but since the use can be in a debug stmt we can't. */
253 else if (TREE_CODE (base
) == FUNCTION_DECL
)
255 /* Make sure we create a cgraph node for functions we'll reference.
256 They can be non-existent if the reference comes from an entry
257 of an external vtable for example. */
258 cgraph_node::get_create (base
);
260 /* Fixup types in global initializers. */
261 if (TREE_TYPE (TREE_TYPE (cval
)) != TREE_TYPE (TREE_OPERAND (cval
, 0)))
262 cval
= build_fold_addr_expr (TREE_OPERAND (cval
, 0));
264 if (!useless_type_conversion_p (TREE_TYPE (orig_cval
), TREE_TYPE (cval
)))
265 cval
= fold_convert (TREE_TYPE (orig_cval
), cval
);
268 /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0. */
269 if (TREE_CODE (cval
) == INTEGER_CST
)
271 if (TREE_OVERFLOW_P (cval
))
272 cval
= drop_tree_overflow (cval
);
273 if (!useless_type_conversion_p (TREE_TYPE (orig_cval
), TREE_TYPE (cval
)))
274 cval
= fold_convert (TREE_TYPE (orig_cval
), cval
);
280 /* If SYM is a constant variable with known value, return the value.
281 NULL_TREE is returned otherwise. */
284 get_symbol_constant_value (tree sym
)
286 tree val
= ctor_for_folding (sym
);
287 if (val
!= error_mark_node
)
291 val
= canonicalize_constructor_val (unshare_expr (val
), sym
);
293 && is_gimple_min_invariant (val
)
294 && useless_type_conversion_p (TREE_TYPE (sym
), TREE_TYPE (val
)))
299 /* Variables declared 'const' without an initializer
300 have zero as the initializer if they may not be
301 overridden at link or run time. */
303 && is_gimple_reg_type (TREE_TYPE (sym
)))
304 return build_zero_cst (TREE_TYPE (sym
));
312 /* Subroutine of fold_stmt. We perform constant folding of the
313 memory reference tree EXPR. */
316 maybe_fold_reference (tree expr
)
318 tree result
= NULL_TREE
;
320 if ((TREE_CODE (expr
) == VIEW_CONVERT_EXPR
321 || TREE_CODE (expr
) == REALPART_EXPR
322 || TREE_CODE (expr
) == IMAGPART_EXPR
)
323 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
324 result
= fold_unary_loc (EXPR_LOCATION (expr
),
327 TREE_OPERAND (expr
, 0));
328 else if (TREE_CODE (expr
) == BIT_FIELD_REF
329 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
330 result
= fold_ternary_loc (EXPR_LOCATION (expr
),
333 TREE_OPERAND (expr
, 0),
334 TREE_OPERAND (expr
, 1),
335 TREE_OPERAND (expr
, 2));
337 result
= fold_const_aggregate_ref (expr
);
339 if (result
&& is_gimple_min_invariant (result
))
345 /* Return true if EXPR is an acceptable right-hand-side for a
346 GIMPLE assignment. We validate the entire tree, not just
347 the root node, thus catching expressions that embed complex
348 operands that are not permitted in GIMPLE. This function
349 is needed because the folding routines in fold-const.cc
350 may return such expressions in some cases, e.g., an array
351 access with an embedded index addition. It may make more
352 sense to have folding routines that are sensitive to the
353 constraints on GIMPLE operands, rather than abandoning any
354 any attempt to fold if the usual folding turns out to be too
358 valid_gimple_rhs_p (tree expr
)
360 enum tree_code code
= TREE_CODE (expr
);
362 switch (TREE_CODE_CLASS (code
))
364 case tcc_declaration
:
365 if (!is_gimple_variable (expr
))
370 /* All constants are ok. */
374 /* GENERIC allows comparisons with non-boolean types, reject
375 those for GIMPLE. Let vector-typed comparisons pass - rules
376 for GENERIC and GIMPLE are the same here. */
377 if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr
))
378 && (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
379 || TYPE_PRECISION (TREE_TYPE (expr
)) == 1))
380 && ! VECTOR_TYPE_P (TREE_TYPE (expr
)))
385 if (!is_gimple_val (TREE_OPERAND (expr
, 0))
386 || !is_gimple_val (TREE_OPERAND (expr
, 1)))
391 if (!is_gimple_val (TREE_OPERAND (expr
, 0)))
401 if (is_gimple_min_invariant (expr
))
403 t
= TREE_OPERAND (expr
, 0);
404 while (handled_component_p (t
))
406 /* ??? More checks needed, see the GIMPLE verifier. */
407 if ((TREE_CODE (t
) == ARRAY_REF
408 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
409 && !is_gimple_val (TREE_OPERAND (t
, 1)))
411 t
= TREE_OPERAND (t
, 0);
413 if (!is_gimple_id (t
))
419 if (get_gimple_rhs_class (code
) == GIMPLE_TERNARY_RHS
)
421 if ((code
== COND_EXPR
422 ? !is_gimple_condexpr (TREE_OPERAND (expr
, 0))
423 : !is_gimple_val (TREE_OPERAND (expr
, 0)))
424 || !is_gimple_val (TREE_OPERAND (expr
, 1))
425 || !is_gimple_val (TREE_OPERAND (expr
, 2)))
436 case tcc_exceptional
:
437 if (code
== CONSTRUCTOR
)
441 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr
), i
, elt
)
442 if (!is_gimple_val (elt
))
446 if (code
!= SSA_NAME
)
451 if (code
== BIT_FIELD_REF
)
452 return is_gimple_val (TREE_OPERAND (expr
, 0));
463 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
464 replacement rhs for the statement or NULL_TREE if no simplification
465 could be made. It is assumed that the operands have been previously
469 fold_gimple_assign (gimple_stmt_iterator
*si
)
471 gimple
*stmt
= gsi_stmt (*si
);
472 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
473 location_t loc
= gimple_location (stmt
);
475 tree result
= NULL_TREE
;
477 switch (get_gimple_rhs_class (subcode
))
479 case GIMPLE_SINGLE_RHS
:
481 tree rhs
= gimple_assign_rhs1 (stmt
);
483 if (TREE_CLOBBER_P (rhs
))
486 if (REFERENCE_CLASS_P (rhs
))
487 return maybe_fold_reference (rhs
);
489 else if (TREE_CODE (rhs
) == OBJ_TYPE_REF
)
491 tree val
= OBJ_TYPE_REF_EXPR (rhs
);
492 if (is_gimple_min_invariant (val
))
494 else if (flag_devirtualize
&& virtual_method_call_p (rhs
))
497 vec
<cgraph_node
*>targets
498 = possible_polymorphic_call_targets (rhs
, stmt
, &final
);
499 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
501 if (dump_enabled_p ())
503 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, stmt
,
504 "resolving virtual function address "
505 "reference to function %s\n",
506 targets
.length () == 1
507 ? targets
[0]->name ()
510 if (targets
.length () == 1)
512 val
= fold_convert (TREE_TYPE (val
),
513 build_fold_addr_expr_loc
514 (loc
, targets
[0]->decl
));
515 STRIP_USELESS_TYPE_CONVERSION (val
);
518 /* We cannot use __builtin_unreachable here because it
519 cannot have address taken. */
520 val
= build_int_cst (TREE_TYPE (val
), 0);
526 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
528 tree ref
= TREE_OPERAND (rhs
, 0);
529 if (TREE_CODE (ref
) == MEM_REF
530 && integer_zerop (TREE_OPERAND (ref
, 1)))
532 result
= TREE_OPERAND (ref
, 0);
533 if (!useless_type_conversion_p (TREE_TYPE (rhs
),
535 result
= build1 (NOP_EXPR
, TREE_TYPE (rhs
), result
);
540 else if (TREE_CODE (rhs
) == CONSTRUCTOR
541 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
)
543 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
547 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
548 if (! CONSTANT_CLASS_P (val
))
551 return build_vector_from_ctor (TREE_TYPE (rhs
),
552 CONSTRUCTOR_ELTS (rhs
));
555 else if (DECL_P (rhs
)
556 && is_gimple_reg_type (TREE_TYPE (rhs
)))
557 return get_symbol_constant_value (rhs
);
561 case GIMPLE_UNARY_RHS
:
564 case GIMPLE_BINARY_RHS
:
567 case GIMPLE_TERNARY_RHS
:
568 result
= fold_ternary_loc (loc
, subcode
,
569 TREE_TYPE (gimple_assign_lhs (stmt
)),
570 gimple_assign_rhs1 (stmt
),
571 gimple_assign_rhs2 (stmt
),
572 gimple_assign_rhs3 (stmt
));
576 STRIP_USELESS_TYPE_CONVERSION (result
);
577 if (valid_gimple_rhs_p (result
))
582 case GIMPLE_INVALID_RHS
:
590 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
591 adjusting the replacement stmts location and virtual operands.
592 If the statement has a lhs the last stmt in the sequence is expected
593 to assign to that lhs. */
596 gsi_replace_with_seq_vops (gimple_stmt_iterator
*si_p
, gimple_seq stmts
)
598 gimple
*stmt
= gsi_stmt (*si_p
);
600 if (gimple_has_location (stmt
))
601 annotate_all_with_location (stmts
, gimple_location (stmt
));
603 /* First iterate over the replacement statements backward, assigning
604 virtual operands to their defining statements. */
605 gimple
*laststore
= NULL
;
606 for (gimple_stmt_iterator i
= gsi_last (stmts
);
607 !gsi_end_p (i
); gsi_prev (&i
))
609 gimple
*new_stmt
= gsi_stmt (i
);
610 if ((gimple_assign_single_p (new_stmt
)
611 && !is_gimple_reg (gimple_assign_lhs (new_stmt
)))
612 || (is_gimple_call (new_stmt
)
613 && (gimple_call_flags (new_stmt
)
614 & (ECF_NOVOPS
| ECF_PURE
| ECF_CONST
| ECF_NORETURN
)) == 0))
618 vdef
= gimple_vdef (stmt
);
620 vdef
= make_ssa_name (gimple_vop (cfun
), new_stmt
);
621 gimple_set_vdef (new_stmt
, vdef
);
622 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
623 SSA_NAME_DEF_STMT (vdef
) = new_stmt
;
624 laststore
= new_stmt
;
628 /* Second iterate over the statements forward, assigning virtual
629 operands to their uses. */
630 tree reaching_vuse
= gimple_vuse (stmt
);
631 for (gimple_stmt_iterator i
= gsi_start (stmts
);
632 !gsi_end_p (i
); gsi_next (&i
))
634 gimple
*new_stmt
= gsi_stmt (i
);
635 /* If the new statement possibly has a VUSE, update it with exact SSA
636 name we know will reach this one. */
637 if (gimple_has_mem_ops (new_stmt
))
638 gimple_set_vuse (new_stmt
, reaching_vuse
);
639 gimple_set_modified (new_stmt
, true);
640 if (gimple_vdef (new_stmt
))
641 reaching_vuse
= gimple_vdef (new_stmt
);
644 /* If the new sequence does not do a store release the virtual
645 definition of the original statement. */
647 && reaching_vuse
== gimple_vuse (stmt
))
649 tree vdef
= gimple_vdef (stmt
);
651 && TREE_CODE (vdef
) == SSA_NAME
)
653 unlink_stmt_vdef (stmt
);
654 release_ssa_name (vdef
);
658 /* Finally replace the original statement with the sequence. */
659 gsi_replace_with_seq (si_p
, stmts
, false);
662 /* Helper function for update_gimple_call and
663 gimplify_and_update_call_from_tree. A GIMPLE_CALL STMT is being replaced
664 with GIMPLE_CALL NEW_STMT. */
667 finish_update_gimple_call (gimple_stmt_iterator
*si_p
, gimple
*new_stmt
,
670 tree lhs
= gimple_call_lhs (stmt
);
671 gimple_call_set_lhs (new_stmt
, lhs
);
672 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
673 SSA_NAME_DEF_STMT (lhs
) = new_stmt
;
674 gimple_move_vops (new_stmt
, stmt
);
675 gimple_set_location (new_stmt
, gimple_location (stmt
));
676 if (gimple_block (new_stmt
) == NULL_TREE
)
677 gimple_set_block (new_stmt
, gimple_block (stmt
));
678 gsi_replace (si_p
, new_stmt
, false);
681 /* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
682 with number of arguments NARGS, where the arguments in GIMPLE form
683 follow NARGS argument. */
686 update_gimple_call (gimple_stmt_iterator
*si_p
, tree fn
, int nargs
, ...)
689 gcall
*new_stmt
, *stmt
= as_a
<gcall
*> (gsi_stmt (*si_p
));
691 gcc_assert (is_gimple_call (stmt
));
692 va_start (ap
, nargs
);
693 new_stmt
= gimple_build_call_valist (fn
, nargs
, ap
);
694 finish_update_gimple_call (si_p
, new_stmt
, stmt
);
699 /* Return true if EXPR is a CALL_EXPR suitable for representation
700 as a single GIMPLE_CALL statement. If the arguments require
701 further gimplification, return false. */
704 valid_gimple_call_p (tree expr
)
708 if (TREE_CODE (expr
) != CALL_EXPR
)
711 nargs
= call_expr_nargs (expr
);
712 for (i
= 0; i
< nargs
; i
++)
714 tree arg
= CALL_EXPR_ARG (expr
, i
);
715 if (is_gimple_reg_type (TREE_TYPE (arg
)))
717 if (!is_gimple_val (arg
))
721 if (!is_gimple_lvalue (arg
))
728 /* Convert EXPR into a GIMPLE value suitable for substitution on the
729 RHS of an assignment. Insert the necessary statements before
730 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
731 is replaced. If the call is expected to produces a result, then it
732 is replaced by an assignment of the new RHS to the result variable.
733 If the result is to be ignored, then the call is replaced by a
734 GIMPLE_NOP. A proper VDEF chain is retained by making the first
735 VUSE and the last VDEF of the whole sequence be the same as the replaced
736 statement and using new SSA names for stores in between. */
739 gimplify_and_update_call_from_tree (gimple_stmt_iterator
*si_p
, tree expr
)
742 gimple
*stmt
, *new_stmt
;
743 gimple_stmt_iterator i
;
744 gimple_seq stmts
= NULL
;
746 stmt
= gsi_stmt (*si_p
);
748 gcc_assert (is_gimple_call (stmt
));
750 if (valid_gimple_call_p (expr
))
752 /* The call has simplified to another call. */
753 tree fn
= CALL_EXPR_FN (expr
);
755 unsigned nargs
= call_expr_nargs (expr
);
756 vec
<tree
> args
= vNULL
;
762 args
.safe_grow_cleared (nargs
, true);
764 for (i
= 0; i
< nargs
; i
++)
765 args
[i
] = CALL_EXPR_ARG (expr
, i
);
768 new_stmt
= gimple_build_call_vec (fn
, args
);
769 finish_update_gimple_call (si_p
, new_stmt
, stmt
);
774 lhs
= gimple_call_lhs (stmt
);
775 if (lhs
== NULL_TREE
)
777 push_gimplify_context (gimple_in_ssa_p (cfun
));
778 gimplify_and_add (expr
, &stmts
);
779 pop_gimplify_context (NULL
);
781 /* We can end up with folding a memcpy of an empty class assignment
782 which gets optimized away by C++ gimplification. */
783 if (gimple_seq_empty_p (stmts
))
785 if (gimple_in_ssa_p (cfun
))
787 unlink_stmt_vdef (stmt
);
790 gsi_replace (si_p
, gimple_build_nop (), false);
796 tree tmp
= force_gimple_operand (expr
, &stmts
, false, NULL_TREE
);
797 new_stmt
= gimple_build_assign (lhs
, tmp
);
798 i
= gsi_last (stmts
);
799 gsi_insert_after_without_update (&i
, new_stmt
,
800 GSI_CONTINUE_LINKING
);
803 gsi_replace_with_seq_vops (si_p
, stmts
);
807 /* Replace the call at *GSI with the gimple value VAL. */
810 replace_call_with_value (gimple_stmt_iterator
*gsi
, tree val
)
812 gimple
*stmt
= gsi_stmt (*gsi
);
813 tree lhs
= gimple_call_lhs (stmt
);
817 if (!useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (val
)))
818 val
= fold_convert (TREE_TYPE (lhs
), val
);
819 repl
= gimple_build_assign (lhs
, val
);
822 repl
= gimple_build_nop ();
823 tree vdef
= gimple_vdef (stmt
);
824 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
826 unlink_stmt_vdef (stmt
);
827 release_ssa_name (vdef
);
829 gsi_replace (gsi
, repl
, false);
832 /* Replace the call at *GSI with the new call REPL and fold that
836 replace_call_with_call_and_fold (gimple_stmt_iterator
*gsi
, gimple
*repl
)
838 gimple
*stmt
= gsi_stmt (*gsi
);
839 gimple_call_set_lhs (repl
, gimple_call_lhs (stmt
));
840 gimple_set_location (repl
, gimple_location (stmt
));
841 gimple_move_vops (repl
, stmt
);
842 gsi_replace (gsi
, repl
, false);
846 /* Return true if VAR is a VAR_DECL or a component thereof. */
849 var_decl_component_p (tree var
)
852 while (handled_component_p (inner
))
853 inner
= TREE_OPERAND (inner
, 0);
854 return (DECL_P (inner
)
855 || (TREE_CODE (inner
) == MEM_REF
856 && TREE_CODE (TREE_OPERAND (inner
, 0)) == ADDR_EXPR
));
859 /* Return TRUE if the SIZE argument, representing the size of an
860 object, is in a range of values of which exactly zero is valid. */
863 size_must_be_zero_p (tree size
)
865 if (integer_zerop (size
))
868 if (TREE_CODE (size
) != SSA_NAME
|| !INTEGRAL_TYPE_P (TREE_TYPE (size
)))
871 tree type
= TREE_TYPE (size
);
872 int prec
= TYPE_PRECISION (type
);
874 /* Compute the value of SSIZE_MAX, the largest positive value that
875 can be stored in ssize_t, the signed counterpart of size_t. */
876 wide_int ssize_max
= wi::lshift (wi::one (prec
), prec
- 1) - 1;
877 value_range
valid_range (build_int_cst (type
, 0),
878 wide_int_to_tree (type
, ssize_max
));
881 get_range_query (cfun
)->range_of_expr (vr
, size
);
883 get_global_range_query ()->range_of_expr (vr
, size
);
884 if (vr
.undefined_p ())
885 vr
.set_varying (TREE_TYPE (size
));
886 vr
.intersect (valid_range
);
890 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
891 diagnose (otherwise undefined) overlapping copies without preventing
892 folding. When folded, GCC guarantees that overlapping memcpy has
893 the same semantics as memmove. Call to the library memcpy need not
894 provide the same guarantee. Return false if no simplification can
898 gimple_fold_builtin_memory_op (gimple_stmt_iterator
*gsi
,
899 tree dest
, tree src
, enum built_in_function code
)
901 gimple
*stmt
= gsi_stmt (*gsi
);
902 tree lhs
= gimple_call_lhs (stmt
);
903 tree len
= gimple_call_arg (stmt
, 2);
904 location_t loc
= gimple_location (stmt
);
906 /* If the LEN parameter is a constant zero or in range where
907 the only valid value is zero, return DEST. */
908 if (size_must_be_zero_p (len
))
911 if (gimple_call_lhs (stmt
))
912 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
914 repl
= gimple_build_nop ();
915 tree vdef
= gimple_vdef (stmt
);
916 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
918 unlink_stmt_vdef (stmt
);
919 release_ssa_name (vdef
);
921 gsi_replace (gsi
, repl
, false);
925 /* If SRC and DEST are the same (and not volatile), return
926 DEST{,+LEN,+LEN-1}. */
927 if (operand_equal_p (src
, dest
, 0))
929 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
930 It's safe and may even be emitted by GCC itself (see bug
932 unlink_stmt_vdef (stmt
);
933 if (gimple_vdef (stmt
) && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
934 release_ssa_name (gimple_vdef (stmt
));
937 gsi_replace (gsi
, gimple_build_nop (), false);
944 /* We cannot (easily) change the type of the copy if it is a storage
945 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
946 modify the storage order of objects (see storage_order_barrier_p). */
948 = POINTER_TYPE_P (TREE_TYPE (src
))
949 ? TREE_TYPE (TREE_TYPE (src
)) : NULL_TREE
;
951 = POINTER_TYPE_P (TREE_TYPE (dest
))
952 ? TREE_TYPE (TREE_TYPE (dest
)) : NULL_TREE
;
953 tree destvar
, srcvar
, srcoff
;
954 unsigned int src_align
, dest_align
;
955 unsigned HOST_WIDE_INT tmp_len
;
958 /* Build accesses at offset zero with a ref-all character type. */
960 = build_int_cst (build_pointer_type_for_mode (char_type_node
,
963 /* If we can perform the copy efficiently with first doing all loads
964 and then all stores inline it that way. Currently efficiently
965 means that we can load all the memory into a single integer
966 register which is what MOVE_MAX gives us. */
967 src_align
= get_pointer_alignment (src
);
968 dest_align
= get_pointer_alignment (dest
);
969 if (tree_fits_uhwi_p (len
)
970 && compare_tree_int (len
, MOVE_MAX
) <= 0
971 /* FIXME: Don't transform copies from strings with known length.
972 Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
973 from being handled, and the case was XFAILed for that reason.
974 Now that it is handled and the XFAIL removed, as soon as other
975 strlenopt tests that rely on it for passing are adjusted, this
976 hack can be removed. */
977 && !c_strlen (src
, 1)
978 && !((tmp_str
= getbyterep (src
, &tmp_len
)) != NULL
979 && memchr (tmp_str
, 0, tmp_len
) == NULL
)
981 && AGGREGATE_TYPE_P (srctype
)
982 && TYPE_REVERSE_STORAGE_ORDER (srctype
))
984 && AGGREGATE_TYPE_P (desttype
)
985 && TYPE_REVERSE_STORAGE_ORDER (desttype
)))
987 unsigned ilen
= tree_to_uhwi (len
);
988 if (pow2p_hwi (ilen
))
990 /* Detect out-of-bounds accesses without issuing warnings.
991 Avoid folding out-of-bounds copies but to avoid false
992 positives for unreachable code defer warning until after
993 DCE has worked its magic.
994 -Wrestrict is still diagnosed. */
995 if (int warning
= check_bounds_or_overlap (as_a
<gcall
*>(stmt
),
998 if (warning
!= OPT_Wrestrict
)
1001 scalar_int_mode mode
;
1002 if (int_mode_for_size (ilen
* 8, 0).exists (&mode
)
1003 && GET_MODE_SIZE (mode
) * BITS_PER_UNIT
== ilen
* 8
1004 /* If the destination pointer is not aligned we must be able
1005 to emit an unaligned store. */
1006 && (dest_align
>= GET_MODE_ALIGNMENT (mode
)
1007 || !targetm
.slow_unaligned_access (mode
, dest_align
)
1008 || (optab_handler (movmisalign_optab
, mode
)
1009 != CODE_FOR_nothing
)))
1011 tree type
= build_nonstandard_integer_type (ilen
* 8, 1);
1012 tree srctype
= type
;
1013 tree desttype
= type
;
1014 if (src_align
< GET_MODE_ALIGNMENT (mode
))
1015 srctype
= build_aligned_type (type
, src_align
);
1016 tree srcmem
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1017 tree tem
= fold_const_aggregate_ref (srcmem
);
1020 else if (src_align
< GET_MODE_ALIGNMENT (mode
)
1021 && targetm
.slow_unaligned_access (mode
, src_align
)
1022 && (optab_handler (movmisalign_optab
, mode
)
1023 == CODE_FOR_nothing
))
1028 if (is_gimple_reg_type (TREE_TYPE (srcmem
)))
1030 new_stmt
= gimple_build_assign (NULL_TREE
, srcmem
);
1032 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem
),
1034 gimple_assign_set_lhs (new_stmt
, srcmem
);
1035 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1036 gimple_set_location (new_stmt
, loc
);
1037 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1039 if (dest_align
< GET_MODE_ALIGNMENT (mode
))
1040 desttype
= build_aligned_type (type
, dest_align
);
1042 = gimple_build_assign (fold_build2 (MEM_REF
, desttype
,
1045 gimple_move_vops (new_stmt
, stmt
);
1048 gsi_replace (gsi
, new_stmt
, false);
1051 gimple_set_location (new_stmt
, loc
);
1052 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1059 if (code
== BUILT_IN_MEMMOVE
)
1061 /* Both DEST and SRC must be pointer types.
1062 ??? This is what old code did. Is the testing for pointer types
1065 If either SRC is readonly or length is 1, we can use memcpy. */
1066 if (!dest_align
|| !src_align
)
1068 if (readonly_data_expr (src
)
1069 || (tree_fits_uhwi_p (len
)
1070 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
1071 >= tree_to_uhwi (len
))))
1073 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1076 gimple_call_set_fndecl (stmt
, fn
);
1077 gimple_call_set_arg (stmt
, 0, dest
);
1078 gimple_call_set_arg (stmt
, 1, src
);
1083 /* If *src and *dest can't overlap, optimize into memcpy as well. */
1084 if (TREE_CODE (src
) == ADDR_EXPR
1085 && TREE_CODE (dest
) == ADDR_EXPR
)
1087 tree src_base
, dest_base
, fn
;
1088 poly_int64 src_offset
= 0, dest_offset
= 0;
1089 poly_uint64 maxsize
;
1091 srcvar
= TREE_OPERAND (src
, 0);
1092 src_base
= get_addr_base_and_unit_offset (srcvar
, &src_offset
);
1093 if (src_base
== NULL
)
1095 destvar
= TREE_OPERAND (dest
, 0);
1096 dest_base
= get_addr_base_and_unit_offset (destvar
,
1098 if (dest_base
== NULL
)
1099 dest_base
= destvar
;
1100 if (!poly_int_tree_p (len
, &maxsize
))
1102 if (SSA_VAR_P (src_base
)
1103 && SSA_VAR_P (dest_base
))
1105 if (operand_equal_p (src_base
, dest_base
, 0)
1106 && ranges_maybe_overlap_p (src_offset
, maxsize
,
1107 dest_offset
, maxsize
))
1110 else if (TREE_CODE (src_base
) == MEM_REF
1111 && TREE_CODE (dest_base
) == MEM_REF
)
1113 if (! operand_equal_p (TREE_OPERAND (src_base
, 0),
1114 TREE_OPERAND (dest_base
, 0), 0))
1116 poly_offset_int full_src_offset
1117 = mem_ref_offset (src_base
) + src_offset
;
1118 poly_offset_int full_dest_offset
1119 = mem_ref_offset (dest_base
) + dest_offset
;
1120 if (ranges_maybe_overlap_p (full_src_offset
, maxsize
,
1121 full_dest_offset
, maxsize
))
1127 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1130 gimple_call_set_fndecl (stmt
, fn
);
1131 gimple_call_set_arg (stmt
, 0, dest
);
1132 gimple_call_set_arg (stmt
, 1, src
);
1137 /* If the destination and source do not alias optimize into
1139 if ((is_gimple_min_invariant (dest
)
1140 || TREE_CODE (dest
) == SSA_NAME
)
1141 && (is_gimple_min_invariant (src
)
1142 || TREE_CODE (src
) == SSA_NAME
))
1145 ao_ref_init_from_ptr_and_size (&destr
, dest
, len
);
1146 ao_ref_init_from_ptr_and_size (&srcr
, src
, len
);
1147 if (!refs_may_alias_p_1 (&destr
, &srcr
, false))
1150 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1153 gimple_call_set_fndecl (stmt
, fn
);
1154 gimple_call_set_arg (stmt
, 0, dest
);
1155 gimple_call_set_arg (stmt
, 1, src
);
1164 if (!tree_fits_shwi_p (len
))
1167 || (AGGREGATE_TYPE_P (srctype
)
1168 && TYPE_REVERSE_STORAGE_ORDER (srctype
)))
1171 || (AGGREGATE_TYPE_P (desttype
)
1172 && TYPE_REVERSE_STORAGE_ORDER (desttype
)))
1174 /* In the following try to find a type that is most natural to be
1175 used for the memcpy source and destination and that allows
1176 the most optimization when memcpy is turned into a plain assignment
1177 using that type. In theory we could always use a char[len] type
1178 but that only gains us that the destination and source possibly
1179 no longer will have their address taken. */
1180 if (TREE_CODE (srctype
) == ARRAY_TYPE
1181 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
1182 srctype
= TREE_TYPE (srctype
);
1183 if (TREE_CODE (desttype
) == ARRAY_TYPE
1184 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
1185 desttype
= TREE_TYPE (desttype
);
1186 if (TREE_ADDRESSABLE (srctype
)
1187 || TREE_ADDRESSABLE (desttype
))
1190 /* Make sure we are not copying using a floating-point mode or
1191 a type whose size possibly does not match its precision. */
1192 if (FLOAT_MODE_P (TYPE_MODE (desttype
))
1193 || TREE_CODE (desttype
) == BOOLEAN_TYPE
1194 || TREE_CODE (desttype
) == ENUMERAL_TYPE
)
1195 desttype
= bitwise_type_for_mode (TYPE_MODE (desttype
));
1196 if (FLOAT_MODE_P (TYPE_MODE (srctype
))
1197 || TREE_CODE (srctype
) == BOOLEAN_TYPE
1198 || TREE_CODE (srctype
) == ENUMERAL_TYPE
)
1199 srctype
= bitwise_type_for_mode (TYPE_MODE (srctype
));
1207 src_align
= get_pointer_alignment (src
);
1208 dest_align
= get_pointer_alignment (dest
);
1210 /* Choose between src and destination type for the access based
1211 on alignment, whether the access constitutes a register access
1212 and whether it may actually expose a declaration for SSA rewrite
1213 or SRA decomposition. Also try to expose a string constant, we
1214 might be able to concatenate several of them later into a single
1216 destvar
= NULL_TREE
;
1218 if (TREE_CODE (dest
) == ADDR_EXPR
1219 && var_decl_component_p (TREE_OPERAND (dest
, 0))
1220 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
)
1221 && dest_align
>= TYPE_ALIGN (desttype
)
1222 && (is_gimple_reg_type (desttype
)
1223 || src_align
>= TYPE_ALIGN (desttype
)))
1224 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1225 else if (TREE_CODE (src
) == ADDR_EXPR
1226 && var_decl_component_p (TREE_OPERAND (src
, 0))
1227 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
)
1228 && src_align
>= TYPE_ALIGN (srctype
)
1229 && (is_gimple_reg_type (srctype
)
1230 || dest_align
>= TYPE_ALIGN (srctype
)))
1231 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1232 /* FIXME: Don't transform copies from strings with known original length.
1233 As soon as strlenopt tests that rely on it for passing are adjusted,
1234 this hack can be removed. */
1235 else if (gimple_call_alloca_for_var_p (stmt
)
1236 && (srcvar
= string_constant (src
, &srcoff
, NULL
, NULL
))
1237 && integer_zerop (srcoff
)
1238 && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar
)), len
)
1239 && dest_align
>= TYPE_ALIGN (TREE_TYPE (srcvar
)))
1240 srctype
= TREE_TYPE (srcvar
);
1244 /* Now that we chose an access type express the other side in
1245 terms of it if the target allows that with respect to alignment
1247 if (srcvar
== NULL_TREE
)
1249 if (src_align
>= TYPE_ALIGN (desttype
))
1250 srcvar
= fold_build2 (MEM_REF
, desttype
, src
, off0
);
1253 enum machine_mode mode
= TYPE_MODE (desttype
);
1254 if ((mode
== BLKmode
&& STRICT_ALIGNMENT
)
1255 || (targetm
.slow_unaligned_access (mode
, src_align
)
1256 && (optab_handler (movmisalign_optab
, mode
)
1257 == CODE_FOR_nothing
)))
1259 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
1261 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1264 else if (destvar
== NULL_TREE
)
1266 if (dest_align
>= TYPE_ALIGN (srctype
))
1267 destvar
= fold_build2 (MEM_REF
, srctype
, dest
, off0
);
1270 enum machine_mode mode
= TYPE_MODE (srctype
);
1271 if ((mode
== BLKmode
&& STRICT_ALIGNMENT
)
1272 || (targetm
.slow_unaligned_access (mode
, dest_align
)
1273 && (optab_handler (movmisalign_optab
, mode
)
1274 == CODE_FOR_nothing
)))
1276 desttype
= build_aligned_type (TYPE_MAIN_VARIANT (srctype
),
1278 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1282 /* Same as above, detect out-of-bounds accesses without issuing
1283 warnings. Avoid folding out-of-bounds copies but to avoid
1284 false positives for unreachable code defer warning until
1285 after DCE has worked its magic.
1286 -Wrestrict is still diagnosed. */
1287 if (int warning
= check_bounds_or_overlap (as_a
<gcall
*>(stmt
),
1288 dest
, src
, len
, len
,
1290 if (warning
!= OPT_Wrestrict
)
1294 if (is_gimple_reg_type (TREE_TYPE (srcvar
)))
1296 tree tem
= fold_const_aggregate_ref (srcvar
);
1299 if (! is_gimple_min_invariant (srcvar
))
1301 new_stmt
= gimple_build_assign (NULL_TREE
, srcvar
);
1302 srcvar
= create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar
),
1304 gimple_assign_set_lhs (new_stmt
, srcvar
);
1305 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1306 gimple_set_location (new_stmt
, loc
);
1307 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1309 new_stmt
= gimple_build_assign (destvar
, srcvar
);
1310 goto set_vop_and_replace
;
1313 /* We get an aggregate copy. If the source is a STRING_CST, then
1314 directly use its type to perform the copy. */
1315 if (TREE_CODE (srcvar
) == STRING_CST
)
1318 /* Or else, use an unsigned char[] type to perform the copy in order
1319 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
1320 types or float modes behavior on copying. */
1323 desttype
= build_array_type_nelts (unsigned_char_type_node
,
1324 tree_to_uhwi (len
));
1326 if (src_align
> TYPE_ALIGN (srctype
))
1327 srctype
= build_aligned_type (srctype
, src_align
);
1328 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1331 if (dest_align
> TYPE_ALIGN (desttype
))
1332 desttype
= build_aligned_type (desttype
, dest_align
);
1333 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1334 new_stmt
= gimple_build_assign (destvar
, srcvar
);
1336 set_vop_and_replace
:
1337 gimple_move_vops (new_stmt
, stmt
);
1340 gsi_replace (gsi
, new_stmt
, false);
1343 gimple_set_location (new_stmt
, loc
);
1344 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1348 gimple_seq stmts
= NULL
;
1349 if (code
== BUILT_IN_MEMCPY
|| code
== BUILT_IN_MEMMOVE
)
1351 else if (code
== BUILT_IN_MEMPCPY
)
1353 len
= gimple_convert_to_ptrofftype (&stmts
, loc
, len
);
1354 dest
= gimple_build (&stmts
, loc
, POINTER_PLUS_EXPR
,
1355 TREE_TYPE (dest
), dest
, len
);
1360 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
1361 gimple
*repl
= gimple_build_assign (lhs
, dest
);
1362 gsi_replace (gsi
, repl
, false);
1366 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1367 to built-in memcmp (a, b, len). */
1370 gimple_fold_builtin_bcmp (gimple_stmt_iterator
*gsi
)
1372 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCMP
);
1377 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1379 gimple
*stmt
= gsi_stmt (*gsi
);
1380 tree a
= gimple_call_arg (stmt
, 0);
1381 tree b
= gimple_call_arg (stmt
, 1);
1382 tree len
= gimple_call_arg (stmt
, 2);
1384 gimple
*repl
= gimple_build_call (fn
, 3, a
, b
, len
);
1385 replace_call_with_call_and_fold (gsi
, repl
);
1390 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1391 to built-in memmove (dest, src, len). */
1394 gimple_fold_builtin_bcopy (gimple_stmt_iterator
*gsi
)
1396 tree fn
= builtin_decl_implicit (BUILT_IN_MEMMOVE
);
1401 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1402 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1403 len) into memmove (dest, src, len). */
1405 gimple
*stmt
= gsi_stmt (*gsi
);
1406 tree src
= gimple_call_arg (stmt
, 0);
1407 tree dest
= gimple_call_arg (stmt
, 1);
1408 tree len
= gimple_call_arg (stmt
, 2);
1410 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1411 gimple_call_set_fntype (as_a
<gcall
*> (stmt
), TREE_TYPE (fn
));
1412 replace_call_with_call_and_fold (gsi
, repl
);
1417 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1418 to built-in memset (dest, 0, len). */
1421 gimple_fold_builtin_bzero (gimple_stmt_iterator
*gsi
)
1423 tree fn
= builtin_decl_implicit (BUILT_IN_MEMSET
);
1428 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1430 gimple
*stmt
= gsi_stmt (*gsi
);
1431 tree dest
= gimple_call_arg (stmt
, 0);
1432 tree len
= gimple_call_arg (stmt
, 1);
1434 gimple_seq seq
= NULL
;
1435 gimple
*repl
= gimple_build_call (fn
, 3, dest
, integer_zero_node
, len
);
1436 gimple_seq_add_stmt_without_update (&seq
, repl
);
1437 gsi_replace_with_seq_vops (gsi
, seq
);
1443 /* Fold function call to builtin memset or bzero at *GSI setting the
1444 memory of size LEN to VAL. Return whether a simplification was made. */
1447 gimple_fold_builtin_memset (gimple_stmt_iterator
*gsi
, tree c
, tree len
)
1449 gimple
*stmt
= gsi_stmt (*gsi
);
1451 unsigned HOST_WIDE_INT length
, cval
;
1453 /* If the LEN parameter is zero, return DEST. */
1454 if (integer_zerop (len
))
1456 replace_call_with_value (gsi
, gimple_call_arg (stmt
, 0));
1460 if (! tree_fits_uhwi_p (len
))
1463 if (TREE_CODE (c
) != INTEGER_CST
)
1466 tree dest
= gimple_call_arg (stmt
, 0);
1468 if (TREE_CODE (var
) != ADDR_EXPR
)
1471 var
= TREE_OPERAND (var
, 0);
1472 if (TREE_THIS_VOLATILE (var
))
1475 etype
= TREE_TYPE (var
);
1476 if (TREE_CODE (etype
) == ARRAY_TYPE
)
1477 etype
= TREE_TYPE (etype
);
1479 if (!INTEGRAL_TYPE_P (etype
)
1480 && !POINTER_TYPE_P (etype
))
1483 if (! var_decl_component_p (var
))
1486 length
= tree_to_uhwi (len
);
1487 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype
)) != length
1488 || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype
))
1489 != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype
)))
1490 || get_pointer_alignment (dest
) / BITS_PER_UNIT
< length
)
1493 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
1496 if (!type_has_mode_precision_p (etype
))
1497 etype
= lang_hooks
.types
.type_for_mode (SCALAR_INT_TYPE_MODE (etype
),
1498 TYPE_UNSIGNED (etype
));
1500 if (integer_zerop (c
))
1504 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
1507 cval
= TREE_INT_CST_LOW (c
);
1511 cval
|= (cval
<< 31) << 1;
1514 var
= fold_build2 (MEM_REF
, etype
, dest
, build_int_cst (ptr_type_node
, 0));
1515 gimple
*store
= gimple_build_assign (var
, build_int_cst_type (etype
, cval
));
1516 gimple_move_vops (store
, stmt
);
1517 gimple_set_location (store
, gimple_location (stmt
));
1518 gsi_insert_before (gsi
, store
, GSI_SAME_STMT
);
1519 if (gimple_call_lhs (stmt
))
1521 gimple
*asgn
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
1522 gsi_replace (gsi
, asgn
, false);
1526 gimple_stmt_iterator gsi2
= *gsi
;
1528 gsi_remove (&gsi2
, true);
1534 /* Helper of get_range_strlen for ARG that is not an SSA_NAME. */
1537 get_range_strlen_tree (tree arg
, bitmap visited
, strlen_range_kind rkind
,
1538 c_strlen_data
*pdata
, unsigned eltsize
)
1540 gcc_assert (TREE_CODE (arg
) != SSA_NAME
);
1542 /* The length computed by this invocation of the function. */
1543 tree val
= NULL_TREE
;
1545 /* True if VAL is an optimistic (tight) bound determined from
1546 the size of the character array in which the string may be
1547 stored. In that case, the computed VAL is used to set
1549 bool tight_bound
= false;
1551 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1552 if (TREE_CODE (arg
) == ADDR_EXPR
1553 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
)
1555 tree op
= TREE_OPERAND (arg
, 0);
1556 if (integer_zerop (TREE_OPERAND (op
, 1)))
1558 tree aop0
= TREE_OPERAND (op
, 0);
1559 if (TREE_CODE (aop0
) == INDIRECT_REF
1560 && TREE_CODE (TREE_OPERAND (aop0
, 0)) == SSA_NAME
)
1561 return get_range_strlen (TREE_OPERAND (aop0
, 0), visited
, rkind
,
1564 else if (TREE_CODE (TREE_OPERAND (op
, 0)) == COMPONENT_REF
1565 && rkind
== SRK_LENRANGE
)
1567 /* Fail if an array is the last member of a struct object
1568 since it could be treated as a (fake) flexible array
1570 tree idx
= TREE_OPERAND (op
, 1);
1572 arg
= TREE_OPERAND (op
, 0);
1573 tree optype
= TREE_TYPE (arg
);
1574 if (tree dom
= TYPE_DOMAIN (optype
))
1575 if (tree bound
= TYPE_MAX_VALUE (dom
))
1576 if (TREE_CODE (bound
) == INTEGER_CST
1577 && TREE_CODE (idx
) == INTEGER_CST
1578 && tree_int_cst_lt (bound
, idx
))
1583 if (rkind
== SRK_INT_VALUE
)
1585 /* We are computing the maximum value (not string length). */
1587 if (TREE_CODE (val
) != INTEGER_CST
1588 || tree_int_cst_sgn (val
) < 0)
1593 c_strlen_data lendata
= { };
1594 val
= c_strlen (arg
, 1, &lendata
, eltsize
);
1596 if (!val
&& lendata
.decl
)
1598 /* ARG refers to an unterminated const character array.
1599 DATA.DECL with size DATA.LEN. */
1600 val
= lendata
.minlen
;
1601 pdata
->decl
= lendata
.decl
;
1605 /* Set if VAL represents the maximum length based on array size (set
1606 when exact length cannot be determined). */
1607 bool maxbound
= false;
1609 if (!val
&& rkind
== SRK_LENRANGE
)
1611 if (TREE_CODE (arg
) == ADDR_EXPR
)
1612 return get_range_strlen (TREE_OPERAND (arg
, 0), visited
, rkind
,
1615 if (TREE_CODE (arg
) == ARRAY_REF
)
1617 tree optype
= TREE_TYPE (TREE_OPERAND (arg
, 0));
1619 /* Determine the "innermost" array type. */
1620 while (TREE_CODE (optype
) == ARRAY_TYPE
1621 && TREE_CODE (TREE_TYPE (optype
)) == ARRAY_TYPE
)
1622 optype
= TREE_TYPE (optype
);
1624 /* Avoid arrays of pointers. */
1625 tree eltype
= TREE_TYPE (optype
);
1626 if (TREE_CODE (optype
) != ARRAY_TYPE
1627 || !INTEGRAL_TYPE_P (eltype
))
1630 /* Fail when the array bound is unknown or zero. */
1631 val
= TYPE_SIZE_UNIT (optype
);
1633 || TREE_CODE (val
) != INTEGER_CST
1634 || integer_zerop (val
))
1637 val
= fold_build2 (MINUS_EXPR
, TREE_TYPE (val
), val
,
1640 /* Set the minimum size to zero since the string in
1641 the array could have zero length. */
1642 pdata
->minlen
= ssize_int (0);
1646 else if (TREE_CODE (arg
) == COMPONENT_REF
1647 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg
, 1)))
1650 /* Use the type of the member array to determine the upper
1651 bound on the length of the array. This may be overly
1652 optimistic if the array itself isn't NUL-terminated and
1653 the caller relies on the subsequent member to contain
1654 the NUL but that would only be considered valid if
1655 the array were the last member of a struct. */
1657 tree fld
= TREE_OPERAND (arg
, 1);
1659 tree optype
= TREE_TYPE (fld
);
1661 /* Determine the "innermost" array type. */
1662 while (TREE_CODE (optype
) == ARRAY_TYPE
1663 && TREE_CODE (TREE_TYPE (optype
)) == ARRAY_TYPE
)
1664 optype
= TREE_TYPE (optype
);
1666 /* Fail when the array bound is unknown or zero. */
1667 val
= TYPE_SIZE_UNIT (optype
);
1669 || TREE_CODE (val
) != INTEGER_CST
1670 || integer_zerop (val
))
1672 val
= fold_build2 (MINUS_EXPR
, TREE_TYPE (val
), val
,
1675 /* Set the minimum size to zero since the string in
1676 the array could have zero length. */
1677 pdata
->minlen
= ssize_int (0);
1679 /* The array size determined above is an optimistic bound
1680 on the length. If the array isn't nul-terminated the
1681 length computed by the library function would be greater.
1682 Even though using strlen to cross the subobject boundary
1683 is undefined, avoid drawing conclusions from the member
1684 type about the length here. */
1687 else if (TREE_CODE (arg
) == MEM_REF
1688 && TREE_CODE (TREE_TYPE (arg
)) == ARRAY_TYPE
1689 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == INTEGER_TYPE
1690 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ADDR_EXPR
)
1692 /* Handle a MEM_REF into a DECL accessing an array of integers,
1693 being conservative about references to extern structures with
1694 flexible array members that can be initialized to arbitrary
1695 numbers of elements as an extension (static structs are okay).
1696 FIXME: Make this less conservative -- see
1697 component_ref_size in tree.cc. */
1698 tree ref
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
1699 if ((TREE_CODE (ref
) == PARM_DECL
|| VAR_P (ref
))
1700 && (decl_binds_to_current_def_p (ref
)
1701 || !array_at_struct_end_p (arg
)))
1703 /* Fail if the offset is out of bounds. Such accesses
1704 should be diagnosed at some point. */
1705 val
= DECL_SIZE_UNIT (ref
);
1707 || TREE_CODE (val
) != INTEGER_CST
1708 || integer_zerop (val
))
1711 poly_offset_int psiz
= wi::to_offset (val
);
1712 poly_offset_int poff
= mem_ref_offset (arg
);
1713 if (known_le (psiz
, poff
))
1716 pdata
->minlen
= ssize_int (0);
1718 /* Subtract the offset and one for the terminating nul. */
1721 val
= wide_int_to_tree (TREE_TYPE (val
), psiz
);
1722 /* Since VAL reflects the size of a declared object
1723 rather the type of the access it is not a tight bound. */
1726 else if (TREE_CODE (arg
) == PARM_DECL
|| VAR_P (arg
))
1728 /* Avoid handling pointers to arrays. GCC might misuse
1729 a pointer to an array of one bound to point to an array
1730 object of a greater bound. */
1731 tree argtype
= TREE_TYPE (arg
);
1732 if (TREE_CODE (argtype
) == ARRAY_TYPE
)
1734 val
= TYPE_SIZE_UNIT (argtype
);
1736 || TREE_CODE (val
) != INTEGER_CST
1737 || integer_zerop (val
))
1739 val
= wide_int_to_tree (TREE_TYPE (val
),
1740 wi::sub (wi::to_wide (val
), 1));
1742 /* Set the minimum size to zero since the string in
1743 the array could have zero length. */
1744 pdata
->minlen
= ssize_int (0);
1753 /* Adjust the lower bound on the string length as necessary. */
1755 || (rkind
!= SRK_STRLEN
1756 && TREE_CODE (pdata
->minlen
) == INTEGER_CST
1757 && TREE_CODE (val
) == INTEGER_CST
1758 && tree_int_cst_lt (val
, pdata
->minlen
)))
1759 pdata
->minlen
= val
;
1761 if (pdata
->maxbound
&& TREE_CODE (pdata
->maxbound
) == INTEGER_CST
)
1763 /* Adjust the tighter (more optimistic) string length bound
1764 if necessary and proceed to adjust the more conservative
1766 if (TREE_CODE (val
) == INTEGER_CST
)
1768 if (tree_int_cst_lt (pdata
->maxbound
, val
))
1769 pdata
->maxbound
= val
;
1772 pdata
->maxbound
= val
;
1774 else if (pdata
->maxbound
|| maxbound
)
1775 /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
1776 if VAL corresponds to the maximum length determined based
1777 on the type of the object. */
1778 pdata
->maxbound
= val
;
1782 /* VAL computed above represents an optimistically tight bound
1783 on the length of the string based on the referenced object's
1784 or subobject's type. Determine the conservative upper bound
1785 based on the enclosing object's size if possible. */
1786 if (rkind
== SRK_LENRANGE
)
1789 tree base
= get_addr_base_and_unit_offset (arg
, &offset
);
1792 /* When the call above fails due to a non-constant offset
1793 assume the offset is zero and use the size of the whole
1794 enclosing object instead. */
1795 base
= get_base_address (arg
);
1798 /* If the base object is a pointer no upper bound on the length
1799 can be determined. Otherwise the maximum length is equal to
1800 the size of the enclosing object minus the offset of
1801 the referenced subobject minus 1 (for the terminating nul). */
1802 tree type
= TREE_TYPE (base
);
1803 if (TREE_CODE (type
) == POINTER_TYPE
1804 || (TREE_CODE (base
) != PARM_DECL
&& !VAR_P (base
))
1805 || !(val
= DECL_SIZE_UNIT (base
)))
1806 val
= build_all_ones_cst (size_type_node
);
1809 val
= DECL_SIZE_UNIT (base
);
1810 val
= fold_build2 (MINUS_EXPR
, TREE_TYPE (val
), val
,
1811 size_int (offset
+ 1));
1820 /* Adjust the more conservative bound if possible/necessary
1821 and fail otherwise. */
1822 if (rkind
!= SRK_STRLEN
)
1824 if (TREE_CODE (pdata
->maxlen
) != INTEGER_CST
1825 || TREE_CODE (val
) != INTEGER_CST
)
1828 if (tree_int_cst_lt (pdata
->maxlen
, val
))
1829 pdata
->maxlen
= val
;
1832 else if (simple_cst_equal (val
, pdata
->maxlen
) != 1)
1834 /* Fail if the length of this ARG is different from that
1835 previously determined from another ARG. */
1840 pdata
->maxlen
= val
;
1841 return rkind
== SRK_LENRANGE
|| !integer_all_onesp (val
);
1844 /* For an ARG referencing one or more strings, try to obtain the range
1845 of their lengths, or the size of the largest array ARG referes to if
1846 the range of lengths cannot be determined, and store all in *PDATA.
1847 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1848 the maximum constant value.
1849 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1850 SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1851 length or if we are unable to determine the length, return false.
1852 VISITED is a bitmap of visited variables.
1853 RKIND determines the kind of value or range to obtain (see
1855 Set PDATA->DECL if ARG refers to an unterminated constant array.
1856 On input, set ELTSIZE to 1 for normal single byte character strings,
1857 and either 2 or 4 for wide characer strings (the size of wchar_t).
1858 Return true if *PDATA was successfully populated and false otherwise. */
1861 get_range_strlen (tree arg
, bitmap visited
,
1862 strlen_range_kind rkind
,
1863 c_strlen_data
*pdata
, unsigned eltsize
)
1866 if (TREE_CODE (arg
) != SSA_NAME
)
1867 return get_range_strlen_tree (arg
, visited
, rkind
, pdata
, eltsize
);
1869 /* If ARG is registered for SSA update we cannot look at its defining
1871 if (name_registered_for_update_p (arg
))
1874 /* If we were already here, break the infinite cycle. */
1875 if (!bitmap_set_bit (visited
, SSA_NAME_VERSION (arg
)))
1879 gimple
*def_stmt
= SSA_NAME_DEF_STMT (var
);
1881 switch (gimple_code (def_stmt
))
1884 /* The RHS of the statement defining VAR must either have a
1885 constant length or come from another SSA_NAME with a constant
1887 if (gimple_assign_single_p (def_stmt
)
1888 || gimple_assign_unary_nop_p (def_stmt
))
1890 tree rhs
= gimple_assign_rhs1 (def_stmt
);
1891 return get_range_strlen (rhs
, visited
, rkind
, pdata
, eltsize
);
1893 else if (gimple_assign_rhs_code (def_stmt
) == COND_EXPR
)
1895 tree ops
[2] = { gimple_assign_rhs2 (def_stmt
),
1896 gimple_assign_rhs3 (def_stmt
) };
1898 for (unsigned int i
= 0; i
< 2; i
++)
1899 if (!get_range_strlen (ops
[i
], visited
, rkind
, pdata
, eltsize
))
1901 if (rkind
!= SRK_LENRANGE
)
1903 /* Set the upper bound to the maximum to prevent
1904 it from being adjusted in the next iteration but
1905 leave MINLEN and the more conservative MAXBOUND
1906 determined so far alone (or leave them null if
1907 they haven't been set yet). That the MINLEN is
1908 in fact zero can be determined from MAXLEN being
1909 unbounded but the discovered minimum is used for
1911 pdata
->maxlen
= build_all_ones_cst (size_type_node
);
1918 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1919 must have a constant length. */
1920 for (unsigned i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
1922 tree arg
= gimple_phi_arg (def_stmt
, i
)->def
;
1924 /* If this PHI has itself as an argument, we cannot
1925 determine the string length of this argument. However,
1926 if we can find a constant string length for the other
1927 PHI args then we can still be sure that this is a
1928 constant string length. So be optimistic and just
1929 continue with the next argument. */
1930 if (arg
== gimple_phi_result (def_stmt
))
1933 if (!get_range_strlen (arg
, visited
, rkind
, pdata
, eltsize
))
1935 if (rkind
!= SRK_LENRANGE
)
1937 /* Set the upper bound to the maximum to prevent
1938 it from being adjusted in the next iteration but
1939 leave MINLEN and the more conservative MAXBOUND
1940 determined so far alone (or leave them null if
1941 they haven't been set yet). That the MINLEN is
1942 in fact zero can be determined from MAXLEN being
1943 unbounded but the discovered minimum is used for
1945 pdata
->maxlen
= build_all_ones_cst (size_type_node
);
1955 /* Try to obtain the range of the lengths of the string(s) referenced
1956 by ARG, or the size of the largest array ARG refers to if the range
1957 of lengths cannot be determined, and store all in *PDATA which must
1958 be zero-initialized on input except PDATA->MAXBOUND may be set to
1959 a non-null tree node other than INTEGER_CST to request to have it
1960 set to the length of the longest string in a PHI. ELTSIZE is
1961 the expected size of the string element in bytes: 1 for char and
1962 some power of 2 for wide characters.
1963 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1964 for optimization. Returning false means that a nonzero PDATA->MINLEN
1965 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1966 is -1 (in that case, the actual range is indeterminate, i.e.,
1967 [0, PTRDIFF_MAX - 2]. */
1970 get_range_strlen (tree arg
, c_strlen_data
*pdata
, unsigned eltsize
)
1972 auto_bitmap visited
;
1973 tree maxbound
= pdata
->maxbound
;
1975 if (!get_range_strlen (arg
, visited
, SRK_LENRANGE
, pdata
, eltsize
))
1977 /* On failure extend the length range to an impossible maximum
1978 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1979 members can stay unchanged regardless. */
1980 pdata
->minlen
= ssize_int (0);
1981 pdata
->maxlen
= build_all_ones_cst (size_type_node
);
1983 else if (!pdata
->minlen
)
1984 pdata
->minlen
= ssize_int (0);
1986 /* If it's unchanged from it initial non-null value, set the conservative
1987 MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1988 if (maxbound
&& pdata
->maxbound
== maxbound
)
1989 pdata
->maxbound
= build_all_ones_cst (size_type_node
);
1991 return !integer_all_onesp (pdata
->maxlen
);
1994 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1995 For ARG of pointer types, NONSTR indicates if the caller is prepared
1996 to handle unterminated strings. For integer ARG and when RKIND ==
1997 SRK_INT_VALUE, NONSTR must be null.
1999 If an unterminated array is discovered and our caller handles
2000 unterminated arrays, then bubble up the offending DECL and
2001 return the maximum size. Otherwise return NULL. */
2004 get_maxval_strlen (tree arg
, strlen_range_kind rkind
, tree
*nonstr
= NULL
)
2006 /* A non-null NONSTR is meaningless when determining the maximum
2007 value of an integer ARG. */
2008 gcc_assert (rkind
!= SRK_INT_VALUE
|| nonstr
== NULL
);
2009 /* ARG must have an integral type when RKIND says so. */
2010 gcc_assert (rkind
!= SRK_INT_VALUE
|| INTEGRAL_TYPE_P (TREE_TYPE (arg
)));
2012 auto_bitmap visited
;
2014 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
2016 c_strlen_data lendata
= { };
2017 if (!get_range_strlen (arg
, visited
, rkind
, &lendata
, /* eltsize = */1))
2018 lendata
.maxlen
= NULL_TREE
;
2019 else if (lendata
.maxlen
&& integer_all_onesp (lendata
.maxlen
))
2020 lendata
.maxlen
= NULL_TREE
;
2024 /* For callers prepared to handle unterminated arrays set
2025 *NONSTR to point to the declaration of the array and return
2026 the maximum length/size. */
2027 *nonstr
= lendata
.decl
;
2028 return lendata
.maxlen
;
2031 /* Fail if the constant array isn't nul-terminated. */
2032 return lendata
.decl
? NULL_TREE
: lendata
.maxlen
;
2035 /* Return true if LEN is known to be less than or equal to (or if STRICT is
2036 true, strictly less than) the lower bound of SIZE at compile time and false
2040 known_lower (gimple
*stmt
, tree len
, tree size
, bool strict
= false)
2042 if (len
== NULL_TREE
)
2045 wide_int size_range
[2];
2046 wide_int len_range
[2];
2047 if (get_range (len
, stmt
, len_range
) && get_range (size
, stmt
, size_range
))
2050 return wi::ltu_p (len_range
[1], size_range
[0]);
2052 return wi::leu_p (len_range
[1], size_range
[0]);
2058 /* Fold function call to builtin strcpy with arguments DEST and SRC.
2059 If LEN is not NULL, it represents the length of the string to be
2060 copied. Return NULL_TREE if no simplification can be made. */
2063 gimple_fold_builtin_strcpy (gimple_stmt_iterator
*gsi
,
2064 tree dest
, tree src
)
2066 gimple
*stmt
= gsi_stmt (*gsi
);
2067 location_t loc
= gimple_location (stmt
);
2070 /* If SRC and DEST are the same (and not volatile), return DEST. */
2071 if (operand_equal_p (src
, dest
, 0))
2073 /* Issue -Wrestrict unless the pointers are null (those do
2074 not point to objects and so do not indicate an overlap;
2075 such calls could be the result of sanitization and jump
2077 if (!integer_zerop (dest
) && !warning_suppressed_p (stmt
, OPT_Wrestrict
))
2079 tree func
= gimple_call_fndecl (stmt
);
2081 warning_at (loc
, OPT_Wrestrict
,
2082 "%qD source argument is the same as destination",
2086 replace_call_with_value (gsi
, dest
);
2090 if (optimize_function_for_size_p (cfun
))
2093 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
2097 /* Set to non-null if ARG refers to an unterminated array. */
2099 tree len
= get_maxval_strlen (src
, SRK_STRLEN
, &nonstr
);
2103 /* Avoid folding calls with unterminated arrays. */
2104 if (!warning_suppressed_p (stmt
, OPT_Wstringop_overread
))
2105 warn_string_no_nul (loc
, stmt
, "strcpy", src
, nonstr
);
2106 suppress_warning (stmt
, OPT_Wstringop_overread
);
2113 len
= fold_convert_loc (loc
, size_type_node
, len
);
2114 len
= size_binop_loc (loc
, PLUS_EXPR
, len
, build_int_cst (size_type_node
, 1));
2115 len
= force_gimple_operand_gsi (gsi
, len
, true,
2116 NULL_TREE
, true, GSI_SAME_STMT
);
2117 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
2118 replace_call_with_call_and_fold (gsi
, repl
);
2122 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
2123 If SLEN is not NULL, it represents the length of the source string.
2124 Return NULL_TREE if no simplification can be made. */
2127 gimple_fold_builtin_strncpy (gimple_stmt_iterator
*gsi
,
2128 tree dest
, tree src
, tree len
)
2130 gimple
*stmt
= gsi_stmt (*gsi
);
2131 location_t loc
= gimple_location (stmt
);
2132 bool nonstring
= get_attr_nonstring_decl (dest
) != NULL_TREE
;
2134 /* If the LEN parameter is zero, return DEST. */
2135 if (integer_zerop (len
))
2137 /* Avoid warning if the destination refers to an array/pointer
2138 decorate with attribute nonstring. */
2141 tree fndecl
= gimple_call_fndecl (stmt
);
2143 /* Warn about the lack of nul termination: the result is not
2144 a (nul-terminated) string. */
2145 tree slen
= get_maxval_strlen (src
, SRK_STRLEN
);
2146 if (slen
&& !integer_zerop (slen
))
2147 warning_at (loc
, OPT_Wstringop_truncation
,
2148 "%qD destination unchanged after copying no bytes "
2149 "from a string of length %E",
2152 warning_at (loc
, OPT_Wstringop_truncation
,
2153 "%qD destination unchanged after copying no bytes",
2157 replace_call_with_value (gsi
, dest
);
2161 /* We can't compare slen with len as constants below if len is not a
2163 if (TREE_CODE (len
) != INTEGER_CST
)
2166 /* Now, we must be passed a constant src ptr parameter. */
2167 tree slen
= get_maxval_strlen (src
, SRK_STRLEN
);
2168 if (!slen
|| TREE_CODE (slen
) != INTEGER_CST
)
2171 /* The size of the source string including the terminating nul. */
2172 tree ssize
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
2174 /* We do not support simplification of this case, though we do
2175 support it when expanding trees into RTL. */
2176 /* FIXME: generate a call to __builtin_memset. */
2177 if (tree_int_cst_lt (ssize
, len
))
2180 /* Diagnose truncation that leaves the copy unterminated. */
2181 maybe_diag_stxncpy_trunc (*gsi
, src
, len
);
2183 /* OK transform into builtin memcpy. */
2184 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
2188 len
= fold_convert_loc (loc
, size_type_node
, len
);
2189 len
= force_gimple_operand_gsi (gsi
, len
, true,
2190 NULL_TREE
, true, GSI_SAME_STMT
);
2191 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
2192 replace_call_with_call_and_fold (gsi
, repl
);
2197 /* Fold function call to builtin strchr or strrchr.
2198 If both arguments are constant, evaluate and fold the result,
2199 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
2200 In general strlen is significantly faster than strchr
2201 due to being a simpler operation. */
2203 gimple_fold_builtin_strchr (gimple_stmt_iterator
*gsi
, bool is_strrchr
)
2205 gimple
*stmt
= gsi_stmt (*gsi
);
2206 tree str
= gimple_call_arg (stmt
, 0);
2207 tree c
= gimple_call_arg (stmt
, 1);
2208 location_t loc
= gimple_location (stmt
);
2212 if (!gimple_call_lhs (stmt
))
2215 /* Avoid folding if the first argument is not a nul-terminated array.
2216 Defer warning until later. */
2217 if (!check_nul_terminated_array (NULL_TREE
, str
))
2220 if ((p
= c_getstr (str
)) && target_char_cst_p (c
, &ch
))
2222 const char *p1
= is_strrchr
? strrchr (p
, ch
) : strchr (p
, ch
);
2226 replace_call_with_value (gsi
, integer_zero_node
);
2230 tree len
= build_int_cst (size_type_node
, p1
- p
);
2231 gimple_seq stmts
= NULL
;
2232 gimple
*new_stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
2233 POINTER_PLUS_EXPR
, str
, len
);
2234 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
2235 gsi_replace_with_seq_vops (gsi
, stmts
);
2239 if (!integer_zerop (c
))
2242 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
2243 if (is_strrchr
&& optimize_function_for_size_p (cfun
))
2245 tree strchr_fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
2249 gimple
*repl
= gimple_build_call (strchr_fn
, 2, str
, c
);
2250 replace_call_with_call_and_fold (gsi
, repl
);
2258 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
2263 /* Create newstr = strlen (str). */
2264 gimple_seq stmts
= NULL
;
2265 gimple
*new_stmt
= gimple_build_call (strlen_fn
, 1, str
);
2266 gimple_set_location (new_stmt
, loc
);
2267 len
= create_tmp_reg_or_ssa_name (size_type_node
);
2268 gimple_call_set_lhs (new_stmt
, len
);
2269 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
2271 /* Create (str p+ strlen (str)). */
2272 new_stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
2273 POINTER_PLUS_EXPR
, str
, len
);
2274 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
2275 gsi_replace_with_seq_vops (gsi
, stmts
);
2276 /* gsi now points at the assignment to the lhs, get a
2277 stmt iterator to the strlen.
2278 ??? We can't use gsi_for_stmt as that doesn't work when the
2279 CFG isn't built yet. */
2280 gimple_stmt_iterator gsi2
= *gsi
;
2286 /* Fold function call to builtin strstr.
2287 If both arguments are constant, evaluate and fold the result,
2288 additionally fold strstr (x, "") into x and strstr (x, "c")
2289 into strchr (x, 'c'). */
2291 gimple_fold_builtin_strstr (gimple_stmt_iterator
*gsi
)
2293 gimple
*stmt
= gsi_stmt (*gsi
);
2294 if (!gimple_call_lhs (stmt
))
2297 tree haystack
= gimple_call_arg (stmt
, 0);
2298 tree needle
= gimple_call_arg (stmt
, 1);
2300 /* Avoid folding if either argument is not a nul-terminated array.
2301 Defer warning until later. */
2302 if (!check_nul_terminated_array (NULL_TREE
, haystack
)
2303 || !check_nul_terminated_array (NULL_TREE
, needle
))
2306 const char *q
= c_getstr (needle
);
2310 if (const char *p
= c_getstr (haystack
))
2312 const char *r
= strstr (p
, q
);
2316 replace_call_with_value (gsi
, integer_zero_node
);
2320 tree len
= build_int_cst (size_type_node
, r
- p
);
2321 gimple_seq stmts
= NULL
;
2323 = gimple_build_assign (gimple_call_lhs (stmt
), POINTER_PLUS_EXPR
,
2325 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
2326 gsi_replace_with_seq_vops (gsi
, stmts
);
2330 /* For strstr (x, "") return x. */
2333 replace_call_with_value (gsi
, haystack
);
2337 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2340 tree strchr_fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
2343 tree c
= build_int_cst (integer_type_node
, q
[0]);
2344 gimple
*repl
= gimple_build_call (strchr_fn
, 2, haystack
, c
);
2345 replace_call_with_call_and_fold (gsi
, repl
);
2353 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2356 Return NULL_TREE if no simplification was possible, otherwise return the
2357 simplified form of the call as a tree.
2359 The simplified form may be a constant or other expression which
2360 computes the same value, but in a more efficient manner (including
2361 calls to other builtin functions).
2363 The call may contain arguments which need to be evaluated, but
2364 which are not useful to determine the result of the call. In
2365 this case we return a chain of COMPOUND_EXPRs. The LHS of each
2366 COMPOUND_EXPR will be an argument which must be evaluated.
2367 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
2368 COMPOUND_EXPR in the chain will contain the tree for the simplified
2369 form of the builtin function call. */
2372 gimple_fold_builtin_strcat (gimple_stmt_iterator
*gsi
, tree dst
, tree src
)
2374 gimple
*stmt
= gsi_stmt (*gsi
);
2375 location_t loc
= gimple_location (stmt
);
2377 const char *p
= c_getstr (src
);
2379 /* If the string length is zero, return the dst parameter. */
2380 if (p
&& *p
== '\0')
2382 replace_call_with_value (gsi
, dst
);
2386 if (!optimize_bb_for_speed_p (gimple_bb (stmt
)))
2389 /* See if we can store by pieces into (dst + strlen(dst)). */
2391 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
2392 tree memcpy_fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
2394 if (!strlen_fn
|| !memcpy_fn
)
2397 /* If the length of the source string isn't computable don't
2398 split strcat into strlen and memcpy. */
2399 tree len
= get_maxval_strlen (src
, SRK_STRLEN
);
2403 /* Create strlen (dst). */
2404 gimple_seq stmts
= NULL
, stmts2
;
2405 gimple
*repl
= gimple_build_call (strlen_fn
, 1, dst
);
2406 gimple_set_location (repl
, loc
);
2407 newdst
= create_tmp_reg_or_ssa_name (size_type_node
);
2408 gimple_call_set_lhs (repl
, newdst
);
2409 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2411 /* Create (dst p+ strlen (dst)). */
2412 newdst
= fold_build_pointer_plus_loc (loc
, dst
, newdst
);
2413 newdst
= force_gimple_operand (newdst
, &stmts2
, true, NULL_TREE
);
2414 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
2416 len
= fold_convert_loc (loc
, size_type_node
, len
);
2417 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
2418 build_int_cst (size_type_node
, 1));
2419 len
= force_gimple_operand (len
, &stmts2
, true, NULL_TREE
);
2420 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
2422 repl
= gimple_build_call (memcpy_fn
, 3, newdst
, src
, len
);
2423 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2424 if (gimple_call_lhs (stmt
))
2426 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dst
);
2427 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2428 gsi_replace_with_seq_vops (gsi
, stmts
);
2429 /* gsi now points at the assignment to the lhs, get a
2430 stmt iterator to the memcpy call.
2431 ??? We can't use gsi_for_stmt as that doesn't work when the
2432 CFG isn't built yet. */
2433 gimple_stmt_iterator gsi2
= *gsi
;
2439 gsi_replace_with_seq_vops (gsi
, stmts
);
2445 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2446 are the arguments to the call. */
2449 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator
*gsi
)
2451 gimple
*stmt
= gsi_stmt (*gsi
);
2452 tree dest
= gimple_call_arg (stmt
, 0);
2453 tree src
= gimple_call_arg (stmt
, 1);
2454 tree size
= gimple_call_arg (stmt
, 2);
2460 /* If the SRC parameter is "", return DEST. */
2461 if (p
&& *p
== '\0')
2463 replace_call_with_value (gsi
, dest
);
2467 if (! tree_fits_uhwi_p (size
) || ! integer_all_onesp (size
))
2470 /* If __builtin_strcat_chk is used, assume strcat is available. */
2471 fn
= builtin_decl_explicit (BUILT_IN_STRCAT
);
2475 gimple
*repl
= gimple_build_call (fn
, 2, dest
, src
);
2476 replace_call_with_call_and_fold (gsi
, repl
);
2480 /* Simplify a call to the strncat builtin. */
2483 gimple_fold_builtin_strncat (gimple_stmt_iterator
*gsi
)
2485 gimple
*stmt
= gsi_stmt (*gsi
);
2486 tree dst
= gimple_call_arg (stmt
, 0);
2487 tree src
= gimple_call_arg (stmt
, 1);
2488 tree len
= gimple_call_arg (stmt
, 2);
2489 tree src_len
= c_strlen (src
, 1);
2491 /* If the requested length is zero, or the src parameter string
2492 length is zero, return the dst parameter. */
2493 if (integer_zerop (len
) || (src_len
&& integer_zerop (src_len
)))
2495 replace_call_with_value (gsi
, dst
);
2499 /* Return early if the requested len is less than the string length.
2500 Warnings will be issued elsewhere later. */
2501 if (!src_len
|| known_lower (stmt
, len
, src_len
, true))
2504 /* Warn on constant LEN. */
2505 if (TREE_CODE (len
) == INTEGER_CST
)
2507 bool nowarn
= warning_suppressed_p (stmt
, OPT_Wstringop_overflow_
);
2510 if (!nowarn
&& compute_builtin_object_size (dst
, 1, &dstsize
)
2511 && TREE_CODE (dstsize
) == INTEGER_CST
)
2513 int cmpdst
= tree_int_cst_compare (len
, dstsize
);
2517 tree fndecl
= gimple_call_fndecl (stmt
);
2519 /* Strncat copies (at most) LEN bytes and always appends
2520 the terminating NUL so the specified bound should never
2521 be equal to (or greater than) the size of the destination.
2522 If it is, the copy could overflow. */
2523 location_t loc
= gimple_location (stmt
);
2524 nowarn
= warning_at (loc
, OPT_Wstringop_overflow_
,
2526 ? G_("%qD specified bound %E equals "
2528 : G_("%qD specified bound %E exceeds "
2529 "destination size %E"),
2530 fndecl
, len
, dstsize
);
2532 suppress_warning (stmt
, OPT_Wstringop_overflow_
);
2536 if (!nowarn
&& TREE_CODE (src_len
) == INTEGER_CST
2537 && tree_int_cst_compare (src_len
, len
) == 0)
2539 tree fndecl
= gimple_call_fndecl (stmt
);
2540 location_t loc
= gimple_location (stmt
);
2542 /* To avoid possible overflow the specified bound should also
2543 not be equal to the length of the source, even when the size
2544 of the destination is unknown (it's not an uncommon mistake
2545 to specify as the bound to strncpy the length of the source). */
2546 if (warning_at (loc
, OPT_Wstringop_overflow_
,
2547 "%qD specified bound %E equals source length",
2549 suppress_warning (stmt
, OPT_Wstringop_overflow_
);
2553 if (!known_lower (stmt
, src_len
, len
))
2556 tree fn
= builtin_decl_implicit (BUILT_IN_STRCAT
);
2558 /* If the replacement _DECL isn't initialized, don't do the
2563 /* Otherwise, emit a call to strcat. */
2564 gcall
*repl
= gimple_build_call (fn
, 2, dst
, src
);
2565 replace_call_with_call_and_fold (gsi
, repl
);
2569 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2573 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator
*gsi
)
2575 gimple
*stmt
= gsi_stmt (*gsi
);
2576 tree dest
= gimple_call_arg (stmt
, 0);
2577 tree src
= gimple_call_arg (stmt
, 1);
2578 tree len
= gimple_call_arg (stmt
, 2);
2579 tree size
= gimple_call_arg (stmt
, 3);
2584 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2585 if ((p
&& *p
== '\0')
2586 || integer_zerop (len
))
2588 replace_call_with_value (gsi
, dest
);
2592 if (! integer_all_onesp (size
))
2594 tree src_len
= c_strlen (src
, 1);
2595 if (known_lower (stmt
, src_len
, len
))
2597 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2598 fn
= builtin_decl_explicit (BUILT_IN_STRCAT_CHK
);
2602 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
2603 replace_call_with_call_and_fold (gsi
, repl
);
2609 /* If __builtin_strncat_chk is used, assume strncat is available. */
2610 fn
= builtin_decl_explicit (BUILT_IN_STRNCAT
);
2614 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
2615 replace_call_with_call_and_fold (gsi
, repl
);
2619 /* Build and append gimple statements to STMTS that would load a first
2620 character of a memory location identified by STR. LOC is location
2621 of the statement. */
2624 gimple_load_first_char (location_t loc
, tree str
, gimple_seq
*stmts
)
2628 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
2629 tree cst_uchar_ptr_node
2630 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
2631 tree off0
= build_int_cst (cst_uchar_ptr_node
, 0);
2633 tree temp
= fold_build2_loc (loc
, MEM_REF
, cst_uchar_node
, str
, off0
);
2634 gassign
*stmt
= gimple_build_assign (NULL_TREE
, temp
);
2635 var
= create_tmp_reg_or_ssa_name (cst_uchar_node
, stmt
);
2637 gimple_assign_set_lhs (stmt
, var
);
2638 gimple_seq_add_stmt_without_update (stmts
, stmt
);
2643 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator. */
2646 gimple_fold_builtin_string_compare (gimple_stmt_iterator
*gsi
)
2648 gimple
*stmt
= gsi_stmt (*gsi
);
2649 tree callee
= gimple_call_fndecl (stmt
);
2650 enum built_in_function fcode
= DECL_FUNCTION_CODE (callee
);
2652 tree type
= integer_type_node
;
2653 tree str1
= gimple_call_arg (stmt
, 0);
2654 tree str2
= gimple_call_arg (stmt
, 1);
2655 tree lhs
= gimple_call_lhs (stmt
);
2657 tree bound_node
= NULL_TREE
;
2658 unsigned HOST_WIDE_INT bound
= HOST_WIDE_INT_M1U
;
2660 /* Handle strncmp and strncasecmp functions. */
2661 if (gimple_call_num_args (stmt
) == 3)
2663 bound_node
= gimple_call_arg (stmt
, 2);
2664 if (tree_fits_uhwi_p (bound_node
))
2665 bound
= tree_to_uhwi (bound_node
);
2668 /* If the BOUND parameter is zero, return zero. */
2671 replace_call_with_value (gsi
, integer_zero_node
);
2675 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2676 if (operand_equal_p (str1
, str2
, 0))
2678 replace_call_with_value (gsi
, integer_zero_node
);
2682 /* Initially set to the number of characters, including the terminating
2683 nul if each array has one. LENx == strnlen (Sx, LENx) implies that
2684 the array Sx is not terminated by a nul.
2685 For nul-terminated strings then adjusted to their length so that
2686 LENx == NULPOSx holds. */
2687 unsigned HOST_WIDE_INT len1
= HOST_WIDE_INT_MAX
, len2
= len1
;
2688 const char *p1
= getbyterep (str1
, &len1
);
2689 const char *p2
= getbyterep (str2
, &len2
);
2691 /* The position of the terminating nul character if one exists, otherwise
2692 a value greater than LENx. */
2693 unsigned HOST_WIDE_INT nulpos1
= HOST_WIDE_INT_MAX
, nulpos2
= nulpos1
;
2697 size_t n
= strnlen (p1
, len1
);
2704 size_t n
= strnlen (p2
, len2
);
2709 /* For known strings, return an immediate value. */
2713 bool known_result
= false;
2717 case BUILT_IN_STRCMP
:
2718 case BUILT_IN_STRCMP_EQ
:
2719 if (len1
!= nulpos1
|| len2
!= nulpos2
)
2722 r
= strcmp (p1
, p2
);
2723 known_result
= true;
2726 case BUILT_IN_STRNCMP
:
2727 case BUILT_IN_STRNCMP_EQ
:
2729 if (bound
== HOST_WIDE_INT_M1U
)
2732 /* Reduce the bound to be no more than the length
2733 of the shorter of the two strings, or the sizes
2734 of the unterminated arrays. */
2735 unsigned HOST_WIDE_INT n
= bound
;
2737 if (len1
== nulpos1
&& len1
< n
)
2739 if (len2
== nulpos2
&& len2
< n
)
2742 if (MIN (nulpos1
, nulpos2
) + 1 < n
)
2745 r
= strncmp (p1
, p2
, n
);
2746 known_result
= true;
2749 /* Only handleable situation is where the string are equal (result 0),
2750 which is already handled by operand_equal_p case. */
2751 case BUILT_IN_STRCASECMP
:
2753 case BUILT_IN_STRNCASECMP
:
2755 if (bound
== HOST_WIDE_INT_M1U
)
2757 r
= strncmp (p1
, p2
, bound
);
2759 known_result
= true;
2768 replace_call_with_value (gsi
, build_cmp_result (type
, r
));
2773 bool nonzero_bound
= (bound
>= 1 && bound
< HOST_WIDE_INT_M1U
)
2774 || fcode
== BUILT_IN_STRCMP
2775 || fcode
== BUILT_IN_STRCMP_EQ
2776 || fcode
== BUILT_IN_STRCASECMP
;
2778 location_t loc
= gimple_location (stmt
);
2780 /* If the second arg is "", return *(const unsigned char*)arg1. */
2781 if (p2
&& *p2
== '\0' && nonzero_bound
)
2783 gimple_seq stmts
= NULL
;
2784 tree var
= gimple_load_first_char (loc
, str1
, &stmts
);
2787 stmt
= gimple_build_assign (lhs
, NOP_EXPR
, var
);
2788 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2791 gsi_replace_with_seq_vops (gsi
, stmts
);
2795 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2796 if (p1
&& *p1
== '\0' && nonzero_bound
)
2798 gimple_seq stmts
= NULL
;
2799 tree var
= gimple_load_first_char (loc
, str2
, &stmts
);
2803 tree c
= create_tmp_reg_or_ssa_name (integer_type_node
);
2804 stmt
= gimple_build_assign (c
, NOP_EXPR
, var
);
2805 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2807 stmt
= gimple_build_assign (lhs
, NEGATE_EXPR
, c
);
2808 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2811 gsi_replace_with_seq_vops (gsi
, stmts
);
2815 /* If BOUND is one, return an expression corresponding to
2816 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2817 if (fcode
== BUILT_IN_STRNCMP
&& bound
== 1)
2819 gimple_seq stmts
= NULL
;
2820 tree temp1
= gimple_load_first_char (loc
, str1
, &stmts
);
2821 tree temp2
= gimple_load_first_char (loc
, str2
, &stmts
);
2825 tree c1
= create_tmp_reg_or_ssa_name (integer_type_node
);
2826 gassign
*convert1
= gimple_build_assign (c1
, NOP_EXPR
, temp1
);
2827 gimple_seq_add_stmt_without_update (&stmts
, convert1
);
2829 tree c2
= create_tmp_reg_or_ssa_name (integer_type_node
);
2830 gassign
*convert2
= gimple_build_assign (c2
, NOP_EXPR
, temp2
);
2831 gimple_seq_add_stmt_without_update (&stmts
, convert2
);
2833 stmt
= gimple_build_assign (lhs
, MINUS_EXPR
, c1
, c2
);
2834 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2837 gsi_replace_with_seq_vops (gsi
, stmts
);
2841 /* If BOUND is greater than the length of one constant string,
2842 and the other argument is also a nul-terminated string, replace
2843 strncmp with strcmp. */
2844 if (fcode
== BUILT_IN_STRNCMP
2845 && bound
> 0 && bound
< HOST_WIDE_INT_M1U
2846 && ((p2
&& len2
< bound
&& len2
== nulpos2
)
2847 || (p1
&& len1
< bound
&& len1
== nulpos1
)))
2849 tree fn
= builtin_decl_implicit (BUILT_IN_STRCMP
);
2852 gimple
*repl
= gimple_build_call (fn
, 2, str1
, str2
);
2853 replace_call_with_call_and_fold (gsi
, repl
);
2860 /* Fold a call to the memchr pointed by GSI iterator. */
2863 gimple_fold_builtin_memchr (gimple_stmt_iterator
*gsi
)
2865 gimple
*stmt
= gsi_stmt (*gsi
);
2866 tree lhs
= gimple_call_lhs (stmt
);
2867 tree arg1
= gimple_call_arg (stmt
, 0);
2868 tree arg2
= gimple_call_arg (stmt
, 1);
2869 tree len
= gimple_call_arg (stmt
, 2);
2871 /* If the LEN parameter is zero, return zero. */
2872 if (integer_zerop (len
))
2874 replace_call_with_value (gsi
, build_int_cst (ptr_type_node
, 0));
2879 if (TREE_CODE (arg2
) != INTEGER_CST
2880 || !tree_fits_uhwi_p (len
)
2881 || !target_char_cst_p (arg2
, &c
))
2884 unsigned HOST_WIDE_INT length
= tree_to_uhwi (len
);
2885 unsigned HOST_WIDE_INT string_length
;
2886 const char *p1
= getbyterep (arg1
, &string_length
);
2890 const char *r
= (const char *)memchr (p1
, c
, MIN (length
, string_length
));
2893 tree mem_size
, offset_node
;
2894 byte_representation (arg1
, &offset_node
, &mem_size
, NULL
);
2895 unsigned HOST_WIDE_INT offset
= (offset_node
== NULL_TREE
)
2896 ? 0 : tree_to_uhwi (offset_node
);
2897 /* MEM_SIZE is the size of the array the string literal
2899 unsigned HOST_WIDE_INT string_size
= tree_to_uhwi (mem_size
) - offset
;
2900 gcc_checking_assert (string_length
<= string_size
);
2901 if (length
<= string_size
)
2903 replace_call_with_value (gsi
, build_int_cst (ptr_type_node
, 0));
2909 unsigned HOST_WIDE_INT offset
= r
- p1
;
2910 gimple_seq stmts
= NULL
;
2911 if (lhs
!= NULL_TREE
)
2913 tree offset_cst
= build_int_cst (sizetype
, offset
);
2914 gassign
*stmt
= gimple_build_assign (lhs
, POINTER_PLUS_EXPR
,
2916 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2919 gimple_seq_add_stmt_without_update (&stmts
,
2920 gimple_build_nop ());
2922 gsi_replace_with_seq_vops (gsi
, stmts
);
2930 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2931 to the call. IGNORE is true if the value returned
2932 by the builtin will be ignored. UNLOCKED is true is true if this
2933 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2934 the known length of the string. Return NULL_TREE if no simplification
2938 gimple_fold_builtin_fputs (gimple_stmt_iterator
*gsi
,
2939 tree arg0
, tree arg1
,
2942 gimple
*stmt
= gsi_stmt (*gsi
);
2944 /* If we're using an unlocked function, assume the other unlocked
2945 functions exist explicitly. */
2946 tree
const fn_fputc
= (unlocked
2947 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
)
2948 : builtin_decl_implicit (BUILT_IN_FPUTC
));
2949 tree
const fn_fwrite
= (unlocked
2950 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED
)
2951 : builtin_decl_implicit (BUILT_IN_FWRITE
));
2953 /* If the return value is used, don't do the transformation. */
2954 if (gimple_call_lhs (stmt
))
2957 /* Get the length of the string passed to fputs. If the length
2958 can't be determined, punt. */
2959 tree len
= get_maxval_strlen (arg0
, SRK_STRLEN
);
2961 || TREE_CODE (len
) != INTEGER_CST
)
2964 switch (compare_tree_int (len
, 1))
2966 case -1: /* length is 0, delete the call entirely . */
2967 replace_call_with_value (gsi
, integer_zero_node
);
2970 case 0: /* length is 1, call fputc. */
2972 const char *p
= c_getstr (arg0
);
2978 gimple
*repl
= gimple_build_call (fn_fputc
, 2,
2980 (integer_type_node
, p
[0]), arg1
);
2981 replace_call_with_call_and_fold (gsi
, repl
);
2986 case 1: /* length is greater than 1, call fwrite. */
2988 /* If optimizing for size keep fputs. */
2989 if (optimize_function_for_size_p (cfun
))
2991 /* New argument list transforming fputs(string, stream) to
2992 fwrite(string, 1, len, stream). */
2996 gimple
*repl
= gimple_build_call (fn_fwrite
, 4, arg0
,
2997 size_one_node
, len
, arg1
);
2998 replace_call_with_call_and_fold (gsi
, repl
);
3006 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
3007 DEST, SRC, LEN, and SIZE are the arguments to the call.
3008 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
3009 code of the builtin. If MAXLEN is not NULL, it is maximum length
3010 passed as third argument. */
3013 gimple_fold_builtin_memory_chk (gimple_stmt_iterator
*gsi
,
3014 tree dest
, tree src
, tree len
, tree size
,
3015 enum built_in_function fcode
)
3017 gimple
*stmt
= gsi_stmt (*gsi
);
3018 location_t loc
= gimple_location (stmt
);
3019 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
3022 /* If SRC and DEST are the same (and not volatile), return DEST
3023 (resp. DEST+LEN for __mempcpy_chk). */
3024 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
3026 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
3028 replace_call_with_value (gsi
, dest
);
3033 gimple_seq stmts
= NULL
;
3034 len
= gimple_convert_to_ptrofftype (&stmts
, loc
, len
);
3035 tree temp
= gimple_build (&stmts
, loc
, POINTER_PLUS_EXPR
,
3036 TREE_TYPE (dest
), dest
, len
);
3037 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
3038 replace_call_with_value (gsi
, temp
);
3043 tree maxlen
= get_maxval_strlen (len
, SRK_INT_VALUE
);
3044 if (! integer_all_onesp (size
)
3045 && !known_lower (stmt
, len
, size
)
3046 && !known_lower (stmt
, maxlen
, size
))
3048 /* MAXLEN and LEN both cannot be proved to be less than SIZE, at
3049 least try to optimize (void) __mempcpy_chk () into
3050 (void) __memcpy_chk () */
3051 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
3053 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
3057 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
3058 replace_call_with_call_and_fold (gsi
, repl
);
3065 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
3066 mem{cpy,pcpy,move,set} is available. */
3069 case BUILT_IN_MEMCPY_CHK
:
3070 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
3072 case BUILT_IN_MEMPCPY_CHK
:
3073 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
3075 case BUILT_IN_MEMMOVE_CHK
:
3076 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
3078 case BUILT_IN_MEMSET_CHK
:
3079 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
3088 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
3089 replace_call_with_call_and_fold (gsi
, repl
);
3093 /* Print a message in the dump file recording transformation of FROM to TO. */
3096 dump_transformation (gcall
*from
, gcall
*to
)
3098 if (dump_enabled_p ())
3099 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, from
, "simplified %T to %T\n",
3100 gimple_call_fn (from
), gimple_call_fn (to
));
3103 /* Fold a call to the __st[rp]cpy_chk builtin.
3104 DEST, SRC, and SIZE are the arguments to the call.
3105 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
3106 code of the builtin. If MAXLEN is not NULL, it is maximum length of
3107 strings passed as second argument. */
3110 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator
*gsi
,
3112 tree src
, tree size
,
3113 enum built_in_function fcode
)
3115 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3116 location_t loc
= gimple_location (stmt
);
3117 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
3120 /* If SRC and DEST are the same (and not volatile), return DEST. */
3121 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
3123 /* Issue -Wrestrict unless the pointers are null (those do
3124 not point to objects and so do not indicate an overlap;
3125 such calls could be the result of sanitization and jump
3127 if (!integer_zerop (dest
)
3128 && !warning_suppressed_p (stmt
, OPT_Wrestrict
))
3130 tree func
= gimple_call_fndecl (stmt
);
3132 warning_at (loc
, OPT_Wrestrict
,
3133 "%qD source argument is the same as destination",
3137 replace_call_with_value (gsi
, dest
);
3141 tree maxlen
= get_maxval_strlen (src
, SRK_STRLENMAX
);
3142 if (! integer_all_onesp (size
))
3144 len
= c_strlen (src
, 1);
3145 if (!known_lower (stmt
, len
, size
, true)
3146 && !known_lower (stmt
, maxlen
, size
, true))
3148 if (fcode
== BUILT_IN_STPCPY_CHK
)
3153 /* If return value of __stpcpy_chk is ignored,
3154 optimize into __strcpy_chk. */
3155 fn
= builtin_decl_explicit (BUILT_IN_STRCPY_CHK
);
3159 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
3160 replace_call_with_call_and_fold (gsi
, repl
);
3164 if (! len
|| TREE_SIDE_EFFECTS (len
))
3167 /* If c_strlen returned something, but not provably less than size,
3168 transform __strcpy_chk into __memcpy_chk. */
3169 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
3173 gimple_seq stmts
= NULL
;
3174 len
= force_gimple_operand (len
, &stmts
, true, NULL_TREE
);
3175 len
= gimple_convert (&stmts
, loc
, size_type_node
, len
);
3176 len
= gimple_build (&stmts
, loc
, PLUS_EXPR
, size_type_node
, len
,
3177 build_int_cst (size_type_node
, 1));
3178 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
3179 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
3180 replace_call_with_call_and_fold (gsi
, repl
);
3185 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
3186 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPCPY_CHK
&& !ignore
3187 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
);
3191 gcall
*repl
= gimple_build_call (fn
, 2, dest
, src
);
3192 dump_transformation (stmt
, repl
);
3193 replace_call_with_call_and_fold (gsi
, repl
);
3197 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
3198 are the arguments to the call. If MAXLEN is not NULL, it is maximum
3199 length passed as third argument. IGNORE is true if return value can be
3200 ignored. FCODE is the BUILT_IN_* code of the builtin. */
3203 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator
*gsi
,
3204 tree dest
, tree src
,
3205 tree len
, tree size
,
3206 enum built_in_function fcode
)
3208 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3209 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
3212 tree maxlen
= get_maxval_strlen (len
, SRK_INT_VALUE
);
3213 if (! integer_all_onesp (size
)
3214 && !known_lower (stmt
, len
, size
) && !known_lower (stmt
, maxlen
, size
))
3216 if (fcode
== BUILT_IN_STPNCPY_CHK
&& ignore
)
3218 /* If return value of __stpncpy_chk is ignored,
3219 optimize into __strncpy_chk. */
3220 fn
= builtin_decl_explicit (BUILT_IN_STRNCPY_CHK
);
3223 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
3224 replace_call_with_call_and_fold (gsi
, repl
);
3231 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
3232 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPNCPY_CHK
&& !ignore
3233 ? BUILT_IN_STPNCPY
: BUILT_IN_STRNCPY
);
3237 gcall
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
3238 dump_transformation (stmt
, repl
);
3239 replace_call_with_call_and_fold (gsi
, repl
);
3243 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
3244 Return NULL_TREE if no simplification can be made. */
3247 gimple_fold_builtin_stpcpy (gimple_stmt_iterator
*gsi
)
3249 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3250 location_t loc
= gimple_location (stmt
);
3251 tree dest
= gimple_call_arg (stmt
, 0);
3252 tree src
= gimple_call_arg (stmt
, 1);
3255 /* If the result is unused, replace stpcpy with strcpy. */
3256 if (gimple_call_lhs (stmt
) == NULL_TREE
)
3258 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3261 gimple_call_set_fndecl (stmt
, fn
);
3266 /* Set to non-null if ARG refers to an unterminated array. */
3267 c_strlen_data data
= { };
3268 /* The size of the unterminated array if SRC referes to one. */
3270 /* True if the size is exact/constant, false if it's the lower bound
3273 tree len
= c_strlen (src
, 1, &data
, 1);
3275 || TREE_CODE (len
) != INTEGER_CST
)
3277 data
.decl
= unterminated_array (src
, &size
, &exact
);
3284 /* Avoid folding calls with unterminated arrays. */
3285 if (!warning_suppressed_p (stmt
, OPT_Wstringop_overread
))
3286 warn_string_no_nul (loc
, stmt
, "stpcpy", src
, data
.decl
, size
,
3288 suppress_warning (stmt
, OPT_Wstringop_overread
);
3292 if (optimize_function_for_size_p (cfun
)
3293 /* If length is zero it's small enough. */
3294 && !integer_zerop (len
))
3297 /* If the source has a known length replace stpcpy with memcpy. */
3298 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
3302 gimple_seq stmts
= NULL
;
3303 tree tem
= gimple_convert (&stmts
, loc
, size_type_node
, len
);
3304 lenp1
= gimple_build (&stmts
, loc
, PLUS_EXPR
, size_type_node
,
3305 tem
, build_int_cst (size_type_node
, 1));
3306 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
3307 gcall
*repl
= gimple_build_call (fn
, 3, dest
, src
, lenp1
);
3308 gimple_move_vops (repl
, stmt
);
3309 gsi_insert_before (gsi
, repl
, GSI_SAME_STMT
);
3310 /* Replace the result with dest + len. */
3312 tem
= gimple_convert (&stmts
, loc
, sizetype
, len
);
3313 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
3314 gassign
*ret
= gimple_build_assign (gimple_call_lhs (stmt
),
3315 POINTER_PLUS_EXPR
, dest
, tem
);
3316 gsi_replace (gsi
, ret
, false);
3317 /* Finally fold the memcpy call. */
3318 gimple_stmt_iterator gsi2
= *gsi
;
3324 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
3325 NULL_TREE if a normal call should be emitted rather than expanding
3326 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
3327 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
3328 passed as second argument. */
3331 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator
*gsi
,
3332 enum built_in_function fcode
)
3334 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3335 tree dest
, size
, len
, fn
, fmt
, flag
;
3336 const char *fmt_str
;
3338 /* Verify the required arguments in the original call. */
3339 if (gimple_call_num_args (stmt
) < 5)
3342 dest
= gimple_call_arg (stmt
, 0);
3343 len
= gimple_call_arg (stmt
, 1);
3344 flag
= gimple_call_arg (stmt
, 2);
3345 size
= gimple_call_arg (stmt
, 3);
3346 fmt
= gimple_call_arg (stmt
, 4);
3348 tree maxlen
= get_maxval_strlen (len
, SRK_INT_VALUE
);
3349 if (! integer_all_onesp (size
)
3350 && !known_lower (stmt
, len
, size
) && !known_lower (stmt
, maxlen
, size
))
3353 if (!init_target_chars ())
3356 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3357 or if format doesn't contain % chars or is "%s". */
3358 if (! integer_zerop (flag
))
3360 fmt_str
= c_getstr (fmt
);
3361 if (fmt_str
== NULL
)
3363 if (strchr (fmt_str
, target_percent
) != NULL
3364 && strcmp (fmt_str
, target_percent_s
))
3368 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3370 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSNPRINTF_CHK
3371 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
);
3375 /* Replace the called function and the first 5 argument by 3 retaining
3376 trailing varargs. */
3377 gimple_call_set_fndecl (stmt
, fn
);
3378 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
3379 gimple_call_set_arg (stmt
, 0, dest
);
3380 gimple_call_set_arg (stmt
, 1, len
);
3381 gimple_call_set_arg (stmt
, 2, fmt
);
3382 for (unsigned i
= 3; i
< gimple_call_num_args (stmt
) - 2; ++i
)
3383 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
3384 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
3389 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
3390 Return NULL_TREE if a normal call should be emitted rather than
3391 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
3392 or BUILT_IN_VSPRINTF_CHK. */
3395 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator
*gsi
,
3396 enum built_in_function fcode
)
3398 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3399 tree dest
, size
, len
, fn
, fmt
, flag
;
3400 const char *fmt_str
;
3401 unsigned nargs
= gimple_call_num_args (stmt
);
3403 /* Verify the required arguments in the original call. */
3406 dest
= gimple_call_arg (stmt
, 0);
3407 flag
= gimple_call_arg (stmt
, 1);
3408 size
= gimple_call_arg (stmt
, 2);
3409 fmt
= gimple_call_arg (stmt
, 3);
3413 if (!init_target_chars ())
3416 /* Check whether the format is a literal string constant. */
3417 fmt_str
= c_getstr (fmt
);
3418 if (fmt_str
!= NULL
)
3420 /* If the format doesn't contain % args or %%, we know the size. */
3421 if (strchr (fmt_str
, target_percent
) == 0)
3423 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
3424 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
3426 /* If the format is "%s" and first ... argument is a string literal,
3427 we know the size too. */
3428 else if (fcode
== BUILT_IN_SPRINTF_CHK
3429 && strcmp (fmt_str
, target_percent_s
) == 0)
3435 arg
= gimple_call_arg (stmt
, 4);
3436 if (POINTER_TYPE_P (TREE_TYPE (arg
)))
3437 len
= c_strlen (arg
, 1);
3442 if (! integer_all_onesp (size
) && !known_lower (stmt
, len
, size
, true))
3445 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3446 or if format doesn't contain % chars or is "%s". */
3447 if (! integer_zerop (flag
))
3449 if (fmt_str
== NULL
)
3451 if (strchr (fmt_str
, target_percent
) != NULL
3452 && strcmp (fmt_str
, target_percent_s
))
3456 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3457 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSPRINTF_CHK
3458 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
);
3462 /* Replace the called function and the first 4 argument by 2 retaining
3463 trailing varargs. */
3464 gimple_call_set_fndecl (stmt
, fn
);
3465 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
3466 gimple_call_set_arg (stmt
, 0, dest
);
3467 gimple_call_set_arg (stmt
, 1, fmt
);
3468 for (unsigned i
= 2; i
< gimple_call_num_args (stmt
) - 2; ++i
)
3469 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
3470 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
3475 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3476 ORIG may be null if this is a 2-argument call. We don't attempt to
3477 simplify calls with more than 3 arguments.
3479 Return true if simplification was possible, otherwise false. */
3482 gimple_fold_builtin_sprintf (gimple_stmt_iterator
*gsi
)
3484 gimple
*stmt
= gsi_stmt (*gsi
);
3486 /* Verify the required arguments in the original call. We deal with two
3487 types of sprintf() calls: 'sprintf (str, fmt)' and
3488 'sprintf (dest, "%s", orig)'. */
3489 if (gimple_call_num_args (stmt
) > 3)
3492 tree orig
= NULL_TREE
;
3493 if (gimple_call_num_args (stmt
) == 3)
3494 orig
= gimple_call_arg (stmt
, 2);
3496 /* Check whether the format is a literal string constant. */
3497 tree fmt
= gimple_call_arg (stmt
, 1);
3498 const char *fmt_str
= c_getstr (fmt
);
3499 if (fmt_str
== NULL
)
3502 tree dest
= gimple_call_arg (stmt
, 0);
3504 if (!init_target_chars ())
3507 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3511 /* If the format doesn't contain % args or %%, use strcpy. */
3512 if (strchr (fmt_str
, target_percent
) == NULL
)
3514 /* Don't optimize sprintf (buf, "abc", ptr++). */
3518 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3519 'format' is known to contain no % formats. */
3520 gimple_seq stmts
= NULL
;
3521 gimple
*repl
= gimple_build_call (fn
, 2, dest
, fmt
);
3523 /* Propagate the NO_WARNING bit to avoid issuing the same
3524 warning more than once. */
3525 copy_warning (repl
, stmt
);
3527 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3528 if (tree lhs
= gimple_call_lhs (stmt
))
3530 repl
= gimple_build_assign (lhs
, build_int_cst (TREE_TYPE (lhs
),
3532 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3533 gsi_replace_with_seq_vops (gsi
, stmts
);
3534 /* gsi now points at the assignment to the lhs, get a
3535 stmt iterator to the memcpy call.
3536 ??? We can't use gsi_for_stmt as that doesn't work when the
3537 CFG isn't built yet. */
3538 gimple_stmt_iterator gsi2
= *gsi
;
3544 gsi_replace_with_seq_vops (gsi
, stmts
);
3550 /* If the format is "%s", use strcpy if the result isn't used. */
3551 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
3553 /* Don't crash on sprintf (str1, "%s"). */
3557 /* Don't fold calls with source arguments of invalid (nonpointer)
3559 if (!POINTER_TYPE_P (TREE_TYPE (orig
)))
3562 tree orig_len
= NULL_TREE
;
3563 if (gimple_call_lhs (stmt
))
3565 orig_len
= get_maxval_strlen (orig
, SRK_STRLEN
);
3570 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3571 gimple_seq stmts
= NULL
;
3572 gimple
*repl
= gimple_build_call (fn
, 2, dest
, orig
);
3574 /* Propagate the NO_WARNING bit to avoid issuing the same
3575 warning more than once. */
3576 copy_warning (repl
, stmt
);
3578 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3579 if (tree lhs
= gimple_call_lhs (stmt
))
3581 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
3582 TREE_TYPE (orig_len
)))
3583 orig_len
= fold_convert (TREE_TYPE (lhs
), orig_len
);
3584 repl
= gimple_build_assign (lhs
, orig_len
);
3585 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3586 gsi_replace_with_seq_vops (gsi
, stmts
);
3587 /* gsi now points at the assignment to the lhs, get a
3588 stmt iterator to the memcpy call.
3589 ??? We can't use gsi_for_stmt as that doesn't work when the
3590 CFG isn't built yet. */
3591 gimple_stmt_iterator gsi2
= *gsi
;
3597 gsi_replace_with_seq_vops (gsi
, stmts
);
3605 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3606 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3607 attempt to simplify calls with more than 4 arguments.
3609 Return true if simplification was possible, otherwise false. */
3612 gimple_fold_builtin_snprintf (gimple_stmt_iterator
*gsi
)
3614 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3615 tree dest
= gimple_call_arg (stmt
, 0);
3616 tree destsize
= gimple_call_arg (stmt
, 1);
3617 tree fmt
= gimple_call_arg (stmt
, 2);
3618 tree orig
= NULL_TREE
;
3619 const char *fmt_str
= NULL
;
3621 if (gimple_call_num_args (stmt
) > 4)
3624 if (gimple_call_num_args (stmt
) == 4)
3625 orig
= gimple_call_arg (stmt
, 3);
3627 /* Check whether the format is a literal string constant. */
3628 fmt_str
= c_getstr (fmt
);
3629 if (fmt_str
== NULL
)
3632 if (!init_target_chars ())
3635 /* If the format doesn't contain % args or %%, use strcpy. */
3636 if (strchr (fmt_str
, target_percent
) == NULL
)
3638 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3642 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3646 tree len
= build_int_cstu (TREE_TYPE (destsize
), strlen (fmt_str
));
3648 /* We could expand this as
3649 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3651 memcpy (str, fmt_with_nul_at_cstm1, cst);
3652 but in the former case that might increase code size
3653 and in the latter case grow .rodata section too much.
3655 if (!known_lower (stmt
, len
, destsize
, true))
3658 gimple_seq stmts
= NULL
;
3659 gimple
*repl
= gimple_build_call (fn
, 2, dest
, fmt
);
3660 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3661 if (tree lhs
= gimple_call_lhs (stmt
))
3663 repl
= gimple_build_assign (lhs
,
3664 fold_convert (TREE_TYPE (lhs
), len
));
3665 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3666 gsi_replace_with_seq_vops (gsi
, stmts
);
3667 /* gsi now points at the assignment to the lhs, get a
3668 stmt iterator to the memcpy call.
3669 ??? We can't use gsi_for_stmt as that doesn't work when the
3670 CFG isn't built yet. */
3671 gimple_stmt_iterator gsi2
= *gsi
;
3677 gsi_replace_with_seq_vops (gsi
, stmts
);
3683 /* If the format is "%s", use strcpy if the result isn't used. */
3684 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
3686 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3690 /* Don't crash on snprintf (str1, cst, "%s"). */
3694 tree orig_len
= get_maxval_strlen (orig
, SRK_STRLEN
);
3696 /* We could expand this as
3697 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3699 memcpy (str1, str2_with_nul_at_cstm1, cst);
3700 but in the former case that might increase code size
3701 and in the latter case grow .rodata section too much.
3703 if (!known_lower (stmt
, orig_len
, destsize
, true))
3706 /* Convert snprintf (str1, cst, "%s", str2) into
3707 strcpy (str1, str2) if strlen (str2) < cst. */
3708 gimple_seq stmts
= NULL
;
3709 gimple
*repl
= gimple_build_call (fn
, 2, dest
, orig
);
3710 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3711 if (tree lhs
= gimple_call_lhs (stmt
))
3713 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
3714 TREE_TYPE (orig_len
)))
3715 orig_len
= fold_convert (TREE_TYPE (lhs
), orig_len
);
3716 repl
= gimple_build_assign (lhs
, orig_len
);
3717 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3718 gsi_replace_with_seq_vops (gsi
, stmts
);
3719 /* gsi now points at the assignment to the lhs, get a
3720 stmt iterator to the memcpy call.
3721 ??? We can't use gsi_for_stmt as that doesn't work when the
3722 CFG isn't built yet. */
3723 gimple_stmt_iterator gsi2
= *gsi
;
3729 gsi_replace_with_seq_vops (gsi
, stmts
);
3737 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3738 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3739 more than 3 arguments, and ARG may be null in the 2-argument case.
3741 Return NULL_TREE if no simplification was possible, otherwise return the
3742 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3743 code of the function to be simplified. */
3746 gimple_fold_builtin_fprintf (gimple_stmt_iterator
*gsi
,
3747 tree fp
, tree fmt
, tree arg
,
3748 enum built_in_function fcode
)
3750 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3751 tree fn_fputc
, fn_fputs
;
3752 const char *fmt_str
= NULL
;
3754 /* If the return value is used, don't do the transformation. */
3755 if (gimple_call_lhs (stmt
) != NULL_TREE
)
3758 /* Check whether the format is a literal string constant. */
3759 fmt_str
= c_getstr (fmt
);
3760 if (fmt_str
== NULL
)
3763 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
3765 /* If we're using an unlocked function, assume the other
3766 unlocked functions exist explicitly. */
3767 fn_fputc
= builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
);
3768 fn_fputs
= builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED
);
3772 fn_fputc
= builtin_decl_implicit (BUILT_IN_FPUTC
);
3773 fn_fputs
= builtin_decl_implicit (BUILT_IN_FPUTS
);
3776 if (!init_target_chars ())
3779 /* If the format doesn't contain % args or %%, use strcpy. */
3780 if (strchr (fmt_str
, target_percent
) == NULL
)
3782 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
3786 /* If the format specifier was "", fprintf does nothing. */
3787 if (fmt_str
[0] == '\0')
3789 replace_call_with_value (gsi
, NULL_TREE
);
3793 /* When "string" doesn't contain %, replace all cases of
3794 fprintf (fp, string) with fputs (string, fp). The fputs
3795 builtin will take care of special cases like length == 1. */
3798 gcall
*repl
= gimple_build_call (fn_fputs
, 2, fmt
, fp
);
3799 replace_call_with_call_and_fold (gsi
, repl
);
3804 /* The other optimizations can be done only on the non-va_list variants. */
3805 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
3808 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3809 else if (strcmp (fmt_str
, target_percent_s
) == 0)
3811 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
3815 gcall
*repl
= gimple_build_call (fn_fputs
, 2, arg
, fp
);
3816 replace_call_with_call_and_fold (gsi
, repl
);
3821 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3822 else if (strcmp (fmt_str
, target_percent_c
) == 0)
3825 || ! useless_type_conversion_p (integer_type_node
, TREE_TYPE (arg
)))
3829 gcall
*repl
= gimple_build_call (fn_fputc
, 2, arg
, fp
);
3830 replace_call_with_call_and_fold (gsi
, repl
);
3838 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3839 FMT and ARG are the arguments to the call; we don't fold cases with
3840 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3842 Return NULL_TREE if no simplification was possible, otherwise return the
3843 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3844 code of the function to be simplified. */
3847 gimple_fold_builtin_printf (gimple_stmt_iterator
*gsi
, tree fmt
,
3848 tree arg
, enum built_in_function fcode
)
3850 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3851 tree fn_putchar
, fn_puts
, newarg
;
3852 const char *fmt_str
= NULL
;
3854 /* If the return value is used, don't do the transformation. */
3855 if (gimple_call_lhs (stmt
) != NULL_TREE
)
3858 /* Check whether the format is a literal string constant. */
3859 fmt_str
= c_getstr (fmt
);
3860 if (fmt_str
== NULL
)
3863 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
3865 /* If we're using an unlocked function, assume the other
3866 unlocked functions exist explicitly. */
3867 fn_putchar
= builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED
);
3868 fn_puts
= builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED
);
3872 fn_putchar
= builtin_decl_implicit (BUILT_IN_PUTCHAR
);
3873 fn_puts
= builtin_decl_implicit (BUILT_IN_PUTS
);
3876 if (!init_target_chars ())
3879 if (strcmp (fmt_str
, target_percent_s
) == 0
3880 || strchr (fmt_str
, target_percent
) == NULL
)
3884 if (strcmp (fmt_str
, target_percent_s
) == 0)
3886 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
3889 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
3892 str
= c_getstr (arg
);
3898 /* The format specifier doesn't contain any '%' characters. */
3899 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
3905 /* If the string was "", printf does nothing. */
3908 replace_call_with_value (gsi
, NULL_TREE
);
3912 /* If the string has length of 1, call putchar. */
3915 /* Given printf("c"), (where c is any one character,)
3916 convert "c"[0] to an int and pass that to the replacement
3918 newarg
= build_int_cst (integer_type_node
, str
[0]);
3921 gcall
*repl
= gimple_build_call (fn_putchar
, 1, newarg
);
3922 replace_call_with_call_and_fold (gsi
, repl
);
3928 /* If the string was "string\n", call puts("string"). */
3929 size_t len
= strlen (str
);
3930 if ((unsigned char)str
[len
- 1] == target_newline
3931 && (size_t) (int) len
== len
3936 /* Create a NUL-terminated string that's one char shorter
3937 than the original, stripping off the trailing '\n'. */
3938 newstr
= xstrdup (str
);
3939 newstr
[len
- 1] = '\0';
3940 newarg
= build_string_literal (len
, newstr
);
3944 gcall
*repl
= gimple_build_call (fn_puts
, 1, newarg
);
3945 replace_call_with_call_and_fold (gsi
, repl
);
3950 /* We'd like to arrange to call fputs(string,stdout) here,
3951 but we need stdout and don't have a way to get it yet. */
3956 /* The other optimizations can be done only on the non-va_list variants. */
3957 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
3960 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3961 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
3963 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
3967 gcall
*repl
= gimple_build_call (fn_puts
, 1, arg
);
3968 replace_call_with_call_and_fold (gsi
, repl
);
3973 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3974 else if (strcmp (fmt_str
, target_percent_c
) == 0)
3976 if (!arg
|| ! useless_type_conversion_p (integer_type_node
,
3981 gcall
*repl
= gimple_build_call (fn_putchar
, 1, arg
);
3982 replace_call_with_call_and_fold (gsi
, repl
);
3992 /* Fold a call to __builtin_strlen with known length LEN. */
3995 gimple_fold_builtin_strlen (gimple_stmt_iterator
*gsi
)
3997 gimple
*stmt
= gsi_stmt (*gsi
);
3998 tree arg
= gimple_call_arg (stmt
, 0);
4003 c_strlen_data lendata
= { };
4004 if (get_range_strlen (arg
, &lendata
, /* eltsize = */ 1)
4006 && lendata
.minlen
&& TREE_CODE (lendata
.minlen
) == INTEGER_CST
4007 && lendata
.maxlen
&& TREE_CODE (lendata
.maxlen
) == INTEGER_CST
)
4009 /* The range of lengths refers to either a single constant
4010 string or to the longest and shortest constant string
4011 referenced by the argument of the strlen() call, or to
4012 the strings that can possibly be stored in the arrays
4013 the argument refers to. */
4014 minlen
= wi::to_wide (lendata
.minlen
);
4015 maxlen
= wi::to_wide (lendata
.maxlen
);
4019 unsigned prec
= TYPE_PRECISION (sizetype
);
4021 minlen
= wi::shwi (0, prec
);
4022 maxlen
= wi::to_wide (max_object_size (), prec
) - 2;
4025 if (minlen
== maxlen
)
4027 /* Fold the strlen call to a constant. */
4028 tree type
= TREE_TYPE (lendata
.minlen
);
4029 tree len
= force_gimple_operand_gsi (gsi
,
4030 wide_int_to_tree (type
, minlen
),
4031 true, NULL
, true, GSI_SAME_STMT
);
4032 replace_call_with_value (gsi
, len
);
4036 /* Set the strlen() range to [0, MAXLEN]. */
4037 if (tree lhs
= gimple_call_lhs (stmt
))
4038 set_strlen_range (lhs
, minlen
, maxlen
);
4043 /* Fold a call to __builtin_acc_on_device. */
4046 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator
*gsi
, tree arg0
)
4048 /* Defer folding until we know which compiler we're in. */
4049 if (symtab
->state
!= EXPANSION
)
4052 unsigned val_host
= GOMP_DEVICE_HOST
;
4053 unsigned val_dev
= GOMP_DEVICE_NONE
;
4055 #ifdef ACCEL_COMPILER
4056 val_host
= GOMP_DEVICE_NOT_HOST
;
4057 val_dev
= ACCEL_COMPILER_acc_device
;
4060 location_t loc
= gimple_location (gsi_stmt (*gsi
));
4062 tree host_eq
= make_ssa_name (boolean_type_node
);
4063 gimple
*host_ass
= gimple_build_assign
4064 (host_eq
, EQ_EXPR
, arg0
, build_int_cst (TREE_TYPE (arg0
), val_host
));
4065 gimple_set_location (host_ass
, loc
);
4066 gsi_insert_before (gsi
, host_ass
, GSI_SAME_STMT
);
4068 tree dev_eq
= make_ssa_name (boolean_type_node
);
4069 gimple
*dev_ass
= gimple_build_assign
4070 (dev_eq
, EQ_EXPR
, arg0
, build_int_cst (TREE_TYPE (arg0
), val_dev
));
4071 gimple_set_location (dev_ass
, loc
);
4072 gsi_insert_before (gsi
, dev_ass
, GSI_SAME_STMT
);
4074 tree result
= make_ssa_name (boolean_type_node
);
4075 gimple
*result_ass
= gimple_build_assign
4076 (result
, BIT_IOR_EXPR
, host_eq
, dev_eq
);
4077 gimple_set_location (result_ass
, loc
);
4078 gsi_insert_before (gsi
, result_ass
, GSI_SAME_STMT
);
4080 replace_call_with_value (gsi
, result
);
4085 /* Fold realloc (0, n) -> malloc (n). */
4088 gimple_fold_builtin_realloc (gimple_stmt_iterator
*gsi
)
4090 gimple
*stmt
= gsi_stmt (*gsi
);
4091 tree arg
= gimple_call_arg (stmt
, 0);
4092 tree size
= gimple_call_arg (stmt
, 1);
4094 if (operand_equal_p (arg
, null_pointer_node
, 0))
4096 tree fn_malloc
= builtin_decl_implicit (BUILT_IN_MALLOC
);
4099 gcall
*repl
= gimple_build_call (fn_malloc
, 1, size
);
4100 replace_call_with_call_and_fold (gsi
, repl
);
4107 /* Number of bytes into which any type but aggregate or vector types
4109 static constexpr size_t clear_padding_unit
4110 = MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
;
4111 /* Buffer size on which __builtin_clear_padding folding code works. */
4112 static const size_t clear_padding_buf_size
= 32 * clear_padding_unit
;
4114 /* Data passed through __builtin_clear_padding folding. */
4115 struct clear_padding_struct
{
4117 /* 0 during __builtin_clear_padding folding, nonzero during
4118 clear_type_padding_in_mask. In that case, instead of clearing the
4119 non-padding bits in union_ptr array clear the padding bits in there. */
4123 gimple_stmt_iterator
*gsi
;
4124 /* Alignment of buf->base + 0. */
4126 /* Offset from buf->base. Should be always a multiple of UNITS_PER_WORD. */
4128 /* Number of padding bytes before buf->off that don't have padding clear
4129 code emitted yet. */
4130 HOST_WIDE_INT padding_bytes
;
4131 /* The size of the whole object. Never emit code to touch
4132 buf->base + buf->sz or following bytes. */
4134 /* Number of bytes recorded in buf->buf. */
4136 /* When inside union, instead of emitting code we and bits inside of
4137 the union_ptr array. */
4138 unsigned char *union_ptr
;
4139 /* Set bits mean padding bits that need to be cleared by the builtin. */
4140 unsigned char buf
[clear_padding_buf_size
+ clear_padding_unit
];
4143 /* Emit code to clear padding requested in BUF->buf - set bits
4144 in there stand for padding that should be cleared. FULL is true
4145 if everything from the buffer should be flushed, otherwise
4146 it can leave up to 2 * clear_padding_unit bytes for further
4150 clear_padding_flush (clear_padding_struct
*buf
, bool full
)
4152 gcc_assert ((clear_padding_unit
% UNITS_PER_WORD
) == 0);
4153 if (!full
&& buf
->size
< 2 * clear_padding_unit
)
4155 gcc_assert ((buf
->off
% UNITS_PER_WORD
) == 0);
4156 size_t end
= buf
->size
;
4158 end
= ((end
- clear_padding_unit
- 1) / clear_padding_unit
4159 * clear_padding_unit
);
4160 size_t padding_bytes
= buf
->padding_bytes
;
4163 if (buf
->clear_in_mask
)
4165 /* During clear_type_padding_in_mask, clear the padding
4166 bits set in buf->buf in the buf->union_ptr mask. */
4167 for (size_t i
= 0; i
< end
; i
++)
4169 if (buf
->buf
[i
] == (unsigned char) ~0)
4173 memset (&buf
->union_ptr
[buf
->off
+ i
- padding_bytes
],
4176 buf
->union_ptr
[buf
->off
+ i
] &= ~buf
->buf
[i
];
4181 memset (&buf
->union_ptr
[buf
->off
+ end
- padding_bytes
],
4185 buf
->padding_bytes
= 0;
4189 memmove (buf
->buf
, buf
->buf
+ end
, buf
->size
- end
);
4192 buf
->padding_bytes
= padding_bytes
;
4196 /* Inside of a union, instead of emitting any code, instead
4197 clear all bits in the union_ptr buffer that are clear
4198 in buf. Whole padding bytes don't clear anything. */
4199 for (size_t i
= 0; i
< end
; i
++)
4201 if (buf
->buf
[i
] == (unsigned char) ~0)
4206 buf
->union_ptr
[buf
->off
+ i
] &= buf
->buf
[i
];
4213 buf
->padding_bytes
= 0;
4217 memmove (buf
->buf
, buf
->buf
+ end
, buf
->size
- end
);
4220 buf
->padding_bytes
= padding_bytes
;
4224 size_t wordsize
= UNITS_PER_WORD
;
4225 for (size_t i
= 0; i
< end
; i
+= wordsize
)
4227 size_t nonzero_first
= wordsize
;
4228 size_t nonzero_last
= 0;
4229 size_t zero_first
= wordsize
;
4230 size_t zero_last
= 0;
4231 bool all_ones
= true, bytes_only
= true;
4232 if ((unsigned HOST_WIDE_INT
) (buf
->off
+ i
+ wordsize
)
4233 > (unsigned HOST_WIDE_INT
) buf
->sz
)
4235 gcc_assert (wordsize
> 1);
4240 for (size_t j
= i
; j
< i
+ wordsize
&& j
< end
; j
++)
4244 if (nonzero_first
== wordsize
)
4246 nonzero_first
= j
- i
;
4247 nonzero_last
= j
- i
;
4249 if (nonzero_last
!= j
- i
)
4251 nonzero_last
= j
+ 1 - i
;
4255 if (zero_first
== wordsize
)
4257 zero_last
= j
+ 1 - i
;
4259 if (buf
->buf
[j
] != 0 && buf
->buf
[j
] != (unsigned char) ~0)
4265 size_t padding_end
= i
;
4268 if (nonzero_first
== 0
4269 && nonzero_last
== wordsize
4272 /* All bits are padding and we had some padding
4273 before too. Just extend it. */
4274 padding_bytes
+= wordsize
;
4277 if (all_ones
&& nonzero_first
== 0)
4279 padding_bytes
+= nonzero_last
;
4280 padding_end
+= nonzero_last
;
4281 nonzero_first
= wordsize
;
4284 else if (bytes_only
&& nonzero_first
== 0)
4286 gcc_assert (zero_first
&& zero_first
!= wordsize
);
4287 padding_bytes
+= zero_first
;
4288 padding_end
+= zero_first
;
4291 if (padding_bytes
== 1)
4293 atype
= char_type_node
;
4294 src
= build_zero_cst (char_type_node
);
4298 atype
= build_array_type_nelts (char_type_node
, padding_bytes
);
4299 src
= build_constructor (atype
, NULL
);
4301 tree dst
= build2_loc (buf
->loc
, MEM_REF
, atype
, buf
->base
,
4302 build_int_cst (buf
->alias_type
,
4303 buf
->off
+ padding_end
4305 gimple
*g
= gimple_build_assign (dst
, src
);
4306 gimple_set_location (g
, buf
->loc
);
4307 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4309 buf
->padding_bytes
= 0;
4311 if (nonzero_first
== wordsize
)
4312 /* All bits in a word are 0, there are no padding bits. */
4314 if (all_ones
&& nonzero_last
== wordsize
)
4316 /* All bits between nonzero_first and end of word are padding
4317 bits, start counting padding_bytes. */
4318 padding_bytes
= nonzero_last
- nonzero_first
;
4323 /* If bitfields aren't involved in this word, prefer storing
4324 individual bytes or groups of them over performing a RMW
4325 operation on the whole word. */
4326 gcc_assert (i
+ zero_last
<= end
);
4327 for (size_t j
= padding_end
; j
< i
+ zero_last
; j
++)
4332 for (k
= j
; k
< i
+ zero_last
; k
++)
4333 if (buf
->buf
[k
] == 0)
4335 HOST_WIDE_INT off
= buf
->off
+ j
;
4339 atype
= char_type_node
;
4340 src
= build_zero_cst (char_type_node
);
4344 atype
= build_array_type_nelts (char_type_node
, k
- j
);
4345 src
= build_constructor (atype
, NULL
);
4347 tree dst
= build2_loc (buf
->loc
, MEM_REF
, atype
,
4349 build_int_cst (buf
->alias_type
, off
));
4350 gimple
*g
= gimple_build_assign (dst
, src
);
4351 gimple_set_location (g
, buf
->loc
);
4352 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4356 if (nonzero_last
== wordsize
)
4357 padding_bytes
= nonzero_last
- zero_last
;
4360 for (size_t eltsz
= 1; eltsz
<= wordsize
; eltsz
<<= 1)
4362 if (nonzero_last
- nonzero_first
<= eltsz
4363 && ((nonzero_first
& ~(eltsz
- 1))
4364 == ((nonzero_last
- 1) & ~(eltsz
- 1))))
4368 type
= char_type_node
;
4370 type
= lang_hooks
.types
.type_for_size (eltsz
* BITS_PER_UNIT
,
4372 size_t start
= nonzero_first
& ~(eltsz
- 1);
4373 HOST_WIDE_INT off
= buf
->off
+ i
+ start
;
4375 if (eltsz
> 1 && buf
->align
< TYPE_ALIGN (type
))
4376 atype
= build_aligned_type (type
, buf
->align
);
4377 tree dst
= build2_loc (buf
->loc
, MEM_REF
, atype
, buf
->base
,
4378 build_int_cst (buf
->alias_type
, off
));
4382 && nonzero_first
== start
4383 && nonzero_last
== start
+ eltsz
)
4384 src
= build_zero_cst (type
);
4387 src
= make_ssa_name (type
);
4388 tree tmp_dst
= unshare_expr (dst
);
4389 /* The folding introduces a read from the tmp_dst, we should
4390 prevent uninitialized warning analysis from issuing warning
4391 for such fake read. In order to suppress warning only for
4392 this expr, we should set the location of tmp_dst to
4393 UNKNOWN_LOCATION first, then suppress_warning will call
4394 set_no_warning_bit to set the no_warning flag only for
4396 SET_EXPR_LOCATION (tmp_dst
, UNKNOWN_LOCATION
);
4397 suppress_warning (tmp_dst
, OPT_Wuninitialized
);
4398 g
= gimple_build_assign (src
, tmp_dst
);
4399 gimple_set_location (g
, buf
->loc
);
4400 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4401 tree mask
= native_interpret_expr (type
,
4402 buf
->buf
+ i
+ start
,
4404 gcc_assert (mask
&& TREE_CODE (mask
) == INTEGER_CST
);
4405 mask
= fold_build1 (BIT_NOT_EXPR
, type
, mask
);
4406 tree src_masked
= make_ssa_name (type
);
4407 g
= gimple_build_assign (src_masked
, BIT_AND_EXPR
,
4409 gimple_set_location (g
, buf
->loc
);
4410 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4413 g
= gimple_build_assign (dst
, src
);
4414 gimple_set_location (g
, buf
->loc
);
4415 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4425 if (padding_bytes
== 1)
4427 atype
= char_type_node
;
4428 src
= build_zero_cst (char_type_node
);
4432 atype
= build_array_type_nelts (char_type_node
, padding_bytes
);
4433 src
= build_constructor (atype
, NULL
);
4435 tree dst
= build2_loc (buf
->loc
, MEM_REF
, atype
, buf
->base
,
4436 build_int_cst (buf
->alias_type
,
4439 gimple
*g
= gimple_build_assign (dst
, src
);
4440 gimple_set_location (g
, buf
->loc
);
4441 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4443 size_t end_rem
= end
% UNITS_PER_WORD
;
4444 buf
->off
+= end
- end_rem
;
4445 buf
->size
= end_rem
;
4446 memset (buf
->buf
, 0, buf
->size
);
4447 buf
->padding_bytes
= 0;
4451 memmove (buf
->buf
, buf
->buf
+ end
, buf
->size
- end
);
4454 buf
->padding_bytes
= padding_bytes
;
4458 /* Append PADDING_BYTES padding bytes. */
4461 clear_padding_add_padding (clear_padding_struct
*buf
,
4462 HOST_WIDE_INT padding_bytes
)
4464 if (padding_bytes
== 0)
4466 if ((unsigned HOST_WIDE_INT
) padding_bytes
+ buf
->size
4467 > (unsigned HOST_WIDE_INT
) clear_padding_buf_size
)
4468 clear_padding_flush (buf
, false);
4469 if ((unsigned HOST_WIDE_INT
) padding_bytes
+ buf
->size
4470 > (unsigned HOST_WIDE_INT
) clear_padding_buf_size
)
4472 memset (buf
->buf
+ buf
->size
, ~0, clear_padding_buf_size
- buf
->size
);
4473 padding_bytes
-= clear_padding_buf_size
- buf
->size
;
4474 buf
->size
= clear_padding_buf_size
;
4475 clear_padding_flush (buf
, false);
4476 gcc_assert (buf
->padding_bytes
);
4477 /* At this point buf->buf[0] through buf->buf[buf->size - 1]
4478 is guaranteed to be all ones. */
4479 padding_bytes
+= buf
->size
;
4480 buf
->size
= padding_bytes
% UNITS_PER_WORD
;
4481 memset (buf
->buf
, ~0, buf
->size
);
4482 buf
->off
+= padding_bytes
- buf
->size
;
4483 buf
->padding_bytes
+= padding_bytes
- buf
->size
;
4487 memset (buf
->buf
+ buf
->size
, ~0, padding_bytes
);
4488 buf
->size
+= padding_bytes
;
4492 static void clear_padding_type (clear_padding_struct
*, tree
,
4493 HOST_WIDE_INT
, bool);
4495 /* Clear padding bits of union type TYPE. */
4498 clear_padding_union (clear_padding_struct
*buf
, tree type
,
4499 HOST_WIDE_INT sz
, bool for_auto_init
)
4501 clear_padding_struct
*union_buf
;
4502 HOST_WIDE_INT start_off
= 0, next_off
= 0;
4503 size_t start_size
= 0;
4506 start_off
= buf
->off
+ buf
->size
;
4507 next_off
= start_off
+ sz
;
4508 start_size
= start_off
% UNITS_PER_WORD
;
4509 start_off
-= start_size
;
4510 clear_padding_flush (buf
, true);
4515 if (sz
+ buf
->size
> clear_padding_buf_size
)
4516 clear_padding_flush (buf
, false);
4517 union_buf
= XALLOCA (clear_padding_struct
);
4518 union_buf
->loc
= buf
->loc
;
4519 union_buf
->clear_in_mask
= buf
->clear_in_mask
;
4520 union_buf
->base
= NULL_TREE
;
4521 union_buf
->alias_type
= NULL_TREE
;
4522 union_buf
->gsi
= NULL
;
4523 union_buf
->align
= 0;
4525 union_buf
->padding_bytes
= 0;
4527 union_buf
->size
= 0;
4528 if (sz
+ buf
->size
<= clear_padding_buf_size
)
4529 union_buf
->union_ptr
= buf
->buf
+ buf
->size
;
4531 union_buf
->union_ptr
= XNEWVEC (unsigned char, sz
);
4532 memset (union_buf
->union_ptr
, ~0, sz
);
4535 for (tree field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
4536 if (TREE_CODE (field
) == FIELD_DECL
&& !DECL_PADDING_P (field
))
4538 if (DECL_SIZE_UNIT (field
) == NULL_TREE
)
4540 if (TREE_TYPE (field
) == error_mark_node
)
4542 gcc_assert (TREE_CODE (TREE_TYPE (field
)) == ARRAY_TYPE
4543 && !COMPLETE_TYPE_P (TREE_TYPE (field
)));
4544 if (!buf
->clear_in_mask
&& !for_auto_init
)
4545 error_at (buf
->loc
, "flexible array member %qD does not have "
4546 "well defined padding bits for %qs",
4547 field
, "__builtin_clear_padding");
4550 HOST_WIDE_INT fldsz
= tree_to_shwi (DECL_SIZE_UNIT (field
));
4551 gcc_assert (union_buf
->size
== 0);
4552 union_buf
->off
= start_off
;
4553 union_buf
->size
= start_size
;
4554 memset (union_buf
->buf
, ~0, start_size
);
4555 clear_padding_type (union_buf
, TREE_TYPE (field
), fldsz
, for_auto_init
);
4556 clear_padding_add_padding (union_buf
, sz
- fldsz
);
4557 clear_padding_flush (union_buf
, true);
4560 if (buf
== union_buf
)
4562 buf
->off
= next_off
;
4563 buf
->size
= next_off
% UNITS_PER_WORD
;
4564 buf
->off
-= buf
->size
;
4565 memset (buf
->buf
, ~0, buf
->size
);
4567 else if (sz
+ buf
->size
<= clear_padding_buf_size
)
4571 unsigned char *union_ptr
= union_buf
->union_ptr
;
4574 clear_padding_flush (buf
, false);
4575 HOST_WIDE_INT this_sz
4576 = MIN ((unsigned HOST_WIDE_INT
) sz
,
4577 clear_padding_buf_size
- buf
->size
);
4578 memcpy (buf
->buf
+ buf
->size
, union_ptr
, this_sz
);
4579 buf
->size
+= this_sz
;
4580 union_ptr
+= this_sz
;
4583 XDELETE (union_buf
->union_ptr
);
4587 /* The only known floating point formats with padding bits are the
4588 IEEE extended ones. */
4591 clear_padding_real_needs_padding_p (tree type
)
4593 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
4595 && fmt
->signbit_ro
== fmt
->signbit_rw
4596 && (fmt
->signbit_ro
== 79 || fmt
->signbit_ro
== 95));
4599 /* Return true if TYPE might contain any padding bits. */
4602 clear_padding_type_may_have_padding_p (tree type
)
4604 switch (TREE_CODE (type
))
4612 return clear_padding_type_may_have_padding_p (TREE_TYPE (type
));
4614 return clear_padding_real_needs_padding_p (type
);
4620 /* Emit a runtime loop:
4621 for (; buf.base != end; buf.base += sz)
4622 __builtin_clear_padding (buf.base); */
4625 clear_padding_emit_loop (clear_padding_struct
*buf
, tree type
,
4626 tree end
, bool for_auto_init
)
4628 tree l1
= create_artificial_label (buf
->loc
);
4629 tree l2
= create_artificial_label (buf
->loc
);
4630 tree l3
= create_artificial_label (buf
->loc
);
4631 gimple
*g
= gimple_build_goto (l2
);
4632 gimple_set_location (g
, buf
->loc
);
4633 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4634 g
= gimple_build_label (l1
);
4635 gimple_set_location (g
, buf
->loc
);
4636 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4637 clear_padding_type (buf
, type
, buf
->sz
, for_auto_init
);
4638 clear_padding_flush (buf
, true);
4639 g
= gimple_build_assign (buf
->base
, POINTER_PLUS_EXPR
, buf
->base
,
4640 size_int (buf
->sz
));
4641 gimple_set_location (g
, buf
->loc
);
4642 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4643 g
= gimple_build_label (l2
);
4644 gimple_set_location (g
, buf
->loc
);
4645 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4646 g
= gimple_build_cond (NE_EXPR
, buf
->base
, end
, l1
, l3
);
4647 gimple_set_location (g
, buf
->loc
);
4648 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4649 g
= gimple_build_label (l3
);
4650 gimple_set_location (g
, buf
->loc
);
4651 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4654 /* Clear padding bits for TYPE. Called recursively from
4655 gimple_fold_builtin_clear_padding. If FOR_AUTO_INIT is true,
4656 the __builtin_clear_padding is not called by the end user,
4657 instead, it's inserted by the compiler to initialize the
4658 paddings of automatic variable. Therefore, we should not
4659 emit the error messages for flexible array members to confuse
4663 clear_padding_type (clear_padding_struct
*buf
, tree type
,
4664 HOST_WIDE_INT sz
, bool for_auto_init
)
4666 switch (TREE_CODE (type
))
4669 HOST_WIDE_INT cur_pos
;
4671 for (tree field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
4672 if (TREE_CODE (field
) == FIELD_DECL
&& !DECL_PADDING_P (field
))
4674 tree ftype
= TREE_TYPE (field
);
4675 if (DECL_BIT_FIELD (field
))
4677 HOST_WIDE_INT fldsz
= TYPE_PRECISION (ftype
);
4680 HOST_WIDE_INT pos
= int_byte_position (field
);
4684 = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
));
4685 bpos
%= BITS_PER_UNIT
;
4687 = ROUND_UP (bpos
+ fldsz
, BITS_PER_UNIT
) / BITS_PER_UNIT
;
4688 if (pos
+ end
> cur_pos
)
4690 clear_padding_add_padding (buf
, pos
+ end
- cur_pos
);
4691 cur_pos
= pos
+ end
;
4693 gcc_assert (cur_pos
> pos
4694 && ((unsigned HOST_WIDE_INT
) buf
->size
4695 >= (unsigned HOST_WIDE_INT
) cur_pos
- pos
));
4696 unsigned char *p
= buf
->buf
+ buf
->size
- (cur_pos
- pos
);
4697 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
)
4698 sorry_at (buf
->loc
, "PDP11 bit-field handling unsupported"
4699 " in %qs", "__builtin_clear_padding");
4700 else if (BYTES_BIG_ENDIAN
)
4703 if (bpos
+ fldsz
<= BITS_PER_UNIT
)
4704 *p
&= ~(((1 << fldsz
) - 1)
4705 << (BITS_PER_UNIT
- bpos
- fldsz
));
4710 *p
&= ~(((1U << BITS_PER_UNIT
) - 1) >> bpos
);
4712 fldsz
-= BITS_PER_UNIT
- bpos
;
4714 memset (p
, 0, fldsz
/ BITS_PER_UNIT
);
4715 p
+= fldsz
/ BITS_PER_UNIT
;
4716 fldsz
%= BITS_PER_UNIT
;
4718 *p
&= ((1U << BITS_PER_UNIT
) - 1) >> fldsz
;
4723 /* Little endian. */
4724 if (bpos
+ fldsz
<= BITS_PER_UNIT
)
4725 *p
&= ~(((1 << fldsz
) - 1) << bpos
);
4730 *p
&= ~(((1 << BITS_PER_UNIT
) - 1) << bpos
);
4732 fldsz
-= BITS_PER_UNIT
- bpos
;
4734 memset (p
, 0, fldsz
/ BITS_PER_UNIT
);
4735 p
+= fldsz
/ BITS_PER_UNIT
;
4736 fldsz
%= BITS_PER_UNIT
;
4738 *p
&= ~((1 << fldsz
) - 1);
4742 else if (DECL_SIZE_UNIT (field
) == NULL_TREE
)
4744 if (ftype
== error_mark_node
)
4746 gcc_assert (TREE_CODE (ftype
) == ARRAY_TYPE
4747 && !COMPLETE_TYPE_P (ftype
));
4748 if (!buf
->clear_in_mask
&& !for_auto_init
)
4749 error_at (buf
->loc
, "flexible array member %qD does not "
4750 "have well defined padding bits for %qs",
4751 field
, "__builtin_clear_padding");
4753 else if (is_empty_type (ftype
))
4757 HOST_WIDE_INT pos
= int_byte_position (field
);
4760 HOST_WIDE_INT fldsz
= tree_to_shwi (DECL_SIZE_UNIT (field
));
4761 gcc_assert (pos
>= 0 && fldsz
>= 0 && pos
>= cur_pos
);
4762 clear_padding_add_padding (buf
, pos
- cur_pos
);
4764 if (tree asbase
= lang_hooks
.types
.classtype_as_base (field
))
4766 clear_padding_type (buf
, ftype
, fldsz
, for_auto_init
);
4770 gcc_assert (sz
>= cur_pos
);
4771 clear_padding_add_padding (buf
, sz
- cur_pos
);
4774 HOST_WIDE_INT nelts
, fldsz
;
4775 fldsz
= int_size_in_bytes (TREE_TYPE (type
));
4780 && sz
> 8 * UNITS_PER_WORD
4781 && buf
->union_ptr
== NULL
4782 && clear_padding_type_may_have_padding_p (TREE_TYPE (type
)))
4784 /* For sufficiently large array of more than one elements,
4785 emit a runtime loop to keep code size manageable. */
4786 tree base
= buf
->base
;
4787 unsigned int prev_align
= buf
->align
;
4788 HOST_WIDE_INT off
= buf
->off
+ buf
->size
;
4789 HOST_WIDE_INT prev_sz
= buf
->sz
;
4790 clear_padding_flush (buf
, true);
4791 tree elttype
= TREE_TYPE (type
);
4792 buf
->base
= create_tmp_var (build_pointer_type (elttype
));
4793 tree end
= make_ssa_name (TREE_TYPE (buf
->base
));
4794 gimple
*g
= gimple_build_assign (buf
->base
, POINTER_PLUS_EXPR
,
4795 base
, size_int (off
));
4796 gimple_set_location (g
, buf
->loc
);
4797 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4798 g
= gimple_build_assign (end
, POINTER_PLUS_EXPR
, buf
->base
,
4800 gimple_set_location (g
, buf
->loc
);
4801 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4803 buf
->align
= TYPE_ALIGN (elttype
);
4806 clear_padding_emit_loop (buf
, elttype
, end
, for_auto_init
);
4809 buf
->align
= prev_align
;
4810 buf
->size
= off
% UNITS_PER_WORD
;
4811 buf
->off
= off
- buf
->size
;
4812 memset (buf
->buf
, 0, buf
->size
);
4815 for (HOST_WIDE_INT i
= 0; i
< nelts
; i
++)
4816 clear_padding_type (buf
, TREE_TYPE (type
), fldsz
, for_auto_init
);
4819 clear_padding_union (buf
, type
, sz
, for_auto_init
);
4822 gcc_assert ((size_t) sz
<= clear_padding_unit
);
4823 if ((unsigned HOST_WIDE_INT
) sz
+ buf
->size
> clear_padding_buf_size
)
4824 clear_padding_flush (buf
, false);
4825 if (clear_padding_real_needs_padding_p (type
))
4827 /* Use native_interpret_real + native_encode_expr to figure out
4828 which bits are padding. */
4829 memset (buf
->buf
+ buf
->size
, ~0, sz
);
4830 tree cst
= native_interpret_real (type
, buf
->buf
+ buf
->size
, sz
);
4831 gcc_assert (cst
&& TREE_CODE (cst
) == REAL_CST
);
4832 int len
= native_encode_expr (cst
, buf
->buf
+ buf
->size
, sz
);
4833 gcc_assert (len
> 0 && (size_t) len
== (size_t) sz
);
4834 for (size_t i
= 0; i
< (size_t) sz
; i
++)
4835 buf
->buf
[buf
->size
+ i
] ^= ~0;
4838 memset (buf
->buf
+ buf
->size
, 0, sz
);
4842 fldsz
= int_size_in_bytes (TREE_TYPE (type
));
4843 clear_padding_type (buf
, TREE_TYPE (type
), fldsz
, for_auto_init
);
4844 clear_padding_type (buf
, TREE_TYPE (type
), fldsz
, for_auto_init
);
4847 nelts
= TYPE_VECTOR_SUBPARTS (type
).to_constant ();
4848 fldsz
= int_size_in_bytes (TREE_TYPE (type
));
4849 for (HOST_WIDE_INT i
= 0; i
< nelts
; i
++)
4850 clear_padding_type (buf
, TREE_TYPE (type
), fldsz
, for_auto_init
);
4853 gcc_assert ((size_t) sz
<= clear_padding_unit
);
4854 if ((unsigned HOST_WIDE_INT
) sz
+ buf
->size
> clear_padding_buf_size
)
4855 clear_padding_flush (buf
, false);
4856 memset (buf
->buf
+ buf
->size
, ~0, sz
);
4860 gcc_assert ((size_t) sz
<= clear_padding_unit
);
4861 if ((unsigned HOST_WIDE_INT
) sz
+ buf
->size
> clear_padding_buf_size
)
4862 clear_padding_flush (buf
, false);
4863 memset (buf
->buf
+ buf
->size
, 0, sz
);
4869 /* Clear padding bits of TYPE in MASK. */
4872 clear_type_padding_in_mask (tree type
, unsigned char *mask
)
4874 clear_padding_struct buf
;
4875 buf
.loc
= UNKNOWN_LOCATION
;
4876 buf
.clear_in_mask
= true;
4877 buf
.base
= NULL_TREE
;
4878 buf
.alias_type
= NULL_TREE
;
4882 buf
.padding_bytes
= 0;
4883 buf
.sz
= int_size_in_bytes (type
);
4885 buf
.union_ptr
= mask
;
4886 clear_padding_type (&buf
, type
, buf
.sz
, false);
4887 clear_padding_flush (&buf
, true);
4890 /* Fold __builtin_clear_padding builtin. */
4893 gimple_fold_builtin_clear_padding (gimple_stmt_iterator
*gsi
)
4895 gimple
*stmt
= gsi_stmt (*gsi
);
4896 gcc_assert (gimple_call_num_args (stmt
) == 2);
4897 tree ptr
= gimple_call_arg (stmt
, 0);
4898 tree typearg
= gimple_call_arg (stmt
, 1);
4899 /* The 2nd argument of __builtin_clear_padding's value is used to
4900 distinguish whether this call is made by the user or by the compiler
4901 for automatic variable initialization. */
4902 bool for_auto_init
= (bool) TREE_INT_CST_LOW (typearg
);
4903 tree type
= TREE_TYPE (TREE_TYPE (typearg
));
4904 location_t loc
= gimple_location (stmt
);
4905 clear_padding_struct buf
;
4906 gimple_stmt_iterator gsiprev
= *gsi
;
4907 /* This should be folded during the lower pass. */
4908 gcc_assert (!gimple_in_ssa_p (cfun
) && cfun
->cfg
== NULL
);
4909 gcc_assert (COMPLETE_TYPE_P (type
));
4910 gsi_prev (&gsiprev
);
4913 buf
.clear_in_mask
= false;
4915 buf
.alias_type
= NULL_TREE
;
4917 buf
.align
= get_pointer_alignment (ptr
);
4918 unsigned int talign
= min_align_of_type (type
) * BITS_PER_UNIT
;
4919 buf
.align
= MAX (buf
.align
, talign
);
4921 buf
.padding_bytes
= 0;
4923 buf
.sz
= int_size_in_bytes (type
);
4924 buf
.union_ptr
= NULL
;
4925 if (buf
.sz
< 0 && int_size_in_bytes (strip_array_types (type
)) < 0)
4926 sorry_at (loc
, "%s not supported for variable length aggregates",
4927 "__builtin_clear_padding");
4928 /* The implementation currently assumes 8-bit host and target
4929 chars which is the case for all currently supported targets
4930 and hosts and is required e.g. for native_{encode,interpret}* APIs. */
4931 else if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
4932 sorry_at (loc
, "%s not supported on this target",
4933 "__builtin_clear_padding");
4934 else if (!clear_padding_type_may_have_padding_p (type
))
4936 else if (TREE_CODE (type
) == ARRAY_TYPE
&& buf
.sz
< 0)
4938 tree sz
= TYPE_SIZE_UNIT (type
);
4939 tree elttype
= type
;
4940 /* Only supports C/C++ VLAs and flattens all the VLA levels. */
4941 while (TREE_CODE (elttype
) == ARRAY_TYPE
4942 && int_size_in_bytes (elttype
) < 0)
4943 elttype
= TREE_TYPE (elttype
);
4944 HOST_WIDE_INT eltsz
= int_size_in_bytes (elttype
);
4945 gcc_assert (eltsz
>= 0);
4948 buf
.base
= create_tmp_var (build_pointer_type (elttype
));
4949 tree end
= make_ssa_name (TREE_TYPE (buf
.base
));
4950 gimple
*g
= gimple_build_assign (buf
.base
, ptr
);
4951 gimple_set_location (g
, loc
);
4952 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
4953 g
= gimple_build_assign (end
, POINTER_PLUS_EXPR
, buf
.base
, sz
);
4954 gimple_set_location (g
, loc
);
4955 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
4957 buf
.align
= TYPE_ALIGN (elttype
);
4958 buf
.alias_type
= build_pointer_type (elttype
);
4959 clear_padding_emit_loop (&buf
, elttype
, end
, for_auto_init
);
4964 if (!is_gimple_mem_ref_addr (buf
.base
))
4966 buf
.base
= make_ssa_name (TREE_TYPE (ptr
));
4967 gimple
*g
= gimple_build_assign (buf
.base
, ptr
);
4968 gimple_set_location (g
, loc
);
4969 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
4971 buf
.alias_type
= build_pointer_type (type
);
4972 clear_padding_type (&buf
, type
, buf
.sz
, for_auto_init
);
4973 clear_padding_flush (&buf
, true);
4976 gimple_stmt_iterator gsiprev2
= *gsi
;
4977 gsi_prev (&gsiprev2
);
4978 if (gsi_stmt (gsiprev
) == gsi_stmt (gsiprev2
))
4979 gsi_replace (gsi
, gimple_build_nop (), true);
4982 gsi_remove (gsi
, true);
4988 /* Fold the non-target builtin at *GSI and return whether any simplification
4992 gimple_fold_builtin (gimple_stmt_iterator
*gsi
)
4994 gcall
*stmt
= as_a
<gcall
*>(gsi_stmt (*gsi
));
4995 tree callee
= gimple_call_fndecl (stmt
);
4997 /* Give up for always_inline inline builtins until they are
4999 if (avoid_folding_inline_builtin (callee
))
5002 unsigned n
= gimple_call_num_args (stmt
);
5003 enum built_in_function fcode
= DECL_FUNCTION_CODE (callee
);
5007 return gimple_fold_builtin_bcmp (gsi
);
5008 case BUILT_IN_BCOPY
:
5009 return gimple_fold_builtin_bcopy (gsi
);
5010 case BUILT_IN_BZERO
:
5011 return gimple_fold_builtin_bzero (gsi
);
5013 case BUILT_IN_MEMSET
:
5014 return gimple_fold_builtin_memset (gsi
,
5015 gimple_call_arg (stmt
, 1),
5016 gimple_call_arg (stmt
, 2));
5017 case BUILT_IN_MEMCPY
:
5018 case BUILT_IN_MEMPCPY
:
5019 case BUILT_IN_MEMMOVE
:
5020 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
5021 gimple_call_arg (stmt
, 1), fcode
);
5022 case BUILT_IN_SPRINTF_CHK
:
5023 case BUILT_IN_VSPRINTF_CHK
:
5024 return gimple_fold_builtin_sprintf_chk (gsi
, fcode
);
5025 case BUILT_IN_STRCAT_CHK
:
5026 return gimple_fold_builtin_strcat_chk (gsi
);
5027 case BUILT_IN_STRNCAT_CHK
:
5028 return gimple_fold_builtin_strncat_chk (gsi
);
5029 case BUILT_IN_STRLEN
:
5030 return gimple_fold_builtin_strlen (gsi
);
5031 case BUILT_IN_STRCPY
:
5032 return gimple_fold_builtin_strcpy (gsi
,
5033 gimple_call_arg (stmt
, 0),
5034 gimple_call_arg (stmt
, 1));
5035 case BUILT_IN_STRNCPY
:
5036 return gimple_fold_builtin_strncpy (gsi
,
5037 gimple_call_arg (stmt
, 0),
5038 gimple_call_arg (stmt
, 1),
5039 gimple_call_arg (stmt
, 2));
5040 case BUILT_IN_STRCAT
:
5041 return gimple_fold_builtin_strcat (gsi
, gimple_call_arg (stmt
, 0),
5042 gimple_call_arg (stmt
, 1));
5043 case BUILT_IN_STRNCAT
:
5044 return gimple_fold_builtin_strncat (gsi
);
5045 case BUILT_IN_INDEX
:
5046 case BUILT_IN_STRCHR
:
5047 return gimple_fold_builtin_strchr (gsi
, false);
5048 case BUILT_IN_RINDEX
:
5049 case BUILT_IN_STRRCHR
:
5050 return gimple_fold_builtin_strchr (gsi
, true);
5051 case BUILT_IN_STRSTR
:
5052 return gimple_fold_builtin_strstr (gsi
);
5053 case BUILT_IN_STRCMP
:
5054 case BUILT_IN_STRCMP_EQ
:
5055 case BUILT_IN_STRCASECMP
:
5056 case BUILT_IN_STRNCMP
:
5057 case BUILT_IN_STRNCMP_EQ
:
5058 case BUILT_IN_STRNCASECMP
:
5059 return gimple_fold_builtin_string_compare (gsi
);
5060 case BUILT_IN_MEMCHR
:
5061 return gimple_fold_builtin_memchr (gsi
);
5062 case BUILT_IN_FPUTS
:
5063 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
5064 gimple_call_arg (stmt
, 1), false);
5065 case BUILT_IN_FPUTS_UNLOCKED
:
5066 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
5067 gimple_call_arg (stmt
, 1), true);
5068 case BUILT_IN_MEMCPY_CHK
:
5069 case BUILT_IN_MEMPCPY_CHK
:
5070 case BUILT_IN_MEMMOVE_CHK
:
5071 case BUILT_IN_MEMSET_CHK
:
5072 return gimple_fold_builtin_memory_chk (gsi
,
5073 gimple_call_arg (stmt
, 0),
5074 gimple_call_arg (stmt
, 1),
5075 gimple_call_arg (stmt
, 2),
5076 gimple_call_arg (stmt
, 3),
5078 case BUILT_IN_STPCPY
:
5079 return gimple_fold_builtin_stpcpy (gsi
);
5080 case BUILT_IN_STRCPY_CHK
:
5081 case BUILT_IN_STPCPY_CHK
:
5082 return gimple_fold_builtin_stxcpy_chk (gsi
,
5083 gimple_call_arg (stmt
, 0),
5084 gimple_call_arg (stmt
, 1),
5085 gimple_call_arg (stmt
, 2),
5087 case BUILT_IN_STRNCPY_CHK
:
5088 case BUILT_IN_STPNCPY_CHK
:
5089 return gimple_fold_builtin_stxncpy_chk (gsi
,
5090 gimple_call_arg (stmt
, 0),
5091 gimple_call_arg (stmt
, 1),
5092 gimple_call_arg (stmt
, 2),
5093 gimple_call_arg (stmt
, 3),
5095 case BUILT_IN_SNPRINTF_CHK
:
5096 case BUILT_IN_VSNPRINTF_CHK
:
5097 return gimple_fold_builtin_snprintf_chk (gsi
, fcode
);
5099 case BUILT_IN_FPRINTF
:
5100 case BUILT_IN_FPRINTF_UNLOCKED
:
5101 case BUILT_IN_VFPRINTF
:
5102 if (n
== 2 || n
== 3)
5103 return gimple_fold_builtin_fprintf (gsi
,
5104 gimple_call_arg (stmt
, 0),
5105 gimple_call_arg (stmt
, 1),
5107 ? gimple_call_arg (stmt
, 2)
5111 case BUILT_IN_FPRINTF_CHK
:
5112 case BUILT_IN_VFPRINTF_CHK
:
5113 if (n
== 3 || n
== 4)
5114 return gimple_fold_builtin_fprintf (gsi
,
5115 gimple_call_arg (stmt
, 0),
5116 gimple_call_arg (stmt
, 2),
5118 ? gimple_call_arg (stmt
, 3)
5122 case BUILT_IN_PRINTF
:
5123 case BUILT_IN_PRINTF_UNLOCKED
:
5124 case BUILT_IN_VPRINTF
:
5125 if (n
== 1 || n
== 2)
5126 return gimple_fold_builtin_printf (gsi
, gimple_call_arg (stmt
, 0),
5128 ? gimple_call_arg (stmt
, 1)
5129 : NULL_TREE
, fcode
);
5131 case BUILT_IN_PRINTF_CHK
:
5132 case BUILT_IN_VPRINTF_CHK
:
5133 if (n
== 2 || n
== 3)
5134 return gimple_fold_builtin_printf (gsi
, gimple_call_arg (stmt
, 1),
5136 ? gimple_call_arg (stmt
, 2)
5137 : NULL_TREE
, fcode
);
5139 case BUILT_IN_ACC_ON_DEVICE
:
5140 return gimple_fold_builtin_acc_on_device (gsi
,
5141 gimple_call_arg (stmt
, 0));
5142 case BUILT_IN_REALLOC
:
5143 return gimple_fold_builtin_realloc (gsi
);
5145 case BUILT_IN_CLEAR_PADDING
:
5146 return gimple_fold_builtin_clear_padding (gsi
);
5151 /* Try the generic builtin folder. */
5152 bool ignore
= (gimple_call_lhs (stmt
) == NULL
);
5153 tree result
= fold_call_stmt (stmt
, ignore
);
5157 STRIP_NOPS (result
);
5159 result
= fold_convert (gimple_call_return_type (stmt
), result
);
5160 gimplify_and_update_call_from_tree (gsi
, result
);
5167 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
5168 function calls to constants, where possible. */
5171 fold_internal_goacc_dim (const gimple
*call
)
5173 int axis
= oacc_get_ifn_dim_arg (call
);
5174 int size
= oacc_get_fn_dim_size (current_function_decl
, axis
);
5175 tree result
= NULL_TREE
;
5176 tree type
= TREE_TYPE (gimple_call_lhs (call
));
5178 switch (gimple_call_internal_fn (call
))
5180 case IFN_GOACC_DIM_POS
:
5181 /* If the size is 1, we know the answer. */
5183 result
= build_int_cst (type
, 0);
5185 case IFN_GOACC_DIM_SIZE
:
5186 /* If the size is not dynamic, we know the answer. */
5188 result
= build_int_cst (type
, size
);
5197 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
5198 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
5199 &var where var is only addressable because of such calls. */
5202 optimize_atomic_compare_exchange_p (gimple
*stmt
)
5204 if (gimple_call_num_args (stmt
) != 6
5205 || !flag_inline_atomics
5207 || sanitize_flags_p (SANITIZE_THREAD
| SANITIZE_ADDRESS
)
5208 || !gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
)
5209 || !gimple_vdef (stmt
)
5210 || !gimple_vuse (stmt
))
5213 tree fndecl
= gimple_call_fndecl (stmt
);
5214 switch (DECL_FUNCTION_CODE (fndecl
))
5216 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
5217 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
5218 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
5219 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
5220 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
5226 tree expected
= gimple_call_arg (stmt
, 1);
5227 if (TREE_CODE (expected
) != ADDR_EXPR
5228 || !SSA_VAR_P (TREE_OPERAND (expected
, 0)))
5231 tree etype
= TREE_TYPE (TREE_OPERAND (expected
, 0));
5232 if (!is_gimple_reg_type (etype
)
5233 || !auto_var_in_fn_p (TREE_OPERAND (expected
, 0), current_function_decl
)
5234 || TREE_THIS_VOLATILE (etype
)
5235 || VECTOR_TYPE_P (etype
)
5236 || TREE_CODE (etype
) == COMPLEX_TYPE
5237 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
5238 might not preserve all the bits. See PR71716. */
5239 || SCALAR_FLOAT_TYPE_P (etype
)
5240 || maybe_ne (TYPE_PRECISION (etype
),
5241 GET_MODE_BITSIZE (TYPE_MODE (etype
))))
5244 tree weak
= gimple_call_arg (stmt
, 3);
5245 if (!integer_zerop (weak
) && !integer_onep (weak
))
5248 tree parmt
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
5249 tree itype
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt
)));
5250 machine_mode mode
= TYPE_MODE (itype
);
5252 if (direct_optab_handler (atomic_compare_and_swap_optab
, mode
)
5254 && optab_handler (sync_compare_and_swap_optab
, mode
) == CODE_FOR_nothing
)
5257 if (maybe_ne (int_size_in_bytes (etype
), GET_MODE_SIZE (mode
)))
5264 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
5266 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
5267 i = IMAGPART_EXPR <t>;
5269 e = REALPART_EXPR <t>; */
5272 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator
*gsi
)
5274 gimple
*stmt
= gsi_stmt (*gsi
);
5275 tree fndecl
= gimple_call_fndecl (stmt
);
5276 tree parmt
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
5277 tree itype
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt
)));
5278 tree ctype
= build_complex_type (itype
);
5279 tree expected
= TREE_OPERAND (gimple_call_arg (stmt
, 1), 0);
5280 bool throws
= false;
5282 gimple
*g
= gimple_build_assign (make_ssa_name (TREE_TYPE (expected
)),
5284 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
5285 gimple_stmt_iterator gsiret
= gsi_for_stmt (g
);
5286 if (!useless_type_conversion_p (itype
, TREE_TYPE (expected
)))
5288 g
= gimple_build_assign (make_ssa_name (itype
), VIEW_CONVERT_EXPR
,
5289 build1 (VIEW_CONVERT_EXPR
, itype
,
5290 gimple_assign_lhs (g
)));
5291 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
5293 int flag
= (integer_onep (gimple_call_arg (stmt
, 3)) ? 256 : 0)
5294 + int_size_in_bytes (itype
);
5295 g
= gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE
, 6,
5296 gimple_call_arg (stmt
, 0),
5297 gimple_assign_lhs (g
),
5298 gimple_call_arg (stmt
, 2),
5299 build_int_cst (integer_type_node
, flag
),
5300 gimple_call_arg (stmt
, 4),
5301 gimple_call_arg (stmt
, 5));
5302 tree lhs
= make_ssa_name (ctype
);
5303 gimple_call_set_lhs (g
, lhs
);
5304 gimple_move_vops (g
, stmt
);
5305 tree oldlhs
= gimple_call_lhs (stmt
);
5306 if (stmt_can_throw_internal (cfun
, stmt
))
5309 e
= find_fallthru_edge (gsi_bb (*gsi
)->succs
);
5311 gimple_call_set_nothrow (as_a
<gcall
*> (g
),
5312 gimple_call_nothrow_p (as_a
<gcall
*> (stmt
)));
5313 gimple_call_set_lhs (stmt
, NULL_TREE
);
5314 gsi_replace (gsi
, g
, true);
5317 g
= gimple_build_assign (make_ssa_name (itype
), IMAGPART_EXPR
,
5318 build1 (IMAGPART_EXPR
, itype
, lhs
));
5321 gsi_insert_on_edge_immediate (e
, g
);
5322 *gsi
= gsi_for_stmt (g
);
5325 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
5326 g
= gimple_build_assign (oldlhs
, NOP_EXPR
, gimple_assign_lhs (g
));
5327 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
5329 g
= gimple_build_assign (make_ssa_name (itype
), REALPART_EXPR
,
5330 build1 (REALPART_EXPR
, itype
, lhs
));
5331 if (throws
&& oldlhs
== NULL_TREE
)
5333 gsi_insert_on_edge_immediate (e
, g
);
5334 *gsi
= gsi_for_stmt (g
);
5337 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
5338 if (!useless_type_conversion_p (TREE_TYPE (expected
), itype
))
5340 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (expected
)),
5342 build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (expected
),
5343 gimple_assign_lhs (g
)));
5344 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
5346 g
= gimple_build_assign (expected
, SSA_NAME
, gimple_assign_lhs (g
));
5347 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
5351 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
5352 doesn't fit into TYPE. The test for overflow should be regardless of
5353 -fwrapv, and even for unsigned types. */
5356 arith_overflowed_p (enum tree_code code
, const_tree type
,
5357 const_tree arg0
, const_tree arg1
)
5359 widest2_int warg0
= widest2_int_cst (arg0
);
5360 widest2_int warg1
= widest2_int_cst (arg1
);
5364 case PLUS_EXPR
: wres
= wi::add (warg0
, warg1
); break;
5365 case MINUS_EXPR
: wres
= wi::sub (warg0
, warg1
); break;
5366 case MULT_EXPR
: wres
= wi::mul (warg0
, warg1
); break;
5367 default: gcc_unreachable ();
5369 signop sign
= TYPE_SIGN (type
);
5370 if (sign
== UNSIGNED
&& wi::neg_p (wres
))
5372 return wi::min_precision (wres
, sign
) > TYPE_PRECISION (type
);
5375 /* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
5376 for the memory it references, otherwise return null. VECTYPE is the
5377 type of the memory vector. */
5380 gimple_fold_mask_load_store_mem_ref (gcall
*call
, tree vectype
)
5382 tree ptr
= gimple_call_arg (call
, 0);
5383 tree alias_align
= gimple_call_arg (call
, 1);
5384 tree mask
= gimple_call_arg (call
, 2);
5385 if (!tree_fits_uhwi_p (alias_align
) || !integer_all_onesp (mask
))
5388 unsigned HOST_WIDE_INT align
= tree_to_uhwi (alias_align
);
5389 if (TYPE_ALIGN (vectype
) != align
)
5390 vectype
= build_aligned_type (vectype
, align
);
5391 tree offset
= build_zero_cst (TREE_TYPE (alias_align
));
5392 return fold_build2 (MEM_REF
, vectype
, ptr
, offset
);
5395 /* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
5398 gimple_fold_mask_load (gimple_stmt_iterator
*gsi
, gcall
*call
)
5400 tree lhs
= gimple_call_lhs (call
);
5404 if (tree rhs
= gimple_fold_mask_load_store_mem_ref (call
, TREE_TYPE (lhs
)))
5406 gassign
*new_stmt
= gimple_build_assign (lhs
, rhs
);
5407 gimple_set_location (new_stmt
, gimple_location (call
));
5408 gimple_move_vops (new_stmt
, call
);
5409 gsi_replace (gsi
, new_stmt
, false);
5415 /* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
5418 gimple_fold_mask_store (gimple_stmt_iterator
*gsi
, gcall
*call
)
5420 tree rhs
= gimple_call_arg (call
, 3);
5421 if (tree lhs
= gimple_fold_mask_load_store_mem_ref (call
, TREE_TYPE (rhs
)))
5423 gassign
*new_stmt
= gimple_build_assign (lhs
, rhs
);
5424 gimple_set_location (new_stmt
, gimple_location (call
));
5425 gimple_move_vops (new_stmt
, call
);
5426 gsi_replace (gsi
, new_stmt
, false);
5432 /* Attempt to fold a call statement referenced by the statement iterator GSI.
5433 The statement may be replaced by another statement, e.g., if the call
5434 simplifies to a constant value. Return true if any changes were made.
5435 It is assumed that the operands have been previously folded. */
5438 gimple_fold_call (gimple_stmt_iterator
*gsi
, bool inplace
)
5440 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
5442 bool changed
= false;
5444 /* Check for virtual calls that became direct calls. */
5445 callee
= gimple_call_fn (stmt
);
5446 if (callee
&& TREE_CODE (callee
) == OBJ_TYPE_REF
)
5448 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee
)) != NULL_TREE
)
5450 if (dump_file
&& virtual_method_call_p (callee
)
5451 && !possible_polymorphic_call_target_p
5452 (callee
, stmt
, cgraph_node::get (gimple_call_addr_fndecl
5453 (OBJ_TYPE_REF_EXPR (callee
)))))
5456 "Type inheritance inconsistent devirtualization of ");
5457 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
5458 fprintf (dump_file
, " to ");
5459 print_generic_expr (dump_file
, callee
, TDF_SLIM
);
5460 fprintf (dump_file
, "\n");
5463 gimple_call_set_fn (stmt
, OBJ_TYPE_REF_EXPR (callee
));
5466 else if (flag_devirtualize
&& !inplace
&& virtual_method_call_p (callee
))
5469 vec
<cgraph_node
*>targets
5470 = possible_polymorphic_call_targets (callee
, stmt
, &final
);
5471 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
5473 tree lhs
= gimple_call_lhs (stmt
);
5474 if (dump_enabled_p ())
5476 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, stmt
,
5477 "folding virtual function call to %s\n",
5478 targets
.length () == 1
5479 ? targets
[0]->name ()
5480 : "__builtin_unreachable");
5482 if (targets
.length () == 1)
5484 tree fndecl
= targets
[0]->decl
;
5485 gimple_call_set_fndecl (stmt
, fndecl
);
5487 /* If changing the call to __cxa_pure_virtual
5488 or similar noreturn function, adjust gimple_call_fntype
5490 if (gimple_call_noreturn_p (stmt
)
5491 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl
)))
5492 && TYPE_ARG_TYPES (TREE_TYPE (fndecl
))
5493 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl
)))
5495 gimple_call_set_fntype (stmt
, TREE_TYPE (fndecl
));
5496 /* If the call becomes noreturn, remove the lhs. */
5498 && gimple_call_noreturn_p (stmt
)
5499 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt
)))
5500 || should_remove_lhs_p (lhs
)))
5502 if (TREE_CODE (lhs
) == SSA_NAME
)
5504 tree var
= create_tmp_var (TREE_TYPE (lhs
));
5505 tree def
= get_or_create_ssa_default_def (cfun
, var
);
5506 gimple
*new_stmt
= gimple_build_assign (lhs
, def
);
5507 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
5509 gimple_call_set_lhs (stmt
, NULL_TREE
);
5511 maybe_remove_unused_call_args (cfun
, stmt
);
5515 tree fndecl
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
5516 gimple
*new_stmt
= gimple_build_call (fndecl
, 0);
5517 gimple_set_location (new_stmt
, gimple_location (stmt
));
5518 /* If the call had a SSA name as lhs morph that into
5519 an uninitialized value. */
5520 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
5522 tree var
= create_tmp_var (TREE_TYPE (lhs
));
5523 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs
, var
);
5524 SSA_NAME_DEF_STMT (lhs
) = gimple_build_nop ();
5525 set_ssa_default_def (cfun
, var
, lhs
);
5527 gimple_move_vops (new_stmt
, stmt
);
5528 gsi_replace (gsi
, new_stmt
, false);
5535 /* Check for indirect calls that became direct calls, and then
5536 no longer require a static chain. */
5537 if (gimple_call_chain (stmt
))
5539 tree fn
= gimple_call_fndecl (stmt
);
5540 if (fn
&& !DECL_STATIC_CHAIN (fn
))
5542 gimple_call_set_chain (stmt
, NULL
);
5550 /* Check for builtins that CCP can handle using information not
5551 available in the generic fold routines. */
5552 if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
5554 if (gimple_fold_builtin (gsi
))
5557 else if (gimple_call_builtin_p (stmt
, BUILT_IN_MD
))
5559 changed
|= targetm
.gimple_fold_builtin (gsi
);
5561 else if (gimple_call_internal_p (stmt
))
5563 enum tree_code subcode
= ERROR_MARK
;
5564 tree result
= NULL_TREE
;
5565 bool cplx_result
= false;
5566 tree overflow
= NULL_TREE
;
5567 switch (gimple_call_internal_fn (stmt
))
5569 case IFN_BUILTIN_EXPECT
:
5570 result
= fold_builtin_expect (gimple_location (stmt
),
5571 gimple_call_arg (stmt
, 0),
5572 gimple_call_arg (stmt
, 1),
5573 gimple_call_arg (stmt
, 2),
5576 case IFN_UBSAN_OBJECT_SIZE
:
5578 tree offset
= gimple_call_arg (stmt
, 1);
5579 tree objsize
= gimple_call_arg (stmt
, 2);
5580 if (integer_all_onesp (objsize
)
5581 || (TREE_CODE (offset
) == INTEGER_CST
5582 && TREE_CODE (objsize
) == INTEGER_CST
5583 && tree_int_cst_le (offset
, objsize
)))
5585 replace_call_with_value (gsi
, NULL_TREE
);
5591 if (integer_zerop (gimple_call_arg (stmt
, 1)))
5593 replace_call_with_value (gsi
, NULL_TREE
);
5597 case IFN_UBSAN_BOUNDS
:
5599 tree index
= gimple_call_arg (stmt
, 1);
5600 tree bound
= gimple_call_arg (stmt
, 2);
5601 if (TREE_CODE (index
) == INTEGER_CST
5602 && TREE_CODE (bound
) == INTEGER_CST
)
5604 index
= fold_convert (TREE_TYPE (bound
), index
);
5605 if (TREE_CODE (index
) == INTEGER_CST
5606 && tree_int_cst_le (index
, bound
))
5608 replace_call_with_value (gsi
, NULL_TREE
);
5614 case IFN_GOACC_DIM_SIZE
:
5615 case IFN_GOACC_DIM_POS
:
5616 result
= fold_internal_goacc_dim (stmt
);
5618 case IFN_UBSAN_CHECK_ADD
:
5619 subcode
= PLUS_EXPR
;
5621 case IFN_UBSAN_CHECK_SUB
:
5622 subcode
= MINUS_EXPR
;
5624 case IFN_UBSAN_CHECK_MUL
:
5625 subcode
= MULT_EXPR
;
5627 case IFN_ADD_OVERFLOW
:
5628 subcode
= PLUS_EXPR
;
5631 case IFN_SUB_OVERFLOW
:
5632 subcode
= MINUS_EXPR
;
5635 case IFN_MUL_OVERFLOW
:
5636 subcode
= MULT_EXPR
;
5640 changed
|= gimple_fold_mask_load (gsi
, stmt
);
5642 case IFN_MASK_STORE
:
5643 changed
|= gimple_fold_mask_store (gsi
, stmt
);
5648 if (subcode
!= ERROR_MARK
)
5650 tree arg0
= gimple_call_arg (stmt
, 0);
5651 tree arg1
= gimple_call_arg (stmt
, 1);
5652 tree type
= TREE_TYPE (arg0
);
5655 tree lhs
= gimple_call_lhs (stmt
);
5656 if (lhs
== NULL_TREE
)
5659 type
= TREE_TYPE (TREE_TYPE (lhs
));
5661 if (type
== NULL_TREE
)
5663 /* x = y + 0; x = y - 0; x = y * 0; */
5664 else if (integer_zerop (arg1
))
5665 result
= subcode
== MULT_EXPR
? integer_zero_node
: arg0
;
5666 /* x = 0 + y; x = 0 * y; */
5667 else if (subcode
!= MINUS_EXPR
&& integer_zerop (arg0
))
5668 result
= subcode
== MULT_EXPR
? integer_zero_node
: arg1
;
5670 else if (subcode
== MINUS_EXPR
&& operand_equal_p (arg0
, arg1
, 0))
5671 result
= integer_zero_node
;
5672 /* x = y * 1; x = 1 * y; */
5673 else if (subcode
== MULT_EXPR
&& integer_onep (arg1
))
5675 else if (subcode
== MULT_EXPR
&& integer_onep (arg0
))
5677 else if (TREE_CODE (arg0
) == INTEGER_CST
5678 && TREE_CODE (arg1
) == INTEGER_CST
)
5681 result
= int_const_binop (subcode
, fold_convert (type
, arg0
),
5682 fold_convert (type
, arg1
));
5684 result
= int_const_binop (subcode
, arg0
, arg1
);
5685 if (result
&& arith_overflowed_p (subcode
, type
, arg0
, arg1
))
5688 overflow
= build_one_cst (type
);
5695 if (result
== integer_zero_node
)
5696 result
= build_zero_cst (type
);
5697 else if (cplx_result
&& TREE_TYPE (result
) != type
)
5699 if (TREE_CODE (result
) == INTEGER_CST
)
5701 if (arith_overflowed_p (PLUS_EXPR
, type
, result
,
5703 overflow
= build_one_cst (type
);
5705 else if ((!TYPE_UNSIGNED (TREE_TYPE (result
))
5706 && TYPE_UNSIGNED (type
))
5707 || (TYPE_PRECISION (type
)
5708 < (TYPE_PRECISION (TREE_TYPE (result
))
5709 + (TYPE_UNSIGNED (TREE_TYPE (result
))
5710 && !TYPE_UNSIGNED (type
)))))
5713 result
= fold_convert (type
, result
);
5720 if (TREE_CODE (result
) == INTEGER_CST
&& TREE_OVERFLOW (result
))
5721 result
= drop_tree_overflow (result
);
5724 if (overflow
== NULL_TREE
)
5725 overflow
= build_zero_cst (TREE_TYPE (result
));
5726 tree ctype
= build_complex_type (TREE_TYPE (result
));
5727 if (TREE_CODE (result
) == INTEGER_CST
5728 && TREE_CODE (overflow
) == INTEGER_CST
)
5729 result
= build_complex (ctype
, result
, overflow
);
5731 result
= build2_loc (gimple_location (stmt
), COMPLEX_EXPR
,
5732 ctype
, result
, overflow
);
5734 gimplify_and_update_call_from_tree (gsi
, result
);
5743 /* Return true whether NAME has a use on STMT. */
/* Walks NAME's immediate-use list with FOR_EACH_IMM_USE_FAST and compares
   each use's statement against STMT.  NOTE(review): this extraction is
   lossy (original lines 5744-5745 and 5752-5755 are absent), so the
   function's return statements and braces are not visible here; all
   visible tokens are kept byte-identical.  */
5746 has_use_on_stmt (tree name
, gimple
*stmt
)
5748 imm_use_iterator iter
;
5749 use_operand_p use_p
;
5750 FOR_EACH_IMM_USE_FAST (use_p
, iter
, name
)
5751 if (USE_STMT (use_p
) == stmt
)
5756 /* Worker for fold_stmt_1 dispatch to pattern based folding with
5759 Replaces *GSI with the simplification result in RCODE and OPS
5760 and the associated statements in *SEQ. Does the replacement
5761 according to INPLACE and returns true if the operation succeeded. */
/* NOTE(review): extraction is lossy here (original line numbers jump,
   e.g. 5756->5759, 5786->5789); return statements and braces between the
   visible fragments are missing.  Visible tokens kept byte-identical.  */
5764 replace_stmt_with_simplification (gimple_stmt_iterator
*gsi
,
5765 gimple_match_op
*res_op
,
5766 gimple_seq
*seq
, bool inplace
)
5768 gimple
*stmt
= gsi_stmt (*gsi
);
5769 tree
*ops
= res_op
->ops
;
5770 unsigned int num_ops
= res_op
->num_ops
;
5772 /* Play safe and do not allow abnormals to be mentioned in
5773 newly created statements. See also maybe_push_res_to_seq.
5774 As an exception allow such uses if there was a use of the
5775 same SSA name on the old stmt. */
5776 for (unsigned int i
= 0; i
< num_ops
; ++i
)
5777 if (TREE_CODE (ops
[i
]) == SSA_NAME
5778 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[i
])
5779 && !has_use_on_stmt (ops
[i
], stmt
))
/* If the first result operand is itself a comparison tree, also reject
   abnormal SSA names appearing as either of its two operands.  */
5782 if (num_ops
> 0 && COMPARISON_CLASS_P (ops
[0]))
5783 for (unsigned int i
= 0; i
< 2; ++i
)
5784 if (TREE_CODE (TREE_OPERAND (ops
[0], i
)) == SSA_NAME
5785 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops
[0], i
))
5786 && !has_use_on_stmt (TREE_OPERAND (ops
[0], i
), stmt
))
5789 /* Don't insert new statements when INPLACE is true, even if we could
5790 reuse STMT for the final statement. */
5791 if (inplace
&& !gimple_seq_empty_p (*seq
))
/* Case 1: STMT is a GIMPLE_COND — install the simplified condition
   directly on the condition statement.  */
5794 if (gcond
*cond_stmt
= dyn_cast
<gcond
*> (stmt
))
5796 gcc_assert (res_op
->code
.is_tree_code ());
5797 auto code
= tree_code (res_op
->code
);
5798 if (TREE_CODE_CLASS (code
) == tcc_comparison
5799 /* GIMPLE_CONDs condition may not throw. */
5800 && (!flag_exceptions
5801 || !cfun
->can_throw_non_call_exceptions
5802 || !operation_could_trap_p (code
,
5803 FLOAT_TYPE_P (TREE_TYPE (ops
[0])),
5805 gimple_cond_set_condition (cond_stmt
, code
, ops
[0], ops
[1]);
5806 else if (code
== SSA_NAME
)
5807 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, ops
[0],
5808 build_zero_cst (TREE_TYPE (ops
[0])));
5809 else if (code
== INTEGER_CST
)
5811 if (integer_zerop (ops
[0]))
5812 gimple_cond_make_false (cond_stmt
);
5814 gimple_cond_make_true (cond_stmt
);
/* Otherwise materialize the simplification into *SEQ and compare the
   resulting SSA name against zero.  */
5818 tree res
= maybe_push_res_to_seq (res_op
, seq
);
5821 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, res
,
5822 build_zero_cst (TREE_TYPE (res
)));
5826 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5828 fprintf (dump_file
, "gimple_simplified to ");
5829 if (!gimple_seq_empty_p (*seq
))
5830 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
5831 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
5834 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
/* Case 2: STMT is an assignment and the result has a tree code —
   rewrite the RHS in place.  */
5837 else if (is_gimple_assign (stmt
)
5838 && res_op
->code
.is_tree_code ())
5840 auto code
= tree_code (res_op
->code
);
5842 || gimple_num_ops (stmt
) > get_gimple_rhs_num_ops (code
))
5844 maybe_build_generic_op (res_op
);
5845 gimple_assign_set_rhs_with_ops (gsi
, code
,
5846 res_op
->op_or_null (0),
5847 res_op
->op_or_null (1),
5848 res_op
->op_or_null (2));
5849 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5851 fprintf (dump_file
, "gimple_simplified to ");
5852 if (!gimple_seq_empty_p (*seq
))
5853 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
5854 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
5857 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
/* Case 3: the result is a (combined) function call matching STMT's
   callee — only the arguments need updating.  */
5861 else if (res_op
->code
.is_fn_code ()
5862 && gimple_call_combined_fn (stmt
) == combined_fn (res_op
->code
))
5864 gcc_assert (num_ops
== gimple_call_num_args (stmt
));
5865 for (unsigned int i
= 0; i
< num_ops
; ++i
)
5866 gimple_call_set_arg (stmt
, i
, ops
[i
]);
5867 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5869 fprintf (dump_file
, "gimple_simplified to ");
5870 if (!gimple_seq_empty_p (*seq
))
5871 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
5872 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
), 0, TDF_SLIM
);
5874 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
/* Case 4 (non-inplace): push the result to *SEQ targeting STMT's lhs and
   replace the whole statement, preserving virtual operands.  */
5879 if (gimple_has_lhs (stmt
))
5881 tree lhs
= gimple_get_lhs (stmt
);
5882 if (!maybe_push_res_to_seq (res_op
, seq
, lhs
))
5884 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5886 fprintf (dump_file
, "gimple_simplified to ");
5887 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
5889 gsi_replace_with_seq_vops (gsi
, *seq
);
5899 /* Canonicalize MEM_REFs invariant address operand after propagation. */
/* NOTE(review): lossy extraction — the declarations of ORIG_T (presumably
   saved from T on entry; it is dereferenced below), EXT, MOFFSET and
   COFFSET, plus various braces/returns, are not visible.  Visible tokens
   kept byte-identical.  */
5902 maybe_canonicalize_mem_ref_addr (tree
*t
, bool is_debug
= false)
5907 if (TREE_CODE (*t
) == ADDR_EXPR
)
5908 t
= &TREE_OPERAND (*t
, 0);
5910 /* The C and C++ frontends use an ARRAY_REF for indexing with their
5911 generic vector extension. The actual vector referenced is
5912 view-converted to an array type for this purpose. If the index
5913 is constant the canonical representation in the middle-end is a
5914 BIT_FIELD_REF so re-write the former to the latter here. */
5915 if (TREE_CODE (*t
) == ARRAY_REF
5916 && TREE_CODE (TREE_OPERAND (*t
, 0)) == VIEW_CONVERT_EXPR
5917 && TREE_CODE (TREE_OPERAND (*t
, 1)) == INTEGER_CST
5918 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0))))
5920 tree vtype
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0));
5921 if (VECTOR_TYPE_P (vtype
))
5923 tree low
= array_ref_low_bound (*t
);
5924 if (TREE_CODE (low
) == INTEGER_CST
)
5926 if (tree_int_cst_le (low
, TREE_OPERAND (*t
, 1)))
/* Compute the zero-based bit index of the accessed element and check that
   the access (idx .. idx + elt-size) stays within the vector's size.  */
5928 widest_int idx
= wi::sub (wi::to_widest (TREE_OPERAND (*t
, 1)),
5929 wi::to_widest (low
));
5930 idx
= wi::mul (idx
, wi::to_widest
5931 (TYPE_SIZE (TREE_TYPE (*t
))));
5933 = wi::add (idx
, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t
))));
5934 if (wi::les_p (ext
, wi::to_widest (TYPE_SIZE (vtype
))))
5936 *t
= build3_loc (EXPR_LOCATION (*t
), BIT_FIELD_REF
,
5938 TREE_OPERAND (TREE_OPERAND (*t
, 0), 0),
5939 TYPE_SIZE (TREE_TYPE (*t
)),
5940 wide_int_to_tree (bitsizetype
, idx
));
/* Strip handled components to reach the base reference.  */
5948 while (handled_component_p (*t
))
5949 t
= &TREE_OPERAND (*t
, 0);
5951 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
5952 of invariant addresses into a SSA name MEM_REF address. */
5953 if (TREE_CODE (*t
) == MEM_REF
5954 || TREE_CODE (*t
) == TARGET_MEM_REF
)
5956 tree addr
= TREE_OPERAND (*t
, 0);
5957 if (TREE_CODE (addr
) == ADDR_EXPR
5958 && (TREE_CODE (TREE_OPERAND (addr
, 0)) == MEM_REF
5959 || handled_component_p (TREE_OPERAND (addr
, 0))))
5963 base
= get_addr_base_and_unit_offset (TREE_OPERAND (addr
, 0),
/* Fold the recovered unit offset into the MEM_REF's constant offset
   operand.  */
5972 TREE_OPERAND (*t
, 0) = build_fold_addr_expr (base
);
5973 TREE_OPERAND (*t
, 1) = int_const_binop (PLUS_EXPR
,
5974 TREE_OPERAND (*t
, 1),
5975 size_int (coffset
));
5978 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t
, 0)) == DEBUG_EXPR_DECL
5979 || is_gimple_mem_ref_addr (TREE_OPERAND (*t
, 0)));
5982 /* Canonicalize back MEM_REFs to plain reference trees if the object
5983 accessed is a decl that has the same access semantics as the MEM_REF. */
5984 if (TREE_CODE (*t
) == MEM_REF
5985 && TREE_CODE (TREE_OPERAND (*t
, 0)) == ADDR_EXPR
5986 && integer_zerop (TREE_OPERAND (*t
, 1))
5987 && MR_DEPENDENCE_CLIQUE (*t
) == 0)
5989 tree decl
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
5990 tree alias_type
= TREE_TYPE (TREE_OPERAND (*t
, 1));
5991 if (/* Same volatile qualification. */
5992 TREE_THIS_VOLATILE (*t
) == TREE_THIS_VOLATILE (decl
)
5993 /* Same TBAA behavior with -fstrict-aliasing. */
5994 && !TYPE_REF_CAN_ALIAS_ALL (alias_type
)
5995 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl
))
5996 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type
)))
5997 /* Same alignment. */
5998 && TYPE_ALIGN (TREE_TYPE (decl
)) == TYPE_ALIGN (TREE_TYPE (*t
))
5999 /* We have to look out here to not drop a required conversion
6000 from the rhs to the lhs if *t appears on the lhs or vice-versa
6001 if it appears on the rhs. Thus require strict type
6003 && types_compatible_p (TREE_TYPE (*t
), TREE_TYPE (decl
)))
6005 *t
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
/* &MEM[ptr + CST] with a constant "pointer" folds to a plain integer
   constant address.  */
6010 else if (TREE_CODE (*orig_t
) == ADDR_EXPR
6011 && TREE_CODE (*t
) == MEM_REF
6012 && TREE_CODE (TREE_OPERAND (*t
, 0)) == INTEGER_CST
)
6016 base
= get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t
, 0),
6020 gcc_assert (TREE_CODE (base
) == MEM_REF
)
;
6022 if (mem_ref_offset (base
).to_shwi (&moffset
))
6025 if (wi::to_poly_wide (TREE_OPERAND (base
, 0)).to_shwi (&moffset
))
6028 *orig_t
= build_int_cst (TREE_TYPE (*orig_t
), coffset
);
6035 /* Canonicalize TARGET_MEM_REF in particular with respect to
6036 the indexes becoming constant. */
6037 else if (TREE_CODE (*t
) == TARGET_MEM_REF
)
6039 tree tem
= maybe_fold_tmr (*t
);
6043 if (TREE_CODE (*orig_t
) == ADDR_EXPR
)
6044 recompute_tree_invariant_for_addr_expr (*orig_t
);
6052 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
6053 distinguishes both cases. */
/* Three phases are visible below: (1) canonicalize [TARGET_]MEM_REF
   addresses and operand order per statement kind, (2) dispatch to
   pattern-based folding via gimple_simplify /
   replace_stmt_with_simplification, (3) fold the main computation per
   statement kind.  NOTE(review): lossy extraction — case labels, braces
   and several statements between the numbered fragments are missing;
   visible tokens kept byte-identical.  */
6056 fold_stmt_1 (gimple_stmt_iterator
*gsi
, bool inplace
, tree (*valueize
) (tree
))
6058 bool changed
= false;
6059 gimple
*stmt
= gsi_stmt (*gsi
);
6060 bool nowarning
= warning_suppressed_p (stmt
, OPT_Wstrict_overflow
);
6062 fold_defer_overflow_warnings ();
6064 /* First do required canonicalization of [TARGET_]MEM_REF addresses
6066 ??? This shouldn't be done in generic folding but in the
6067 propagation helpers which also know whether an address was
6069 Also canonicalize operand order. */
6070 switch (gimple_code (stmt
))
/* GIMPLE_ASSIGN: canonicalize a single-RHS reference/address on either
   side of the assignment.  */
6073 if (gimple_assign_rhs_class (stmt
) == GIMPLE_SINGLE_RHS
)
6075 tree
*rhs
= gimple_assign_rhs1_ptr (stmt
);
6076 if ((REFERENCE_CLASS_P (*rhs
)
6077 || TREE_CODE (*rhs
) == ADDR_EXPR
)
6078 && maybe_canonicalize_mem_ref_addr (rhs
))
6080 tree
*lhs
= gimple_assign_lhs_ptr (stmt
);
6081 if (REFERENCE_CLASS_P (*lhs
)
6082 && maybe_canonicalize_mem_ref_addr (lhs
))
6084 /* Canonicalize &MEM[ssa_n, CST] to ssa_n p+ CST.
6085 This cannot be done in maybe_canonicalize_mem_ref_addr
6086 as the gimple now has two operands rather than one.
6087 The same reason why this can't be done in
6088 maybe_canonicalize_mem_ref_addr is the same reason why
6089 this can't be done inplace. */
6090 if (!inplace
&& TREE_CODE (*rhs
) == ADDR_EXPR
)
6092 tree inner
= TREE_OPERAND (*rhs
, 0);
6093 if (TREE_CODE (inner
) == MEM_REF
6094 && TREE_CODE (TREE_OPERAND (inner
, 0)) == SSA_NAME
6095 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
)
6097 tree ptr
= TREE_OPERAND (inner
, 0);
6098 tree addon
= TREE_OPERAND (inner
, 1);
6099 addon
= fold_convert (sizetype
, addon
);
6100 gimple_assign_set_rhs_with_ops (gsi
, POINTER_PLUS_EXPR
,
6103 stmt
= gsi_stmt (*gsi
);
6109 /* Canonicalize operand order. */
6110 enum tree_code code
= gimple_assign_rhs_code (stmt
);
6111 if (TREE_CODE_CLASS (code
) == tcc_comparison
6112 || commutative_tree_code (code
)
6113 || commutative_ternary_tree_code (code
))
6115 tree rhs1
= gimple_assign_rhs1 (stmt
);
6116 tree rhs2
= gimple_assign_rhs2 (stmt
);
6117 if (tree_swap_operands_p (rhs1
, rhs2
))
6119 gimple_assign_set_rhs1 (stmt
, rhs2
);
6120 gimple_assign_set_rhs2 (stmt
, rhs1
);
6121 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
6122 gimple_assign_set_rhs_code (stmt
,
6123 swap_tree_comparison (code
));
/* GIMPLE_CALL: canonicalize argument and lhs references, then swap
   commutative argument pairs of recognized (internal) functions.  */
6131 gcall
*call
= as_a
<gcall
*> (stmt
);
6132 for (i
= 0; i
< gimple_call_num_args (call
); ++i
)
6134 tree
*arg
= gimple_call_arg_ptr (call
, i
);
6135 if (REFERENCE_CLASS_P (*arg
)
6136 && maybe_canonicalize_mem_ref_addr (arg
))
6139 tree
*lhs
= gimple_call_lhs_ptr (call
);
6141 && REFERENCE_CLASS_P (*lhs
)
6142 && maybe_canonicalize_mem_ref_addr (lhs
))
6146 combined_fn cfn
= gimple_call_combined_fn (call
);
6147 internal_fn ifn
= associated_internal_fn (cfn
, TREE_TYPE (*lhs
));
6148 int opno
= first_commutative_argument (ifn
);
6151 tree arg1
= gimple_call_arg (call
, opno
);
6152 tree arg2
= gimple_call_arg (call
, opno
+ 1);
6153 if (tree_swap_operands_p (arg1
, arg2
))
6155 gimple_call_set_arg (call
, opno
, arg2
);
6156 gimple_call_set_arg (call
, opno
+ 1, arg1
);
/* GIMPLE_ASM: canonicalize references in output and input operands.  */
6165 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
6166 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
6168 tree link
= gimple_asm_output_op (asm_stmt
, i
);
6169 tree op
= TREE_VALUE (link
);
6170 if (REFERENCE_CLASS_P (op
)
6171 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
6174 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
6176 tree link
= gimple_asm_input_op (asm_stmt
, i
);
6177 tree op
= TREE_VALUE (link
);
6178 if ((REFERENCE_CLASS_P (op
)
6179 || TREE_CODE (op
) == ADDR_EXPR
)
6180 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
/* GIMPLE_DEBUG: canonicalize the bound value (is_debug = true).  */
6186 if (gimple_debug_bind_p (stmt
))
6188 tree
*val
= gimple_debug_bind_get_value_ptr (stmt
);
6190 && (REFERENCE_CLASS_P (*val
)
6191 || TREE_CODE (*val
) == ADDR_EXPR
)
6192 && maybe_canonicalize_mem_ref_addr (val
, true))
/* GIMPLE_COND: canonicalize operand order, swapping the comparison.  */
6198 /* Canonicalize operand order. */
6199 tree lhs
= gimple_cond_lhs (stmt
);
6200 tree rhs
= gimple_cond_rhs (stmt
);
6201 if (tree_swap_operands_p (lhs
, rhs
))
6203 gcond
*gc
= as_a
<gcond
*> (stmt
);
6204 gimple_cond_set_lhs (gc
, rhs
);
6205 gimple_cond_set_rhs (gc
, lhs
);
6206 gimple_cond_set_code (gc
,
6207 swap_tree_comparison (gimple_cond_code (gc
)));
6214 /* Dispatch to pattern-based folding. */
6216 || is_gimple_assign (stmt
)
6217 || gimple_code (stmt
) == GIMPLE_COND
)
6219 gimple_seq seq
= NULL
;
6220 gimple_match_op res_op
;
6221 if (gimple_simplify (stmt
, &res_op
, inplace
? NULL
: &seq
,
6222 valueize
, valueize
))
6224 if (replace_stmt_with_simplification (gsi
, &res_op
, &seq
, inplace
))
6227 gimple_seq_discard (seq
);
6231 stmt
= gsi_stmt (*gsi
);
6233 /* Fold the main computation performed by the statement. */
6234 switch (gimple_code (stmt
))
6238 /* Try to canonicalize for boolean-typed X the comparisons
6239 X == 0, X == 1, X != 0, and X != 1. */
6240 if (gimple_assign_rhs_code (stmt
) == EQ_EXPR
6241 || gimple_assign_rhs_code (stmt
) == NE_EXPR
)
6243 tree lhs
= gimple_assign_lhs (stmt
);
6244 tree op1
= gimple_assign_rhs1 (stmt
);
6245 tree op2
= gimple_assign_rhs2 (stmt
);
6246 tree type
= TREE_TYPE (op1
);
6248 /* Check whether the comparison operands are of the same boolean
6249 type as the result type is.
6250 Check that second operand is an integer-constant with value
6252 if (TREE_CODE (op2
) == INTEGER_CST
6253 && (integer_zerop (op2
) || integer_onep (op2
))
6254 && useless_type_conversion_p (TREE_TYPE (lhs
), type
))
6256 enum tree_code cmp_code
= gimple_assign_rhs_code (stmt
);
6257 bool is_logical_not
= false;
6259 /* X == 0 and X != 1 is a logical-not.of X
6260 X == 1 and X != 0 is X */
6261 if ((cmp_code
== EQ_EXPR
&& integer_zerop (op2
))
6262 || (cmp_code
== NE_EXPR
&& integer_onep (op2
)))
6263 is_logical_not
= true;
6265 if (is_logical_not
== false)
6266 gimple_assign_set_rhs_with_ops (gsi
, TREE_CODE (op1
), op1
);
6267 /* Only for one-bit precision typed X the transformation
6268 !X -> ~X is valied. */
6269 else if (TYPE_PRECISION (type
) == 1)
6270 gimple_assign_set_rhs_with_ops (gsi
, BIT_NOT_EXPR
, op1
);
6271 /* Otherwise we use !X -> X ^ 1. */
6273 gimple_assign_set_rhs_with_ops (gsi
, BIT_XOR_EXPR
, op1
,
6274 build_int_cst (type
, 1));
/* Generic assignment folding via fold_gimple_assign; only accept the
   result when it does not grow the number of RHS operands.  */
6280 unsigned old_num_ops
= gimple_num_ops (stmt
);
6281 tree lhs
= gimple_assign_lhs (stmt
);
6282 tree new_rhs
= fold_gimple_assign (gsi
);
6284 && !useless_type_conversion_p (TREE_TYPE (lhs
),
6285 TREE_TYPE (new_rhs
)))
6286 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
6289 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs
)) < old_num_ops
)
)
6291 gimple_assign_set_rhs_from_tree (gsi
, new_rhs
);
/* Calls are folded by the dedicated helper.  */
6298 changed
|= gimple_fold_call (gsi
, inplace
);
/* Debug binds: fold the bound reference if possible.  */
6302 if (gimple_debug_bind_p (stmt
))
6304 tree val
= gimple_debug_bind_get_value (stmt
);
6305 if (val
&& REFERENCE_CLASS_P (val
))
6307 tree tem
= maybe_fold_reference (val
);
6310 gimple_debug_bind_set_value (stmt
, tem
);
/* Returns: valueize the returned SSA name when a valueizer is given.  */
6319 greturn
*ret_stmt
= as_a
<greturn
*> (stmt
);
6320 tree ret
= gimple_return_retval(ret_stmt
);
6322 if (ret
&& TREE_CODE (ret
) == SSA_NAME
&& valueize
)
6324 tree val
= valueize (ret
);
6325 if (val
&& val
!= ret
6326 && may_propagate_copy (ret
, val
))
6328 gimple_return_set_retval (ret_stmt
, val
);
6338 stmt
= gsi_stmt (*gsi
);
6340 fold_undefer_overflow_warnings (changed
&& !nowarning
, stmt
, 0);
6344 /* Valueization callback that ends up not following SSA edges. */
/* Passed to fold_stmt_1 by fold_stmt/fold_stmt_inplace below; the body
   (not visible in this extraction) takes the tree unused.  */
6347 no_follow_ssa_edges (tree
)
6352 /* Valueization callback that ends up following single-use SSA edges only. */
/* Rejects SSA names that have more than a single use; the return
   statements are not visible in this extraction.  */
6355 follow_single_use_edges (tree val
)
6357 if (TREE_CODE (val
) == SSA_NAME
6358 && !has_single_use (val
))
6363 /* Valueization callback that follows all SSA edges. */
/* Body not visible in this extraction; presumably returns VAL unchanged
   — TODO confirm against upstream sources.  */
6366 follow_all_ssa_edges (tree val
)
6371 /* Fold the statement pointed to by GSI. In some cases, this function may
6372 replace the whole statement with a new one. Returns true iff folding
6374 The statement pointed to by GSI should be in valid gimple form but may
6375 be in unfolded state as resulting from for example constant propagation
6376 which can produce *&x = 0. */
/* Convenience overload: non-inplace folding without following SSA edges.  */
6379 fold_stmt (gimple_stmt_iterator
*gsi
)
6381 return fold_stmt_1 (gsi
, false, no_follow_ssa_edges
);
/* Overload of fold_stmt taking an explicit VALUEIZE callback used to
   look through SSA edges during simplification.  */
6385 fold_stmt (gimple_stmt_iterator
*gsi
, tree (*valueize
) (tree
))
6387 return fold_stmt_1 (gsi
, false, valueize
);
6390 /* Perform the minimal folding on statement *GSI. Only operations like
6391 *&x created by constant propagation are handled. The statement cannot
6392 be replaced with a new one. Return true if the statement was
6393 changed, false otherwise.
6394 The statement *GSI should be in valid gimple form but may
6395 be in unfolded state as resulting from for example constant propagation
6396 which can produce *&x = 0. */
/* The assert verifies the inplace contract: the iterator must still
   point at the very same statement after folding.  */
6399 fold_stmt_inplace (gimple_stmt_iterator
*gsi
)
6401 gimple
*stmt
= gsi_stmt (*gsi
);
6402 bool changed
= fold_stmt_1 (gsi
, true, no_follow_ssa_edges
);
6403 gcc_assert (gsi_stmt (*gsi
) == stmt
);
6407 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6408 if EXPR is null or we don't know how.
6409 If non-null, the result always has boolean type. */
/* Two symmetric arms are visible: the first handles INVERT (note the
   swapped true/false nodes and the inverted comparison code), the second
   the plain case restricted to BOOLEAN_TYPE.  NOTE(review): lossy
   extraction — the branch on INVERT itself and some returns are not
   visible; tokens kept byte-identical.  */
6412 canonicalize_bool (tree expr
, bool invert
)
6418 if (integer_nonzerop (expr
))
6419 return boolean_false_node
;
6420 else if (integer_zerop (expr
))
6421 return boolean_true_node
;
6422 else if (TREE_CODE (expr
) == SSA_NAME
)
6423 return fold_build2 (EQ_EXPR
, boolean_type_node
, expr
,
6424 build_int_cst (TREE_TYPE (expr
), 0));
6425 else if (COMPARISON_CLASS_P (expr
))
6426 return fold_build2 (invert_tree_comparison (TREE_CODE (expr
), false),
6428 TREE_OPERAND (expr
, 0),
6429 TREE_OPERAND (expr
, 1));
6435 if (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
6437 if (integer_nonzerop (expr
))
6438 return boolean_true_node
;
6439 else if (integer_zerop (expr
))
6440 return boolean_false_node
;
6441 else if (TREE_CODE (expr
) == SSA_NAME
)
6442 return fold_build2 (NE_EXPR
, boolean_type_node
, expr
,
6443 build_int_cst (TREE_TYPE (expr
), 0));
6444 else if (COMPARISON_CLASS_P (expr
))
6445 return fold_build2 (TREE_CODE (expr
),
6447 TREE_OPERAND (expr
, 0),
6448 TREE_OPERAND (expr
, 1));
6454 /* Check to see if a boolean expression EXPR is logically equivalent to the
6455 comparison (OP1 CODE OP2). Check for various identities involving
/* NOTE(review): the declaration of local S (a gimple *) and several
   return statements are missing from this lossy extraction; tokens kept
   byte-identical.  */
6459 same_bool_comparison_p (const_tree expr
, enum tree_code code
,
6460 const_tree op1
, const_tree op2
)
6464 /* The obvious case. */
6465 if (TREE_CODE (expr
) == code
6466 && operand_equal_p (TREE_OPERAND (expr
, 0), op1
, 0)
6467 && operand_equal_p (TREE_OPERAND (expr
, 1), op2
, 0))
6470 /* Check for comparing (name, name != 0) and the case where expr
6471 is an SSA_NAME with a definition matching the comparison. */
6472 if (TREE_CODE (expr
) == SSA_NAME
6473 && TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
6475 if (operand_equal_p (expr
, op1
, 0))
6476 return ((code
== NE_EXPR
&& integer_zerop (op2
))
6477 || (code
== EQ_EXPR
&& integer_nonzerop (op2
)));
6478 s
= SSA_NAME_DEF_STMT (expr
);
6479 if (is_gimple_assign (s
)
6480 && gimple_assign_rhs_code (s
) == code
6481 && operand_equal_p (gimple_assign_rhs1 (s
), op1
, 0)
6482 && operand_equal_p (gimple_assign_rhs2 (s
), op2
, 0))
6486 /* If op1 is of the form (name != 0) or (name == 0), and the definition
6487 of name is a comparison, recurse. */
6488 if (TREE_CODE (op1
) == SSA_NAME
6489 && TREE_CODE (TREE_TYPE (op1
)) == BOOLEAN_TYPE
)
6491 s
= SSA_NAME_DEF_STMT (op1
);
6492 if (is_gimple_assign (s
)
6493 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
)
6495 enum tree_code c
= gimple_assign_rhs_code (s
);
6496 if ((c
== NE_EXPR
&& integer_zerop (op2
))
6497 || (c
== EQ_EXPR
&& integer_nonzerop (op2
)))
6498 return same_bool_comparison_p (expr
, c
,
6499 gimple_assign_rhs1 (s
),
6500 gimple_assign_rhs2 (s
));
6501 if ((c
== EQ_EXPR
&& integer_zerop (op2
))
6502 || (c
== NE_EXPR
&& integer_nonzerop (op2
)))
6503 return same_bool_comparison_p (expr
,
6504 invert_tree_comparison (c
, false),
6505 gimple_assign_rhs1 (s
),
6506 gimple_assign_rhs2 (s
));
6512 /* Check to see if two boolean expressions OP1 and OP2 are logically
/* Tries operand_equal_p first, then the smarter SSA-aware identity
   checks via same_bool_comparison_p in both directions.  Return
   statements are not visible in this lossy extraction.  */
6516 same_bool_result_p (const_tree op1
, const_tree op2
)
6518 /* Simple cases first. */
6519 if (operand_equal_p (op1
, op2
, 0))
6522 /* Check the cases where at least one of the operands is a comparison.
6523 These are a bit smarter than operand_equal_p in that they apply some
6524 identifies on SSA_NAMEs. */
6525 if (COMPARISON_CLASS_P (op2
)
6526 && same_bool_comparison_p (op1
, TREE_CODE (op2
),
6527 TREE_OPERAND (op2
, 0),
6528 TREE_OPERAND (op2
, 1)))
6530 if (COMPARISON_CLASS_P (op1
)
6531 && same_bool_comparison_p (op2
, TREE_CODE (op1
),
6532 TREE_OPERAND (op1
, 0),
6533 TREE_OPERAND (op1
, 1)))
6540 /* Forward declarations for some mutually recursive functions. */
6543 and_comparisons_1 (tree type
, enum tree_code code1
, tree op1a
, tree op1b
,
6544 enum tree_code code2
, tree op2a
, tree op2b
, basic_block
);
6546 and_var_with_comparison (tree type
, tree var
, bool invert
,
6547 enum tree_code code2
, tree op2a
, tree op2b
,
6550 and_var_with_comparison_1 (tree type
, gimple
*stmt
,
6551 enum tree_code code2
, tree op2a
, tree op2b
,
6554 or_comparisons_1 (tree
, enum tree_code code1
, tree op1a
, tree op1b
,
6555 enum tree_code code2
, tree op2a
, tree op2b
,
6558 or_var_with_comparison (tree
, tree var
, bool invert
,
6559 enum tree_code code2
, tree op2a
, tree op2b
,
6562 or_var_with_comparison_1 (tree
, gimple
*stmt
,
6563 enum tree_code code2
, tree op2a
, tree op2b
,
6566 /* Helper function for and_comparisons_1: try to simplify the AND of the
6567 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6568 If INVERT is true, invert the value of the VAR before doing the AND.
6569 Return NULL_EXPR if we can't simplify this to a single expression. */
/* Dispatches to or_var_with_comparison_1 with an inverted comparison when
   INVERT (DeMorgan), otherwise to and_var_with_comparison_1, and hands the
   result to canonicalize_bool.  The if/else around the two calls is not
   visible in this lossy extraction.  */
6572 and_var_with_comparison (tree type
, tree var
, bool invert
,
6573 enum tree_code code2
, tree op2a
, tree op2b
,
6574 basic_block outer_cond_bb
)
6577 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
6579 /* We can only deal with variables whose definitions are assignments. */
6580 if (!is_gimple_assign (stmt
))
6583 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6584 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6585 Then we only have to consider the simpler non-inverted cases. */
6587 t
= or_var_with_comparison_1 (type
, stmt
,
6588 invert_tree_comparison (code2
, false),
6589 op2a
, op2b
, outer_cond_bb
);
6591 t
= and_var_with_comparison_1 (type
, stmt
, code2
, op2a
, op2b
,
6593 return canonicalize_bool (t
, invert
);
6596 /* Try to simplify the AND of the ssa variable defined by the assignment
6597 STMT with the comparison specified by (OP2A CODE2 OP2B).
6598 Return NULL_EXPR if we can't simplify this to a single expression. */
/* NOTE(review): lossy extraction — declarations of locals S and T, the
   `partial = t;` save, and various braces/returns are missing between the
   numbered fragments; visible tokens kept byte-identical.  */
6601 and_var_with_comparison_1 (tree type
, gimple
*stmt
,
6602 enum tree_code code2
, tree op2a
, tree op2b
,
6603 basic_block outer_cond_bb
)
6605 tree var
= gimple_assign_lhs (stmt
);
6606 tree true_test_var
= NULL_TREE
;
6607 tree false_test_var
= NULL_TREE
;
6608 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
6610 /* Check for identities like (var AND (var == 0)) => false. */
6611 if (TREE_CODE (op2a
) == SSA_NAME
6612 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
6614 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
6615 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
6617 true_test_var
= op2a
;
6618 if (var
== true_test_var
)
6621 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
6622 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
6624 false_test_var
= op2a
;
6625 if (var
== false_test_var
)
6626 return boolean_false_node
;
6630 /* If the definition is a comparison, recurse on it. */
6631 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
6633 tree t
= and_comparisons_1 (type
, innercode
,
6634 gimple_assign_rhs1 (stmt
),
6635 gimple_assign_rhs2 (stmt
),
6638 op2b
, outer_cond_bb
);
6643 /* If the definition is an AND or OR expression, we may be able to
6644 simplify by reassociating. */
6645 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
6646 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
6648 tree inner1
= gimple_assign_rhs1 (stmt
);
6649 tree inner2
= gimple_assign_rhs2 (stmt
);
6652 tree partial
= NULL_TREE
;
6653 bool is_and
= (innercode
== BIT_AND_EXPR
);
6655 /* Check for boolean identities that don't require recursive examination
6657 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
6658 inner1 AND (inner1 OR inner2) => inner1
6659 !inner1 AND (inner1 AND inner2) => false
6660 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
6661 Likewise for similar cases involving inner2. */
6662 if (inner1
== true_test_var
)
6663 return (is_and
? var
: inner1
);
6664 else if (inner2
== true_test_var
)
6665 return (is_and
? var
: inner2
);
6666 else if (inner1
== false_test_var
)
6668 ? boolean_false_node
6669 : and_var_with_comparison (type
, inner2
, false, code2
, op2a
,
6670 op2b
, outer_cond_bb
));
6671 else if (inner2
== false_test_var
)
6673 ? boolean_false_node
6674 : and_var_with_comparison (type
, inner1
, false, code2
, op2a
,
6675 op2b
, outer_cond_bb
));
6677 /* Next, redistribute/reassociate the AND across the inner tests.
6678 Compute the first partial result, (inner1 AND (op2a code op2b)) */
6679 if (TREE_CODE (inner1
) == SSA_NAME
6680 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
6681 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
6682 && (t
= maybe_fold_and_comparisons (type
, gimple_assign_rhs_code (s
),
6683 gimple_assign_rhs1 (s
),
6684 gimple_assign_rhs2 (s
),
6688 /* Handle the AND case, where we are reassociating:
6689 (inner1 AND inner2) AND (op2a code2 op2b)
6691 If the partial result t is a constant, we win. Otherwise
6692 continue on to try reassociating with the other inner test. */
6695 if (integer_onep (t
))
6697 else if (integer_zerop (t
))
6698 return boolean_false_node
;
6701 /* Handle the OR case, where we are redistributing:
6702 (inner1 OR inner2) AND (op2a code2 op2b)
6703 => (t OR (inner2 AND (op2a code2 op2b))) */
6704 else if (integer_onep (t
))
6705 return boolean_true_node
;
6707 /* Save partial result for later. */
6711 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
6712 if (TREE_CODE (inner2
) == SSA_NAME
6713 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
6714 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
6715 && (t
= maybe_fold_and_comparisons (type
, gimple_assign_rhs_code (s
),
6716 gimple_assign_rhs1 (s
),
6717 gimple_assign_rhs2 (s
),
6721 /* Handle the AND case, where we are reassociating:
6722 (inner1 AND inner2) AND (op2a code2 op2b)
6723 => (inner1 AND t) */
6726 if (integer_onep (t
))
6728 else if (integer_zerop (t
))
6729 return boolean_false_node
;
6730 /* If both are the same, we can apply the identity
6732 else if (partial
&& same_bool_result_p (t
, partial
))
6736 /* Handle the OR case. where we are redistributing:
6737 (inner1 OR inner2) AND (op2a code2 op2b)
6738 => (t OR (inner1 AND (op2a code2 op2b)))
6739 => (t OR partial) */
6742 if (integer_onep (t
))
6743 return boolean_true_node
;
6746 /* We already got a simplification for the other
6747 operand to the redistributed OR expression. The
6748 interesting case is when at least one is false.
6749 Or, if both are the same, we can apply the identity
6751 if (integer_zerop (partial
))
6753 else if (integer_zerop (t
))
6755 else if (same_bool_result_p (t
, partial
))
6764 /* Try to simplify the AND of two comparisons defined by
6765 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6766 If this can be done without constructing an intermediate value,
6767 return the resulting tree; otherwise NULL_TREE is returned.
6768 This function is deliberately asymmetric as it recurses on SSA_DEFs
6769 in the first comparison but not the second. */
/* NOTE(review): lossy extraction — case labels (GIMPLE_ASSIGN/GIMPLE_PHI,
   presumably), the declarations of I and TEMP, braces and several returns
   are missing between the numbered fragments; tokens kept byte-identical.  */
6772 and_comparisons_1 (tree type
, enum tree_code code1
, tree op1a
, tree op1b
,
6773 enum tree_code code2
, tree op2a
, tree op2b
,
6774 basic_block outer_cond_bb
)
6776 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
6778 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
6779 if (operand_equal_p (op1a
, op2a
, 0)
6780 && operand_equal_p (op1b
, op2b
, 0))
6782 /* Result will be either NULL_TREE, or a combined comparison. */
6783 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
6784 TRUTH_ANDIF_EXPR
, code1
, code2
,
6785 truth_type
, op1a
, op1b
);
6790 /* Likewise the swapped case of the above. */
6791 if (operand_equal_p (op1a
, op2b
, 0)
6792 && operand_equal_p (op1b
, op2a
, 0))
6794 /* Result will be either NULL_TREE, or a combined comparison. */
6795 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
6796 TRUTH_ANDIF_EXPR
, code1
,
6797 swap_tree_comparison (code2
),
6798 truth_type
, op1a
, op1b
);
6803 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6804 NAME's definition is a truth value. See if there are any simplifications
6805 that can be done against the NAME's definition. */
6806 if (TREE_CODE (op1a
) == SSA_NAME
6807 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
6808 && (integer_zerop (op1b
) || integer_onep (op1b
)))
6810 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
6811 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
6812 gimple
*stmt
= SSA_NAME_DEF_STMT (op1a
);
6813 switch (gimple_code (stmt
))
6816 /* Try to simplify by copy-propagating the definition. */
6817 return and_var_with_comparison (type
, op1a
, invert
, code2
, op2a
,
6818 op2b
, outer_cond_bb
);
6821 /* If every argument to the PHI produces the same result when
6822 ANDed with the second comparison, we win.
6823 Do not do this unless the type is bool since we need a bool
6824 result here anyway. */
6825 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
6827 tree result
= NULL_TREE
;
6829 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
6831 tree arg
= gimple_phi_arg_def (stmt
, i
);
6833 /* If this PHI has itself as an argument, ignore it.
6834 If all the other args produce the same result,
6836 if (arg
== gimple_phi_result (stmt
))
6838 else if (TREE_CODE (arg
) == INTEGER_CST
)
6840 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
6843 result
= boolean_false_node
;
6844 else if (!integer_zerop (result
))
6848 result
= fold_build2 (code2
, boolean_type_node
,
6850 else if (!same_bool_comparison_p (result
,
6854 else if (TREE_CODE (arg
) == SSA_NAME
6855 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
6858 gimple
*def_stmt
= SSA_NAME_DEF_STMT (arg
);
6859 /* In simple cases we can look through PHI nodes,
6860 but we have to be careful with loops.
6862 if (! dom_info_available_p (CDI_DOMINATORS
)
6863 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
6864 || dominated_by_p (CDI_DOMINATORS
,
6865 gimple_bb (def_stmt
),
6868 temp
= and_var_with_comparison (type
, arg
, invert
, code2
,
6875 else if (!same_bool_result_p (result
, temp
))
6891 static basic_block fosa_bb
;
6893 follow_outer_ssa_edges (tree val
)
6895 if (TREE_CODE (val
) == SSA_NAME
6896 && !SSA_NAME_IS_DEFAULT_DEF (val
))
6898 basic_block def_bb
= gimple_bb (SSA_NAME_DEF_STMT (val
));
6900 || def_bb
== fosa_bb
6901 || (dom_info_available_p (CDI_DOMINATORS
)
6902 && (def_bb
== fosa_bb
6903 || dominated_by_p (CDI_DOMINATORS
, fosa_bb
, def_bb
))))
6910 /* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons
6911 : try to simplify the AND/OR of the ssa variable VAR with the comparison
6912 specified by (OP2A CODE2 OP2B) from match.pd. Return NULL_EXPR if we can't
6913 simplify this to a single expression. As we are going to lower the cost
6914 of building SSA names / gimple stmts significantly, we need to allocate
6915 them ont the stack. This will cause the code to be a bit ugly. */
6918 maybe_fold_comparisons_from_match_pd (tree type
, enum tree_code code
,
6919 enum tree_code code1
,
6920 tree op1a
, tree op1b
,
6921 enum tree_code code2
, tree op2a
,
6923 basic_block outer_cond_bb
)
6925 /* Allocate gimple stmt1 on the stack. */
6927 = (gassign
*) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN
, 3));
6928 gimple_init (stmt1
, GIMPLE_ASSIGN
, 3);
6929 gimple_assign_set_rhs_code (stmt1
, code1
);
6930 gimple_assign_set_rhs1 (stmt1
, op1a
);
6931 gimple_assign_set_rhs2 (stmt1
, op1b
);
6932 gimple_set_bb (stmt1
, NULL
);
6934 /* Allocate gimple stmt2 on the stack. */
6936 = (gassign
*) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN
, 3));
6937 gimple_init (stmt2
, GIMPLE_ASSIGN
, 3);
6938 gimple_assign_set_rhs_code (stmt2
, code2
);
6939 gimple_assign_set_rhs1 (stmt2
, op2a
);
6940 gimple_assign_set_rhs2 (stmt2
, op2b
);
6941 gimple_set_bb (stmt2
, NULL
);
6943 /* Allocate SSA names(lhs1) on the stack. */
6944 tree lhs1
= (tree
)XALLOCA (tree_ssa_name
);
6945 memset (lhs1
, 0, sizeof (tree_ssa_name
));
6946 TREE_SET_CODE (lhs1
, SSA_NAME
);
6947 TREE_TYPE (lhs1
) = type
;
6948 init_ssa_name_imm_use (lhs1
);
6950 /* Allocate SSA names(lhs2) on the stack. */
6951 tree lhs2
= (tree
)XALLOCA (tree_ssa_name
);
6952 memset (lhs2
, 0, sizeof (tree_ssa_name
));
6953 TREE_SET_CODE (lhs2
, SSA_NAME
);
6954 TREE_TYPE (lhs2
) = type
;
6955 init_ssa_name_imm_use (lhs2
);
6957 gimple_assign_set_lhs (stmt1
, lhs1
);
6958 gimple_assign_set_lhs (stmt2
, lhs2
);
6960 gimple_match_op
op (gimple_match_cond::UNCOND
, code
,
6961 type
, gimple_assign_lhs (stmt1
),
6962 gimple_assign_lhs (stmt2
));
6963 fosa_bb
= outer_cond_bb
;
6964 if (op
.resimplify (NULL
, (!outer_cond_bb
6965 ? follow_all_ssa_edges
: follow_outer_ssa_edges
)))
6967 if (gimple_simplified_result_is_gimple_val (&op
))
6969 tree res
= op
.ops
[0];
6971 return build2 (code1
, type
, op1a
, op1b
);
6972 else if (res
== lhs2
)
6973 return build2 (code2
, type
, op2a
, op2b
);
6977 else if (op
.code
.is_tree_code ()
6978 && TREE_CODE_CLASS ((tree_code
)op
.code
) == tcc_comparison
)
6980 tree op0
= op
.ops
[0];
6981 tree op1
= op
.ops
[1];
6982 if (op0
== lhs1
|| op0
== lhs2
|| op1
== lhs1
|| op1
== lhs2
)
6983 return NULL_TREE
; /* not simple */
6985 return build2 ((enum tree_code
)op
.code
, op
.type
, op0
, op1
);
6992 /* Try to simplify the AND of two comparisons, specified by
6993 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6994 If this can be simplified to a single expression (without requiring
6995 introducing more SSA variables to hold intermediate values),
6996 return the resulting tree. Otherwise return NULL_TREE.
6997 If the result expression is non-null, it has boolean type. */
7000 maybe_fold_and_comparisons (tree type
,
7001 enum tree_code code1
, tree op1a
, tree op1b
,
7002 enum tree_code code2
, tree op2a
, tree op2b
,
7003 basic_block outer_cond_bb
)
7005 if (tree t
= and_comparisons_1 (type
, code1
, op1a
, op1b
, code2
, op2a
, op2b
,
7009 if (tree t
= and_comparisons_1 (type
, code2
, op2a
, op2b
, code1
, op1a
, op1b
,
7013 if (tree t
= maybe_fold_comparisons_from_match_pd (type
, BIT_AND_EXPR
, code1
,
7014 op1a
, op1b
, code2
, op2a
,
7015 op2b
, outer_cond_bb
))
7021 /* Helper function for or_comparisons_1: try to simplify the OR of the
7022 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
7023 If INVERT is true, invert the value of VAR before doing the OR.
7024 Return NULL_EXPR if we can't simplify this to a single expression. */
7027 or_var_with_comparison (tree type
, tree var
, bool invert
,
7028 enum tree_code code2
, tree op2a
, tree op2b
,
7029 basic_block outer_cond_bb
)
7032 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
7034 /* We can only deal with variables whose definitions are assignments. */
7035 if (!is_gimple_assign (stmt
))
7038 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
7039 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
7040 Then we only have to consider the simpler non-inverted cases. */
7042 t
= and_var_with_comparison_1 (type
, stmt
,
7043 invert_tree_comparison (code2
, false),
7044 op2a
, op2b
, outer_cond_bb
);
7046 t
= or_var_with_comparison_1 (type
, stmt
, code2
, op2a
, op2b
,
7048 return canonicalize_bool (t
, invert
);
7051 /* Try to simplify the OR of the ssa variable defined by the assignment
7052 STMT with the comparison specified by (OP2A CODE2 OP2B).
7053 Return NULL_EXPR if we can't simplify this to a single expression. */
7056 or_var_with_comparison_1 (tree type
, gimple
*stmt
,
7057 enum tree_code code2
, tree op2a
, tree op2b
,
7058 basic_block outer_cond_bb
)
7060 tree var
= gimple_assign_lhs (stmt
);
7061 tree true_test_var
= NULL_TREE
;
7062 tree false_test_var
= NULL_TREE
;
7063 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
7065 /* Check for identities like (var OR (var != 0)) => true . */
7066 if (TREE_CODE (op2a
) == SSA_NAME
7067 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
7069 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
7070 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
7072 true_test_var
= op2a
;
7073 if (var
== true_test_var
)
7076 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
7077 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
7079 false_test_var
= op2a
;
7080 if (var
== false_test_var
)
7081 return boolean_true_node
;
7085 /* If the definition is a comparison, recurse on it. */
7086 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
7088 tree t
= or_comparisons_1 (type
, innercode
,
7089 gimple_assign_rhs1 (stmt
),
7090 gimple_assign_rhs2 (stmt
),
7091 code2
, op2a
, op2b
, outer_cond_bb
);
7096 /* If the definition is an AND or OR expression, we may be able to
7097 simplify by reassociating. */
7098 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
7099 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
7101 tree inner1
= gimple_assign_rhs1 (stmt
);
7102 tree inner2
= gimple_assign_rhs2 (stmt
);
7105 tree partial
= NULL_TREE
;
7106 bool is_or
= (innercode
== BIT_IOR_EXPR
);
7108 /* Check for boolean identities that don't require recursive examination
7110 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
7111 inner1 OR (inner1 AND inner2) => inner1
7112 !inner1 OR (inner1 OR inner2) => true
7113 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
7115 if (inner1
== true_test_var
)
7116 return (is_or
? var
: inner1
);
7117 else if (inner2
== true_test_var
)
7118 return (is_or
? var
: inner2
);
7119 else if (inner1
== false_test_var
)
7122 : or_var_with_comparison (type
, inner2
, false, code2
, op2a
,
7123 op2b
, outer_cond_bb
));
7124 else if (inner2
== false_test_var
)
7127 : or_var_with_comparison (type
, inner1
, false, code2
, op2a
,
7128 op2b
, outer_cond_bb
));
7130 /* Next, redistribute/reassociate the OR across the inner tests.
7131 Compute the first partial result, (inner1 OR (op2a code op2b)) */
7132 if (TREE_CODE (inner1
) == SSA_NAME
7133 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
7134 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
7135 && (t
= maybe_fold_or_comparisons (type
, gimple_assign_rhs_code (s
),
7136 gimple_assign_rhs1 (s
),
7137 gimple_assign_rhs2 (s
),
7141 /* Handle the OR case, where we are reassociating:
7142 (inner1 OR inner2) OR (op2a code2 op2b)
7144 If the partial result t is a constant, we win. Otherwise
7145 continue on to try reassociating with the other inner test. */
7148 if (integer_onep (t
))
7149 return boolean_true_node
;
7150 else if (integer_zerop (t
))
7154 /* Handle the AND case, where we are redistributing:
7155 (inner1 AND inner2) OR (op2a code2 op2b)
7156 => (t AND (inner2 OR (op2a code op2b))) */
7157 else if (integer_zerop (t
))
7158 return boolean_false_node
;
7160 /* Save partial result for later. */
7164 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
7165 if (TREE_CODE (inner2
) == SSA_NAME
7166 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
7167 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
7168 && (t
= maybe_fold_or_comparisons (type
, gimple_assign_rhs_code (s
),
7169 gimple_assign_rhs1 (s
),
7170 gimple_assign_rhs2 (s
),
7174 /* Handle the OR case, where we are reassociating:
7175 (inner1 OR inner2) OR (op2a code2 op2b)
7177 => (t OR partial) */
7180 if (integer_zerop (t
))
7182 else if (integer_onep (t
))
7183 return boolean_true_node
;
7184 /* If both are the same, we can apply the identity
7186 else if (partial
&& same_bool_result_p (t
, partial
))
7190 /* Handle the AND case, where we are redistributing:
7191 (inner1 AND inner2) OR (op2a code2 op2b)
7192 => (t AND (inner1 OR (op2a code2 op2b)))
7193 => (t AND partial) */
7196 if (integer_zerop (t
))
7197 return boolean_false_node
;
7200 /* We already got a simplification for the other
7201 operand to the redistributed AND expression. The
7202 interesting case is when at least one is true.
7203 Or, if both are the same, we can apply the identity
7205 if (integer_onep (partial
))
7207 else if (integer_onep (t
))
7209 else if (same_bool_result_p (t
, partial
))
7218 /* Try to simplify the OR of two comparisons defined by
7219 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7220 If this can be done without constructing an intermediate value,
7221 return the resulting tree; otherwise NULL_TREE is returned.
7222 This function is deliberately asymmetric as it recurses on SSA_DEFs
7223 in the first comparison but not the second. */
7226 or_comparisons_1 (tree type
, enum tree_code code1
, tree op1a
, tree op1b
,
7227 enum tree_code code2
, tree op2a
, tree op2b
,
7228 basic_block outer_cond_bb
)
7230 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
7232 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
7233 if (operand_equal_p (op1a
, op2a
, 0)
7234 && operand_equal_p (op1b
, op2b
, 0))
7236 /* Result will be either NULL_TREE, or a combined comparison. */
7237 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
7238 TRUTH_ORIF_EXPR
, code1
, code2
,
7239 truth_type
, op1a
, op1b
);
7244 /* Likewise the swapped case of the above. */
7245 if (operand_equal_p (op1a
, op2b
, 0)
7246 && operand_equal_p (op1b
, op2a
, 0))
7248 /* Result will be either NULL_TREE, or a combined comparison. */
7249 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
7250 TRUTH_ORIF_EXPR
, code1
,
7251 swap_tree_comparison (code2
),
7252 truth_type
, op1a
, op1b
);
7257 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
7258 NAME's definition is a truth value. See if there are any simplifications
7259 that can be done against the NAME's definition. */
7260 if (TREE_CODE (op1a
) == SSA_NAME
7261 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
7262 && (integer_zerop (op1b
) || integer_onep (op1b
)))
7264 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
7265 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
7266 gimple
*stmt
= SSA_NAME_DEF_STMT (op1a
);
7267 switch (gimple_code (stmt
))
7270 /* Try to simplify by copy-propagating the definition. */
7271 return or_var_with_comparison (type
, op1a
, invert
, code2
, op2a
,
7272 op2b
, outer_cond_bb
);
7275 /* If every argument to the PHI produces the same result when
7276 ORed with the second comparison, we win.
7277 Do not do this unless the type is bool since we need a bool
7278 result here anyway. */
7279 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
7281 tree result
= NULL_TREE
;
7283 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
7285 tree arg
= gimple_phi_arg_def (stmt
, i
);
7287 /* If this PHI has itself as an argument, ignore it.
7288 If all the other args produce the same result,
7290 if (arg
== gimple_phi_result (stmt
))
7292 else if (TREE_CODE (arg
) == INTEGER_CST
)
7294 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
7297 result
= boolean_true_node
;
7298 else if (!integer_onep (result
))
7302 result
= fold_build2 (code2
, boolean_type_node
,
7304 else if (!same_bool_comparison_p (result
,
7308 else if (TREE_CODE (arg
) == SSA_NAME
7309 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
7312 gimple
*def_stmt
= SSA_NAME_DEF_STMT (arg
);
7313 /* In simple cases we can look through PHI nodes,
7314 but we have to be careful with loops.
7316 if (! dom_info_available_p (CDI_DOMINATORS
)
7317 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
7318 || dominated_by_p (CDI_DOMINATORS
,
7319 gimple_bb (def_stmt
),
7322 temp
= or_var_with_comparison (type
, arg
, invert
, code2
,
7323 op2a
, op2b
, outer_cond_bb
);
7328 else if (!same_bool_result_p (result
, temp
))
7344 /* Try to simplify the OR of two comparisons, specified by
7345 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
7346 If this can be simplified to a single expression (without requiring
7347 introducing more SSA variables to hold intermediate values),
7348 return the resulting tree. Otherwise return NULL_TREE.
7349 If the result expression is non-null, it has boolean type. */
7352 maybe_fold_or_comparisons (tree type
,
7353 enum tree_code code1
, tree op1a
, tree op1b
,
7354 enum tree_code code2
, tree op2a
, tree op2b
,
7355 basic_block outer_cond_bb
)
7357 if (tree t
= or_comparisons_1 (type
, code1
, op1a
, op1b
, code2
, op2a
, op2b
,
7361 if (tree t
= or_comparisons_1 (type
, code2
, op2a
, op2b
, code1
, op1a
, op1b
,
7365 if (tree t
= maybe_fold_comparisons_from_match_pd (type
, BIT_IOR_EXPR
, code1
,
7366 op1a
, op1b
, code2
, op2a
,
7367 op2b
, outer_cond_bb
))
7373 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7375 Either NULL_TREE, a simplified but non-constant or a constant
7378 ??? This should go into a gimple-fold-inline.h file to be eventually
7379 privatized with the single valueize function used in the various TUs
7380 to avoid the indirect function call overhead. */
7383 gimple_fold_stmt_to_constant_1 (gimple
*stmt
, tree (*valueize
) (tree
),
7384 tree (*gvalueize
) (tree
))
7386 gimple_match_op res_op
;
7387 /* ??? The SSA propagators do not correctly deal with following SSA use-def
7388 edges if there are intermediate VARYING defs. For this reason
7389 do not follow SSA edges here even though SCCVN can technically
7390 just deal fine with that. */
7391 if (gimple_simplify (stmt
, &res_op
, NULL
, gvalueize
, valueize
))
7393 tree res
= NULL_TREE
;
7394 if (gimple_simplified_result_is_gimple_val (&res_op
))
7395 res
= res_op
.ops
[0];
7396 else if (mprts_hook
)
7397 res
= mprts_hook (&res_op
);
7400 if (dump_file
&& dump_flags
& TDF_DETAILS
)
7402 fprintf (dump_file
, "Match-and-simplified ");
7403 print_gimple_expr (dump_file
, stmt
, 0, TDF_SLIM
);
7404 fprintf (dump_file
, " to ");
7405 print_generic_expr (dump_file
, res
);
7406 fprintf (dump_file
, "\n");
7412 location_t loc
= gimple_location (stmt
);
7413 switch (gimple_code (stmt
))
7417 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
7419 switch (get_gimple_rhs_class (subcode
))
7421 case GIMPLE_SINGLE_RHS
:
7423 tree rhs
= gimple_assign_rhs1 (stmt
);
7424 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
7426 if (TREE_CODE (rhs
) == SSA_NAME
)
7428 /* If the RHS is an SSA_NAME, return its known constant value,
7430 return (*valueize
) (rhs
);
7432 /* Handle propagating invariant addresses into address
7434 else if (TREE_CODE (rhs
) == ADDR_EXPR
7435 && !is_gimple_min_invariant (rhs
))
7437 poly_int64 offset
= 0;
7439 base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs
, 0),
7443 && (CONSTANT_CLASS_P (base
)
7444 || decl_address_invariant_p (base
)))
7445 return build_invariant_address (TREE_TYPE (rhs
),
7448 else if (TREE_CODE (rhs
) == CONSTRUCTOR
7449 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
7450 && known_eq (CONSTRUCTOR_NELTS (rhs
),
7451 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
7456 nelts
= CONSTRUCTOR_NELTS (rhs
);
7457 tree_vector_builder
vec (TREE_TYPE (rhs
), nelts
, 1);
7458 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
7460 val
= (*valueize
) (val
);
7461 if (TREE_CODE (val
) == INTEGER_CST
7462 || TREE_CODE (val
) == REAL_CST
7463 || TREE_CODE (val
) == FIXED_CST
)
7464 vec
.quick_push (val
);
7469 return vec
.build ();
7471 if (subcode
== OBJ_TYPE_REF
)
7473 tree val
= (*valueize
) (OBJ_TYPE_REF_EXPR (rhs
));
7474 /* If callee is constant, we can fold away the wrapper. */
7475 if (is_gimple_min_invariant (val
))
7479 if (kind
== tcc_reference
)
7481 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
7482 || TREE_CODE (rhs
) == REALPART_EXPR
7483 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
7484 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
7486 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
7487 return fold_unary_loc (EXPR_LOCATION (rhs
),
7489 TREE_TYPE (rhs
), val
);
7491 else if (TREE_CODE (rhs
) == BIT_FIELD_REF
7492 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
7494 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
7495 return fold_ternary_loc (EXPR_LOCATION (rhs
),
7497 TREE_TYPE (rhs
), val
,
7498 TREE_OPERAND (rhs
, 1),
7499 TREE_OPERAND (rhs
, 2));
7501 else if (TREE_CODE (rhs
) == MEM_REF
7502 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
7504 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
7505 if (TREE_CODE (val
) == ADDR_EXPR
7506 && is_gimple_min_invariant (val
))
7508 tree tem
= fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
7510 TREE_OPERAND (rhs
, 1));
7515 return fold_const_aggregate_ref_1 (rhs
, valueize
);
7517 else if (kind
== tcc_declaration
)
7518 return get_symbol_constant_value (rhs
);
7522 case GIMPLE_UNARY_RHS
:
7525 case GIMPLE_BINARY_RHS
:
7526 /* Translate &x + CST into an invariant form suitable for
7527 further propagation. */
7528 if (subcode
== POINTER_PLUS_EXPR
)
7530 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
7531 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
7532 if (TREE_CODE (op0
) == ADDR_EXPR
7533 && TREE_CODE (op1
) == INTEGER_CST
)
7535 tree off
= fold_convert (ptr_type_node
, op1
);
7537 (loc
, ADDR_EXPR
, TREE_TYPE (op0
),
7538 fold_build2 (MEM_REF
,
7539 TREE_TYPE (TREE_TYPE (op0
)),
7540 unshare_expr (op0
), off
));
7543 /* Canonicalize bool != 0 and bool == 0 appearing after
7544 valueization. While gimple_simplify handles this
7545 it can get confused by the ~X == 1 -> X == 0 transform
7546 which we cant reduce to a SSA name or a constant
7547 (and we have no way to tell gimple_simplify to not
7548 consider those transforms in the first place). */
7549 else if (subcode
== EQ_EXPR
7550 || subcode
== NE_EXPR
)
7552 tree lhs
= gimple_assign_lhs (stmt
);
7553 tree op0
= gimple_assign_rhs1 (stmt
);
7554 if (useless_type_conversion_p (TREE_TYPE (lhs
),
7557 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
7558 op0
= (*valueize
) (op0
);
7559 if (TREE_CODE (op0
) == INTEGER_CST
)
7560 std::swap (op0
, op1
);
7561 if (TREE_CODE (op1
) == INTEGER_CST
7562 && ((subcode
== NE_EXPR
&& integer_zerop (op1
))
7563 || (subcode
== EQ_EXPR
&& integer_onep (op1
))))
7569 case GIMPLE_TERNARY_RHS
:
7571 /* Handle ternary operators that can appear in GIMPLE form. */
7572 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
7573 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
7574 tree op2
= (*valueize
) (gimple_assign_rhs3 (stmt
));
7575 return fold_ternary_loc (loc
, subcode
,
7576 TREE_TYPE (gimple_assign_lhs (stmt
)),
7588 gcall
*call_stmt
= as_a
<gcall
*> (stmt
);
7590 if (gimple_call_internal_p (stmt
))
7592 enum tree_code subcode
= ERROR_MARK
;
7593 switch (gimple_call_internal_fn (stmt
))
7595 case IFN_UBSAN_CHECK_ADD
:
7596 subcode
= PLUS_EXPR
;
7598 case IFN_UBSAN_CHECK_SUB
:
7599 subcode
= MINUS_EXPR
;
7601 case IFN_UBSAN_CHECK_MUL
:
7602 subcode
= MULT_EXPR
;
7604 case IFN_BUILTIN_EXPECT
:
7606 tree arg0
= gimple_call_arg (stmt
, 0);
7607 tree op0
= (*valueize
) (arg0
);
7608 if (TREE_CODE (op0
) == INTEGER_CST
)
7615 tree arg0
= gimple_call_arg (stmt
, 0);
7616 tree arg1
= gimple_call_arg (stmt
, 1);
7617 tree op0
= (*valueize
) (arg0
);
7618 tree op1
= (*valueize
) (arg1
);
7620 if (TREE_CODE (op0
) != INTEGER_CST
7621 || TREE_CODE (op1
) != INTEGER_CST
)
7626 /* x * 0 = 0 * x = 0 without overflow. */
7627 if (integer_zerop (op0
) || integer_zerop (op1
))
7628 return build_zero_cst (TREE_TYPE (arg0
));
7631 /* y - y = 0 without overflow. */
7632 if (operand_equal_p (op0
, op1
, 0))
7633 return build_zero_cst (TREE_TYPE (arg0
));
7640 = fold_binary_loc (loc
, subcode
, TREE_TYPE (arg0
), op0
, op1
);
7642 && TREE_CODE (res
) == INTEGER_CST
7643 && !TREE_OVERFLOW (res
))
7648 fn
= (*valueize
) (gimple_call_fn (stmt
));
7649 if (TREE_CODE (fn
) == ADDR_EXPR
7650 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
7651 && fndecl_built_in_p (TREE_OPERAND (fn
, 0))
7652 && gimple_builtin_call_types_compatible_p (stmt
,
7653 TREE_OPERAND (fn
, 0)))
7655 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
7658 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
7659 args
[i
] = (*valueize
) (gimple_call_arg (stmt
, i
));
7660 retval
= fold_builtin_call_array (loc
,
7661 gimple_call_return_type (call_stmt
),
7662 fn
, gimple_call_num_args (stmt
), args
);
7665 /* fold_call_expr wraps the result inside a NOP_EXPR. */
7666 STRIP_NOPS (retval
);
7667 retval
= fold_convert (gimple_call_return_type (call_stmt
),
7680 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7681 Returns NULL_TREE if folding to a constant is not possible, otherwise
7682 returns a constant according to is_gimple_min_invariant. */
7685 gimple_fold_stmt_to_constant (gimple
*stmt
, tree (*valueize
) (tree
))
7687 tree res
= gimple_fold_stmt_to_constant_1 (stmt
, valueize
);
7688 if (res
&& is_gimple_min_invariant (res
))
7694 /* The following set of functions are supposed to fold references using
7695 their constant initializers. */
7697 /* See if we can find constructor defining value of BASE.
7698 When we know the consructor with constant offset (such as
7699 base is array[40] and we do know constructor of array), then
7700 BIT_OFFSET is adjusted accordingly.
7702 As a special case, return error_mark_node when constructor
7703 is not explicitly available, but it is known to be zero
7704 such as 'static const int a;'. */
7706 get_base_constructor (tree base
, poly_int64_pod
*bit_offset
,
7707 tree (*valueize
)(tree
))
7709 poly_int64 bit_offset2
, size
, max_size
;
7712 if (TREE_CODE (base
) == MEM_REF
)
7714 poly_offset_int boff
= *bit_offset
+ mem_ref_offset (base
) * BITS_PER_UNIT
;
7715 if (!boff
.to_shwi (bit_offset
))
7719 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
7720 base
= valueize (TREE_OPERAND (base
, 0));
7721 if (!base
|| TREE_CODE (base
) != ADDR_EXPR
)
7723 base
= TREE_OPERAND (base
, 0);
7726 && TREE_CODE (base
) == SSA_NAME
)
7727 base
= valueize (base
);
7729 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
7730 DECL_INITIAL. If BASE is a nested reference into another
7731 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
7732 the inner reference. */
7733 switch (TREE_CODE (base
))
7738 tree init
= ctor_for_folding (base
);
7740 /* Our semantic is exact opposite of ctor_for_folding;
7741 NULL means unknown, while error_mark_node is 0. */
7742 if (init
== error_mark_node
)
7745 return error_mark_node
;
7749 case VIEW_CONVERT_EXPR
:
7750 return get_base_constructor (TREE_OPERAND (base
, 0),
7751 bit_offset
, valueize
);
7755 base
= get_ref_base_and_extent (base
, &bit_offset2
, &size
, &max_size
,
7757 if (!known_size_p (max_size
) || maybe_ne (size
, max_size
))
7759 *bit_offset
+= bit_offset2
;
7760 return get_base_constructor (base
, bit_offset
, valueize
);
7766 if (CONSTANT_CLASS_P (base
))
7773 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
7774 to the memory at bit OFFSET. When non-null, TYPE is the expected
7775 type of the reference; otherwise the type of the referenced element
7776 is used instead. When SIZE is zero, attempt to fold a reference to
7777 the entire element which OFFSET refers to. Increment *SUBOFF by
7778 the bit offset of the accessed element. */
7781 fold_array_ctor_reference (tree type
, tree ctor
,
7782 unsigned HOST_WIDE_INT offset
,
7783 unsigned HOST_WIDE_INT size
,
7785 unsigned HOST_WIDE_INT
*suboff
)
7787 offset_int low_bound
;
7788 offset_int elt_size
;
7789 offset_int access_index
;
7790 tree domain_type
= NULL_TREE
;
7791 HOST_WIDE_INT inner_offset
;
7793 /* Compute low bound and elt size. */
7794 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
7795 domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
7796 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
7798 /* Static constructors for variably sized objects make no sense. */
7799 if (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) != INTEGER_CST
)
7801 low_bound
= wi::to_offset (TYPE_MIN_VALUE (domain_type
));
7805 /* Static constructors for variably sized objects make no sense. */
7806 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
)))) != INTEGER_CST
)
7808 elt_size
= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))));
7810 /* When TYPE is non-null, verify that it specifies a constant-sized
7811 access of a multiple of the array element size. Avoid division
7812 by zero below when ELT_SIZE is zero, such as with the result of
7813 an initializer for a zero-length array or an empty struct. */
7816 && (!TYPE_SIZE_UNIT (type
)
7817 || TREE_CODE (TYPE_SIZE_UNIT (type
)) != INTEGER_CST
)))
7820 /* Compute the array index we look for. */
7821 access_index
= wi::udiv_trunc (offset_int (offset
/ BITS_PER_UNIT
),
7823 access_index
+= low_bound
;
7825 /* And offset within the access. */
7826 inner_offset
= offset
% (elt_size
.to_uhwi () * BITS_PER_UNIT
);
7828 unsigned HOST_WIDE_INT elt_sz
= elt_size
.to_uhwi ();
7829 if (size
> elt_sz
* BITS_PER_UNIT
)
7831 /* native_encode_expr constraints. */
7832 if (size
> MAX_BITSIZE_MODE_ANY_MODE
7833 || size
% BITS_PER_UNIT
!= 0
7834 || inner_offset
% BITS_PER_UNIT
!= 0
7835 || elt_sz
> MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
)
7839 tree val
= get_array_ctor_element_at_index (ctor
, access_index
,
7841 if (!val
&& ctor_idx
>= CONSTRUCTOR_NELTS (ctor
))
7842 return build_zero_cst (type
);
7844 /* native-encode adjacent ctor elements. */
7845 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
7846 unsigned bufoff
= 0;
7847 offset_int index
= 0;
7848 offset_int max_index
= access_index
;
7849 constructor_elt
*elt
= CONSTRUCTOR_ELT (ctor
, ctor_idx
);
7851 val
= build_zero_cst (TREE_TYPE (TREE_TYPE (ctor
)));
7852 else if (!CONSTANT_CLASS_P (val
))
7856 else if (TREE_CODE (elt
->index
) == RANGE_EXPR
)
7858 index
= wi::to_offset (TREE_OPERAND (elt
->index
, 0));
7859 max_index
= wi::to_offset (TREE_OPERAND (elt
->index
, 1));
7862 index
= max_index
= wi::to_offset (elt
->index
);
7863 index
= wi::umax (index
, access_index
);
7866 if (bufoff
+ elt_sz
> sizeof (buf
))
7867 elt_sz
= sizeof (buf
) - bufoff
;
7868 int len
= native_encode_expr (val
, buf
+ bufoff
, elt_sz
,
7869 inner_offset
/ BITS_PER_UNIT
);
7870 if (len
!= (int) elt_sz
- inner_offset
/ BITS_PER_UNIT
)
7876 if (wi::cmpu (access_index
, index
) == 0)
7878 else if (wi::cmpu (access_index
, max_index
) > 0)
7881 if (ctor_idx
>= CONSTRUCTOR_NELTS (ctor
))
7883 val
= build_zero_cst (TREE_TYPE (TREE_TYPE (ctor
)));
7888 elt
= CONSTRUCTOR_ELT (ctor
, ctor_idx
);
7890 max_index
= access_index
;
7893 else if (TREE_CODE (elt
->index
) == RANGE_EXPR
)
7895 index
= wi::to_offset (TREE_OPERAND (elt
->index
, 0));
7896 max_index
= wi::to_offset (TREE_OPERAND (elt
->index
, 1));
7899 index
= max_index
= wi::to_offset (elt
->index
);
7900 index
= wi::umax (index
, access_index
);
7901 if (wi::cmpu (access_index
, index
) == 0)
7904 val
= build_zero_cst (TREE_TYPE (TREE_TYPE (ctor
)));
7908 while (bufoff
< size
/ BITS_PER_UNIT
);
7910 return native_interpret_expr (type
, buf
, size
/ BITS_PER_UNIT
);
7913 if (tree val
= get_array_ctor_element_at_index (ctor
, access_index
))
7915 if (!size
&& TREE_CODE (val
) != CONSTRUCTOR
)
7917 /* For the final reference to the entire accessed element
7918 (SIZE is zero), reset INNER_OFFSET, disegard TYPE (which
7919 may be null) in favor of the type of the element, and set
7920 SIZE to the size of the accessed element. */
7922 type
= TREE_TYPE (val
);
7923 size
= elt_sz
* BITS_PER_UNIT
;
7925 else if (size
&& access_index
< CONSTRUCTOR_NELTS (ctor
) - 1
7926 && TREE_CODE (val
) == CONSTRUCTOR
7927 && (elt_sz
* BITS_PER_UNIT
- inner_offset
) < size
)
7928 /* If this isn't the last element in the CTOR and a CTOR itself
7929 and it does not cover the whole object we are requesting give up
7930 since we're not set up for combining from multiple CTORs. */
7933 *suboff
+= access_index
.to_uhwi () * elt_sz
* BITS_PER_UNIT
;
7934 return fold_ctor_reference (type
, val
, inner_offset
, size
, from_decl
,
7938 /* Memory not explicitly mentioned in constructor is 0 (or
7939 the reference is out of range). */
7940 return type
? build_zero_cst (type
) : NULL_TREE
;
7943 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
7944 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
7945 is the expected type of the reference; otherwise the type of
7946 the referenced member is used instead. When SIZE is zero,
7947 attempt to fold a reference to the entire member which OFFSET
7948 refers to; in this case. Increment *SUBOFF by the bit offset
7949 of the accessed member. */
7952 fold_nonarray_ctor_reference (tree type
, tree ctor
,
7953 unsigned HOST_WIDE_INT offset
,
7954 unsigned HOST_WIDE_INT size
,
7956 unsigned HOST_WIDE_INT
*suboff
)
7958 unsigned HOST_WIDE_INT cnt
;
7961 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
,
7964 tree byte_offset
= DECL_FIELD_OFFSET (cfield
);
7965 tree field_offset
= DECL_FIELD_BIT_OFFSET (cfield
);
7966 tree field_size
= DECL_SIZE (cfield
);
7970 /* Determine the size of the flexible array member from
7971 the size of the initializer provided for it. */
7972 field_size
= TYPE_SIZE (TREE_TYPE (cval
));
7975 /* Variable sized objects in static constructors makes no sense,
7976 but field_size can be NULL for flexible array members. */
7977 gcc_assert (TREE_CODE (field_offset
) == INTEGER_CST
7978 && TREE_CODE (byte_offset
) == INTEGER_CST
7979 && (field_size
!= NULL_TREE
7980 ? TREE_CODE (field_size
) == INTEGER_CST
7981 : TREE_CODE (TREE_TYPE (cfield
)) == ARRAY_TYPE
));
7983 /* Compute bit offset of the field. */
7984 offset_int bitoffset
7985 = (wi::to_offset (field_offset
)
7986 + (wi::to_offset (byte_offset
) << LOG2_BITS_PER_UNIT
));
7987 /* Compute bit offset where the field ends. */
7988 offset_int bitoffset_end
;
7989 if (field_size
!= NULL_TREE
)
7990 bitoffset_end
= bitoffset
+ wi::to_offset (field_size
);
7994 /* Compute the bit offset of the end of the desired access.
7995 As a special case, if the size of the desired access is
7996 zero, assume the access is to the entire field (and let
7997 the caller make any necessary adjustments by storing
7998 the actual bounds of the field in FIELDBOUNDS). */
7999 offset_int access_end
= offset_int (offset
);
8003 access_end
= bitoffset_end
;
8005 /* Is there any overlap between the desired access at
8006 [OFFSET, OFFSET+SIZE) and the offset of the field within
8007 the object at [BITOFFSET, BITOFFSET_END)? */
8008 if (wi::cmps (access_end
, bitoffset
) > 0
8009 && (field_size
== NULL_TREE
8010 || wi::lts_p (offset
, bitoffset_end
)))
8012 *suboff
+= bitoffset
.to_uhwi ();
8014 if (!size
&& TREE_CODE (cval
) != CONSTRUCTOR
)
8016 /* For the final reference to the entire accessed member
8017 (SIZE is zero), reset OFFSET, disegard TYPE (which may
8018 be null) in favor of the type of the member, and set
8019 SIZE to the size of the accessed member. */
8020 offset
= bitoffset
.to_uhwi ();
8021 type
= TREE_TYPE (cval
);
8022 size
= (bitoffset_end
- bitoffset
).to_uhwi ();
8025 /* We do have overlap. Now see if the field is large enough
8026 to cover the access. Give up for accesses that extend
8027 beyond the end of the object or that span multiple fields. */
8028 if (wi::cmps (access_end
, bitoffset_end
) > 0)
8030 if (offset
< bitoffset
)
8033 offset_int inner_offset
= offset_int (offset
) - bitoffset
;
8034 return fold_ctor_reference (type
, cval
,
8035 inner_offset
.to_uhwi (), size
,
8043 return build_zero_cst (type
);
8046 /* CTOR is value initializing memory. Fold a reference of TYPE and
8047 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
8048 is zero, attempt to fold a reference to the entire subobject
8049 which OFFSET refers to. This is used when folding accesses to
8050 string members of aggregates. When non-null, set *SUBOFF to
8051 the bit offset of the accessed subobject. */
8054 fold_ctor_reference (tree type
, tree ctor
, const poly_uint64
&poly_offset
,
8055 const poly_uint64
&poly_size
, tree from_decl
,
8056 unsigned HOST_WIDE_INT
*suboff
/* = NULL */)
8060 /* We found the field with exact match. */
8062 && useless_type_conversion_p (type
, TREE_TYPE (ctor
))
8063 && known_eq (poly_offset
, 0U))
8064 return canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
8066 /* The remaining optimizations need a constant size and offset. */
8067 unsigned HOST_WIDE_INT size
, offset
;
8068 if (!poly_size
.is_constant (&size
) || !poly_offset
.is_constant (&offset
))
8071 /* We are at the end of walk, see if we can view convert the
8073 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor
)) && !offset
8074 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
8075 && !compare_tree_int (TYPE_SIZE (type
), size
)
8076 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor
)), size
))
8078 ret
= canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
8081 ret
= fold_unary (VIEW_CONVERT_EXPR
, type
, ret
);
8083 STRIP_USELESS_TYPE_CONVERSION (ret
);
8087 /* For constants and byte-aligned/sized reads try to go through
8088 native_encode/interpret. */
8089 if (CONSTANT_CLASS_P (ctor
)
8090 && BITS_PER_UNIT
== 8
8091 && offset
% BITS_PER_UNIT
== 0
8092 && offset
/ BITS_PER_UNIT
<= INT_MAX
8093 && size
% BITS_PER_UNIT
== 0
8094 && size
<= MAX_BITSIZE_MODE_ANY_MODE
8095 && can_native_interpret_type_p (type
))
8097 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
8098 int len
= native_encode_expr (ctor
, buf
, size
/ BITS_PER_UNIT
,
8099 offset
/ BITS_PER_UNIT
);
8101 return native_interpret_expr (type
, buf
, len
);
8103 if (TREE_CODE (ctor
) == CONSTRUCTOR
)
8105 unsigned HOST_WIDE_INT dummy
= 0;
8110 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
8111 || TREE_CODE (TREE_TYPE (ctor
)) == VECTOR_TYPE
)
8112 ret
= fold_array_ctor_reference (type
, ctor
, offset
, size
,
8115 ret
= fold_nonarray_ctor_reference (type
, ctor
, offset
, size
,
8118 /* Fall back to native_encode_initializer. Needs to be done
8119 only in the outermost fold_ctor_reference call (because it itself
8120 recurses into CONSTRUCTORs) and doesn't update suboff. */
8121 if (ret
== NULL_TREE
8123 && BITS_PER_UNIT
== 8
8124 && offset
% BITS_PER_UNIT
== 0
8125 && offset
/ BITS_PER_UNIT
<= INT_MAX
8126 && size
% BITS_PER_UNIT
== 0
8127 && size
<= MAX_BITSIZE_MODE_ANY_MODE
8128 && can_native_interpret_type_p (type
))
8130 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
8131 int len
= native_encode_initializer (ctor
, buf
, size
/ BITS_PER_UNIT
,
8132 offset
/ BITS_PER_UNIT
);
8134 return native_interpret_expr (type
, buf
, len
);
8143 /* Return the tree representing the element referenced by T if T is an
8144 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
8145 names using VALUEIZE. Return NULL_TREE otherwise. */
8148 fold_const_aggregate_ref_1 (tree t
, tree (*valueize
) (tree
))
8150 tree ctor
, idx
, base
;
8151 poly_int64 offset
, size
, max_size
;
8155 if (TREE_THIS_VOLATILE (t
))
8159 return get_symbol_constant_value (t
);
8161 tem
= fold_read_from_constant_string (t
);
8165 switch (TREE_CODE (t
))
8168 case ARRAY_RANGE_REF
:
8169 /* Constant indexes are handled well by get_base_constructor.
8170 Only special case variable offsets.
8171 FIXME: This code can't handle nested references with variable indexes
8172 (they will be handled only by iteration of ccp). Perhaps we can bring
8173 get_ref_base_and_extent here and make it use a valueize callback. */
8174 if (TREE_CODE (TREE_OPERAND (t
, 1)) == SSA_NAME
8176 && (idx
= (*valueize
) (TREE_OPERAND (t
, 1)))
8177 && poly_int_tree_p (idx
))
8179 tree low_bound
, unit_size
;
8181 /* If the resulting bit-offset is constant, track it. */
8182 if ((low_bound
= array_ref_low_bound (t
),
8183 poly_int_tree_p (low_bound
))
8184 && (unit_size
= array_ref_element_size (t
),
8185 tree_fits_uhwi_p (unit_size
)))
8187 poly_offset_int woffset
8188 = wi::sext (wi::to_poly_offset (idx
)
8189 - wi::to_poly_offset (low_bound
),
8190 TYPE_PRECISION (sizetype
));
8191 woffset
*= tree_to_uhwi (unit_size
);
8192 woffset
*= BITS_PER_UNIT
;
8193 if (woffset
.to_shwi (&offset
))
8195 base
= TREE_OPERAND (t
, 0);
8196 ctor
= get_base_constructor (base
, &offset
, valueize
);
8197 /* Empty constructor. Always fold to 0. */
8198 if (ctor
== error_mark_node
)
8199 return build_zero_cst (TREE_TYPE (t
));
8200 /* Out of bound array access. Value is undefined,
8202 if (maybe_lt (offset
, 0))
8204 /* We cannot determine ctor. */
8207 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
,
8208 tree_to_uhwi (unit_size
)
8218 case TARGET_MEM_REF
:
8220 base
= get_ref_base_and_extent (t
, &offset
, &size
, &max_size
, &reverse
);
8221 ctor
= get_base_constructor (base
, &offset
, valueize
);
8223 /* Empty constructor. Always fold to 0. */
8224 if (ctor
== error_mark_node
)
8225 return build_zero_cst (TREE_TYPE (t
));
8226 /* We do not know precise address. */
8227 if (!known_size_p (max_size
) || maybe_ne (max_size
, size
))
8229 /* We cannot determine ctor. */
8233 /* Out of bound array access. Value is undefined, but don't fold. */
8234 if (maybe_lt (offset
, 0))
8237 tem
= fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
, size
, base
);
8241 /* For bit field reads try to read the representative and
8243 if (TREE_CODE (t
) == COMPONENT_REF
8244 && DECL_BIT_FIELD (TREE_OPERAND (t
, 1))
8245 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t
, 1)))
8247 HOST_WIDE_INT csize
, coffset
;
8248 tree field
= TREE_OPERAND (t
, 1);
8249 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (field
);
8250 if (INTEGRAL_TYPE_P (TREE_TYPE (repr
))
8251 && size
.is_constant (&csize
)
8252 && offset
.is_constant (&coffset
)
8253 && (coffset
% BITS_PER_UNIT
!= 0
8254 || csize
% BITS_PER_UNIT
!= 0)
8256 && BYTES_BIG_ENDIAN
== WORDS_BIG_ENDIAN
)
8258 poly_int64 bitoffset
;
8259 poly_uint64 field_offset
, repr_offset
;
8260 if (poly_int_tree_p (DECL_FIELD_OFFSET (field
), &field_offset
)
8261 && poly_int_tree_p (DECL_FIELD_OFFSET (repr
), &repr_offset
))
8262 bitoffset
= (field_offset
- repr_offset
) * BITS_PER_UNIT
;
8265 bitoffset
+= (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
))
8266 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr
)));
8267 HOST_WIDE_INT bitoff
;
8268 int diff
= (TYPE_PRECISION (TREE_TYPE (repr
))
8269 - TYPE_PRECISION (TREE_TYPE (field
)));
8270 if (bitoffset
.is_constant (&bitoff
)
8275 size
= tree_to_uhwi (DECL_SIZE (repr
));
8277 tem
= fold_ctor_reference (TREE_TYPE (repr
), ctor
, offset
,
8279 if (tem
&& TREE_CODE (tem
) == INTEGER_CST
)
8281 if (!BYTES_BIG_ENDIAN
)
8282 tem
= wide_int_to_tree (TREE_TYPE (field
),
8283 wi::lrshift (wi::to_wide (tem
),
8286 tem
= wide_int_to_tree (TREE_TYPE (field
),
8287 wi::lrshift (wi::to_wide (tem
),
8299 tree c
= fold_const_aggregate_ref_1 (TREE_OPERAND (t
, 0), valueize
);
8300 if (c
&& TREE_CODE (c
) == COMPLEX_CST
)
8301 return fold_build1_loc (EXPR_LOCATION (t
),
8302 TREE_CODE (t
), TREE_TYPE (t
), c
);
8314 fold_const_aggregate_ref (tree t
)
8316 return fold_const_aggregate_ref_1 (t
, NULL
);
8319 /* Lookup virtual method with index TOKEN in a virtual table V
8321 Set CAN_REFER if non-NULL to false if method
8322 is not referable or if the virtual table is ill-formed (such as rewriten
8323 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
8326 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token
,
8328 unsigned HOST_WIDE_INT offset
,
8331 tree vtable
= v
, init
, fn
;
8332 unsigned HOST_WIDE_INT size
;
8333 unsigned HOST_WIDE_INT elt_size
, access_index
;
8339 /* First of all double check we have virtual table. */
8340 if (!VAR_P (v
) || !DECL_VIRTUAL_P (v
))
8342 /* Pass down that we lost track of the target. */
8348 init
= ctor_for_folding (v
);
8350 /* The virtual tables should always be born with constructors
8351 and we always should assume that they are avaialble for
8352 folding. At the moment we do not stream them in all cases,
8353 but it should never happen that ctor seem unreachable. */
8355 if (init
== error_mark_node
)
8357 /* Pass down that we lost track of the target. */
8362 gcc_checking_assert (TREE_CODE (TREE_TYPE (v
)) == ARRAY_TYPE
);
8363 size
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v
))));
8364 offset
*= BITS_PER_UNIT
;
8365 offset
+= token
* size
;
8367 /* Lookup the value in the constructor that is assumed to be array.
8368 This is equivalent to
8369 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
8370 offset, size, NULL);
8371 but in a constant time. We expect that frontend produced a simple
8372 array without indexed initializers. */
8374 gcc_checking_assert (TREE_CODE (TREE_TYPE (init
)) == ARRAY_TYPE
);
8375 domain_type
= TYPE_DOMAIN (TREE_TYPE (init
));
8376 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type
)));
8377 elt_size
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init
))));
8379 access_index
= offset
/ BITS_PER_UNIT
/ elt_size
;
8380 gcc_checking_assert (offset
% (elt_size
* BITS_PER_UNIT
) == 0);
8382 /* The C++ FE can now produce indexed fields, and we check if the indexes
8384 if (access_index
< CONSTRUCTOR_NELTS (init
))
8386 fn
= CONSTRUCTOR_ELT (init
, access_index
)->value
;
8387 tree idx
= CONSTRUCTOR_ELT (init
, access_index
)->index
;
8388 gcc_checking_assert (!idx
|| tree_to_uhwi (idx
) == access_index
);
8394 /* For type inconsistent program we may end up looking up virtual method
8395 in virtual table that does not contain TOKEN entries. We may overrun
8396 the virtual table and pick up a constant or RTTI info pointer.
8397 In any case the call is undefined. */
8399 || (TREE_CODE (fn
) != ADDR_EXPR
&& TREE_CODE (fn
) != FDESC_EXPR
)
8400 || TREE_CODE (TREE_OPERAND (fn
, 0)) != FUNCTION_DECL
)
8401 fn
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
8404 fn
= TREE_OPERAND (fn
, 0);
8406 /* When cgraph node is missing and function is not public, we cannot
8407 devirtualize. This can happen in WHOPR when the actual method
8408 ends up in other partition, because we found devirtualization
8409 possibility too late. */
8410 if (!can_refer_decl_in_current_unit_p (fn
, vtable
))
8421 /* Make sure we create a cgraph node for functions we'll reference.
8422 They can be non-existent if the reference comes from an entry
8423 of an external vtable for example. */
8424 cgraph_node::get_create (fn
);
8429 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
8430 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
8431 KNOWN_BINFO carries the binfo describing the true type of
8432 OBJ_TYPE_REF_OBJECT(REF).
8433 Set CAN_REFER if non-NULL to false if method
8434 is not referable or if the virtual table is ill-formed (such as rewriten
8435 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
8438 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token
, tree known_binfo
,
8441 unsigned HOST_WIDE_INT offset
;
8444 v
= BINFO_VTABLE (known_binfo
);
8445 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
8449 if (!vtable_pointer_value_to_vtable (v
, &v
, &offset
))
8455 return gimple_get_virt_method_for_vtable (token
, v
, offset
, can_refer
);
8458 /* Given a pointer value T, return a simplified version of an
8459 indirection through T, or NULL_TREE if no simplification is
8460 possible. Note that the resulting type may be different from
8461 the type pointed to in the sense that it is still compatible
8462 from the langhooks point of view. */
8465 gimple_fold_indirect_ref (tree t
)
8467 tree ptype
= TREE_TYPE (t
), type
= TREE_TYPE (ptype
);
8472 subtype
= TREE_TYPE (sub
);
8473 if (!POINTER_TYPE_P (subtype
)
8474 || TYPE_REF_CAN_ALIAS_ALL (ptype
))
8477 if (TREE_CODE (sub
) == ADDR_EXPR
)
8479 tree op
= TREE_OPERAND (sub
, 0);
8480 tree optype
= TREE_TYPE (op
);
8482 if (useless_type_conversion_p (type
, optype
))
8485 /* *(foo *)&fooarray => fooarray[0] */
8486 if (TREE_CODE (optype
) == ARRAY_TYPE
8487 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype
))) == INTEGER_CST
8488 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
8490 tree type_domain
= TYPE_DOMAIN (optype
);
8491 tree min_val
= size_zero_node
;
8492 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
8493 min_val
= TYPE_MIN_VALUE (type_domain
);
8494 if (TREE_CODE (min_val
) == INTEGER_CST
)
8495 return build4 (ARRAY_REF
, type
, op
, min_val
, NULL_TREE
, NULL_TREE
);
8497 /* *(foo *)&complexfoo => __real__ complexfoo */
8498 else if (TREE_CODE (optype
) == COMPLEX_TYPE
8499 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
8500 return fold_build1 (REALPART_EXPR
, type
, op
);
8501 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
8502 else if (TREE_CODE (optype
) == VECTOR_TYPE
8503 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
8505 tree part_width
= TYPE_SIZE (type
);
8506 tree index
= bitsize_int (0);
8507 return fold_build3 (BIT_FIELD_REF
, type
, op
, part_width
, index
);
8511 /* *(p + CST) -> ... */
8512 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
8513 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
8515 tree addr
= TREE_OPERAND (sub
, 0);
8516 tree off
= TREE_OPERAND (sub
, 1);
8520 addrtype
= TREE_TYPE (addr
);
8522 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
8523 if (TREE_CODE (addr
) == ADDR_EXPR
8524 && TREE_CODE (TREE_TYPE (addrtype
)) == VECTOR_TYPE
8525 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
)))
8526 && tree_fits_uhwi_p (off
))
8528 unsigned HOST_WIDE_INT offset
= tree_to_uhwi (off
);
8529 tree part_width
= TYPE_SIZE (type
);
8530 unsigned HOST_WIDE_INT part_widthi
8531 = tree_to_shwi (part_width
) / BITS_PER_UNIT
;
8532 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
8533 tree index
= bitsize_int (indexi
);
8534 if (known_lt (offset
/ part_widthi
,
8535 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype
))))
8536 return fold_build3 (BIT_FIELD_REF
, type
, TREE_OPERAND (addr
, 0),
8540 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
8541 if (TREE_CODE (addr
) == ADDR_EXPR
8542 && TREE_CODE (TREE_TYPE (addrtype
)) == COMPLEX_TYPE
8543 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
))))
8545 tree size
= TYPE_SIZE_UNIT (type
);
8546 if (tree_int_cst_equal (size
, off
))
8547 return fold_build1 (IMAGPART_EXPR
, type
, TREE_OPERAND (addr
, 0));
8550 /* *(p + CST) -> MEM_REF <p, CST>. */
8551 if (TREE_CODE (addr
) != ADDR_EXPR
8552 || DECL_P (TREE_OPERAND (addr
, 0)))
8553 return fold_build2 (MEM_REF
, type
,
8555 wide_int_to_tree (ptype
, wi::to_wide (off
)));
8558 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
8559 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
8560 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype
)))) == INTEGER_CST
8561 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (subtype
))))
8564 tree min_val
= size_zero_node
;
8566 sub
= gimple_fold_indirect_ref (sub
);
8568 sub
= build1 (INDIRECT_REF
, TREE_TYPE (subtype
), osub
);
8569 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
8570 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
8571 min_val
= TYPE_MIN_VALUE (type_domain
);
8572 if (TREE_CODE (min_val
) == INTEGER_CST
)
8573 return build4 (ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
, NULL_TREE
);
8579 /* Return true if CODE is an operation that when operating on signed
8580 integer types involves undefined behavior on overflow and the
8581 operation can be expressed with unsigned arithmetic. */
8584 arith_code_with_undefined_signed_overflow (tree_code code
)
8593 case POINTER_PLUS_EXPR
:
8600 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
8601 operation that can be transformed to unsigned arithmetic by converting
8602 its operand, carrying out the operation in the corresponding unsigned
8603 type and converting the result back to the original type.
8605 If IN_PLACE is true, adjust the stmt in place and return NULL.
8606 Otherwise returns a sequence of statements that replace STMT and also
8607 contain a modified form of STMT itself. */
8610 rewrite_to_defined_overflow (gimple
*stmt
, bool in_place
/* = false */)
8612 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
8614 fprintf (dump_file
, "rewriting stmt with undefined signed "
8616 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
8619 tree lhs
= gimple_assign_lhs (stmt
);
8620 tree type
= unsigned_type_for (TREE_TYPE (lhs
));
8621 gimple_seq stmts
= NULL
;
8622 if (gimple_assign_rhs_code (stmt
) == ABS_EXPR
)
8623 gimple_assign_set_rhs_code (stmt
, ABSU_EXPR
);
8625 for (unsigned i
= 1; i
< gimple_num_ops (stmt
); ++i
)
8627 tree op
= gimple_op (stmt
, i
);
8628 op
= gimple_convert (&stmts
, type
, op
);
8629 gimple_set_op (stmt
, i
, op
);
8631 gimple_assign_set_lhs (stmt
, make_ssa_name (type
, stmt
));
8632 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
8633 gimple_assign_set_rhs_code (stmt
, PLUS_EXPR
);
8634 gimple_set_modified (stmt
, true);
8637 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
8639 gsi_insert_seq_before (&gsi
, stmts
, GSI_SAME_STMT
);
8643 gimple_seq_add_stmt (&stmts
, stmt
);
8644 gimple
*cvt
= gimple_build_assign (lhs
, NOP_EXPR
, gimple_assign_lhs (stmt
));
8647 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
8648 gsi_insert_after (&gsi
, cvt
, GSI_SAME_STMT
);
8652 gimple_seq_add_stmt (&stmts
, cvt
);
8658 /* The valueization hook we use for the gimple_build API simplification.
8659 This makes us match fold_buildN behavior by only combining with
8660 statements in the sequence(s) we are currently building. */
8663 gimple_build_valueize (tree op
)
8665 if (gimple_bb (SSA_NAME_DEF_STMT (op
)) == NULL
)
8670 /* Build the expression CODE OP0 of type TYPE with location LOC,
8671 simplifying it first if possible. Returns the built
8672 expression value and appends statements possibly defining it
8676 gimple_build (gimple_seq
*seq
, location_t loc
,
8677 enum tree_code code
, tree type
, tree op0
)
8679 tree res
= gimple_simplify (code
, type
, op0
, seq
, gimple_build_valueize
);
8682 res
= create_tmp_reg_or_ssa_name (type
);
8684 if (code
== REALPART_EXPR
8685 || code
== IMAGPART_EXPR
8686 || code
== VIEW_CONVERT_EXPR
)
8687 stmt
= gimple_build_assign (res
, code
, build1 (code
, type
, op0
));
8689 stmt
= gimple_build_assign (res
, code
, op0
);
8690 gimple_set_location (stmt
, loc
);
8691 gimple_seq_add_stmt_without_update (seq
, stmt
);
8696 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
8697 simplifying it first if possible. Returns the built
8698 expression value and appends statements possibly defining it
8702 gimple_build (gimple_seq
*seq
, location_t loc
,
8703 enum tree_code code
, tree type
, tree op0
, tree op1
)
8705 tree res
= gimple_simplify (code
, type
, op0
, op1
, seq
, gimple_build_valueize
);
8708 res
= create_tmp_reg_or_ssa_name (type
);
8709 gimple
*stmt
= gimple_build_assign (res
, code
, op0
, op1
);
8710 gimple_set_location (stmt
, loc
);
8711 gimple_seq_add_stmt_without_update (seq
, stmt
);
8716 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
8717 simplifying it first if possible. Returns the built
8718 expression value and appends statements possibly defining it
8722 gimple_build (gimple_seq
*seq
, location_t loc
,
8723 enum tree_code code
, tree type
, tree op0
, tree op1
, tree op2
)
8725 tree res
= gimple_simplify (code
, type
, op0
, op1
, op2
,
8726 seq
, gimple_build_valueize
);
8729 res
= create_tmp_reg_or_ssa_name (type
);
8731 if (code
== BIT_FIELD_REF
)
8732 stmt
= gimple_build_assign (res
, code
,
8733 build3 (code
, type
, op0
, op1
, op2
));
8735 stmt
= gimple_build_assign (res
, code
, op0
, op1
, op2
);
8736 gimple_set_location (stmt
, loc
);
8737 gimple_seq_add_stmt_without_update (seq
, stmt
);
8742 /* Build the call FN () with a result of type TYPE (or no result if TYPE is
8743 void) with a location LOC. Returns the built expression value (or NULL_TREE
8744 if TYPE is void) and appends statements possibly defining it to SEQ. */
8747 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
, tree type
)
8749 tree res
= NULL_TREE
;
8751 if (internal_fn_p (fn
))
8752 stmt
= gimple_build_call_internal (as_internal_fn (fn
), 0);
8755 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
8756 stmt
= gimple_build_call (decl
, 0);
8758 if (!VOID_TYPE_P (type
))
8760 res
= create_tmp_reg_or_ssa_name (type
);
8761 gimple_call_set_lhs (stmt
, res
);
8763 gimple_set_location (stmt
, loc
);
8764 gimple_seq_add_stmt_without_update (seq
, stmt
);
8768 /* Build the call FN (ARG0) with a result of type TYPE
8769 (or no result if TYPE is void) with location LOC,
8770 simplifying it first if possible. Returns the built
8771 expression value (or NULL_TREE if TYPE is void) and appends
8772 statements possibly defining it to SEQ. */
8775 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
,
8776 tree type
, tree arg0
)
8778 tree res
= gimple_simplify (fn
, type
, arg0
, seq
, gimple_build_valueize
);
8782 if (internal_fn_p (fn
))
8783 stmt
= gimple_build_call_internal (as_internal_fn (fn
), 1, arg0
);
8786 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
8787 stmt
= gimple_build_call (decl
, 1, arg0
);
8789 if (!VOID_TYPE_P (type
))
8791 res
= create_tmp_reg_or_ssa_name (type
);
8792 gimple_call_set_lhs (stmt
, res
);
8794 gimple_set_location (stmt
, loc
);
8795 gimple_seq_add_stmt_without_update (seq
, stmt
);
8800 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
8801 (or no result if TYPE is void) with location LOC,
8802 simplifying it first if possible. Returns the built
8803 expression value (or NULL_TREE if TYPE is void) and appends
8804 statements possibly defining it to SEQ. */
8807 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
,
8808 tree type
, tree arg0
, tree arg1
)
8810 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, seq
, gimple_build_valueize
);
8814 if (internal_fn_p (fn
))
8815 stmt
= gimple_build_call_internal (as_internal_fn (fn
), 2, arg0
, arg1
);
8818 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
8819 stmt
= gimple_build_call (decl
, 2, arg0
, arg1
);
8821 if (!VOID_TYPE_P (type
))
8823 res
= create_tmp_reg_or_ssa_name (type
);
8824 gimple_call_set_lhs (stmt
, res
);
8826 gimple_set_location (stmt
, loc
);
8827 gimple_seq_add_stmt_without_update (seq
, stmt
);
8832 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
8833 (or no result if TYPE is void) with location LOC,
8834 simplifying it first if possible. Returns the built
8835 expression value (or NULL_TREE if TYPE is void) and appends
8836 statements possibly defining it to SEQ. */
8839 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
,
8840 tree type
, tree arg0
, tree arg1
, tree arg2
)
8842 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, arg2
,
8843 seq
, gimple_build_valueize
);
8847 if (internal_fn_p (fn
))
8848 stmt
= gimple_build_call_internal (as_internal_fn (fn
),
8849 3, arg0
, arg1
, arg2
);
8852 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
8853 stmt
= gimple_build_call (decl
, 3, arg0
, arg1
, arg2
);
8855 if (!VOID_TYPE_P (type
))
8857 res
= create_tmp_reg_or_ssa_name (type
);
8858 gimple_call_set_lhs (stmt
, res
);
8860 gimple_set_location (stmt
, loc
);
8861 gimple_seq_add_stmt_without_update (seq
, stmt
);
8866 /* Build CODE (OP0) with a result of type TYPE (or no result if TYPE is
8867 void) with location LOC, simplifying it first if possible. Returns the
8868 built expression value (or NULL_TREE if TYPE is void) and appends
8869 statements possibly defining it to SEQ. */
8872 gimple_build (gimple_seq
*seq
, location_t loc
, code_helper code
,
8873 tree type
, tree op0
)
8875 if (code
.is_tree_code ())
8876 return gimple_build (seq
, loc
, tree_code (code
), type
, op0
);
8877 return gimple_build (seq
, loc
, combined_fn (code
), type
, op0
);
8880 /* Build CODE (OP0, OP1) with a result of type TYPE (or no result if TYPE is
8881 void) with location LOC, simplifying it first if possible. Returns the
8882 built expression value (or NULL_TREE if TYPE is void) and appends
8883 statements possibly defining it to SEQ. */
8886 gimple_build (gimple_seq
*seq
, location_t loc
, code_helper code
,
8887 tree type
, tree op0
, tree op1
)
8889 if (code
.is_tree_code ())
8890 return gimple_build (seq
, loc
, tree_code (code
), type
, op0
, op1
);
8891 return gimple_build (seq
, loc
, combined_fn (code
), type
, op0
, op1
);
8894 /* Build CODE (OP0, OP1, OP2) with a result of type TYPE (or no result if TYPE
8895 is void) with location LOC, simplifying it first if possible. Returns the
8896 built expression value (or NULL_TREE if TYPE is void) and appends statements
8897 possibly defining it to SEQ. */
8900 gimple_build (gimple_seq
*seq
, location_t loc
, code_helper code
,
8901 tree type
, tree op0
, tree op1
, tree op2
)
8903 if (code
.is_tree_code ())
8904 return gimple_build (seq
, loc
, tree_code (code
), type
, op0
, op1
, op2
);
8905 return gimple_build (seq
, loc
, combined_fn (code
), type
, op0
, op1
, op2
);
8908 /* Build the conversion (TYPE) OP with a result of type TYPE
8909 with location LOC if such conversion is neccesary in GIMPLE,
8910 simplifying it first.
8911 Returns the built expression value and appends
8912 statements possibly defining it to SEQ. */
8915 gimple_convert (gimple_seq
*seq
, location_t loc
, tree type
, tree op
)
8917 if (useless_type_conversion_p (type
, TREE_TYPE (op
)))
8919 return gimple_build (seq
, loc
, NOP_EXPR
, type
, op
);
8922 /* Build the conversion (ptrofftype) OP with a result of a type
8923 compatible with ptrofftype with location LOC if such conversion
8924 is neccesary in GIMPLE, simplifying it first.
8925 Returns the built expression value and appends
8926 statements possibly defining it to SEQ. */
8929 gimple_convert_to_ptrofftype (gimple_seq
*seq
, location_t loc
, tree op
)
8931 if (ptrofftype_p (TREE_TYPE (op
)))
8933 return gimple_convert (seq
, loc
, sizetype
, op
);
8936 /* Build a vector of type TYPE in which each element has the value OP.
8937 Return a gimple value for the result, appending any new statements
8941 gimple_build_vector_from_val (gimple_seq
*seq
, location_t loc
, tree type
,
8944 if (!TYPE_VECTOR_SUBPARTS (type
).is_constant ()
8945 && !CONSTANT_CLASS_P (op
))
8946 return gimple_build (seq
, loc
, VEC_DUPLICATE_EXPR
, type
, op
);
8948 tree res
, vec
= build_vector_from_val (type
, op
);
8949 if (is_gimple_val (vec
))
8951 if (gimple_in_ssa_p (cfun
))
8952 res
= make_ssa_name (type
);
8954 res
= create_tmp_reg (type
);
8955 gimple
*stmt
= gimple_build_assign (res
, vec
);
8956 gimple_set_location (stmt
, loc
);
8957 gimple_seq_add_stmt_without_update (seq
, stmt
);
8961 /* Build a vector from BUILDER, handling the case in which some elements
8962 are non-constant. Return a gimple value for the result, appending any
8963 new instructions to SEQ.
8965 BUILDER must not have a stepped encoding on entry. This is because
8966 the function is not geared up to handle the arithmetic that would
8967 be needed in the variable case, and any code building a vector that
8968 is known to be constant should use BUILDER->build () directly. */
8971 gimple_build_vector (gimple_seq
*seq
, location_t loc
,
8972 tree_vector_builder
*builder
)
8974 gcc_assert (builder
->nelts_per_pattern () <= 2);
8975 unsigned int encoded_nelts
= builder
->encoded_nelts ();
8976 for (unsigned int i
= 0; i
< encoded_nelts
; ++i
)
8977 if (!CONSTANT_CLASS_P ((*builder
)[i
]))
8979 tree type
= builder
->type ();
8980 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
).to_constant ();
8981 vec
<constructor_elt
, va_gc
> *v
;
8982 vec_alloc (v
, nelts
);
8983 for (i
= 0; i
< nelts
; ++i
)
8984 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, builder
->elt (i
));
8987 if (gimple_in_ssa_p (cfun
))
8988 res
= make_ssa_name (type
);
8990 res
= create_tmp_reg (type
);
8991 gimple
*stmt
= gimple_build_assign (res
, build_constructor (type
, v
));
8992 gimple_set_location (stmt
, loc
);
8993 gimple_seq_add_stmt_without_update (seq
, stmt
);
8996 return builder
->build ();
8999 /* Emit gimple statements into &stmts that take a value given in OLD_SIZE
9000 and generate a value guaranteed to be rounded upwards to ALIGN.
9002 Return the tree node representing this size, it is of TREE_TYPE TYPE. */
9005 gimple_build_round_up (gimple_seq
*seq
, location_t loc
, tree type
,
9006 tree old_size
, unsigned HOST_WIDE_INT align
)
9008 unsigned HOST_WIDE_INT tg_mask
= align
- 1;
9009 /* tree new_size = (old_size + tg_mask) & ~tg_mask; */
9010 gcc_assert (INTEGRAL_TYPE_P (type
));
9011 tree tree_mask
= build_int_cst (type
, tg_mask
);
9012 tree oversize
= gimple_build (seq
, loc
, PLUS_EXPR
, type
, old_size
,
9015 tree mask
= build_int_cst (type
, -align
);
9016 return gimple_build (seq
, loc
, BIT_AND_EXPR
, type
, oversize
, mask
);
9019 /* Return true if the result of assignment STMT is known to be non-negative.
9020 If the return value is based on the assumption that signed overflow is
9021 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
9022 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
9025 gimple_assign_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
9028 enum tree_code code
= gimple_assign_rhs_code (stmt
);
9029 tree type
= TREE_TYPE (gimple_assign_lhs (stmt
));
9030 switch (get_gimple_rhs_class (code
))
9032 case GIMPLE_UNARY_RHS
:
9033 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt
),
9035 gimple_assign_rhs1 (stmt
),
9036 strict_overflow_p
, depth
);
9037 case GIMPLE_BINARY_RHS
:
9038 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt
),
9040 gimple_assign_rhs1 (stmt
),
9041 gimple_assign_rhs2 (stmt
),
9042 strict_overflow_p
, depth
);
9043 case GIMPLE_TERNARY_RHS
:
9045 case GIMPLE_SINGLE_RHS
:
9046 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt
),
9047 strict_overflow_p
, depth
);
9048 case GIMPLE_INVALID_RHS
:
9054 /* Return true if return value of call STMT is known to be non-negative.
9055 If the return value is based on the assumption that signed overflow is
9056 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
9057 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
9060 gimple_call_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
9063 tree arg0
= gimple_call_num_args (stmt
) > 0 ?
9064 gimple_call_arg (stmt
, 0) : NULL_TREE
;
9065 tree arg1
= gimple_call_num_args (stmt
) > 1 ?
9066 gimple_call_arg (stmt
, 1) : NULL_TREE
;
9067 tree lhs
= gimple_call_lhs (stmt
);
9069 && tree_call_nonnegative_warnv_p (TREE_TYPE (lhs
),
9070 gimple_call_combined_fn (stmt
),
9072 strict_overflow_p
, depth
));
/* Return true if the result of PHI node STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
9081 gimple_phi_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
9084 for (unsigned i
= 0; i
< gimple_phi_num_args (stmt
); ++i
)
9086 tree arg
= gimple_phi_arg_def (stmt
, i
);
9087 if (!tree_single_nonnegative_warnv_p (arg
, strict_overflow_p
, depth
+ 1))
9093 /* Return true if STMT is known to compute a non-negative value.
9094 If the return value is based on the assumption that signed overflow is
9095 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
9096 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
9099 gimple_stmt_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
9102 switch (gimple_code (stmt
))
9105 return gimple_assign_nonnegative_warnv_p (stmt
, strict_overflow_p
,
9108 return gimple_call_nonnegative_warnv_p (stmt
, strict_overflow_p
,
9111 return gimple_phi_nonnegative_warnv_p (stmt
, strict_overflow_p
,
9118 /* Return true if the floating-point value computed by assignment STMT
9119 is known to have an integer value. We also allow +Inf, -Inf and NaN
9120 to be considered integer values. Return false for signaling NaN.
9122 DEPTH is the current nesting depth of the query. */
9125 gimple_assign_integer_valued_real_p (gimple
*stmt
, int depth
)
9127 enum tree_code code
= gimple_assign_rhs_code (stmt
);
9128 switch (get_gimple_rhs_class (code
))
9130 case GIMPLE_UNARY_RHS
:
9131 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt
),
9132 gimple_assign_rhs1 (stmt
), depth
);
9133 case GIMPLE_BINARY_RHS
:
9134 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt
),
9135 gimple_assign_rhs1 (stmt
),
9136 gimple_assign_rhs2 (stmt
), depth
);
9137 case GIMPLE_TERNARY_RHS
:
9139 case GIMPLE_SINGLE_RHS
:
9140 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt
), depth
);
9141 case GIMPLE_INVALID_RHS
:
9147 /* Return true if the floating-point value computed by call STMT is known
9148 to have an integer value. We also allow +Inf, -Inf and NaN to be
9149 considered integer values. Return false for signaling NaN.
9151 DEPTH is the current nesting depth of the query. */
9154 gimple_call_integer_valued_real_p (gimple
*stmt
, int depth
)
9156 tree arg0
= (gimple_call_num_args (stmt
) > 0
9157 ? gimple_call_arg (stmt
, 0)
9159 tree arg1
= (gimple_call_num_args (stmt
) > 1
9160 ? gimple_call_arg (stmt
, 1)
9162 return integer_valued_real_call_p (gimple_call_combined_fn (stmt
),
9166 /* Return true if the floating-point result of phi STMT is known to have
9167 an integer value. We also allow +Inf, -Inf and NaN to be considered
9168 integer values. Return false for signaling NaN.
9170 DEPTH is the current nesting depth of the query. */
9173 gimple_phi_integer_valued_real_p (gimple
*stmt
, int depth
)
9175 for (unsigned i
= 0; i
< gimple_phi_num_args (stmt
); ++i
)
9177 tree arg
= gimple_phi_arg_def (stmt
, i
);
9178 if (!integer_valued_real_single_p (arg
, depth
+ 1))
9184 /* Return true if the floating-point value computed by STMT is known
9185 to have an integer value. We also allow +Inf, -Inf and NaN to be
9186 considered integer values. Return false for signaling NaN.
9188 DEPTH is the current nesting depth of the query. */
9191 gimple_stmt_integer_valued_real_p (gimple
*stmt
, int depth
)
9193 switch (gimple_code (stmt
))
9196 return gimple_assign_integer_valued_real_p (stmt
, depth
);
9198 return gimple_call_integer_valued_real_p (stmt
, depth
);
9200 return gimple_phi_integer_valued_real_p (stmt
, depth
);