/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2021 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-access.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-object-size.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "diagnostic-core.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"
#include "internal-fn.h"
enum strlen_range_kind
{
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};
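/* Illustrative example: given  char a[4];  holding a string of unknown
   contents, SRK_STRLEN fails to determine an exact length, while
   SRK_LENRANGE yields the range [0, 3] based on the size of A.  */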
static bool get_range_strlen (tree, bitmap, strlen_range_kind,
			      c_strlen_data *, unsigned);
/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to another compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during the final compilation stage.
	At this time we have already decided that we will not output
	the function body and thus we cannot reference the symbol
	directly.  */
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they are defined and not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}
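/* For example (illustrative), when WHOPR devirtualization resolves a call
   through an external vtable to a static member function placed in
   another partition, the symbol has no local definition, and the checks
   above decide whether a direct reference may still be emitted.  */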
/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, a SSA name is created.  Otherwise a temporary register
   is made.  */

tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}
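/* Example use (illustrative): a folding helper that must materialize a
   loaded value can write
     tree tem = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem), new_stmt);
   and receive an SSA name such as _7 in SSA form, or a temporary
   register declaration before SSA form is built.  */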
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* ??? We should be able to assert that TREE_ADDRESSABLE is set,
	   but since the use can be in a debug stmt we can't.  */
	;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
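/* For instance (illustrative), an initializer value of the form
     &a p+ 4
   is rewritten above into
     &MEM[(type *)&a + 4B]
   which is_gimple_min_invariant accepts.  */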
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
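/* E.g. (illustrative) for
     static const int x = 42;
   the function returns 42, and for a non-overridable
     const int y;
   with no initializer it returns the zero constant of Y's type.  */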
/* Subroutine of fold_stmt.  We perform constant folding of the
   memory reference tree EXPR.  */

static tree
maybe_fold_reference (tree expr)
{
  tree result = NULL_TREE;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_unary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_ternary_loc (EXPR_LOCATION (expr),
			       TREE_CODE (expr),
			       TREE_TYPE (expr),
			       TREE_OPERAND (expr, 0),
			       TREE_OPERAND (expr, 1),
			       TREE_OPERAND (expr, 2));
  else
    result = fold_const_aggregate_ref (expr);

  if (result && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
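/* For example (illustrative), REALPART_EXPR <__complex__ (1.0, 2.0)>
   folds to 1.0, and a BIT_FIELD_REF of a VECTOR_CST folds to the
   selected element.  */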
/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.c
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning
   any attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if ((code == COND_EXPR
		   ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
		   : !is_gimple_val (TREE_OPERAND (expr, 0)))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
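/* E.g. (illustrative) the GENERIC tree  &a[i_1 + 1]  is not a valid
   GIMPLE rhs because the array index embeds an addition that must be
   gimplified into a separate statement first.  */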
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *> targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    if (TREE_CODE (ref) == MEM_REF
		&& integer_zerop (TREE_OPERAND (ref, 1)))
	      {
		result = TREE_OPERAND (ref, 0);
		if (!useless_type_conversion_p (TREE_TYPE (rhs),
						TREE_TYPE (result)))
		  result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
		return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs)
		 && is_gimple_reg_type (TREE_TYPE (rhs)))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
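/* For instance (illustrative), for the assignment
     x_3 = _1 ? 2 : 2;
   the GIMPLE_TERNARY_RHS case folds the COND_EXPR to the constant 2,
   which valid_gimple_rhs_p then accepts as a replacement rhs.  */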
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
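/* Illustrative example: replacing  memcpy (d_1, s_2, 8)  carrying
   VUSE .MEM_3 / VDEF .MEM_5 with the pair
     _9 = MEM[s_2];
     MEM[d_1] = _9;
   gives the load VUSE .MEM_3 and the store VDEF .MEM_5, so the virtual
   operand chain around the replaced call stays intact.  */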
/* Helper function for update_gimple_call and
   gimplify_and_update_call_from_tree.  A GIMPLE_CALL STMT is being replaced
   with GIMPLE_CALL NEW_STMT.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
			   gimple *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  gimple_call_set_lhs (new_stmt, lhs);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    SSA_NAME_DEF_STMT (lhs) = new_stmt;
  gimple_move_vops (new_stmt, stmt);
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}
/* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
   with number of arguments NARGS, where the arguments in GIMPLE form
   follow the NARGS argument.  */

void
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
}
/* Return true if EXPR is a CALL_EXPR suitable for representation
   as a single GIMPLE_CALL statement.  If the arguments require
   further gimplification, return false.  */

static bool
valid_gimple_call_p (tree expr)
{
  unsigned i, nargs;

  if (TREE_CODE (expr) != CALL_EXPR)
    return false;

  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    {
      tree arg = CALL_EXPR_ARG (expr, i);
      if (is_gimple_reg_type (TREE_TYPE (arg)))
	{
	  if (!is_gimple_val (arg))
	    return false;
	}
      else
	if (!is_gimple_lvalue (arg))
	  return false;
    }

  return true;
}
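/* E.g. (illustrative)  foo (x_1, 4)  can be represented directly as a
   GIMPLE_CALL, while  foo (x_1 + 1, 4)  cannot: the first argument is
   not a GIMPLE value and needs further gimplification.  */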
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs, true);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();
      return;
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      push_gimplify_context (gimple_in_ssa_p (cfun));
      gimplify_and_add (expr, &stmts);
      pop_gimplify_context (NULL);

      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  gsi_replace_with_seq_vops (si_p, stmts);
}
/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}
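/* E.g. (illustrative) folding  n_2 = strlen ("abc")  calls this helper
   with VAL = 3, producing the assignment  n_2 = 3  and releasing the
   call's virtual definition.  */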
/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return (DECL_P (inner)
	  || (TREE_CODE (inner) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
}
/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  value_range valid_range (build_int_cst (type, 0),
			   wide_int_to_tree (type, ssize_max));
  value_range vr;
  if (cfun)
    get_range_query (cfun)->range_of_expr (vr, size);
  else
    get_global_range_query ()->range_of_expr (vr, size);
  if (vr.undefined_p ())
    vr.set_varying (TREE_TYPE (size));
  vr.intersect (&valid_range);
  return vr.zero_p ();
}
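/* For example (illustrative), if the ranger proves a size_t argument
   n_3 has the range ~[1, SSIZE_MAX] (every nonzero value up to
   SSIZE_MAX is excluded), intersecting with the valid range
   [0, SSIZE_MAX] leaves exactly [0, 0] and the function returns true.  */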
/* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  A call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  location_t loc = gimple_location (stmt);
  /* If the LEN parameter is a constant zero or in range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }
  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see bug ...).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      /* We cannot (easily) change the type of the copy if it is a storage
	 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
	 modify the storage order of objects (see storage_order_barrier_p).  */
      tree srctype
	= POINTER_TYPE_P (TREE_TYPE (src))
	  ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
      tree desttype
	= POINTER_TYPE_P (TREE_TYPE (dest))
	  ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
      tree destvar, srcvar, srcoff;
      unsigned int src_align, dest_align;
      unsigned HOST_WIDE_INT tmp_len;
      const char *tmp_str;

      /* Build accesses at offset zero with a ref-all character type.  */
      tree off0
	= build_int_cst (build_pointer_type_for_mode (char_type_node,
						      ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory with a single set operation
	 and that the total size is less than MOVE_MAX * MOVE_RATIO.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && (compare_tree_int
	      (len, (MOVE_MAX
		     * MOVE_RATIO (optimize_function_for_size_p (cfun))))
	      <= 0)
	  /* FIXME: Don't transform copies from strings with known length.
	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
	     from being handled, and the case was XFAILed for that reason.
	     Now that it is handled and the XFAIL removed, as soon as other
	     strlenopt tests that rely on it for passing are adjusted, this
	     hack can be removed.  */
	  && !c_strlen (src, 1)
	  && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL)
	  && !(srctype
	       && AGGREGATE_TYPE_P (srctype)
	       && TYPE_REVERSE_STORAGE_ORDER (srctype))
	  && !(desttype
	       && AGGREGATE_TYPE_P (desttype)
	       && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect out-of-bounds accesses without issuing warnings.
		 Avoid folding out-of-bounds copies but to avoid false
		 positives for unreachable code defer warning until after
		 DCE has worked its magic.
		 -Wrestrict is still diagnosed.  */
	      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
							 dest, src, len, len,
							 false, false))
		if (warning != OPT_Wrestrict)
		  return false;

	      scalar_int_mode mode;
	      if (int_mode_for_size (ilen * 8, 0).exists (&mode)
		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
		  && have_insn_for (SET, mode)
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree type = build_nonstandard_integer_type (ilen * 8, 1);
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_move_vops (new_stmt, stmt);
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}
      if (code == BUILT_IN_MEMMOVE)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}
      if (!tree_fits_shwi_p (len))
	return false;
      if (!srctype
	  || (AGGREGATE_TYPE_P (srctype)
	      && TYPE_REVERSE_STORAGE_ORDER (srctype)))
	return false;
      if (!desttype
	  || (AGGREGATE_TYPE_P (desttype)
	      && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
	 on alignment, whether the access constitutes a register access
	 and whether it may actually expose a declaration for SSA rewrite
	 or SRA decomposition.  Also try to expose a string constant, we
	 might be able to concatenate several of them later into a single
	 string store.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
	  && dest_align >= TYPE_ALIGN (desttype)
	  && (is_gimple_reg_type (desttype)
	      || src_align >= TYPE_ALIGN (desttype)))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
	       && var_decl_component_p (TREE_OPERAND (src, 0))
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	       && src_align >= TYPE_ALIGN (srctype)
	       && (is_gimple_reg_type (srctype)
		   || dest_align >= TYPE_ALIGN (srctype)))
	srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      /* FIXME: Don't transform copies from strings with known original
	 length.  As soon as strlenopt tests that rely on it for passing
	 are adjusted, this hack can be removed.  */
      else if (gimple_call_alloca_for_var_p (stmt)
	       && (srcvar = string_constant (src, &srcoff, NULL, NULL))
	       && integer_zerop (srcoff)
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
	       && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
	srctype = TREE_TYPE (srcvar);
      else
	return false;
      /* Now that we chose an access type express the other side in
	 terms of it if the target allows that with respect to alignment
	 constraints.  */
      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}
      /* Same as above, detect out-of-bounds accesses without issuing
	 warnings.  Avoid folding out-of-bounds copies but to avoid
	 false positives for unreachable code defer warning until
	 after DCE has worked its magic.
	 -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
						 dest, src, len, len,
						 false, false))
	if (warning != OPT_Wrestrict)
	  return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}
      /* We get an aggregate copy.  If the source is a STRING_CST, then
	 directly use its type to perform the copy.  */
      if (TREE_CODE (srcvar) == STRING_CST)
	desttype = srctype;

      /* Or else, use an unsigned char[] type to perform the copy in order
	 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
	 types or float modes behavior on copying.  */
      else
	{
	  desttype = build_array_type_nelts (unsigned_char_type_node,
					     tree_to_uhwi (len));
	  srctype = desttype;
	  if (src_align > TYPE_ALIGN (srctype))
	    srctype = build_aligned_type (srctype, src_align);
	  srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	}

      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      new_stmt = gimple_build_assign (destvar, srcvar);

set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }
done:
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
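/* Illustrative example of the folding above: with
     char a[8], b[8];
     memcpy (a, b, 8);
   the call becomes a single aggregate assignment from *B to *A using an
   8-byte integer type, and  p = mempcpy (d, s, n)  additionally yields
   p = d + n  after the copy.  */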
/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}
/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to VAL.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
	  != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (!type_has_mode_precision_p (etype))
    etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
					    TYPE_UNSIGNED (etype));

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_move_vops (store, stmt);
  gimple_set_location (store, gimple_location (stmt));
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
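/* E.g. (illustrative) for a 2-byte destination with suitable alignment,
     memset (&s, 7, 2)
   stores the replicated byte pattern 0x0707 via a single assignment
     MEM[(short unsigned int *)&s] = 1799;  */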
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  */

static bool
get_range_strlen_tree (tree arg, bitmap visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;
  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }
  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array.
	     DATA.DECL with size DATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }
  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == MEM_REF
	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
	{
	  /* Handle a MEM_REF into a DECL accessing an array of integers,
	     being conservative about references to extern structures with
	     flexible array members that can be initialized to arbitrary
	     numbers of elements as an extension (static structs are okay).
	     FIXME: Make this less conservative -- see
	     component_ref_size in tree.c.  */
	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
	      && (decl_binds_to_current_def_p (ref)
		  || !array_at_struct_end_p (arg)))
	    {
	      /* Fail if the offset is out of bounds.  Such accesses
		 should be diagnosed at some point.  */
	      val = DECL_SIZE_UNIT (ref);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;

	      poly_offset_int psiz = wi::to_offset (val);
	      poly_offset_int poff = mem_ref_offset (arg);
	      if (known_le (psiz, poff))
		return false;

	      pdata->minlen = ssize_int (0);

	      /* Subtract the offset and one for the terminating nul.  */
	      psiz -= poff;
	      psiz -= 1;
	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather than the type of the access it is not a tight
		 bound.  */
	    }
	}
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
      maxbound = true;
    }

  if (!val)
    return false;
  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;
	}
      else
	pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }

	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
	      || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
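/* Illustrative example: for
     char a[8];
   and an ARG of &a with unknown contents, the code above sets
   PDATA->MINLEN = 0 and derives the optimistic bound 7 (the array
   size minus 1 for the terminating nul) for PDATA->MAXBOUND.  */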
/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG refers to if
   the range of lengths cannot be determined, and store all in *PDATA.
   For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
   the maximum constant value.
   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
   SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
   length or if we are unable to determine the length, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide character strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false otherwise.  */
static bool
get_range_strlen (tree arg, bitmap visited,
		  strlen_range_kind rkind,
		  c_strlen_data *pdata, unsigned eltsize)
{
  if (TREE_CODE (arg) != SSA_NAME)
    return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  tree var = arg;
  gimple *def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			  gimple_assign_rhs3 (def_stmt) };

	  for (unsigned int i = 0; i < 2; i++)
	    if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
	      {
		if (rkind != SRK_LENRANGE)
		  return false;
		/* Set the upper bound to the maximum to prevent
		   it from being adjusted in the next iteration but
		   leave MINLEN and the more conservative MAXBOUND
		   determined so far alone (or leave them null if
		   they haven't been set yet).  That the MINLEN is
		   in fact zero can be determined from MAXLEN being
		   unbounded but the discovered minimum is used for
		   diagnostics.  */
		pdata->maxlen = build_all_ones_cst (size_type_node);
	      }
	  return true;
	}
      return false;

    case GIMPLE_PHI:
      /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
	 must have a constant length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	{
	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))
	    continue;

	  if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
	    {
	      if (rkind != SRK_LENRANGE)
		return false;
	      /* Set the upper bound to the maximum to prevent
		 it from being adjusted in the next iteration but
		 leave MINLEN and the more conservative MAXBOUND
		 determined so far alone (or leave them null if
		 they haven't been set yet).  That the MINLEN is
		 in fact zero can be determined from MAXLEN being
		 unbounded but the discovered minimum is used for
		 diagnostics.  */
	      pdata->maxlen = build_all_ones_cst (size_type_node);
	    }
	}
      return true;

    default:
      return false;
    }
}
/* Try to obtain the range of the lengths of the string(s) referenced
   by ARG, or the size of the largest array ARG refers to if the range
   of lengths cannot be determined, and store all in *PDATA which must
   be zero-initialized on input except PDATA->MAXBOUND may be set to
   a non-null tree node other than INTEGER_CST to request to have it
   set to the length of the longest string in a PHI.  ELTSIZE is
   the expected size of the string element in bytes: 1 for char and
   some power of 2 for wide characters.
   Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
   for optimization.  Returning false means that a nonzero PDATA->MINLEN
   doesn't reflect the true lower bound of the range when PDATA->MAXLEN
   is -1 (in that case, the actual range is indeterminate, i.e.,
   [0, PTRDIFF_MAX - 2]).  */

bool
get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
{
  auto_bitmap visited;
  tree maxbound = pdata->maxbound;

  if (!get_range_strlen (arg, visited, SRK_LENRANGE, pdata, eltsize))
    {
      /* On failure extend the length range to an impossible maximum
	 (a valid MAXLEN must be less than PTRDIFF_MAX - 1).  Other
	 members can stay unchanged regardless.  */
      pdata->minlen = ssize_int (0);
      pdata->maxlen = build_all_ones_cst (size_type_node);
    }
  else if (!pdata->minlen)
    pdata->minlen = ssize_int (0);

  /* If it's unchanged from its initial non-null value, set the conservative
     MAXBOUND to SIZE_MAX.  Otherwise leave it null (if it is null).  */
  if (maxbound && pdata->maxbound == maxbound)
    pdata->maxbound = build_all_ones_cst (size_type_node);

  return !integer_all_onesp (pdata->maxlen);
}
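/* Example use (illustrative):
     c_strlen_data lendata = { };
     if (get_range_strlen (arg, &lendata, 1))
       ... the length is within [lendata.minlen, lendata.maxlen] ...
   A false return only guarantees the conservative range
   [0, PTRDIFF_MAX - 2].  */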
/* Return the maximum value for ARG given RKIND (see strlen_range_kind).
   For ARG of pointer types, NONSTR indicates if the caller is prepared
   to handle unterminated strings.  For integer ARG and when RKIND ==
   SRK_INT_VALUE, NONSTR must be null.

   If an unterminated array is discovered and our caller handles
   unterminated arrays, then bubble up the offending DECL and
   return the maximum size.  Otherwise return NULL.  */

static tree
get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
{
  /* A non-null NONSTR is meaningless when determining the maximum
     value of an integer ARG.  */
  gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
  /* ARG must have an integral type when RKIND says so.  */
  gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));

  auto_bitmap visited;

  /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
     is unbounded.  */
  c_strlen_data lendata = { };
  if (!get_range_strlen (arg, visited, rkind, &lendata, /* eltsize = */1))
    lendata.maxlen = NULL_TREE;
  else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
    lendata.maxlen = NULL_TREE;

  if (nonstr)
    {
      /* For callers prepared to handle unterminated arrays set
	 *NONSTR to point to the declaration of the array and return
	 the maximum length/size.  */
      *nonstr = lendata.decl;
      return lendata.maxlen;
    }

  /* Fail if the constant array isn't nul-terminated.  */
  return lendata.decl ? NULL_TREE : lendata.maxlen;
}
/* Return true if LEN is known to be less than or equal to (or if STRICT is
   true, strictly less than) the lower bound of SIZE at compile time and false
   otherwise.  */

static bool
known_lower (gimple *stmt, tree len, tree size, bool strict = false)
{
  if (len == NULL_TREE)
    return false;

  wide_int size_range[2];
  wide_int len_range[2];
  if (get_range (len, stmt, len_range) && get_range (size, stmt, size_range))
    {
      if (strict)
	return wi::ltu_p (len_range[1], size_range[0]);
      else
	return wi::leu_p (len_range[1], size_range[0]);
    }

  return false;
}
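
/* For instance, if range information proves LEN is in [0, 3] and SIZE
   is in [8, 16], known_lower (stmt, len, size) returns true because
   3 <= 8; with STRICT it tests 3 < 8 instead.  (The bounds here are
   made up purely for illustration.)  */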
/* Fold a call to the strcpy builtin with arguments DEST and SRC.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !warning_suppressed_p (stmt, OPT_Wrestrict))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Set to non-null if SRC refers to an unterminated array.  */
  tree nonstr = NULL;
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);

  if (nonstr)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
	warn_string_no_nul (loc, stmt, "strcpy", src, nonstr);
      suppress_warning (stmt, OPT_Wstringop_overread);
      return false;
    }

  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
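
/* As an illustration of the fold above (assuming the memcpy builtin is
   available and we are not optimizing for size), a call such as

     strcpy (dest, "abc");

   whose source length is known to be 3 becomes

     memcpy (dest, "abc", 4);

   copying the terminating nul as part of a fixed-size copy.  */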
/* Fold a call to the strncpy builtin with arguments DEST, SRC, and LEN.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, SRK_STRLEN);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes",
			fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, SRK_STRLEN);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK, transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
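
/* Illustration of the fold above: with a constant bound no larger than
   the source size including the nul, e.g.

     strncpy (dest, "abc", 4);

   the call is rewritten as

     memcpy (dest, "abc", 4);

   A call like strncpy (dest, "abc", 3) is folded the same way (to a
   3-byte memcpy) but is first diagnosed by maybe_diag_stxncpy_trunc
   as leaving the copy unterminated.  */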
/* Fold a call to the strchr or strrchr builtin.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  if (!gimple_call_lhs (stmt))
    return false;

  /* Avoid folding if the first argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, str))
    return false;

  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
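
/* Examples of the folds above:

     strchr ("hello", 'l')  =>  "hello" + 2   (constant arguments)
     strchr (s, 0)          =>  s + strlen (s)
     strrchr (s, 0)         =>  strchr (s, 0) (when optimizing for size)

   The second form relies on strlen generally being cheaper than a
   character search.  */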
/* Fold a call to the strstr builtin.
   If both arguments are constant, evaluate and fold the result,
   additionally fold strstr (x, "") into x and strstr (x, "c")
   into strchr (x, 'c').  */
static bool
gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  if (!gimple_call_lhs (stmt))
    return false;

  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);

  /* Avoid folding if either argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, haystack)
      || !check_nul_terminated_array (NULL_TREE, needle))
    return false;

  const char *q = c_getstr (needle);
  if (q == NULL)
    return false;

  if (const char *p = c_getstr (haystack))
    {
      const char *r = strstr (p, q);

      if (r == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, r - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt
	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
			       haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* For strstr (x, "") return x.  */
  if (q[0] == '\0')
    {
      replace_call_with_value (gsi, haystack);
      return true;
    }

  /* Transform strstr (x, "c") into strchr (x, 'c').  */
  if (q[1] == '\0')
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
	{
	  tree c = build_int_cst (integer_type_node, q[0]);
	  gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
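
/* Examples of the folds above:

     strstr ("hello", "ll")  =>  "hello" + 2
     strstr (x, "")          =>  x
     strstr (x, "c")         =>  strchr (x, 'c')  */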
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return true if simplification was possible, otherwise false.  The
   simplified form computes the same value, but in a more efficient
   manner (including calls to other builtin functions).  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, SRK_STRLEN);
  if (!len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
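
/* A sketch of the expansion performed above: when strlen (SRC) is a
   known constant N and the block is optimized for speed,

     strcat (dst, src);

   becomes the equivalent of

     size_t tmp = strlen (dst);
     memcpy (dst + tmp, src, N + 1);

   with the original DST produced as the result when the call's value
   is used.  */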
/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strncat builtin.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree src_len = c_strlen (src, 1);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (src_len && integer_zerop (src_len)))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (!src_len || known_lower (stmt, len, src_len, true))
    return false;

  /* Warn on constant LEN.  */
  if (TREE_CODE (len) == INTEGER_CST)
    {
      bool nowarn = warning_suppressed_p (stmt, OPT_Wstringop_overflow_);
      tree dstsize;

      if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize)
	  && TREE_CODE (dstsize) == INTEGER_CST)
	{
	  int cmpdst = tree_int_cst_compare (len, dstsize);

	  if (cmpdst >= 0)
	    {
	      tree fndecl = gimple_call_fndecl (stmt);

	      /* Strncat copies (at most) LEN bytes and always appends
		 the terminating NUL so the specified bound should never
		 be equal to (or greater than) the size of the destination.
		 If it is, the copy could overflow.  */
	      location_t loc = gimple_location (stmt);
	      nowarn = warning_at (loc, OPT_Wstringop_overflow_,
				   cmpdst == 0
				   ? G_("%qD specified bound %E equals "
					"destination size")
				   : G_("%qD specified bound %E exceeds "
					"destination size %E"),
				   fndecl, len, dstsize);
	      if (nowarn)
		suppress_warning (stmt, OPT_Wstringop_overflow_);
	    }
	}

      if (!nowarn && TREE_CODE (src_len) == INTEGER_CST
	  && tree_int_cst_compare (src_len, len) == 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  location_t loc = gimple_location (stmt);

	  /* To avoid possible overflow the specified bound should also
	     not be equal to the length of the source, even when the size
	     of the destination is unknown (it's not an uncommon mistake
	     to specify as the bound to strncpy the length of the source).  */
	  if (warning_at (loc, OPT_Wstringop_overflow_,
			  "%qD specified bound %E equals source length",
			  fndecl, len))
	    suppress_warning (stmt, OPT_Wstringop_overflow_);
	}
    }

  if (!known_lower (stmt, src_len, len))
    return false;

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (known_lower (stmt, src_len, len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Build and append gimple statements to STMTS that would load the first
   character of the memory location identified by STR.  LOC is the location
   of the statement.  */

static tree
gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
{
  tree var;

  tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
  tree cst_uchar_ptr_node
    = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
  tree off0 = build_int_cst (cst_uchar_ptr_node, 0);

  tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
  gassign *stmt = gimple_build_assign (NULL_TREE, temp);
  var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);

  gimple_assign_set_lhs (stmt, var);
  gimple_seq_add_stmt_without_update (stmts, stmt);

  return var;
}
/* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI
   iterator.  */

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);

  tree bound_node = NULL_TREE;
  unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      bound_node = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (bound_node))
	bound = tree_to_uhwi (bound_node);
    }

  /* If the BOUND parameter is zero, return zero.  */
  if (bound == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* Initially set to the number of characters, including the terminating
     nul if each array has one.  LENx == strnlen (Sx, LENx) implies that
     the array Sx is not terminated by a nul.
     For nul-terminated strings then adjusted to their length so that
     LENx == NULPOSx holds.  */
  unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
  const char *p1 = getbyterep (str1, &len1);
  const char *p2 = getbyterep (str2, &len2);

  /* The position of the terminating nul character if one exists, otherwise
     a value greater than LENx.  */
  unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;

  if (p1)
    {
      size_t n = strnlen (p1, len1);
      if (n < len1)
	len1 = nulpos1 = n;
    }

  if (p2)
    {
      size_t n = strnlen (p2, len2);
      if (n < len2)
	len2 = nulpos2 = n;
    }

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	case BUILT_IN_STRCMP_EQ:
	  if (len1 != nulpos1 || len2 != nulpos2)
	    break;

	  r = strcmp (p1, p2);
	  known_result = true;
	  break;

	case BUILT_IN_STRNCMP:
	case BUILT_IN_STRNCMP_EQ:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;

	    /* Reduce the bound to be no more than the length
	       of the shorter of the two strings, or the sizes
	       of the unterminated arrays.  */
	    unsigned HOST_WIDE_INT n = bound;

	    if (len1 == nulpos1 && len1 < n)
	      n = len1 + 1;
	    if (len2 == nulpos2 && len2 < n)
	      n = len2 + 1;

	    if (MIN (nulpos1, nulpos2) + 1 < n)
	      break;

	    r = strncmp (p1, p2, n);
	    known_result = true;
	    break;
	  }
	/* The only handleable situation is where the strings are equal
	   (result 0), which is already handled by the operand_equal_p
	   case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;
	    r = strncmp (p1, p2, bound);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
    || fcode == BUILT_IN_STRCMP
    || fcode == BUILT_IN_STRCMP_EQ
    || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
  if (fcode == BUILT_IN_STRNCMP && bound == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is greater than the length of one constant string,
     and the other argument is also a nul-terminated string, replace
     strncmp with strcmp.  */
  if (fcode == BUILT_IN_STRNCMP
      && bound > 0 && bound < HOST_WIDE_INT_M1U
      && ((p2 && len2 < bound && len2 == nulpos2)
	  || (p1 && len1 < bound && len1 == nulpos1)))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
	return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
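
/* Examples of the folds above (BOUND constant where shown):

     strcmp ("a", "b")       =>  a negative constant (immediate result)
     strcmp (s, "")          =>  *(const unsigned char *) s
     strncmp (s1, s2, 1)     =>  *(const unsigned char *) s1
				 - *(const unsigned char *) s2
     strncmp (s1, "abc", 8)  =>  strcmp (s1, "abc")

   For fully known strings the returned constant is produced by
   build_cmp_result from the host strcmp/strncmp value.  */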
/* Fold a call to the memchr builtin pointed to by the GSI iterator.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = getbyterep (arg1, &string_length);

  if (p1)
    {
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  tree mem_size, offset_node;
	  byte_representation (arg1, &offset_node, &mem_size, NULL);
	  unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
					  ? 0 : tree_to_uhwi (offset_node);
	  /* MEM_SIZE is the size of the array the string literal
	     is stored in.  */
	  unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
	  gcc_checking_assert (string_length <= string_size);
	  if (length <= string_size)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (sizetype, offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
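
/* Illustration: with constant arguments

     memchr ("abcd", 'c', 4)  =>  "abcd" + 2
     memchr ("abcd", 'e', 4)  =>  a null pointer

   where the second fold also requires the bound not to extend past
   the underlying array.  */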
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Return false if no simplification was possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, SRK_STRLEN);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
}
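
/* The resulting transformations, depending on the known length of the
   string argument:

     fputs ("", f)     =>  (deleted)
     fputs ("x", f)    =>  fputc ('x', f)
     fputs ("xyz", f)  =>  fwrite ("xyz", 1, 3, f)
			   (when not optimizing for size)  */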
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size)
      && !known_lower (stmt, maxlen, size))
    {
      /* MAXLEN and LEN both cannot be proved to be less than SIZE, at
	 least try to optimize (void) __mempcpy_chk () into
	 (void) __memcpy_chk ()  */
      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
	{
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
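
/* Illustration: when the object-size check can be resolved, e.g. LEN
   is provably no larger than SIZE,

     __builtin___memcpy_chk (d, s, len, size)  =>  memcpy (d, s, len)

   and similarly for the pcpy/move/set variants.  An ignored
   __mempcpy_chk result additionally allows falling back to
   __memcpy_chk when the check cannot be resolved.  */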
/* Print a message in the dump file recording transformation of FROM to TO.  */

static void
dump_transformation (gcall *from, gcall *to)
{
  if (dump_enabled_p ())
    dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, from, "simplified %T to %T\n",
		     gimple_call_fn (from), gimple_call_fn (to));
}
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest)
	  && !warning_suppressed_p (stmt, OPT_Wrestrict))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (!known_lower (stmt, len, size, true)
	  && !known_lower (stmt, maxlen, size, true))
	{
	  if (fcode == BUILT_IN_STPCPY_CHK)
	    {
	      if (! ignore)
		return false;

	      /* If return value of __stpcpy_chk is ignored,
		 optimize into __strcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
	      if (!fn)
		return false;

	      gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }

	  if (! len || TREE_SIDE_EFFECTS (len))
	    return false;

	  /* If c_strlen returned something, but not provably less than size,
	     transform __strcpy_chk into __memcpy_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	  if (!fn)
	    return false;

	  gimple_seq stmts = NULL;
	  len = force_gimple_operand (len, &stmts, true, NULL_TREE);
	  len = gimple_convert (&stmts, loc, size_type_node, len);
	  len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
			      build_int_cst (size_type_node, 1));
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK && !ignore
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gcall *repl = gimple_build_call (fn, 2, dest, src);
  dump_transformation (stmt, repl);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  FCODE is the BUILT_IN_* code of the
   builtin.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
    {
      if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
	{
	  /* If return value of __stpncpy_chk is ignored,
	     optimize into __strncpy_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
	  if (fn)
	    {
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK && !ignore
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gcall *repl = gimple_build_call (fn, 3, dest, src, len);
  dump_transformation (stmt, repl);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the stpcpy builtin with arguments DEST and SRC.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* Set to non-null if SRC refers to an unterminated array.  */
  c_strlen_data data = { };
  /* The size of the unterminated array if SRC refers to one.  */
  tree size;
  /* True if the size is exact/constant, false if it's the lower bound
     of a range.  */
  bool exact;
  tree len = c_strlen (src, 1, &data, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    {
      data.decl = unterminated_array (src, &size, &exact);
      if (!data.decl)
	return false;
    }

  if (data.decl)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
	warn_string_no_nul (loc, stmt, "stpcpy", src, data.decl, size,
			    exact);
      suppress_warning (stmt, OPT_Wstringop_overread);
      return false;
    }

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  gimple_move_vops (repl, stmt);
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
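
/* A sketch of the fold above: with strlen (SRC) known to be N,

     p = stpcpy (dest, src);

   becomes the equivalent of

     memcpy (dest, src, N + 1);
     p = dest + N;

   i.e. the result points at the copied terminating nul.  */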
/* Fold a call to __{,v}snprintf_chk.  Return false if a normal call
   should be emitted rather than expanding the function inline.  FCODE
   is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
    return false;

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Fold a call to __{,v}sprintf_chk.  Return false if a normal call
   should be emitted rather than expanding the function inline.  FCODE
   is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		len = c_strlen (arg, 1);
	    }
	}
    }

  if (! integer_all_onesp (size) && !known_lower (stmt, len, size, true))
    return false;

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  tree orig = NULL_TREE;
  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  tree fmt = gimple_call_arg (stmt, 1);
  const char *fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  tree dest = gimple_call_arg (stmt, 0);

  if (!init_target_chars ())
    return false;

  tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
  if (!fn)
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      copy_warning (repl, stmt);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
							  strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* Don't fold calls with source arguments of invalid (nonpointer)
	 types.  */
      if (!POINTER_TYPE_P (TREE_TYPE (orig)))
	return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, SRK_STRLEN);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      copy_warning (repl, stmt);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
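
/* Examples of the folds above:

     sprintf (d, "hello")  =>  strcpy (d, "hello")   [result 5]
     sprintf (d, "%s", s)  =>  strcpy (d, s)
				[result strlen (s), when it is known]  */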
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      tree len = build_int_cstu (TREE_TYPE (destsize), strlen (fmt_str));

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (!known_lower (stmt, len, destsize, true))
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (lhs,
				      fold_convert (TREE_TYPE (lhs), len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (!known_lower (stmt, orig_len, destsize, true))
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
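
/* Examples of the folds above, valid only when the length copied is
   provably less than DESTSIZE:

     snprintf (d, 8, "abc")    =>  strcpy (d, "abc")   [result 3]
     snprintf (d, 8, "%s", s)  =>  strcpy (d, s)  when strlen (s) < 8  */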
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return true if simplification was possible, otherwise false.  FCODE is
   the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return true if simplification was possible, otherwise false.  FCODE is
   the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len)
	    {
	      char *newstr;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newstr = xstrdup (str);
	      newstr[len - 1] = '\0';
	      newarg = build_string_literal (len, newstr);
	      free (newstr);
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
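
/* Examples of the folds above:

     printf ("")         =>  (deleted)
     printf ("x")        =>  putchar ('x')
     printf ("abc\n")    =>  puts ("abc")
     printf ("%s\n", s)  =>  puts (s)
     printf ("%c", c)    =>  putchar (c)  */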
/* Fold a call to __builtin_strlen with known length LEN.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);

  wide_int minlen;
  wide_int maxlen;

  c_strlen_data lendata = { };
  if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
      && !lendata.decl
      && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
      && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
    {
      /* The range of lengths refers to either a single constant
	 string or to the longest and shortest constant string
	 referenced by the argument of the strlen() call, or to
	 the strings that can possibly be stored in the arrays
	 the argument refers to.  */
      minlen = wi::to_wide (lendata.minlen);
      maxlen = wi::to_wide (lendata.maxlen);
    }
  else
    {
      unsigned prec = TYPE_PRECISION (sizetype);

      minlen = wi::shwi (0, prec);
      maxlen = wi::to_wide (max_object_size (), prec) - 2;
    }

  if (minlen == maxlen)
    {
      /* Fold the strlen call to a constant.  */
      tree type = TREE_TYPE (lendata.minlen);
      tree len = force_gimple_operand_gsi (gsi,
					   wide_int_to_tree (type, minlen),
					   true, NULL, true, GSI_SAME_STMT);
      replace_call_with_value (gsi, len);
      return true;
    }

  /* Set the strlen() range to [0, MAXLEN].  */
  if (tree lhs = gimple_call_lhs (stmt))
    set_strlen_range (lhs, minlen, maxlen);

  return false;
}
/* Fold a call to __builtin_acc_on_device.  */

static bool
gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
{
  /* Defer folding until we know which compiler we're in.  */
  if (symtab->state != EXPANSION)
    return false;

  unsigned val_host = GOMP_DEVICE_HOST;
  unsigned val_dev = GOMP_DEVICE_NONE;

#ifdef ACCEL_COMPILER
  val_host = GOMP_DEVICE_NOT_HOST;
  val_dev = ACCEL_COMPILER_acc_device;
#endif

  location_t loc = gimple_location (gsi_stmt (*gsi));

  tree host_eq = make_ssa_name (boolean_type_node);
  gimple *host_ass = gimple_build_assign
    (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
  gimple_set_location (host_ass, loc);
  gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);

  tree dev_eq = make_ssa_name (boolean_type_node);
  gimple *dev_ass = gimple_build_assign
    (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
  gimple_set_location (dev_ass, loc);
  gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);

  tree result = make_ssa_name (boolean_type_node);
  gimple *result_ass = gimple_build_assign
    (result, BIT_IOR_EXPR, host_eq, dev_eq);
  gimple_set_location (result_ass, loc);
  gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);

  replace_call_with_value (gsi, result);

  return true;
}

/* Fold realloc (0, n) -> malloc (n).  */

static bool
gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);
  tree size = gimple_call_arg (stmt, 1);

  if (operand_equal_p (arg, null_pointer_node, 0))
    {
      tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
      if (fn_malloc)
	{
	  gcall *repl = gimple_build_call (fn_malloc, 1, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }
  return false;
}

/* Number of bytes into which any type but aggregate or vector types
   should fit.  */
static constexpr size_t clear_padding_unit
  = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
/* Buffer size on which __builtin_clear_padding folding code works.  */
static const size_t clear_padding_buf_size = 32 * clear_padding_unit;

/* Data passed through __builtin_clear_padding folding.  */
struct clear_padding_struct {
  location_t loc;
  /* 0 during __builtin_clear_padding folding, nonzero during
     clear_type_padding_in_mask.  In that case, instead of clearing the
     non-padding bits in union_ptr array clear the padding bits in there.  */
  bool clear_in_mask;
  tree base;
  tree alias_type;
  gimple_stmt_iterator *gsi;
  /* Alignment of buf->base + 0.  */
  unsigned align;
  /* Offset from buf->base.  Should be always a multiple of UNITS_PER_WORD.  */
  HOST_WIDE_INT off;
  /* Number of padding bytes before buf->off that don't have padding clear
     code emitted yet.  */
  HOST_WIDE_INT padding_bytes;
  /* The size of the whole object.  Never emit code to touch
     buf->base + buf->sz or following bytes.  */
  HOST_WIDE_INT sz;
  /* Number of bytes recorded in buf->buf.  */
  size_t size;
  /* When inside union, instead of emitting code we and bits inside of
     the union_ptr array.  */
  unsigned char *union_ptr;
  /* Set bits mean padding bits that need to be cleared by the builtin.  */
  unsigned char buf[clear_padding_buf_size + clear_padding_unit];
};
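
/* Buffer semantics by example (assuming a target with 4-byte int
   alignment): for  struct S { char c; int i; };  the folding records
     buf[0]    = 0x00   -- data byte of c
     buf[1..3] = 0xff   -- padding between c and i
     buf[4..7] = 0x00   -- data bytes of i
   and clear_padding_flush below turns the set bytes into stores
   zeroing bytes 1 to 3 of the object.  */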

/* Emit code to clear padding requested in BUF->buf - set bits
   in there stand for padding that should be cleared.  FULL is true
   if everything from the buffer should be flushed, otherwise
   it can leave up to 2 * clear_padding_unit bytes for further
   processing.  */

static void
clear_padding_flush (clear_padding_struct *buf, bool full)
{
  gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
  if (!full && buf->size < 2 * clear_padding_unit)
    return;
  gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
  size_t end = buf->size;
  if (!full)
    end = ((end - clear_padding_unit - 1) / clear_padding_unit
	   * clear_padding_unit);
  size_t padding_bytes = buf->padding_bytes;
  if (buf->union_ptr)
    {
      if (buf->clear_in_mask)
	{
	  /* During clear_type_padding_in_mask, clear the padding
	     bits set in buf->buf in the buf->union_ptr mask.  */
	  for (size_t i = 0; i < end; i++)
	    {
	      if (buf->buf[i] == (unsigned char) ~0)
		padding_bytes++;
	      else
		{
		  memset (&buf->union_ptr[buf->off + i - padding_bytes],
			  0, padding_bytes);
		  padding_bytes = 0;
		  buf->union_ptr[buf->off + i] &= ~buf->buf[i];
		}
	    }
	  if (full)
	    {
	      memset (&buf->union_ptr[buf->off + end - padding_bytes],
		      0, padding_bytes);
	      buf->off = 0;
	      buf->size = 0;
	      buf->padding_bytes = 0;
	    }
	  else
	    {
	      memmove (buf->buf, buf->buf + end, buf->size - end);
	      buf->off += end;
	      buf->size -= end;
	      buf->padding_bytes = padding_bytes;
	    }
	  return;
	}
      /* Inside of a union, instead of emitting any code, instead
	 clear all bits in the union_ptr buffer that are clear
	 in buf.  Whole padding bytes don't clear anything.  */
      for (size_t i = 0; i < end; i++)
	{
	  if (buf->buf[i] == (unsigned char) ~0)
	    padding_bytes++;
	  else
	    {
	      padding_bytes = 0;
	      buf->union_ptr[buf->off + i] &= buf->buf[i];
	    }
	}
      if (full)
	{
	  buf->off = 0;
	  buf->size = 0;
	  buf->padding_bytes = 0;
	}
      else
	{
	  memmove (buf->buf, buf->buf + end, buf->size - end);
	  buf->off += end;
	  buf->size -= end;
	  buf->padding_bytes = padding_bytes;
	}
      return;
    }
  size_t wordsize = UNITS_PER_WORD;
  for (size_t i = 0; i < end; i += wordsize)
    {
      size_t nonzero_first = wordsize;
      size_t nonzero_last = 0;
      size_t zero_first = wordsize;
      size_t zero_last = 0;
      bool all_ones = true, bytes_only = true;
      if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
	  > (unsigned HOST_WIDE_INT) buf->sz)
	{
	  gcc_assert (wordsize > 1);
	  wordsize /= 2;
	  i -= wordsize;
	  continue;
	}
      for (size_t j = i; j < i + wordsize && j < end; j++)
	{
	  if (buf->buf[j])
	    {
	      if (nonzero_first == wordsize)
		{
		  nonzero_first = j - i;
		  nonzero_last = j - i;
		}
	      if (nonzero_last != j - i)
		all_ones = false;
	      nonzero_last = j + 1 - i;
	    }
	  else
	    {
	      if (zero_first == wordsize)
		zero_first = j - i;
	      zero_last = j + 1 - i;
	    }
	  if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
	    {
	      all_ones = false;
	      bytes_only = false;
	    }
	}
      size_t padding_end = i;
      if (padding_bytes)
	{
	  if (nonzero_first == 0
	      && nonzero_last == wordsize
	      && all_ones)
	    {
	      /* All bits are padding and we had some padding
		 before too.  Just extend it.  */
	      padding_bytes += wordsize;
	      continue;
	    }
	  if (all_ones && nonzero_first == 0)
	    {
	      padding_bytes += nonzero_last;
	      padding_end += nonzero_last;
	      nonzero_first = wordsize;
	      nonzero_last = 0;
	    }
	  else if (bytes_only && nonzero_first == 0)
	    {
	      gcc_assert (zero_first && zero_first != wordsize);
	      padding_bytes += zero_first;
	      padding_end += zero_first;
	    }
	  tree atype, src;
	  if (padding_bytes == 1)
	    {
	      atype = char_type_node;
	      src = build_zero_cst (char_type_node);
	    }
	  else
	    {
	      atype = build_array_type_nelts (char_type_node, padding_bytes);
	      src = build_constructor (atype, NULL);
	    }
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + padding_end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  padding_bytes = 0;
	  buf->padding_bytes = 0;
	}
      if (nonzero_first == wordsize)
	/* All bits in a word are 0, there are no padding bits.  */
	continue;
      if (all_ones && nonzero_last == wordsize)
	{
	  /* All bits between nonzero_first and end of word are padding
	     bits, start counting padding_bytes.  */
	  padding_bytes = nonzero_last - nonzero_first;
	  continue;
	}
      if (bytes_only)
	{
	  /* If bitfields aren't involved in this word, prefer storing
	     individual bytes or groups of them over performing a RMW
	     operation on the whole word.  */
	  gcc_assert (i + zero_last <= end);
	  for (size_t j = padding_end; j < i + zero_last; j++)
	    if (buf->buf[j])
	      {
		size_t k;
		for (k = j; k < i + zero_last; k++)
		  if (buf->buf[k] == 0)
		    break;
		HOST_WIDE_INT off = buf->off + j;
		tree atype, src;
		if (k - j == 1)
		  {
		    atype = char_type_node;
		    src = build_zero_cst (char_type_node);
		  }
		else
		  {
		    atype = build_array_type_nelts (char_type_node, k - j);
		    src = build_constructor (atype, NULL);
		  }
		tree dst = build2_loc (buf->loc, MEM_REF, atype,
				       buf->base,
				       build_int_cst (buf->alias_type, off));
		gimple *g = gimple_build_assign (dst, src);
		gimple_set_location (g, buf->loc);
		gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		j = k;
	      }
	  if (nonzero_last == wordsize)
	    padding_bytes = nonzero_last - zero_last;
	  continue;
	}
      for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
	{
	  if (nonzero_last - nonzero_first <= eltsz
	      && ((nonzero_first & ~(eltsz - 1))
		  == ((nonzero_last - 1) & ~(eltsz - 1))))
	    {
	      tree type;
	      if (eltsz == 1)
		type = char_type_node;
	      else
		type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
						       0);
	      size_t start = nonzero_first & ~(eltsz - 1);
	      HOST_WIDE_INT off = buf->off + i + start;
	      tree atype = type;
	      if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
		atype = build_aligned_type (type, buf->align);
	      tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				     build_int_cst (buf->alias_type, off));
	      tree src;
	      gimple *g;
	      if (all_ones
		  && nonzero_first == start
		  && nonzero_last == start + eltsz)
		src = build_zero_cst (type);
	      else
		{
		  src = make_ssa_name (type);
		  g = gimple_build_assign (src, unshare_expr (dst));
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  tree mask = native_interpret_expr (type,
						     buf->buf + i + start,
						     eltsz);
		  gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
		  mask = fold_build1 (BIT_NOT_EXPR, type, mask);
		  tree src_masked = make_ssa_name (type);
		  g = gimple_build_assign (src_masked, BIT_AND_EXPR,
					   src, mask);
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  src = src_masked;
		}
	      g = gimple_build_assign (dst, src);
	      gimple_set_location (g, buf->loc);
	      gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	      break;
	    }
	}
    }
  if (full)
    {
      if (padding_bytes)
	{
	  tree atype, src;
	  if (padding_bytes == 1)
	    {
	      atype = char_type_node;
	      src = build_zero_cst (char_type_node);
	    }
	  else
	    {
	      atype = build_array_type_nelts (char_type_node, padding_bytes);
	      src = build_constructor (atype, NULL);
	    }
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	}
      size_t end_rem = end % UNITS_PER_WORD;
      buf->off += end - end_rem;
      buf->size = end_rem;
      memset (buf->buf, 0, buf->size);
      buf->padding_bytes = 0;
    }
  else
    {
      memmove (buf->buf, buf->buf + end, buf->size - end);
      buf->off += end;
      buf->size -= end;
      buf->padding_bytes = padding_bytes;
    }
}
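
/* Flush examples (assuming a 64-bit word): a word whose recorded bytes
   are  00 ff ff ff ff ff ff 00  contains no partial bytes, so the
   bytes_only path above emits a single 6-byte char-array store of
   zeros at offset 1; a word containing a partial byte such as 0f
   (a bit-field sharing a byte with padding) takes the read-modify-write
   path instead: load the word, AND it with the complement of the
   recorded mask, store it back.  */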

/* Append PADDING_BYTES padding bytes.  */

static void
clear_padding_add_padding (clear_padding_struct *buf,
			   HOST_WIDE_INT padding_bytes)
{
  if (padding_bytes == 0)
    return;
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    clear_padding_flush (buf, false);
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    {
      memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
      padding_bytes -= clear_padding_buf_size - buf->size;
      buf->size = clear_padding_buf_size;
      clear_padding_flush (buf, false);
      gcc_assert (buf->padding_bytes);
      /* At this point buf->buf[0] through buf->buf[buf->size - 1]
	 is guaranteed to be all ones.  */
      padding_bytes += buf->size;
      buf->size = padding_bytes % UNITS_PER_WORD;
      memset (buf->buf, ~0, buf->size);
      buf->off += padding_bytes - buf->size;
      buf->padding_bytes += padding_bytes - buf->size;
    }
  else
    {
      memset (buf->buf + buf->size, ~0, padding_bytes);
      buf->size += padding_bytes;
    }
}

static void clear_padding_type (clear_padding_struct *, tree,
				HOST_WIDE_INT, bool);

/* Clear padding bits of union type TYPE.  */

static void
clear_padding_union (clear_padding_struct *buf, tree type,
		     HOST_WIDE_INT sz, bool for_auto_init)
{
  clear_padding_struct *union_buf;
  HOST_WIDE_INT start_off = 0, next_off = 0;
  size_t start_size = 0;
  if (buf->union_ptr)
    {
      start_off = buf->off + buf->size;
      next_off = start_off + sz;
      start_size = start_off % UNITS_PER_WORD;
      start_off -= start_size;
      clear_padding_flush (buf, true);
      union_buf = buf;
    }
  else
    {
      if (sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      union_buf = XALLOCA (clear_padding_struct);
      union_buf->loc = buf->loc;
      union_buf->clear_in_mask = buf->clear_in_mask;
      union_buf->base = NULL_TREE;
      union_buf->alias_type = NULL_TREE;
      union_buf->gsi = NULL;
      union_buf->align = 0;
      union_buf->off = 0;
      union_buf->padding_bytes = 0;
      union_buf->sz = sz;
      union_buf->size = 0;
      if (sz + buf->size <= clear_padding_buf_size)
	union_buf->union_ptr = buf->buf + buf->size;
      else
	union_buf->union_ptr = XNEWVEC (unsigned char, sz);
      memset (union_buf->union_ptr, ~0, sz);
    }

  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
      {
	if (DECL_SIZE_UNIT (field) == NULL_TREE)
	  {
	    if (TREE_TYPE (field) == error_mark_node)
	      continue;
	    gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
			&& !COMPLETE_TYPE_P (TREE_TYPE (field)));
	    if (!buf->clear_in_mask && !for_auto_init)
	      error_at (buf->loc, "flexible array member %qD does not have "
				  "well defined padding bits for %qs",
			field, "__builtin_clear_padding");
	    continue;
	  }
	HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
	gcc_assert (union_buf->size == 0);
	union_buf->off = start_off;
	union_buf->size = start_size;
	memset (union_buf->buf, ~0, start_size);
	clear_padding_type (union_buf, TREE_TYPE (field), fldsz, for_auto_init);
	clear_padding_add_padding (union_buf, sz - fldsz);
	clear_padding_flush (union_buf, true);
      }

  if (buf == union_buf)
    {
      buf->off = next_off;
      buf->size = next_off % UNITS_PER_WORD;
      buf->off -= buf->size;
      memset (buf->buf, ~0, buf->size);
    }
  else if (sz + buf->size <= clear_padding_buf_size)
    buf->size += sz;
  else
    {
      unsigned char *union_ptr = union_buf->union_ptr;
      while (sz)
	{
	  clear_padding_flush (buf, false);
	  HOST_WIDE_INT this_sz
	    = MIN ((unsigned HOST_WIDE_INT) sz,
		   clear_padding_buf_size - buf->size);
	  memcpy (buf->buf + buf->size, union_ptr, this_sz);
	  buf->size += this_sz;
	  union_ptr += this_sz;
	  sz -= this_sz;
	}
      XDELETE (union_buf->union_ptr);
    }
}
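
/* Union example:  union U { char c; int i; };  bytes 1-3 are padding
   when U holds C but data when it holds I.  Since each member's mask
   is ANDed into union_ptr above, a bit stays marked as padding only
   if it is padding in every member, so nothing is cleared for U.  */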

/* The only known floating point formats with padding bits are the
   IEEE extended ones.  */

static bool
clear_padding_real_needs_padding_p (tree type)
{
  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
  return (fmt->b == 2
	  && fmt->signbit_ro == fmt->signbit_rw
	  && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
}
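
/* E.g. x86 extended precision: an 80-bit value (sign bit at position
   79) stored in 12 or 16 bytes, leaving 2 or 6 trailing padding bytes;
   the m68k extended format (sign bit at 95) has a 16-bit unused gap
   inside its 96-bit storage.  Other binary formats use every bit.  */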

/* Return true if TYPE might contain any padding bits.  */

bool
clear_padding_type_may_have_padding_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
      return true;
    case VECTOR_TYPE:
    case COMPLEX_TYPE:
    case ARRAY_TYPE:
      return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
    case REAL_TYPE:
      return clear_padding_real_needs_padding_p (type);
    default:
      return false;
    }
}

/* Emit a runtime loop:
   for (; buf.base != end; buf.base += sz)
     __builtin_clear_padding (buf.base);  */

static void
clear_padding_emit_loop (clear_padding_struct *buf, tree type,
			 tree end, bool for_auto_init)
{
  tree l1 = create_artificial_label (buf->loc);
  tree l2 = create_artificial_label (buf->loc);
  tree l3 = create_artificial_label (buf->loc);
  gimple *g = gimple_build_goto (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l1);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  clear_padding_type (buf, type, buf->sz, for_auto_init);
  clear_padding_flush (buf, true);
  g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
			   size_int (buf->sz));
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
}

/* Clear padding bits for TYPE.  Called recursively from
   gimple_fold_builtin_clear_padding.  If FOR_AUTO_INIT is true,
   the __builtin_clear_padding is not called by the end user,
   instead, it's inserted by the compiler to initialize the
   paddings of automatic variable.  Therefore, we should not
   emit the error messages for flexible array members to confuse
   the end user.  */

static void
clear_padding_type (clear_padding_struct *buf, tree type,
		    HOST_WIDE_INT sz, bool for_auto_init)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      HOST_WIDE_INT cur_pos;
      cur_pos = 0;
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
	  {
	    tree ftype = TREE_TYPE (field);
	    if (DECL_BIT_FIELD (field))
	      {
		HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
		if (fldsz == 0)
		  continue;
		HOST_WIDE_INT pos = int_byte_position (field);
		if (pos >= sz)
		  continue;
		HOST_WIDE_INT bpos
		  = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
		bpos %= BITS_PER_UNIT;
		HOST_WIDE_INT end
		  = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
		if (pos + end > cur_pos)
		  {
		    clear_padding_add_padding (buf, pos + end - cur_pos);
		    cur_pos = pos + end;
		  }
		gcc_assert (cur_pos > pos
			    && ((unsigned HOST_WIDE_INT) buf->size
				>= (unsigned HOST_WIDE_INT) cur_pos - pos));
		unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
		if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
		  sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
			    " in %qs", "__builtin_clear_padding");
		else if (BYTES_BIG_ENDIAN)
		  {
		    /* Big endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1)
			      << (BITS_PER_UNIT - bpos - fldsz));
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
		      }
		  }
		else
		  {
		    /* Little endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1) << bpos);
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ~((1 << fldsz) - 1);
		      }
		  }
	      }
	    else if (DECL_SIZE_UNIT (field) == NULL_TREE)
	      {
		if (ftype == error_mark_node)
		  continue;
		gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
			    && !COMPLETE_TYPE_P (ftype));
		if (!buf->clear_in_mask && !for_auto_init)
		  error_at (buf->loc, "flexible array member %qD does not "
				      "have well defined padding bits for %qs",
			    field, "__builtin_clear_padding");
	      }
	    else if (is_empty_type (TREE_TYPE (field)))
	      continue;
	    else
	      {
		HOST_WIDE_INT pos = int_byte_position (field);
		if (pos >= sz)
		  continue;
		HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
		gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
		clear_padding_add_padding (buf, pos - cur_pos);
		cur_pos = pos;
		clear_padding_type (buf, TREE_TYPE (field),
				    fldsz, for_auto_init);
		cur_pos += fldsz;
	      }
	  }
      gcc_assert (sz >= cur_pos);
      clear_padding_add_padding (buf, sz - cur_pos);
      break;
    case ARRAY_TYPE:
      HOST_WIDE_INT nelts, fldsz;
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      if (fldsz == 0)
	break;
      nelts = sz / fldsz;
      if (nelts > 1
	  && sz > 8 * UNITS_PER_WORD
	  && buf->union_ptr == NULL
	  && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
	{
	  /* For sufficiently large array of more than one elements,
	     emit a runtime loop to keep code size manageable.  */
	  tree base = buf->base;
	  unsigned int prev_align = buf->align;
	  HOST_WIDE_INT off = buf->off + buf->size;
	  HOST_WIDE_INT prev_sz = buf->sz;
	  clear_padding_flush (buf, true);
	  tree elttype = TREE_TYPE (type);
	  buf->base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf->base));
	  gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
					   base, size_int (off));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
				   size_int (sz));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  buf->sz = fldsz;
	  buf->align = TYPE_ALIGN (elttype);
	  buf->off = 0;
	  buf->size = 0;
	  clear_padding_emit_loop (buf, elttype, end, for_auto_init);
	  buf->base = base;
	  buf->sz = prev_sz;
	  buf->align = prev_align;
	  buf->size = off % UNITS_PER_WORD;
	  buf->off = off - buf->size;
	  memset (buf->buf, 0, buf->size);
	  break;
	}
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case UNION_TYPE:
      clear_padding_union (buf, type, sz, for_auto_init);
      break;
    case REAL_TYPE:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      if (clear_padding_real_needs_padding_p (type))
	{
	  /* Use native_interpret_expr + native_encode_expr to figure out
	     which bits are padding.  */
	  memset (buf->buf + buf->size, ~0, sz);
	  tree cst = native_interpret_expr (type, buf->buf + buf->size, sz);
	  gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
	  int len = native_encode_expr (cst, buf->buf + buf->size, sz);
	  gcc_assert (len > 0 && (size_t) len == (size_t) sz);
	  for (size_t i = 0; i < (size_t) sz; i++)
	    buf->buf[buf->size + i] ^= ~0;
	}
      else
	memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    case COMPLEX_TYPE:
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case VECTOR_TYPE:
      nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case NULLPTR_TYPE:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, ~0, sz);
      buf->size += sz;
      break;
    default:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    }
}

/* Clear padding bits of TYPE in MASK.  */

void
clear_type_padding_in_mask (tree type, unsigned char *mask)
{
  clear_padding_struct buf;
  buf.loc = UNKNOWN_LOCATION;
  buf.clear_in_mask = true;
  buf.base = NULL_TREE;
  buf.alias_type = NULL_TREE;
  buf.gsi = NULL;
  buf.align = 0;
  buf.off = 0;
  buf.padding_bytes = 0;
  buf.sz = int_size_in_bytes (type);
  buf.size = 0;
  buf.union_ptr = mask;
  clear_padding_type (&buf, type, buf.sz, false);
  clear_padding_flush (&buf, true);
}

/* Fold __builtin_clear_padding builtin.  */

static bool
gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gcc_assert (gimple_call_num_args (stmt) == 3);
  tree ptr = gimple_call_arg (stmt, 0);
  tree typearg = gimple_call_arg (stmt, 1);
  /* The 3rd argument of __builtin_clear_padding is to distinguish whether
     this call is made by the user or by the compiler for automatic variable
     initialization.  */
  bool for_auto_init = (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 2));
  tree type = TREE_TYPE (TREE_TYPE (typearg));
  location_t loc = gimple_location (stmt);
  clear_padding_struct buf;
  gimple_stmt_iterator gsiprev = *gsi;
  /* This should be folded during the lower pass.  */
  gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
  gcc_assert (COMPLETE_TYPE_P (type));
  gsi_prev (&gsiprev);

  buf.loc = loc;
  buf.clear_in_mask = false;
  buf.base = ptr;
  buf.alias_type = NULL_TREE;
  buf.gsi = gsi;
  buf.align = get_pointer_alignment (ptr);
  unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
  buf.align = MAX (buf.align, talign);
  buf.off = 0;
  buf.padding_bytes = 0;
  buf.size = 0;
  buf.sz = int_size_in_bytes (type);
  buf.union_ptr = NULL;
  if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
    sorry_at (loc, "%s not supported for variable length aggregates",
	      "__builtin_clear_padding");
  /* The implementation currently assumes 8-bit host and target
     chars which is the case for all currently supported targets
     and hosts and is required e.g. for native_{encode,interpret}* APIs.  */
  else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    sorry_at (loc, "%s not supported on this target",
	      "__builtin_clear_padding");
  else if (!clear_padding_type_may_have_padding_p (type))
    ;
  else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
    {
      tree sz = TYPE_SIZE_UNIT (type);
      tree elttype = type;
      /* Only supports C/C++ VLAs and flattens all the VLA levels.  */
      while (TREE_CODE (elttype) == ARRAY_TYPE
	     && int_size_in_bytes (elttype) < 0)
	elttype = TREE_TYPE (elttype);
      HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
      gcc_assert (eltsz >= 0);
      if (eltsz)
	{
	  buf.base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf.base));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  buf.sz = eltsz;
	  buf.align = TYPE_ALIGN (elttype);
	  buf.alias_type = build_pointer_type (elttype);
	  clear_padding_emit_loop (&buf, elttype, end, for_auto_init);
	}
    }
  else
    {
      if (!is_gimple_mem_ref_addr (buf.base))
	{
	  buf.base = make_ssa_name (TREE_TYPE (ptr));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	}
      buf.alias_type = build_pointer_type (type);
      clear_padding_type (&buf, type, buf.sz, for_auto_init);
      clear_padding_flush (&buf, true);
    }

  gimple_stmt_iterator gsiprev2 = *gsi;
  gsi_prev (&gsiprev2);
  if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
    gsi_replace (gsi, gimple_build_nop (), true);
  else
    {
      gsi_remove (gsi, true);
      *gsi = gsiprev2;
    }
  return true;
}
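
/* End-to-end example (assuming 4-byte int alignment):
     struct S { char c; int i; } s;
     __builtin_clear_padding (&s);
   lowers to a store zeroing bytes 1-3 of S, leaving C and I intact;
   for a VLA the runtime loop emitted above clears one element per
   iteration.  */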

/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), fcode);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    case BUILT_IN_CLEAR_PADDING:
      return gimple_fold_builtin_clear_padding (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}

/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
   function calls to constants, where possible.  */

static tree
fold_internal_goacc_dim (const gimple *call)
{
  int axis = oacc_get_ifn_dim_arg (call);
  int size = oacc_get_fn_dim_size (current_function_decl, axis);
  tree result = NULL_TREE;
  tree type = TREE_TYPE (gimple_call_lhs (call));

  switch (gimple_call_internal_fn (call))
    {
    case IFN_GOACC_DIM_POS:
      /* If the size is 1, we know the answer.  */
      if (size == 1)
	result = build_int_cst (type, 0);
      break;
    case IFN_GOACC_DIM_SIZE:
      /* If the size is not dynamic, we know the answer.  */
      if (size)
	result = build_int_cst (type, size);
      break;
    default:
      break;
    }

  return result;
}

/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
   for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
   &var where var is only addressable because of such calls.  */

static bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || maybe_ne (TYPE_PRECISION (etype),
		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
    return false;

  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
    return false;

  return true;
}

/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  gimple_move_vops (g, stmt);
  tree oldlhs = gimple_call_lhs (stmt);
  if (stmt_can_throw_internal (cfun, stmt))
    {
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  *gsi = gsiret;
}

/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
   doesn't fit into TYPE.  The test for overflow should be regardless of
   -fwrapv, and even for unsigned types.  */

bool
arith_overflowed_p (enum tree_code code, const_tree type,
		    const_tree arg0, const_tree arg1)
{
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
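
/* Worked example: for a TYPE of unsigned char (precision 8),
   200 + 100 = 300 needs 9 bits, so the function returns true;
   for 100 - 200 the infinite-precision result -100 is negative,
   which likewise overflows an UNSIGNED type.  */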

/* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
   for the memory it references, otherwise return null.  VECTYPE is the
   type of the memory vector.  */

static tree
gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
{
  tree ptr = gimple_call_arg (call, 0);
  tree alias_align = gimple_call_arg (call, 1);
  tree mask = gimple_call_arg (call, 2);
  if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
    return NULL_TREE;

  unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
  if (TYPE_ALIGN (vectype) != align)
    vectype = build_aligned_type (vectype, align);
  tree offset = build_zero_cst (TREE_TYPE (alias_align));
  return fold_build2 (MEM_REF, vectype, ptr, offset);
}

/* Try to fold IFN_MASK_LOAD call CALL.  Return true on success.  */

static bool
gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
{
  tree lhs = gimple_call_lhs (call);
  if (!lhs)
    return false;

  if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
    {
      gassign *new_stmt = gimple_build_assign (lhs, rhs);
      gimple_set_location (new_stmt, gimple_location (call));
      gimple_move_vops (new_stmt, call);
      gsi_replace (gsi, new_stmt, false);
      return true;
    }
  return false;
}
5399 gimple_fold_mask_store (gimple_stmt_iterator
*gsi
, gcall
*call
)
5401 tree rhs
= gimple_call_arg (call
, 3);
5402 if (tree lhs
= gimple_fold_mask_load_store_mem_ref (call
, TREE_TYPE (rhs
)))
5404 gassign
*new_stmt
= gimple_build_assign (lhs
, rhs
);
5405 gimple_set_location (new_stmt
, gimple_location (call
));
5406 gimple_move_vops (new_stmt
, call
);
5407 gsi_replace (gsi
, new_stmt
, false);
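
/* Example: with an all-ones mask the access is unconditional, so
     .MASK_STORE (p, align, { -1, -1, -1, -1 }, v)
   becomes the plain vector store  MEM <vectype> [p] = v,  and an
   IFN_MASK_LOAD correspondingly becomes a plain vector load.  */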

/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  /* If the call had a SSA name as lhs morph that into
		     an uninitialized value.  */
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
		      set_ssa_default_def (cfun, var, lhs);
		    }
		  gimple_move_vops (new_stmt, stmt);
		  gsi_replace (gsi, new_stmt, false);
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2),
					NULL_TREE);
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  {
	    tree offset = gimple_call_arg (stmt, 1);
	    tree objsize = gimple_call_arg (stmt, 2);
	    if (integer_all_onesp (objsize)
		|| (TREE_CODE (offset) == INTEGER_CST
		    && TREE_CODE (objsize) == INTEGER_CST
		    && tree_int_cst_le (offset, objsize)))
	      {
		replace_call_with_value (gsi, NULL_TREE);
		return true;
	      }
	  }
	  break;
	case IFN_UBSAN_PTR:
	  if (integer_zerop (gimple_call_arg (stmt, 1)))
	    {
	      replace_call_with_value (gsi, NULL_TREE);
	      return true;
	    }
	  break;
	case IFN_UBSAN_BOUNDS:
	  {
	    tree index = gimple_call_arg (stmt, 1);
	    tree bound = gimple_call_arg (stmt, 2);
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_CODE (bound) == INTEGER_CST)
	      {
		index = fold_convert (TREE_TYPE (bound), index);
		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_le (index, bound))
		  {
		    replace_call_with_value (gsi, NULL_TREE);
		    return true;
		  }
	      }
	  }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MASK_LOAD:
	  changed |= gimple_fold_mask_load (gsi, stmt);
	  break;
	case IFN_MASK_STORE:
	  changed |= gimple_fold_mask_store (gsi, stmt);
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
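
/* Examples of the internal-fn folding above:
     r = .ADD_OVERFLOW (2, 3)  ->  __complex__ (5, 0)
				   (value, overflow flag)
     .UBSAN_CHECK_MUL (x, 1)   ->  x   (no overflow possible)  */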

/* Return true whether NAME has a use on STMT.  */

static bool
has_use_on_stmt (tree name, gimple *stmt)
{
  imm_use_iterator iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, iter, name)
    if (USE_STMT (use_p) == stmt)
      return true;
  return false;
}

/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (res_op->code.is_tree_code ());
      auto code = tree_code (res_op->code);
      if (TREE_CODE_CLASS (code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, code, ops[0], ops[1]);
      else if (code == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (code == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      auto code = tree_code (res_op->code);
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == combined_fn (res_op->code))
    {
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
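
/* E.g. a GIMPLE_COND whose predicate simplifies to the constant 0,
   such as  if (x_1 != x_1)  for integral x_1, is rewritten in place
   via gimple_cond_make_false above.  */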

/* Canonicalize MEM_REFs invariant address operand after propagation.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
{
  bool res = false;
  tree *orig_t = t;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					(TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    {
	      if (is_debug)
		return false;
	      gcc_unreachable ();
	    }

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}

      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  else if (TREE_CODE (*orig_t) == ADDR_EXPR
	   && TREE_CODE (*t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
    {
      tree base;
      poly_int64 coffset;
      base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
					    &coffset);
      if (base)
	{
	  gcc_assert (TREE_CODE (base) == MEM_REF);
	  poly_int64 moffset;
	  if (mem_ref_offset (base).to_shwi (&moffset))
	    {
	      coffset += moffset;
	      if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
		{
		  coffset += moffset;
		  *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
		  return true;
		}
	    }
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  if (TREE_CODE (*orig_t) == ADDR_EXPR)
	    recompute_tree_invariant_for_addr_expr (*orig_t);
	  res = true;
	}
    }

  return res;
}
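
/* Examples of the canonicalizations above:
     MEM[&a.b, 0]  ->  MEM[&a, offsetof (b)]   (rebased to the decl)
     MEM[&x, 0]    ->  x    (same type, alignment and volatility)
     &MEM[(char *)16, 8]  ->  24   (constant address arithmetic)  */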
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
  unsigned i;

  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	  /* Canonicalize &MEM[ssa_n, CST] to ssa_n p+ CST.
	     This cannot be done in maybe_canonicalize_mem_ref_addr
	     as the gimple now has two operands rather than one.
	     The same reason why this can't be done in
	     maybe_canonicalize_mem_ref_addr is the same reason why
	     this can't be done inplace.  */
	  if (!inplace && TREE_CODE (*rhs) == ADDR_EXPR)
	    {
	      tree inner = TREE_OPERAND (*rhs, 0);
	      if (TREE_CODE (inner) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (inner, 0)) == SSA_NAME
		  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
		{
		  tree ptr = TREE_OPERAND (inner, 0);
		  tree addon = TREE_OPERAND (inner, 1);
		  addon = fold_convert (sizetype, addon);
		  gimple_assign_set_rhs_with_ops (gsi, POINTER_PLUS_EXPR,
						  ptr, addon);
		  changed = true;
		  stmt = gsi_stmt (*gsi);
		}
	    }
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	gcall *call = as_a <gcall *> (stmt);
	for (i = 0; i < gimple_call_num_args (call); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (call, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (call);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	if (*lhs)
	  {
	    combined_fn cfn = gimple_call_combined_fn (call);
	    internal_fn ifn = associated_internal_fn (cfn, TREE_TYPE (*lhs));
	    int opno = first_commutative_argument (ifn);
	    if (opno >= 0)
	      {
		tree arg1 = gimple_call_arg (call, opno);
		tree arg2 = gimple_call_arg (call, opno + 1);
		if (tree_swap_operands_p (arg1, arg2))
		  {
		    gimple_call_set_arg (call, opno, arg2);
		    gimple_call_set_arg (call, opno + 1, arg1);
		    changed = true;
		  }
	      }
	  }
	break;
      }
    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val, true))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      break;
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      gimple_match_op res_op;
      if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		stmt = gsi_stmt (*gsi);
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a <greturn *> (stmt);
	tree ret = gimple_return_retval (ret_stmt);

	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);

  return changed;
}
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}

/* Valueization callback that ends up following single-use SSA edges only.  */

tree
follow_single_use_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !has_single_use (val))
    return NULL_TREE;
  return val;
}

/* Valueization callback that follows all SSA edges.  */

tree
follow_all_ssa_edges (tree val)
{
  return val;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
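/* Illustrative use of the folders above (a sketch, not a definitive
   recipe; this mirrors what substitute-and-fold style passes do):

     basic_block bb;
     FOR_EACH_BB_FN (bb, cfun)
       for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	    !gsi_end_p (gsi); gsi_next (&gsi))
	 fold_stmt (&gsi);

   fold_stmt may replace the statement pointed to by GSI entirely, so
   pointers to the old statement must not be kept across the call;
   fold_stmt_inplace guarantees the statement object itself is
   preserved.  */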
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
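/* For example, canonicalize_bool (a_1 < b_2, true) yields the inverted
   comparison a_1 >= b_2, canonicalize_bool of a non-boolean SSA name
   x_3 with INVERT false yields x_3 != 0, and integer constants map to
   boolean_true_node/boolean_false_node.  */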
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }

  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (COMPARISON_CLASS_P (op2)
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (COMPARISON_CLASS_P (op1)
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  return false;
}
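/* For instance, a boolean SSA name x_1 and the comparison x_1 != 0
   compare as equivalent here, as do x_1 and a_2 < b_3 when x_1 is
   defined by x_1 = a_2 < b_3.  */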
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree type, tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (tree type, gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree, tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (tree, gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree type, tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (type, stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
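/* As an example of the DeMorgan rewrite above: !x_1 AND (a_2 > b_3)
   is handled by simplifying x_1 OR (a_2 <= b_3) instead and letting
   canonicalize_bool invert the result.  */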
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (tree type, gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (type, innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner2, false, code2, op2a,
					   op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner1, false, code2, op2a,
					   op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (type, op1a, invert, code2, op2a,
					  op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (type, arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  break;

	default:
	  break;
	}
    }
  return NULL_TREE;
}
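/* For example, with x_1 = a_2 < b_3 this folds
   (x_1 != 0) AND (a_2 < b_3) down to a_2 < b_3, and for integral
   operands combine_comparisons merges (a_2 <= b_3) AND (a_2 >= b_3)
   into a_2 == b_3.  */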
/* Helper function for maybe_fold_and_comparisons and
   maybe_fold_or_comparisons: try to simplify the AND/OR of the two
   comparisons (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B) via match.pd.
   Return NULL_TREE if we can't simplify this to a single expression.
   As we are going to lower the cost of building SSA names / gimple
   stmts significantly, we need to allocate them on the stack.  This
   will cause the code to be a bit ugly.  */

static tree
maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
				      enum tree_code code1,
				      tree op1a, tree op1b,
				      enum tree_code code2, tree op2a,
				      tree op2b)
{
  /* Allocate gimple stmt1 on the stack.  */
  gassign *stmt1
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt1, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt1, code1);
  gimple_assign_set_rhs1 (stmt1, op1a);
  gimple_assign_set_rhs2 (stmt1, op1b);

  /* Allocate gimple stmt2 on the stack.  */
  gassign *stmt2
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt2, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt2, code2);
  gimple_assign_set_rhs1 (stmt2, op2a);
  gimple_assign_set_rhs2 (stmt2, op2b);

  /* Allocate SSA names(lhs1) on the stack.  */
  tree lhs1 = (tree) XALLOCA (tree_ssa_name);
  memset (lhs1, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs1, SSA_NAME);
  TREE_TYPE (lhs1) = type;
  init_ssa_name_imm_use (lhs1);

  /* Allocate SSA names(lhs2) on the stack.  */
  tree lhs2 = (tree) XALLOCA (tree_ssa_name);
  memset (lhs2, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs2, SSA_NAME);
  TREE_TYPE (lhs2) = type;
  init_ssa_name_imm_use (lhs2);

  gimple_assign_set_lhs (stmt1, lhs1);
  gimple_assign_set_lhs (stmt2, lhs2);

  gimple_match_op op (gimple_match_cond::UNCOND, code,
		      type, gimple_assign_lhs (stmt1),
		      gimple_assign_lhs (stmt2));
  if (op.resimplify (NULL, follow_all_ssa_edges))
    {
      if (gimple_simplified_result_is_gimple_val (&op))
	{
	  tree res = op.ops[0];
	  if (res == lhs1)
	    return build2 (code1, type, op1a, op1b);
	  else if (res == lhs2)
	    return build2 (code2, type, op2a, op2b);
	  else
	    return res;
	}
      else if (op.code.is_tree_code ()
	       && TREE_CODE_CLASS ((tree_code) op.code) == tcc_comparison)
	{
	  tree op0 = op.ops[0];
	  tree op1 = op.ops[1];
	  if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
	    return NULL_TREE;  /* not simple */

	  return build2 ((enum tree_code) op.code, op.type, op0, op1);
	}
    }

  return NULL_TREE;
}
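/* The stack allocation above is the point of this helper: building
   two throw-away assignments and SSA names with XALLOCAVEC/XALLOCA
   lets the match.pd machinery see the combined expression through
   follow_all_ssa_edges without allocating GC memory for statements
   that are discarded right away.  */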
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (tree type,
			    enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
    return t;

  if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
    return t;

  if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
						     op1a, op1b, code2, op2a,
						     op2b))
    return t;

  return NULL_TREE;
}
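/* Example use (illustrative; a and b stand for arbitrary integral
   operands):

     tree t = maybe_fold_and_comparisons (boolean_type_node,
					  LE_EXPR, a, b,
					  GE_EXPR, a, b);

   which yields a == b, while NULL_TREE signals that no single
   expression exists.  */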
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree type, tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (type, stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (tree type, gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (type, innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner2, false, code2, op2a,
					  op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner1, false, code2, op2a,
					  op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (type, op1a, invert, code2, op2a,
					 op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (type, arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  break;

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (tree type,
			   enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b)
{
  if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
    return t;

  if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
    return t;

  if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
						     op1a, op1b, code2, op2a,
						     op2b))
    return t;

  return NULL_TREE;
}
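/* For example, maybe_fold_or_comparisons (boolean_type_node,
   LT_EXPR, a, b, EQ_EXPR, a, b) yields a <= b for integral operands,
   via combine_comparisons in or_comparisons_1.  */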
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  gimple_match_op res_op;
  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (&res_op))
	res = res_op.ops[0];
      else if (mprts_hook)
	res = mprts_hook (&res_op);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant
		     value, if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  poly_int64 offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && known_eq (CONSTRUCTOR_NELTS (rhs),
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i, nelts;
		  tree val;

		  nelts = CONSTRUCTOR_NELTS (rhs);
		  tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec.quick_push (val);
		      else
			return NULL_TREE;
		    }

		  return vec.build ();
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    /* Translate &x + CST into an invariant form suitable for
	       further propagation.  */
	    if (subcode == POINTER_PLUS_EXPR)
	      {
		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		if (TREE_CODE (op0) == ADDR_EXPR
		    && TREE_CODE (op1) == INTEGER_CST)
		  {
		    tree off = fold_convert (ptr_type_node, op1);
		    return build1_loc
			(loc, ADDR_EXPR, TREE_TYPE (op0),
			 fold_build2 (MEM_REF,
				      TREE_TYPE (TREE_TYPE (op0)),
				      unshare_expr (op0), off));
		  }
	      }
	    /* Canonicalize bool != 0 and bool == 0 appearing after
	       valueization.  While gimple_simplify handles this
	       it can get confused by the ~X == 1 -> X == 0 transform
	       which we can't reduce to a SSA name or a constant
	       (and we have no way to tell gimple_simplify to not
	       consider those transforms in the first place).  */
	    else if (subcode == EQ_EXPR
		     || subcode == NE_EXPR)
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree op0 = gimple_assign_rhs1 (stmt);
		if (useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (op0)))
		  {
		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		    op0 = (*valueize) (op0);
		    if (TREE_CODE (op0) == INTEGER_CST)
		      std::swap (op0, op1);
		    if (TREE_CODE (op1) == INTEGER_CST
			&& ((subcode == NE_EXPR && integer_zerop (op1))
			    || (subcode == EQ_EXPR && integer_onep (op1))))
		      return op0;
		  }
	      }
	    return NULL_TREE;

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
	      return fold_ternary_loc (loc, subcode,
				       TREE_TYPE (gimple_assign_lhs (stmt)),
				       op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      case IFN_BUILTIN_EXPECT:
		{
		  tree arg0 = gimple_call_arg (stmt, 0);
		  tree op0 = (*valueize) (arg0);
		  if (TREE_CODE (op0) == INTEGER_CST)
		    return op0;
		  return NULL_TREE;
		}
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && fndecl_built_in_p (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt),
					      args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
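/* For instance, for the statement _3 = _1 + _2 and a valueization
   callback mapping _1 to 2 and _2 to 3, this returns the INTEGER_CST 5.
   gimple_fold_stmt_to_constant_1 may also produce simplified but
   non-constant trees; this wrapper filters those out.  */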
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, poly_int64_pod *bit_offset,
		      tree (*valueize)(tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
      if (!boff.to_shwi (bit_offset))
	return NULL_TREE;

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
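/* For example, for "static const int a[2] = { 1, 2 };" and a BASE
   referring to "a", this returns the CONSTRUCTOR { 1, 2 }, whereas
   for "static const int b;" it returns error_mark_node, i.e. a known
   zero initializer.  */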
/* CTOR is CONSTRUCTOR of an array type.  Fold a reference of SIZE bits
   to the memory at bit OFFSET.  When non-null, TYPE is the expected
   type of the reference; otherwise the type of the referenced element
   is used instead.  When SIZE is zero, attempt to fold a reference to
   the entire element which OFFSET refers to.  Increment *SUBOFF by
   the bit offset of the accessed element.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl,
			   unsigned HOST_WIDE_INT *suboff)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* When TYPE is non-null, verify that it specifies a constant-sized
     access of a multiple of the array element size.  Avoid division
     by zero below when ELT_SIZE is zero, such as with the result of
     an initializer for a zero-length array or an empty struct.  */
  if (elt_size == 0
      || (type
	  && (!TYPE_SIZE_UNIT (type)
	      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
  if (size > elt_sz * BITS_PER_UNIT)
    {
      /* native_encode_expr constraints.  */
      if (size > MAX_BITSIZE_MODE_ANY_MODE
	  || size % BITS_PER_UNIT != 0
	  || inner_offset % BITS_PER_UNIT != 0
	  || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
	return NULL_TREE;

      unsigned ctor_idx;
      tree val = get_array_ctor_element_at_index (ctor, access_index,
						  &ctor_idx);
      if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
	return build_zero_cst (type);

      /* native-encode adjacent ctor elements.  */
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      unsigned bufoff = 0;
      offset_int index = 0;
      offset_int max_index = access_index;
      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
      if (!val)
	val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
      else if (!CONSTANT_CLASS_P (val))
	return NULL_TREE;
      if (!elt->index)
	;
      else if (TREE_CODE (elt->index) == RANGE_EXPR)
	{
	  index = wi::to_offset (TREE_OPERAND (elt->index, 0));
	  max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
	}
      else
	index = max_index = wi::to_offset (elt->index);
      index = wi::umax (index, access_index);
      do
	{
	  if (bufoff + elt_sz > sizeof (buf))
	    elt_sz = sizeof (buf) - bufoff;
	  int len = native_encode_expr (val, buf + bufoff, elt_sz,
					inner_offset / BITS_PER_UNIT);
	  if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
	    return NULL_TREE;
	  inner_offset = 0;
	  bufoff += len;

	  access_index += 1;
	  if (wi::cmpu (access_index, index) == 0)
	    val = elt->value;
	  else if (wi::cmpu (access_index, max_index) > 0)
	    {
	      ctor_idx++;
	      if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
		{
		  val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		  ++max_index;
		}
	      else
		{
		  elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
		  index = 0;
		  max_index = access_index;
		  if (!elt->index)
		    ;
		  else if (TREE_CODE (elt->index) == RANGE_EXPR)
		    {
		      index = wi::to_offset (TREE_OPERAND (elt->index, 0));
		      max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
		    }
		  else
		    index = max_index = wi::to_offset (elt->index);
		  index = wi::umax (index, access_index);
		  if (wi::cmpu (access_index, index) == 0)
		    val = elt->value;
		  else
		    val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		}
	    }
	}
      while (bufoff < size / BITS_PER_UNIT);
      *suboff += size;
      return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }

  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    {
      if (!size && TREE_CODE (val) != CONSTRUCTOR)
	{
	  /* For the final reference to the entire accessed element
	     (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
	     may be null) in favor of the type of the element, and set
	     SIZE to the size of the accessed element.  */
	  inner_offset = 0;
	  type = TREE_TYPE (val);
	  size = elt_sz * BITS_PER_UNIT;
	}
      else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
	       && TREE_CODE (val) == CONSTRUCTOR
	       && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
	/* If this isn't the last element in the CTOR and a CTOR itself
	   and it does not cover the whole object we are requesting give up
	   since we're not set up for combining from multiple CTORs.  */
	return NULL_TREE;

      *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
      return fold_ctor_reference (type, val, inner_offset, size, from_decl,
				  suboff);
    }

  /* Memory not explicitly mentioned in constructor is 0 (or
     the reference is out of range).  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
/* CTOR is CONSTRUCTOR of an aggregate or vector.  Fold a reference
   of SIZE bits to the memory at bit OFFSET.  When non-null, TYPE
   is the expected type of the reference; otherwise the type of
   the referenced member is used instead.  When SIZE is zero,
   attempt to fold a reference to the entire member which OFFSET
   refers to.  Increment *SUBOFF by the bit offset of the
   accessed member.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl,
			      unsigned HOST_WIDE_INT *suboff)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);

      if (!field_size)
	{
	  /* Determine the size of the flexible array member from
	     the size of the initializer provided for it.  */
	  field_size = TYPE_SIZE (TREE_TYPE (cval));
	}

      /* Variable sized objects in static constructors make no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      offset_int bitoffset
	= (wi::to_offset (field_offset)
	   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      offset_int bitoffset_end;
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      /* Compute the bit offset of the end of the desired access.
	 As a special case, if the size of the desired access is
	 zero, assume the access is to the entire field (and let
	 the caller make any necessary adjustments by storing
	 the actual bounds of the field in FIELDBOUNDS).  */
      offset_int access_end = offset_int (offset);
      if (size)
	access_end += size;
      else
	access_end = bitoffset_end;

      /* Is there any overlap between the desired access at
	 [OFFSET, OFFSET+SIZE) and the offset of the field within
	 the object at [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  *suboff += bitoffset.to_uhwi ();

	  if (!size && TREE_CODE (cval) != CONSTRUCTOR)
	    {
	      /* For the final reference to the entire accessed member
		 (SIZE is zero), reset OFFSET, disregard TYPE (which may
		 be null) in favor of the type of the member, and set
		 SIZE to the size of the accessed member.  */
	      offset = bitoffset.to_uhwi ();
	      type = TREE_TYPE (cval);
	      size = (bitoffset_end - bitoffset).to_uhwi ();
	    }

	  /* We do have overlap.  Now see if the field is large enough
	     to cover the access.  Give up for accesses that extend
	     beyond the end of the object or that span multiple fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;

	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl, suboff);
	}
    }

  if (!type)
    return NULL_TREE;

  return build_zero_cst (type);
}
/* CTOR is value initializing memory.  Fold a reference of TYPE and
   bit size POLY_SIZE to the memory at bit POLY_OFFSET.  When POLY_SIZE
   is zero, attempt to fold a reference to the entire subobject
   which OFFSET refers to.  This is used when folding accesses to
   string members of aggregates.  When non-null, set *SUBOFF to
   the bit offset of the accessed subobject.  */

tree
fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
		     const poly_uint64 &poly_size, tree from_decl,
		     unsigned HOST_WIDE_INT *suboff /* = NULL */)
{
  tree ret;

  /* We found the field with exact match.  */
  if (type
      && useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* The remaining optimizations need a constant size and offset.  */
  unsigned HOST_WIDE_INT size, offset;
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
    return NULL_TREE;

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }

  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && offset / BITS_PER_UNIT <= INT_MAX
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE
      && can_native_interpret_type_p (type))
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT dummy = 0;
      if (!suboff)
	suboff = &dummy;

      tree ret;
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	ret = fold_array_ctor_reference (type, ctor, offset, size,
					 from_decl, suboff);
      else
	ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
					    from_decl, suboff);

      /* Fall back to native_encode_initializer.  Needs to be done
	 only in the outermost fold_ctor_reference call (because it itself
	 recurses into CONSTRUCTORs) and doesn't update suboff.  */
      if (ret == NULL_TREE
	  && suboff == &dummy
	  && BITS_PER_UNIT == 8
	  && offset % BITS_PER_UNIT == 0
	  && offset / BITS_PER_UNIT <= INT_MAX
	  && size % BITS_PER_UNIT == 0
	  && size <= MAX_BITSIZE_MODE_ANY_MODE
	  && can_native_interpret_type_p (type))
	{
	  unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	  int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
					       offset / BITS_PER_UNIT);
	  if (len > 0)
	    return native_interpret_expr (type, buf, len);
	}

      return ret;
    }

  return NULL_TREE;
}
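/* Taken together the routines above allow folding e.g. a read of bits
   [32, 64) from the initializer of "static const int a[2] = { 1, 2 };"
   to the constant 2 (assuming a target with 32-bit int).  */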
8091 /* Return the tree representing the element referenced by T if T is an
8092 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
8093 names using VALUEIZE. Return NULL_TREE otherwise. */
tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;
  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (sizetype));
	      woffset *= tree_to_uhwi (unit_size);
	      woffset *= BITS_PER_UNIT;
	      if (woffset.to_shwi (&offset))
		{
		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;
      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
      if (tem)
	return tem;
      /* For bit field reads try to read the representative and
	 adjust.  */
      if (TREE_CODE (t) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
	  && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
	{
	  HOST_WIDE_INT csize, coffset;
	  tree field = TREE_OPERAND (t, 1);
	  tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
	      && size.is_constant (&csize)
	      && offset.is_constant (&coffset)
	      && (coffset % BITS_PER_UNIT != 0
		  || csize % BITS_PER_UNIT != 0)
	      && !reverse
	      && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
	    {
	      poly_int64 bitoffset;
	      poly_uint64 field_offset, repr_offset;
	      if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
		  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
		bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
	      else
		bitoffset = 0;
	      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
			    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
	      HOST_WIDE_INT bitoff;
	      int diff = (TYPE_PRECISION (TREE_TYPE (repr))
			  - TYPE_PRECISION (TREE_TYPE (field)));
	      if (bitoffset.is_constant (&bitoff)
		  && bitoff >= 0
		  && bitoff <= diff)
		{
		  offset -= bitoff;
		  size = tree_to_uhwi (DECL_SIZE (repr));

		  tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
					     size, base);
		  if (tem && TREE_CODE (tem) == INTEGER_CST)
		    {
		      if (!BYTES_BIG_ENDIAN)
			tem = wide_int_to_tree (TREE_TYPE (field),
						wi::lrshift (wi::to_wide (tem),
							     bitoff));
		      else
			tem = wide_int_to_tree (TREE_TYPE (field),
						wi::lrshift (wi::to_wide (tem),
							     diff - bitoff));
		      return tem;
		    }
		}
	    }
	}
      break;
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
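/* Illustrative example (not from the sources): given

     static const int a[4] = { 1, 2, 3, 4 };

   calling fold_const_aggregate_ref on the ARRAY_REF a[2] resolves the
   load against a's constant initializer via fold_ctor_reference and
   returns the INTEGER_CST 3.  */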
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */
tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double-check we have a virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we should always assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that the ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be an array.
     This is equivalent to
       fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
				 offset, size, NULL);
     but in constant time.  We expect that the frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* The C++ FE can now produce indexed fields, and we check if the indexes
     match.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
      gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For a type-inconsistent program we may end up looking up a virtual
     method in a virtual table that does not contain TOKEN entries.  We may
     overrun the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When the cgraph node is missing and the function is not public,
	 we cannot devirtualize.  This can happen in WHOPR when the actual
	 method ends up in another partition, because we found the
	 devirtualization possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
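/* Worked example (hypothetical numbers): on a target with
   BITS_PER_UNIT == 8 and 8-byte vtable slots (SIZE == 64 bits,
   ELT_SIZE == 8 bytes), a lookup with OFFSET == 16 bytes and
   TOKEN == 2 computes

     offset = 16 * 8 + 2 * 64 = 256 bits
     access_index = 256 / 8 / 8 = 4,

   i.e. the method pointer is CONSTRUCTOR_ELT (init, 4).  */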
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */
tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */
tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }
  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }
  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (!sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case ABS_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;

    default:
      return false;
    }
}
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
    gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
  else
    for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
      {
	tree op = gimple_op (stmt, i);
	op = gimple_convert (&stmts, type, op);
	gimple_set_op (stmt, i, op);
      }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_set_modified (stmt, true);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
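/* Schematic example (SSA names illustrative): the signed addition

     x_1 = a_2 + b_3;

   becomes

     a.0_4 = (unsigned int) a_2;
     b.1_5 = (unsigned int) b_3;
     x.2_6 = a.0_4 + b.1_5;
     x_1 = (int) x.2_6;

   carrying out the addition in the unsigned type, where overflow
   wraps instead of invoking undefined behavior.  */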
/* The valueization hook we use for the gimple_build API simplification.
   This makes us match fold_buildN behavior by only combining with
   statements in the sequence(s) we are currently building.  */

static tree
gimple_build_valueize (tree op)
{
  if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
    return op;
  return NULL_TREE;
}
/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0)
{
  tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == REALPART_EXPR
	  || code == IMAGPART_EXPR
	  || code == VIEW_CONVERT_EXPR)
	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
	stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1)
{
  tree res = gimple_simplify (code, type, op0, op1, seq,
			      gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt = gimple_build_assign (res, code, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == BIT_FIELD_REF)
	stmt = gimple_build_assign (res, code,
				    build3 (code, type, op0, op1, op2));
      else
	stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN () with a result of type TYPE (or no result if TYPE is
   void) with a location LOC.  Returns the built expression value (or NULL_TREE
   if TYPE is void) and appends statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn, tree type)
{
  tree res = NULL_TREE;
  gcall *stmt;
  if (internal_fn_p (fn))
    stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
  else
    {
      tree decl = builtin_decl_implicit (as_builtin_fn (fn));
      stmt = gimple_build_call (decl, 0);
    }
  if (!VOID_TYPE_P (type))
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple_call_set_lhs (stmt, res);
    }
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (seq, stmt);
  return res;
}
/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
	      tree type, tree arg0)
{
  tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 1, arg0);
	}
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
	      tree type, tree arg0, tree arg1)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq,
			      gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 2, arg0, arg1);
	}
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
	      tree type, tree arg0, tree arg1, tree arg2)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn),
					   3, arg0, arg1, arg2);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
	}
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build CODE (OP0) with a result of type TYPE (or no result if TYPE is
   void) with location LOC, simplifying it first if possible.  Returns the
   built expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc, code_helper code,
	      tree type, tree op0)
{
  if (code.is_tree_code ())
    return gimple_build (seq, loc, tree_code (code), type, op0);
  return gimple_build (seq, loc, combined_fn (code), type, op0);
}
/* Build CODE (OP0, OP1) with a result of type TYPE (or no result if TYPE is
   void) with location LOC, simplifying it first if possible.  Returns the
   built expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc, code_helper code,
	      tree type, tree op0, tree op1)
{
  if (code.is_tree_code ())
    return gimple_build (seq, loc, tree_code (code), type, op0, op1);
  return gimple_build (seq, loc, combined_fn (code), type, op0, op1);
}
/* Build CODE (OP0, OP1, OP2) with a result of type TYPE (or no result if TYPE
   is void) with location LOC, simplifying it first if possible.  Returns the
   built expression value (or NULL_TREE if TYPE is void) and appends statements
   possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc, code_helper code,
	      tree type, tree op0, tree op1, tree op2)
{
  if (code.is_tree_code ())
    return gimple_build (seq, loc, tree_code (code), type, op0, op1, op2);
  return gimple_build (seq, loc, combined_fn (code), type, op0, op1, op2);
}
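/* Illustrative sketch (not part of GCC): chaining the gimple_build
   overloads above to emit sqrt (a*a + b*b), folding on the fly.  The
   helper name is hypothetical; TYPE is assumed to be a scalar float
   type for which CFN_SQRT can be expanded.  */

static tree
example_build_hypot_like (gimple_seq *seq, location_t loc, tree type,
			  tree a, tree b)
{
  /* Each call first tries gimple_simplify; a new statement is appended
     to SEQ only when no simplification applies.  */
  tree a2 = gimple_build (seq, loc, MULT_EXPR, type, a, a);
  tree b2 = gimple_build (seq, loc, MULT_EXPR, type, b, b);
  tree sum = gimple_build (seq, loc, PLUS_EXPR, type, a2, b2);
  return gimple_build (seq, loc, CFN_SQRT, type, sum);
}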
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (seq, loc, NOP_EXPR, type, op);
}
/* Build the conversion (ptrofftype) OP with a result of a type
   compatible with ptrofftype with location LOC if such conversion
   is necessary in GIMPLE, simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
{
  if (ptrofftype_p (TREE_TYPE (op)))
    return op;
  return gimple_convert (seq, loc, sizetype, op);
}
/* Build a vector of type TYPE in which each element has the value OP.
   Return a gimple value for the result, appending any new statements
   to SEQ.  */

tree
gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
			      tree op)
{
  if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
      && !CONSTANT_CLASS_P (op))
    return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);

  tree res, vec = build_vector_from_val (type, op);
  if (is_gimple_val (vec))
    return vec;
  if (gimple_in_ssa_p (cfun))
    res = make_ssa_name (type);
  else
    res = create_tmp_reg (type);
  gimple *stmt = gimple_build_assign (res, vec);
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (seq, stmt);
  return res;
}
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, appending any
   new instructions to SEQ.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc,
		     tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!CONSTANT_CLASS_P ((*builder)[i]))
      {
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (seq, stmt);
	return res;
      }
  return builder->build ();
}
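/* Illustrative sketch (not part of GCC): using gimple_build_vector to
   materialize the vector { a, 0, a, 0, ... }.  The helper name is
   hypothetical; VECTYPE is assumed to be a vector type with a constant
   number of elements and A an SSA name of its element type.  */

static tree
example_build_interleaved_vector (gimple_seq *seq, location_t loc,
				  tree vectype, tree a)
{
  /* Two interleaved patterns, each a plain duplicate, so
     nelts_per_pattern is 1 and the assertion above is satisfied.  */
  tree_vector_builder builder (vectype, 2, 1);
  builder.quick_push (a);
  builder.quick_push (build_zero_cst (TREE_TYPE (vectype)));
  return gimple_build_vector (seq, loc, &builder);
}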
/* Emit gimple statements into SEQ that take the value given in OLD_SIZE
   and generate a value guaranteed to be rounded upwards to ALIGN, which
   must be a power of two.

   Return the tree node representing this size; it is of TREE_TYPE TYPE.  */

tree
gimple_build_round_up (gimple_seq *seq, location_t loc, tree type,
		       tree old_size, unsigned HOST_WIDE_INT align)
{
  unsigned HOST_WIDE_INT tg_mask = align - 1;
  /* tree new_size = (old_size + tg_mask) & ~tg_mask;  */
  gcc_assert (INTEGRAL_TYPE_P (type));
  tree tree_mask = build_int_cst (type, tg_mask);
  tree oversize = gimple_build (seq, loc, PLUS_EXPR, type, old_size,
				tree_mask);

  tree mask = build_int_cst (type, -align);
  return gimple_build (seq, loc, BIT_AND_EXPR, type, oversize, mask);
}
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     type,
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      type,
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
/* Return true if return value of call STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  tree arg0 = gimple_call_num_args (stmt) > 0 ?
    gimple_call_arg (stmt, 0) : NULL_TREE;
  tree arg1 = gimple_call_num_args (stmt) > 1 ?
    gimple_call_arg (stmt, 1) : NULL_TREE;
  tree lhs = gimple_call_lhs (stmt);
  return (lhs
	  && tree_call_nonnegative_warnv_p (TREE_TYPE (lhs),
					    gimple_call_combined_fn (stmt),
					    arg0, arg1,
					    strict_overflow_p, depth));
}
/* Return true if the result of PHI node STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
	return false;
    }
  return true;
}
/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
						depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
					      depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
					     depth);
    default:
      return false;
    }
}
/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
					  gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
					   gimple_assign_rhs1 (stmt),
					   gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
/* Return true if the floating-point value computed by call STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_integer_valued_real_p (gimple *stmt, int depth)
{
  tree arg0 = (gimple_call_num_args (stmt) > 0
	       ? gimple_call_arg (stmt, 0)
	       : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
	       ? gimple_call_arg (stmt, 1)
	       : NULL_TREE);
  return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
				     arg0, arg1, depth);
}
/* Return true if the floating-point result of phi STMT is known to have
   an integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!integer_valued_real_single_p (arg, depth + 1))
	return false;
    }
  return true;
}
/* Return true if the floating-point value computed by STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_integer_valued_real_p (stmt, depth);
    case GIMPLE_CALL:
      return gimple_call_integer_valued_real_p (stmt, depth);
    case GIMPLE_PHI:
      return gimple_phi_integer_valued_real_p (stmt, depth);