/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2022 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.cc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-access.h"
34 #include "gimple-ssa-warn-restrict.h"
35 #include "fold-const.h"
38 #include "stor-layout.h"
40 #include "gimple-iterator.h"
41 #include "gimple-fold.h"
43 #include "tree-into-ssa.h"
45 #include "tree-object-size.h"
47 #include "tree-ssa-propagate.h"
48 #include "ipa-utils.h"
49 #include "tree-ssa-address.h"
50 #include "langhooks.h"
51 #include "gimplify-me.h"
55 #include "gimple-match.h"
56 #include "gomp-constants.h"
57 #include "optabs-query.h"
58 #include "omp-general.h"
60 #include "fold-const-call.h"
61 #include "stringpool.h"
64 #include "diagnostic-core.h"
67 #include "tree-vector-builder.h"
68 #include "tree-ssa-strlen.h"
70 #include "internal-fn.h"
enum strlen_range_kind
{
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};

static bool
get_range_strlen (tree, bitmap, strlen_range_kind,
		  c_strlen_data *, unsigned);
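/* Illustrative example (added for exposition, not part of the original
   sources): for a declaration like
     char buf[8];   // contents unknown at compile time
   SRK_STRLEN cannot produce a constant length, while SRK_LENRANGE
   would yield the range [0, 7], using sizeof buf - 1 as the upper
   bound derived from the enclosing array.  */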
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	defining it.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred to only if they are defined and not
     optimized out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}
/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, a SSA name is created.  Otherwise a temporary register
   is made.  */

tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}
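/* Usage sketch (added for exposition): a folder synthesizing a load can
   write
     tree tmp = create_tmp_reg_or_ssa_name (TREE_TYPE (mem), new_stmt);
   and receive an SSA name when the function is already in SSA form, or
   a fresh temporary register declaration during gimplification.  */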
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor contains
   CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* ??? We should be able to assert that TREE_ADDRESSABLE is set,
	   but since the use can be in a debug stmt we can't.  */
	;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val
	      && is_gimple_min_invariant (val)
	      && useless_type_conversion_p (TREE_TYPE (sym), TREE_TYPE (val)))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
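/* Example (added for exposition): given
     static const int x = 42;
   a use of 'x' folds to the INTEGER_CST 42; a 'const' variable without
   an initializer that cannot be overridden at link or run time folds
   to a zero constant of its type.  */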
/* Subroutine of fold_stmt.  We perform constant folding of the
   memory reference tree EXPR.  */

static tree
maybe_fold_reference (tree expr)
{
  tree result = NULL_TREE;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_unary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_ternary_loc (EXPR_LOCATION (expr),
			       TREE_CODE (expr),
			       TREE_TYPE (expr),
			       TREE_OPERAND (expr, 0),
			       TREE_OPERAND (expr, 1),
			       TREE_OPERAND (expr, 2));
  else
    result = fold_const_aggregate_ref (expr);

  if (result && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.cc
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning any
   attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if (!is_gimple_val (TREE_OPERAND (expr, 0))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
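/* Example (added for exposition): fold-const.cc may hand back a tree
   such as &a[i + 1]; the embedded ARRAY_REF index i + 1 is not a
   GIMPLE value, so valid_gimple_rhs_p rejects the expression even
   though its root node would be acceptable on its own.  */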
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    if (TREE_CODE (ref) == MEM_REF
		&& integer_zerop (TREE_OPERAND (ref, 1)))
	      {
		result = TREE_OPERAND (ref, 0);
		if (!useless_type_conversion_p (TREE_TYPE (rhs),
						TREE_TYPE (result)))
		  result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
		return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs)
		 && is_gimple_reg_type (TREE_TYPE (rhs)))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
/* Helper function for update_gimple_call and
   gimplify_and_update_call_from_tree.  A GIMPLE_CALL STMT is being replaced
   with GIMPLE_CALL NEW_STMT.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
			   gimple *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  gimple_call_set_lhs (new_stmt, lhs);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    SSA_NAME_DEF_STMT (lhs) = new_stmt;
  gimple_move_vops (new_stmt, stmt);
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}
/* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
   with number of arguments NARGS, where the arguments in GIMPLE form
   follow the NARGS argument.  */

void
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
}
/* Return true if EXPR is a CALL_EXPR suitable for representation
   as a single GIMPLE_CALL statement.  If the arguments require
   further gimplification, return false.  */

static bool
valid_gimple_call_p (tree expr)
{
  unsigned i, nargs;

  if (TREE_CODE (expr) != CALL_EXPR)
    return false;

  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    {
      tree arg = CALL_EXPR_ARG (expr, i);
      if (is_gimple_reg_type (TREE_TYPE (arg)))
	{
	  if (!is_gimple_val (arg))
	    return false;
	}
      else
	if (!is_gimple_lvalue (arg))
	  return false;
    }

  return true;
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs, true);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();
      return;
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      push_gimplify_context (gimple_in_ssa_p (cfun));
      gimplify_and_add (expr, &stmts);
      pop_gimplify_context (NULL);

      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  gsi_replace_with_seq_vops (si_p, stmts);
}
/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}
/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return (DECL_P (inner)
	  || (TREE_CODE (inner) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
}
/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  value_range valid_range (build_int_cst (type, 0),
			   wide_int_to_tree (type, ssize_max));
  value_range vr;
  if (cfun)
    get_range_query (cfun)->range_of_expr (vr, size);
  else
    get_global_range_query ()->range_of_expr (vr, size);
  if (vr.undefined_p ())
    vr.set_varying (TREE_TYPE (size));
  vr.intersect (valid_range);
  return vr.zero_p ();
}
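/* Worked example (added for exposition): with a 64-bit size_t, PREC is
   64 and SSIZE_MAX is 2**63 - 1.  If SIZE is (size_t) n for a signed n
   known to be non-positive, its range is {0} union [2**63, 2**64 - 1];
   intersecting with the valid range [0, SSIZE_MAX] leaves only {0},
   so zero is the sole value for which the call is meaningful.  */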
/* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  Call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see bug
	 32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      /* We cannot (easily) change the type of the copy if it is a storage
	 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
	 modify the storage order of objects (see storage_order_barrier_p).  */
      tree srctype
	= POINTER_TYPE_P (TREE_TYPE (src))
	  ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
      tree desttype
	= POINTER_TYPE_P (TREE_TYPE (dest))
	  ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
      tree destvar, srcvar, srcoff;
      unsigned int src_align, dest_align;
      unsigned HOST_WIDE_INT tmp_len;
      const char *tmp_str;

      /* Build accesses at offset zero with a ref-all character type.  */
      tree off0
	= build_int_cst (build_pointer_type_for_mode (char_type_node,
						      ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* FIXME: Don't transform copies from strings with known length.
	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
	     from being handled, and the case was XFAILed for that reason.
	     Now that it is handled and the XFAIL removed, as soon as other
	     strlenopt tests that rely on it for passing are adjusted, this
	     hack can be removed.  */
	  && !c_strlen (src, 1)
	  && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL)
	  && !(srctype
	       && AGGREGATE_TYPE_P (srctype)
	       && TYPE_REVERSE_STORAGE_ORDER (srctype))
	  && !(desttype
	       && AGGREGATE_TYPE_P (desttype)
	       && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect out-of-bounds accesses without issuing warnings.
		 Avoid folding out-of-bounds copies but to avoid false
		 positives for unreachable code defer warning until after
		 DCE has worked its magic.
		 -Wrestrict is still diagnosed.  */
	      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
							 dest, src, len, len,
							 false, false))
		if (warning != OPT_Wrestrict)
		  return false;

	      scalar_int_mode mode;
	      if (int_mode_for_size (ilen * 8, 0).exists (&mode)
		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree type = build_nonstandard_integer_type (ilen * 8, 1);
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gimple_set_location (new_stmt, loc);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_move_vops (new_stmt, stmt);
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gimple_set_location (new_stmt, loc);
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}
      if (code == BUILT_IN_MEMMOVE)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}
      if (!tree_fits_shwi_p (len))
	return false;
      if (!srctype
	  || (AGGREGATE_TYPE_P (srctype)
	      && TYPE_REVERSE_STORAGE_ORDER (srctype)))
	return false;
      if (!desttype
	  || (AGGREGATE_TYPE_P (desttype)
	      && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
	 on alignment, whether the access constitutes a register access
	 and whether it may actually expose a declaration for SSA rewrite
	 or SRA decomposition.  Also try to expose a string constant, we
	 might be able to concatenate several of them later into a single
	 string store.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
	  && dest_align >= TYPE_ALIGN (desttype)
	  && (is_gimple_reg_type (desttype)
	      || src_align >= TYPE_ALIGN (desttype)))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
	       && var_decl_component_p (TREE_OPERAND (src, 0))
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	       && src_align >= TYPE_ALIGN (srctype)
	       && (is_gimple_reg_type (srctype)
		   || dest_align >= TYPE_ALIGN (srctype)))
	srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      /* FIXME: Don't transform copies from strings with known original length.
	 As soon as strlenopt tests that rely on it for passing are adjusted,
	 this hack can be removed.  */
      else if (gimple_call_alloca_for_var_p (stmt)
	       && (srcvar = string_constant (src, &srcoff, NULL, NULL))
	       && integer_zerop (srcoff)
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
	       && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
	srctype = TREE_TYPE (srcvar);
      else
	return false;

      /* Now that we chose an access type express the other side in
	 terms of it if the target allows that with respect to alignment
	 constraints.  */
      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      enum machine_mode mode = TYPE_MODE (desttype);
	      if ((mode == BLKmode && STRICT_ALIGNMENT)
		  || (targetm.slow_unaligned_access (mode, src_align)
		      && (optab_handler (movmisalign_optab, mode)
			  == CODE_FOR_nothing)))
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      enum machine_mode mode = TYPE_MODE (srctype);
	      if ((mode == BLKmode && STRICT_ALIGNMENT)
		  || (targetm.slow_unaligned_access (mode, dest_align)
		      && (optab_handler (movmisalign_optab, mode)
			  == CODE_FOR_nothing)))
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Same as above, detect out-of-bounds accesses without issuing
	 warnings.  Avoid folding out-of-bounds copies but to avoid
	 false positives for unreachable code defer warning until
	 after DCE has worked its magic.
	 -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
						 dest, src, len, len,
						 false, false))
	if (warning != OPT_Wrestrict)
	  return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gimple_set_location (new_stmt, loc);
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}

      /* We get an aggregate copy.  If the source is a STRING_CST, then
	 directly use its type to perform the copy.  */
      if (TREE_CODE (srcvar) == STRING_CST)
	desttype = srctype;
      /* Or else, use an unsigned char[] type to perform the copy in order
	 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
	 types or float modes behavior on copying.  */
      else
	{
	  desttype = build_array_type_nelts (unsigned_char_type_node,
					     tree_to_uhwi (len));
	  srctype = desttype;
	  if (src_align > TYPE_ALIGN (srctype))
	    srctype = build_aligned_type (srctype, src_align);
	  srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	}

      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      new_stmt = gimple_build_assign (destvar, srcvar);

set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gimple_set_location (new_stmt, loc);
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
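/* Illustrative net effect (added for exposition): for
     struct S a, b;
     memcpy (&a, &b, sizeof a);
   with, say, sizeof a == 16, the call is folded to an aggregate
   assignment of an unsigned char[16] view of both objects, roughly
     MEM[(unsigned char[16] *)&a] = MEM[(unsigned char[16] *)&b];
   For mempcpy the replacement lhs value is dest + len instead of
   dest.  */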
/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}
/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to C.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
	  != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (!type_has_mode_precision_p (etype))
    etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
					    TYPE_UNSIGNED (etype));

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
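      /* Worked example (added for exposition): for memset (p, 0xab, 4)
	 the steps above widen cval from 0xab to 0xabab to 0xabababab,
	 and the final shift replicates it into the upper half on 64-bit
	 hosts, so the single store below writes the repeated byte
	 pattern.  */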
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_move_vops (store, stmt);
  gimple_set_location (store, gimple_location (stmt));
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  */

static bool
get_range_strlen_tree (tree arg, bitmap visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array.
	     DATA.DECL with size DATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == MEM_REF
	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
	{
	  /* Handle a MEM_REF into a DECL accessing an array of integers,
	     being conservative about references to extern structures with
	     flexible array members that can be initialized to arbitrary
	     numbers of elements as an extension (static structs are okay).
	     FIXME: Make this less conservative -- see
	     component_ref_size in tree.cc.  */
	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
	      && (decl_binds_to_current_def_p (ref)
		  || !array_at_struct_end_p (arg)))
	    {
	      /* Fail if the offset is out of bounds.  Such accesses
		 should be diagnosed at some point.  */
	      val = DECL_SIZE_UNIT (ref);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;

	      poly_offset_int psiz = wi::to_offset (val);
	      poly_offset_int poff = mem_ref_offset (arg);
	      if (known_le (psiz, poff))
		return false;

	      pdata->minlen = ssize_int (0);

	      /* Subtract the offset and one for the terminating nul.  */
	      psiz -= poff;
	      psiz -= 1;
	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather the type of the access it is not a tight bound.  */
	    }
	}
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;
	}
      else
	pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
	      || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
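/* Example (added for exposition): for
     struct A { char a[4]; int i; } x;
   a query on x.a with SRK_LENRANGE sets MINLEN to 0 and computes the
   optimistic bound VAL = sizeof x.a - 1 == 3; since that bound comes
   from the member type it is "tight" and is then widened against the
   size of the enclosing object when that size is known.  */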
/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG refers to if
   the range of lengths cannot be determined, and store all in *PDATA.
   For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
   the maximum constant value.
   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
   SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
   length or if we are unable to determine the length, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide character strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false otherwise.  */

static bool
get_range_strlen (tree arg, bitmap visited,
		  strlen_range_kind rkind,
		  c_strlen_data *pdata, unsigned eltsize)
{
  if (TREE_CODE (arg) != SSA_NAME)
    return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  tree var = arg;
  gimple *def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
	  }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			    gimple_assign_rhs3 (def_stmt) };

	    for (unsigned int i = 0; i < 2; i++)
	      if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
		{
		  if (rkind != SRK_LENRANGE)
		    return false;
		  /* Set the upper bound to the maximum to prevent
		     it from being adjusted in the next iteration but
		     leave MINLEN and the more conservative MAXBOUND
		     determined so far alone (or leave them null if
		     they haven't been set yet).  That the MINLEN is
		     in fact zero can be determined from MAXLEN being
		     unbounded but the discovered minimum is used for
		     diagnostics.  */
		  pdata->maxlen = build_all_ones_cst (size_type_node);
		}
	    return true;
	  }
	return false;

      case GIMPLE_PHI:
	/* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
	   must have a constant length.  */
	for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
	      {
		if (rkind != SRK_LENRANGE)
		  return false;
		/* Set the upper bound to the maximum to prevent
		   it from being adjusted in the next iteration but
		   leave MINLEN and the more conservative MAXBOUND
		   determined so far alone (or leave them null if
		   they haven't been set yet).  That the MINLEN is
		   in fact zero can be determined from MAXLEN being
		   unbounded but the discovered minimum is used for
		   diagnostics.  */
		pdata->maxlen = build_all_ones_cst (size_type_node);
	      }
	  }
	return true;

      default:
	return false;
    }
}
/* Try to obtain the range of the lengths of the string(s) referenced
   by ARG, or the size of the largest array ARG refers to if the range
   of lengths cannot be determined, and store all in *PDATA which must
   be zero-initialized on input except PDATA->MAXBOUND may be set to
   a non-null tree node other than INTEGER_CST to request to have it
   set to the length of the longest string in a PHI.  ELTSIZE is
   the expected size of the string element in bytes: 1 for char and
   some power of 2 for wide characters.
   Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
   for optimization.  Returning false means that a nonzero PDATA->MINLEN
   doesn't reflect the true lower bound of the range when PDATA->MAXLEN
   is -1 (in that case, the actual range is indeterminate, i.e.,
   [0, PTRDIFF_MAX - 2]).  */

bool
get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
{
  auto_bitmap visited;
  tree maxbound = pdata->maxbound;

  if (!get_range_strlen (arg, visited, SRK_LENRANGE, pdata, eltsize))
    {
      /* On failure extend the length range to an impossible maximum
	 (a valid MAXLEN must be less than PTRDIFF_MAX - 1).  Other
	 members can stay unchanged regardless.  */
      pdata->minlen = ssize_int (0);
      pdata->maxlen = build_all_ones_cst (size_type_node);
    }
  else if (!pdata->minlen)
    pdata->minlen = ssize_int (0);

  /* If it's unchanged from its initial non-null value, set the conservative
     MAXBOUND to SIZE_MAX.  Otherwise leave it null (if it is null).  */
  if (maxbound && pdata->maxbound == maxbound)
    pdata->maxbound = build_all_ones_cst (size_type_node);

  return !integer_all_onesp (pdata->maxlen);
}
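/* Usage sketch (added for exposition):
     c_strlen_data data = { };
     if (get_range_strlen (arg, &data, 1))
       ;  // [data.minlen, data.maxlen] bounds strlen (arg)
   For char buf[8] with unknown contents this would yield minlen 0 and
   maxlen 7; on failure maxlen is set to all-ones and the range is not
   usable for optimization.  */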
/* Return the maximum value for ARG given RKIND (see strlen_range_kind).
   For ARG of pointer types, NONSTR indicates if the caller is prepared
   to handle unterminated strings.  For integer ARG and when RKIND ==
   SRK_INT_VALUE, NONSTR must be null.

   If an unterminated array is discovered and our caller handles
   unterminated arrays, then bubble up the offending DECL and
   return the maximum size.  Otherwise return NULL.  */

static tree
get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
{
  /* A non-null NONSTR is meaningless when determining the maximum
     value of an integer ARG.  */
  gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
  /* ARG must have an integral type when RKIND says so.  */
  gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));

  auto_bitmap visited;

  /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
     is unbounded.  */
  c_strlen_data lendata = { };
  if (!get_range_strlen (arg, visited, rkind, &lendata, /* eltsize = */1))
    lendata.maxlen = NULL_TREE;
  else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
    lendata.maxlen = NULL_TREE;

  if (nonstr)
    {
      /* For callers prepared to handle unterminated arrays set
	 *NONSTR to point to the declaration of the array and return
	 the maximum length/size.  */
      *nonstr = lendata.decl;
      return lendata.maxlen;
    }

  /* Fail if the constant array isn't nul-terminated.  */
  return lendata.decl ? NULL_TREE : lendata.maxlen;
}
/* Return true if LEN is known to be less than or equal to (or if STRICT is
   true, strictly less than) the lower bound of SIZE at compile time and false
   otherwise.  */

static bool
known_lower (gimple *stmt, tree len, tree size, bool strict = false)
{
  if (len == NULL_TREE)
    return false;

  wide_int size_range[2];
  wide_int len_range[2];
  if (get_range (len, stmt, len_range) && get_range (size, stmt, size_range))
    {
      if (strict)
	return wi::ltu_p (len_range[1], size_range[0]);
      else
	return wi::leu_p (len_range[1], size_range[0]);
    }

  return false;
}
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !warning_suppressed_p (stmt, OPT_Wrestrict))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Set to non-null if ARG refers to an unterminated array.  */
  tree nonstr = NULL;
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);

  if (nonstr)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
	warn_string_no_nul (loc, stmt, "strcpy", src, nonstr);
      suppress_warning (stmt, OPT_Wstringop_overread);
      return false;
    }

  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, SRK_STRLEN);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes",
			fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, SRK_STRLEN);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK, transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  if (!gimple_call_lhs (stmt))
    return false;

  /* Avoid folding if the first argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, str))
    return false;

  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
/* Fold function call to builtin strstr.
   If both arguments are constant, evaluate and fold the result,
   additionally fold strstr (x, "") into x and strstr (x, "c")
   into strchr (x, 'c').  */
static bool
gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  if (!gimple_call_lhs (stmt))
    return false;

  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);

  /* Avoid folding if either argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, haystack)
      || !check_nul_terminated_array (NULL_TREE, needle))
    return false;

  const char *q = c_getstr (needle);
  if (q == NULL)
    return false;

  if (const char *p = c_getstr (haystack))
    {
      const char *r = strstr (p, q);

      if (r == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, r - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt
	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
			       haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* For strstr (x, "") return x.  */
  if (q[0] == '\0')
    {
      replace_call_with_value (gsi, haystack);
      return true;
    }

  /* Transform strstr (x, "c") into strchr (x, 'c').  */
  if (q[1] == '\0')
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
	{
	  tree c = build_int_cst (integer_type_node, q[0]);
	  gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   The call is simplified in place: when SRC is known to be the empty
   string the call is replaced by DST outright, and when SRC has a
   computable length the strcat is split into a strlen of DST followed
   by a memcpy just past its terminating nul.  Nothing is changed when
   no simplification is possible.  */

static void
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, SRK_STRLEN);
  if (! len)
    return;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
}
/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strncat builtin.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree src_len = c_strlen (src, 1);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (src_len && integer_zerop (src_len)))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (!src_len || known_lower (stmt, len, src_len, true))
    return false;

  /* Warn on constant LEN.  */
  if (TREE_CODE (len) == INTEGER_CST)
    {
      bool nowarn = warning_suppressed_p (stmt, OPT_Wstringop_overflow_);
      tree dstsize;

      if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize)
	  && TREE_CODE (dstsize) == INTEGER_CST)
	{
	  int cmpdst = tree_int_cst_compare (len, dstsize);

	  if (cmpdst >= 0)
	    {
	      tree fndecl = gimple_call_fndecl (stmt);

	      /* Strncat copies (at most) LEN bytes and always appends
		 the terminating NUL so the specified bound should never
		 be equal to (or greater than) the size of the destination.
		 If it is, the copy could overflow.  */
	      location_t loc = gimple_location (stmt);
	      nowarn = warning_at (loc, OPT_Wstringop_overflow_,
				   cmpdst == 0
				   ? G_("%qD specified bound %E equals "
					"destination size")
				   : G_("%qD specified bound %E exceeds "
					"destination size %E"),
				   fndecl, len, dstsize);
	      if (nowarn)
		suppress_warning (stmt, OPT_Wstringop_overflow_);
	    }
	}

      if (!nowarn && TREE_CODE (src_len) == INTEGER_CST
	  && tree_int_cst_compare (src_len, len) == 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  location_t loc = gimple_location (stmt);

	  /* To avoid possible overflow the specified bound should also
	     not be equal to the length of the source, even when the size
	     of the destination is unknown (it's not an uncommon mistake
	     to specify as the bound to strncpy the length of the source).  */
	  if (warning_at (loc, OPT_Wstringop_overflow_,
			  "%qD specified bound %E equals source length",
			  fndecl, len))
	    suppress_warning (stmt, OPT_Wstringop_overflow_);
	}
    }

  if (!known_lower (stmt, src_len, len))
    return false;

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (known_lower (stmt, src_len, len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Build and append gimple statements to STMTS that load the first
   character of a memory location identified by STR.  LOC is the location
   of the statement.  */

static tree
gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
{
  tree var;

  tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
  tree cst_uchar_ptr_node
    = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
  tree off0 = build_int_cst (cst_uchar_ptr_node, 0);

  tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
  gassign *stmt = gimple_build_assign (NULL_TREE, temp);
  var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);

  gimple_assign_set_lhs (stmt, var);
  gimple_seq_add_stmt_without_update (stmts, stmt);

  return var;
}
/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.  */

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);

  tree bound_node = NULL_TREE;
  unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      bound_node = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (bound_node))
	bound = tree_to_uhwi (bound_node);
    }

  /* If the BOUND parameter is zero, return zero.  */
  if (bound == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* Initially set to the number of characters, including the terminating
     nul if each array has one.  LENx == strnlen (Sx, LENx) implies that
     the array Sx is not terminated by a nul.
     For nul-terminated strings then adjusted to their length so that
     LENx == NULPOSx holds.  */
  unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
  const char *p1 = getbyterep (str1, &len1);
  const char *p2 = getbyterep (str2, &len2);

  /* The position of the terminating nul character if one exists, otherwise
     a value greater than LENx.  */
  unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;

  if (p1)
    {
      size_t n = strnlen (p1, len1);
      if (n < len1)
	len1 = nulpos1 = n;
    }

  if (p2)
    {
      size_t n = strnlen (p2, len2);
      if (n < len2)
	len2 = nulpos2 = n;
    }

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	case BUILT_IN_STRCMP_EQ:
	  if (len1 != nulpos1 || len2 != nulpos2)
	    break;

	  r = strcmp (p1, p2);
	  known_result = true;
	  break;

	case BUILT_IN_STRNCMP:
	case BUILT_IN_STRNCMP_EQ:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;

	    /* Reduce the bound to be no more than the length
	       of the shorter of the two strings, or the sizes
	       of the unterminated arrays.  */
	    unsigned HOST_WIDE_INT n = bound;

	    if (len1 == nulpos1 && len1 < n)
	      n = len1 + 1;
	    if (len2 == nulpos2 && len2 < n)
	      n = len2 + 1;

	    if (MIN (nulpos1, nulpos2) + 1 < n)
	      break;

	    r = strncmp (p1, p2, n);
	    known_result = true;
	    break;
	  }
	/* Only handleable situation is where the strings are equal (result 0),
	   which is already handled by operand_equal_p case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;
	    r = strncmp (p1, p2, bound);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  break;
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
		       || fcode == BUILT_IN_STRCMP
		       || fcode == BUILT_IN_STRCMP_EQ
		       || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
  if (fcode == BUILT_IN_STRNCMP && bound == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is greater than the length of one constant string,
     and the other argument is also a nul-terminated string, replace
     strncmp with strcmp.  */
  if (fcode == BUILT_IN_STRNCMP
      && bound > 0 && bound < HOST_WIDE_INT_M1U
      && ((p2 && len2 < bound && len2 == nulpos2)
	  || (p1 && len1 < bound && len1 == nulpos1)))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
	return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
/* Fold a call to the memchr pointed by GSI iterator.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = getbyterep (arg1, &string_length);

  if (p1)
    {
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  tree mem_size, offset_node;
	  byte_representation (arg1, &offset_node, &mem_size, NULL);
	  unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
					  ? 0 : tree_to_uhwi (offset_node);
	  /* MEM_SIZE is the size of the array the string literal
	     is stored in.  */
	  unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
	  gcc_checking_assert (string_length <= string_size);
	  if (length <= string_size)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (sizetype, offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Return false if no simplification was possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, SRK_STRLEN);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl
	      = gimple_build_call (fn_fputc, 2,
				   build_int_cst
				     (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
}
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.  FCODE is
   the BUILT_IN_* code of the builtin; the return value is treated as
   ignorable when the call has no LHS.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size)
      && !known_lower (stmt, maxlen, size))
    {
      /* MAXLEN and LEN both cannot be proved to be less than SIZE, at
	 least try to optimize (void) __mempcpy_chk () into
	 (void) __memcpy_chk ().  */
      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
	{
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Print a message in the dump file recording transformation of FROM to TO.  */

static void
dump_transformation (gcall *from, gcall *to)
{
  if (dump_enabled_p ())
    dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, from, "simplified %T to %T\n",
		     gimple_call_fn (from), gimple_call_fn (to));
}
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.  FCODE is the
   BUILT_IN_* code of the builtin; the return value is treated as
   ignorable when the call has no LHS.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest)
	  && !warning_suppressed_p (stmt, OPT_Wrestrict))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (!known_lower (stmt, len, size, true)
	  && !known_lower (stmt, maxlen, size, true))
	{
	  if (fcode == BUILT_IN_STPCPY_CHK)
	    {
	      if (! ignore)
		return false;

	      /* If return value of __stpcpy_chk is ignored,
		 optimize into __strcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
	      if (!fn)
		return false;

	      gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }

	  if (! len || TREE_SIDE_EFFECTS (len))
	    return false;

	  /* If c_strlen returned something, but not provably less than size,
	     transform __strcpy_chk into __memcpy_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	  if (!fn)
	    return false;

	  gimple_seq stmts = NULL;
	  len = force_gimple_operand (len, &stmts, true, NULL_TREE);
	  len = gimple_convert (&stmts, loc, size_type_node, len);
	  len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
			      build_int_cst (size_type_node, 1));
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK && !ignore
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gcall *repl = gimple_build_call (fn, 2, dest, src);
  dump_transformation (stmt, repl);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  FCODE is the BUILT_IN_* code of the
   builtin; the return value is treated as ignorable when the call has
   no LHS.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
    {
      if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
	{
	  /* If return value of __stpncpy_chk is ignored,
	     optimize into __strncpy_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
	  if (fn)
	    {
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK && !ignore
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gcall *repl = gimple_build_call (fn, 3, dest, src, len);
  dump_transformation (stmt, repl);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* Set to non-null if ARG refers to an unterminated array.  */
  c_strlen_data data = { };
  /* The size of the unterminated array if SRC refers to one.  */
  tree size;
  /* True if the size is exact/constant, false if it's the lower bound
     of a range.  */
  bool exact;
  tree len = c_strlen (src, 1, &data, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    {
      data.decl = unterminated_array (src, &size, &exact);
      if (!data.decl)
	return false;
    }

  if (data.decl)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
	warn_string_no_nul (loc, stmt, "stpcpy", src, data.decl, size,
			    exact);
      suppress_warning (stmt, OPT_Wstringop_overread);
      return false;
    }

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  gimple_move_vops (repl, stmt);
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
/* Fold a call to __{,v}snprintf_chk.  Return false if a normal call
   should be emitted rather than expanding the function inline.  FCODE
   is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
    return false;

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3,
     retaining trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Fold a call to __{,v}sprintf_chk.  Return false if a normal call
   should be emitted rather than expanding the function inline.  FCODE
   is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		len = c_strlen (arg, 1);
	    }
	}
    }

  if (! integer_all_onesp (size) && !known_lower (stmt, len, size, true))
    return false;

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2,
     retaining trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  tree orig = NULL_TREE;
  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  tree fmt = gimple_call_arg (stmt, 1);
  const char *fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  tree dest = gimple_call_arg (stmt, 0);

  if (!init_target_chars ())
    return false;

  tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
  if (!fn)
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      copy_warning (repl, stmt);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
							  strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* Don't fold calls with source arguments of invalid (nonpointer)
	 types.  */
      if (!POINTER_TYPE_P (TREE_TYPE (orig)))
	return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, SRK_STRLEN);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      copy_warning (repl, stmt);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      tree len = build_int_cstu (TREE_TYPE (destsize), strlen (fmt_str));

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (!known_lower (stmt, len, destsize, true))
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (lhs,
				      fold_convert (TREE_TYPE (lhs), len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (!known_lower (stmt, orig_len, destsize, true))
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return false if no simplification was possible, otherwise true.  FCODE
   is the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, transform to fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return false if no simplification was possible, otherwise true.  FCODE
   is the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len)
	    {
	      char *newstr;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newstr = xstrdup (str);
	      newstr[len - 1] = '\0';
	      newarg = build_string_literal (len, newstr);
	      free (newstr);
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
/* Fold a call to __builtin_strlen, either to a constant when the
   argument's length is known exactly, or by recording a value range
   for the result.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);

  wide_int minlen;
  wide_int maxlen;

  c_strlen_data lendata = { };
  if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
      && !lendata.decl
      && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
      && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
    {
      /* The range of lengths refers to either a single constant
	 string or to the longest and shortest constant string
	 referenced by the argument of the strlen() call, or to
	 the strings that can possibly be stored in the arrays
	 the argument refers to.  */
      minlen = wi::to_wide (lendata.minlen);
      maxlen = wi::to_wide (lendata.maxlen);
    }
  else
    {
      unsigned prec = TYPE_PRECISION (sizetype);

      minlen = wi::shwi (0, prec);
      maxlen = wi::to_wide (max_object_size (), prec) - 2;
    }

  if (minlen == maxlen)
    {
      /* Fold the strlen call to a constant.  */
      tree type = TREE_TYPE (lendata.minlen);
      tree len = force_gimple_operand_gsi (gsi,
					   wide_int_to_tree (type, minlen),
					   true, NULL, true, GSI_SAME_STMT);
      replace_call_with_value (gsi, len);
      return true;
    }

  /* Set the strlen() range to [0, MAXLEN].  */
  if (tree lhs = gimple_call_lhs (stmt))
    set_strlen_range (lhs, minlen, maxlen);

  return false;
}
/* Fold a call to __builtin_acc_on_device.  */

static bool
gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
{
  /* Defer folding until we know which compiler we're in.  */
  if (symtab->state != EXPANSION)
    return false;

  unsigned val_host = GOMP_DEVICE_HOST;
  unsigned val_dev = GOMP_DEVICE_NONE;

#ifdef ACCEL_COMPILER
  val_host = GOMP_DEVICE_NOT_HOST;
  val_dev = ACCEL_COMPILER_acc_device;
#endif

  location_t loc = gimple_location (gsi_stmt (*gsi));

  tree host_eq = make_ssa_name (boolean_type_node);
  gimple *host_ass = gimple_build_assign
    (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
  gimple_set_location (host_ass, loc);
  gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);

  tree dev_eq = make_ssa_name (boolean_type_node);
  gimple *dev_ass = gimple_build_assign
    (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
  gimple_set_location (dev_ass, loc);
  gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);

  tree result = make_ssa_name (boolean_type_node);
  gimple *result_ass = gimple_build_assign
    (result, BIT_IOR_EXPR, host_eq, dev_eq);
  gimple_set_location (result_ass, loc);
  gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);

  replace_call_with_value (gsi, result);

  return true;
}
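
/* The expansion above produces, e.g. when folding in the host compiler
   (illustrative GIMPLE; there val_host is GOMP_DEVICE_HOST and val_dev
   is GOMP_DEVICE_NONE):

     _1 = arg0 == GOMP_DEVICE_HOST;
     _2 = arg0 == GOMP_DEVICE_NONE;
     _3 = _1 | _2;

   and the __builtin_acc_on_device call is replaced by _3.  */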
/* Fold realloc (0, n) -> malloc (n).  */

static bool
gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);
  tree size = gimple_call_arg (stmt, 1);

  if (operand_equal_p (arg, null_pointer_node, 0))
    {
      tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
      if (fn_malloc)
	{
	  gcall *repl = gimple_build_call (fn_malloc, 1, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }
  return false;
}
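
/* Example (a sketch): the call

     p = __builtin_realloc (0, n);

   is rewritten to

     p = __builtin_malloc (n);

   exposing it to further malloc-specific folding; calls with a non-null
   first argument are left alone.  */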
/* Number of bytes into which any type but aggregate or vector types
   should fit.  */
static constexpr size_t clear_padding_unit
  = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
/* Buffer size on which __builtin_clear_padding folding code works.  */
static const size_t clear_padding_buf_size = 32 * clear_padding_unit;

/* Data passed through __builtin_clear_padding folding.  */
struct clear_padding_struct {
  location_t loc;
  /* 0 during __builtin_clear_padding folding, nonzero during
     clear_type_padding_in_mask.  In that case, instead of clearing the
     non-padding bits in union_ptr array clear the padding bits in there.  */
  bool clear_in_mask;
  tree base;
  tree alias_type;
  gimple_stmt_iterator *gsi;
  /* Alignment of buf->base + 0.  */
  unsigned align;
  /* Offset from buf->base.  Should be always a multiple of UNITS_PER_WORD.  */
  HOST_WIDE_INT off;
  /* Number of padding bytes before buf->off that don't have padding clear
     code emitted yet.  */
  HOST_WIDE_INT padding_bytes;
  /* The size of the whole object.  Never emit code to touch
     buf->base + buf->sz or following bytes.  */
  HOST_WIDE_INT sz;
  /* Number of bytes recorded in buf->buf.  */
  size_t size;
  /* When inside union, instead of emitting code we and bits inside of
     the union_ptr array.  */
  unsigned char *union_ptr;
  /* Set bits mean padding bits that need to be cleared by the builtin.  */
  unsigned char buf[clear_padding_buf_size + clear_padding_unit];
};
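
/* As an illustration of the buf encoding (a sketch, assuming a target
   where int is 4 bytes with 4-byte alignment): for

     struct S { char c; int i; };

   the tracked bytes are

     00 ff ff ff 00 00 00 00

   i.e. only bytes 1-3, the padding between the members, have all their
   bits set, so only those bytes are cleared by the builtin.  */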
/* Emit code to clear padding requested in BUF->buf - set bits
   in there stand for padding that should be cleared.  FULL is true
   if everything from the buffer should be flushed, otherwise
   it can leave up to 2 * clear_padding_unit bytes for further
   processing.  */

static void
clear_padding_flush (clear_padding_struct *buf, bool full)
{
  gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
  if (!full && buf->size < 2 * clear_padding_unit)
    return;
  gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
  size_t end = buf->size;
  if (!full)
    end = ((end - clear_padding_unit - 1) / clear_padding_unit
	   * clear_padding_unit);
  size_t padding_bytes = buf->padding_bytes;
  if (buf->union_ptr)
    {
      if (buf->clear_in_mask)
	{
	  /* During clear_type_padding_in_mask, clear the padding
	     bits set in buf->buf in the buf->union_ptr mask.  */
	  for (size_t i = 0; i < end; i++)
	    {
	      if (buf->buf[i] == (unsigned char) ~0)
		padding_bytes++;
	      else
		{
		  memset (&buf->union_ptr[buf->off + i - padding_bytes],
			  0, padding_bytes);
		  padding_bytes = 0;
		  buf->union_ptr[buf->off + i] &= ~buf->buf[i];
		}
	    }
	  if (full)
	    {
	      memset (&buf->union_ptr[buf->off + end - padding_bytes],
		      0, padding_bytes);
	      buf->off = 0;
	      buf->size = 0;
	      buf->padding_bytes = 0;
	    }
	  else
	    {
	      memmove (buf->buf, buf->buf + end, buf->size - end);
	      buf->off += end;
	      buf->size -= end;
	      buf->padding_bytes = padding_bytes;
	    }
	  return;
	}
      /* Inside of a union, instead of emitting any code, instead
	 clear all bits in the union_ptr buffer that are clear
	 in buf.  Whole padding bytes don't clear anything.  */
      for (size_t i = 0; i < end; i++)
	{
	  if (buf->buf[i] == (unsigned char) ~0)
	    padding_bytes++;
	  else
	    {
	      padding_bytes = 0;
	      buf->union_ptr[buf->off + i] &= buf->buf[i];
	    }
	}
      if (full)
	{
	  buf->off = 0;
	  buf->size = 0;
	  buf->padding_bytes = 0;
	}
      else
	{
	  memmove (buf->buf, buf->buf + end, buf->size - end);
	  buf->off += end;
	  buf->size -= end;
	  buf->padding_bytes = padding_bytes;
	}
      return;
    }
  size_t wordsize = UNITS_PER_WORD;
  for (size_t i = 0; i < end; i += wordsize)
    {
      size_t nonzero_first = wordsize;
      size_t nonzero_last = 0;
      size_t zero_first = wordsize;
      size_t zero_last = 0;
      bool all_ones = true, bytes_only = true;
      if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
	  > (unsigned HOST_WIDE_INT) buf->sz)
	{
	  gcc_assert (wordsize > 1);
	  wordsize /= 2;
	  i -= wordsize;
	  continue;
	}
      for (size_t j = i; j < i + wordsize && j < end; j++)
	{
	  if (buf->buf[j])
	    {
	      if (nonzero_first == wordsize)
		{
		  nonzero_first = j - i;
		  nonzero_last = j - i;
		}
	      if (nonzero_last != j - i)
		all_ones = false;
	      nonzero_last = j + 1 - i;
	    }
	  else
	    {
	      if (zero_first == wordsize)
		zero_first = j - i;
	      zero_last = j + 1 - i;
	    }
	  if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
	    {
	      all_ones = false;
	      bytes_only = false;
	    }
	}
      size_t padding_end = i;
      if (padding_bytes)
	{
	  if (nonzero_first == 0
	      && nonzero_last == wordsize
	      && all_ones)
	    {
	      /* All bits are padding and we had some padding
		 before too.  Just extend it.  */
	      padding_bytes += wordsize;
	      continue;
	    }
	  if (all_ones && nonzero_first == 0)
	    {
	      padding_bytes += nonzero_last;
	      padding_end += nonzero_last;
	      nonzero_first = wordsize;
	      nonzero_last = 0;
	    }
	  else if (bytes_only && nonzero_first == 0)
	    {
	      gcc_assert (zero_first && zero_first != wordsize);
	      padding_bytes += zero_first;
	      padding_end += zero_first;
	    }
	  tree atype, src;
	  if (padding_bytes == 1)
	    {
	      atype = char_type_node;
	      src = build_zero_cst (char_type_node);
	    }
	  else
	    {
	      atype = build_array_type_nelts (char_type_node, padding_bytes);
	      src = build_constructor (atype, NULL);
	    }
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + padding_end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  padding_bytes = 0;
	  buf->padding_bytes = 0;
	}
      if (nonzero_first == wordsize)
	/* All bits in a word are 0, there are no padding bits.  */
	continue;
      if (all_ones && nonzero_last == wordsize)
	{
	  /* All bits between nonzero_first and end of word are padding
	     bits, start counting padding_bytes.  */
	  padding_bytes = nonzero_last - nonzero_first;
	  continue;
	}
      if (bytes_only)
	{
	  /* If bitfields aren't involved in this word, prefer storing
	     individual bytes or groups of them over performing a RMW
	     operation on the whole word.  */
	  gcc_assert (i + zero_last <= end);
	  for (size_t j = padding_end; j < i + zero_last; j++)
	    {
	      if (buf->buf[j])
		{
		  size_t k;
		  for (k = j; k < i + zero_last; k++)
		    if (buf->buf[k] == 0)
		      break;
		  HOST_WIDE_INT off = buf->off + j;
		  tree atype, src;
		  if (k - j == 1)
		    {
		      atype = char_type_node;
		      src = build_zero_cst (char_type_node);
		    }
		  else
		    {
		      atype = build_array_type_nelts (char_type_node, k - j);
		      src = build_constructor (atype, NULL);
		    }
		  tree dst = build2_loc (buf->loc, MEM_REF, atype,
					 buf->base,
					 build_int_cst (buf->alias_type, off));
		  gimple *g = gimple_build_assign (dst, src);
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  j = k;
		}
	    }
	  if (nonzero_last == wordsize)
	    padding_bytes = nonzero_last - zero_last;
	  continue;
	}
      for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
	{
	  if (nonzero_last - nonzero_first <= eltsz
	      && ((nonzero_first & ~(eltsz - 1))
		  == ((nonzero_last - 1) & ~(eltsz - 1))))
	    {
	      tree type;
	      if (eltsz == 1)
		type = char_type_node;
	      else
		type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
						       0);
	      size_t start = nonzero_first & ~(eltsz - 1);
	      HOST_WIDE_INT off = buf->off + i + start;
	      tree atype = type;
	      if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
		atype = build_aligned_type (type, buf->align);
	      tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				     build_int_cst (buf->alias_type, off));
	      tree src;
	      gimple *g;
	      if (all_ones
		  && nonzero_first == start
		  && nonzero_last == start + eltsz)
		src = build_zero_cst (type);
	      else
		{
		  src = make_ssa_name (type);
		  tree tmp_dst = unshare_expr (dst);
		  /* The folding introduces a read from the tmp_dst, we should
		     prevent uninitialized warning analysis from issuing warning
		     for such fake read.  In order to suppress warning only for
		     this expr, we should set the location of tmp_dst to
		     UNKNOWN_LOCATION first, then suppress_warning will call
		     set_no_warning_bit to set the no_warning flag only for
		     tmp_dst.  */
		  SET_EXPR_LOCATION (tmp_dst, UNKNOWN_LOCATION);
		  suppress_warning (tmp_dst, OPT_Wuninitialized);
		  g = gimple_build_assign (src, tmp_dst);
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  tree mask = native_interpret_expr (type,
						     buf->buf + i + start,
						     eltsz);
		  gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
		  mask = fold_build1 (BIT_NOT_EXPR, type, mask);
		  tree src_masked = make_ssa_name (type);
		  g = gimple_build_assign (src_masked, BIT_AND_EXPR,
					   src, mask);
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  src = src_masked;
		}
	      g = gimple_build_assign (dst, src);
	      gimple_set_location (g, buf->loc);
	      gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	      break;
	    }
	}
    }
  if (full)
    {
      if (padding_bytes)
	{
	  tree atype, src;
	  if (padding_bytes == 1)
	    {
	      atype = char_type_node;
	      src = build_zero_cst (char_type_node);
	    }
	  else
	    {
	      atype = build_array_type_nelts (char_type_node, padding_bytes);
	      src = build_constructor (atype, NULL);
	    }
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	}
      size_t end_rem = end % UNITS_PER_WORD;
      buf->off += end - end_rem;
      buf->size = end_rem;
      memset (buf->buf, 0, buf->size);
      buf->padding_bytes = 0;
    }
  else
    {
      memmove (buf->buf, buf->buf + end, buf->size - end);
      buf->off += end;
      buf->size -= end;
      buf->padding_bytes = padding_bytes;
    }
}
/* Append PADDING_BYTES padding bytes.  */

static void
clear_padding_add_padding (clear_padding_struct *buf,
			   HOST_WIDE_INT padding_bytes)
{
  if (padding_bytes == 0)
    return;
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    clear_padding_flush (buf, false);
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    {
      memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
      padding_bytes -= clear_padding_buf_size - buf->size;
      buf->size = clear_padding_buf_size;
      clear_padding_flush (buf, false);
      gcc_assert (buf->padding_bytes);
      /* At this point buf->buf[0] through buf->buf[buf->size - 1]
	 is guaranteed to be all ones.  */
      padding_bytes += buf->size;
      buf->size = padding_bytes % UNITS_PER_WORD;
      memset (buf->buf, ~0, buf->size);
      buf->off += padding_bytes - buf->size;
      buf->padding_bytes += padding_bytes - buf->size;
    }
  else
    {
      memset (buf->buf + buf->size, ~0, padding_bytes);
      buf->size += padding_bytes;
    }
}
static void clear_padding_type (clear_padding_struct *, tree,
				HOST_WIDE_INT, bool);

/* Clear padding bits of union type TYPE.  */

static void
clear_padding_union (clear_padding_struct *buf, tree type,
		     HOST_WIDE_INT sz, bool for_auto_init)
{
  clear_padding_struct *union_buf;
  HOST_WIDE_INT start_off = 0, next_off = 0;
  size_t start_size = 0;
  if (buf->union_ptr)
    {
      start_off = buf->off + buf->size;
      next_off = start_off + sz;
      start_size = start_off % UNITS_PER_WORD;
      start_off -= start_size;
      clear_padding_flush (buf, true);
      union_buf = buf;
    }
  else
    {
      if (sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      union_buf = XALLOCA (clear_padding_struct);
      union_buf->loc = buf->loc;
      union_buf->clear_in_mask = buf->clear_in_mask;
      union_buf->base = NULL_TREE;
      union_buf->alias_type = NULL_TREE;
      union_buf->gsi = NULL;
      union_buf->align = 0;
      union_buf->off = 0;
      union_buf->padding_bytes = 0;
      union_buf->sz = sz;
      union_buf->size = 0;
      if (sz + buf->size <= clear_padding_buf_size)
	union_buf->union_ptr = buf->buf + buf->size;
      else
	union_buf->union_ptr = XNEWVEC (unsigned char, sz);
      memset (union_buf->union_ptr, ~0, sz);
    }

  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
      {
	if (DECL_SIZE_UNIT (field) == NULL_TREE)
	  {
	    if (TREE_TYPE (field) == error_mark_node)
	      continue;
	    gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
			&& !COMPLETE_TYPE_P (TREE_TYPE (field)));
	    if (!buf->clear_in_mask && !for_auto_init)
	      error_at (buf->loc, "flexible array member %qD does not have "
				  "well defined padding bits for %qs",
			field, "__builtin_clear_padding");
	    continue;
	  }
	HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
	gcc_assert (union_buf->size == 0);
	union_buf->off = start_off;
	union_buf->size = start_size;
	memset (union_buf->buf, ~0, start_size);
	clear_padding_type (union_buf, TREE_TYPE (field), fldsz, for_auto_init);
	clear_padding_add_padding (union_buf, sz - fldsz);
	clear_padding_flush (union_buf, true);
      }

  if (buf == union_buf)
    {
      buf->off = next_off;
      buf->size = next_off % UNITS_PER_WORD;
      buf->off -= buf->size;
      memset (buf->buf, ~0, buf->size);
    }
  else if (sz + buf->size <= clear_padding_buf_size)
    buf->size += sz;
  else
    {
      unsigned char *union_ptr = union_buf->union_ptr;
      while (sz)
	{
	  clear_padding_flush (buf, false);
	  HOST_WIDE_INT this_sz
	    = MIN ((unsigned HOST_WIDE_INT) sz,
		   clear_padding_buf_size - buf->size);
	  memcpy (buf->buf + buf->size, union_ptr, this_sz);
	  buf->size += this_sz;
	  union_ptr += this_sz;
	  sz -= this_sz;
	}
      XDELETE (union_buf->union_ptr);
    }
}
/* The only known floating point formats with padding bits are the
   IEEE extended ones.  */

static bool
clear_padding_real_needs_padding_p (tree type)
{
  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
  return (fmt->b == 2
	  && fmt->signbit_ro == fmt->signbit_rw
	  && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
}

/* Return true if TYPE might contain any padding bits.  */

bool
clear_padding_type_may_have_padding_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
      return true;
    case ARRAY_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
    case REAL_TYPE:
      return clear_padding_real_needs_padding_p (type);
    default:
      return false;
    }
}
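
/* For instance, on x86 the x87 long double is an IEEE extended type
   whose sign bit is bit 79, so REAL_TYPE answers true here and the
   trailing bytes of its 12 or 16 byte object are treated as padding,
   while float and double contain no padding bits and answer false.  */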
/* Emit a runtime loop:
   for (; buf.base != end; buf.base += sz)
     __builtin_clear_padding (buf.base);  */

static void
clear_padding_emit_loop (clear_padding_struct *buf, tree type,
			 tree end, bool for_auto_init)
{
  tree l1 = create_artificial_label (buf->loc);
  tree l2 = create_artificial_label (buf->loc);
  tree l3 = create_artificial_label (buf->loc);
  gimple *g = gimple_build_goto (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l1);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  clear_padding_type (buf, type, buf->sz, for_auto_init);
  clear_padding_flush (buf, true);
  g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
			   size_int (buf->sz));
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
}
/* Clear padding bits for TYPE.  Called recursively from
   gimple_fold_builtin_clear_padding.  If FOR_AUTO_INIT is true,
   the __builtin_clear_padding is not called by the end user,
   instead, it's inserted by the compiler to initialize the
   paddings of automatic variable.  Therefore, we should not
   emit the error messages for flexible array members to confuse
   the end user.  */

static void
clear_padding_type (clear_padding_struct *buf, tree type,
		    HOST_WIDE_INT sz, bool for_auto_init)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      HOST_WIDE_INT cur_pos;
      cur_pos = 0;
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
	  {
	    tree ftype = TREE_TYPE (field);
	    if (DECL_BIT_FIELD (field))
	      {
		HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
		if (fldsz == 0)
		  continue;
		HOST_WIDE_INT pos = int_byte_position (field);
		if (pos >= sz)
		  continue;
		HOST_WIDE_INT bpos
		  = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
		bpos %= BITS_PER_UNIT;
		HOST_WIDE_INT end
		  = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
		if (pos + end > cur_pos)
		  {
		    clear_padding_add_padding (buf, pos + end - cur_pos);
		    cur_pos = pos + end;
		  }
		gcc_assert (cur_pos > pos
			    && ((unsigned HOST_WIDE_INT) buf->size
				>= (unsigned HOST_WIDE_INT) cur_pos - pos));
		unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
		if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
		  sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
				      " in %qs", "__builtin_clear_padding");
		else if (BYTES_BIG_ENDIAN)
		  {
		    /* Big endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1)
			      << (BITS_PER_UNIT - bpos - fldsz));
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
		      }
		  }
		else
		  {
		    /* Little endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1) << bpos);
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ~((1 << fldsz) - 1);
		      }
		  }
	      }
	    else if (DECL_SIZE_UNIT (field) == NULL_TREE)
	      {
		if (ftype == error_mark_node)
		  continue;
		gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
			    && !COMPLETE_TYPE_P (ftype));
		if (!buf->clear_in_mask && !for_auto_init)
		  error_at (buf->loc, "flexible array member %qD does not "
				      "have well defined padding bits for %qs",
			    field, "__builtin_clear_padding");
	      }
	    else if (is_empty_type (ftype))
	      continue;
	    else
	      {
		HOST_WIDE_INT pos = int_byte_position (field);
		if (pos >= sz)
		  continue;
		HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
		gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
		clear_padding_add_padding (buf, pos - cur_pos);
		cur_pos = pos;
		if (tree asbase = lang_hooks.types.classtype_as_base (field))
		  ftype = asbase;
		clear_padding_type (buf, ftype, fldsz, for_auto_init);
		cur_pos += fldsz;
	      }
	  }
      gcc_assert (sz >= cur_pos);
      clear_padding_add_padding (buf, sz - cur_pos);
      break;
    case ARRAY_TYPE:
      HOST_WIDE_INT nelts, fldsz;
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      if (fldsz == 0)
	break;
      nelts = sz / fldsz;
      if (nelts > 1
	  && sz > 8 * UNITS_PER_WORD
	  && buf->union_ptr == NULL
	  && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
	{
	  /* For sufficiently large array of more than one elements,
	     emit a runtime loop to keep code size manageable.  */
	  tree base = buf->base;
	  unsigned int prev_align = buf->align;
	  HOST_WIDE_INT off = buf->off + buf->size;
	  HOST_WIDE_INT prev_sz = buf->sz;
	  clear_padding_flush (buf, true);
	  tree elttype = TREE_TYPE (type);
	  buf->base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf->base));
	  gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
					   base, size_int (off));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
				   size_int (sz));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  buf->sz = fldsz;
	  buf->align = TYPE_ALIGN (elttype);
	  buf->off = 0;
	  buf->size = 0;
	  clear_padding_emit_loop (buf, elttype, end, for_auto_init);
	  buf->base = base;
	  buf->sz = prev_sz;
	  buf->align = prev_align;
	  buf->size = off % UNITS_PER_WORD;
	  buf->off = off - buf->size;
	  memset (buf->buf, 0, buf->size);
	  break;
	}
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case UNION_TYPE:
      clear_padding_union (buf, type, sz, for_auto_init);
      break;
    case REAL_TYPE:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      if (clear_padding_real_needs_padding_p (type))
	{
	  /* Use native_interpret_real + native_encode_expr to figure out
	     which bits are padding.  */
	  memset (buf->buf + buf->size, ~0, sz);
	  tree cst = native_interpret_real (type, buf->buf + buf->size, sz);
	  gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
	  int len = native_encode_expr (cst, buf->buf + buf->size, sz);
	  gcc_assert (len > 0 && (size_t) len == (size_t) sz);
	  for (size_t i = 0; i < (size_t) sz; i++)
	    buf->buf[buf->size + i] ^= ~0;
	}
      else
	memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    case COMPLEX_TYPE:
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case VECTOR_TYPE:
      nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case NULLPTR_TYPE:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, ~0, sz);
      buf->size += sz;
      break;
    default:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    }
}
/* Clear padding bits of TYPE in MASK.  */

void
clear_type_padding_in_mask (tree type, unsigned char *mask)
{
  clear_padding_struct buf;
  buf.loc = UNKNOWN_LOCATION;
  buf.clear_in_mask = true;
  buf.base = NULL_TREE;
  buf.alias_type = NULL_TREE;
  buf.gsi = NULL;
  buf.align = 0;
  buf.off = 0;
  buf.padding_bytes = 0;
  buf.sz = int_size_in_bytes (type);
  buf.size = 0;
  buf.union_ptr = mask;
  clear_padding_type (&buf, type, buf.sz, false);
  clear_padding_flush (&buf, true);
}
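
/* Example use (a sketch): start with a mask of all-ones bytes the size
   of the type; after the call the padding bits have been zeroed in it.
   For struct S { char c; int i; } with 4-byte int alignment the mask
   becomes

     ff 00 00 00 ff ff ff ff

   so callers can AND two values with the mask to ignore padding when
   comparing objects bitwise.  */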
/* Fold __builtin_clear_padding builtin.  */

static bool
gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gcc_assert (gimple_call_num_args (stmt) == 2);
  tree ptr = gimple_call_arg (stmt, 0);
  tree typearg = gimple_call_arg (stmt, 1);
  /* The 2nd argument of __builtin_clear_padding's value is used to
     distinguish whether this call is made by the user or by the compiler
     for automatic variable initialization.  */
  bool for_auto_init = (bool) TREE_INT_CST_LOW (typearg);
  tree type = TREE_TYPE (TREE_TYPE (typearg));
  location_t loc = gimple_location (stmt);
  clear_padding_struct buf;
  gimple_stmt_iterator gsiprev = *gsi;
  /* This should be folded during the lower pass.  */
  gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
  gcc_assert (COMPLETE_TYPE_P (type));
  gsi_prev (&gsiprev);

  buf.loc = loc;
  buf.clear_in_mask = false;
  buf.base = ptr;
  buf.alias_type = NULL_TREE;
  buf.gsi = gsi;
  buf.align = get_pointer_alignment (ptr);
  unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
  buf.align = MAX (buf.align, talign);
  buf.off = 0;
  buf.padding_bytes = 0;
  buf.size = 0;
  buf.sz = int_size_in_bytes (type);
  buf.union_ptr = NULL;
  if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
    sorry_at (loc, "%s not supported for variable length aggregates",
	      "__builtin_clear_padding");
  /* The implementation currently assumes 8-bit host and target
     chars which is the case for all currently supported targets
     and hosts and is required e.g. for native_{encode,interpret}* APIs.  */
  else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    sorry_at (loc, "%s not supported on this target",
	      "__builtin_clear_padding");
  else if (!clear_padding_type_may_have_padding_p (type))
    ;
  else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
    {
      tree sz = TYPE_SIZE_UNIT (type);
      tree elttype = type;
      /* Only supports C/C++ VLAs and flattens all the VLA levels.  */
      while (TREE_CODE (elttype) == ARRAY_TYPE
	     && int_size_in_bytes (elttype) < 0)
	elttype = TREE_TYPE (elttype);
      HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
      gcc_assert (eltsz >= 0);
      if (eltsz)
	{
	  buf.base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf.base));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  buf.sz = eltsz;
	  buf.align = TYPE_ALIGN (elttype);
	  buf.alias_type = build_pointer_type (elttype);
	  clear_padding_emit_loop (&buf, elttype, end, for_auto_init);
	}
    }
  else
    {
      if (!is_gimple_mem_ref_addr (buf.base))
	{
	  buf.base = make_ssa_name (TREE_TYPE (ptr));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	}
      buf.alias_type = build_pointer_type (type);
      clear_padding_type (&buf, type, buf.sz, for_auto_init);
      clear_padding_flush (&buf, true);
    }

  gimple_stmt_iterator gsiprev2 = *gsi;
  gsi_prev (&gsiprev2);
  if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
    gsi_replace (gsi, gimple_build_nop (), true);
  else
    {
      gsi_remove (gsi, true);
      *gsi = gsiprev2;
    }
  return true;
}
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), fcode);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    case BUILT_IN_CLEAR_PADDING:
      return gimple_fold_builtin_clear_padding (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
   function calls to constants, where possible.  */

static tree
fold_internal_goacc_dim (const gimple *call)
{
  int axis = oacc_get_ifn_dim_arg (call);
  int size = oacc_get_fn_dim_size (current_function_decl, axis);
  tree result = NULL_TREE;
  tree type = TREE_TYPE (gimple_call_lhs (call));

  switch (gimple_call_internal_fn (call))
    {
    case IFN_GOACC_DIM_POS:
      /* If the size is 1, we know the answer.  */
      if (size == 1)
	result = build_int_cst (type, 0);
      break;
    case IFN_GOACC_DIM_SIZE:
      /* If the size is not dynamic, we know the answer.  */
      if (size)
	result = build_int_cst (type, size);
      break;
    default:
      break;
    }

  return result;
}
/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
   for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
   &var where var is only addressable because of such calls.  */

bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || maybe_ne (TYPE_PRECISION (etype),
		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
    return false;

  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
    return false;

  return true;
}
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  gimple_move_vops (g, stmt);
  tree oldlhs = gimple_call_lhs (stmt);
  if (stmt_can_throw_internal (cfun, stmt))
    {
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  *gsi = gsiret;
}
/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
   doesn't fit into TYPE.  The test for overflow should be regardless of
   -fwrapv, and even for unsigned types.  */

bool
arith_overflowed_p (enum tree_code code, const_tree type,
		    const_tree arg0, const_tree arg1)
{
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
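
/* Example: for an 8-bit unsigned TYPE, 200 + 100 yields 300 in the
   infinite-precision computation, which needs 9 bits, so this returns
   true; 200 - 100 fits in 8 bits and returns false.  For an 8-bit
   signed TYPE, 100 + 100 = 200 needs 9 bits as a signed value and
   therefore overflows as well.  */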
/* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
   for the memory it references, otherwise return null.  VECTYPE is the
   type of the memory vector.  */

static tree
gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
{
  tree ptr = gimple_call_arg (call, 0);
  tree alias_align = gimple_call_arg (call, 1);
  tree mask = gimple_call_arg (call, 2);
  if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
    return NULL_TREE;

  unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
  if (TYPE_ALIGN (vectype) != align)
    vectype = build_aligned_type (vectype, align);
  tree offset = build_zero_cst (TREE_TYPE (alias_align));
  return fold_build2 (MEM_REF, vectype, ptr, offset);
}
/* Try to fold IFN_MASK_LOAD call CALL.  Return true on success.  */

static bool
gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
{
  tree lhs = gimple_call_lhs (call);
  if (!lhs)
    return false;

  if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
    {
      gassign *new_stmt = gimple_build_assign (lhs, rhs);
      gimple_set_location (new_stmt, gimple_location (call));
      gimple_move_vops (new_stmt, call);
      gsi_replace (gsi, new_stmt, false);
      return true;
    }
  return false;
}

/* Try to fold IFN_MASK_STORE call CALL.  Return true on success.  */

static bool
gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
{
  tree rhs = gimple_call_arg (call, 3);
  if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
    {
      gassign *new_stmt = gimple_build_assign (lhs, rhs);
      gimple_set_location (new_stmt, gimple_location (call));
      gimple_move_vops (new_stmt, call);
      gsi_replace (gsi, new_stmt, false);
      return true;
    }
  return false;
}
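
/* Example (illustrative): a vectorized load whose mask turned out to be
   all ones, such as

     vect_1 = .MASK_LOAD (p_2, 32B, { -1, -1, -1, -1 });

   becomes the unconditional

     vect_1 = MEM <vector(4) int> [(int *)p_2];

   and similarly an all-ones .MASK_STORE becomes a plain store.  */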
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						 (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  location_t loc = gimple_location (stmt);
		  gimple *new_stmt = gimple_build_builtin_unreachable (loc);
		  /* If the call had a SSA name as lhs morph that into
		     an uninitialized value.  */
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
		      set_ssa_default_def (cfun, var, lhs);
		    }
		  gimple_move_vops (new_stmt, stmt);
		  gsi_replace (gsi, new_stmt, false);
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	return true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2),
					NULL_TREE);
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  {
	    tree offset = gimple_call_arg (stmt, 1);
	    tree objsize = gimple_call_arg (stmt, 2);
	    if (integer_all_onesp (objsize)
		|| (TREE_CODE (offset) == INTEGER_CST
		    && TREE_CODE (objsize) == INTEGER_CST
		    && tree_int_cst_le (offset, objsize)))
	      {
		replace_call_with_value (gsi, NULL_TREE);
		return true;
	      }
	  }
	  break;
	case IFN_UBSAN_PTR:
	  if (integer_zerop (gimple_call_arg (stmt, 1)))
	    {
	      replace_call_with_value (gsi, NULL_TREE);
	      return true;
	    }
	  break;
	case IFN_UBSAN_BOUNDS:
	  {
	    tree index = gimple_call_arg (stmt, 1);
	    tree bound = gimple_call_arg (stmt, 2);
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_CODE (bound) == INTEGER_CST)
	      {
		index = fold_convert (TREE_TYPE (bound), index);
		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_le (index, bound))
		  {
		    replace_call_with_value (gsi, NULL_TREE);
		    return true;
		  }
	      }
	  }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MASK_LOAD:
	  changed |= gimple_fold_mask_load (gsi, stmt);
	  break;
	case IFN_MASK_STORE:
	  changed |= gimple_fold_mask_store (gsi, stmt);
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
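
/* As an example of the overflow internal-fn folding above,

     _1 = .ADD_OVERFLOW (250, 10);

   with an unsigned char result part folds to the complex constant
   __complex__ (4, 1): the REALPART_EXPR is the wrapped result and the
   IMAGPART_EXPR the overflow flag, while .ADD_OVERFLOW (5, 7) folds to
   __complex__ (12, 0).  (Illustrative; the operands are really trees.)  */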
/* Return true whether NAME has a use on STMT.  */

static bool
has_use_on_stmt (tree name, gimple *stmt)
{
  imm_use_iterator iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, iter, name)
    if (USE_STMT (use_p) == stmt)
      return true;
  return false;
}
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (res_op->code.is_tree_code ());
      auto code = tree_code (res_op->code);
      if (TREE_CODE_CLASS (code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, code, ops[0], ops[1]);
      else if (code == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (code == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      auto code = tree_code (res_op->code);
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == combined_fn (res_op->code))
    {
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
{
  bool res = false;
  tree *orig_t = t;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    {
	      if (is_debug)
		return false;
	      gcc_unreachable ();
	    }

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  else if (TREE_CODE (*orig_t) == ADDR_EXPR
	   && TREE_CODE (*t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
    {
      tree base;
      poly_int64 coffset;
      base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
					    &coffset);
      if (base)
	{
	  gcc_assert (TREE_CODE (base) == MEM_REF);
	  poly_int64 moffset;
	  if (mem_ref_offset (base).to_shwi (&moffset))
	    {
	      coffset += moffset;
	      if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
		{
		  coffset += moffset;
		  *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
		  return true;
		}
	    }
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  if (TREE_CODE (*orig_t) == ADDR_EXPR)
	    recompute_tree_invariant_for_addr_expr (*orig_t);
	  res = true;
	}
    }

  return res;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
  unsigned i;

  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	  /* Canonicalize &MEM[ssa_n, CST] to ssa_n p+ CST.
	     This cannot be done in maybe_canonicalize_mem_ref_addr
	     as the gimple then has two operands rather than one,
	     and for the same reason it cannot be done inplace.  */
	  if (!inplace && TREE_CODE (*rhs) == ADDR_EXPR)
	    {
	      tree inner = TREE_OPERAND (*rhs, 0);
	      if (TREE_CODE (inner) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (inner, 0)) == SSA_NAME
		  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
		{
		  tree ptr = TREE_OPERAND (inner, 0);
		  tree addon = TREE_OPERAND (inner, 1);
		  addon = fold_convert (sizetype, addon);
		  gimple_assign_set_rhs_with_ops (gsi, POINTER_PLUS_EXPR,
						  ptr, addon);
		  changed = true;
		  stmt = gsi_stmt (*gsi);
		}
	    }
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	gcall *call = as_a <gcall *> (stmt);
	for (i = 0; i < gimple_call_num_args (call); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (call, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (call);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	if (*lhs)
	  {
	    /* Canonicalize operand order of commutative call arguments.  */
	    combined_fn cfn = gimple_call_combined_fn (call);
	    internal_fn ifn = associated_internal_fn (cfn, TREE_TYPE (*lhs));
	    int opno = first_commutative_argument (ifn);
	    if (opno >= 0)
	      {
		tree arg1 = gimple_call_arg (call, opno);
		tree arg2 = gimple_call_arg (call, opno + 1);
		if (tree_swap_operands_p (arg1, arg2))
		  {
		    gimple_call_set_arg (call, opno, arg2);
		    gimple_call_set_arg (call, opno + 1, arg1);
		    changed = true;
		  }
	      }
	  }
	break;
      }
    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val, true))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
	break;
      }
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      gimple_match_op res_op;
      if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X;
		   X == 1 and X != 0 is X.  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		stmt = gsi_stmt (*gsi);
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a <greturn *> (stmt);
	tree ret = gimple_return_retval (ret_stmt);

	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
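/* By way of example, the boolean canonicalization above performs the
   following rewrites (with _1 of boolean type):

     _2 = _1 == 1;   ->   _2 = _1;
     _2 = _1 != 0;   ->   _2 = _1;
     _2 = _1 == 0;   ->   _2 = ~_1;      (one-bit precision type)
     _2 = _1 != 1;   ->   _2 = _1 ^ 1;   (wider boolean type)  */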
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}

/* Valueization callback that ends up following single-use SSA edges only.  */

tree
follow_single_use_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !has_single_use (val))
    return NULL_TREE;
  return val;
}

/* Valueization callback that follows all SSA edges.  */

tree
follow_all_ssa_edges (tree val)
{
  return val;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
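/* A minimal usage sketch (not taken from this file; the surrounding pass
   and basic-block iteration boilerplate are assumed):

     for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	  !gsi_end_p (gsi); gsi_next (&gsi))
       if (fold_stmt (&gsi))
	 update_stmt (gsi_stmt (gsi));

   Propagators pass their own valueization callback via the second
   overload so that folding sees lattice values instead of raw SSA
   definitions.  */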
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
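/* For example, canonicalize_bool (a < b, true) yields a >= b,
   canonicalize_bool (name_1, false) yields name_1 != 0 for a
   non-boolean-typed name, and integer constants map directly to
   boolean_true_node or boolean_false_node.  */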
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }

  return false;
}
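/* E.g. with _1 defined as _1 = a < b, same_bool_comparison_p (_1,
   LT_EXPR, a, b) holds via the SSA_NAME definition check above.  */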
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (COMPARISON_CLASS_P (op2)
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (COMPARISON_CLASS_P (op1)
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b,
		   basic_block);
static tree
and_var_with_comparison (tree type, tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b,
			 basic_block);
static tree
and_var_with_comparison_1 (tree type, gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b,
			   basic_block);
static tree
or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b,
		  basic_block);
static tree
or_var_with_comparison (tree, tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b,
			basic_block);
static tree
or_var_with_comparison_1 (tree, gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b,
			  basic_block);
/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree type, tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b,
			 basic_block outer_cond_bb)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (type, stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b, outer_cond_bb);
  else
    t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
				   outer_cond_bb);
  return canonicalize_bool (t, invert);
}
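/* E.g. the AND of !var with (a < b) is computed here as
   !(var OR (a >= b)), with canonicalize_bool undoing the inversion on
   the combined result.  */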
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (tree type, gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b,
			   basic_block outer_cond_bb)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (type, innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2, op2a,
				  op2b, outer_cond_bb);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner2, false, code2, op2a,
					   op2b, outer_cond_bb));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner1, false, code2, op2a,
					   op2b, outer_cond_bb));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b,
					      outer_cond_bb)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b,
					      outer_cond_bb)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
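/* A made-up end-to-end example of the redistribution above: with
   var = _1 | _2, _1 = (a < b) and _2 = (a > b), the query
   "var AND (a == b)" computes both partial results as false, so the
   whole expression folds to boolean_false_node.  */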
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b,
		   basic_block outer_cond_bb)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (type, op1a, invert, code2, op2a,
					  op2b, outer_cond_bb);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (type, arg, invert, code2,
						      op2a, op2b,
						      outer_cond_bb);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  break;

	default:
	  break;
	}
    }
  return NULL_TREE;
}
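/* For suitably typed operands combine_comparisons lets this reduce,
   for instance, (x < y) AND (x <= y) to x < y without looking at any
   SSA definitions at all.  */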
/* Basic block the outer condition being simplified against lives in;
   consulted by follow_outer_ssa_edges below.  */
static basic_block fosa_bb;

static tree
follow_outer_ssa_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (val))
    {
      basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (val));
      if (!def_bb
	  || def_bb == fosa_bb
	  || (dom_info_available_p (CDI_DOMINATORS)
	      && (def_bb == fosa_bb
		  || dominated_by_p (CDI_DOMINATORS, fosa_bb, def_bb))))
	return val;
      return NULL_TREE;
    }
  return val;
}
/* Helper function for maybe_fold_and_comparisons and
   maybe_fold_or_comparisons: try to simplify the AND/OR of the two
   comparisons (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B) from match.pd.
   Return NULL_EXPR if we can't simplify this to a single expression.
   As we are going to lower the cost of building SSA names / gimple stmts
   significantly, we need to allocate them on the stack.  This will cause
   the code to be a bit ugly.  */

static tree
maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
				      enum tree_code code1,
				      tree op1a, tree op1b,
				      enum tree_code code2, tree op2a,
				      tree op2b,
				      basic_block outer_cond_bb)
{
  /* Allocate gimple stmt1 on the stack.  */
  gassign *stmt1
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt1, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt1, code1);
  gimple_assign_set_rhs1 (stmt1, op1a);
  gimple_assign_set_rhs2 (stmt1, op1b);
  gimple_set_bb (stmt1, NULL);

  /* Allocate gimple stmt2 on the stack.  */
  gassign *stmt2
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt2, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt2, code2);
  gimple_assign_set_rhs1 (stmt2, op2a);
  gimple_assign_set_rhs2 (stmt2, op2b);
  gimple_set_bb (stmt2, NULL);

  /* Allocate the SSA name lhs1 on the stack.  */
  tree lhs1 = (tree) XALLOCA (tree_ssa_name);
  memset (lhs1, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs1, SSA_NAME);
  TREE_TYPE (lhs1) = type;
  init_ssa_name_imm_use (lhs1);

  /* Allocate the SSA name lhs2 on the stack.  */
  tree lhs2 = (tree) XALLOCA (tree_ssa_name);
  memset (lhs2, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs2, SSA_NAME);
  TREE_TYPE (lhs2) = type;
  init_ssa_name_imm_use (lhs2);

  gimple_assign_set_lhs (stmt1, lhs1);
  gimple_assign_set_lhs (stmt2, lhs2);

  gimple_match_op op (gimple_match_cond::UNCOND, code,
		      type, gimple_assign_lhs (stmt1),
		      gimple_assign_lhs (stmt2));
  fosa_bb = outer_cond_bb;
  if (op.resimplify (NULL, (!outer_cond_bb
			    ? follow_all_ssa_edges : follow_outer_ssa_edges)))
    {
      if (gimple_simplified_result_is_gimple_val (&op))
	{
	  tree res = op.ops[0];
	  if (res == lhs1)
	    return build2 (code1, type, op1a, op1b);
	  else if (res == lhs2)
	    return build2 (code2, type, op2a, op2b);
	  else
	    return res;
	}
      else if (op.code.is_tree_code ()
	       && TREE_CODE_CLASS ((tree_code) op.code) == tcc_comparison)
	{
	  tree op0 = op.ops[0];
	  tree op1 = op.ops[1];
	  if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
	    return NULL_TREE;  /* not simple */

	  return build2 ((enum tree_code) op.code, op.type, op0, op1);
	}
    }

  return NULL_TREE;
}
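/* Note the design choice above: the two temporary statements and SSA
   names live only on the stack and never enter the IL or the SSA name
   table, so a failed resimplification leaves no garbage behind.  Any
   result that still refers to the fake lhs1/lhs2 names is rejected as
   "not simple".  */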
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (tree type,
			    enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b,
			    basic_block outer_cond_bb)
{
  if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
				  outer_cond_bb))
    return t;

  if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
				  outer_cond_bb))
    return t;

  if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
						     op1a, op1b, code2, op2a,
						     op2b, outer_cond_bb))
    return t;

  return NULL_TREE;
}
/* Helper function for or_comparisons_1: try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree type, tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b,
			basic_block outer_cond_bb)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (type, stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b, outer_cond_bb);
  else
    t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
				  outer_cond_bb);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (tree type, gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b,
			  basic_block outer_cond_bb)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (type, innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2, op2a, op2b, outer_cond_bb);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner2, false, code2, op2a,
					  op2b, outer_cond_bb));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner1, false, code2, op2a,
					  op2b, outer_cond_bb));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b,
					     outer_cond_bb)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b,
					     outer_cond_bb)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b,
		  basic_block outer_cond_bb)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (type, op1a, invert, code2, op2a,
					 op2b, outer_cond_bb);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (type, arg, invert, code2,
						     op2a, op2b,
						     outer_cond_bb);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  break;

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (tree type,
			   enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b,
			   basic_block outer_cond_bb)
{
  if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
				 outer_cond_bb))
    return t;

  if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
				 outer_cond_bb))
    return t;

  if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
						     op1a, op1b, code2, op2a,
						     op2b, outer_cond_bb))
    return t;

  return NULL_TREE;
}
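/* Mirror image of maybe_fold_and_comparisons; e.g. (x < y) OR (x == y)
   can combine to x <= y here for integral operands.  */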
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  gimple_match_op res_op;
  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (&res_op))
	res = res_op.ops[0];
      else if (mprts_hook)
	res = mprts_hook (&res_op);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  poly_int64 offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && known_eq (CONSTRUCTOR_NELTS (rhs),
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i, nelts;
		  tree val;

		  nelts = CONSTRUCTOR_NELTS (rhs);
		  tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec.quick_push (val);
		      else
			return NULL_TREE;
		    }

		  return vec.build ();
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    /* Translate &x + CST into an invariant form suitable for
	       further propagation.  */
	    if (subcode == POINTER_PLUS_EXPR)
	      {
		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		if (TREE_CODE (op0) == ADDR_EXPR
		    && TREE_CODE (op1) == INTEGER_CST)
		  {
		    tree off = fold_convert (ptr_type_node, op1);
		    return build1_loc
			(loc, ADDR_EXPR, TREE_TYPE (op0),
			 fold_build2 (MEM_REF,
				      TREE_TYPE (TREE_TYPE (op0)),
				      unshare_expr (op0), off));
		  }
	      }
	    /* Canonicalize bool != 0 and bool == 0 appearing after
	       valueization.  While gimple_simplify handles this
	       it can get confused by the ~X == 1 -> X == 0 transform
	       which we can't reduce to a SSA name or a constant
	       (and we have no way to tell gimple_simplify to not
	       consider those transforms in the first place).  */
	    else if (subcode == EQ_EXPR
		     || subcode == NE_EXPR)
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree op0 = gimple_assign_rhs1 (stmt);
		if (useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (op0)))
		  {
		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		    op0 = (*valueize) (op0);
		    if (TREE_CODE (op0) == INTEGER_CST)
		      std::swap (op0, op1);
		    if (TREE_CODE (op1) == INTEGER_CST
			&& ((subcode == NE_EXPR && integer_zerop (op1))
			    || (subcode == EQ_EXPR && integer_onep (op1))))
		      return op0;
		  }
	      }
	    return NULL_TREE;

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
	      return fold_ternary_loc (loc, subcode,
				       TREE_TYPE (gimple_assign_lhs (stmt)),
				       op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      case IFN_BUILTIN_EXPECT:
		{
		  tree arg0 = gimple_call_arg (stmt, 0);
		  tree op0 = (*valueize) (arg0);
		  if (TREE_CODE (op0) == INTEGER_CST)
		    return op0;
		  return NULL_TREE;
		}
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && fndecl_built_in_p (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt),
					      args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
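/* Some illustrative foldings performed above, assuming VALUEIZE maps
   the names shown to the given constants:

     _1 = _2 + _3;                     -> 7     (_2 -> 3, _3 -> 4)
     _1 = IFN_UBSAN_CHECK_MUL (_2, 0); -> 0     (even with _2 unknown)
     _1 = &a[_4];                      -> &a[2] as an invariant address
					        (_4 -> 2, a invariant)  */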
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, poly_int64_pod *bit_offset,
		      tree (*valueize) (tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
      if (!boff.to_shwi (bit_offset))
	return NULL_TREE;

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
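/* E.g. for 'static const int a;' this returns error_mark_node (the
   initializer is known to be all zeros), while a declaration whose
   initializer cannot be used for folding yields NULL_TREE, i.e. the
   exact opposite convention of ctor_for_folding as noted above.  */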
/* CTOR is CONSTRUCTOR of an array type.  Fold a reference of SIZE bits
   to the memory at bit OFFSET.  When non-null, TYPE is the expected
   type of the reference; otherwise the type of the referenced element
   is used instead.  When SIZE is zero, attempt to fold a reference to
   the entire element which OFFSET refers to.  Increment *SUBOFF by
   the bit offset of the accessed element.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl,
			   unsigned HOST_WIDE_INT *suboff)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* When TYPE is non-null, verify that it specifies a constant-sized
     access of a multiple of the array element size.  Avoid division
     by zero below when ELT_SIZE is zero, such as with the result of
     an initializer for a zero-length array or an empty struct.  */
  if (elt_size == 0
      || (type
	  && (!TYPE_SIZE_UNIT (type)
	      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
  if (size > elt_sz * BITS_PER_UNIT)
    {
      /* native_encode_expr constraints.  */
      if (size > MAX_BITSIZE_MODE_ANY_MODE
	  || size % BITS_PER_UNIT != 0
	  || inner_offset % BITS_PER_UNIT != 0
	  || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
	return NULL_TREE;

      unsigned ctor_idx;
      tree val = get_array_ctor_element_at_index (ctor, access_index,
						  &ctor_idx);
      if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
	return build_zero_cst (type);

      /* native-encode adjacent ctor elements.  */
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      unsigned bufoff = 0;
      offset_int index = 0;
      offset_int max_index = access_index;
      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
      if (!val)
	val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
      else if (!CONSTANT_CLASS_P (val))
	return NULL_TREE;
      if (!elt->index)
	;
      else if (TREE_CODE (elt->index) == RANGE_EXPR)
	{
	  index = wi::to_offset (TREE_OPERAND (elt->index, 0));
	  max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
	}
      else
	index = max_index = wi::to_offset (elt->index);
      index = wi::umax (index, access_index);
      do
	{
	  if (bufoff + elt_sz > sizeof (buf))
	    elt_sz = sizeof (buf) - bufoff;
	  int len = native_encode_expr (val, buf + bufoff, elt_sz,
					inner_offset / BITS_PER_UNIT);
	  if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
	    return NULL_TREE;
	  inner_offset = 0;
	  bufoff += len;

	  access_index += 1;
	  if (wi::cmpu (access_index, index) == 0)
	    val = elt->value;
	  else if (wi::cmpu (access_index, max_index) > 0)
	    {
	      ctor_idx++;
	      if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
		{
		  val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		  ++max_index;
		}
	      else
		{
		  elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
		  index = 0;
		  max_index = access_index;
		  if (!elt->index)
		    ;
		  else if (TREE_CODE (elt->index) == RANGE_EXPR)
		    {
		      index = wi::to_offset (TREE_OPERAND (elt->index, 0));
		      max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
		    }
		  else
		    index = max_index = wi::to_offset (elt->index);
		  index = wi::umax (index, access_index);
		  if (wi::cmpu (access_index, index) == 0)
		    val = elt->value;
		  else
		    val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		}
	    }
	}
      while (bufoff < size / BITS_PER_UNIT);
      *suboff += size;
      return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }

  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    {
      if (!size && TREE_CODE (val) != CONSTRUCTOR)
	{
	  /* For the final reference to the entire accessed element
	     (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
	     may be null) in favor of the type of the element, and set
	     SIZE to the size of the accessed element.  */
	  inner_offset = 0;
	  type = TREE_TYPE (val);
	  size = elt_sz * BITS_PER_UNIT;
	}
      else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
	       && TREE_CODE (val) == CONSTRUCTOR
	       && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
	/* If this isn't the last element in the CTOR and a CTOR itself
	   and it does not cover the whole object we are requesting give up
	   since we're not set up for combining from multiple CTORs.  */
	return NULL_TREE;

      *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
      return fold_ctor_reference (type, val, inner_offset, size, from_decl,
				  suboff);
    }

  /* Memory not explicitly mentioned in constructor is 0 (or
     the reference is out of range).  */
  return type ? build_zero_cst (type) : NULL_TREE;
}

/* CTOR is CONSTRUCTOR of an aggregate or vector.  Fold a reference
   of SIZE bits to the memory at bit OFFSET.  When non-null, TYPE
   is the expected type of the reference; otherwise the type of
   the referenced member is used instead.  When SIZE is zero,
   attempt to fold a reference to the entire member which OFFSET
   refers to.  Increment *SUBOFF by the bit offset of the accessed
   member.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl,
			      unsigned HOST_WIDE_INT *suboff)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);

      if (!field_size)
	{
	  /* Determine the size of the flexible array member from
	     the size of the initializer provided for it.  */
	  field_size = TYPE_SIZE (TREE_TYPE (cval));
	}

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      offset_int bitoffset
	= (wi::to_offset (field_offset)
	   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      offset_int bitoffset_end;
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      /* Compute the bit offset of the end of the desired access.
	 As a special case, if the size of the desired access is
	 zero, assume the access is to the entire field (and let
	 the caller make any necessary adjustments by storing
	 the actual bounds of the field in FIELDBOUNDS).  */
      offset_int access_end = offset_int (offset);
      if (size)
	access_end += size;
      else
	access_end = bitoffset_end;

      /* Is there any overlap between the desired access at
	 [OFFSET, OFFSET+SIZE) and the offset of the field within
	 the object at [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  *suboff += bitoffset.to_uhwi ();

	  if (!size && TREE_CODE (cval) != CONSTRUCTOR)
	    {
	      /* For the final reference to the entire accessed member
		 (SIZE is zero), reset OFFSET, disregard TYPE (which may
		 be null) in favor of the type of the member, and set
		 SIZE to the size of the accessed member.  */
	      offset = bitoffset.to_uhwi ();
	      type = TREE_TYPE (cval);
	      size = (bitoffset_end - bitoffset).to_uhwi ();
	    }

	  /* We do have overlap.  Now see if the field is large enough
	     to cover the access.  Give up for accesses that extend
	     beyond the end of the object or that span multiple fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;

	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl, suboff);
	}
    }

  if (!type)
    return NULL_TREE;

  return build_zero_cst (type);
}
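/* A worked (hypothetical) example: for
     static const struct { int i; short s[2]; } c = { 7, { 1, 2 } };
   a 16-bit read at bit offset 48 first hits fold_nonarray_ctor_reference,
   which selects the field 's' and recurses; fold_array_ctor_reference
   then computes access_index 1 and returns the element 2.  */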
8043 /* CTOR is value initializing memory. Fold a reference of TYPE and
8044 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
8045 is zero, attempt to fold a reference to the entire subobject
8046 which OFFSET refers to. This is used when folding accesses to
8047 string members of aggregates. When non-null, set *SUBOFF to
8048 the bit offset of the accessed subobject. */
tree
fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
		     const poly_uint64 &poly_size, tree from_decl,
		     unsigned HOST_WIDE_INT *suboff /* = NULL */)
{
  tree ret;

  /* We found the field with exact match.  */
  if (type
      && useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* The remaining optimizations need a constant size and offset.  */
  unsigned HOST_WIDE_INT size, offset;
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
    return NULL_TREE;

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }

  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && offset / BITS_PER_UNIT <= INT_MAX
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE
      && can_native_interpret_type_p (type))
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }

  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT dummy = 0;
      if (!suboff)
	suboff = &dummy;

      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	ret = fold_array_ctor_reference (type, ctor, offset, size,
					 from_decl, suboff);
      else
	ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
					    from_decl, suboff);

      /* Fall back to native_encode_initializer.  Needs to be done
	 only in the outermost fold_ctor_reference call (because it itself
	 recurses into CONSTRUCTORs) and doesn't update suboff.  */
      if (ret == NULL_TREE
	  && suboff == &dummy
	  && BITS_PER_UNIT == 8
	  && offset % BITS_PER_UNIT == 0
	  && offset / BITS_PER_UNIT <= INT_MAX
	  && size % BITS_PER_UNIT == 0
	  && size <= MAX_BITSIZE_MODE_ANY_MODE
	  && can_native_interpret_type_p (type))
	{
	  unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	  int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
					       offset / BITS_PER_UNIT);
	  if (len > 0)
	    return native_interpret_expr (type, buf, len);
	}

      return ret;
    }

  return NULL_TREE;
}
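
/* Usage sketch (illustrative only; the decl and its constructor are
   hypothetical, and a 32-bit int is assumed): given a global

     const int a[4] = { 1, 2, 3, 4 };

   whose DECL_INITIAL is the CONSTRUCTOR ctor, the read of a[2] as a
   32-bit integer can be folded with

     tree val = fold_ctor_reference (integer_type_node, ctor,
				     2 * 32, 32, decl);

   where both the offset and the size are in bits; when the fold
   succeeds this returns the INTEGER_CST 3.  */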
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */
tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (sizetype));
	      woffset *= tree_to_uhwi (unit_size);
	      woffset *= BITS_PER_UNIT;
	      if (woffset.to_shwi (&offset))
		{
		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
      if (tem)
	return tem;

      /* For bit field reads try to read the representative and
	 adjust.  */
      if (TREE_CODE (t) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
	  && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
	{
	  HOST_WIDE_INT csize, coffset;
	  tree field = TREE_OPERAND (t, 1);
	  tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
	      && size.is_constant (&csize)
	      && offset.is_constant (&coffset)
	      && (coffset % BITS_PER_UNIT != 0
		  || csize % BITS_PER_UNIT != 0)
	      && !reverse
	      && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
	    {
	      poly_int64 bitoffset;
	      poly_uint64 field_offset, repr_offset;
	      if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
		  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
		bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
	      else
		bitoffset = 0;
	      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
			    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
	      HOST_WIDE_INT bitoff;
	      int diff = (TYPE_PRECISION (TREE_TYPE (repr))
			  - TYPE_PRECISION (TREE_TYPE (field)));
	      if (bitoffset.is_constant (&bitoff)
		  && bitoff >= 0
		  && bitoff <= diff)
		{
		  offset -= bitoff;
		  size = tree_to_uhwi (DECL_SIZE (repr));

		  tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
					     size, base);
		  if (tem && TREE_CODE (tem) == INTEGER_CST)
		    {
		      if (!BYTES_BIG_ENDIAN)
			tem = wide_int_to_tree (TREE_TYPE (field),
						wi::lrshift (wi::to_wide (tem),
							     bitoff));
		      else
			tem = wide_int_to_tree (TREE_TYPE (field),
						wi::lrshift (wi::to_wide (tem),
							     diff - bitoff));
		      return tem;
		    }
		}
	    }
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
/* Lookup virtual method with index TOKEN in a virtual table V
   found at OFFSET.
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */
tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that the ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* The C++ FE can now produce indexed fields, and we check if the indexes
     match.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
      gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_unreachable ();
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
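
/* Worked example (illustrative, assuming 64-bit function pointers):
   for a call through the third slot (TOKEN == 2) of a vtable V at
   byte OFFSET 16, the code above computes

     size	   = 64			   (bits per vtable entry)
     offset	   = 16 * 8 + 2 * 64 = 256 (bit offset of the entry)
     access_index  = 256 / 8 / 8     = 4   (element in the CONSTRUCTOR)

   and then reads CONSTRUCTOR_ELT (init, 4)->value directly instead of
   scanning the whole initializer.  */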
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */
tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */
tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (!sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
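
/* Illustrative folds performed above (the declarations are
   hypothetical): with "int a[4];" and an SSA pointer "int *p;",

     *(int *)&a	  becomes  a[0]		      (the ARRAY_REF case)
     *(p + 4)	  becomes  MEM_REF <p, 4>     (the final *(p + CST) case)

   in each case subject to the type-compatibility checks made via
   useless_type_conversion_p in the code above.  */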
/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case ABS_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;

    default:
      return false;
    }
}
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operands, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   If IN_PLACE is true, adjust the stmt in place and return NULL.
   Otherwise return a sequence of statements that replace STMT and also
   contain a modified form of STMT itself.  */
gimple_seq
rewrite_to_defined_overflow (gimple *stmt, bool in_place /* = false */)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
    gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
  else
    for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
      {
	tree op = gimple_op (stmt, i);
	op = gimple_convert (&stmts, type, op);
	gimple_set_op (stmt, i, op);
      }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_set_modified (stmt, true);
  if (in_place)
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (stmts)
	gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
      stmts = NULL;
    }
  else
    gimple_seq_add_stmt (&stmts, stmt);
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  if (in_place)
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_insert_after (&gsi, cvt, GSI_SAME_STMT);
      update_stmt (stmt);
    }
  else
    gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
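
/* Example of the rewrite (illustrative GIMPLE; the SSA names are made
   up): a statement with undefined signed overflow such as

     c_3 = a_1 + b_2;		signed int addition

   becomes

     a.0_4 = (unsigned int) a_1;
     b.1_5 = (unsigned int) b_2;
     _6 = a.0_4 + b.1_5;	unsigned, wraps, no undefined behavior
     c_3 = (int) _6;

   with the final conversion being the NOP_EXPR assignment built
   above.  */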
/* The valueization hook we use for the gimple_build API simplification.
   This makes us match fold_buildN behavior by only combining with
   statements in the sequence(s) we are currently building.  */

static tree
gimple_build_valueize (tree op)
{
  if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
    return op;
  return NULL_TREE;
}
/* Helper for gimple_build to perform the final insertion of stmts on SEQ.  */

static void
gimple_build_insert_seq (gimple_stmt_iterator *gsi,
			 bool before, gsi_iterator_update update,
			 gimple_seq seq)
{
  if (before)
    {
      if (gsi->bb)
	gsi_insert_seq_before (gsi, seq, update);
      else
	gsi_insert_seq_before_without_update (gsi, seq, update);
    }
  else
    {
      if (gsi->bb)
	gsi_insert_seq_after (gsi, seq, update);
      else
	gsi_insert_seq_after_without_update (gsi, seq, update);
    }
}
/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and inserts statements possibly defining it
   before GSI if BEFORE is true or after GSI if false, advancing
   the iterator accordingly.
   If GSI refers to a basic block, simplification may look at all
   SSA defs; when it does not, it is restricted to SSA defs that are
   not associated with a basic block yet, indicating that they belong
   to the sequence currently being built.  */
tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, enum tree_code code, tree type, tree op0)
{
  gimple_seq seq = NULL;
  tree res
    = gimple_simplify (code, type, op0, &seq,
		       gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == REALPART_EXPR
	  || code == IMAGPART_EXPR
	  || code == VIEW_CONVERT_EXPR)
	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
	stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (&seq, stmt);
    }
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}
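
/* A minimal usage sketch (the iterator and operand are hypothetical):
   to compute -VAL before the statement GSI points at,

     tree neg = gimple_build (&gsi, true, GSI_SAME_STMT, loc,
			      NEGATE_EXPR, TREE_TYPE (val), val);

   if gimple_simplify can fold the negation (e.g. VAL is a constant or
   itself a negation), no new statement is emitted and the folded
   value is returned instead.  */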
/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value inserting any new statements at GSI honoring BEFORE
   and UPDATE.  */
tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, enum tree_code code, tree type,
	      tree op0, tree op1)
{
  gimple_seq seq = NULL;
  tree res
    = gimple_simplify (code, type, op0, op1, &seq,
		       gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt = gimple_build_assign (res, code, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (&seq, stmt);
    }
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}
/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value inserting any new statements at GSI honoring BEFORE
   and UPDATE.  */
tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, enum tree_code code, tree type,
	      tree op0, tree op1, tree op2)
{
  gimple_seq seq = NULL;
  tree res
    = gimple_simplify (code, type, op0, op1, op2, &seq,
		       gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == BIT_FIELD_REF)
	stmt = gimple_build_assign (res, code,
				    build3 (code, type, op0, op1, op2));
      else
	stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (&seq, stmt);
    }
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}
/* Build the call FN () with a result of type TYPE (or no result if TYPE is
   void) with a location LOC.  Returns the built expression value (or NULL_TREE
   if TYPE is void) inserting any new statements at GSI honoring BEFORE
   and UPDATE.  */
tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, combined_fn fn, tree type)
{
  tree res = NULL_TREE;
  gimple_seq seq = NULL;
  gimple *stmt;
  if (internal_fn_p (fn))
    stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
  else
    {
      tree decl = builtin_decl_implicit (as_builtin_fn (fn));
      stmt = gimple_build_call (decl, 0);
    }
  if (!VOID_TYPE_P (type))
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple_call_set_lhs (stmt, res);
    }
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (&seq, stmt);
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}
/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) inserting any new
   statements at GSI honoring BEFORE and UPDATE.  */
tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, combined_fn fn,
	      tree type, tree arg0)
{
  gimple_seq seq = NULL;
  tree res = gimple_simplify (fn, type, arg0, &seq, gimple_build_valueize);
  if (!res)
    {
      gimple *stmt;
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 1, arg0);
	}
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (&seq, stmt);
    }
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}
/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) inserting any new
   statements at GSI honoring BEFORE and UPDATE.  */
tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, combined_fn fn,
	      tree type, tree arg0, tree arg1)
{
  gimple_seq seq = NULL;
  tree res = gimple_simplify (fn, type, arg0, arg1, &seq,
			      gimple_build_valueize);
  if (!res)
    {
      gimple *stmt;
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 2, arg0, arg1);
	}
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (&seq, stmt);
    }
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}
/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) inserting any new
   statements at GSI honoring BEFORE and UPDATE.  */
tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, combined_fn fn,
	      tree type, tree arg0, tree arg1, tree arg2)
{
  gimple_seq seq = NULL;
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
			      &seq, gimple_build_valueize);
  if (!res)
    {
      gimple *stmt;
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn),
					   3, arg0, arg1, arg2);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
	}
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (&seq, stmt);
    }
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}
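
/* A usage sketch for the call overloads (the operand ARG is
   hypothetical):

     tree r = gimple_build (&gsi, true, GSI_SAME_STMT, loc,
			    CFN_BUILT_IN_SQRT, double_type_node, arg);

   the combined_fn is lowered either to an internal-function call or
   to a call of the implicit builtin decl, and an LHS is created only
   because TYPE is not void here.  */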
/* Build CODE (OP0) with a result of type TYPE (or no result if TYPE is
   void) with location LOC, simplifying it first if possible.  Returns the
   built expression value (or NULL_TREE if TYPE is void) inserting any new
   statements at GSI honoring BEFORE and UPDATE.  */
tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, code_helper code, tree type, tree op0)
{
  if (code.is_tree_code ())
    return gimple_build (gsi, before, update, loc, tree_code (code),
			 type, op0);
  return gimple_build (gsi, before, update, loc, combined_fn (code),
		       type, op0);
}
/* Build CODE (OP0, OP1) with a result of type TYPE (or no result if TYPE is
   void) with location LOC, simplifying it first if possible.  Returns the
   built expression value (or NULL_TREE if TYPE is void) inserting any new
   statements at GSI honoring BEFORE and UPDATE.  */
tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, code_helper code, tree type, tree op0, tree op1)
{
  if (code.is_tree_code ())
    return gimple_build (gsi, before, update,
			 loc, tree_code (code), type, op0, op1);
  return gimple_build (gsi, before, update,
		       loc, combined_fn (code), type, op0, op1);
}
/* Build CODE (OP0, OP1, OP2) with a result of type TYPE (or no result if TYPE
   is void) with location LOC, simplifying it first if possible.  Returns the
   built expression value (or NULL_TREE if TYPE is void) inserting any new
   statements at GSI honoring BEFORE and UPDATE.  */
tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, code_helper code,
	      tree type, tree op0, tree op1, tree op2)
{
  if (code.is_tree_code ())
    return gimple_build (gsi, before, update,
			 loc, tree_code (code), type, op0, op1, op2);
  return gimple_build (gsi, before, update,
		       loc, combined_fn (code), type, op0, op1, op2);
}
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression inserting any new statements
   at GSI honoring BEFORE and UPDATE.  */
tree
gimple_convert (gimple_stmt_iterator *gsi,
		bool before, gsi_iterator_update update,
		location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (gsi, before, update, loc, NOP_EXPR, type, op);
}
/* Build the conversion (ptrofftype) OP with a result of a type
   compatible with ptrofftype with location LOC if such conversion
   is necessary in GIMPLE, simplifying it first.
   Returns the built expression value inserting any new statements
   at GSI honoring BEFORE and UPDATE.  */
tree
gimple_convert_to_ptrofftype (gimple_stmt_iterator *gsi,
			      bool before, gsi_iterator_update update,
			      location_t loc, tree op)
{
  if (ptrofftype_p (TREE_TYPE (op)))
    return op;
  return gimple_convert (gsi, before, update, loc, sizetype, op);
}
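
/* Example (the offset OFF is hypothetical): the offset operand of a
   POINTER_PLUS_EXPR must have a ptrofftype type, so a caller can
   normalize an arbitrary integer offset with

     off = gimple_convert_to_ptrofftype (&gsi, true, GSI_SAME_STMT,
					 loc, off);

   which emits nothing when OFF already has such a type.  */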
/* Build a vector of type TYPE in which each element has the value OP.
   Return a gimple value for the result, inserting any new statements
   at GSI honoring BEFORE and UPDATE.  */
tree
gimple_build_vector_from_val (gimple_stmt_iterator *gsi,
			      bool before, gsi_iterator_update update,
			      location_t loc, tree type, tree op)
{
  if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
      && !CONSTANT_CLASS_P (op))
    return gimple_build (gsi, before, update,
			 loc, VEC_DUPLICATE_EXPR, type, op);

  tree res, vec = build_vector_from_val (type, op);
  if (is_gimple_val (vec))
    return vec;
  if (gimple_in_ssa_p (cfun))
    res = make_ssa_name (type);
  else
    res = create_tmp_reg (type);
  gimple_seq seq = NULL;
  gimple *stmt = gimple_build_assign (res, vec);
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (&seq, stmt);
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, inserting
   any new instructions at GSI honoring BEFORE and UPDATE.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */
tree
gimple_build_vector (gimple_stmt_iterator *gsi,
		     bool before, gsi_iterator_update update,
		     location_t loc, tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!CONSTANT_CLASS_P ((*builder)[i]))
      {
	gimple_seq seq = NULL;
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (&seq, stmt);
	gimple_build_insert_seq (gsi, before, update, seq);
	return res;
      }
  return builder->build ();
}
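
/* Usage sketch (the vector type VECTYPE and SSA name X are
   hypothetical): to materialize the vector { x, x, x, x },

     tree_vector_builder b (vectype, 4, 1);
     for (int i = 0; i < 4; ++i)
       b.quick_push (x);
     tree v = gimple_build_vector (&gsi, true, GSI_SAME_STMT, loc, &b);

   because X is not constant this emits a CONSTRUCTOR assignment
   rather than returning a VECTOR_CST.  */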
/* Emit statements at GSI (honoring BEFORE and UPDATE) that take a value
   given in OLD_SIZE and generate a value guaranteed to be rounded upwards
   to ALIGN.

   Return the tree node representing this size; it is of TREE_TYPE TYPE.  */
tree
gimple_build_round_up (gimple_stmt_iterator *gsi,
		       bool before, gsi_iterator_update update,
		       location_t loc, tree type,
		       tree old_size, unsigned HOST_WIDE_INT align)
{
  unsigned HOST_WIDE_INT tg_mask = align - 1;
  /* tree new_size = (old_size + tg_mask) & ~tg_mask;  */
  gcc_assert (INTEGRAL_TYPE_P (type));
  tree tree_mask = build_int_cst (type, tg_mask);
  tree oversize = gimple_build (gsi, before, update,
				loc, PLUS_EXPR, type, old_size, tree_mask);

  tree mask = build_int_cst (type, -align);
  return gimple_build (gsi, before, update,
		       loc, BIT_AND_EXPR, type, oversize, mask);
}
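
/* Worked example (ALIGN assumed to be a power of two): for
   OLD_SIZE == 13 and ALIGN == 8 the two statements built above compute

     oversize = 13 + 7	= 20
     result   = 20 & -8	= 16

   i.e. the smallest multiple of 8 not less than 13.  */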
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     type,
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      type,
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
/* Return true if return value of call STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
static bool
gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  tree arg0
    = gimple_call_num_args (stmt) > 0 ? gimple_call_arg (stmt, 0) : NULL_TREE;
  tree arg1
    = gimple_call_num_args (stmt) > 1 ? gimple_call_arg (stmt, 1) : NULL_TREE;
  tree lhs = gimple_call_lhs (stmt);
  return (lhs
	  && tree_call_nonnegative_warnv_p (TREE_TYPE (lhs),
					    gimple_call_combined_fn (stmt),
					    arg0, arg1,
					    strict_overflow_p, depth));
}
/* Return true if the result of PHI node STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
static bool
gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
	return false;
    }
  return true;
}
/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
						depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
					      depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
					     depth);
    default:
      return false;
    }
}
/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */
static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
					  gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
					   gimple_assign_rhs1 (stmt),
					   gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
/* Return true if the floating-point value computed by call STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */
static bool
gimple_call_integer_valued_real_p (gimple *stmt, int depth)
{
  tree arg0 = (gimple_call_num_args (stmt) > 0
	       ? gimple_call_arg (stmt, 0)
	       : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
	       ? gimple_call_arg (stmt, 1)
	       : NULL_TREE);
  return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
				     arg0, arg1, depth);
}
/* Return true if the floating-point result of phi STMT is known to have
   an integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */
static bool
gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!integer_valued_real_single_p (arg, depth + 1))
	return false;
    }
  return true;
}
/* Return true if the floating-point value computed by STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */
bool
gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_integer_valued_real_p (stmt, depth);
    case GIMPLE_CALL:
      return gimple_call_integer_valued_real_p (stmt, depth);
    case GIMPLE_PHI:
      return gimple_phi_integer_valued_real_p (stmt, depth);