/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2022 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.cc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "coretypes.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-access.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "tree-object-size.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "diagnostic-core.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"
#include "internal-fn.h"

enum strlen_range_kind
{
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};
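
/* Illustrative example (not part of the original sources): given a
   declaration such as

     char buf[8];   // contents unknown

   an SRK_STRLEN query on BUF fails because no exact constant length
   exists, while an SRK_LENRANGE query yields the range [0, 7]: the
   string may be empty, and at most 7 characters fit in the array
   before the terminating nul.  */
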
static bool get_range_strlen (tree, bitmap, strlen_range_kind,
			      c_strlen_data *, unsigned);

/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.  We can get declarations that are not possible
   to reference for various reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation units.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during the final compilation stage.
	At this time we have already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred to only if they are defined and not
     optimized out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     The exceptions are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}

/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, a SSA name is created.  Otherwise a temporary register
   is made.  */

static tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}

/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* ??? We should be able to assert that TREE_ADDRESSABLE is set,
	   but since the use can be in a debug stmt we can't.  */
	;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
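
/* Illustrative example (not part of the original sources): a constructor
   value written as the GENERIC expression &a p+ 4 (a POINTER_PLUS_EXPR
   with a constant offset) is rewritten above into the equivalent
   address &MEM_REF (&a, 4), a form that is_gimple_min_invariant
   accepts.  */
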
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val
	      && is_gimple_min_invariant (val)
	      && useless_type_conversion_p (TREE_TYPE (sym), TREE_TYPE (val)))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
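
/* Illustrative example (not part of the original sources):

     static const int x;        // no initializer
     static const int y = 42;

   Loads of X fold to 0 (a const variable that cannot be overridden at
   link or run time gets a zero initializer) and loads of Y fold
   to 42.  */
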
/* Subroutine of fold_stmt.  We perform constant folding of the
   memory reference tree EXPR.  */

static tree
maybe_fold_reference (tree expr)
{
  tree result = NULL_TREE;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_unary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_ternary_loc (EXPR_LOCATION (expr),
			       TREE_CODE (expr),
			       TREE_TYPE (expr),
			       TREE_OPERAND (expr, 0),
			       TREE_OPERAND (expr, 1),
			       TREE_OPERAND (expr, 2));
  else
    result = fold_const_aggregate_ref (expr);

  if (result && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}

/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.cc
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning any
   attempt to fold if the usual folding turns out to be too
   aggressive.  */

static bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if (!is_gimple_val (TREE_OPERAND (expr, 0))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
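
/* Illustrative example (not part of the original sources): fold-const.cc
   may produce an address like &a[i_1 + 1] as a folded RHS.  The ADDR_EXPR
   case above walks the handled components, finds that the ARRAY_REF
   index i_1 + 1 is not a GIMPLE value, and returns false, so the caller
   must gimplify the expression rather than substitute it directly.  */
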

/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec<cgraph_node *> targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    if (TREE_CODE (ref) == MEM_REF
		&& integer_zerop (TREE_OPERAND (ref, 1)))
	      {
		result = TREE_OPERAND (ref, 0);
		if (!useless_type_conversion_p (TREE_TYPE (rhs),
						TREE_TYPE (result)))
		  result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
		return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs)
		 && is_gimple_reg_type (TREE_TYPE (rhs)))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
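
/* Illustrative example (not part of the original sources): when a call
   carrying VUSE .MEM_1 and VDEF .MEM_2 is replaced by two stores, the
   backward pass gives the last store the original VDEF .MEM_2 and the
   first store a fresh VDEF .MEM_3; the forward pass then threads the
   uses, so the first store gets VUSE .MEM_1 and the last store VUSE
   .MEM_3.  Statements dominated by the call keep their virtual
   operands unchanged.  */
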
/* Helper function for update_gimple_call and
   gimplify_and_update_call_from_tree.  A GIMPLE_CALL STMT is being replaced
   with GIMPLE_CALL NEW_STMT.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
			   gimple *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  gimple_call_set_lhs (new_stmt, lhs);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    SSA_NAME_DEF_STMT (lhs) = new_stmt;
  gimple_move_vops (new_stmt, stmt);
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}

/* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
   with number of arguments NARGS, where the arguments in GIMPLE form
   follow the NARGS argument.  */

void
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
}

/* Return true if EXPR is a CALL_EXPR suitable for representation
   as a single GIMPLE_CALL statement.  If the arguments require
   further gimplification, return false.  */

static bool
valid_gimple_call_p (tree expr)
{
  unsigned i, nargs;

  if (TREE_CODE (expr) != CALL_EXPR)
    return false;

  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    {
      tree arg = CALL_EXPR_ARG (expr, i);
      if (is_gimple_reg_type (TREE_TYPE (arg)))
	{
	  if (!is_gimple_val (arg))
	    return false;
	}
      else
	if (!is_gimple_lvalue (arg))
	  return false;
    }

  return true;
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs, true);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();
      return;
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      push_gimplify_context (gimple_in_ssa_p (cfun));
      gimplify_and_add (expr, &stmts);
      pop_gimplify_context (NULL);

      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  gsi_replace_with_seq_vops (si_p, stmts);
}

/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return (DECL_P (inner)
	  || (TREE_CODE (inner) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
}

/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  value_range valid_range (build_int_cst (type, 0),
			   wide_int_to_tree (type, ssize_max));
  value_range vr;
  if (cfun)
    get_range_query (cfun)->range_of_expr (vr, size);
  else
    get_global_range_query ()->range_of_expr (vr, size);
  if (vr.undefined_p ())
    vr.set_varying (TREE_TYPE (size));
  vr.intersect (valid_range);
  return vr.zero_p ();
}
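
/* Illustrative example (not part of the original sources): for

     int n;         // known to be in [-1, 0]
     size_t sz = (size_t) n;

   the range of SZ is {0} union {SIZE_MAX}.  Intersecting it with the
   valid range [0, SSIZE_MAX] leaves [0, 0], so size_must_be_zero_p
   returns true and callers such as gimple_fold_builtin_memory_op
   (below) can fold e.g. memcpy (d, s, sz) to D.  */
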

/* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  A call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see the
	 corresponding bug report).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      /* We cannot (easily) change the type of the copy if it is a storage
	 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
	 modify the storage order of objects (see storage_order_barrier_p).  */
      tree srctype
	= POINTER_TYPE_P (TREE_TYPE (src))
	  ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
      tree desttype
	= POINTER_TYPE_P (TREE_TYPE (dest))
	  ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
      tree destvar, srcvar, srcoff;
      unsigned int src_align, dest_align;
      unsigned HOST_WIDE_INT tmp_len;
      const char *tmp_str;

      /* Build accesses at offset zero with a ref-all character type.  */
      tree off0
	= build_int_cst (build_pointer_type_for_mode (char_type_node,
						      ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores, inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register, which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* FIXME: Don't transform copies from strings with known length.
	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
	     from being handled, and the case was XFAILed for that reason.
	     Now that it is handled and the XFAIL removed, as soon as other
	     strlenopt tests that rely on it for passing are adjusted, this
	     hack can be removed.  */
	  && !c_strlen (src, 1)
	  && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL)
	  && !(srctype
	       && AGGREGATE_TYPE_P (srctype)
	       && TYPE_REVERSE_STORAGE_ORDER (srctype))
	  && !(desttype
	       && AGGREGATE_TYPE_P (desttype)
	       && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect out-of-bounds accesses without issuing warnings.
		 Avoid folding out-of-bounds copies but to avoid false
		 positives for unreachable code defer warning until after
		 DCE has worked its magic.
		 -Wrestrict is still diagnosed.  */
	      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
							 dest, src, len, len,
							 false, false))
		if (warning != OPT_Wrestrict)
		  return false;

	      scalar_int_mode mode;
	      if (int_mode_for_size (ilen * 8, 0).exists (&mode)
		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree type = build_nonstandard_integer_type (ilen * 8, 1);
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gimple_set_location (new_stmt, loc);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_move_vops (new_stmt, stmt);
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gimple_set_location (new_stmt, loc);
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (code == BUILT_IN_MEMMOVE)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      if (!srctype
	  || (AGGREGATE_TYPE_P (srctype)
	      && TYPE_REVERSE_STORAGE_ORDER (srctype)))
	return false;
      if (!desttype
	  || (AGGREGATE_TYPE_P (desttype)
	      && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
	 on alignment, whether the access constitutes a register access
	 and whether it may actually expose a declaration for SSA rewrite
	 or SRA decomposition.  Also try to expose a string constant, we
	 might be able to concatenate several of them later into a single
	 string store.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
	  && dest_align >= TYPE_ALIGN (desttype)
	  && (is_gimple_reg_type (desttype)
	      || src_align >= TYPE_ALIGN (desttype)))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
	       && var_decl_component_p (TREE_OPERAND (src, 0))
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	       && src_align >= TYPE_ALIGN (srctype)
	       && (is_gimple_reg_type (srctype)
		   || dest_align >= TYPE_ALIGN (srctype)))
	srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      /* FIXME: Don't transform copies from strings with known original
	 length.  As soon as strlenopt tests that rely on it for passing
	 are adjusted, this hack can be removed.  */
      else if (gimple_call_alloca_for_var_p (stmt)
	       && (srcvar = string_constant (src, &srcoff, NULL, NULL))
	       && integer_zerop (srcoff)
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
	       && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
	srctype = TREE_TYPE (srcvar);
      else
	return false;

      /* Now that we chose an access type express the other side in
	 terms of it if the target allows that with respect to alignment
	 constraints.  */
      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      enum machine_mode mode = TYPE_MODE (desttype);
	      if ((mode == BLKmode && STRICT_ALIGNMENT)
		  || (targetm.slow_unaligned_access (mode, src_align)
		      && (optab_handler (movmisalign_optab, mode)
			  == CODE_FOR_nothing)))
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      enum machine_mode mode = TYPE_MODE (srctype);
	      if ((mode == BLKmode && STRICT_ALIGNMENT)
		  || (targetm.slow_unaligned_access (mode, dest_align)
		      && (optab_handler (movmisalign_optab, mode)
			  == CODE_FOR_nothing)))
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Same as above, detect out-of-bounds accesses without issuing
	 warnings.  Avoid folding out-of-bounds copies but to avoid
	 false positives for unreachable code defer warning until
	 after DCE has worked its magic.
	 -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
						 dest, src, len, len,
						 false, false))
	if (warning != OPT_Wrestrict)
	  return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gimple_set_location (new_stmt, loc);
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}

      /* We get an aggregate copy.  If the source is a STRING_CST, then
	 directly use its type to perform the copy.  */
      if (TREE_CODE (srcvar) == STRING_CST)
	desttype = srctype;

      /* Or else, use an unsigned char[] type to perform the copy in order
	 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
	 types or float modes behavior on copying.  */
      else
	{
	  desttype = build_array_type_nelts (unsigned_char_type_node,
					     tree_to_uhwi (len));
	  srctype = desttype;
	  if (src_align > TYPE_ALIGN (srctype))
	    srctype = build_aligned_type (srctype, src_align);
	  srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	}

      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      new_stmt = gimple_build_assign (destvar, srcvar);

set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gimple_set_location (new_stmt, loc);
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
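
/* Illustrative example (not part of the original sources): with
   int-sized, suitably aligned operands,

     __builtin_memcpy (&a, &b, sizeof (int));

   is folded by the load/store path above into a load into a temporary
   followed by a store, conceptually

     _1 = MEM[(char * {ref-all})&b];
     MEM[(char * {ref-all})&a] = _1;

   (hypothetical GIMPLE shown; the exact dump spelling may differ).  */
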
/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}

/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to VAL.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
	  != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (!type_has_mode_precision_p (etype))
    etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
					    TYPE_UNSIGNED (etype));

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_move_vops (store, stmt);
  gimple_set_location (store, gimple_location (stmt));
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
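
/* Illustrative example (not part of the original sources): for

     int i;
     memset (&i, 0xa5, sizeof (int));

   the shifts above replicate the low byte across the word
   (0xa5 -> 0xa5a5 -> 0xa5a5a5a5 -> ...), and build_int_cst_type
   truncates to the access type, so the call becomes the single store
   MEM[&i] = 0xa5a5a5a5.  */
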

/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  */

static bool
get_range_strlen_tree (tree arg, bitmap visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array
	     DATA.DECL with size DATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == MEM_REF
	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
	{
	  /* Handle a MEM_REF into a DECL accessing an array of integers,
	     being conservative about references to extern structures with
	     flexible array members that can be initialized to arbitrary
	     numbers of elements as an extension (static structs are okay).
	     FIXME: Make this less conservative -- see
	     component_ref_size in tree.cc.  */
	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
	      && (decl_binds_to_current_def_p (ref)
		  || !array_at_struct_end_p (arg)))
	    {
	      /* Fail if the offset is out of bounds.  Such accesses
		 should be diagnosed at some point.  */
	      val = DECL_SIZE_UNIT (ref);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;

	      poly_offset_int psiz = wi::to_offset (val);
	      poly_offset_int poff = mem_ref_offset (arg);
	      if (known_le (psiz, poff))
		return false;

	      pdata->minlen = ssize_int (0);

	      /* Subtract the offset and one for the terminating nul.  */
	      psiz -= poff;
	      psiz -= 1;
	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather than the type of the access it is not a tight
		 bound.  */
	    }
	}
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;
	}
      else
	pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
	      || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
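
/* Illustrative example (not part of the original sources): for

     struct S { char a[4]; char b[8]; } s;

   an SRK_LENRANGE query on s.a takes the COMPONENT_REF branch above:
   MINLEN is set to 0 and the optimistic MAXBOUND to 3 from the member
   array type, while the conservative MAXLEN is then derived from
   sizeof (s) minus the member offset minus 1.  */
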

/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG refers to if
   the range of lengths cannot be determined, and store all in *PDATA.
   For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
   the maximum constant value.
   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
   SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
   length or if we are unable to determine the length, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide character strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false
   otherwise.  */

static bool
get_range_strlen (tree arg, bitmap visited,
		  strlen_range_kind rkind,
		  c_strlen_data *pdata, unsigned eltsize)
{
  if (TREE_CODE (arg) != SSA_NAME)
    return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  tree var = arg;
  gimple *def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			  gimple_assign_rhs3 (def_stmt) };

	  for (unsigned int i = 0; i < 2; i++)
	    if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
	      {
		if (rkind != SRK_LENRANGE)
		  return false;
		/* Set the upper bound to the maximum to prevent
		   it from being adjusted in the next iteration but
		   leave MINLEN and the more conservative MAXBOUND
		   determined so far alone (or leave them null if
		   they haven't been set yet).  That the MINLEN is
		   in fact zero can be determined from MAXLEN being
		   unbounded but the discovered minimum is used for
		   diagnostics.  */
		pdata->maxlen = build_all_ones_cst (size_type_node);
	      }
	  return true;
	}
      return false;

    case GIMPLE_PHI:
      /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
	 must have a constant length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	{
	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))
	    continue;

	  if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
	    {
	      if (rkind != SRK_LENRANGE)
		return false;
	      /* Set the upper bound to the maximum to prevent
		 it from being adjusted in the next iteration but
		 leave MINLEN and the more conservative MAXBOUND
		 determined so far alone (or leave them null if
		 they haven't been set yet).  That the MINLEN is
		 in fact zero can be determined from MAXLEN being
		 unbounded but the discovered minimum is used for
		 diagnostics.  */
	      pdata->maxlen = build_all_ones_cst (size_type_node);
	    }
	}
      return true;

    default:
      return false;
    }
}
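
/* Illustrative example (not part of the original sources): for

     p_3 = PHI <"ab"(bb2), "wxyz"(bb3)>

   an SRK_LENRANGE query on p_3 combines both PHI arguments into the
   range [2, 4], while an SRK_STRLEN query fails because the two
   constant lengths differ.  */
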

/* Try to obtain the range of the lengths of the string(s) referenced
   by ARG, or the size of the largest array ARG refers to if the range
   of lengths cannot be determined, and store all in *PDATA which must
   be zero-initialized on input except PDATA->MAXBOUND may be set to
   a non-null tree node other than INTEGER_CST to request to have it
   set to the length of the longest string in a PHI.  ELTSIZE is
   the expected size of the string element in bytes: 1 for char and
   some power of 2 for wide characters.
   Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
   for optimization.  Returning false means that a nonzero PDATA->MINLEN
   doesn't reflect the true lower bound of the range when PDATA->MAXLEN
   is -1 (in that case, the actual range is indeterminate, i.e.,
   [0, PTRDIFF_MAX - 2]).  */

bool
get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
{
  auto_bitmap visited;
  tree maxbound = pdata->maxbound;

  if (!get_range_strlen (arg, visited, SRK_LENRANGE, pdata, eltsize))
    {
      /* On failure extend the length range to an impossible maximum
	 (a valid MAXLEN must be less than PTRDIFF_MAX - 1).  Other
	 members can stay unchanged regardless.  */
      pdata->minlen = ssize_int (0);
      pdata->maxlen = build_all_ones_cst (size_type_node);
    }
  else if (!pdata->minlen)
    pdata->minlen = ssize_int (0);

  /* If it's unchanged from its initial non-null value, set the
     conservative MAXBOUND to SIZE_MAX.  Otherwise leave it null (if
     it is null).  */
  if (maxbound && pdata->maxbound == maxbound)
    pdata->maxbound = build_all_ones_cst (size_type_node);

  return !integer_all_onesp (pdata->maxlen);
}

/* Return the maximum value for ARG given RKIND (see strlen_range_kind).
   For ARG of pointer types, NONSTR indicates if the caller is prepared
   to handle unterminated strings.  For integer ARG and when RKIND ==
   SRK_INT_VALUE, NONSTR must be null.

   If an unterminated array is discovered and our caller handles
   unterminated arrays, then bubble up the offending DECL and
   return the maximum size.  Otherwise return NULL.  */

static tree
get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
{
  /* A non-null NONSTR is meaningless when determining the maximum
     value of an integer ARG.  */
  gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
  /* ARG must have an integral type when RKIND says so.  */
  gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));

  auto_bitmap visited;

  /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
     is unbounded.  */
  c_strlen_data lendata = { };
  if (!get_range_strlen (arg, visited, rkind, &lendata, /* eltsize = */1))
    lendata.maxlen = NULL_TREE;
  else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
    lendata.maxlen = NULL_TREE;

  if (nonstr)
    {
      /* For callers prepared to handle unterminated arrays set
	 *NONSTR to point to the declaration of the array and return
	 the maximum length/size.  */
      *nonstr = lendata.decl;
      return lendata.maxlen;
    }

  /* Fail if the constant array isn't nul-terminated.  */
  return lendata.decl ? NULL_TREE : lendata.maxlen;
}

/* Return true if LEN is known to be less than or equal to (or if STRICT is
   true, strictly less than) the lower bound of SIZE at compile time and false
   otherwise.  */

static bool
known_lower (gimple *stmt, tree len, tree size, bool strict = false)
{
  if (len == NULL_TREE)
    return false;

  wide_int size_range[2];
  wide_int len_range[2];
  if (get_range (len, stmt, len_range) && get_range (size, stmt, size_range))
    {
      if (strict)
	return wi::ltu_p (len_range[1], size_range[0]);
      else
	return wi::leu_p (len_range[1], size_range[0]);
    }

  return false;
}
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   Return false if no simplification can be made, true otherwise.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !warning_suppressed_p (stmt, OPT_Wrestrict))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Set to non-null if ARG refers to an unterminated array.  */
  tree nonstr = NULL;
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);

  if (nonstr)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
	warn_string_no_nul (loc, stmt, "strcpy", src, nonstr);
      suppress_warning (stmt, OPT_Wstringop_overread);
      return false;
    }

  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   Return false if no simplification can be made, true otherwise.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, SRK_STRLEN);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes",
			fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, SRK_STRLEN);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  if (!gimple_call_lhs (stmt))
    return false;

  /* Avoid folding if the first argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, str))
    return false;

  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);

  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
/* Fold function call to builtin strstr.
   If both arguments are constant, evaluate and fold the result,
   additionally fold strstr (x, "") into x and strstr (x, "c")
   into strchr (x, 'c').  */
static bool
gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  if (!gimple_call_lhs (stmt))
    return false;

  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);

  /* Avoid folding if either argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, haystack)
      || !check_nul_terminated_array (NULL_TREE, needle))
    return false;

  const char *q = c_getstr (needle);
  if (q == NULL)
    return false;

  if (const char *p = c_getstr (haystack))
    {
      const char *r = strstr (p, q);

      if (r == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, r - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt
	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
			       haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* For strstr (x, "") return x.  */
  if (q[0] == '\0')
    {
      replace_call_with_value (gsi, haystack);
      return true;
    }

  /* Transform strstr (x, "c") into strchr (x, 'c').  */
  if (q[1] == '\0')
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
	{
	  tree c = build_int_cst (integer_type_node, q[0]);
	  gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible, otherwise true.  On
   success the call is replaced with an equivalent sequence (possibly
   calls to other builtin functions) that computes the same value more
   efficiently.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, SRK_STRLEN);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
/* Fold a call to the __strcat_chk builtin.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strncat builtin.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree src_len = c_strlen (src, 1);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (src_len && integer_zerop (src_len)))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (!src_len || known_lower (stmt, len, src_len, true))
    return false;

  /* Warn on constant LEN.  */
  if (TREE_CODE (len) == INTEGER_CST)
    {
      bool nowarn = warning_suppressed_p (stmt, OPT_Wstringop_overflow_);
      tree dstsize;

      if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize)
	  && TREE_CODE (dstsize) == INTEGER_CST)
	{
	  int cmpdst = tree_int_cst_compare (len, dstsize);

	  if (cmpdst >= 0)
	    {
	      tree fndecl = gimple_call_fndecl (stmt);

	      /* Strncat copies (at most) LEN bytes and always appends
		 the terminating NUL so the specified bound should never
		 be equal to (or greater than) the size of the destination.
		 If it is, the copy could overflow.  */
	      location_t loc = gimple_location (stmt);
	      nowarn = warning_at (loc, OPT_Wstringop_overflow_,
				   cmpdst == 0
				   ? G_("%qD specified bound %E equals "
					"destination size")
				   : G_("%qD specified bound %E exceeds "
					"destination size %E"),
				   fndecl, len, dstsize);
	      if (nowarn)
		suppress_warning (stmt, OPT_Wstringop_overflow_);
	    }
	}

      if (!nowarn && TREE_CODE (src_len) == INTEGER_CST
	  && tree_int_cst_compare (src_len, len) == 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  location_t loc = gimple_location (stmt);

	  /* To avoid possible overflow the specified bound should also
	     not be equal to the length of the source, even when the size
	     of the destination is unknown (it's not an uncommon mistake
	     to specify as the bound to strncpy the length of the source).  */
	  if (warning_at (loc, OPT_Wstringop_overflow_,
			  "%qD specified bound %E equals source length",
			  fndecl, len))
	    suppress_warning (stmt, OPT_Wstringop_overflow_);
	}
    }

  if (!known_lower (stmt, src_len, len))
    return false;

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (known_lower (stmt, src_len, len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Build and append gimple statements to STMTS that would load a first
   character of a memory location identified by STR.  LOC is location
   of the statement.  */

static tree
gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
{
  tree var;

  tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
  tree cst_uchar_ptr_node
    = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
  tree off0 = build_int_cst (cst_uchar_ptr_node, 0);

  tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
  gassign *stmt = gimple_build_assign (NULL_TREE, temp);
  var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);

  gimple_assign_set_lhs (stmt, var);
  gimple_seq_add_stmt_without_update (stmts, stmt);

  return var;
}
/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.  */

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);

  tree bound_node = NULL_TREE;
  unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      bound_node = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (bound_node))
	bound = tree_to_uhwi (bound_node);
    }

  /* If the BOUND parameter is zero, return zero.  */
  if (bound == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* Initially set to the number of characters, including the terminating
     nul if each array has one.  LENx == strnlen (Sx, LENx) implies that
     the array Sx is not terminated by a nul.
     For nul-terminated strings then adjusted to their length so that
     LENx == NULPOSx holds.  */
  unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
  const char *p1 = getbyterep (str1, &len1);
  const char *p2 = getbyterep (str2, &len2);

  /* The position of the terminating nul character if one exists, otherwise
     a value greater than LENx.  */
  unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;

  if (p1)
    {
      size_t n = strnlen (p1, len1);
      if (n < len1)
	len1 = nulpos1 = n;
    }

  if (p2)
    {
      size_t n = strnlen (p2, len2);
      if (n < len2)
	len2 = nulpos2 = n;
    }

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	case BUILT_IN_STRCMP_EQ:
	  if (len1 != nulpos1 || len2 != nulpos2)
	    break;

	  r = strcmp (p1, p2);
	  known_result = true;
	  break;

	case BUILT_IN_STRNCMP:
	case BUILT_IN_STRNCMP_EQ:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;

	    /* Reduce the bound to be no more than the length
	       of the shorter of the two strings, or the sizes
	       of the unterminated arrays.  */
	    unsigned HOST_WIDE_INT n = bound;

	    if (len1 == nulpos1 && len1 < n)
	      n = len1 + 1;
	    if (len2 == nulpos2 && len2 < n)
	      n = len2 + 1;

	    if (MIN (nulpos1, nulpos2) + 1 < n)
	      break;

	    r = strncmp (p1, p2, n);
	    known_result = true;
	    break;
	  }
	/* Only handleable situation is where the strings are equal (result 0),
	   which is already handled by operand_equal_p case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;
	    r = strncmp (p1, p2, bound);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
		       || fcode == BUILT_IN_STRCMP
		       || fcode == BUILT_IN_STRCMP_EQ
		       || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
  if (fcode == BUILT_IN_STRNCMP && bound == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is greater than the length of one constant string,
     and the other argument is also a nul-terminated string, replace
     strncmp with strcmp.  */
  if (fcode == BUILT_IN_STRNCMP
      && bound > 0 && bound < HOST_WIDE_INT_M1U
      && ((p2 && len2 < bound && len2 == nulpos2)
	  || (p1 && len1 < bound && len1 == nulpos1)))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
	return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
/* Fold a call to the memchr pointed by GSI iterator.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = getbyterep (arg1, &string_length);

  if (p1)
    {
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  tree mem_size, offset_node;
	  byte_representation (arg1, &offset_node, &mem_size, NULL);
	  unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
					  ? 0 : tree_to_uhwi (offset_node);
	  /* MEM_SIZE is the size of the array the string literal
	     is stored in.  */
	  unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
	  gcc_checking_assert (string_length <= string_size);
	  if (length <= string_size)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (sizetype, offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Return false if no simplification was made.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, SRK_STRLEN);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
}
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size)
      && !known_lower (stmt, maxlen, size))
    {
      /* MAXLEN and LEN both cannot be proved to be less than SIZE, at
	 least try to optimize (void) __mempcpy_chk () into
	 (void) __memcpy_chk ().  */
      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
	{
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Print a message in the dump file recording transformation of FROM to TO.  */

static void
dump_transformation (gcall *from, gcall *to)
{
  if (dump_enabled_p ())
    dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, from, "simplified %T to %T\n",
		     gimple_call_fn (from), gimple_call_fn (to));
}
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest)
	  && !warning_suppressed_p (stmt, OPT_Wrestrict))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (!known_lower (stmt, len, size, true)
	  && !known_lower (stmt, maxlen, size, true))
	{
	  if (fcode == BUILT_IN_STPCPY_CHK)
	    {
	      if (! ignore)
		return false;

	      /* If return value of __stpcpy_chk is ignored,
		 optimize into __strcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
	      if (!fn)
		return false;

	      gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }

	  if (! len || TREE_SIDE_EFFECTS (len))
	    return false;

	  /* If c_strlen returned something, but not provably less than size,
	     transform __strcpy_chk into __memcpy_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	  if (!fn)
	    return false;

	  gimple_seq stmts = NULL;
	  len = force_gimple_operand (len, &stmts, true, NULL_TREE);
	  len = gimple_convert (&stmts, loc, size_type_node, len);
	  len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
			      build_int_cst (size_type_node, 1));
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK && !ignore
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gcall *repl = gimple_build_call (fn, 2, dest, src);
  dump_transformation (stmt, repl);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  FCODE is the BUILT_IN_* code of the
   builtin.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
    {
      if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
	{
	  /* If return value of __stpncpy_chk is ignored,
	     optimize into __strncpy_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
	  if (fn)
	    {
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK && !ignore
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gcall *repl = gimple_build_call (fn, 3, dest, src, len);
  dump_transformation (stmt, repl);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return false if no simplification can be made, true otherwise.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* Set to non-null if ARG refers to an unterminated array.  */
  c_strlen_data data = { };
  /* The size of the unterminated array if SRC refers to one.  */
  tree size;
  /* True if the size is exact/constant, false if it's the lower bound
     of a range.  */
  bool exact;
  tree len = c_strlen (src, 1, &data, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    {
      data.decl = unterminated_array (src, &size, &exact);
      if (!data.decl)
	return false;
    }

  if (data.decl)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
	warn_string_no_nul (loc, stmt, "stpcpy", src, data.decl, size,
			    exact);
      suppress_warning (stmt, OPT_Wstringop_overread);
      return false;
    }

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  gimple_move_vops (repl, stmt);
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
/* Fold a call to __{,v}snprintf_chk.  Return false if a normal call
   should be emitted rather than expanding the function inline.  FCODE
   is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
    return false;

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Fold a call to __{,v}sprintf_chk.  Return false if a normal call
   should be emitted rather than expanding the function inline.  FCODE
   is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		len = c_strlen (arg, 1);
	    }
	}
    }

  if (! integer_all_onesp (size) && !known_lower (stmt, len, size, true))
    return false;

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  tree orig = NULL_TREE;
  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  tree fmt = gimple_call_arg (stmt, 1);
  const char *fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  tree dest = gimple_call_arg (stmt, 0);

  if (!init_target_chars ())
    return false;

  tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
  if (!fn)
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      copy_warning (repl, stmt);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
							  strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* Don't fold calls with source arguments of invalid (nonpointer)
	 types.  */
      if (!POINTER_TYPE_P (TREE_TYPE (orig)))
	return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, SRK_STRLEN);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      copy_warning (repl, stmt);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      tree len = build_int_cstu (TREE_TYPE (destsize), strlen (fmt_str));

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (!known_lower (stmt, len, destsize, true))
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (lhs,
				      fold_convert (TREE_TYPE (lhs), len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (!known_lower (stmt, orig_len, destsize, true))
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return false if no simplification was possible, otherwise true.  FCODE
   is the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, we can use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return false if no simplification was possible, otherwise true.  FCODE
   is the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newstr = xstrdup (str);
	      newstr[len - 1] = '\0';
	      newarg = build_string_literal (len, newstr);
	      free (newstr);
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
/* Fold a call to __builtin_strlen with known length LEN.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);

  wide_int minlen;
  wide_int maxlen;

  c_strlen_data lendata = { };
  if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
      && !lendata.decl
      && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
      && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
    {
      /* The range of lengths refers to either a single constant
	 string or to the longest and shortest constant string
	 referenced by the argument of the strlen() call, or to
	 the strings that can possibly be stored in the arrays
	 the argument refers to.  */
      minlen = wi::to_wide (lendata.minlen);
      maxlen = wi::to_wide (lendata.maxlen);
    }
  else
    {
      unsigned prec = TYPE_PRECISION (sizetype);

      minlen = wi::shwi (0, prec);
      maxlen = wi::to_wide (max_object_size (), prec) - 2;
    }

  if (minlen == maxlen)
    {
      /* Fold the strlen call to a constant.  */
      tree type = TREE_TYPE (lendata.minlen);
      tree len = force_gimple_operand_gsi (gsi,
					   wide_int_to_tree (type, minlen),
					   true, NULL, true, GSI_SAME_STMT);
      replace_call_with_value (gsi, len);
      return true;
    }

  /* Set the strlen() range to [0, MAXLEN].  */
  if (tree lhs = gimple_call_lhs (stmt))
    set_strlen_range (lhs, minlen, maxlen);

  return false;
}
/* Fold a call to __builtin_acc_on_device.  */

static bool
gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
{
  /* Defer folding until we know which compiler we're in.  */
  if (symtab->state != EXPANSION)
    return false;

  unsigned val_host = GOMP_DEVICE_HOST;
  unsigned val_dev = GOMP_DEVICE_NONE;

#ifdef ACCEL_COMPILER
  val_host = GOMP_DEVICE_NOT_HOST;
  val_dev = ACCEL_COMPILER_acc_device;
#endif

  location_t loc = gimple_location (gsi_stmt (*gsi));

  tree host_eq = make_ssa_name (boolean_type_node);
  gimple *host_ass = gimple_build_assign
    (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
  gimple_set_location (host_ass, loc);
  gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);

  tree dev_eq = make_ssa_name (boolean_type_node);
  gimple *dev_ass = gimple_build_assign
    (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
  gimple_set_location (dev_ass, loc);
  gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);

  tree result = make_ssa_name (boolean_type_node);
  gimple *result_ass = gimple_build_assign
    (result, BIT_IOR_EXPR, host_eq, dev_eq);
  gimple_set_location (result_ass, loc);
  gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);

  replace_call_with_value (gsi, result);

  return true;
}
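/* The folded form is equivalent to evaluating

     result = (arg0 == val_host) | (arg0 == val_dev)

   via three boolean SSA assignments inserted before the call, so no
   control flow needs to be introduced.  */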
/* Fold realloc (0, n) -> malloc (n).  */

static bool
gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);
  tree size = gimple_call_arg (stmt, 1);

  if (operand_equal_p (arg, null_pointer_node, 0))
    {
      tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
      if (fn_malloc)
	{
	  gcall *repl = gimple_build_call (fn_malloc, 1, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }
  return false;
}
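/* For example,

     p = __builtin_realloc (0, n);

   becomes

     p = __builtin_malloc (n);  */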
/* Number of bytes into which any type but aggregate or vector types
   should fit.  */
static constexpr size_t clear_padding_unit
  = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
/* Buffer size on which __builtin_clear_padding folding code works.  */
static const size_t clear_padding_buf_size = 32 * clear_padding_unit;

/* Data passed through __builtin_clear_padding folding.  */
struct clear_padding_struct {
  location_t loc;
  /* 0 during __builtin_clear_padding folding, nonzero during
     clear_type_padding_in_mask.  In that case, instead of clearing the
     non-padding bits in union_ptr array clear the padding bits in there.  */
  bool clear_in_mask;
  tree base;
  tree alias_type;
  gimple_stmt_iterator *gsi;
  /* Alignment of buf->base + 0.  */
  unsigned align;
  /* Offset from buf->base.  Should be always a multiple of UNITS_PER_WORD.  */
  HOST_WIDE_INT off;
  /* Number of padding bytes before buf->off that don't have padding clear
     code emitted yet.  */
  HOST_WIDE_INT padding_bytes;
  /* The size of the whole object.  Never emit code to touch
     buf->base + buf->sz or following bytes.  */
  HOST_WIDE_INT sz;
  /* Number of bytes recorded in buf->buf.  */
  size_t size;
  /* When inside union, instead of emitting code we and bits inside of
     the union_ptr array.  */
  unsigned char *union_ptr;
  /* Set bits mean padding bits that need to be cleared by the builtin.  */
  unsigned char buf[clear_padding_buf_size + clear_padding_unit];
};
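/* For illustration, on a typical target where int is 4 bytes with
   4-byte alignment,

     struct S { char c; int i; };

   has three bytes of padding after C, so the pattern recorded in
   buf->buf for one S object is 00 ff ff ff 00 00 00 00 — the set bits
   mark padding that the builtin must clear.  */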
/* Emit code to clear padding requested in BUF->buf - set bits
   in there stand for padding that should be cleared.  FULL is true
   if everything from the buffer should be flushed, otherwise
   it can leave up to 2 * clear_padding_unit bytes for further
   processing.  */

static void
clear_padding_flush (clear_padding_struct *buf, bool full)
{
  gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
  if (!full && buf->size < 2 * clear_padding_unit)
    return;
  gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
  size_t end = buf->size;
  if (!full)
    end = ((end - clear_padding_unit - 1) / clear_padding_unit
	   * clear_padding_unit);
  size_t padding_bytes = buf->padding_bytes;
  if (buf->union_ptr)
    {
      if (buf->clear_in_mask)
	{
	  /* During clear_type_padding_in_mask, clear the padding
	     bits set in buf->buf in the buf->union_ptr mask.  */
	  for (size_t i = 0; i < end; i++)
	    {
	      if (buf->buf[i] == (unsigned char) ~0)
		padding_bytes++;
	      else
		{
		  memset (&buf->union_ptr[buf->off + i - padding_bytes],
			  0, padding_bytes);
		  padding_bytes = 0;
		  buf->union_ptr[buf->off + i] &= ~buf->buf[i];
		}
	    }
	  if (full)
	    {
	      memset (&buf->union_ptr[buf->off + end - padding_bytes],
		      0, padding_bytes);
	      buf->off = 0;
	      buf->size = 0;
	      buf->padding_bytes = 0;
	    }
	  else
	    {
	      memmove (buf->buf, buf->buf + end, buf->size - end);
	      buf->off += end;
	      buf->size -= end;
	      buf->padding_bytes = padding_bytes;
	    }
	  return;
	}
      /* Inside of a union, instead of emitting any code, instead
	 clear all bits in the union_ptr buffer that are clear
	 in buf.  Whole padding bytes don't clear anything.  */
      for (size_t i = 0; i < end; i++)
	{
	  if (buf->buf[i] == (unsigned char) ~0)
	    padding_bytes++;
	  else
	    {
	      padding_bytes = 0;
	      buf->union_ptr[buf->off + i] &= buf->buf[i];
	    }
	}
      if (full)
	{
	  buf->off = 0;
	  buf->size = 0;
	  buf->padding_bytes = 0;
	}
      else
	{
	  memmove (buf->buf, buf->buf + end, buf->size - end);
	  buf->off += end;
	  buf->size -= end;
	  buf->padding_bytes = padding_bytes;
	}
      return;
    }
  size_t wordsize = UNITS_PER_WORD;
  for (size_t i = 0; i < end; i += wordsize)
    {
      size_t nonzero_first = wordsize;
      size_t nonzero_last = 0;
      size_t zero_first = wordsize;
      size_t zero_last = 0;
      bool all_ones = true, bytes_only = true;
      if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
	  > (unsigned HOST_WIDE_INT) buf->sz)
	{
	  gcc_assert (wordsize > 1);
	  wordsize /= 2;
	  i -= wordsize;
	  continue;
	}
      for (size_t j = i; j < i + wordsize && j < end; j++)
	{
	  if (buf->buf[j])
	    {
	      if (nonzero_first == wordsize)
		{
		  nonzero_first = j - i;
		  nonzero_last = j - i;
		}
	      if (nonzero_last != j - i)
		all_ones = false;
	      nonzero_last = j + 1 - i;
	    }
	  else
	    {
	      if (zero_first == wordsize)
		zero_first = j - i;
	      zero_last = j + 1 - i;
	    }
	  if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
	    {
	      all_ones = false;
	      bytes_only = false;
	    }
	}
      size_t padding_end = i;
      if (padding_bytes)
	{
	  if (nonzero_first == 0
	      && nonzero_last == wordsize
	      && all_ones)
	    {
	      /* All bits are padding and we had some padding
		 before too.  Just extend it.  */
	      padding_bytes += wordsize;
	      continue;
	    }
	  if (all_ones && nonzero_first == 0)
	    {
	      padding_bytes += nonzero_last;
	      padding_end += nonzero_last;
	      nonzero_first = wordsize;
	      nonzero_last = 0;
	    }
	  else if (bytes_only && nonzero_first == 0)
	    {
	      gcc_assert (zero_first && zero_first != wordsize);
	      padding_bytes += zero_first;
	      padding_end += zero_first;
	    }
	  tree atype, src;
	  if (padding_bytes == 1)
	    {
	      atype = char_type_node;
	      src = build_zero_cst (char_type_node);
	    }
	  else
	    {
	      atype = build_array_type_nelts (char_type_node, padding_bytes);
	      src = build_constructor (atype, NULL);
	    }
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + padding_end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  padding_bytes = 0;
	  buf->padding_bytes = 0;
	}
      if (nonzero_first == wordsize)
	/* All bits in a word are 0, there are no padding bits.  */
	continue;
      if (all_ones && nonzero_last == wordsize)
	{
	  /* All bits between nonzero_first and end of word are padding
	     bits, start counting padding_bytes.  */
	  padding_bytes = nonzero_last - nonzero_first;
	  continue;
	}
      if (bytes_only)
	{
	  /* If bitfields aren't involved in this word, prefer storing
	     individual bytes or groups of them over performing a RMW
	     operation on the whole word.  */
	  gcc_assert (i + zero_last <= end);
	  for (size_t j = padding_end; j < i + zero_last; j++)
	    if (buf->buf[j])
	      {
		size_t k;
		for (k = j; k < i + zero_last; k++)
		  if (buf->buf[k] == 0)
		    break;
		HOST_WIDE_INT off = buf->off + j;
		tree atype, src;
		if (k - j == 1)
		  {
		    atype = char_type_node;
		    src = build_zero_cst (char_type_node);
		  }
		else
		  {
		    atype = build_array_type_nelts (char_type_node, k - j);
		    src = build_constructor (atype, NULL);
		  }
		tree dst = build2_loc (buf->loc, MEM_REF, atype,
				       buf->base,
				       build_int_cst (buf->alias_type, off));
		gimple *g = gimple_build_assign (dst, src);
		gimple_set_location (g, buf->loc);
		gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		j = k;
	      }
	  if (nonzero_last == wordsize)
	    padding_bytes = nonzero_last - zero_last;
	  continue;
	}
      for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
	{
	  if (nonzero_last - nonzero_first <= eltsz
	      && ((nonzero_first & ~(eltsz - 1))
		  == ((nonzero_last - 1) & ~(eltsz - 1))))
	    {
	      tree type;
	      if (eltsz == 1)
		type = char_type_node;
	      else
		type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
						       0);
	      size_t start = nonzero_first & ~(eltsz - 1);
	      HOST_WIDE_INT off = buf->off + i + start;
	      tree atype = type;
	      if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
		atype = build_aligned_type (type, buf->align);
	      tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				     build_int_cst (buf->alias_type, off));
	      tree src;
	      gimple *g;
	      if (all_ones
		  && nonzero_first == start
		  && nonzero_last == start + eltsz)
		src = build_zero_cst (type);
	      else
		{
		  src = make_ssa_name (type);
		  tree tmp_dst = unshare_expr (dst);
		  /* The folding introduces a read from the tmp_dst, we should
		     prevent uninitialized warning analysis from issuing warning
		     for such fake read.  In order to suppress warning only for
		     this expr, we should set the location of tmp_dst to
		     UNKNOWN_LOCATION first, then suppress_warning will call
		     set_no_warning_bit to set the no_warning flag only for
		     tmp_dst.  */
		  SET_EXPR_LOCATION (tmp_dst, UNKNOWN_LOCATION);
		  suppress_warning (tmp_dst, OPT_Wuninitialized);
		  g = gimple_build_assign (src, tmp_dst);
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  tree mask = native_interpret_expr (type,
						     buf->buf + i + start,
						     eltsz);
		  gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
		  mask = fold_build1 (BIT_NOT_EXPR, type, mask);
		  tree src_masked = make_ssa_name (type);
		  g = gimple_build_assign (src_masked, BIT_AND_EXPR,
					   src, mask);
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  src = src_masked;
		}
	      g = gimple_build_assign (dst, src);
	      gimple_set_location (g, buf->loc);
	      gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	      break;
	    }
	}
    }
  if (full)
    {
      if (padding_bytes)
	{
	  tree atype, src;
	  if (padding_bytes == 1)
	    {
	      atype = char_type_node;
	      src = build_zero_cst (char_type_node);
	    }
	  else
	    {
	      atype = build_array_type_nelts (char_type_node, padding_bytes);
	      src = build_constructor (atype, NULL);
	    }
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	}
      size_t end_rem = end % UNITS_PER_WORD;
      buf->off += end - end_rem;
      buf->size = end_rem;
      memset (buf->buf, 0, buf->size);
      buf->padding_bytes = 0;
    }
  else
    {
      memmove (buf->buf, buf->buf + end, buf->size - end);
      buf->off += end;
      buf->size -= end;
      buf->padding_bytes = padding_bytes;
    }
}
/* Append PADDING_BYTES padding bytes.  */

static void
clear_padding_add_padding (clear_padding_struct *buf,
			   HOST_WIDE_INT padding_bytes)
{
  if (padding_bytes == 0)
    return;
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    clear_padding_flush (buf, false);
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    {
      memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
      padding_bytes -= clear_padding_buf_size - buf->size;
      buf->size = clear_padding_buf_size;
      clear_padding_flush (buf, false);
      gcc_assert (buf->padding_bytes);
      /* At this point buf->buf[0] through buf->buf[buf->size - 1]
	 is guaranteed to be all ones.  */
      padding_bytes += buf->size;
      buf->size = padding_bytes % UNITS_PER_WORD;
      memset (buf->buf, ~0, buf->size);
      buf->off += padding_bytes - buf->size;
      buf->padding_bytes += padding_bytes - buf->size;
    }
  else
    {
      memset (buf->buf + buf->size, ~0, padding_bytes);
      buf->size += padding_bytes;
    }
}
static void clear_padding_type (clear_padding_struct *, tree,
				HOST_WIDE_INT, bool);

/* Clear padding bits of union type TYPE.  */

static void
clear_padding_union (clear_padding_struct *buf, tree type,
		     HOST_WIDE_INT sz, bool for_auto_init)
{
  clear_padding_struct *union_buf;
  HOST_WIDE_INT start_off = 0, next_off = 0;
  size_t start_size = 0;
  if (buf->union_ptr)
    {
      start_off = buf->off + buf->size;
      next_off = start_off + sz;
      start_size = start_off % UNITS_PER_WORD;
      start_off -= start_size;
      clear_padding_flush (buf, true);
      union_buf = buf;
    }
  else
    {
      if (sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      union_buf = XALLOCA (clear_padding_struct);
      union_buf->loc = buf->loc;
      union_buf->clear_in_mask = buf->clear_in_mask;
      union_buf->base = NULL_TREE;
      union_buf->alias_type = NULL_TREE;
      union_buf->gsi = NULL;
      union_buf->align = 0;
      union_buf->off = 0;
      union_buf->padding_bytes = 0;
      union_buf->sz = sz;
      union_buf->size = 0;
      if (sz + buf->size <= clear_padding_buf_size)
	union_buf->union_ptr = buf->buf + buf->size;
      else
	union_buf->union_ptr = XNEWVEC (unsigned char, sz);
      memset (union_buf->union_ptr, ~0, sz);
    }

  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
      {
	if (DECL_SIZE_UNIT (field) == NULL_TREE)
	  {
	    if (TREE_TYPE (field) == error_mark_node)
	      continue;
	    gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
			&& !COMPLETE_TYPE_P (TREE_TYPE (field)));
	    if (!buf->clear_in_mask && !for_auto_init)
	      error_at (buf->loc, "flexible array member %qD does not have "
				  "well defined padding bits for %qs",
			field, "__builtin_clear_padding");
	    continue;
	  }
	HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
	gcc_assert (union_buf->size == 0);
	union_buf->off = start_off;
	union_buf->size = start_size;
	memset (union_buf->buf, ~0, start_size);
	clear_padding_type (union_buf, TREE_TYPE (field), fldsz, for_auto_init);
	clear_padding_add_padding (union_buf, sz - fldsz);
	clear_padding_flush (union_buf, true);
      }

  if (buf == union_buf)
    {
      buf->off = next_off;
      buf->size = next_off % UNITS_PER_WORD;
      buf->off -= buf->size;
      memset (buf->buf, ~0, buf->size);
    }
  else if (sz + buf->size <= clear_padding_buf_size)
    buf->size += sz;
  else
    {
      unsigned char *union_ptr = union_buf->union_ptr;
      while (sz)
	{
	  clear_padding_flush (buf, false);
	  HOST_WIDE_INT this_sz
	    = MIN ((unsigned HOST_WIDE_INT) sz,
		   clear_padding_buf_size - buf->size);
	  memcpy (buf->buf + buf->size, union_ptr, this_sz);
	  buf->size += this_sz;
	  union_ptr += this_sz;
	  sz -= this_sz;
	}
      XDELETE (union_buf->union_ptr);
    }
}
/* The only known floating point formats with padding bits are the
   IEEE extended ones.  */

static bool
clear_padding_real_needs_padding_p (tree type)
{
  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
  return (fmt->b == 2
	  && fmt->signbit_ro == fmt->signbit_rw
	  && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
}

/* Return true if TYPE might contain any padding bits.  */

bool
clear_padding_type_may_have_padding_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
      return true;
    case ARRAY_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
    case REAL_TYPE:
      return clear_padding_real_needs_padding_p (type);
    default:
      return false;
    }
}
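/* For example, x86 long double is the 80-bit IEEE extended format with
   sign bit 79, stored in 12 or 16 bytes; the bytes beyond the 10 value
   bytes are padding.  The m68k extended format (sign bit 95) is the
   other known case.  */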
/* Emit a runtime loop:
   for (; buf.base != end; buf.base += sz)
     __builtin_clear_padding (buf.base);  */

static void
clear_padding_emit_loop (clear_padding_struct *buf, tree type,
			 tree end, bool for_auto_init)
{
  tree l1 = create_artificial_label (buf->loc);
  tree l2 = create_artificial_label (buf->loc);
  tree l3 = create_artificial_label (buf->loc);
  gimple *g = gimple_build_goto (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l1);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  clear_padding_type (buf, type, buf->sz, for_auto_init);
  clear_padding_flush (buf, true);
  g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
			   size_int (buf->sz));
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
}
/* Clear padding bits for TYPE.  Called recursively from
   gimple_fold_builtin_clear_padding.  If FOR_AUTO_INIT is true,
   the __builtin_clear_padding is not called by the end user,
   instead, it's inserted by the compiler to initialize the
   paddings of automatic variable.  Therefore, we should not
   emit the error messages for flexible array members to confuse
   the end user.  */

static void
clear_padding_type (clear_padding_struct *buf, tree type,
		    HOST_WIDE_INT sz, bool for_auto_init)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      HOST_WIDE_INT cur_pos;
      cur_pos = 0;
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
	  {
	    tree ftype = TREE_TYPE (field);
	    if (DECL_BIT_FIELD (field))
	      {
		HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
		if (fldsz == 0)
		  continue;
		HOST_WIDE_INT pos = int_byte_position (field);
		if (pos >= sz)
		  continue;
		HOST_WIDE_INT bpos
		  = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
		bpos %= BITS_PER_UNIT;
		HOST_WIDE_INT end
		  = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
		if (pos + end > cur_pos)
		  {
		    clear_padding_add_padding (buf, pos + end - cur_pos);
		    cur_pos = pos + end;
		  }
		gcc_assert (cur_pos > pos
			    && ((unsigned HOST_WIDE_INT) buf->size
				>= (unsigned HOST_WIDE_INT) cur_pos - pos));
		unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
		if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
		  sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
				      " in %qs", "__builtin_clear_padding");
		else if (BYTES_BIG_ENDIAN)
		  {
		    /* Big endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1)
			      << (BITS_PER_UNIT - bpos - fldsz));
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
		      }
		  }
		else
		  {
		    /* Little endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1) << bpos);
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ~((1 << fldsz) - 1);
		      }
		  }
	      }
	    else if (DECL_SIZE_UNIT (field) == NULL_TREE)
	      {
		if (ftype == error_mark_node)
		  continue;
		gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
			    && !COMPLETE_TYPE_P (ftype));
		if (!buf->clear_in_mask && !for_auto_init)
		  error_at (buf->loc, "flexible array member %qD does not "
				      "have well defined padding bits for %qs",
			    field, "__builtin_clear_padding");
	      }
	    else if (is_empty_type (ftype))
	      continue;
	    else
	      {
		HOST_WIDE_INT pos = int_byte_position (field);
		if (pos >= sz)
		  continue;
		HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
		gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
		clear_padding_add_padding (buf, pos - cur_pos);
		cur_pos = pos;
		if (tree asbase = lang_hooks.types.classtype_as_base (field))
		  ftype = asbase;
		clear_padding_type (buf, ftype, fldsz, for_auto_init);
		cur_pos += fldsz;
	      }
	  }
      gcc_assert (sz >= cur_pos);
      clear_padding_add_padding (buf, sz - cur_pos);
      break;
    case ARRAY_TYPE:
      HOST_WIDE_INT nelts, fldsz;
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      if (fldsz == 0)
	break;
      nelts = sz / fldsz;
      if (nelts > 1
	  && sz > 8 * UNITS_PER_WORD
	  && buf->union_ptr == NULL
	  && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
	{
	  /* For sufficiently large array of more than one elements,
	     emit a runtime loop to keep code size manageable.  */
	  tree base = buf->base;
	  unsigned int prev_align = buf->align;
	  HOST_WIDE_INT off = buf->off + buf->size;
	  HOST_WIDE_INT prev_sz = buf->sz;
	  clear_padding_flush (buf, true);
	  tree elttype = TREE_TYPE (type);
	  buf->base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf->base));
	  gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
					   base, size_int (off));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
				   size_int (sz));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  buf->sz = fldsz;
	  buf->align = TYPE_ALIGN (elttype);
	  buf->off = 0;
	  buf->size = 0;
	  clear_padding_emit_loop (buf, elttype, end, for_auto_init);
	  buf->base = base;
	  buf->sz = prev_sz;
	  buf->align = prev_align;
	  buf->size = off % UNITS_PER_WORD;
	  buf->off = off - buf->size;
	  memset (buf->buf, 0, buf->size);
	  break;
	}
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case UNION_TYPE:
      clear_padding_union (buf, type, sz, for_auto_init);
      break;
    case REAL_TYPE:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      if (clear_padding_real_needs_padding_p (type))
	{
	  /* Use native_interpret_real + native_encode_expr to figure out
	     which bits are padding.  */
	  memset (buf->buf + buf->size, ~0, sz);
	  tree cst = native_interpret_real (type, buf->buf + buf->size, sz);
	  gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
	  int len = native_encode_expr (cst, buf->buf + buf->size, sz);
	  gcc_assert (len > 0 && (size_t) len == (size_t) sz);
	  for (size_t i = 0; i < (size_t) sz; i++)
	    buf->buf[buf->size + i] ^= ~0;
	}
      else
	memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    case COMPLEX_TYPE:
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case VECTOR_TYPE:
      nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case NULLPTR_TYPE:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, ~0, sz);
      buf->size += sz;
      break;
    default:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    }
}
/* Clear padding bits of TYPE in MASK.  */

void
clear_type_padding_in_mask (tree type, unsigned char *mask)
{
  clear_padding_struct buf;
  buf.loc = UNKNOWN_LOCATION;
  buf.clear_in_mask = true;
  buf.base = NULL_TREE;
  buf.alias_type = NULL_TREE;
  buf.gsi = NULL;
  buf.align = 0;
  buf.off = 0;
  buf.padding_bytes = 0;
  buf.sz = int_size_in_bytes (type);
  buf.size = 0;
  buf.union_ptr = mask;
  clear_padding_type (&buf, type, buf.sz, false);
  clear_padding_flush (&buf, true);
}
/* Fold __builtin_clear_padding builtin.  */

static bool
gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gcc_assert (gimple_call_num_args (stmt) == 2);
  tree ptr = gimple_call_arg (stmt, 0);
  tree typearg = gimple_call_arg (stmt, 1);
  /* The 2nd argument of __builtin_clear_padding's value is used to
     distinguish whether this call is made by the user or by the compiler
     for automatic variable initialization.  */
  bool for_auto_init = (bool) TREE_INT_CST_LOW (typearg);
  tree type = TREE_TYPE (TREE_TYPE (typearg));
  location_t loc = gimple_location (stmt);
  clear_padding_struct buf;
  gimple_stmt_iterator gsiprev = *gsi;
  /* This should be folded during the lower pass.  */
  gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
  gcc_assert (COMPLETE_TYPE_P (type));
  gsi_prev (&gsiprev);

  buf.loc = loc;
  buf.clear_in_mask = false;
  buf.base = ptr;
  buf.alias_type = NULL_TREE;
  buf.gsi = gsi;
  buf.align = get_pointer_alignment (ptr);
  unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
  buf.align = MAX (buf.align, talign);
  buf.off = 0;
  buf.padding_bytes = 0;
  buf.size = 0;
  buf.sz = int_size_in_bytes (type);
  buf.union_ptr = NULL;
  if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
    sorry_at (loc, "%s not supported for variable length aggregates",
	      "__builtin_clear_padding");
  /* The implementation currently assumes 8-bit host and target
     chars which is the case for all currently supported targets
     and hosts and is required e.g. for native_{encode,interpret}* APIs.  */
  else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    sorry_at (loc, "%s not supported on this target",
	      "__builtin_clear_padding");
  else if (!clear_padding_type_may_have_padding_p (type))
    ;
  else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
    {
      tree sz = TYPE_SIZE_UNIT (type);
      tree elttype = type;
      /* Only supports C/C++ VLAs and flattens all the VLA levels.  */
      while (TREE_CODE (elttype) == ARRAY_TYPE
	     && int_size_in_bytes (elttype) < 0)
	elttype = TREE_TYPE (elttype);
      HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
      gcc_assert (eltsz >= 0);
      if (eltsz)
	{
	  buf.base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf.base));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  buf.sz = eltsz;
	  buf.align = TYPE_ALIGN (elttype);
	  buf.alias_type = build_pointer_type (elttype);
	  clear_padding_emit_loop (&buf, elttype, end, for_auto_init);
	}
    }
  else
    {
      if (!is_gimple_mem_ref_addr (buf.base))
	{
	  buf.base = make_ssa_name (TREE_TYPE (ptr));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	}
      buf.alias_type = build_pointer_type (type);
      clear_padding_type (&buf, type, buf.sz, for_auto_init);
      clear_padding_flush (&buf, true);
    }

  gimple_stmt_iterator gsiprev2 = *gsi;
  gsi_prev (&gsiprev2);
  if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
    gsi_replace (gsi, gimple_build_nop (), true);
  else
    gsi_remove (gsi, true);
  return true;
}
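/* For example, a source-level call

     struct S { char c; int i; } s;
     __builtin_clear_padding (&s);

   emits stores that zero the padding bytes between C and I while
   leaving the values of both members untouched.  */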
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), fcode);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    case BUILT_IN_CLEAR_PADDING:
      return gimple_fold_builtin_clear_padding (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
   function calls to constants, where possible.  */

static tree
fold_internal_goacc_dim (const gimple *call)
{
  int axis = oacc_get_ifn_dim_arg (call);
  int size = oacc_get_fn_dim_size (current_function_decl, axis);
  tree result = NULL_TREE;
  tree type = TREE_TYPE (gimple_call_lhs (call));

  switch (gimple_call_internal_fn (call))
    {
    case IFN_GOACC_DIM_POS:
      /* If the size is 1, we know the answer.  */
      if (size == 1)
	result = build_int_cst (type, 0);
      break;
    case IFN_GOACC_DIM_SIZE:
      /* If the size is not dynamic, we know the answer.  */
      if (size)
	result = build_int_cst (type, size);
      break;
    default:
      break;
    }

  return result;
}
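/* For example, when an axis of an offloaded function is known to have
   length 1, IFN_GOACC_DIM_POS for that axis folds to 0, and
   IFN_GOACC_DIM_SIZE folds to the constant dimension size whenever
   the size is not dynamic.  */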
/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
   for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
   &var where var is only addressable because of such calls.  */

bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || maybe_ne (TYPE_PRECISION (etype),
		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
    return false;

  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode)
	 == CODE_FOR_nothing)
    return false;

  if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
    return false;

  return true;
}
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  gimple_move_vops (g, stmt);
  tree oldlhs = gimple_call_lhs (stmt);
  if (stmt_can_throw_internal (cfun, stmt))
    {
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  *gsi = gsiret;
}
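/* For example, a qualifying source-level call looks like

     _Bool
     cas (int *p, int d)
     {
       int e = 0;
       return __atomic_compare_exchange_n (p, &e, d, false,
					   __ATOMIC_SEQ_CST,
					   __ATOMIC_RELAXED);
     }

   where E is addressable only because its address is passed to the
   builtin; the rewrite above lets E become a gimple register again.  */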
/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
   doesn't fit into TYPE.  The test for overflow should be regardless of
   -fwrapv, and even for unsigned types.  */

bool
arith_overflowed_p (enum tree_code code, const_tree type,
		    const_tree arg0, const_tree arg1)
{
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
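/* For example, for a 32-bit unsigned TYPE, 0xffffffff + 1 is
   0x100000000 in infinite precision and needs 33 bits, so the function
   returns true; likewise 0 - 1 is negative, which no unsigned type can
   represent.  */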
/* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
   for the memory it references, otherwise return null.  VECTYPE is the
   type of the memory vector.  */

static tree
gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
{
  tree ptr = gimple_call_arg (call, 0);
  tree alias_align = gimple_call_arg (call, 1);
  tree mask = gimple_call_arg (call, 2);
  if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
    return NULL_TREE;

  unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
  if (TYPE_ALIGN (vectype) != align)
    vectype = build_aligned_type (vectype, align);
  tree offset = build_zero_cst (TREE_TYPE (alias_align));
  return fold_build2 (MEM_REF, vectype, ptr, offset);
}
/* Try to fold IFN_MASK_LOAD call CALL.  Return true on success.  */

static bool
gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
{
  tree lhs = gimple_call_lhs (call);
  if (!lhs)
    return false;

  if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
    {
      gassign *new_stmt = gimple_build_assign (lhs, rhs);
      gimple_set_location (new_stmt, gimple_location (call));
      gimple_move_vops (new_stmt, call);
      gsi_replace (gsi, new_stmt, false);
      return true;
    }
  return false;
}

/* Try to fold IFN_MASK_STORE call CALL.  Return true on success.  */

static bool
gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
{
  tree rhs = gimple_call_arg (call, 3);
  if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
    {
      gassign *new_stmt = gimple_build_assign (lhs, rhs);
      gimple_set_location (new_stmt, gimple_location (call));
      gimple_move_vops (new_stmt, call);
      gsi_replace (gsi, new_stmt, false);
      return true;
    }
  return false;
}
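/* For example, an unconditional masked store

     .MASK_STORE (ptr, align, { -1, -1, -1, -1 }, v_2);

   whose mask is all ones is rewritten into the plain vector store

     MEM <vector(4) int> [(int *)ptr] = v_2;  */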
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  location_t loc = gimple_location (stmt);
		  gimple *new_stmt = gimple_build_builtin_unreachable (loc);
		  gimple_call_set_ctrl_altering (new_stmt, false);
		  /* If the call had a SSA name as lhs morph that into
		     an uninitialized value.  */
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
		      set_ssa_default_def (cfun, var, lhs);
		    }
		  gimple_move_vops (new_stmt, stmt);
		  gsi_replace (gsi, new_stmt, false);
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2),
					NULL_TREE);
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  {
	    tree offset = gimple_call_arg (stmt, 1);
	    tree objsize = gimple_call_arg (stmt, 2);
	    if (integer_all_onesp (objsize)
		|| (TREE_CODE (offset) == INTEGER_CST
		    && TREE_CODE (objsize) == INTEGER_CST
		    && tree_int_cst_le (offset, objsize)))
	      {
		replace_call_with_value (gsi, NULL_TREE);
		return true;
	      }
	  }
	  break;
	case IFN_UBSAN_PTR:
	  if (integer_zerop (gimple_call_arg (stmt, 1)))
	    {
	      replace_call_with_value (gsi, NULL_TREE);
	      return true;
	    }
	  break;
	case IFN_UBSAN_BOUNDS:
	  {
	    tree index = gimple_call_arg (stmt, 1);
	    tree bound = gimple_call_arg (stmt, 2);
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_CODE (bound) == INTEGER_CST)
	      {
		index = fold_convert (TREE_TYPE (bound), index);
		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_le (index, bound))
		  {
		    replace_call_with_value (gsi, NULL_TREE);
		    return true;
		  }
	      }
	  }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MASK_LOAD:
	  changed |= gimple_fold_mask_load (gsi, stmt);
	  break;
	case IFN_MASK_STORE:
	  changed |= gimple_fold_mask_store (gsi, stmt);
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
/* Return true whether NAME has a use on STMT.  */

static bool
has_use_on_stmt (tree name, gimple *stmt)
{
  imm_use_iterator iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, iter, name)
    if (USE_STMT (use_p) == stmt)
      return true;
  return false;
}
5762 replace_stmt_with_simplification (gimple_stmt_iterator
*gsi
,
5763 gimple_match_op
*res_op
,
5764 gimple_seq
*seq
, bool inplace
)
5766 gimple
*stmt
= gsi_stmt (*gsi
);
5767 tree
*ops
= res_op
->ops
;
5768 unsigned int num_ops
= res_op
->num_ops
;
5770 /* Play safe and do not allow abnormals to be mentioned in
5771 newly created statements. See also maybe_push_res_to_seq.
5772 As an exception allow such uses if there was a use of the
5773 same SSA name on the old stmt. */
5774 for (unsigned int i
= 0; i
< num_ops
; ++i
)
5775 if (TREE_CODE (ops
[i
]) == SSA_NAME
5776 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[i
])
5777 && !has_use_on_stmt (ops
[i
], stmt
))
5780 if (num_ops
> 0 && COMPARISON_CLASS_P (ops
[0]))
5781 for (unsigned int i
= 0; i
< 2; ++i
)
5782 if (TREE_CODE (TREE_OPERAND (ops
[0], i
)) == SSA_NAME
5783 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops
[0], i
))
5784 && !has_use_on_stmt (TREE_OPERAND (ops
[0], i
), stmt
))
5787 /* Don't insert new statements when INPLACE is true, even if we could
5788 reuse STMT for the final statement. */
5789 if (inplace
&& !gimple_seq_empty_p (*seq
))
5792 if (gcond
*cond_stmt
= dyn_cast
<gcond
*> (stmt
))
5794 gcc_assert (res_op
->code
.is_tree_code ());
5795 auto code
= tree_code (res_op
->code
);
5796 if (TREE_CODE_CLASS (code
) == tcc_comparison
5797 /* GIMPLE_CONDs condition may not throw. */
5798 && (!flag_exceptions
5799 || !cfun
->can_throw_non_call_exceptions
5800 || !operation_could_trap_p (code
,
5801 FLOAT_TYPE_P (TREE_TYPE (ops
[0])),
5803 gimple_cond_set_condition (cond_stmt
, code
, ops
[0], ops
[1]);
5804 else if (code
== SSA_NAME
)
5805 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, ops
[0],
5806 build_zero_cst (TREE_TYPE (ops
[0])));
5807 else if (code
== INTEGER_CST
)
5809 if (integer_zerop (ops
[0]))
5810 gimple_cond_make_false (cond_stmt
);
5812 gimple_cond_make_true (cond_stmt
);
5816 tree res
= maybe_push_res_to_seq (res_op
, seq
);
5819 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, res
,
5820 build_zero_cst (TREE_TYPE (res
)));
5824 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5826 fprintf (dump_file
, "gimple_simplified to ");
5827 if (!gimple_seq_empty_p (*seq
))
5828 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
5829 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
5832 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
5835 else if (is_gimple_assign (stmt
)
5836 && res_op
->code
.is_tree_code ())
5838 auto code
= tree_code (res_op
->code
);
5840 || gimple_num_ops (stmt
) > get_gimple_rhs_num_ops (code
))
5842 maybe_build_generic_op (res_op
);
5843 gimple_assign_set_rhs_with_ops (gsi
, code
,
5844 res_op
->op_or_null (0),
5845 res_op
->op_or_null (1),
5846 res_op
->op_or_null (2));
5847 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5849 fprintf (dump_file
, "gimple_simplified to ");
5850 if (!gimple_seq_empty_p (*seq
))
5851 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
5852 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
5855 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
5859 else if (res_op
->code
.is_fn_code ()
5860 && gimple_call_combined_fn (stmt
) == combined_fn (res_op
->code
))
5862 gcc_assert (num_ops
== gimple_call_num_args (stmt
));
5863 for (unsigned int i
= 0; i
< num_ops
; ++i
)
5864 gimple_call_set_arg (stmt
, i
, ops
[i
]);
5865 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5867 fprintf (dump_file
, "gimple_simplified to ");
5868 if (!gimple_seq_empty_p (*seq
))
5869 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
5870 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
), 0, TDF_SLIM
);
5872 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
5877 if (gimple_has_lhs (stmt
))
5879 tree lhs
= gimple_get_lhs (stmt
);
5880 if (!maybe_push_res_to_seq (res_op
, seq
, lhs
))
5882 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5884 fprintf (dump_file
, "gimple_simplified to ");
5885 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
5887 gsi_replace_with_seq_vops (gsi
, *seq
);
/* Canonicalize MEM_REFs invariant address operand after propagation.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
{
  bool res = false;
  tree *orig_t = t;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    {
	      if (is_debug)
		return false;
	      gcc_unreachable ();
	    }

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  else if (TREE_CODE (*orig_t) == ADDR_EXPR
	   && TREE_CODE (*t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
    {
      tree base;
      poly_int64 coffset;
      base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
					    &coffset);
      if (base)
	{
	  gcc_assert (TREE_CODE (base) == MEM_REF);
	  poly_int64 moffset;
	  if (mem_ref_offset (base).to_shwi (&moffset))
	    {
	      coffset += moffset;
	      if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
		{
		  coffset += moffset;
		  *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
		  return true;
		}
	    }
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  if (TREE_CODE (*orig_t) == ADDR_EXPR)
	    recompute_tree_invariant_for_addr_expr (*orig_t);
	  res = true;
	}
    }

  return res;
}
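/* For example, after propagating the invariant address &a into a
   dereference, MEM[(int *)&a + 0B] with no dependence clique is
   canonicalized back to the plain reference A when volatility, TBAA,
   alignment and type all match the decl.  */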
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
  unsigned i;

  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	  /* Canonicalize &MEM[ssa_n, CST] to ssa_n p+ CST.
	     This cannot be done in maybe_canonicalize_mem_ref_addr
	     as the gimple now has two operands rather than one.
	     The same reason why this can't be done in
	     maybe_canonicalize_mem_ref_addr is the same reason why
	     this can't be done inplace.  */
	  if (!inplace && TREE_CODE (*rhs) == ADDR_EXPR)
	    {
	      tree inner = TREE_OPERAND (*rhs, 0);
	      if (TREE_CODE (inner) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (inner, 0)) == SSA_NAME
		  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
		{
		  tree ptr = TREE_OPERAND (inner, 0);
		  tree addon = TREE_OPERAND (inner, 1);
		  addon = fold_convert (sizetype, addon);
		  gimple_assign_set_rhs_with_ops (gsi, POINTER_PLUS_EXPR,
						  ptr, addon);
		  changed = true;
		  stmt = gsi_stmt (*gsi);
		}
	    }
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_CALL:
      {
	gcall *call = as_a <gcall *> (stmt);
	for (i = 0; i < gimple_call_num_args (call); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (call, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (call);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	if (*lhs)
	  {
	    combined_fn cfn = gimple_call_combined_fn (call);
	    internal_fn ifn = associated_internal_fn (cfn, TREE_TYPE (*lhs));
	    int opno = first_commutative_argument (ifn);
	    if (opno >= 0)
	      {
		tree arg1 = gimple_call_arg (call, opno);
		tree arg2 = gimple_call_arg (call, opno + 1);
		if (tree_swap_operands_p (arg1, arg2))
		  {
		    gimple_call_set_arg (call, opno, arg2);
		    gimple_call_set_arg (call, opno + 1, arg1);
		    changed = true;
		  }
	      }
	  }
	break;
      }

    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val, true))
	    changed = true;
	}
      break;

    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      break;

    default:;
    }
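  /* E.g. the operand-order canonicalization above rewrites a condition
     like "if (3 < a_1)" to "if (a_1 > 3)" (swapping the comparison code
     as well), so later pattern matching only needs to consider one of
     the two equivalent forms.  */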
  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      gimple_match_op res_op;
      if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  stmt = gsi_stmt (*gsi);
  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X;
		   X == 1 and X != 0 is X itself.  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		stmt = gsi_stmt (*gsi);
		changed = true;
	      }
	  }
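	/* For boolean-typed X this yields, e.g.:
	     X == 1, X != 0  ->  X
	     X == 0, X != 1  ->  ~X     (one-bit precision X)
	     X == 0, X != 1  ->  X ^ 1  (otherwise).  */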
	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a <greturn *> (stmt);
	tree ret = gimple_return_retval (ret_stmt);

	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);

  return changed;
}
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}

/* Valueization callback that ends up following single-use SSA edges only.  */

tree
follow_single_use_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !has_single_use (val))
    return NULL_TREE;
  return val;
}

/* Valueization callback that follows all SSA edges.  */

tree
follow_all_ssa_edges (tree val)
{
  return val;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
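/* A minimal usage sketch (the basic-block walk is assumed, not part of
   this file):

     for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	  !gsi_end_p (gsi); gsi_next (&gsi))
       fold_stmt (&gsi);

   Propagators use the second overload and pass their own valueize
   callback to fold against lattice values instead of the IL.  */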
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */
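/* For example: EXPR = (x != 0) with INVERT true yields (x == 0);
   EXPR = constant 1 with INVERT true yields boolean_false_node; a
   boolean-typed EXPR with INVERT false is returned unchanged.  */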
static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;

  if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }

  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (COMPARISON_CLASS_P (op2)
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (COMPARISON_CLASS_P (op1)
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b,
		   basic_block);
static tree
and_var_with_comparison (tree type, tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b,
			 basic_block);
static tree
and_var_with_comparison_1 (tree type, gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b,
			   basic_block);
static tree
or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b,
		  basic_block);
static tree
or_var_with_comparison (tree, tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b,
			basic_block);
static tree
or_var_with_comparison_1 (tree, gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b,
			  basic_block);
/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */
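/* E.g. for VAR defined as VAR = (a_1 < b_2), ANDing VAR with
   (a_1 >= b_2) folds to boolean_false_node for integer operands.  */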
static tree
and_var_with_comparison (tree type, tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b,
			 basic_block outer_cond_bb)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (type, stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b, outer_cond_bb);
  else
    t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
				   outer_cond_bb);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (tree type, gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b,
			   basic_block outer_cond_bb)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (type, innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2, op2a,
				  op2b, outer_cond_bb);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner2, false, code2, op2a,
					   op2b, outer_cond_bb));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner1, false, code2, op2a,
					   op2b, outer_cond_bb));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b,
					      outer_cond_bb)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b,
					      outer_cond_bb)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */
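/* E.g. with identical operands the two comparisons combine directly:
   (x <= y) AND (x >= y) becomes x == y, and (x < y) AND (x > y)
   becomes false for integer operands (combine_comparisons applies the
   NaN-safe rules for floats).  */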
static tree
and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b,
		   basic_block outer_cond_bb)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (type, op1a, invert, code2, op2a,
					  op2b, outer_cond_bb);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (type, arg, invert, code2,
						      op2a, op2b,
						      outer_cond_bb);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
static basic_block fosa_bb;

static tree
follow_outer_ssa_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (val))
    {
      basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (val));
      if (!def_bb
	  || def_bb == fosa_bb
	  || (dom_info_available_p (CDI_DOMINATORS)
	      && (def_bb == fosa_bb
		  || dominated_by_p (CDI_DOMINATORS, fosa_bb, def_bb))))
	return val;
      return NULL_TREE;
    }
  return val;
}
/* Helper function for maybe_fold_and_comparisons and
   maybe_fold_or_comparisons: try to simplify the AND/OR of the two
   comparisons (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B) using match.pd.
   Return NULL_TREE if we can't simplify this to a single expression.
   As we are going to lower the cost of building SSA names / gimple stmts
   significantly, we need to allocate them on the stack.  This will cause
   the code to be a bit ugly.  */
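/* Note the technique: the two statements and their SSA name results
   built below are throwaway objects living on the stack (XALLOCAVEC /
   XALLOCA); they are never inserted into the IL and only exist so the
   generated match.pd code can be queried via resimplify.  */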
static tree
maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
				      enum tree_code code1,
				      tree op1a, tree op1b,
				      enum tree_code code2, tree op2a,
				      tree op2b,
				      basic_block outer_cond_bb)
{
  /* Allocate gimple stmt1 on the stack.  */
  gassign *stmt1
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt1, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt1, code1);
  gimple_assign_set_rhs1 (stmt1, op1a);
  gimple_assign_set_rhs2 (stmt1, op1b);
  gimple_set_bb (stmt1, NULL);

  /* Allocate gimple stmt2 on the stack.  */
  gassign *stmt2
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt2, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt2, code2);
  gimple_assign_set_rhs1 (stmt2, op2a);
  gimple_assign_set_rhs2 (stmt2, op2b);
  gimple_set_bb (stmt2, NULL);

  /* Allocate SSA names(lhs1) on the stack.  */
  tree lhs1 = (tree) XALLOCA (tree_ssa_name);
  memset (lhs1, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs1, SSA_NAME);
  TREE_TYPE (lhs1) = type;
  init_ssa_name_imm_use (lhs1);

  /* Allocate SSA names(lhs2) on the stack.  */
  tree lhs2 = (tree) XALLOCA (tree_ssa_name);
  memset (lhs2, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs2, SSA_NAME);
  TREE_TYPE (lhs2) = type;
  init_ssa_name_imm_use (lhs2);

  gimple_assign_set_lhs (stmt1, lhs1);
  gimple_assign_set_lhs (stmt2, lhs2);

  gimple_match_op op (gimple_match_cond::UNCOND, code,
		      type, gimple_assign_lhs (stmt1),
		      gimple_assign_lhs (stmt2));
  fosa_bb = outer_cond_bb;
  if (op.resimplify (NULL, (!outer_cond_bb
			    ? follow_all_ssa_edges : follow_outer_ssa_edges)))
    {
      if (gimple_simplified_result_is_gimple_val (&op))
	{
	  tree res = op.ops[0];
	  if (res == lhs1)
	    return build2 (code1, type, op1a, op1b);
	  else if (res == lhs2)
	    return build2 (code2, type, op2a, op2b);
	  else
	    return res;
	}
      else if (op.code.is_tree_code ()
	       && TREE_CODE_CLASS ((tree_code) op.code) == tcc_comparison)
	{
	  tree op0 = op.ops[0];
	  tree op1 = op.ops[1];
	  if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
	    return NULL_TREE;  /* not simple */

	  return build2 ((enum tree_code) op.code, op.type, op0, op1);
	}
    }

  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */
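/* E.g. for integer X and Y, (X < Y) AND (X > Y) folds to a false node
   and (X <= Y) AND (X >= Y) to X == Y, while unrelated tests such as
   (X < Y) AND (Z < W) return NULL_TREE.  */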
tree
maybe_fold_and_comparisons (tree type,
			    enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b,
			    basic_block outer_cond_bb)
{
  if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
				  outer_cond_bb))
    return t;

  if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
				  outer_cond_bb))
    return t;

  if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
						     op1a, op1b, code2, op2a,
						     op2b, outer_cond_bb))
    return t;

  return NULL_TREE;
}
/* Helper function for or_comparisons_1: try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree type, tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b,
			basic_block outer_cond_bb)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (type, stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b, outer_cond_bb);
  else
    t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
				  outer_cond_bb);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (tree type, gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b,
			  basic_block outer_cond_bb)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (type, innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2, op2a, op2b, outer_cond_bb);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner2, false, code2, op2a,
					  op2b, outer_cond_bb));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner1, false, code2, op2a,
					  op2b, outer_cond_bb));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b,
					     outer_cond_bb)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b,
					     outer_cond_bb)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */
static tree
or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b,
		  basic_block outer_cond_bb)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (type, op1a, invert, code2, op2a,
					 op2b, outer_cond_bb);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (type, arg, invert, code2,
						     op2a, op2b,
						     outer_cond_bb);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */
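/* E.g. for integer X and Y, (X < Y) OR (X > Y) folds to X != Y and
   (X < Y) OR (X >= Y) to a true node; unrelated tests return
   NULL_TREE.  */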
tree
maybe_fold_or_comparisons (tree type,
			   enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b,
			   basic_block outer_cond_bb)
{
  if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
				 outer_cond_bb))
    return t;

  if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
				 outer_cond_bb))
    return t;

  if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
						     op1a, op1b, code2, op2a,
						     op2b, outer_cond_bb))
    return t;

  return NULL_TREE;
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */
tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  gimple_match_op res_op;
  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (&res_op))
	res = res_op.ops[0];
      else if (mprts_hook)
	res = mprts_hook (&res_op);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  poly_int64 offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && known_eq (CONSTRUCTOR_NELTS (rhs),
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i, nelts;
		  tree val;

		  nelts = CONSTRUCTOR_NELTS (rhs);
		  tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec.quick_push (val);
		      else
			return NULL_TREE;
		    }

		  return vec.build ();
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    /* Translate &x + CST into an invariant form suitable for
	       further propagation.  */
	    if (subcode == POINTER_PLUS_EXPR)
	      {
		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		if (TREE_CODE (op0) == ADDR_EXPR
		    && TREE_CODE (op1) == INTEGER_CST)
		  {
		    tree off = fold_convert (ptr_type_node, op1);
		    return build1_loc
			(loc, ADDR_EXPR, TREE_TYPE (op0),
			 fold_build2 (MEM_REF,
				      TREE_TYPE (TREE_TYPE (op0)),
				      unshare_expr (op0), off));
		  }
	      }
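	    /* E.g. valueized op0 = &a and op1 = 4 yield (roughly) the
	       invariant address &MEM[&a, 4], which later propagation
	       can consume directly.  */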
	    /* Canonicalize bool != 0 and bool == 0 appearing after
	       valueization.  While gimple_simplify handles this
	       it can get confused by the ~X == 1 -> X == 0 transform
	       which we can't reduce to a SSA name or a constant
	       (and we have no way to tell gimple_simplify to not
	       consider those transforms in the first place).  */
	    else if (subcode == EQ_EXPR
		     || subcode == NE_EXPR)
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree op0 = gimple_assign_rhs1 (stmt);
		if (useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (op0)))
		  {
		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		    op0 = (*valueize) (op0);
		    if (TREE_CODE (op0) == INTEGER_CST)
		      std::swap (op0, op1);
		    if (TREE_CODE (op1) == INTEGER_CST
			&& ((subcode == NE_EXPR && integer_zerop (op1))
			    || (subcode == EQ_EXPR && integer_onep (op1))))
		      return op0;
		  }
	      }
	    return NULL_TREE;

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
	      return fold_ternary_loc (loc, subcode,
				       TREE_TYPE (gimple_assign_lhs (stmt)),
				       op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      case IFN_BUILTIN_EXPECT:
		{
		  tree arg0 = gimple_call_arg (stmt, 0);
		  tree op0 = (*valueize) (arg0);
		  if (TREE_CODE (op0) == INTEGER_CST)
		    return op0;
		  return NULL_TREE;
		}
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
		return NULL_TREE;
	      }

	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && fndecl_built_in_p (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt),
					      args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, poly_int64_pod *bit_offset,
		      tree (*valueize)(tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
      if (!boff.to_shwi (bit_offset))
	return NULL_TREE;

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
/* CTOR is CONSTRUCTOR of an array type.  Fold a reference of SIZE bits
   to the memory at bit OFFSET.  When non-null, TYPE is the expected
   type of the reference; otherwise the type of the referenced element
   is used instead.  When SIZE is zero, attempt to fold a reference to
   the entire element which OFFSET refers to.  Increment *SUBOFF by
   the bit offset of the accessed element.  */
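/* For example, given a hypothetical
     static const int a[4] = { 1, 2, 3, 4 };
   on a target with 32-bit int, a 32-bit read at bit offset 64 into a's
   CONSTRUCTOR folds to the constant 3, while a read past the
   initialized elements folds to zero (see the trailing comment in the
   function).  */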
static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl,
			   unsigned HOST_WIDE_INT *suboff)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* When TYPE is non-null, verify that it specifies a constant-sized
     access of a multiple of the array element size.  Avoid division
     by zero below when ELT_SIZE is zero, such as with the result of
     an initializer for a zero-length array or an empty struct.  */
  if (elt_size == 0
      || (type
	  && (!TYPE_SIZE_UNIT (type)
	      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
  if (size > elt_sz * BITS_PER_UNIT)
    {
      /* native_encode_expr constraints.  */
      if (size > MAX_BITSIZE_MODE_ANY_MODE
	  || size % BITS_PER_UNIT != 0
	  || inner_offset % BITS_PER_UNIT != 0
	  || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
	return NULL_TREE;

      unsigned ctor_idx;
      tree val = get_array_ctor_element_at_index (ctor, access_index,
						  &ctor_idx);
      if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
	return build_zero_cst (type);

      /* native-encode adjacent ctor elements.  */
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      unsigned bufoff = 0;
      offset_int index = 0;
      offset_int max_index = access_index;
      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
      if (!val)
	val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
      else if (!CONSTANT_CLASS_P (val))
	return NULL_TREE;
      if (!elt->index)
	;
      else if (TREE_CODE (elt->index) == RANGE_EXPR)
	{
	  index = wi::to_offset (TREE_OPERAND (elt->index, 0));
	  max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
	}
      else
	index = max_index = wi::to_offset (elt->index);
      index = wi::umax (index, access_index);
      do
	{
	  if (bufoff + elt_sz > sizeof (buf))
	    elt_sz = sizeof (buf) - bufoff;
	  int len = native_encode_expr (val, buf + bufoff, elt_sz,
					inner_offset / BITS_PER_UNIT);
	  if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
	    return NULL_TREE;
	  inner_offset = 0;
	  bufoff += len;

	  access_index += 1;
	  if (wi::cmpu (access_index, index) == 0)
	    val = elt->value;
	  else if (wi::cmpu (access_index, max_index) > 0)
	    {
	      ctor_idx++;
	      if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
		{
		  val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		  ++max_index;
		}
	      else
		{
		  elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
		  index = 0;
		  max_index = access_index;
		  if (!elt->index)
		    ;
		  else if (TREE_CODE (elt->index) == RANGE_EXPR)
		    {
		      index = wi::to_offset (TREE_OPERAND (elt->index, 0));
		      max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
		    }
		  else
		    index = max_index = wi::to_offset (elt->index);
		  index = wi::umax (index, access_index);
		  if (wi::cmpu (access_index, index) == 0)
		    val = elt->value;
		  else
		    val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		}
	    }
	}
      while (bufoff < size / BITS_PER_UNIT);

      return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }

  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    {
      if (!size && TREE_CODE (val) != CONSTRUCTOR)
	{
	  /* For the final reference to the entire accessed element
	     (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
	     may be null) in favor of the type of the element, and set
	     SIZE to the size of the accessed element.  */
	  inner_offset = 0;
	  type = TREE_TYPE (val);
	  size = elt_sz * BITS_PER_UNIT;
	}
      else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
	       && TREE_CODE (val) == CONSTRUCTOR
	       && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
	/* If this isn't the last element in the CTOR and a CTOR itself
	   and it does not cover the whole object we are requesting give up
	   since we're not set up for combining from multiple CTORs.  */
	return NULL_TREE;

      *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
      return fold_ctor_reference (type, val, inner_offset, size, from_decl,
				  suboff);
    }

  /* Memory not explicitly mentioned in constructor is 0 (or
     the reference is out of range).  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
/* CTOR is CONSTRUCTOR of an aggregate or vector.  Fold a reference
   of SIZE bits to the memory at bit OFFSET.  When non-null, TYPE
   is the expected type of the reference; otherwise the type of
   the referenced member is used instead.  When SIZE is zero,
   attempt to fold a reference to the entire member which OFFSET
   refers to.  Increment *SUBOFF by the bit offset of the accessed
   member.  */
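/* For example, for a hypothetical
     static const struct { int i; int j; } s = { 7, 8 };
   a read of member j's bits folds to the constant 8, while an access
   that extends beyond its field or spans multiple fields returns
   NULL_TREE.  */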
static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl,
			      unsigned HOST_WIDE_INT *suboff)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);

      if (!field_size)
	{
	  /* Determine the size of the flexible array member from
	     the size of the initializer provided for it.  */
	  field_size = TYPE_SIZE (TREE_TYPE (cval));
	}

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      offset_int bitoffset
	= (wi::to_offset (field_offset)
	   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      offset_int bitoffset_end;
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      /* Compute the bit offset of the end of the desired access.
	 As a special case, if the size of the desired access is
	 zero, assume the access is to the entire field (and let
	 the caller make any necessary adjustments by storing
	 the actual bounds of the field in FIELDBOUNDS).  */
      offset_int access_end = offset_int (offset);
      if (size)
	access_end += size;
      else
	access_end = bitoffset_end;

      /* Is there any overlap between the desired access at
	 [OFFSET, OFFSET+SIZE) and the offset of the field within
	 the object at [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  *suboff += bitoffset.to_uhwi ();

	  if (!size && TREE_CODE (cval) != CONSTRUCTOR)
	    {
	      /* For the final reference to the entire accessed member
		 (SIZE is zero), reset OFFSET, disregard TYPE (which may
		 be null) in favor of the type of the member, and set
		 SIZE to the size of the accessed member.  */
	      offset = bitoffset.to_uhwi ();
	      type = TREE_TYPE (cval);
	      size = (bitoffset_end - bitoffset).to_uhwi ();
	    }

	  /* We do have overlap.  Now see if the field is large enough
	     to cover the access.  Give up for accesses that extend
	     beyond the end of the object or that span multiple fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;

	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl, suboff);
	}
    }

  if (!type)
    return NULL_TREE;

  return build_zero_cst (type);
}
/* CTOR is value initializing memory.  Fold a reference of TYPE and
   bit size POLY_SIZE to the memory at bit POLY_OFFSET.  When POLY_SIZE
   is zero, attempt to fold a reference to the entire subobject
   which OFFSET refers to.  This is used when folding accesses to
   string members of aggregates.  When non-null, set *SUBOFF to
   the bit offset of the accessed subobject.  */
8052 fold_ctor_reference (tree type
, tree ctor
, const poly_uint64
&poly_offset
,
8053 const poly_uint64
&poly_size
, tree from_decl
,
8054 unsigned HOST_WIDE_INT
*suboff
/* = NULL */)
8058 /* We found the field with exact match. */
8060 && useless_type_conversion_p (type
, TREE_TYPE (ctor
))
8061 && known_eq (poly_offset
, 0U))
8062 return canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
8064 /* The remaining optimizations need a constant size and offset. */
8065 unsigned HOST_WIDE_INT size
, offset
;
8066 if (!poly_size
.is_constant (&size
) || !poly_offset
.is_constant (&offset
))
8069 /* We are at the end of walk, see if we can view convert the
8071 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor
)) && !offset
8072 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
8073 && !compare_tree_int (TYPE_SIZE (type
), size
)
8074 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor
)), size
))
8076 ret
= canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
8079 ret
= fold_unary (VIEW_CONVERT_EXPR
, type
, ret
);
8081 STRIP_USELESS_TYPE_CONVERSION (ret
);
8085 /* For constants and byte-aligned/sized reads try to go through
8086 native_encode/interpret. */
8087 if (CONSTANT_CLASS_P (ctor
)
8088 && BITS_PER_UNIT
== 8
8089 && offset
% BITS_PER_UNIT
== 0
8090 && offset
/ BITS_PER_UNIT
<= INT_MAX
8091 && size
% BITS_PER_UNIT
== 0
8092 && size
<= MAX_BITSIZE_MODE_ANY_MODE
8093 && can_native_interpret_type_p (type
))
8095 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
8096 int len
= native_encode_expr (ctor
, buf
, size
/ BITS_PER_UNIT
,
8097 offset
/ BITS_PER_UNIT
);
8099 return native_interpret_expr (type
, buf
, len
);
8101 if (TREE_CODE (ctor
) == CONSTRUCTOR
)
8103 unsigned HOST_WIDE_INT dummy
= 0;
8108 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
8109 || TREE_CODE (TREE_TYPE (ctor
)) == VECTOR_TYPE
)
8110 ret
= fold_array_ctor_reference (type
, ctor
, offset
, size
,
8113 ret
= fold_nonarray_ctor_reference (type
, ctor
, offset
, size
,
8116 /* Fall back to native_encode_initializer. Needs to be done
8117 only in the outermost fold_ctor_reference call (because it itself
8118 recurses into CONSTRUCTORs) and doesn't update suboff. */
8119 if (ret
== NULL_TREE
8121 && BITS_PER_UNIT
== 8
8122 && offset
% BITS_PER_UNIT
== 0
8123 && offset
/ BITS_PER_UNIT
<= INT_MAX
8124 && size
% BITS_PER_UNIT
== 0
8125 && size
<= MAX_BITSIZE_MODE_ANY_MODE
8126 && can_native_interpret_type_p (type
))
8128 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
8129 int len
= native_encode_initializer (ctor
, buf
, size
/ BITS_PER_UNIT
,
8130 offset
/ BITS_PER_UNIT
);
8132 return native_interpret_expr (type
, buf
, len
);
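
/* Usage sketch (hypothetical helper, not part of the original file):
   fold a 32-bit load at bit offset 64 from the CONSTRUCTOR of
   "static const int a[4] = {1, 2, 3, 4};", yielding the INTEGER_CST 3.
   Assumes a 32-bit int target.  */

static tree ATTRIBUTE_UNUSED
example_fold_third_element (tree ctor)
{
  /* TYPE is the type of the access, OFFSET and SIZE are in bits,
     FROM_DECL is not needed here.  */
  return fold_ctor_reference (integer_type_node, ctor, 64, 32, NULL_TREE);
}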
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

static tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (sizetype));
	      woffset *= tree_to_uhwi (unit_size);
	      woffset *= BITS_PER_UNIT;
	      if (woffset.to_shwi (&offset))
		{
		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
      if (tem)
	return tem;

      /* For bit field reads try to read the representative and
	 adjust.  */
      if (TREE_CODE (t) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
	  && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
	{
	  HOST_WIDE_INT csize, coffset;
	  tree field = TREE_OPERAND (t, 1);
	  tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
	      && size.is_constant (&csize)
	      && offset.is_constant (&coffset)
	      && (coffset % BITS_PER_UNIT != 0
		  || csize % BITS_PER_UNIT != 0)
	      && !reverse
	      && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
	    {
	      poly_int64 bitoffset;
	      poly_uint64 field_offset, repr_offset;
	      if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
		  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
		bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
	      else
		bitoffset = 0;
	      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
			    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
	      HOST_WIDE_INT bitoff;
	      int diff = (TYPE_PRECISION (TREE_TYPE (repr))
			  - TYPE_PRECISION (TREE_TYPE (field)));
	      if (bitoffset.is_constant (&bitoff)
		  && bitoff >= 0
		  && bitoff <= diff)
		{
		  offset -= bitoff;
		  size = tree_to_uhwi (DECL_SIZE (repr));

		  tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
					     size, base);
		  if (tem && TREE_CODE (tem) == INTEGER_CST)
		    {
		      if (!BYTES_BIG_ENDIAN)
			tem = wide_int_to_tree (TREE_TYPE (field),
						wi::lrshift (wi::to_wide (tem),
							     bitoff));
		      else
			tem = wide_int_to_tree (TREE_TYPE (field),
						wi::lrshift (wi::to_wide (tem),
							     diff - bitoff));
		      return tem;
		    }
		}
	    }
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
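
/* For example (illustrative): given
     static const int a[2] = { 10, 20 };
   folding the reference a[1] yields the INTEGER_CST 20, while a
   reference whose index cannot be resolved to a constant yields
   NULL_TREE.  */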
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check that we have a virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that the ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be an array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in constant time.  We expect that the front end produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* The C++ FE can now produce indexed fields, and we check if the indexes
     match.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
      gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For a type-inconsistent program we may end up looking up a virtual method
     in a virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_unreachable ();
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When the cgraph node is missing and the function is not public,
	 we cannot devirtualize.  This can happen in WHOPR when the actual
	 method ends up in another partition, because we found the
	 devirtualization possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
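
/* Worked example (illustrative, not from the original source): on an
   LP64 target each vtable slot is a 64-bit pointer, so ELT_SIZE is 8
   bytes and SIZE is 64 bits.  A lookup with TOKEN 2 and byte OFFSET 16
   computes
     offset = 16 * 8 + 2 * 64 = 256 bits
     access_index = 256 / 8 / 8 = 4
   and so returns the FUNCTION_DECL stored in constructor slot 4.  */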
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view. */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
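
/* For instance (illustrative, not from the original source): given
     v4si v;  int *p = (int *)&v;
   the dereference *(p + 4) matches the POINTER_PLUS_EXPR case above and
   folds to BIT_FIELD_REF <v, 32, 32>, i.e. the second vector lane.  */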
/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case ABS_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   If IN_PLACE is true, adjust the stmt in place and return NULL.
   Otherwise returns a sequence of statements that replace STMT and also
   contain a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt, bool in_place /* = false */)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
    gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
  else
    for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
      {
	tree op = gimple_op (stmt, i);
	op = gimple_convert (&stmts, type, op);
	gimple_set_op (stmt, i, op);
      }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_set_modified (stmt, true);
  if (in_place)
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (stmts)
	gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
      stmts = NULL;
    }
  else
    gimple_seq_add_stmt (&stmts, stmt);
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  if (in_place)
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_insert_after (&gsi, cvt, GSI_SAME_STMT);
      update_stmt (stmt);
    }
  else
    gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
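
/* Example transformation (illustrative): for a signed addition
     x_1 = a_2 + b_3;
   the rewritten sequence is
     _4 = (unsigned int) a_2;
     _5 = (unsigned int) b_3;
     _6 = _4 + _5;
     x_1 = (int) _6;
   where the unsigned addition wraps instead of invoking undefined
   behavior on overflow.  */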
/* The valueization hook we use for the gimple_build API simplification.
   This makes us match fold_buildN behavior by only combining with
   statements in the sequence(s) we are currently building.  */

static tree
gimple_build_valueize (tree op)
{
  if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
    return op;
  return NULL_TREE;
}

/* Helper for gimple_build to perform the final insertion of stmts on SEQ.  */

static void
gimple_build_insert_seq (gimple_stmt_iterator *gsi,
			 bool before, gsi_iterator_update update,
			 gimple_seq seq)
{
  if (before)
    {
      if (gsi->bb)
	gsi_insert_seq_before (gsi, seq, update);
      else
	gsi_insert_seq_before_without_update (gsi, seq, update);
    }
  else
    {
      if (gsi->bb)
	gsi_insert_seq_after (gsi, seq, update);
      else
	gsi_insert_seq_after_without_update (gsi, seq, update);
    }
}
/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and inserts statements possibly defining it
   before GSI if BEFORE is true or after GSI if false and advances
   the iterator accordingly.
   If GSI refers to a basic block, simplifying is allowed to look
   at all SSA defs; when it does not, it is restricted to
   SSA defs that are not associated with a basic block yet,
   indicating they belong to the sequence currently being built.  */

tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, enum tree_code code, tree type, tree op0)
{
  gimple_seq seq = NULL;
  tree res
    = gimple_simplify (code, type, op0, &seq,
		       gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == REALPART_EXPR
	  || code == IMAGPART_EXPR
	  || code == VIEW_CONVERT_EXPR)
	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
	stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (&seq, stmt);
    }
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}
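
/* Usage sketch (hypothetical helper, not part of the original file):
   negate VAL before the statement at GSI, reusing an existing value if
   gimple_simplify can fold the negation away.  */

static tree ATTRIBUTE_UNUSED
example_build_negate (gimple_stmt_iterator *gsi, tree val)
{
  return gimple_build (gsi, true, GSI_SAME_STMT, UNKNOWN_LOCATION,
		       NEGATE_EXPR, TREE_TYPE (val), val);
}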
/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value inserting any new statements at GSI honoring BEFORE
   and UPDATE.  */

tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, enum tree_code code, tree type,
	      tree op0, tree op1)
{
  gimple_seq seq = NULL;
  tree res
    = gimple_simplify (code, type, op0, op1, &seq,
		       gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt = gimple_build_assign (res, code, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (&seq, stmt);
    }
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}
/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value inserting any new statements at GSI honoring BEFORE
   and UPDATE.  */

tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, enum tree_code code, tree type,
	      tree op0, tree op1, tree op2)
{
  gimple_seq seq = NULL;
  tree res
    = gimple_simplify (code, type, op0, op1, op2, &seq,
		       gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == BIT_FIELD_REF)
	stmt = gimple_build_assign (res, code,
				    build3 (code, type, op0, op1, op2));
      else
	stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (&seq, stmt);
    }
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}
/* Build the call FN () with a result of type TYPE (or no result if TYPE is
   void) with a location LOC.  Returns the built expression value (or NULL_TREE
   if TYPE is void) inserting any new statements at GSI honoring BEFORE
   and UPDATE.  */

tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, combined_fn fn, tree type)
{
  tree res = NULL_TREE;
  gimple_seq seq = NULL;
  gcall *stmt;
  if (internal_fn_p (fn))
    stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
  else
    {
      tree decl = builtin_decl_implicit (as_builtin_fn (fn));
      stmt = gimple_build_call (decl, 0);
    }
  if (!VOID_TYPE_P (type))
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple_call_set_lhs (stmt, res);
    }
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (&seq, stmt);
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}
/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) inserting any new
   statements at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, combined_fn fn,
	      tree type, tree arg0)
{
  gimple_seq seq = NULL;
  tree res = gimple_simplify (fn, type, arg0, &seq, gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 1, arg0);
	}
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (&seq, stmt);
    }
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}
/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) inserting any new
   statements at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, combined_fn fn,
	      tree type, tree arg0, tree arg1)
{
  gimple_seq seq = NULL;
  tree res = gimple_simplify (fn, type, arg0, arg1, &seq,
			      gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 2, arg0, arg1);
	}
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (&seq, stmt);
    }
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}
/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) inserting any new
   statements at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, combined_fn fn,
	      tree type, tree arg0, tree arg1, tree arg2)
{
  gimple_seq seq = NULL;
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
			      &seq, gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn),
					   3, arg0, arg1, arg2);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
	}
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (&seq, stmt);
    }
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}
/* Build CODE (OP0) with a result of type TYPE (or no result if TYPE is
   void) with location LOC, simplifying it first if possible.  Returns the
   built expression value (or NULL_TREE if TYPE is void) inserting any new
   statements at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, code_helper code, tree type, tree op0)
{
  if (code.is_tree_code ())
    return gimple_build (gsi, before, update, loc, tree_code (code), type, op0);
  return gimple_build (gsi, before, update, loc, combined_fn (code), type, op0);
}

/* Build CODE (OP0, OP1) with a result of type TYPE (or no result if TYPE is
   void) with location LOC, simplifying it first if possible.  Returns the
   built expression value (or NULL_TREE if TYPE is void) inserting any new
   statements at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, code_helper code, tree type, tree op0, tree op1)
{
  if (code.is_tree_code ())
    return gimple_build (gsi, before, update,
			 loc, tree_code (code), type, op0, op1);
  return gimple_build (gsi, before, update,
		       loc, combined_fn (code), type, op0, op1);
}

/* Build CODE (OP0, OP1, OP2) with a result of type TYPE (or no result if TYPE
   is void) with location LOC, simplifying it first if possible.  Returns the
   built expression value (or NULL_TREE if TYPE is void) inserting any new
   statements at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, code_helper code,
	      tree type, tree op0, tree op1, tree op2)
{
  if (code.is_tree_code ())
    return gimple_build (gsi, before, update,
			 loc, tree_code (code), type, op0, op1, op2);
  return gimple_build (gsi, before, update,
		       loc, combined_fn (code), type, op0, op1, op2);
}
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression inserting any new statements
   at GSI honoring BEFORE and UPDATE.  */

tree
gimple_convert (gimple_stmt_iterator *gsi,
		bool before, gsi_iterator_update update,
		location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (gsi, before, update, loc, NOP_EXPR, type, op);
}

/* Build the conversion (ptrofftype) OP with a result of a type
   compatible with ptrofftype with location LOC if such conversion
   is necessary in GIMPLE, simplifying it first.
   Returns the built expression value inserting any new statements
   at GSI honoring BEFORE and UPDATE.  */

tree
gimple_convert_to_ptrofftype (gimple_stmt_iterator *gsi,
			      bool before, gsi_iterator_update update,
			      location_t loc, tree op)
{
  if (ptrofftype_p (TREE_TYPE (op)))
    return op;
  return gimple_convert (gsi, before, update, loc, sizetype, op);
}
/* Build a vector of type TYPE in which each element has the value OP.
   Return a gimple value for the result, inserting any new statements
   at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build_vector_from_val (gimple_stmt_iterator *gsi,
			      bool before, gsi_iterator_update update,
			      location_t loc, tree type, tree op)
{
  if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
      && !CONSTANT_CLASS_P (op))
    return gimple_build (gsi, before, update,
			 loc, VEC_DUPLICATE_EXPR, type, op);

  tree res, vec = build_vector_from_val (type, op);
  if (is_gimple_val (vec))
    return vec;
  if (gimple_in_ssa_p (cfun))
    res = make_ssa_name (type);
  else
    res = create_tmp_reg (type);
  gimple_seq seq = NULL;
  gimple *stmt = gimple_build_assign (res, vec);
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (&seq, stmt);
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}
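
/* For example (illustrative): splatting a constant 1 to V4SI yields the
   VECTOR_CST {1, 1, 1, 1} with no new statements, while splatting the
   SSA name s_1 emits an assignment from the CONSTRUCTOR
   {s_1, s_1, s_1, s_1}; only variable-length vector types take the
   VEC_DUPLICATE_EXPR path above.  */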
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, inserting
   any new instructions to GSI honoring BEFORE and UPDATE.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_stmt_iterator *gsi,
		     bool before, gsi_iterator_update update,
		     location_t loc, tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!CONSTANT_CLASS_P ((*builder)[i]))
      {
	gimple_seq seq = NULL;
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (&seq, stmt);
	gimple_build_insert_seq (gsi, before, update, seq);
	return res;
      }
  return builder->build ();
}
/* Emit gimple statements that round a value given in OLD_SIZE upwards
   to a multiple of ALIGN, inserting them at GSI honoring BEFORE and
   UPDATE.  Return the tree node representing the rounded size; it is
   of type TYPE.  */

tree
gimple_build_round_up (gimple_stmt_iterator *gsi,
		       bool before, gsi_iterator_update update,
		       location_t loc, tree type,
		       tree old_size, unsigned HOST_WIDE_INT align)
{
  unsigned HOST_WIDE_INT tg_mask = align - 1;
  /* tree new_size = (old_size + tg_mask) & ~tg_mask;  */
  gcc_assert (INTEGRAL_TYPE_P (type));
  tree tree_mask = build_int_cst (type, tg_mask);
  tree oversize = gimple_build (gsi, before, update,
				loc, PLUS_EXPR, type, old_size, tree_mask);

  tree mask = build_int_cst (type, -align);
  return gimple_build (gsi, before, update,
		       loc, BIT_AND_EXPR, type, oversize, mask);
}
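
/* Usage sketch (hypothetical helper, not part of the original file):
   round SIZE up to a 16-byte boundary before GSI.  Assumes SIZE has
   type size_t.  For ALIGN == 16 this emits
     oversize_1 = size + 15;
     rounded_2 = oversize_1 & -16;  */

static tree ATTRIBUTE_UNUSED
example_round_up_to_16 (gimple_stmt_iterator *gsi, tree size)
{
  return gimple_build_round_up (gsi, true, GSI_SAME_STMT, UNKNOWN_LOCATION,
				size_type_node, size, 16);
}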
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     type,
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      type,
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
/* Return true if return value of call STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  tree arg0
    = gimple_call_num_args (stmt) > 0 ? gimple_call_arg (stmt, 0) : NULL_TREE;
  tree arg1
    = gimple_call_num_args (stmt) > 1 ? gimple_call_arg (stmt, 1) : NULL_TREE;
  tree lhs = gimple_call_lhs (stmt);
  return (lhs
	  && tree_call_nonnegative_warnv_p (TREE_TYPE (lhs),
					    gimple_call_combined_fn (stmt),
					    arg0, arg1,
					    strict_overflow_p, depth));
}
/* Return true if the result of PHI node STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
	return false;
    }
  return true;
}
/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
						depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
					      depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
					     depth);
    default:
      return false;
    }
}
/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
					  gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
					   gimple_assign_rhs1 (stmt),
					   gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
/* Return true if the floating-point value computed by call STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_integer_valued_real_p (gimple *stmt, int depth)
{
  tree arg0 = (gimple_call_num_args (stmt) > 0
	       ? gimple_call_arg (stmt, 0)
	       : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
	       ? gimple_call_arg (stmt, 1)
	       : NULL_TREE);
  return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
				     arg0, arg1, depth);
}
/* Return true if the floating-point result of phi STMT is known to have
   an integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!integer_valued_real_single_p (arg, depth + 1))
	return false;
    }
  return true;
}
/* Return true if the floating-point value computed by STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_integer_valued_real_p (stmt, depth);
    case GIMPLE_CALL:
      return gimple_call_integer_valued_real_p (stmt, depth);
    case GIMPLE_PHI:
      return gimple_phi_integer_valued_real_p (stmt, depth);
    default:
      return false;
    }
}