1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2021 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-access.h"
34 #include "gimple-ssa-warn-restrict.h"
35 #include "fold-const.h"
38 #include "stor-layout.h"
40 #include "gimple-fold.h"
42 #include "gimple-iterator.h"
43 #include "tree-into-ssa.h"
45 #include "tree-object-size.h"
47 #include "tree-ssa-propagate.h"
48 #include "ipa-utils.h"
49 #include "tree-ssa-address.h"
50 #include "langhooks.h"
51 #include "gimplify-me.h"
55 #include "gimple-match.h"
56 #include "gomp-constants.h"
57 #include "optabs-query.h"
58 #include "omp-general.h"
60 #include "fold-const-call.h"
61 #include "stringpool.h"
64 #include "diagnostic-core.h"
67 #include "tree-vector-builder.h"
68 #include "tree-ssa-strlen.h"
73 enum strlen_range_kind
{
74 /* Compute the exact constant string length. */
76 /* Compute the maximum constant string length. */
78 /* Compute a range of string lengths bounded by object sizes. When
79 the length of a string cannot be determined, consider as the upper
80 bound the size of the enclosing object the string may be a member
81 or element of. Also determine the size of the largest character
82 array the string may refer to. */
84 /* Determine the integer value of the argument (not string length). */
89 get_range_strlen (tree
, bitmap
*, strlen_range_kind
, c_strlen_data
*, unsigned);
91 /* Return true when DECL can be referenced from current unit.
92 FROM_DECL (if non-null) specifies the variable from whose constructor DECL was taken.
93 We can get declarations that are not possible to reference for various
96 1) When analyzing C++ virtual tables.
97 C++ virtual tables do have known constructors even
98 when they are keyed to other compilation unit.
99 Those tables can contain pointers to methods and vars
100 in other units. Those methods have both STATIC and EXTERNAL
102 2) In WHOPR mode devirtualization might lead to reference
103 to method that was partitioned elsewhere.
104 In this case we have static VAR_DECL or FUNCTION_DECL
105 that has no corresponding callgraph/varpool node
107 3) COMDAT functions referred by external vtables that
108 we devirtualize only during final compilation stage.
109 At this time we already decided that we will not output
110 the function body and thus we can't reference the symbol
114 can_refer_decl_in_current_unit_p (tree decl
, tree from_decl
)
117 struct cgraph_node
*node
;
120 if (DECL_ABSTRACT_P (decl
))
123 /* We are concerned only about static/external vars and functions. */
124 if ((!TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
125 || !VAR_OR_FUNCTION_DECL_P (decl
))
128 /* Static objects can be referred only if they are defined and not optimized
130 if (!TREE_PUBLIC (decl
))
132 if (DECL_EXTERNAL (decl
))
134 /* Before we start optimizing unreachable code we can be sure all
135 static objects are defined. */
136 if (symtab
->function_flags_ready
)
138 snode
= symtab_node::get (decl
);
139 if (!snode
|| !snode
->definition
)
141 node
= dyn_cast
<cgraph_node
*> (snode
);
142 return !node
|| !node
->inlined_to
;
145 /* We will later output the initializer, so we can refer to it.
146 So we are concerned only when DECL comes from initializer of
147 external var or var that has been optimized out. */
149 || !VAR_P (from_decl
)
150 || (!DECL_EXTERNAL (from_decl
)
151 && (vnode
= varpool_node::get (from_decl
)) != NULL
152 && vnode
->definition
)
154 && (vnode
= varpool_node::get (from_decl
)) != NULL
155 && vnode
->in_other_partition
))
157 /* We are folding a reference from an external vtable. The vtable may refer
158 to a symbol keyed to another compilation unit. The other compilation
159 unit may be in a separate DSO and the symbol may be hidden. */
160 if (DECL_VISIBILITY_SPECIFIED (decl
)
161 && DECL_EXTERNAL (decl
)
162 && DECL_VISIBILITY (decl
) != VISIBILITY_DEFAULT
163 && (!(snode
= symtab_node::get (decl
)) || !snode
->in_other_partition
))
165 /* When the function is public, we can always introduce a new reference.
166 The exception is COMDAT functions, where introducing a direct
167 reference implies the need to include the function body in the current unit. */
168 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
170 /* We have COMDAT. We are going to check if we still have definition
171 or if the definition is going to be output in other partition.
172 Bypass this when gimplifying; all needed functions will be produced.
174 As observed in PR20991 for already optimized out comdat virtual functions
175 it may be tempting to not necessarily give up because the copy will be
176 output elsewhere when corresponding vtable is output.
177 This is however not possible - ABI specify that COMDATs are output in
178 units where they are used and when the other unit was compiled with LTO
179 it is possible that vtable was kept public while the function itself
181 if (!symtab
->function_flags_ready
)
184 snode
= symtab_node::get (decl
);
186 || ((!snode
->definition
|| DECL_EXTERNAL (decl
))
187 && (!snode
->in_other_partition
188 || (!snode
->forced_by_abi
&& !snode
->force_output
))))
190 node
= dyn_cast
<cgraph_node
*> (snode
);
191 return !node
|| !node
->inlined_to
;
194 /* Create a temporary for TYPE for a statement STMT. If the current function
195 is in SSA form, a SSA name is created. Otherwise a temporary register
199 create_tmp_reg_or_ssa_name (tree type
, gimple
*stmt
)
/* NOTE(review): the return-type line, the enclosing braces and some blank
   lines of this definition are missing from this extracted view; the
   statements below are otherwise unchanged.  */
/* In SSA form, create an SSA name whose defining statement is STMT.  */
201 if (gimple_in_ssa_p (cfun
))
202 return make_ssa_name (type
, stmt
);
/* Otherwise fall back to a plain temporary register of TYPE.  */
204 return create_tmp_reg (type
);
207 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
208 acceptable form for is_gimple_min_invariant.
209 FROM_DECL (if non-NULL) specifies the variable whose constructor contains CVAL. */
212 canonicalize_constructor_val (tree cval
, tree from_decl
)
214 if (CONSTANT_CLASS_P (cval
))
217 tree orig_cval
= cval
;
219 if (TREE_CODE (cval
) == POINTER_PLUS_EXPR
220 && TREE_CODE (TREE_OPERAND (cval
, 1)) == INTEGER_CST
)
222 tree ptr
= TREE_OPERAND (cval
, 0);
223 if (is_gimple_min_invariant (ptr
))
224 cval
= build1_loc (EXPR_LOCATION (cval
),
225 ADDR_EXPR
, TREE_TYPE (ptr
),
226 fold_build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (ptr
)),
228 fold_convert (ptr_type_node
,
229 TREE_OPERAND (cval
, 1))));
231 if (TREE_CODE (cval
) == ADDR_EXPR
)
233 tree base
= NULL_TREE
;
234 if (TREE_CODE (TREE_OPERAND (cval
, 0)) == COMPOUND_LITERAL_EXPR
)
236 base
= COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval
, 0));
238 TREE_OPERAND (cval
, 0) = base
;
241 base
= get_base_address (TREE_OPERAND (cval
, 0));
245 if (VAR_OR_FUNCTION_DECL_P (base
)
246 && !can_refer_decl_in_current_unit_p (base
, from_decl
))
248 if (TREE_TYPE (base
) == error_mark_node
)
251 /* ??? We should be able to assert that TREE_ADDRESSABLE is set,
252 but since the use can be in a debug stmt we can't. */
254 else if (TREE_CODE (base
) == FUNCTION_DECL
)
256 /* Make sure we create a cgraph node for functions we'll reference.
257 They can be non-existent if the reference comes from an entry
258 of an external vtable for example. */
259 cgraph_node::get_create (base
);
261 /* Fixup types in global initializers. */
262 if (TREE_TYPE (TREE_TYPE (cval
)) != TREE_TYPE (TREE_OPERAND (cval
, 0)))
263 cval
= build_fold_addr_expr (TREE_OPERAND (cval
, 0));
265 if (!useless_type_conversion_p (TREE_TYPE (orig_cval
), TREE_TYPE (cval
)))
266 cval
= fold_convert (TREE_TYPE (orig_cval
), cval
);
269 /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0. */
270 if (TREE_CODE (cval
) == INTEGER_CST
)
272 if (TREE_OVERFLOW_P (cval
))
273 cval
= drop_tree_overflow (cval
);
274 if (!useless_type_conversion_p (TREE_TYPE (orig_cval
), TREE_TYPE (cval
)))
275 cval
= fold_convert (TREE_TYPE (orig_cval
), cval
);
281 /* If SYM is a constant variable with known value, return the value.
282 NULL_TREE is returned otherwise. */
285 get_symbol_constant_value (tree sym
)
287 tree val
= ctor_for_folding (sym
);
288 if (val
!= error_mark_node
)
292 val
= canonicalize_constructor_val (unshare_expr (val
), sym
);
293 if (val
&& is_gimple_min_invariant (val
))
298 /* Variables declared 'const' without an initializer
299 have zero as the initializer if they may not be
300 overridden at link or run time. */
302 && is_gimple_reg_type (TREE_TYPE (sym
)))
303 return build_zero_cst (TREE_TYPE (sym
));
311 /* Subroutine of fold_stmt. We perform constant folding of the
312 memory reference tree EXPR. */
315 maybe_fold_reference (tree expr
)
317 tree result
= NULL_TREE
;
319 if ((TREE_CODE (expr
) == VIEW_CONVERT_EXPR
320 || TREE_CODE (expr
) == REALPART_EXPR
321 || TREE_CODE (expr
) == IMAGPART_EXPR
)
322 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
323 result
= fold_unary_loc (EXPR_LOCATION (expr
),
326 TREE_OPERAND (expr
, 0));
327 else if (TREE_CODE (expr
) == BIT_FIELD_REF
328 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
329 result
= fold_ternary_loc (EXPR_LOCATION (expr
),
332 TREE_OPERAND (expr
, 0),
333 TREE_OPERAND (expr
, 1),
334 TREE_OPERAND (expr
, 2));
336 result
= fold_const_aggregate_ref (expr
);
338 if (result
&& is_gimple_min_invariant (result
))
344 /* Return true if EXPR is an acceptable right-hand-side for a
345 GIMPLE assignment. We validate the entire tree, not just
346 the root node, thus catching expressions that embed complex
347 operands that are not permitted in GIMPLE. This function
348 is needed because the folding routines in fold-const.c
349 may return such expressions in some cases, e.g., an array
350 access with an embedded index addition. It may make more
351 sense to have folding routines that are sensitive to the
352 constraints on GIMPLE operands, rather than abandoning
353 any attempt to fold if the usual folding turns out to be too
357 valid_gimple_rhs_p (tree expr
)
359 enum tree_code code
= TREE_CODE (expr
);
361 switch (TREE_CODE_CLASS (code
))
363 case tcc_declaration
:
364 if (!is_gimple_variable (expr
))
369 /* All constants are ok. */
373 /* GENERIC allows comparisons with non-boolean types, reject
374 those for GIMPLE. Let vector-typed comparisons pass - rules
375 for GENERIC and GIMPLE are the same here. */
376 if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr
))
377 && (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
378 || TYPE_PRECISION (TREE_TYPE (expr
)) == 1))
379 && ! VECTOR_TYPE_P (TREE_TYPE (expr
)))
384 if (!is_gimple_val (TREE_OPERAND (expr
, 0))
385 || !is_gimple_val (TREE_OPERAND (expr
, 1)))
390 if (!is_gimple_val (TREE_OPERAND (expr
, 0)))
400 if (is_gimple_min_invariant (expr
))
402 t
= TREE_OPERAND (expr
, 0);
403 while (handled_component_p (t
))
405 /* ??? More checks needed, see the GIMPLE verifier. */
406 if ((TREE_CODE (t
) == ARRAY_REF
407 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
408 && !is_gimple_val (TREE_OPERAND (t
, 1)))
410 t
= TREE_OPERAND (t
, 0);
412 if (!is_gimple_id (t
))
418 if (get_gimple_rhs_class (code
) == GIMPLE_TERNARY_RHS
)
420 if ((code
== COND_EXPR
421 ? !is_gimple_condexpr (TREE_OPERAND (expr
, 0))
422 : !is_gimple_val (TREE_OPERAND (expr
, 0)))
423 || !is_gimple_val (TREE_OPERAND (expr
, 1))
424 || !is_gimple_val (TREE_OPERAND (expr
, 2)))
435 case tcc_exceptional
:
436 if (code
== CONSTRUCTOR
)
440 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr
), i
, elt
)
441 if (!is_gimple_val (elt
))
445 if (code
!= SSA_NAME
)
450 if (code
== BIT_FIELD_REF
)
451 return is_gimple_val (TREE_OPERAND (expr
, 0));
462 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
463 replacement rhs for the statement or NULL_TREE if no simplification
464 could be made. It is assumed that the operands have been previously
468 fold_gimple_assign (gimple_stmt_iterator
*si
)
470 gimple
*stmt
= gsi_stmt (*si
);
471 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
472 location_t loc
= gimple_location (stmt
);
474 tree result
= NULL_TREE
;
476 switch (get_gimple_rhs_class (subcode
))
478 case GIMPLE_SINGLE_RHS
:
480 tree rhs
= gimple_assign_rhs1 (stmt
);
482 if (TREE_CLOBBER_P (rhs
))
485 if (REFERENCE_CLASS_P (rhs
))
486 return maybe_fold_reference (rhs
);
488 else if (TREE_CODE (rhs
) == OBJ_TYPE_REF
)
490 tree val
= OBJ_TYPE_REF_EXPR (rhs
);
491 if (is_gimple_min_invariant (val
))
493 else if (flag_devirtualize
&& virtual_method_call_p (rhs
))
496 vec
<cgraph_node
*>targets
497 = possible_polymorphic_call_targets (rhs
, stmt
, &final
);
498 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
500 if (dump_enabled_p ())
502 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, stmt
,
503 "resolving virtual function address "
504 "reference to function %s\n",
505 targets
.length () == 1
506 ? targets
[0]->name ()
509 if (targets
.length () == 1)
511 val
= fold_convert (TREE_TYPE (val
),
512 build_fold_addr_expr_loc
513 (loc
, targets
[0]->decl
));
514 STRIP_USELESS_TYPE_CONVERSION (val
);
517 /* We cannot use __builtin_unreachable here because it
518 cannot have address taken. */
519 val
= build_int_cst (TREE_TYPE (val
), 0);
525 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
527 tree ref
= TREE_OPERAND (rhs
, 0);
528 if (TREE_CODE (ref
) == MEM_REF
529 && integer_zerop (TREE_OPERAND (ref
, 1)))
531 result
= TREE_OPERAND (ref
, 0);
532 if (!useless_type_conversion_p (TREE_TYPE (rhs
),
534 result
= build1 (NOP_EXPR
, TREE_TYPE (rhs
), result
);
539 else if (TREE_CODE (rhs
) == CONSTRUCTOR
540 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
)
542 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
546 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
547 if (! CONSTANT_CLASS_P (val
))
550 return build_vector_from_ctor (TREE_TYPE (rhs
),
551 CONSTRUCTOR_ELTS (rhs
));
554 else if (DECL_P (rhs
)
555 && is_gimple_reg_type (TREE_TYPE (rhs
)))
556 return get_symbol_constant_value (rhs
);
560 case GIMPLE_UNARY_RHS
:
563 case GIMPLE_BINARY_RHS
:
566 case GIMPLE_TERNARY_RHS
:
567 result
= fold_ternary_loc (loc
, subcode
,
568 TREE_TYPE (gimple_assign_lhs (stmt
)),
569 gimple_assign_rhs1 (stmt
),
570 gimple_assign_rhs2 (stmt
),
571 gimple_assign_rhs3 (stmt
));
575 STRIP_USELESS_TYPE_CONVERSION (result
);
576 if (valid_gimple_rhs_p (result
))
581 case GIMPLE_INVALID_RHS
:
589 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
590 adjusting the replacement stmts location and virtual operands.
591 If the statement has a lhs the last stmt in the sequence is expected
592 to assign to that lhs. */
595 gsi_replace_with_seq_vops (gimple_stmt_iterator
*si_p
, gimple_seq stmts
)
597 gimple
*stmt
= gsi_stmt (*si_p
);
599 if (gimple_has_location (stmt
))
600 annotate_all_with_location (stmts
, gimple_location (stmt
));
602 /* First iterate over the replacement statements backward, assigning
603 virtual operands to their defining statements. */
604 gimple
*laststore
= NULL
;
605 for (gimple_stmt_iterator i
= gsi_last (stmts
);
606 !gsi_end_p (i
); gsi_prev (&i
))
608 gimple
*new_stmt
= gsi_stmt (i
);
609 if ((gimple_assign_single_p (new_stmt
)
610 && !is_gimple_reg (gimple_assign_lhs (new_stmt
)))
611 || (is_gimple_call (new_stmt
)
612 && (gimple_call_flags (new_stmt
)
613 & (ECF_NOVOPS
| ECF_PURE
| ECF_CONST
| ECF_NORETURN
)) == 0))
617 vdef
= gimple_vdef (stmt
);
619 vdef
= make_ssa_name (gimple_vop (cfun
), new_stmt
);
620 gimple_set_vdef (new_stmt
, vdef
);
621 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
622 SSA_NAME_DEF_STMT (vdef
) = new_stmt
;
623 laststore
= new_stmt
;
627 /* Second iterate over the statements forward, assigning virtual
628 operands to their uses. */
629 tree reaching_vuse
= gimple_vuse (stmt
);
630 for (gimple_stmt_iterator i
= gsi_start (stmts
);
631 !gsi_end_p (i
); gsi_next (&i
))
633 gimple
*new_stmt
= gsi_stmt (i
);
634 /* If the new statement possibly has a VUSE, update it with exact SSA
635 name we know will reach this one. */
636 if (gimple_has_mem_ops (new_stmt
))
637 gimple_set_vuse (new_stmt
, reaching_vuse
);
638 gimple_set_modified (new_stmt
, true);
639 if (gimple_vdef (new_stmt
))
640 reaching_vuse
= gimple_vdef (new_stmt
);
643 /* If the new sequence does not do a store release the virtual
644 definition of the original statement. */
646 && reaching_vuse
== gimple_vuse (stmt
))
648 tree vdef
= gimple_vdef (stmt
);
650 && TREE_CODE (vdef
) == SSA_NAME
)
652 unlink_stmt_vdef (stmt
);
653 release_ssa_name (vdef
);
657 /* Finally replace the original statement with the sequence. */
658 gsi_replace_with_seq (si_p
, stmts
, false);
661 /* Helper function for update_gimple_call and
662 gimplify_and_update_call_from_tree. A GIMPLE_CALL STMT is being replaced
663 with GIMPLE_CALL NEW_STMT. */
666 finish_update_gimple_call (gimple_stmt_iterator
*si_p
, gimple
*new_stmt
,
/* NOTE(review): the trailing parameter (presumably the STMT being replaced),
   the return type and the function braces are missing from this extracted
   view; the statements below are otherwise unchanged.  */
/* Carry the LHS over to the replacement call.  */
669 tree lhs
= gimple_call_lhs (stmt
);
670 gimple_call_set_lhs (new_stmt
, lhs
);
/* If the LHS is an SSA name, NEW_STMT becomes its defining statement.  */
671 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
672 SSA_NAME_DEF_STMT (lhs
) = new_stmt
;
/* Preserve virtual operands and source location of the old call.  */
673 gimple_move_vops (new_stmt
, stmt
);
674 gimple_set_location (new_stmt
, gimple_location (stmt
));
/* Keep the old call's lexical block unless one is already set.  */
675 if (gimple_block (new_stmt
) == NULL_TREE
)
676 gimple_set_block (new_stmt
, gimple_block (stmt
));
/* Swap the statement in place without updating EH info.  */
677 gsi_replace (si_p
, new_stmt
, false);
680 /* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
681 with number of arguments NARGS, where the arguments in GIMPLE form
682 follow NARGS argument. */
685 update_gimple_call (gimple_stmt_iterator
*si_p
, tree fn
, int nargs
, ...)
/* NOTE(review): the return type, braces, the `va_list ap;` declaration and
   the matching `va_end (ap);` are missing from this extracted view; the
   statements below are otherwise unchanged.  */
688 gcall
*new_stmt
, *stmt
= as_a
<gcall
*> (gsi_stmt (*si_p
));
/* The statement being replaced must itself be a call.  */
690 gcc_assert (is_gimple_call (stmt
));
/* Build the new call to FN from the NARGS variadic arguments.  */
691 va_start (ap
, nargs
);
692 new_stmt
= gimple_build_call_valist (fn
, nargs
, ap
);
/* Transfer lhs, vops, location and block, then replace in place.  */
693 finish_update_gimple_call (si_p
, new_stmt
, stmt
);
698 /* Return true if EXPR is a CALL_EXPR suitable for representation
699 as a single GIMPLE_CALL statement. If the arguments require
700 further gimplification, return false. */
703 valid_gimple_call_p (tree expr
)
707 if (TREE_CODE (expr
) != CALL_EXPR
)
710 nargs
= call_expr_nargs (expr
);
711 for (i
= 0; i
< nargs
; i
++)
713 tree arg
= CALL_EXPR_ARG (expr
, i
);
714 if (is_gimple_reg_type (TREE_TYPE (arg
)))
716 if (!is_gimple_val (arg
))
720 if (!is_gimple_lvalue (arg
))
727 /* Convert EXPR into a GIMPLE value suitable for substitution on the
728 RHS of an assignment. Insert the necessary statements before
729 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
730 is replaced. If the call is expected to produce a result, then it
731 is replaced by an assignment of the new RHS to the result variable.
732 If the result is to be ignored, then the call is replaced by a
733 GIMPLE_NOP. A proper VDEF chain is retained by making the first
734 VUSE and the last VDEF of the whole sequence be the same as the replaced
735 statement and using new SSA names for stores in between. */
738 gimplify_and_update_call_from_tree (gimple_stmt_iterator
*si_p
, tree expr
)
741 gimple
*stmt
, *new_stmt
;
742 gimple_stmt_iterator i
;
743 gimple_seq stmts
= NULL
;
745 stmt
= gsi_stmt (*si_p
);
747 gcc_assert (is_gimple_call (stmt
));
749 if (valid_gimple_call_p (expr
))
751 /* The call has simplified to another call. */
752 tree fn
= CALL_EXPR_FN (expr
);
754 unsigned nargs
= call_expr_nargs (expr
);
755 vec
<tree
> args
= vNULL
;
761 args
.safe_grow_cleared (nargs
, true);
763 for (i
= 0; i
< nargs
; i
++)
764 args
[i
] = CALL_EXPR_ARG (expr
, i
);
767 new_stmt
= gimple_build_call_vec (fn
, args
);
768 finish_update_gimple_call (si_p
, new_stmt
, stmt
);
773 lhs
= gimple_call_lhs (stmt
);
774 if (lhs
== NULL_TREE
)
776 push_gimplify_context (gimple_in_ssa_p (cfun
));
777 gimplify_and_add (expr
, &stmts
);
778 pop_gimplify_context (NULL
);
780 /* We can end up with folding a memcpy of an empty class assignment
781 which gets optimized away by C++ gimplification. */
782 if (gimple_seq_empty_p (stmts
))
784 if (gimple_in_ssa_p (cfun
))
786 unlink_stmt_vdef (stmt
);
789 gsi_replace (si_p
, gimple_build_nop (), false);
795 tree tmp
= force_gimple_operand (expr
, &stmts
, false, NULL_TREE
);
796 new_stmt
= gimple_build_assign (lhs
, tmp
);
797 i
= gsi_last (stmts
);
798 gsi_insert_after_without_update (&i
, new_stmt
,
799 GSI_CONTINUE_LINKING
);
802 gsi_replace_with_seq_vops (si_p
, stmts
);
806 /* Replace the call at *GSI with the gimple value VAL. */
809 replace_call_with_value (gimple_stmt_iterator
*gsi
, tree val
)
/* NOTE(review): the return type, braces, the declaration of REPL and the
   branch structure around lines 818/821 (presumably `if (lhs) ... else ...`)
   are missing from this extracted view; the statements below are otherwise
   unchanged.  */
811 gimple
*stmt
= gsi_stmt (*gsi
);
812 tree lhs
= gimple_call_lhs (stmt
);
/* Convert VAL to the LHS type if the types are not trivially compatible.  */
816 if (!useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (val
)))
817 val
= fold_convert (TREE_TYPE (lhs
), val
);
/* With an LHS, the call becomes a plain assignment of VAL.  */
818 repl
= gimple_build_assign (lhs
, val
);
/* Without an LHS, the call simply disappears.  */
821 repl
= gimple_build_nop ();
/* The replacement does no store, so release the call's virtual def.  */
822 tree vdef
= gimple_vdef (stmt
);
823 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
825 unlink_stmt_vdef (stmt
);
826 release_ssa_name (vdef
);
828 gsi_replace (gsi
, repl
, false);
831 /* Replace the call at *GSI with the new call REPL and fold that
835 replace_call_with_call_and_fold (gimple_stmt_iterator
*gsi
, gimple
*repl
)
/* NOTE(review): the return type, braces, and the trailing re-fold of the
   replacement (suggested by the function's name and the comment at line 831)
   are missing from this extracted view; the statements below are otherwise
   unchanged.  */
837 gimple
*stmt
= gsi_stmt (*gsi
);
/* REPL inherits the old call's lhs, location and virtual operands.  */
838 gimple_call_set_lhs (repl
, gimple_call_lhs (stmt
));
839 gimple_set_location (repl
, gimple_location (stmt
));
840 gimple_move_vops (repl
, stmt
);
/* Swap the statement in place without updating EH info.  */
841 gsi_replace (gsi
, repl
, false);
845 /* Return true if VAR is a VAR_DECL or a component thereof. */
848 var_decl_component_p (tree var
)
/* NOTE(review): the return type, braces and the initialization of INNER
   (presumably `tree inner = var;`) are missing from this extracted view;
   the statements below are otherwise unchanged.  */
/* Strip handled components (COMPONENT_REF, ARRAY_REF, ...) down to the
   base object.  */
851 while (handled_component_p (inner
))
852 inner
= TREE_OPERAND (inner
, 0);
/* Accept a declaration, or a MEM_REF whose base address is taken with
   ADDR_EXPR (i.e. still refers to a declared object).  */
853 return (DECL_P (inner
)
854 || (TREE_CODE (inner
) == MEM_REF
855 && TREE_CODE (TREE_OPERAND (inner
, 0)) == ADDR_EXPR
));
858 /* Return TRUE if the SIZE argument, representing the size of an
859 object, is in a range of values of which exactly zero is valid. */
862 size_must_be_zero_p (tree size
)
864 if (integer_zerop (size
))
867 if (TREE_CODE (size
) != SSA_NAME
|| !INTEGRAL_TYPE_P (TREE_TYPE (size
)))
870 tree type
= TREE_TYPE (size
);
871 int prec
= TYPE_PRECISION (type
);
873 /* Compute the value of SSIZE_MAX, the largest positive value that
874 can be stored in ssize_t, the signed counterpart of size_t. */
875 wide_int ssize_max
= wi::lshift (wi::one (prec
), prec
- 1) - 1;
876 value_range
valid_range (build_int_cst (type
, 0),
877 wide_int_to_tree (type
, ssize_max
));
880 get_range_query (cfun
)->range_of_expr (vr
, size
);
882 get_global_range_query ()->range_of_expr (vr
, size
);
883 if (vr
.undefined_p ())
884 vr
.set_varying (TREE_TYPE (size
));
885 vr
.intersect (&valid_range
);
889 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
890 diagnose (otherwise undefined) overlapping copies without preventing
891 folding. When folded, GCC guarantees that overlapping memcpy has
892 the same semantics as memmove. Call to the library memcpy need not
893 provide the same guarantee. Return false if no simplification can
897 gimple_fold_builtin_memory_op (gimple_stmt_iterator
*gsi
,
898 tree dest
, tree src
, enum built_in_function code
)
900 gimple
*stmt
= gsi_stmt (*gsi
);
901 tree lhs
= gimple_call_lhs (stmt
);
902 tree len
= gimple_call_arg (stmt
, 2);
903 location_t loc
= gimple_location (stmt
);
905 /* If the LEN parameter is a constant zero or in range where
906 the only valid value is zero, return DEST. */
907 if (size_must_be_zero_p (len
))
910 if (gimple_call_lhs (stmt
))
911 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
913 repl
= gimple_build_nop ();
914 tree vdef
= gimple_vdef (stmt
);
915 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
917 unlink_stmt_vdef (stmt
);
918 release_ssa_name (vdef
);
920 gsi_replace (gsi
, repl
, false);
924 /* If SRC and DEST are the same (and not volatile), return
925 DEST{,+LEN,+LEN-1}. */
926 if (operand_equal_p (src
, dest
, 0))
928 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
929 It's safe and may even be emitted by GCC itself (see bug
931 unlink_stmt_vdef (stmt
);
932 if (gimple_vdef (stmt
) && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
933 release_ssa_name (gimple_vdef (stmt
));
936 gsi_replace (gsi
, gimple_build_nop (), false);
943 /* We cannot (easily) change the type of the copy if it is a storage
944 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
945 modify the storage order of objects (see storage_order_barrier_p). */
947 = POINTER_TYPE_P (TREE_TYPE (src
))
948 ? TREE_TYPE (TREE_TYPE (src
)) : NULL_TREE
;
950 = POINTER_TYPE_P (TREE_TYPE (dest
))
951 ? TREE_TYPE (TREE_TYPE (dest
)) : NULL_TREE
;
952 tree destvar
, srcvar
, srcoff
;
953 unsigned int src_align
, dest_align
;
954 unsigned HOST_WIDE_INT tmp_len
;
957 /* Build accesses at offset zero with a ref-all character type. */
959 = build_int_cst (build_pointer_type_for_mode (char_type_node
,
962 /* If we can perform the copy efficiently with first doing all loads and
963 then all stores inline it that way. Currently efficiently means that
964 we can load all the memory with a single set operation and that the
965 total size is less than MOVE_MAX * MOVE_RATIO. */
966 src_align
= get_pointer_alignment (src
);
967 dest_align
= get_pointer_alignment (dest
);
968 if (tree_fits_uhwi_p (len
)
971 * MOVE_RATIO (optimize_function_for_size_p (cfun
))))
973 /* FIXME: Don't transform copies from strings with known length.
974 Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
975 from being handled, and the case was XFAILed for that reason.
976 Now that it is handled and the XFAIL removed, as soon as other
977 strlenopt tests that rely on it for passing are adjusted, this
978 hack can be removed. */
979 && !c_strlen (src
, 1)
980 && !((tmp_str
= getbyterep (src
, &tmp_len
)) != NULL
981 && memchr (tmp_str
, 0, tmp_len
) == NULL
)
983 && AGGREGATE_TYPE_P (srctype
)
984 && TYPE_REVERSE_STORAGE_ORDER (srctype
))
986 && AGGREGATE_TYPE_P (desttype
)
987 && TYPE_REVERSE_STORAGE_ORDER (desttype
)))
989 unsigned ilen
= tree_to_uhwi (len
);
990 if (pow2p_hwi (ilen
))
992 /* Detect out-of-bounds accesses without issuing warnings.
993 Avoid folding out-of-bounds copies but to avoid false
994 positives for unreachable code defer warning until after
995 DCE has worked its magic.
996 -Wrestrict is still diagnosed. */
997 if (int warning
= check_bounds_or_overlap (as_a
<gcall
*>(stmt
),
1000 if (warning
!= OPT_Wrestrict
)
1003 scalar_int_mode mode
;
1004 if (int_mode_for_size (ilen
* 8, 0).exists (&mode
)
1005 && GET_MODE_SIZE (mode
) * BITS_PER_UNIT
== ilen
* 8
1006 && have_insn_for (SET
, mode
)
1007 /* If the destination pointer is not aligned we must be able
1008 to emit an unaligned store. */
1009 && (dest_align
>= GET_MODE_ALIGNMENT (mode
)
1010 || !targetm
.slow_unaligned_access (mode
, dest_align
)
1011 || (optab_handler (movmisalign_optab
, mode
)
1012 != CODE_FOR_nothing
)))
1014 tree type
= build_nonstandard_integer_type (ilen
* 8, 1);
1015 tree srctype
= type
;
1016 tree desttype
= type
;
1017 if (src_align
< GET_MODE_ALIGNMENT (mode
))
1018 srctype
= build_aligned_type (type
, src_align
);
1019 tree srcmem
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1020 tree tem
= fold_const_aggregate_ref (srcmem
);
1023 else if (src_align
< GET_MODE_ALIGNMENT (mode
)
1024 && targetm
.slow_unaligned_access (mode
, src_align
)
1025 && (optab_handler (movmisalign_optab
, mode
)
1026 == CODE_FOR_nothing
))
1031 if (is_gimple_reg_type (TREE_TYPE (srcmem
)))
1033 new_stmt
= gimple_build_assign (NULL_TREE
, srcmem
);
1035 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem
),
1037 gimple_assign_set_lhs (new_stmt
, srcmem
);
1038 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1039 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1041 if (dest_align
< GET_MODE_ALIGNMENT (mode
))
1042 desttype
= build_aligned_type (type
, dest_align
);
1044 = gimple_build_assign (fold_build2 (MEM_REF
, desttype
,
1047 gimple_move_vops (new_stmt
, stmt
);
1050 gsi_replace (gsi
, new_stmt
, false);
1053 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1060 if (code
== BUILT_IN_MEMMOVE
)
1062 /* Both DEST and SRC must be pointer types.
1063 ??? This is what old code did. Is the testing for pointer types
1066 If either SRC is readonly or length is 1, we can use memcpy. */
1067 if (!dest_align
|| !src_align
)
1069 if (readonly_data_expr (src
)
1070 || (tree_fits_uhwi_p (len
)
1071 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
1072 >= tree_to_uhwi (len
))))
1074 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1077 gimple_call_set_fndecl (stmt
, fn
);
1078 gimple_call_set_arg (stmt
, 0, dest
);
1079 gimple_call_set_arg (stmt
, 1, src
);
1084 /* If *src and *dest can't overlap, optimize into memcpy as well. */
1085 if (TREE_CODE (src
) == ADDR_EXPR
1086 && TREE_CODE (dest
) == ADDR_EXPR
)
1088 tree src_base
, dest_base
, fn
;
1089 poly_int64 src_offset
= 0, dest_offset
= 0;
1090 poly_uint64 maxsize
;
1092 srcvar
= TREE_OPERAND (src
, 0);
1093 src_base
= get_addr_base_and_unit_offset (srcvar
, &src_offset
);
1094 if (src_base
== NULL
)
1096 destvar
= TREE_OPERAND (dest
, 0);
1097 dest_base
= get_addr_base_and_unit_offset (destvar
,
1099 if (dest_base
== NULL
)
1100 dest_base
= destvar
;
1101 if (!poly_int_tree_p (len
, &maxsize
))
1103 if (SSA_VAR_P (src_base
)
1104 && SSA_VAR_P (dest_base
))
1106 if (operand_equal_p (src_base
, dest_base
, 0)
1107 && ranges_maybe_overlap_p (src_offset
, maxsize
,
1108 dest_offset
, maxsize
))
1111 else if (TREE_CODE (src_base
) == MEM_REF
1112 && TREE_CODE (dest_base
) == MEM_REF
)
1114 if (! operand_equal_p (TREE_OPERAND (src_base
, 0),
1115 TREE_OPERAND (dest_base
, 0), 0))
1117 poly_offset_int full_src_offset
1118 = mem_ref_offset (src_base
) + src_offset
;
1119 poly_offset_int full_dest_offset
1120 = mem_ref_offset (dest_base
) + dest_offset
;
1121 if (ranges_maybe_overlap_p (full_src_offset
, maxsize
,
1122 full_dest_offset
, maxsize
))
1128 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1131 gimple_call_set_fndecl (stmt
, fn
);
1132 gimple_call_set_arg (stmt
, 0, dest
);
1133 gimple_call_set_arg (stmt
, 1, src
);
1138 /* If the destination and source do not alias optimize into
1140 if ((is_gimple_min_invariant (dest
)
1141 || TREE_CODE (dest
) == SSA_NAME
)
1142 && (is_gimple_min_invariant (src
)
1143 || TREE_CODE (src
) == SSA_NAME
))
1146 ao_ref_init_from_ptr_and_size (&destr
, dest
, len
);
1147 ao_ref_init_from_ptr_and_size (&srcr
, src
, len
);
1148 if (!refs_may_alias_p_1 (&destr
, &srcr
, false))
1151 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1154 gimple_call_set_fndecl (stmt
, fn
);
1155 gimple_call_set_arg (stmt
, 0, dest
);
1156 gimple_call_set_arg (stmt
, 1, src
);
1165 if (!tree_fits_shwi_p (len
))
1168 || (AGGREGATE_TYPE_P (srctype
)
1169 && TYPE_REVERSE_STORAGE_ORDER (srctype
)))
1172 || (AGGREGATE_TYPE_P (desttype
)
1173 && TYPE_REVERSE_STORAGE_ORDER (desttype
)))
1175 /* In the following try to find a type that is most natural to be
1176 used for the memcpy source and destination and that allows
1177 the most optimization when memcpy is turned into a plain assignment
1178 using that type. In theory we could always use a char[len] type
1179 but that only gains us that the destination and source possibly
1180 no longer will have their address taken. */
1181 if (TREE_CODE (srctype
) == ARRAY_TYPE
1182 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
1183 srctype
= TREE_TYPE (srctype
);
1184 if (TREE_CODE (desttype
) == ARRAY_TYPE
1185 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
1186 desttype
= TREE_TYPE (desttype
);
1187 if (TREE_ADDRESSABLE (srctype
)
1188 || TREE_ADDRESSABLE (desttype
))
1191 /* Make sure we are not copying using a floating-point mode or
1192 a type whose size possibly does not match its precision. */
1193 if (FLOAT_MODE_P (TYPE_MODE (desttype
))
1194 || TREE_CODE (desttype
) == BOOLEAN_TYPE
1195 || TREE_CODE (desttype
) == ENUMERAL_TYPE
)
1196 desttype
= bitwise_type_for_mode (TYPE_MODE (desttype
));
1197 if (FLOAT_MODE_P (TYPE_MODE (srctype
))
1198 || TREE_CODE (srctype
) == BOOLEAN_TYPE
1199 || TREE_CODE (srctype
) == ENUMERAL_TYPE
)
1200 srctype
= bitwise_type_for_mode (TYPE_MODE (srctype
));
1208 src_align
= get_pointer_alignment (src
);
1209 dest_align
= get_pointer_alignment (dest
);
1211 /* Choose between src and destination type for the access based
1212 on alignment, whether the access constitutes a register access
1213 and whether it may actually expose a declaration for SSA rewrite
1214 or SRA decomposition. Also try to expose a string constant, we
1215 might be able to concatenate several of them later into a single
1217 destvar
= NULL_TREE
;
1219 if (TREE_CODE (dest
) == ADDR_EXPR
1220 && var_decl_component_p (TREE_OPERAND (dest
, 0))
1221 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
)
1222 && dest_align
>= TYPE_ALIGN (desttype
)
1223 && (is_gimple_reg_type (desttype
)
1224 || src_align
>= TYPE_ALIGN (desttype
)))
1225 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1226 else if (TREE_CODE (src
) == ADDR_EXPR
1227 && var_decl_component_p (TREE_OPERAND (src
, 0))
1228 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
)
1229 && src_align
>= TYPE_ALIGN (srctype
)
1230 && (is_gimple_reg_type (srctype
)
1231 || dest_align
>= TYPE_ALIGN (srctype
)))
1232 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1233 /* FIXME: Don't transform copies from strings with known original length.
1234 As soon as strlenopt tests that rely on it for passing are adjusted,
1235 this hack can be removed. */
1236 else if (gimple_call_alloca_for_var_p (stmt
)
1237 && (srcvar
= string_constant (src
, &srcoff
, NULL
, NULL
))
1238 && integer_zerop (srcoff
)
1239 && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar
)), len
)
1240 && dest_align
>= TYPE_ALIGN (TREE_TYPE (srcvar
)))
1241 srctype
= TREE_TYPE (srcvar
);
1245 /* Now that we chose an access type express the other side in
1246 terms of it if the target allows that with respect to alignment
1248 if (srcvar
== NULL_TREE
)
1250 if (src_align
>= TYPE_ALIGN (desttype
))
1251 srcvar
= fold_build2 (MEM_REF
, desttype
, src
, off0
);
1254 if (STRICT_ALIGNMENT
)
1256 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
1258 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1261 else if (destvar
== NULL_TREE
)
1263 if (dest_align
>= TYPE_ALIGN (srctype
))
1264 destvar
= fold_build2 (MEM_REF
, srctype
, dest
, off0
);
1267 if (STRICT_ALIGNMENT
)
1269 desttype
= build_aligned_type (TYPE_MAIN_VARIANT (srctype
),
1271 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1275 /* Same as above, detect out-of-bounds accesses without issuing
1276 warnings. Avoid folding out-of-bounds copies but to avoid
1277 false positives for unreachable code defer warning until
1278 after DCE has worked its magic.
1279 -Wrestrict is still diagnosed. */
1280 if (int warning
= check_bounds_or_overlap (as_a
<gcall
*>(stmt
),
1281 dest
, src
, len
, len
,
1283 if (warning
!= OPT_Wrestrict
)
1287 if (is_gimple_reg_type (TREE_TYPE (srcvar
)))
1289 tree tem
= fold_const_aggregate_ref (srcvar
);
1292 if (! is_gimple_min_invariant (srcvar
))
1294 new_stmt
= gimple_build_assign (NULL_TREE
, srcvar
);
1295 srcvar
= create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar
),
1297 gimple_assign_set_lhs (new_stmt
, srcvar
);
1298 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1299 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1301 new_stmt
= gimple_build_assign (destvar
, srcvar
);
1302 goto set_vop_and_replace
;
1305 /* We get an aggregate copy. If the source is a STRING_CST, then
1306 directly use its type to perform the copy. */
1307 if (TREE_CODE (srcvar
) == STRING_CST
)
1310 /* Or else, use an unsigned char[] type to perform the copy in order
1311 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
1312 types or float modes behavior on copying. */
1315 desttype
= build_array_type_nelts (unsigned_char_type_node
,
1316 tree_to_uhwi (len
));
1318 if (src_align
> TYPE_ALIGN (srctype
))
1319 srctype
= build_aligned_type (srctype
, src_align
);
1320 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1323 if (dest_align
> TYPE_ALIGN (desttype
))
1324 desttype
= build_aligned_type (desttype
, dest_align
);
1325 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1326 new_stmt
= gimple_build_assign (destvar
, srcvar
);
1328 set_vop_and_replace
:
1329 gimple_move_vops (new_stmt
, stmt
);
1332 gsi_replace (gsi
, new_stmt
, false);
1335 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1339 gimple_seq stmts
= NULL
;
1340 if (code
== BUILT_IN_MEMCPY
|| code
== BUILT_IN_MEMMOVE
)
1342 else if (code
== BUILT_IN_MEMPCPY
)
1344 len
= gimple_convert_to_ptrofftype (&stmts
, loc
, len
);
1345 dest
= gimple_build (&stmts
, loc
, POINTER_PLUS_EXPR
,
1346 TREE_TYPE (dest
), dest
, len
);
1351 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
1352 gimple
*repl
= gimple_build_assign (lhs
, dest
);
1353 gsi_replace (gsi
, repl
, false);
1357 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1358 to built-in memcmp (a, b, len). */
1361 gimple_fold_builtin_bcmp (gimple_stmt_iterator
*gsi
)
1363 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCMP
);
1368 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1370 gimple
*stmt
= gsi_stmt (*gsi
);
1371 tree a
= gimple_call_arg (stmt
, 0);
1372 tree b
= gimple_call_arg (stmt
, 1);
1373 tree len
= gimple_call_arg (stmt
, 2);
1375 gimple
*repl
= gimple_build_call (fn
, 3, a
, b
, len
);
1376 replace_call_with_call_and_fold (gsi
, repl
);
1381 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1382 to built-in memmove (dest, src, len). */
1385 gimple_fold_builtin_bcopy (gimple_stmt_iterator
*gsi
)
1387 tree fn
= builtin_decl_implicit (BUILT_IN_MEMMOVE
);
1392 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1393 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1394 len) into memmove (dest, src, len). */
1396 gimple
*stmt
= gsi_stmt (*gsi
);
1397 tree src
= gimple_call_arg (stmt
, 0);
1398 tree dest
= gimple_call_arg (stmt
, 1);
1399 tree len
= gimple_call_arg (stmt
, 2);
1401 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1402 gimple_call_set_fntype (as_a
<gcall
*> (stmt
), TREE_TYPE (fn
));
1403 replace_call_with_call_and_fold (gsi
, repl
);
1408 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1409 to built-in memset (dest, 0, len). */
1412 gimple_fold_builtin_bzero (gimple_stmt_iterator
*gsi
)
1414 tree fn
= builtin_decl_implicit (BUILT_IN_MEMSET
);
1419 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1421 gimple
*stmt
= gsi_stmt (*gsi
);
1422 tree dest
= gimple_call_arg (stmt
, 0);
1423 tree len
= gimple_call_arg (stmt
, 1);
1425 gimple_seq seq
= NULL
;
1426 gimple
*repl
= gimple_build_call (fn
, 3, dest
, integer_zero_node
, len
);
1427 gimple_seq_add_stmt_without_update (&seq
, repl
);
1428 gsi_replace_with_seq_vops (gsi
, seq
);
1434 /* Fold function call to builtin memset or bzero at *GSI setting the
1435 memory of size LEN to VAL. Return whether a simplification was made. */
1438 gimple_fold_builtin_memset (gimple_stmt_iterator
*gsi
, tree c
, tree len
)
1440 gimple
*stmt
= gsi_stmt (*gsi
);
1442 unsigned HOST_WIDE_INT length
, cval
;
1444 /* If the LEN parameter is zero, return DEST. */
1445 if (integer_zerop (len
))
1447 replace_call_with_value (gsi
, gimple_call_arg (stmt
, 0));
1451 if (! tree_fits_uhwi_p (len
))
1454 if (TREE_CODE (c
) != INTEGER_CST
)
1457 tree dest
= gimple_call_arg (stmt
, 0);
1459 if (TREE_CODE (var
) != ADDR_EXPR
)
1462 var
= TREE_OPERAND (var
, 0);
1463 if (TREE_THIS_VOLATILE (var
))
1466 etype
= TREE_TYPE (var
);
1467 if (TREE_CODE (etype
) == ARRAY_TYPE
)
1468 etype
= TREE_TYPE (etype
);
1470 if (!INTEGRAL_TYPE_P (etype
)
1471 && !POINTER_TYPE_P (etype
))
1474 if (! var_decl_component_p (var
))
1477 length
= tree_to_uhwi (len
);
1478 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype
)) != length
1479 || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype
))
1480 != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype
)))
1481 || get_pointer_alignment (dest
) / BITS_PER_UNIT
< length
)
1484 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
1487 if (!type_has_mode_precision_p (etype
))
1488 etype
= lang_hooks
.types
.type_for_mode (SCALAR_INT_TYPE_MODE (etype
),
1489 TYPE_UNSIGNED (etype
));
1491 if (integer_zerop (c
))
1495 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
1498 cval
= TREE_INT_CST_LOW (c
);
1502 cval
|= (cval
<< 31) << 1;
1505 var
= fold_build2 (MEM_REF
, etype
, dest
, build_int_cst (ptr_type_node
, 0));
1506 gimple
*store
= gimple_build_assign (var
, build_int_cst_type (etype
, cval
));
1507 gimple_move_vops (store
, stmt
);
1508 gimple_set_location (store
, gimple_location (stmt
));
1509 gsi_insert_before (gsi
, store
, GSI_SAME_STMT
);
1510 if (gimple_call_lhs (stmt
))
1512 gimple
*asgn
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
1513 gsi_replace (gsi
, asgn
, false);
1517 gimple_stmt_iterator gsi2
= *gsi
;
1519 gsi_remove (&gsi2
, true);
1525 /* Helper of get_range_strlen for ARG that is not an SSA_NAME. */
1528 get_range_strlen_tree (tree arg
, bitmap
*visited
, strlen_range_kind rkind
,
1529 c_strlen_data
*pdata
, unsigned eltsize
)
1531 gcc_assert (TREE_CODE (arg
) != SSA_NAME
);
1533 /* The length computed by this invocation of the function. */
1534 tree val
= NULL_TREE
;
1536 /* True if VAL is an optimistic (tight) bound determined from
1537 the size of the character array in which the string may be
1538 stored. In that case, the computed VAL is used to set
1540 bool tight_bound
= false;
1542 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1543 if (TREE_CODE (arg
) == ADDR_EXPR
1544 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
)
1546 tree op
= TREE_OPERAND (arg
, 0);
1547 if (integer_zerop (TREE_OPERAND (op
, 1)))
1549 tree aop0
= TREE_OPERAND (op
, 0);
1550 if (TREE_CODE (aop0
) == INDIRECT_REF
1551 && TREE_CODE (TREE_OPERAND (aop0
, 0)) == SSA_NAME
)
1552 return get_range_strlen (TREE_OPERAND (aop0
, 0), visited
, rkind
,
1555 else if (TREE_CODE (TREE_OPERAND (op
, 0)) == COMPONENT_REF
1556 && rkind
== SRK_LENRANGE
)
1558 /* Fail if an array is the last member of a struct object
1559 since it could be treated as a (fake) flexible array
1561 tree idx
= TREE_OPERAND (op
, 1);
1563 arg
= TREE_OPERAND (op
, 0);
1564 tree optype
= TREE_TYPE (arg
);
1565 if (tree dom
= TYPE_DOMAIN (optype
))
1566 if (tree bound
= TYPE_MAX_VALUE (dom
))
1567 if (TREE_CODE (bound
) == INTEGER_CST
1568 && TREE_CODE (idx
) == INTEGER_CST
1569 && tree_int_cst_lt (bound
, idx
))
1574 if (rkind
== SRK_INT_VALUE
)
1576 /* We are computing the maximum value (not string length). */
1578 if (TREE_CODE (val
) != INTEGER_CST
1579 || tree_int_cst_sgn (val
) < 0)
1584 c_strlen_data lendata
= { };
1585 val
= c_strlen (arg
, 1, &lendata
, eltsize
);
1587 if (!val
&& lendata
.decl
)
1589 /* ARG refers to an unterminated const character array.
1590 DATA.DECL with size DATA.LEN. */
1591 val
= lendata
.minlen
;
1592 pdata
->decl
= lendata
.decl
;
1596 /* Set if VAL represents the maximum length based on array size (set
1597 when exact length cannot be determined). */
1598 bool maxbound
= false;
1600 if (!val
&& rkind
== SRK_LENRANGE
)
1602 if (TREE_CODE (arg
) == ADDR_EXPR
)
1603 return get_range_strlen (TREE_OPERAND (arg
, 0), visited
, rkind
,
1606 if (TREE_CODE (arg
) == ARRAY_REF
)
1608 tree optype
= TREE_TYPE (TREE_OPERAND (arg
, 0));
1610 /* Determine the "innermost" array type. */
1611 while (TREE_CODE (optype
) == ARRAY_TYPE
1612 && TREE_CODE (TREE_TYPE (optype
)) == ARRAY_TYPE
)
1613 optype
= TREE_TYPE (optype
);
1615 /* Avoid arrays of pointers. */
1616 tree eltype
= TREE_TYPE (optype
);
1617 if (TREE_CODE (optype
) != ARRAY_TYPE
1618 || !INTEGRAL_TYPE_P (eltype
))
1621 /* Fail when the array bound is unknown or zero. */
1622 val
= TYPE_SIZE_UNIT (optype
);
1624 || TREE_CODE (val
) != INTEGER_CST
1625 || integer_zerop (val
))
1628 val
= fold_build2 (MINUS_EXPR
, TREE_TYPE (val
), val
,
1631 /* Set the minimum size to zero since the string in
1632 the array could have zero length. */
1633 pdata
->minlen
= ssize_int (0);
1637 else if (TREE_CODE (arg
) == COMPONENT_REF
1638 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg
, 1)))
1641 /* Use the type of the member array to determine the upper
1642 bound on the length of the array. This may be overly
1643 optimistic if the array itself isn't NUL-terminated and
1644 the caller relies on the subsequent member to contain
1645 the NUL but that would only be considered valid if
1646 the array were the last member of a struct. */
1648 tree fld
= TREE_OPERAND (arg
, 1);
1650 tree optype
= TREE_TYPE (fld
);
1652 /* Determine the "innermost" array type. */
1653 while (TREE_CODE (optype
) == ARRAY_TYPE
1654 && TREE_CODE (TREE_TYPE (optype
)) == ARRAY_TYPE
)
1655 optype
= TREE_TYPE (optype
);
1657 /* Fail when the array bound is unknown or zero. */
1658 val
= TYPE_SIZE_UNIT (optype
);
1660 || TREE_CODE (val
) != INTEGER_CST
1661 || integer_zerop (val
))
1663 val
= fold_build2 (MINUS_EXPR
, TREE_TYPE (val
), val
,
1666 /* Set the minimum size to zero since the string in
1667 the array could have zero length. */
1668 pdata
->minlen
= ssize_int (0);
1670 /* The array size determined above is an optimistic bound
1671 on the length. If the array isn't nul-terminated the
1672 length computed by the library function would be greater.
1673 Even though using strlen to cross the subobject boundary
1674 is undefined, avoid drawing conclusions from the member
1675 type about the length here. */
1678 else if (TREE_CODE (arg
) == MEM_REF
1679 && TREE_CODE (TREE_TYPE (arg
)) == ARRAY_TYPE
1680 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == INTEGER_TYPE
1681 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ADDR_EXPR
)
1683 /* Handle a MEM_REF into a DECL accessing an array of integers,
1684 being conservative about references to extern structures with
1685 flexible array members that can be initialized to arbitrary
1686 numbers of elements as an extension (static structs are okay).
1687 FIXME: Make this less conservative -- see
1688 component_ref_size in tree.c. */
1689 tree ref
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
1690 if ((TREE_CODE (ref
) == PARM_DECL
|| VAR_P (ref
))
1691 && (decl_binds_to_current_def_p (ref
)
1692 || !array_at_struct_end_p (arg
)))
1694 /* Fail if the offset is out of bounds. Such accesses
1695 should be diagnosed at some point. */
1696 val
= DECL_SIZE_UNIT (ref
);
1698 || TREE_CODE (val
) != INTEGER_CST
1699 || integer_zerop (val
))
1702 poly_offset_int psiz
= wi::to_offset (val
);
1703 poly_offset_int poff
= mem_ref_offset (arg
);
1704 if (known_le (psiz
, poff
))
1707 pdata
->minlen
= ssize_int (0);
1709 /* Subtract the offset and one for the terminating nul. */
1712 val
= wide_int_to_tree (TREE_TYPE (val
), psiz
);
1713 /* Since VAL reflects the size of a declared object
1714 rather the type of the access it is not a tight bound. */
1717 else if (TREE_CODE (arg
) == PARM_DECL
|| VAR_P (arg
))
1719 /* Avoid handling pointers to arrays. GCC might misuse
1720 a pointer to an array of one bound to point to an array
1721 object of a greater bound. */
1722 tree argtype
= TREE_TYPE (arg
);
1723 if (TREE_CODE (argtype
) == ARRAY_TYPE
)
1725 val
= TYPE_SIZE_UNIT (argtype
);
1727 || TREE_CODE (val
) != INTEGER_CST
1728 || integer_zerop (val
))
1730 val
= wide_int_to_tree (TREE_TYPE (val
),
1731 wi::sub (wi::to_wide (val
), 1));
1733 /* Set the minimum size to zero since the string in
1734 the array could have zero length. */
1735 pdata
->minlen
= ssize_int (0);
1744 /* Adjust the lower bound on the string length as necessary. */
1746 || (rkind
!= SRK_STRLEN
1747 && TREE_CODE (pdata
->minlen
) == INTEGER_CST
1748 && TREE_CODE (val
) == INTEGER_CST
1749 && tree_int_cst_lt (val
, pdata
->minlen
)))
1750 pdata
->minlen
= val
;
1752 if (pdata
->maxbound
&& TREE_CODE (pdata
->maxbound
) == INTEGER_CST
)
1754 /* Adjust the tighter (more optimistic) string length bound
1755 if necessary and proceed to adjust the more conservative
1757 if (TREE_CODE (val
) == INTEGER_CST
)
1759 if (tree_int_cst_lt (pdata
->maxbound
, val
))
1760 pdata
->maxbound
= val
;
1763 pdata
->maxbound
= val
;
1765 else if (pdata
->maxbound
|| maxbound
)
1766 /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
1767 if VAL corresponds to the maximum length determined based
1768 on the type of the object. */
1769 pdata
->maxbound
= val
;
1773 /* VAL computed above represents an optimistically tight bound
1774 on the length of the string based on the referenced object's
1775 or subobject's type. Determine the conservative upper bound
1776 based on the enclosing object's size if possible. */
1777 if (rkind
== SRK_LENRANGE
)
1780 tree base
= get_addr_base_and_unit_offset (arg
, &offset
);
1783 /* When the call above fails due to a non-constant offset
1784 assume the offset is zero and use the size of the whole
1785 enclosing object instead. */
1786 base
= get_base_address (arg
);
1789 /* If the base object is a pointer no upper bound on the length
1790 can be determined. Otherwise the maximum length is equal to
1791 the size of the enclosing object minus the offset of
1792 the referenced subobject minus 1 (for the terminating nul). */
1793 tree type
= TREE_TYPE (base
);
1794 if (TREE_CODE (type
) == POINTER_TYPE
1795 || (TREE_CODE (base
) != PARM_DECL
&& !VAR_P (base
))
1796 || !(val
= DECL_SIZE_UNIT (base
)))
1797 val
= build_all_ones_cst (size_type_node
);
1800 val
= DECL_SIZE_UNIT (base
);
1801 val
= fold_build2 (MINUS_EXPR
, TREE_TYPE (val
), val
,
1802 size_int (offset
+ 1));
1811 /* Adjust the more conservative bound if possible/necessary
1812 and fail otherwise. */
1813 if (rkind
!= SRK_STRLEN
)
1815 if (TREE_CODE (pdata
->maxlen
) != INTEGER_CST
1816 || TREE_CODE (val
) != INTEGER_CST
)
1819 if (tree_int_cst_lt (pdata
->maxlen
, val
))
1820 pdata
->maxlen
= val
;
1823 else if (simple_cst_equal (val
, pdata
->maxlen
) != 1)
1825 /* Fail if the length of this ARG is different from that
1826 previously determined from another ARG. */
1831 pdata
->maxlen
= val
;
1832 return rkind
== SRK_LENRANGE
|| !integer_all_onesp (val
);
1835 /* For an ARG referencing one or more strings, try to obtain the range
1836 of their lengths, or the size of the largest array ARG referes to if
1837 the range of lengths cannot be determined, and store all in *PDATA.
1838 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1839 the maximum constant value.
1840 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1841 SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1842 length or if we are unable to determine the length, return false.
1843 VISITED is a bitmap of visited variables.
1844 RKIND determines the kind of value or range to obtain (see
1846 Set PDATA->DECL if ARG refers to an unterminated constant array.
1847 On input, set ELTSIZE to 1 for normal single byte character strings,
1848 and either 2 or 4 for wide characer strings (the size of wchar_t).
1849 Return true if *PDATA was successfully populated and false otherwise. */
1852 get_range_strlen (tree arg
, bitmap
*visited
,
1853 strlen_range_kind rkind
,
1854 c_strlen_data
*pdata
, unsigned eltsize
)
1857 if (TREE_CODE (arg
) != SSA_NAME
)
1858 return get_range_strlen_tree (arg
, visited
, rkind
, pdata
, eltsize
);
1860 /* If ARG is registered for SSA update we cannot look at its defining
1862 if (name_registered_for_update_p (arg
))
1865 /* If we were already here, break the infinite cycle. */
1867 *visited
= BITMAP_ALLOC (NULL
);
1868 if (!bitmap_set_bit (*visited
, SSA_NAME_VERSION (arg
)))
1872 gimple
*def_stmt
= SSA_NAME_DEF_STMT (var
);
1874 switch (gimple_code (def_stmt
))
1877 /* The RHS of the statement defining VAR must either have a
1878 constant length or come from another SSA_NAME with a constant
1880 if (gimple_assign_single_p (def_stmt
)
1881 || gimple_assign_unary_nop_p (def_stmt
))
1883 tree rhs
= gimple_assign_rhs1 (def_stmt
);
1884 return get_range_strlen (rhs
, visited
, rkind
, pdata
, eltsize
);
1886 else if (gimple_assign_rhs_code (def_stmt
) == COND_EXPR
)
1888 tree ops
[2] = { gimple_assign_rhs2 (def_stmt
),
1889 gimple_assign_rhs3 (def_stmt
) };
1891 for (unsigned int i
= 0; i
< 2; i
++)
1892 if (!get_range_strlen (ops
[i
], visited
, rkind
, pdata
, eltsize
))
1894 if (rkind
!= SRK_LENRANGE
)
1896 /* Set the upper bound to the maximum to prevent
1897 it from being adjusted in the next iteration but
1898 leave MINLEN and the more conservative MAXBOUND
1899 determined so far alone (or leave them null if
1900 they haven't been set yet). That the MINLEN is
1901 in fact zero can be determined from MAXLEN being
1902 unbounded but the discovered minimum is used for
1904 pdata
->maxlen
= build_all_ones_cst (size_type_node
);
1911 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1912 must have a constant length. */
1913 for (unsigned i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
1915 tree arg
= gimple_phi_arg (def_stmt
, i
)->def
;
1917 /* If this PHI has itself as an argument, we cannot
1918 determine the string length of this argument. However,
1919 if we can find a constant string length for the other
1920 PHI args then we can still be sure that this is a
1921 constant string length. So be optimistic and just
1922 continue with the next argument. */
1923 if (arg
== gimple_phi_result (def_stmt
))
1926 if (!get_range_strlen (arg
, visited
, rkind
, pdata
, eltsize
))
1928 if (rkind
!= SRK_LENRANGE
)
1930 /* Set the upper bound to the maximum to prevent
1931 it from being adjusted in the next iteration but
1932 leave MINLEN and the more conservative MAXBOUND
1933 determined so far alone (or leave them null if
1934 they haven't been set yet). That the MINLEN is
1935 in fact zero can be determined from MAXLEN being
1936 unbounded but the discovered minimum is used for
1938 pdata
->maxlen
= build_all_ones_cst (size_type_node
);
1948 /* Try to obtain the range of the lengths of the string(s) referenced
1949 by ARG, or the size of the largest array ARG refers to if the range
1950 of lengths cannot be determined, and store all in *PDATA which must
1951 be zero-initialized on input except PDATA->MAXBOUND may be set to
1952 a non-null tree node other than INTEGER_CST to request to have it
1953 set to the length of the longest string in a PHI. ELTSIZE is
1954 the expected size of the string element in bytes: 1 for char and
1955 some power of 2 for wide characters.
1956 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1957 for optimization. Returning false means that a nonzero PDATA->MINLEN
1958 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1959 is -1 (in that case, the actual range is indeterminate, i.e.,
1960 [0, PTRDIFF_MAX - 2]. */
1963 get_range_strlen (tree arg
, c_strlen_data
*pdata
, unsigned eltsize
)
1965 bitmap visited
= NULL
;
1966 tree maxbound
= pdata
->maxbound
;
1968 if (!get_range_strlen (arg
, &visited
, SRK_LENRANGE
, pdata
, eltsize
))
1970 /* On failure extend the length range to an impossible maximum
1971 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1972 members can stay unchanged regardless. */
1973 pdata
->minlen
= ssize_int (0);
1974 pdata
->maxlen
= build_all_ones_cst (size_type_node
);
1976 else if (!pdata
->minlen
)
1977 pdata
->minlen
= ssize_int (0);
1979 /* If it's unchanged from it initial non-null value, set the conservative
1980 MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1981 if (maxbound
&& pdata
->maxbound
== maxbound
)
1982 pdata
->maxbound
= build_all_ones_cst (size_type_node
);
1985 BITMAP_FREE (visited
);
1987 return !integer_all_onesp (pdata
->maxlen
);
1990 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1991 For ARG of pointer types, NONSTR indicates if the caller is prepared
1992 to handle unterminated strings. For integer ARG and when RKIND ==
1993 SRK_INT_VALUE, NONSTR must be null.
1995 If an unterminated array is discovered and our caller handles
1996 unterminated arrays, then bubble up the offending DECL and
1997 return the maximum size. Otherwise return NULL. */
2000 get_maxval_strlen (tree arg
, strlen_range_kind rkind
, tree
*nonstr
= NULL
)
2002 /* A non-null NONSTR is meaningless when determining the maximum
2003 value of an integer ARG. */
2004 gcc_assert (rkind
!= SRK_INT_VALUE
|| nonstr
== NULL
);
2005 /* ARG must have an integral type when RKIND says so. */
2006 gcc_assert (rkind
!= SRK_INT_VALUE
|| INTEGRAL_TYPE_P (TREE_TYPE (arg
)));
2008 bitmap visited
= NULL
;
2010 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
2012 c_strlen_data lendata
= { };
2013 if (!get_range_strlen (arg
, &visited
, rkind
, &lendata
, /* eltsize = */1))
2014 lendata
.maxlen
= NULL_TREE
;
2015 else if (lendata
.maxlen
&& integer_all_onesp (lendata
.maxlen
))
2016 lendata
.maxlen
= NULL_TREE
;
2019 BITMAP_FREE (visited
);
2023 /* For callers prepared to handle unterminated arrays set
2024 *NONSTR to point to the declaration of the array and return
2025 the maximum length/size. */
2026 *nonstr
= lendata
.decl
;
2027 return lendata
.maxlen
;
2030 /* Fail if the constant array isn't nul-terminated. */
2031 return lendata
.decl
? NULL_TREE
: lendata
.maxlen
;
2035 /* Fold function call to builtin strcpy with arguments DEST and SRC.
2036 If LEN is not NULL, it represents the length of the string to be
2037 copied. Return NULL_TREE if no simplification can be made. */
2040 gimple_fold_builtin_strcpy (gimple_stmt_iterator
*gsi
,
2041 tree dest
, tree src
)
2043 gimple
*stmt
= gsi_stmt (*gsi
);
2044 location_t loc
= gimple_location (stmt
);
2047 /* If SRC and DEST are the same (and not volatile), return DEST. */
2048 if (operand_equal_p (src
, dest
, 0))
2050 /* Issue -Wrestrict unless the pointers are null (those do
2051 not point to objects and so do not indicate an overlap;
2052 such calls could be the result of sanitization and jump
2054 if (!integer_zerop (dest
) && !warning_suppressed_p (stmt
, OPT_Wrestrict
))
2056 tree func
= gimple_call_fndecl (stmt
);
2058 warning_at (loc
, OPT_Wrestrict
,
2059 "%qD source argument is the same as destination",
2063 replace_call_with_value (gsi
, dest
);
2067 if (optimize_function_for_size_p (cfun
))
2070 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
2074 /* Set to non-null if ARG refers to an unterminated array. */
2076 tree len
= get_maxval_strlen (src
, SRK_STRLEN
, &nonstr
);
2080 /* Avoid folding calls with unterminated arrays. */
2081 if (!warning_suppressed_p (stmt
, OPT_Wstringop_overread
))
2082 warn_string_no_nul (loc
, stmt
, "strcpy", src
, nonstr
);
2083 suppress_warning (stmt
, OPT_Wstringop_overread
);
2090 len
= fold_convert_loc (loc
, size_type_node
, len
);
2091 len
= size_binop_loc (loc
, PLUS_EXPR
, len
, build_int_cst (size_type_node
, 1));
2092 len
= force_gimple_operand_gsi (gsi
, len
, true,
2093 NULL_TREE
, true, GSI_SAME_STMT
);
2094 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
2095 replace_call_with_call_and_fold (gsi
, repl
);
2099 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
2100 If SLEN is not NULL, it represents the length of the source string.
2101 Return NULL_TREE if no simplification can be made. */
2104 gimple_fold_builtin_strncpy (gimple_stmt_iterator
*gsi
,
2105 tree dest
, tree src
, tree len
)
2107 gimple
*stmt
= gsi_stmt (*gsi
);
2108 location_t loc
= gimple_location (stmt
);
2109 bool nonstring
= get_attr_nonstring_decl (dest
) != NULL_TREE
;
2111 /* If the LEN parameter is zero, return DEST. */
2112 if (integer_zerop (len
))
2114 /* Avoid warning if the destination refers to an array/pointer
2115 decorate with attribute nonstring. */
2118 tree fndecl
= gimple_call_fndecl (stmt
);
2120 /* Warn about the lack of nul termination: the result is not
2121 a (nul-terminated) string. */
2122 tree slen
= get_maxval_strlen (src
, SRK_STRLEN
);
2123 if (slen
&& !integer_zerop (slen
))
2124 warning_at (loc
, OPT_Wstringop_truncation
,
2125 "%qD destination unchanged after copying no bytes "
2126 "from a string of length %E",
2129 warning_at (loc
, OPT_Wstringop_truncation
,
2130 "%qD destination unchanged after copying no bytes",
2134 replace_call_with_value (gsi
, dest
);
2138 /* We can't compare slen with len as constants below if len is not a
2140 if (TREE_CODE (len
) != INTEGER_CST
)
2143 /* Now, we must be passed a constant src ptr parameter. */
2144 tree slen
= get_maxval_strlen (src
, SRK_STRLEN
);
2145 if (!slen
|| TREE_CODE (slen
) != INTEGER_CST
)
2148 /* The size of the source string including the terminating nul. */
2149 tree ssize
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
2151 /* We do not support simplification of this case, though we do
2152 support it when expanding trees into RTL. */
2153 /* FIXME: generate a call to __builtin_memset. */
2154 if (tree_int_cst_lt (ssize
, len
))
2157 /* Diagnose truncation that leaves the copy unterminated. */
2158 maybe_diag_stxncpy_trunc (*gsi
, src
, len
);
2160 /* OK transform into builtin memcpy. */
2161 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
2165 len
= fold_convert_loc (loc
, size_type_node
, len
);
2166 len
= force_gimple_operand_gsi (gsi
, len
, true,
2167 NULL_TREE
, true, GSI_SAME_STMT
);
2168 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
2169 replace_call_with_call_and_fold (gsi
, repl
);
2174 /* Fold function call to builtin strchr or strrchr.
2175 If both arguments are constant, evaluate and fold the result,
2176 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
2177 In general strlen is significantly faster than strchr
2178 due to being a simpler operation. */
2180 gimple_fold_builtin_strchr (gimple_stmt_iterator
*gsi
, bool is_strrchr
)
2182 gimple
*stmt
= gsi_stmt (*gsi
);
2183 tree str
= gimple_call_arg (stmt
, 0);
2184 tree c
= gimple_call_arg (stmt
, 1);
2185 location_t loc
= gimple_location (stmt
);
2189 if (!gimple_call_lhs (stmt
))
2192 /* Avoid folding if the first argument is not a nul-terminated array.
2193 Defer warning until later. */
2194 if (!check_nul_terminated_array (NULL_TREE
, str
))
2197 if ((p
= c_getstr (str
)) && target_char_cst_p (c
, &ch
))
2199 const char *p1
= is_strrchr
? strrchr (p
, ch
) : strchr (p
, ch
);
2203 replace_call_with_value (gsi
, integer_zero_node
);
2207 tree len
= build_int_cst (size_type_node
, p1
- p
);
2208 gimple_seq stmts
= NULL
;
2209 gimple
*new_stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
2210 POINTER_PLUS_EXPR
, str
, len
);
2211 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
2212 gsi_replace_with_seq_vops (gsi
, stmts
);
2216 if (!integer_zerop (c
))
2219 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
2220 if (is_strrchr
&& optimize_function_for_size_p (cfun
))
2222 tree strchr_fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
2226 gimple
*repl
= gimple_build_call (strchr_fn
, 2, str
, c
);
2227 replace_call_with_call_and_fold (gsi
, repl
);
2235 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
2240 /* Create newstr = strlen (str). */
2241 gimple_seq stmts
= NULL
;
2242 gimple
*new_stmt
= gimple_build_call (strlen_fn
, 1, str
);
2243 gimple_set_location (new_stmt
, loc
);
2244 len
= create_tmp_reg_or_ssa_name (size_type_node
);
2245 gimple_call_set_lhs (new_stmt
, len
);
2246 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
2248 /* Create (str p+ strlen (str)). */
2249 new_stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
2250 POINTER_PLUS_EXPR
, str
, len
);
2251 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
2252 gsi_replace_with_seq_vops (gsi
, stmts
);
2253 /* gsi now points at the assignment to the lhs, get a
2254 stmt iterator to the strlen.
2255 ??? We can't use gsi_for_stmt as that doesn't work when the
2256 CFG isn't built yet. */
2257 gimple_stmt_iterator gsi2
= *gsi
;
2263 /* Fold function call to builtin strstr.
2264 If both arguments are constant, evaluate and fold the result,
2265 additionally fold strstr (x, "") into x and strstr (x, "c")
2266 into strchr (x, 'c'). */
2268 gimple_fold_builtin_strstr (gimple_stmt_iterator
*gsi
)
2270 gimple
*stmt
= gsi_stmt (*gsi
);
2271 if (!gimple_call_lhs (stmt
))
2274 tree haystack
= gimple_call_arg (stmt
, 0);
2275 tree needle
= gimple_call_arg (stmt
, 1);
2277 /* Avoid folding if either argument is not a nul-terminated array.
2278 Defer warning until later. */
2279 if (!check_nul_terminated_array (NULL_TREE
, haystack
)
2280 || !check_nul_terminated_array (NULL_TREE
, needle
))
2283 const char *q
= c_getstr (needle
);
2287 if (const char *p
= c_getstr (haystack
))
2289 const char *r
= strstr (p
, q
);
2293 replace_call_with_value (gsi
, integer_zero_node
);
2297 tree len
= build_int_cst (size_type_node
, r
- p
);
2298 gimple_seq stmts
= NULL
;
2300 = gimple_build_assign (gimple_call_lhs (stmt
), POINTER_PLUS_EXPR
,
2302 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
2303 gsi_replace_with_seq_vops (gsi
, stmts
);
2307 /* For strstr (x, "") return x. */
2310 replace_call_with_value (gsi
, haystack
);
2314 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2317 tree strchr_fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
2320 tree c
= build_int_cst (integer_type_node
, q
[0]);
2321 gimple
*repl
= gimple_build_call (strchr_fn
, 2, haystack
, c
);
2322 replace_call_with_call_and_fold (gsi
, repl
);
2330 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2333 Return NULL_TREE if no simplification was possible, otherwise return the
2334 simplified form of the call as a tree.
2336 The simplified form may be a constant or other expression which
2337 computes the same value, but in a more efficient manner (including
2338 calls to other builtin functions).
2340 The call may contain arguments which need to be evaluated, but
2341 which are not useful to determine the result of the call. In
2342 this case we return a chain of COMPOUND_EXPRs. The LHS of each
2343 COMPOUND_EXPR will be an argument which must be evaluated.
2344 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
2345 COMPOUND_EXPR in the chain will contain the tree for the simplified
2346 form of the builtin function call. */
2349 gimple_fold_builtin_strcat (gimple_stmt_iterator
*gsi
, tree dst
, tree src
)
2351 gimple
*stmt
= gsi_stmt (*gsi
);
2352 location_t loc
= gimple_location (stmt
);
2354 const char *p
= c_getstr (src
);
2356 /* If the string length is zero, return the dst parameter. */
2357 if (p
&& *p
== '\0')
2359 replace_call_with_value (gsi
, dst
);
2363 if (!optimize_bb_for_speed_p (gimple_bb (stmt
)))
2366 /* See if we can store by pieces into (dst + strlen(dst)). */
2368 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
2369 tree memcpy_fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
2371 if (!strlen_fn
|| !memcpy_fn
)
2374 /* If the length of the source string isn't computable don't
2375 split strcat into strlen and memcpy. */
2376 tree len
= get_maxval_strlen (src
, SRK_STRLEN
);
2380 /* Create strlen (dst). */
2381 gimple_seq stmts
= NULL
, stmts2
;
2382 gimple
*repl
= gimple_build_call (strlen_fn
, 1, dst
);
2383 gimple_set_location (repl
, loc
);
2384 newdst
= create_tmp_reg_or_ssa_name (size_type_node
);
2385 gimple_call_set_lhs (repl
, newdst
);
2386 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2388 /* Create (dst p+ strlen (dst)). */
2389 newdst
= fold_build_pointer_plus_loc (loc
, dst
, newdst
);
2390 newdst
= force_gimple_operand (newdst
, &stmts2
, true, NULL_TREE
);
2391 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
2393 len
= fold_convert_loc (loc
, size_type_node
, len
);
2394 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
2395 build_int_cst (size_type_node
, 1));
2396 len
= force_gimple_operand (len
, &stmts2
, true, NULL_TREE
);
2397 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
2399 repl
= gimple_build_call (memcpy_fn
, 3, newdst
, src
, len
);
2400 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2401 if (gimple_call_lhs (stmt
))
2403 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dst
);
2404 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2405 gsi_replace_with_seq_vops (gsi
, stmts
);
2406 /* gsi now points at the assignment to the lhs, get a
2407 stmt iterator to the memcpy call.
2408 ??? We can't use gsi_for_stmt as that doesn't work when the
2409 CFG isn't built yet. */
2410 gimple_stmt_iterator gsi2
= *gsi
;
2416 gsi_replace_with_seq_vops (gsi
, stmts
);
2422 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2423 are the arguments to the call. */
2426 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator
*gsi
)
2428 gimple
*stmt
= gsi_stmt (*gsi
);
2429 tree dest
= gimple_call_arg (stmt
, 0);
2430 tree src
= gimple_call_arg (stmt
, 1);
2431 tree size
= gimple_call_arg (stmt
, 2);
2437 /* If the SRC parameter is "", return DEST. */
2438 if (p
&& *p
== '\0')
2440 replace_call_with_value (gsi
, dest
);
2444 if (! tree_fits_uhwi_p (size
) || ! integer_all_onesp (size
))
2447 /* If __builtin_strcat_chk is used, assume strcat is available. */
2448 fn
= builtin_decl_explicit (BUILT_IN_STRCAT
);
2452 gimple
*repl
= gimple_build_call (fn
, 2, dest
, src
);
2453 replace_call_with_call_and_fold (gsi
, repl
);
2457 /* Simplify a call to the strncat builtin. */
2460 gimple_fold_builtin_strncat (gimple_stmt_iterator
*gsi
)
2462 gimple
*stmt
= gsi_stmt (*gsi
);
2463 tree dst
= gimple_call_arg (stmt
, 0);
2464 tree src
= gimple_call_arg (stmt
, 1);
2465 tree len
= gimple_call_arg (stmt
, 2);
2467 const char *p
= c_getstr (src
);
2469 /* If the requested length is zero, or the src parameter string
2470 length is zero, return the dst parameter. */
2471 if (integer_zerop (len
) || (p
&& *p
== '\0'))
2473 replace_call_with_value (gsi
, dst
);
2477 if (TREE_CODE (len
) != INTEGER_CST
|| !p
)
2480 unsigned srclen
= strlen (p
);
2482 int cmpsrc
= compare_tree_int (len
, srclen
);
2484 /* Return early if the requested len is less than the string length.
2485 Warnings will be issued elsewhere later. */
2489 unsigned HOST_WIDE_INT dstsize
;
2491 bool nowarn
= warning_suppressed_p (stmt
, OPT_Wstringop_overflow_
);
2493 if (!nowarn
&& compute_builtin_object_size (dst
, 1, &dstsize
))
2495 int cmpdst
= compare_tree_int (len
, dstsize
);
2499 tree fndecl
= gimple_call_fndecl (stmt
);
2501 /* Strncat copies (at most) LEN bytes and always appends
2502 the terminating NUL so the specified bound should never
2503 be equal to (or greater than) the size of the destination.
2504 If it is, the copy could overflow. */
2505 location_t loc
= gimple_location (stmt
);
2506 nowarn
= warning_at (loc
, OPT_Wstringop_overflow_
,
2508 ? G_("%qD specified bound %E equals "
2510 : G_("%qD specified bound %E exceeds "
2511 "destination size %wu"),
2512 fndecl
, len
, dstsize
);
2514 suppress_warning (stmt
, OPT_Wstringop_overflow_
);
2518 if (!nowarn
&& cmpsrc
== 0)
2520 tree fndecl
= gimple_call_fndecl (stmt
);
2521 location_t loc
= gimple_location (stmt
);
2523 /* To avoid possible overflow the specified bound should also
2524 not be equal to the length of the source, even when the size
2525 of the destination is unknown (it's not an uncommon mistake
2526 to specify as the bound to strncpy the length of the source). */
2527 if (warning_at (loc
, OPT_Wstringop_overflow_
,
2528 "%qD specified bound %E equals source length",
2530 suppress_warning (stmt
, OPT_Wstringop_overflow_
);
2533 tree fn
= builtin_decl_implicit (BUILT_IN_STRCAT
);
2535 /* If the replacement _DECL isn't initialized, don't do the
2540 /* Otherwise, emit a call to strcat. */
2541 gcall
*repl
= gimple_build_call (fn
, 2, dst
, src
);
2542 replace_call_with_call_and_fold (gsi
, repl
);
2546 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2550 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator
*gsi
)
2552 gimple
*stmt
= gsi_stmt (*gsi
);
2553 tree dest
= gimple_call_arg (stmt
, 0);
2554 tree src
= gimple_call_arg (stmt
, 1);
2555 tree len
= gimple_call_arg (stmt
, 2);
2556 tree size
= gimple_call_arg (stmt
, 3);
2561 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2562 if ((p
&& *p
== '\0')
2563 || integer_zerop (len
))
2565 replace_call_with_value (gsi
, dest
);
2569 if (! tree_fits_uhwi_p (size
))
2572 if (! integer_all_onesp (size
))
2574 tree src_len
= c_strlen (src
, 1);
2576 && tree_fits_uhwi_p (src_len
)
2577 && tree_fits_uhwi_p (len
)
2578 && ! tree_int_cst_lt (len
, src_len
))
2580 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2581 fn
= builtin_decl_explicit (BUILT_IN_STRCAT_CHK
);
2585 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
2586 replace_call_with_call_and_fold (gsi
, repl
);
2592 /* If __builtin_strncat_chk is used, assume strncat is available. */
2593 fn
= builtin_decl_explicit (BUILT_IN_STRNCAT
);
2597 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
2598 replace_call_with_call_and_fold (gsi
, repl
);
2602 /* Build and append gimple statements to STMTS that would load a first
2603 character of a memory location identified by STR. LOC is location
2604 of the statement. */
2607 gimple_load_first_char (location_t loc
, tree str
, gimple_seq
*stmts
)
2611 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
2612 tree cst_uchar_ptr_node
2613 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
2614 tree off0
= build_int_cst (cst_uchar_ptr_node
, 0);
2616 tree temp
= fold_build2_loc (loc
, MEM_REF
, cst_uchar_node
, str
, off0
);
2617 gassign
*stmt
= gimple_build_assign (NULL_TREE
, temp
);
2618 var
= create_tmp_reg_or_ssa_name (cst_uchar_node
, stmt
);
2620 gimple_assign_set_lhs (stmt
, var
);
2621 gimple_seq_add_stmt_without_update (stmts
, stmt
);
2626 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator. */
2629 gimple_fold_builtin_string_compare (gimple_stmt_iterator
*gsi
)
2631 gimple
*stmt
= gsi_stmt (*gsi
);
2632 tree callee
= gimple_call_fndecl (stmt
);
2633 enum built_in_function fcode
= DECL_FUNCTION_CODE (callee
);
2635 tree type
= integer_type_node
;
2636 tree str1
= gimple_call_arg (stmt
, 0);
2637 tree str2
= gimple_call_arg (stmt
, 1);
2638 tree lhs
= gimple_call_lhs (stmt
);
2640 tree bound_node
= NULL_TREE
;
2641 unsigned HOST_WIDE_INT bound
= HOST_WIDE_INT_M1U
;
2643 /* Handle strncmp and strncasecmp functions. */
2644 if (gimple_call_num_args (stmt
) == 3)
2646 bound_node
= gimple_call_arg (stmt
, 2);
2647 if (tree_fits_uhwi_p (bound_node
))
2648 bound
= tree_to_uhwi (bound_node
);
2651 /* If the BOUND parameter is zero, return zero. */
2654 replace_call_with_value (gsi
, integer_zero_node
);
2658 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2659 if (operand_equal_p (str1
, str2
, 0))
2661 replace_call_with_value (gsi
, integer_zero_node
);
2665 /* Initially set to the number of characters, including the terminating
2666 nul if each array has one. LENx == strnlen (Sx, LENx) implies that
2667 the array Sx is not terminated by a nul.
2668 For nul-terminated strings then adjusted to their length so that
2669 LENx == NULPOSx holds. */
2670 unsigned HOST_WIDE_INT len1
= HOST_WIDE_INT_MAX
, len2
= len1
;
2671 const char *p1
= getbyterep (str1
, &len1
);
2672 const char *p2
= getbyterep (str2
, &len2
);
2674 /* The position of the terminating nul character if one exists, otherwise
2675 a value greater than LENx. */
2676 unsigned HOST_WIDE_INT nulpos1
= HOST_WIDE_INT_MAX
, nulpos2
= nulpos1
;
2680 size_t n
= strnlen (p1
, len1
);
2687 size_t n
= strnlen (p2
, len2
);
2692 /* For known strings, return an immediate value. */
2696 bool known_result
= false;
2700 case BUILT_IN_STRCMP
:
2701 case BUILT_IN_STRCMP_EQ
:
2702 if (len1
!= nulpos1
|| len2
!= nulpos2
)
2705 r
= strcmp (p1
, p2
);
2706 known_result
= true;
2709 case BUILT_IN_STRNCMP
:
2710 case BUILT_IN_STRNCMP_EQ
:
2712 if (bound
== HOST_WIDE_INT_M1U
)
2715 /* Reduce the bound to be no more than the length
2716 of the shorter of the two strings, or the sizes
2717 of the unterminated arrays. */
2718 unsigned HOST_WIDE_INT n
= bound
;
2720 if (len1
== nulpos1
&& len1
< n
)
2722 if (len2
== nulpos2
&& len2
< n
)
2725 if (MIN (nulpos1
, nulpos2
) + 1 < n
)
2728 r
= strncmp (p1
, p2
, n
);
2729 known_result
= true;
2732 /* Only handleable situation is where the string are equal (result 0),
2733 which is already handled by operand_equal_p case. */
2734 case BUILT_IN_STRCASECMP
:
2736 case BUILT_IN_STRNCASECMP
:
2738 if (bound
== HOST_WIDE_INT_M1U
)
2740 r
= strncmp (p1
, p2
, bound
);
2742 known_result
= true;
2751 replace_call_with_value (gsi
, build_cmp_result (type
, r
));
2756 bool nonzero_bound
= (bound
>= 1 && bound
< HOST_WIDE_INT_M1U
)
2757 || fcode
== BUILT_IN_STRCMP
2758 || fcode
== BUILT_IN_STRCMP_EQ
2759 || fcode
== BUILT_IN_STRCASECMP
;
2761 location_t loc
= gimple_location (stmt
);
2763 /* If the second arg is "", return *(const unsigned char*)arg1. */
2764 if (p2
&& *p2
== '\0' && nonzero_bound
)
2766 gimple_seq stmts
= NULL
;
2767 tree var
= gimple_load_first_char (loc
, str1
, &stmts
);
2770 stmt
= gimple_build_assign (lhs
, NOP_EXPR
, var
);
2771 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2774 gsi_replace_with_seq_vops (gsi
, stmts
);
2778 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2779 if (p1
&& *p1
== '\0' && nonzero_bound
)
2781 gimple_seq stmts
= NULL
;
2782 tree var
= gimple_load_first_char (loc
, str2
, &stmts
);
2786 tree c
= create_tmp_reg_or_ssa_name (integer_type_node
);
2787 stmt
= gimple_build_assign (c
, NOP_EXPR
, var
);
2788 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2790 stmt
= gimple_build_assign (lhs
, NEGATE_EXPR
, c
);
2791 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2794 gsi_replace_with_seq_vops (gsi
, stmts
);
2798 /* If BOUND is one, return an expression corresponding to
2799 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2800 if (fcode
== BUILT_IN_STRNCMP
&& bound
== 1)
2802 gimple_seq stmts
= NULL
;
2803 tree temp1
= gimple_load_first_char (loc
, str1
, &stmts
);
2804 tree temp2
= gimple_load_first_char (loc
, str2
, &stmts
);
2808 tree c1
= create_tmp_reg_or_ssa_name (integer_type_node
);
2809 gassign
*convert1
= gimple_build_assign (c1
, NOP_EXPR
, temp1
);
2810 gimple_seq_add_stmt_without_update (&stmts
, convert1
);
2812 tree c2
= create_tmp_reg_or_ssa_name (integer_type_node
);
2813 gassign
*convert2
= gimple_build_assign (c2
, NOP_EXPR
, temp2
);
2814 gimple_seq_add_stmt_without_update (&stmts
, convert2
);
2816 stmt
= gimple_build_assign (lhs
, MINUS_EXPR
, c1
, c2
);
2817 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2820 gsi_replace_with_seq_vops (gsi
, stmts
);
2824 /* If BOUND is greater than the length of one constant string,
2825 and the other argument is also a nul-terminated string, replace
2826 strncmp with strcmp. */
2827 if (fcode
== BUILT_IN_STRNCMP
2828 && bound
> 0 && bound
< HOST_WIDE_INT_M1U
2829 && ((p2
&& len2
< bound
&& len2
== nulpos2
)
2830 || (p1
&& len1
< bound
&& len1
== nulpos1
)))
2832 tree fn
= builtin_decl_implicit (BUILT_IN_STRCMP
);
2835 gimple
*repl
= gimple_build_call (fn
, 2, str1
, str2
);
2836 replace_call_with_call_and_fold (gsi
, repl
);
2843 /* Fold a call to the memchr pointed by GSI iterator. */
2846 gimple_fold_builtin_memchr (gimple_stmt_iterator
*gsi
)
2848 gimple
*stmt
= gsi_stmt (*gsi
);
2849 tree lhs
= gimple_call_lhs (stmt
);
2850 tree arg1
= gimple_call_arg (stmt
, 0);
2851 tree arg2
= gimple_call_arg (stmt
, 1);
2852 tree len
= gimple_call_arg (stmt
, 2);
2854 /* If the LEN parameter is zero, return zero. */
2855 if (integer_zerop (len
))
2857 replace_call_with_value (gsi
, build_int_cst (ptr_type_node
, 0));
2862 if (TREE_CODE (arg2
) != INTEGER_CST
2863 || !tree_fits_uhwi_p (len
)
2864 || !target_char_cst_p (arg2
, &c
))
2867 unsigned HOST_WIDE_INT length
= tree_to_uhwi (len
);
2868 unsigned HOST_WIDE_INT string_length
;
2869 const char *p1
= getbyterep (arg1
, &string_length
);
2873 const char *r
= (const char *)memchr (p1
, c
, MIN (length
, string_length
));
2876 tree mem_size
, offset_node
;
2877 byte_representation (arg1
, &offset_node
, &mem_size
, NULL
);
2878 unsigned HOST_WIDE_INT offset
= (offset_node
== NULL_TREE
)
2879 ? 0 : tree_to_uhwi (offset_node
);
2880 /* MEM_SIZE is the size of the array the string literal
2882 unsigned HOST_WIDE_INT string_size
= tree_to_uhwi (mem_size
) - offset
;
2883 gcc_checking_assert (string_length
<= string_size
);
2884 if (length
<= string_size
)
2886 replace_call_with_value (gsi
, build_int_cst (ptr_type_node
, 0));
2892 unsigned HOST_WIDE_INT offset
= r
- p1
;
2893 gimple_seq stmts
= NULL
;
2894 if (lhs
!= NULL_TREE
)
2896 tree offset_cst
= build_int_cst (sizetype
, offset
);
2897 gassign
*stmt
= gimple_build_assign (lhs
, POINTER_PLUS_EXPR
,
2899 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2902 gimple_seq_add_stmt_without_update (&stmts
,
2903 gimple_build_nop ());
2905 gsi_replace_with_seq_vops (gsi
, stmts
);
2913 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2914 to the call. IGNORE is true if the value returned
2915 by the builtin will be ignored. UNLOCKED is true is true if this
2916 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2917 the known length of the string. Return NULL_TREE if no simplification
2921 gimple_fold_builtin_fputs (gimple_stmt_iterator
*gsi
,
2922 tree arg0
, tree arg1
,
2925 gimple
*stmt
= gsi_stmt (*gsi
);
2927 /* If we're using an unlocked function, assume the other unlocked
2928 functions exist explicitly. */
2929 tree
const fn_fputc
= (unlocked
2930 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
)
2931 : builtin_decl_implicit (BUILT_IN_FPUTC
));
2932 tree
const fn_fwrite
= (unlocked
2933 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED
)
2934 : builtin_decl_implicit (BUILT_IN_FWRITE
));
2936 /* If the return value is used, don't do the transformation. */
2937 if (gimple_call_lhs (stmt
))
2940 /* Get the length of the string passed to fputs. If the length
2941 can't be determined, punt. */
2942 tree len
= get_maxval_strlen (arg0
, SRK_STRLEN
);
2944 || TREE_CODE (len
) != INTEGER_CST
)
2947 switch (compare_tree_int (len
, 1))
2949 case -1: /* length is 0, delete the call entirely . */
2950 replace_call_with_value (gsi
, integer_zero_node
);
2953 case 0: /* length is 1, call fputc. */
2955 const char *p
= c_getstr (arg0
);
2961 gimple
*repl
= gimple_build_call (fn_fputc
, 2,
2963 (integer_type_node
, p
[0]), arg1
);
2964 replace_call_with_call_and_fold (gsi
, repl
);
2969 case 1: /* length is greater than 1, call fwrite. */
2971 /* If optimizing for size keep fputs. */
2972 if (optimize_function_for_size_p (cfun
))
2974 /* New argument list transforming fputs(string, stream) to
2975 fwrite(string, 1, len, stream). */
2979 gimple
*repl
= gimple_build_call (fn_fwrite
, 4, arg0
,
2980 size_one_node
, len
, arg1
);
2981 replace_call_with_call_and_fold (gsi
, repl
);
2990 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2991 DEST, SRC, LEN, and SIZE are the arguments to the call.
2992 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2993 code of the builtin. If MAXLEN is not NULL, it is maximum length
2994 passed as third argument. */
2997 gimple_fold_builtin_memory_chk (gimple_stmt_iterator
*gsi
,
2998 tree dest
, tree src
, tree len
, tree size
,
2999 enum built_in_function fcode
)
3001 gimple
*stmt
= gsi_stmt (*gsi
);
3002 location_t loc
= gimple_location (stmt
);
3003 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
3006 /* If SRC and DEST are the same (and not volatile), return DEST
3007 (resp. DEST+LEN for __mempcpy_chk). */
3008 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
3010 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
3012 replace_call_with_value (gsi
, dest
);
3017 gimple_seq stmts
= NULL
;
3018 len
= gimple_convert_to_ptrofftype (&stmts
, loc
, len
);
3019 tree temp
= gimple_build (&stmts
, loc
, POINTER_PLUS_EXPR
,
3020 TREE_TYPE (dest
), dest
, len
);
3021 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
3022 replace_call_with_value (gsi
, temp
);
3027 if (! tree_fits_uhwi_p (size
))
3030 tree maxlen
= get_maxval_strlen (len
, SRK_INT_VALUE
);
3031 if (! integer_all_onesp (size
))
3033 if (! tree_fits_uhwi_p (len
))
3035 /* If LEN is not constant, try MAXLEN too.
3036 For MAXLEN only allow optimizing into non-_ocs function
3037 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3038 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
3040 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
3042 /* (void) __mempcpy_chk () can be optimized into
3043 (void) __memcpy_chk (). */
3044 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
3048 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
3049 replace_call_with_call_and_fold (gsi
, repl
);
3058 if (tree_int_cst_lt (size
, maxlen
))
3063 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
3064 mem{cpy,pcpy,move,set} is available. */
3067 case BUILT_IN_MEMCPY_CHK
:
3068 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
3070 case BUILT_IN_MEMPCPY_CHK
:
3071 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
3073 case BUILT_IN_MEMMOVE_CHK
:
3074 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
3076 case BUILT_IN_MEMSET_CHK
:
3077 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
3086 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
3087 replace_call_with_call_and_fold (gsi
, repl
);
3091 /* Fold a call to the __st[rp]cpy_chk builtin.
3092 DEST, SRC, and SIZE are the arguments to the call.
3093 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
3094 code of the builtin. If MAXLEN is not NULL, it is maximum length of
3095 strings passed as second argument. */
3098 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator
*gsi
,
3100 tree src
, tree size
,
3101 enum built_in_function fcode
)
3103 gimple
*stmt
= gsi_stmt (*gsi
);
3104 location_t loc
= gimple_location (stmt
);
3105 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
3108 /* If SRC and DEST are the same (and not volatile), return DEST. */
3109 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
3111 /* Issue -Wrestrict unless the pointers are null (those do
3112 not point to objects and so do not indicate an overlap;
3113 such calls could be the result of sanitization and jump
3115 if (!integer_zerop (dest
)
3116 && !warning_suppressed_p (stmt
, OPT_Wrestrict
))
3118 tree func
= gimple_call_fndecl (stmt
);
3120 warning_at (loc
, OPT_Wrestrict
,
3121 "%qD source argument is the same as destination",
3125 replace_call_with_value (gsi
, dest
);
3129 if (! tree_fits_uhwi_p (size
))
3132 tree maxlen
= get_maxval_strlen (src
, SRK_STRLENMAX
);
3133 if (! integer_all_onesp (size
))
3135 len
= c_strlen (src
, 1);
3136 if (! len
|| ! tree_fits_uhwi_p (len
))
3138 /* If LEN is not constant, try MAXLEN too.
3139 For MAXLEN only allow optimizing into non-_ocs function
3140 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3141 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
3143 if (fcode
== BUILT_IN_STPCPY_CHK
)
3148 /* If return value of __stpcpy_chk is ignored,
3149 optimize into __strcpy_chk. */
3150 fn
= builtin_decl_explicit (BUILT_IN_STRCPY_CHK
);
3154 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
3155 replace_call_with_call_and_fold (gsi
, repl
);
3159 if (! len
|| TREE_SIDE_EFFECTS (len
))
3162 /* If c_strlen returned something, but not a constant,
3163 transform __strcpy_chk into __memcpy_chk. */
3164 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
3168 gimple_seq stmts
= NULL
;
3169 len
= force_gimple_operand (len
, &stmts
, true, NULL_TREE
);
3170 len
= gimple_convert (&stmts
, loc
, size_type_node
, len
);
3171 len
= gimple_build (&stmts
, loc
, PLUS_EXPR
, size_type_node
, len
,
3172 build_int_cst (size_type_node
, 1));
3173 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
3174 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
3175 replace_call_with_call_and_fold (gsi
, repl
);
3182 if (! tree_int_cst_lt (maxlen
, size
))
3186 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
3187 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPCPY_CHK
3188 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
);
3192 gimple
*repl
= gimple_build_call (fn
, 2, dest
, src
);
3193 replace_call_with_call_and_fold (gsi
, repl
);
3197 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
3198 are the arguments to the call. If MAXLEN is not NULL, it is maximum
3199 length passed as third argument. IGNORE is true if return value can be
3200 ignored. FCODE is the BUILT_IN_* code of the builtin. */
3203 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator
*gsi
,
3204 tree dest
, tree src
,
3205 tree len
, tree size
,
3206 enum built_in_function fcode
)
3208 gimple
*stmt
= gsi_stmt (*gsi
);
3209 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
3212 if (fcode
== BUILT_IN_STPNCPY_CHK
&& ignore
)
3214 /* If return value of __stpncpy_chk is ignored,
3215 optimize into __strncpy_chk. */
3216 fn
= builtin_decl_explicit (BUILT_IN_STRNCPY_CHK
);
3219 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
3220 replace_call_with_call_and_fold (gsi
, repl
);
3225 if (! tree_fits_uhwi_p (size
))
3228 tree maxlen
= get_maxval_strlen (len
, SRK_INT_VALUE
);
3229 if (! integer_all_onesp (size
))
3231 if (! tree_fits_uhwi_p (len
))
3233 /* If LEN is not constant, try MAXLEN too.
3234 For MAXLEN only allow optimizing into non-_ocs function
3235 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3236 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
3242 if (tree_int_cst_lt (size
, maxlen
))
3246 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
3247 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPNCPY_CHK
3248 ? BUILT_IN_STPNCPY
: BUILT_IN_STRNCPY
);
3252 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
3253 replace_call_with_call_and_fold (gsi
, repl
);
3257 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
3258 Return NULL_TREE if no simplification can be made. */
3261 gimple_fold_builtin_stpcpy (gimple_stmt_iterator
*gsi
)
3263 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3264 location_t loc
= gimple_location (stmt
);
3265 tree dest
= gimple_call_arg (stmt
, 0);
3266 tree src
= gimple_call_arg (stmt
, 1);
3269 /* If the result is unused, replace stpcpy with strcpy. */
3270 if (gimple_call_lhs (stmt
) == NULL_TREE
)
3272 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3275 gimple_call_set_fndecl (stmt
, fn
);
3280 /* Set to non-null if ARG refers to an unterminated array. */
3281 c_strlen_data data
= { };
3282 /* The size of the unterminated array if SRC referes to one. */
3284 /* True if the size is exact/constant, false if it's the lower bound
3287 tree len
= c_strlen (src
, 1, &data
, 1);
3289 || TREE_CODE (len
) != INTEGER_CST
)
3291 data
.decl
= unterminated_array (src
, &size
, &exact
);
3298 /* Avoid folding calls with unterminated arrays. */
3299 if (!warning_suppressed_p (stmt
, OPT_Wstringop_overread
))
3300 warn_string_no_nul (loc
, stmt
, "stpcpy", src
, data
.decl
, size
,
3302 suppress_warning (stmt
, OPT_Wstringop_overread
);
3306 if (optimize_function_for_size_p (cfun
)
3307 /* If length is zero it's small enough. */
3308 && !integer_zerop (len
))
3311 /* If the source has a known length replace stpcpy with memcpy. */
3312 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
3316 gimple_seq stmts
= NULL
;
3317 tree tem
= gimple_convert (&stmts
, loc
, size_type_node
, len
);
3318 lenp1
= gimple_build (&stmts
, loc
, PLUS_EXPR
, size_type_node
,
3319 tem
, build_int_cst (size_type_node
, 1));
3320 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
3321 gcall
*repl
= gimple_build_call (fn
, 3, dest
, src
, lenp1
);
3322 gimple_move_vops (repl
, stmt
);
3323 gsi_insert_before (gsi
, repl
, GSI_SAME_STMT
);
3324 /* Replace the result with dest + len. */
3326 tem
= gimple_convert (&stmts
, loc
, sizetype
, len
);
3327 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
3328 gassign
*ret
= gimple_build_assign (gimple_call_lhs (stmt
),
3329 POINTER_PLUS_EXPR
, dest
, tem
);
3330 gsi_replace (gsi
, ret
, false);
3331 /* Finally fold the memcpy call. */
3332 gimple_stmt_iterator gsi2
= *gsi
;
3338 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
3339 NULL_TREE if a normal call should be emitted rather than expanding
3340 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
3341 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
3342 passed as second argument. */
3345 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator
*gsi
,
3346 enum built_in_function fcode
)
3348 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3349 tree dest
, size
, len
, fn
, fmt
, flag
;
3350 const char *fmt_str
;
3352 /* Verify the required arguments in the original call. */
3353 if (gimple_call_num_args (stmt
) < 5)
3356 dest
= gimple_call_arg (stmt
, 0);
3357 len
= gimple_call_arg (stmt
, 1);
3358 flag
= gimple_call_arg (stmt
, 2);
3359 size
= gimple_call_arg (stmt
, 3);
3360 fmt
= gimple_call_arg (stmt
, 4);
3362 if (! tree_fits_uhwi_p (size
))
3365 if (! integer_all_onesp (size
))
3367 tree maxlen
= get_maxval_strlen (len
, SRK_INT_VALUE
);
3368 if (! tree_fits_uhwi_p (len
))
3370 /* If LEN is not constant, try MAXLEN too.
3371 For MAXLEN only allow optimizing into non-_ocs function
3372 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3373 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
3379 if (tree_int_cst_lt (size
, maxlen
))
3383 if (!init_target_chars ())
3386 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3387 or if format doesn't contain % chars or is "%s". */
3388 if (! integer_zerop (flag
))
3390 fmt_str
= c_getstr (fmt
);
3391 if (fmt_str
== NULL
)
3393 if (strchr (fmt_str
, target_percent
) != NULL
3394 && strcmp (fmt_str
, target_percent_s
))
3398 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3400 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSNPRINTF_CHK
3401 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
);
3405 /* Replace the called function and the first 5 argument by 3 retaining
3406 trailing varargs. */
3407 gimple_call_set_fndecl (stmt
, fn
);
3408 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
3409 gimple_call_set_arg (stmt
, 0, dest
);
3410 gimple_call_set_arg (stmt
, 1, len
);
3411 gimple_call_set_arg (stmt
, 2, fmt
);
3412 for (unsigned i
= 3; i
< gimple_call_num_args (stmt
) - 2; ++i
)
3413 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
3414 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
3419 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
3420 Return NULL_TREE if a normal call should be emitted rather than
3421 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
3422 or BUILT_IN_VSPRINTF_CHK. */
3425 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator
*gsi
,
3426 enum built_in_function fcode
)
3428 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3429 tree dest
, size
, len
, fn
, fmt
, flag
;
3430 const char *fmt_str
;
3431 unsigned nargs
= gimple_call_num_args (stmt
);
3433 /* Verify the required arguments in the original call. */
3436 dest
= gimple_call_arg (stmt
, 0);
3437 flag
= gimple_call_arg (stmt
, 1);
3438 size
= gimple_call_arg (stmt
, 2);
3439 fmt
= gimple_call_arg (stmt
, 3);
3441 if (! tree_fits_uhwi_p (size
))
3446 if (!init_target_chars ())
3449 /* Check whether the format is a literal string constant. */
3450 fmt_str
= c_getstr (fmt
);
3451 if (fmt_str
!= NULL
)
3453 /* If the format doesn't contain % args or %%, we know the size. */
3454 if (strchr (fmt_str
, target_percent
) == 0)
3456 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
3457 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
3459 /* If the format is "%s" and first ... argument is a string literal,
3460 we know the size too. */
3461 else if (fcode
== BUILT_IN_SPRINTF_CHK
3462 && strcmp (fmt_str
, target_percent_s
) == 0)
3468 arg
= gimple_call_arg (stmt
, 4);
3469 if (POINTER_TYPE_P (TREE_TYPE (arg
)))
3471 len
= c_strlen (arg
, 1);
3472 if (! len
|| ! tree_fits_uhwi_p (len
))
3479 if (! integer_all_onesp (size
))
3481 if (! len
|| ! tree_int_cst_lt (len
, size
))
3485 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3486 or if format doesn't contain % chars or is "%s". */
3487 if (! integer_zerop (flag
))
3489 if (fmt_str
== NULL
)
3491 if (strchr (fmt_str
, target_percent
) != NULL
3492 && strcmp (fmt_str
, target_percent_s
))
3496 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3497 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSPRINTF_CHK
3498 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
);
3502 /* Replace the called function and the first 4 argument by 2 retaining
3503 trailing varargs. */
3504 gimple_call_set_fndecl (stmt
, fn
);
3505 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
3506 gimple_call_set_arg (stmt
, 0, dest
);
3507 gimple_call_set_arg (stmt
, 1, fmt
);
3508 for (unsigned i
= 2; i
< gimple_call_num_args (stmt
) - 2; ++i
)
3509 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
3510 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
3515 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3516 ORIG may be null if this is a 2-argument call. We don't attempt to
3517 simplify calls with more than 3 arguments.
3519 Return true if simplification was possible, otherwise false. */
3522 gimple_fold_builtin_sprintf (gimple_stmt_iterator
*gsi
)
3524 gimple
*stmt
= gsi_stmt (*gsi
);
3526 /* Verify the required arguments in the original call. We deal with two
3527 types of sprintf() calls: 'sprintf (str, fmt)' and
3528 'sprintf (dest, "%s", orig)'. */
3529 if (gimple_call_num_args (stmt
) > 3)
3532 tree orig
= NULL_TREE
;
3533 if (gimple_call_num_args (stmt
) == 3)
3534 orig
= gimple_call_arg (stmt
, 2);
3536 /* Check whether the format is a literal string constant. */
3537 tree fmt
= gimple_call_arg (stmt
, 1);
3538 const char *fmt_str
= c_getstr (fmt
);
3539 if (fmt_str
== NULL
)
3542 tree dest
= gimple_call_arg (stmt
, 0);
3544 if (!init_target_chars ())
3547 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3551 /* If the format doesn't contain % args or %%, use strcpy. */
3552 if (strchr (fmt_str
, target_percent
) == NULL
)
3554 /* Don't optimize sprintf (buf, "abc", ptr++). */
3558 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3559 'format' is known to contain no % formats. */
3560 gimple_seq stmts
= NULL
;
3561 gimple
*repl
= gimple_build_call (fn
, 2, dest
, fmt
);
3563 /* Propagate the NO_WARNING bit to avoid issuing the same
3564 warning more than once. */
3565 copy_warning (repl
, stmt
);
3567 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3568 if (tree lhs
= gimple_call_lhs (stmt
))
3570 repl
= gimple_build_assign (lhs
, build_int_cst (TREE_TYPE (lhs
),
3572 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3573 gsi_replace_with_seq_vops (gsi
, stmts
);
3574 /* gsi now points at the assignment to the lhs, get a
3575 stmt iterator to the memcpy call.
3576 ??? We can't use gsi_for_stmt as that doesn't work when the
3577 CFG isn't built yet. */
3578 gimple_stmt_iterator gsi2
= *gsi
;
3584 gsi_replace_with_seq_vops (gsi
, stmts
);
3590 /* If the format is "%s", use strcpy if the result isn't used. */
3591 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
3593 /* Don't crash on sprintf (str1, "%s"). */
3597 /* Don't fold calls with source arguments of invalid (nonpointer)
3599 if (!POINTER_TYPE_P (TREE_TYPE (orig
)))
3602 tree orig_len
= NULL_TREE
;
3603 if (gimple_call_lhs (stmt
))
3605 orig_len
= get_maxval_strlen (orig
, SRK_STRLEN
);
3610 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3611 gimple_seq stmts
= NULL
;
3612 gimple
*repl
= gimple_build_call (fn
, 2, dest
, orig
);
3614 /* Propagate the NO_WARNING bit to avoid issuing the same
3615 warning more than once. */
3616 copy_warning (repl
, stmt
);
3618 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3619 if (tree lhs
= gimple_call_lhs (stmt
))
3621 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
3622 TREE_TYPE (orig_len
)))
3623 orig_len
= fold_convert (TREE_TYPE (lhs
), orig_len
);
3624 repl
= gimple_build_assign (lhs
, orig_len
);
3625 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3626 gsi_replace_with_seq_vops (gsi
, stmts
);
3627 /* gsi now points at the assignment to the lhs, get a
3628 stmt iterator to the memcpy call.
3629 ??? We can't use gsi_for_stmt as that doesn't work when the
3630 CFG isn't built yet. */
3631 gimple_stmt_iterator gsi2
= *gsi
;
3637 gsi_replace_with_seq_vops (gsi
, stmts
);
3645 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3646 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3647 attempt to simplify calls with more than 4 arguments.
3649 Return true if simplification was possible, otherwise false. */
3652 gimple_fold_builtin_snprintf (gimple_stmt_iterator
*gsi
)
3654 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3655 tree dest
= gimple_call_arg (stmt
, 0);
3656 tree destsize
= gimple_call_arg (stmt
, 1);
3657 tree fmt
= gimple_call_arg (stmt
, 2);
3658 tree orig
= NULL_TREE
;
3659 const char *fmt_str
= NULL
;
3661 if (gimple_call_num_args (stmt
) > 4)
3664 if (gimple_call_num_args (stmt
) == 4)
3665 orig
= gimple_call_arg (stmt
, 3);
3667 if (!tree_fits_uhwi_p (destsize
))
3669 unsigned HOST_WIDE_INT destlen
= tree_to_uhwi (destsize
);
3671 /* Check whether the format is a literal string constant. */
3672 fmt_str
= c_getstr (fmt
);
3673 if (fmt_str
== NULL
)
3676 if (!init_target_chars ())
3679 /* If the format doesn't contain % args or %%, use strcpy. */
3680 if (strchr (fmt_str
, target_percent
) == NULL
)
3682 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3686 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3690 /* We could expand this as
3691 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3693 memcpy (str, fmt_with_nul_at_cstm1, cst);
3694 but in the former case that might increase code size
3695 and in the latter case grow .rodata section too much.
3697 size_t len
= strlen (fmt_str
);
3701 gimple_seq stmts
= NULL
;
3702 gimple
*repl
= gimple_build_call (fn
, 2, dest
, fmt
);
3703 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3704 if (tree lhs
= gimple_call_lhs (stmt
))
3706 repl
= gimple_build_assign (lhs
,
3707 build_int_cst (TREE_TYPE (lhs
), len
));
3708 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3709 gsi_replace_with_seq_vops (gsi
, stmts
);
3710 /* gsi now points at the assignment to the lhs, get a
3711 stmt iterator to the memcpy call.
3712 ??? We can't use gsi_for_stmt as that doesn't work when the
3713 CFG isn't built yet. */
3714 gimple_stmt_iterator gsi2
= *gsi
;
3720 gsi_replace_with_seq_vops (gsi
, stmts
);
3726 /* If the format is "%s", use strcpy if the result isn't used. */
3727 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
3729 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3733 /* Don't crash on snprintf (str1, cst, "%s"). */
3737 tree orig_len
= get_maxval_strlen (orig
, SRK_STRLEN
);
3738 if (!orig_len
|| TREE_CODE (orig_len
) != INTEGER_CST
)
3741 /* We could expand this as
3742 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3744 memcpy (str1, str2_with_nul_at_cstm1, cst);
3745 but in the former case that might increase code size
3746 and in the latter case grow .rodata section too much.
3748 if (compare_tree_int (orig_len
, destlen
) >= 0)
3751 /* Convert snprintf (str1, cst, "%s", str2) into
3752 strcpy (str1, str2) if strlen (str2) < cst. */
3753 gimple_seq stmts
= NULL
;
3754 gimple
*repl
= gimple_build_call (fn
, 2, dest
, orig
);
3755 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3756 if (tree lhs
= gimple_call_lhs (stmt
))
3758 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
3759 TREE_TYPE (orig_len
)))
3760 orig_len
= fold_convert (TREE_TYPE (lhs
), orig_len
);
3761 repl
= gimple_build_assign (lhs
, orig_len
);
3762 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3763 gsi_replace_with_seq_vops (gsi
, stmts
);
3764 /* gsi now points at the assignment to the lhs, get a
3765 stmt iterator to the memcpy call.
3766 ??? We can't use gsi_for_stmt as that doesn't work when the
3767 CFG isn't built yet. */
3768 gimple_stmt_iterator gsi2
= *gsi
;
3774 gsi_replace_with_seq_vops (gsi
, stmts
);
3782 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3783 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3784 more than 3 arguments, and ARG may be null in the 2-argument case.
3786 Return NULL_TREE if no simplification was possible, otherwise return the
3787 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3788 code of the function to be simplified. */
3791 gimple_fold_builtin_fprintf (gimple_stmt_iterator
*gsi
,
3792 tree fp
, tree fmt
, tree arg
,
3793 enum built_in_function fcode
)
3795 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3796 tree fn_fputc
, fn_fputs
;
3797 const char *fmt_str
= NULL
;
3799 /* If the return value is used, don't do the transformation. */
3800 if (gimple_call_lhs (stmt
) != NULL_TREE
)
3803 /* Check whether the format is a literal string constant. */
3804 fmt_str
= c_getstr (fmt
);
3805 if (fmt_str
== NULL
)
3808 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
3810 /* If we're using an unlocked function, assume the other
3811 unlocked functions exist explicitly. */
3812 fn_fputc
= builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
);
3813 fn_fputs
= builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED
);
3817 fn_fputc
= builtin_decl_implicit (BUILT_IN_FPUTC
);
3818 fn_fputs
= builtin_decl_implicit (BUILT_IN_FPUTS
);
3821 if (!init_target_chars ())
3824 /* If the format doesn't contain % args or %%, use strcpy. */
3825 if (strchr (fmt_str
, target_percent
) == NULL
)
3827 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
3831 /* If the format specifier was "", fprintf does nothing. */
3832 if (fmt_str
[0] == '\0')
3834 replace_call_with_value (gsi
, NULL_TREE
);
3838 /* When "string" doesn't contain %, replace all cases of
3839 fprintf (fp, string) with fputs (string, fp). The fputs
3840 builtin will take care of special cases like length == 1. */
3843 gcall
*repl
= gimple_build_call (fn_fputs
, 2, fmt
, fp
);
3844 replace_call_with_call_and_fold (gsi
, repl
);
3849 /* The other optimizations can be done only on the non-va_list variants. */
3850 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
3853 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3854 else if (strcmp (fmt_str
, target_percent_s
) == 0)
3856 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
3860 gcall
*repl
= gimple_build_call (fn_fputs
, 2, arg
, fp
);
3861 replace_call_with_call_and_fold (gsi
, repl
);
3866 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3867 else if (strcmp (fmt_str
, target_percent_c
) == 0)
3870 || ! useless_type_conversion_p (integer_type_node
, TREE_TYPE (arg
)))
3874 gcall
*repl
= gimple_build_call (fn_fputc
, 2, arg
, fp
);
3875 replace_call_with_call_and_fold (gsi
, repl
);
3883 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3884 FMT and ARG are the arguments to the call; we don't fold cases with
3885 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3887 Return NULL_TREE if no simplification was possible, otherwise return the
3888 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3889 code of the function to be simplified. */
3892 gimple_fold_builtin_printf (gimple_stmt_iterator
*gsi
, tree fmt
,
3893 tree arg
, enum built_in_function fcode
)
3895 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3896 tree fn_putchar
, fn_puts
, newarg
;
3897 const char *fmt_str
= NULL
;
3899 /* If the return value is used, don't do the transformation. */
3900 if (gimple_call_lhs (stmt
) != NULL_TREE
)
3903 /* Check whether the format is a literal string constant. */
3904 fmt_str
= c_getstr (fmt
);
3905 if (fmt_str
== NULL
)
3908 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
3910 /* If we're using an unlocked function, assume the other
3911 unlocked functions exist explicitly. */
3912 fn_putchar
= builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED
);
3913 fn_puts
= builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED
);
3917 fn_putchar
= builtin_decl_implicit (BUILT_IN_PUTCHAR
);
3918 fn_puts
= builtin_decl_implicit (BUILT_IN_PUTS
);
3921 if (!init_target_chars ())
3924 if (strcmp (fmt_str
, target_percent_s
) == 0
3925 || strchr (fmt_str
, target_percent
) == NULL
)
3929 if (strcmp (fmt_str
, target_percent_s
) == 0)
3931 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
3934 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
3937 str
= c_getstr (arg
);
3943 /* The format specifier doesn't contain any '%' characters. */
3944 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
3950 /* If the string was "", printf does nothing. */
3953 replace_call_with_value (gsi
, NULL_TREE
);
3957 /* If the string has length of 1, call putchar. */
3960 /* Given printf("c"), (where c is any one character,)
3961 convert "c"[0] to an int and pass that to the replacement
3963 newarg
= build_int_cst (integer_type_node
, str
[0]);
3966 gcall
*repl
= gimple_build_call (fn_putchar
, 1, newarg
);
3967 replace_call_with_call_and_fold (gsi
, repl
);
3973 /* If the string was "string\n", call puts("string"). */
3974 size_t len
= strlen (str
);
3975 if ((unsigned char)str
[len
- 1] == target_newline
3976 && (size_t) (int) len
== len
3981 /* Create a NUL-terminated string that's one char shorter
3982 than the original, stripping off the trailing '\n'. */
3983 newstr
= xstrdup (str
);
3984 newstr
[len
- 1] = '\0';
3985 newarg
= build_string_literal (len
, newstr
);
3989 gcall
*repl
= gimple_build_call (fn_puts
, 1, newarg
);
3990 replace_call_with_call_and_fold (gsi
, repl
);
3995 /* We'd like to arrange to call fputs(string,stdout) here,
3996 but we need stdout and don't have a way to get it yet. */
4001 /* The other optimizations can be done only on the non-va_list variants. */
4002 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
4005 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
4006 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
4008 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
4012 gcall
*repl
= gimple_build_call (fn_puts
, 1, arg
);
4013 replace_call_with_call_and_fold (gsi
, repl
);
4018 /* If the format specifier was "%c", call __builtin_putchar(arg). */
4019 else if (strcmp (fmt_str
, target_percent_c
) == 0)
4021 if (!arg
|| ! useless_type_conversion_p (integer_type_node
,
4026 gcall
*repl
= gimple_build_call (fn_putchar
, 1, arg
);
4027 replace_call_with_call_and_fold (gsi
, repl
);
4037 /* Fold a call to __builtin_strlen with known length LEN. */
4040 gimple_fold_builtin_strlen (gimple_stmt_iterator
*gsi
)
4042 gimple
*stmt
= gsi_stmt (*gsi
);
4043 tree arg
= gimple_call_arg (stmt
, 0);
4048 c_strlen_data lendata
= { };
4049 if (get_range_strlen (arg
, &lendata
, /* eltsize = */ 1)
4051 && lendata
.minlen
&& TREE_CODE (lendata
.minlen
) == INTEGER_CST
4052 && lendata
.maxlen
&& TREE_CODE (lendata
.maxlen
) == INTEGER_CST
)
4054 /* The range of lengths refers to either a single constant
4055 string or to the longest and shortest constant string
4056 referenced by the argument of the strlen() call, or to
4057 the strings that can possibly be stored in the arrays
4058 the argument refers to. */
4059 minlen
= wi::to_wide (lendata
.minlen
);
4060 maxlen
= wi::to_wide (lendata
.maxlen
);
4064 unsigned prec
= TYPE_PRECISION (sizetype
);
4066 minlen
= wi::shwi (0, prec
);
4067 maxlen
= wi::to_wide (max_object_size (), prec
) - 2;
4070 if (minlen
== maxlen
)
4072 /* Fold the strlen call to a constant. */
4073 tree type
= TREE_TYPE (lendata
.minlen
);
4074 tree len
= force_gimple_operand_gsi (gsi
,
4075 wide_int_to_tree (type
, minlen
),
4076 true, NULL
, true, GSI_SAME_STMT
);
4077 replace_call_with_value (gsi
, len
);
4081 /* Set the strlen() range to [0, MAXLEN]. */
4082 if (tree lhs
= gimple_call_lhs (stmt
))
4083 set_strlen_range (lhs
, minlen
, maxlen
);
4088 /* Fold a call to __builtin_acc_on_device. */
4091 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator
*gsi
, tree arg0
)
4093 /* Defer folding until we know which compiler we're in. */
4094 if (symtab
->state
!= EXPANSION
)
4097 unsigned val_host
= GOMP_DEVICE_HOST
;
4098 unsigned val_dev
= GOMP_DEVICE_NONE
;
4100 #ifdef ACCEL_COMPILER
4101 val_host
= GOMP_DEVICE_NOT_HOST
;
4102 val_dev
= ACCEL_COMPILER_acc_device
;
4105 location_t loc
= gimple_location (gsi_stmt (*gsi
));
4107 tree host_eq
= make_ssa_name (boolean_type_node
);
4108 gimple
*host_ass
= gimple_build_assign
4109 (host_eq
, EQ_EXPR
, arg0
, build_int_cst (TREE_TYPE (arg0
), val_host
));
4110 gimple_set_location (host_ass
, loc
);
4111 gsi_insert_before (gsi
, host_ass
, GSI_SAME_STMT
);
4113 tree dev_eq
= make_ssa_name (boolean_type_node
);
4114 gimple
*dev_ass
= gimple_build_assign
4115 (dev_eq
, EQ_EXPR
, arg0
, build_int_cst (TREE_TYPE (arg0
), val_dev
));
4116 gimple_set_location (dev_ass
, loc
);
4117 gsi_insert_before (gsi
, dev_ass
, GSI_SAME_STMT
);
4119 tree result
= make_ssa_name (boolean_type_node
);
4120 gimple
*result_ass
= gimple_build_assign
4121 (result
, BIT_IOR_EXPR
, host_eq
, dev_eq
);
4122 gimple_set_location (result_ass
, loc
);
4123 gsi_insert_before (gsi
, result_ass
, GSI_SAME_STMT
);
4125 replace_call_with_value (gsi
, result
);
4130 /* Fold realloc (0, n) -> malloc (n). */
4133 gimple_fold_builtin_realloc (gimple_stmt_iterator
*gsi
)
4135 gimple
*stmt
= gsi_stmt (*gsi
);
4136 tree arg
= gimple_call_arg (stmt
, 0);
4137 tree size
= gimple_call_arg (stmt
, 1);
4139 if (operand_equal_p (arg
, null_pointer_node
, 0))
4141 tree fn_malloc
= builtin_decl_implicit (BUILT_IN_MALLOC
);
4144 gcall
*repl
= gimple_build_call (fn_malloc
, 1, size
);
4145 replace_call_with_call_and_fold (gsi
, repl
);
4152 /* Number of bytes into which any type but aggregate or vector types
4154 static constexpr size_t clear_padding_unit
4155 = MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
;
4156 /* Buffer size on which __builtin_clear_padding folding code works. */
4157 static const size_t clear_padding_buf_size
= 32 * clear_padding_unit
;
4159 /* Data passed through __builtin_clear_padding folding. */
4160 struct clear_padding_struct
{
4162 /* 0 during __builtin_clear_padding folding, nonzero during
4163 clear_type_padding_in_mask. In that case, instead of clearing the
4164 non-padding bits in union_ptr array clear the padding bits in there. */
4168 gimple_stmt_iterator
*gsi
;
4169 /* Alignment of buf->base + 0. */
4171 /* Offset from buf->base. Should be always a multiple of UNITS_PER_WORD. */
4173 /* Number of padding bytes before buf->off that don't have padding clear
4174 code emitted yet. */
4175 HOST_WIDE_INT padding_bytes
;
4176 /* The size of the whole object. Never emit code to touch
4177 buf->base + buf->sz or following bytes. */
4179 /* Number of bytes recorded in buf->buf. */
4181 /* When inside union, instead of emitting code we and bits inside of
4182 the union_ptr array. */
4183 unsigned char *union_ptr
;
4184 /* Set bits mean padding bits that need to be cleared by the builtin. */
4185 unsigned char buf
[clear_padding_buf_size
+ clear_padding_unit
];
4188 /* Emit code to clear padding requested in BUF->buf - set bits
4189 in there stand for padding that should be cleared. FULL is true
4190 if everything from the buffer should be flushed, otherwise
4191 it can leave up to 2 * clear_padding_unit bytes for further
4195 clear_padding_flush (clear_padding_struct
*buf
, bool full
)
4197 gcc_assert ((clear_padding_unit
% UNITS_PER_WORD
) == 0);
4198 if (!full
&& buf
->size
< 2 * clear_padding_unit
)
4200 gcc_assert ((buf
->off
% UNITS_PER_WORD
) == 0);
4201 size_t end
= buf
->size
;
4203 end
= ((end
- clear_padding_unit
- 1) / clear_padding_unit
4204 * clear_padding_unit
);
4205 size_t padding_bytes
= buf
->padding_bytes
;
4208 if (buf
->clear_in_mask
)
4210 /* During clear_type_padding_in_mask, clear the padding
4211 bits set in buf->buf in the buf->union_ptr mask. */
4212 for (size_t i
= 0; i
< end
; i
++)
4214 if (buf
->buf
[i
] == (unsigned char) ~0)
4218 memset (&buf
->union_ptr
[buf
->off
+ i
- padding_bytes
],
4221 buf
->union_ptr
[buf
->off
+ i
] &= ~buf
->buf
[i
];
4226 memset (&buf
->union_ptr
[buf
->off
+ end
- padding_bytes
],
4230 buf
->padding_bytes
= 0;
4234 memmove (buf
->buf
, buf
->buf
+ end
, buf
->size
- end
);
4237 buf
->padding_bytes
= padding_bytes
;
4241 /* Inside of a union, instead of emitting any code, instead
4242 clear all bits in the union_ptr buffer that are clear
4243 in buf. Whole padding bytes don't clear anything. */
4244 for (size_t i
= 0; i
< end
; i
++)
4246 if (buf
->buf
[i
] == (unsigned char) ~0)
4251 buf
->union_ptr
[buf
->off
+ i
] &= buf
->buf
[i
];
4258 buf
->padding_bytes
= 0;
4262 memmove (buf
->buf
, buf
->buf
+ end
, buf
->size
- end
);
4265 buf
->padding_bytes
= padding_bytes
;
4269 size_t wordsize
= UNITS_PER_WORD
;
4270 for (size_t i
= 0; i
< end
; i
+= wordsize
)
4272 size_t nonzero_first
= wordsize
;
4273 size_t nonzero_last
= 0;
4274 size_t zero_first
= wordsize
;
4275 size_t zero_last
= 0;
4276 bool all_ones
= true, bytes_only
= true;
4277 if ((unsigned HOST_WIDE_INT
) (buf
->off
+ i
+ wordsize
)
4278 > (unsigned HOST_WIDE_INT
) buf
->sz
)
4280 gcc_assert (wordsize
> 1);
4285 for (size_t j
= i
; j
< i
+ wordsize
&& j
< end
; j
++)
4289 if (nonzero_first
== wordsize
)
4291 nonzero_first
= j
- i
;
4292 nonzero_last
= j
- i
;
4294 if (nonzero_last
!= j
- i
)
4296 nonzero_last
= j
+ 1 - i
;
4300 if (zero_first
== wordsize
)
4302 zero_last
= j
+ 1 - i
;
4304 if (buf
->buf
[j
] != 0 && buf
->buf
[j
] != (unsigned char) ~0)
4310 size_t padding_end
= i
;
4313 if (nonzero_first
== 0
4314 && nonzero_last
== wordsize
4317 /* All bits are padding and we had some padding
4318 before too. Just extend it. */
4319 padding_bytes
+= wordsize
;
4322 if (all_ones
&& nonzero_first
== 0)
4324 padding_bytes
+= nonzero_last
;
4325 padding_end
+= nonzero_last
;
4326 nonzero_first
= wordsize
;
4329 else if (bytes_only
&& nonzero_first
== 0)
4331 gcc_assert (zero_first
&& zero_first
!= wordsize
);
4332 padding_bytes
+= zero_first
;
4333 padding_end
+= zero_first
;
4336 if (padding_bytes
== 1)
4338 atype
= char_type_node
;
4339 src
= build_zero_cst (char_type_node
);
4343 atype
= build_array_type_nelts (char_type_node
, padding_bytes
);
4344 src
= build_constructor (atype
, NULL
);
4346 tree dst
= build2_loc (buf
->loc
, MEM_REF
, atype
, buf
->base
,
4347 build_int_cst (buf
->alias_type
,
4348 buf
->off
+ padding_end
4350 gimple
*g
= gimple_build_assign (dst
, src
);
4351 gimple_set_location (g
, buf
->loc
);
4352 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4354 buf
->padding_bytes
= 0;
4356 if (nonzero_first
== wordsize
)
4357 /* All bits in a word are 0, there are no padding bits. */
4359 if (all_ones
&& nonzero_last
== wordsize
)
4361 /* All bits between nonzero_first and end of word are padding
4362 bits, start counting padding_bytes. */
4363 padding_bytes
= nonzero_last
- nonzero_first
;
4368 /* If bitfields aren't involved in this word, prefer storing
4369 individual bytes or groups of them over performing a RMW
4370 operation on the whole word. */
4371 gcc_assert (i
+ zero_last
<= end
);
4372 for (size_t j
= padding_end
; j
< i
+ zero_last
; j
++)
4377 for (k
= j
; k
< i
+ zero_last
; k
++)
4378 if (buf
->buf
[k
] == 0)
4380 HOST_WIDE_INT off
= buf
->off
+ j
;
4384 atype
= char_type_node
;
4385 src
= build_zero_cst (char_type_node
);
4389 atype
= build_array_type_nelts (char_type_node
, k
- j
);
4390 src
= build_constructor (atype
, NULL
);
4392 tree dst
= build2_loc (buf
->loc
, MEM_REF
, atype
,
4394 build_int_cst (buf
->alias_type
, off
));
4395 gimple
*g
= gimple_build_assign (dst
, src
);
4396 gimple_set_location (g
, buf
->loc
);
4397 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4401 if (nonzero_last
== wordsize
)
4402 padding_bytes
= nonzero_last
- zero_last
;
4405 for (size_t eltsz
= 1; eltsz
<= wordsize
; eltsz
<<= 1)
4407 if (nonzero_last
- nonzero_first
<= eltsz
4408 && ((nonzero_first
& ~(eltsz
- 1))
4409 == ((nonzero_last
- 1) & ~(eltsz
- 1))))
4413 type
= char_type_node
;
4415 type
= lang_hooks
.types
.type_for_size (eltsz
* BITS_PER_UNIT
,
4417 size_t start
= nonzero_first
& ~(eltsz
- 1);
4418 HOST_WIDE_INT off
= buf
->off
+ i
+ start
;
4420 if (eltsz
> 1 && buf
->align
< TYPE_ALIGN (type
))
4421 atype
= build_aligned_type (type
, buf
->align
);
4422 tree dst
= build2_loc (buf
->loc
, MEM_REF
, atype
, buf
->base
,
4423 build_int_cst (buf
->alias_type
, off
));
4427 && nonzero_first
== start
4428 && nonzero_last
== start
+ eltsz
)
4429 src
= build_zero_cst (type
);
4432 src
= make_ssa_name (type
);
4433 g
= gimple_build_assign (src
, unshare_expr (dst
));
4434 gimple_set_location (g
, buf
->loc
);
4435 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4436 tree mask
= native_interpret_expr (type
,
4437 buf
->buf
+ i
+ start
,
4439 gcc_assert (mask
&& TREE_CODE (mask
) == INTEGER_CST
);
4440 mask
= fold_build1 (BIT_NOT_EXPR
, type
, mask
);
4441 tree src_masked
= make_ssa_name (type
);
4442 g
= gimple_build_assign (src_masked
, BIT_AND_EXPR
,
4444 gimple_set_location (g
, buf
->loc
);
4445 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4448 g
= gimple_build_assign (dst
, src
);
4449 gimple_set_location (g
, buf
->loc
);
4450 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4460 if (padding_bytes
== 1)
4462 atype
= char_type_node
;
4463 src
= build_zero_cst (char_type_node
);
4467 atype
= build_array_type_nelts (char_type_node
, padding_bytes
);
4468 src
= build_constructor (atype
, NULL
);
4470 tree dst
= build2_loc (buf
->loc
, MEM_REF
, atype
, buf
->base
,
4471 build_int_cst (buf
->alias_type
,
4474 gimple
*g
= gimple_build_assign (dst
, src
);
4475 gimple_set_location (g
, buf
->loc
);
4476 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4478 size_t end_rem
= end
% UNITS_PER_WORD
;
4479 buf
->off
+= end
- end_rem
;
4480 buf
->size
= end_rem
;
4481 memset (buf
->buf
, 0, buf
->size
);
4482 buf
->padding_bytes
= 0;
4486 memmove (buf
->buf
, buf
->buf
+ end
, buf
->size
- end
);
4489 buf
->padding_bytes
= padding_bytes
;
4493 /* Append PADDING_BYTES padding bytes. */
4496 clear_padding_add_padding (clear_padding_struct
*buf
,
4497 HOST_WIDE_INT padding_bytes
)
4499 if (padding_bytes
== 0)
4501 if ((unsigned HOST_WIDE_INT
) padding_bytes
+ buf
->size
4502 > (unsigned HOST_WIDE_INT
) clear_padding_buf_size
)
4503 clear_padding_flush (buf
, false);
4504 if ((unsigned HOST_WIDE_INT
) padding_bytes
+ buf
->size
4505 > (unsigned HOST_WIDE_INT
) clear_padding_buf_size
)
4507 memset (buf
->buf
+ buf
->size
, ~0, clear_padding_buf_size
- buf
->size
);
4508 padding_bytes
-= clear_padding_buf_size
- buf
->size
;
4509 buf
->size
= clear_padding_buf_size
;
4510 clear_padding_flush (buf
, false);
4511 gcc_assert (buf
->padding_bytes
);
4512 /* At this point buf->buf[0] through buf->buf[buf->size - 1]
4513 is guaranteed to be all ones. */
4514 padding_bytes
+= buf
->size
;
4515 buf
->size
= padding_bytes
% UNITS_PER_WORD
;
4516 memset (buf
->buf
, ~0, buf
->size
);
4517 buf
->off
+= padding_bytes
- buf
->size
;
4518 buf
->padding_bytes
+= padding_bytes
- buf
->size
;
4522 memset (buf
->buf
+ buf
->size
, ~0, padding_bytes
);
4523 buf
->size
+= padding_bytes
;
4527 static void clear_padding_type (clear_padding_struct
*, tree
,
4528 HOST_WIDE_INT
, bool);
4530 /* Clear padding bits of union type TYPE. */
4533 clear_padding_union (clear_padding_struct
*buf
, tree type
,
4534 HOST_WIDE_INT sz
, bool for_auto_init
)
4536 clear_padding_struct
*union_buf
;
4537 HOST_WIDE_INT start_off
= 0, next_off
= 0;
4538 size_t start_size
= 0;
4541 start_off
= buf
->off
+ buf
->size
;
4542 next_off
= start_off
+ sz
;
4543 start_size
= start_off
% UNITS_PER_WORD
;
4544 start_off
-= start_size
;
4545 clear_padding_flush (buf
, true);
4550 if (sz
+ buf
->size
> clear_padding_buf_size
)
4551 clear_padding_flush (buf
, false);
4552 union_buf
= XALLOCA (clear_padding_struct
);
4553 union_buf
->loc
= buf
->loc
;
4554 union_buf
->clear_in_mask
= buf
->clear_in_mask
;
4555 union_buf
->base
= NULL_TREE
;
4556 union_buf
->alias_type
= NULL_TREE
;
4557 union_buf
->gsi
= NULL
;
4558 union_buf
->align
= 0;
4560 union_buf
->padding_bytes
= 0;
4562 union_buf
->size
= 0;
4563 if (sz
+ buf
->size
<= clear_padding_buf_size
)
4564 union_buf
->union_ptr
= buf
->buf
+ buf
->size
;
4566 union_buf
->union_ptr
= XNEWVEC (unsigned char, sz
);
4567 memset (union_buf
->union_ptr
, ~0, sz
);
4570 for (tree field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
4571 if (TREE_CODE (field
) == FIELD_DECL
&& !DECL_PADDING_P (field
))
4573 if (DECL_SIZE_UNIT (field
) == NULL_TREE
)
4575 if (TREE_TYPE (field
) == error_mark_node
)
4577 gcc_assert (TREE_CODE (TREE_TYPE (field
)) == ARRAY_TYPE
4578 && !COMPLETE_TYPE_P (TREE_TYPE (field
)));
4579 if (!buf
->clear_in_mask
&& !for_auto_init
)
4580 error_at (buf
->loc
, "flexible array member %qD does not have "
4581 "well defined padding bits for %qs",
4582 field
, "__builtin_clear_padding");
4585 HOST_WIDE_INT fldsz
= tree_to_shwi (DECL_SIZE_UNIT (field
));
4586 gcc_assert (union_buf
->size
== 0);
4587 union_buf
->off
= start_off
;
4588 union_buf
->size
= start_size
;
4589 memset (union_buf
->buf
, ~0, start_size
);
4590 clear_padding_type (union_buf
, TREE_TYPE (field
), fldsz
, for_auto_init
);
4591 clear_padding_add_padding (union_buf
, sz
- fldsz
);
4592 clear_padding_flush (union_buf
, true);
4595 if (buf
== union_buf
)
4597 buf
->off
= next_off
;
4598 buf
->size
= next_off
% UNITS_PER_WORD
;
4599 buf
->off
-= buf
->size
;
4600 memset (buf
->buf
, ~0, buf
->size
);
4602 else if (sz
+ buf
->size
<= clear_padding_buf_size
)
4606 unsigned char *union_ptr
= union_buf
->union_ptr
;
4609 clear_padding_flush (buf
, false);
4610 HOST_WIDE_INT this_sz
4611 = MIN ((unsigned HOST_WIDE_INT
) sz
,
4612 clear_padding_buf_size
- buf
->size
);
4613 memcpy (buf
->buf
+ buf
->size
, union_ptr
, this_sz
);
4614 buf
->size
+= this_sz
;
4615 union_ptr
+= this_sz
;
4618 XDELETE (union_buf
->union_ptr
);
4622 /* The only known floating point formats with padding bits are the
4623 IEEE extended ones. */
4626 clear_padding_real_needs_padding_p (tree type
)
4628 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
4630 && fmt
->signbit_ro
== fmt
->signbit_rw
4631 && (fmt
->signbit_ro
== 79 || fmt
->signbit_ro
== 95));
4634 /* Return true if TYPE might contain any padding bits. */
4637 clear_padding_type_may_have_padding_p (tree type
)
4639 switch (TREE_CODE (type
))
4647 return clear_padding_type_may_have_padding_p (TREE_TYPE (type
));
4649 return clear_padding_real_needs_padding_p (type
);
4655 /* Emit a runtime loop:
4656 for (; buf.base != end; buf.base += sz)
4657 __builtin_clear_padding (buf.base); */
4660 clear_padding_emit_loop (clear_padding_struct
*buf
, tree type
,
4661 tree end
, bool for_auto_init
)
4663 tree l1
= create_artificial_label (buf
->loc
);
4664 tree l2
= create_artificial_label (buf
->loc
);
4665 tree l3
= create_artificial_label (buf
->loc
);
4666 gimple
*g
= gimple_build_goto (l2
);
4667 gimple_set_location (g
, buf
->loc
);
4668 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4669 g
= gimple_build_label (l1
);
4670 gimple_set_location (g
, buf
->loc
);
4671 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4672 clear_padding_type (buf
, type
, buf
->sz
, for_auto_init
);
4673 clear_padding_flush (buf
, true);
4674 g
= gimple_build_assign (buf
->base
, POINTER_PLUS_EXPR
, buf
->base
,
4675 size_int (buf
->sz
));
4676 gimple_set_location (g
, buf
->loc
);
4677 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4678 g
= gimple_build_label (l2
);
4679 gimple_set_location (g
, buf
->loc
);
4680 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4681 g
= gimple_build_cond (NE_EXPR
, buf
->base
, end
, l1
, l3
);
4682 gimple_set_location (g
, buf
->loc
);
4683 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4684 g
= gimple_build_label (l3
);
4685 gimple_set_location (g
, buf
->loc
);
4686 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4689 /* Clear padding bits for TYPE. Called recursively from
4690 gimple_fold_builtin_clear_padding. If FOR_AUTO_INIT is true,
4691 the __builtin_clear_padding is not called by the end user,
4692 instead, it's inserted by the compiler to initialize the
4693 paddings of automatic variable. Therefore, we should not
4694 emit the error messages for flexible array members to confuse
4698 clear_padding_type (clear_padding_struct
*buf
, tree type
,
4699 HOST_WIDE_INT sz
, bool for_auto_init
)
4701 switch (TREE_CODE (type
))
4704 HOST_WIDE_INT cur_pos
;
4706 for (tree field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
4707 if (TREE_CODE (field
) == FIELD_DECL
&& !DECL_PADDING_P (field
))
4709 tree ftype
= TREE_TYPE (field
);
4710 if (DECL_BIT_FIELD (field
))
4712 HOST_WIDE_INT fldsz
= TYPE_PRECISION (ftype
);
4715 HOST_WIDE_INT pos
= int_byte_position (field
);
4719 = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
));
4720 bpos
%= BITS_PER_UNIT
;
4722 = ROUND_UP (bpos
+ fldsz
, BITS_PER_UNIT
) / BITS_PER_UNIT
;
4723 if (pos
+ end
> cur_pos
)
4725 clear_padding_add_padding (buf
, pos
+ end
- cur_pos
);
4726 cur_pos
= pos
+ end
;
4728 gcc_assert (cur_pos
> pos
4729 && ((unsigned HOST_WIDE_INT
) buf
->size
4730 >= (unsigned HOST_WIDE_INT
) cur_pos
- pos
));
4731 unsigned char *p
= buf
->buf
+ buf
->size
- (cur_pos
- pos
);
4732 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
)
4733 sorry_at (buf
->loc
, "PDP11 bit-field handling unsupported"
4734 " in %qs", "__builtin_clear_padding");
4735 else if (BYTES_BIG_ENDIAN
)
4738 if (bpos
+ fldsz
<= BITS_PER_UNIT
)
4739 *p
&= ~(((1 << fldsz
) - 1)
4740 << (BITS_PER_UNIT
- bpos
- fldsz
));
4745 *p
&= ~(((1U << BITS_PER_UNIT
) - 1) >> bpos
);
4747 fldsz
-= BITS_PER_UNIT
- bpos
;
4749 memset (p
, 0, fldsz
/ BITS_PER_UNIT
);
4750 p
+= fldsz
/ BITS_PER_UNIT
;
4751 fldsz
%= BITS_PER_UNIT
;
4753 *p
&= ((1U << BITS_PER_UNIT
) - 1) >> fldsz
;
4758 /* Little endian. */
4759 if (bpos
+ fldsz
<= BITS_PER_UNIT
)
4760 *p
&= ~(((1 << fldsz
) - 1) << bpos
);
4765 *p
&= ~(((1 << BITS_PER_UNIT
) - 1) << bpos
);
4767 fldsz
-= BITS_PER_UNIT
- bpos
;
4769 memset (p
, 0, fldsz
/ BITS_PER_UNIT
);
4770 p
+= fldsz
/ BITS_PER_UNIT
;
4771 fldsz
%= BITS_PER_UNIT
;
4773 *p
&= ~((1 << fldsz
) - 1);
4777 else if (DECL_SIZE_UNIT (field
) == NULL_TREE
)
4779 if (ftype
== error_mark_node
)
4781 gcc_assert (TREE_CODE (ftype
) == ARRAY_TYPE
4782 && !COMPLETE_TYPE_P (ftype
));
4783 if (!buf
->clear_in_mask
&& !for_auto_init
)
4784 error_at (buf
->loc
, "flexible array member %qD does not "
4785 "have well defined padding bits for %qs",
4786 field
, "__builtin_clear_padding");
4788 else if (is_empty_type (TREE_TYPE (field
)))
4792 HOST_WIDE_INT pos
= int_byte_position (field
);
4795 HOST_WIDE_INT fldsz
= tree_to_shwi (DECL_SIZE_UNIT (field
));
4796 gcc_assert (pos
>= 0 && fldsz
>= 0 && pos
>= cur_pos
);
4797 clear_padding_add_padding (buf
, pos
- cur_pos
);
4799 clear_padding_type (buf
, TREE_TYPE (field
),
4800 fldsz
, for_auto_init
);
4804 gcc_assert (sz
>= cur_pos
);
4805 clear_padding_add_padding (buf
, sz
- cur_pos
);
4808 HOST_WIDE_INT nelts
, fldsz
;
4809 fldsz
= int_size_in_bytes (TREE_TYPE (type
));
4814 && sz
> 8 * UNITS_PER_WORD
4815 && buf
->union_ptr
== NULL
4816 && clear_padding_type_may_have_padding_p (TREE_TYPE (type
)))
4818 /* For sufficiently large array of more than one elements,
4819 emit a runtime loop to keep code size manageable. */
4820 tree base
= buf
->base
;
4821 unsigned int prev_align
= buf
->align
;
4822 HOST_WIDE_INT off
= buf
->off
+ buf
->size
;
4823 HOST_WIDE_INT prev_sz
= buf
->sz
;
4824 clear_padding_flush (buf
, true);
4825 tree elttype
= TREE_TYPE (type
);
4826 buf
->base
= create_tmp_var (build_pointer_type (elttype
));
4827 tree end
= make_ssa_name (TREE_TYPE (buf
->base
));
4828 gimple
*g
= gimple_build_assign (buf
->base
, POINTER_PLUS_EXPR
,
4829 base
, size_int (off
));
4830 gimple_set_location (g
, buf
->loc
);
4831 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4832 g
= gimple_build_assign (end
, POINTER_PLUS_EXPR
, buf
->base
,
4834 gimple_set_location (g
, buf
->loc
);
4835 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4837 buf
->align
= TYPE_ALIGN (elttype
);
4840 clear_padding_emit_loop (buf
, elttype
, end
, for_auto_init
);
4843 buf
->align
= prev_align
;
4844 buf
->size
= off
% UNITS_PER_WORD
;
4845 buf
->off
= off
- buf
->size
;
4846 memset (buf
->buf
, 0, buf
->size
);
4849 for (HOST_WIDE_INT i
= 0; i
< nelts
; i
++)
4850 clear_padding_type (buf
, TREE_TYPE (type
), fldsz
, for_auto_init
);
4853 clear_padding_union (buf
, type
, sz
, for_auto_init
);
4856 gcc_assert ((size_t) sz
<= clear_padding_unit
);
4857 if ((unsigned HOST_WIDE_INT
) sz
+ buf
->size
> clear_padding_buf_size
)
4858 clear_padding_flush (buf
, false);
4859 if (clear_padding_real_needs_padding_p (type
))
4861 /* Use native_interpret_expr + native_encode_expr to figure out
4862 which bits are padding. */
4863 memset (buf
->buf
+ buf
->size
, ~0, sz
);
4864 tree cst
= native_interpret_expr (type
, buf
->buf
+ buf
->size
, sz
);
4865 gcc_assert (cst
&& TREE_CODE (cst
) == REAL_CST
);
4866 int len
= native_encode_expr (cst
, buf
->buf
+ buf
->size
, sz
);
4867 gcc_assert (len
> 0 && (size_t) len
== (size_t) sz
);
4868 for (size_t i
= 0; i
< (size_t) sz
; i
++)
4869 buf
->buf
[buf
->size
+ i
] ^= ~0;
4872 memset (buf
->buf
+ buf
->size
, 0, sz
);
4876 fldsz
= int_size_in_bytes (TREE_TYPE (type
));
4877 clear_padding_type (buf
, TREE_TYPE (type
), fldsz
, for_auto_init
);
4878 clear_padding_type (buf
, TREE_TYPE (type
), fldsz
, for_auto_init
);
4881 nelts
= TYPE_VECTOR_SUBPARTS (type
).to_constant ();
4882 fldsz
= int_size_in_bytes (TREE_TYPE (type
));
4883 for (HOST_WIDE_INT i
= 0; i
< nelts
; i
++)
4884 clear_padding_type (buf
, TREE_TYPE (type
), fldsz
, for_auto_init
);
4887 gcc_assert ((size_t) sz
<= clear_padding_unit
);
4888 if ((unsigned HOST_WIDE_INT
) sz
+ buf
->size
> clear_padding_buf_size
)
4889 clear_padding_flush (buf
, false);
4890 memset (buf
->buf
+ buf
->size
, ~0, sz
);
4894 gcc_assert ((size_t) sz
<= clear_padding_unit
);
4895 if ((unsigned HOST_WIDE_INT
) sz
+ buf
->size
> clear_padding_buf_size
)
4896 clear_padding_flush (buf
, false);
4897 memset (buf
->buf
+ buf
->size
, 0, sz
);
4903 /* Clear padding bits of TYPE in MASK. */
4906 clear_type_padding_in_mask (tree type
, unsigned char *mask
)
4908 clear_padding_struct buf
;
4909 buf
.loc
= UNKNOWN_LOCATION
;
4910 buf
.clear_in_mask
= true;
4911 buf
.base
= NULL_TREE
;
4912 buf
.alias_type
= NULL_TREE
;
4916 buf
.padding_bytes
= 0;
4917 buf
.sz
= int_size_in_bytes (type
);
4919 buf
.union_ptr
= mask
;
4920 clear_padding_type (&buf
, type
, buf
.sz
, false);
4921 clear_padding_flush (&buf
, true);
4924 /* Fold __builtin_clear_padding builtin. */
4927 gimple_fold_builtin_clear_padding (gimple_stmt_iterator
*gsi
)
4929 gimple
*stmt
= gsi_stmt (*gsi
);
4930 gcc_assert (gimple_call_num_args (stmt
) == 3);
4931 tree ptr
= gimple_call_arg (stmt
, 0);
4932 tree typearg
= gimple_call_arg (stmt
, 1);
4933 /* the 3rd argument of __builtin_clear_padding is to distinguish whether
4934 this call is made by the user or by the compiler for automatic variable
4936 bool for_auto_init
= (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt
, 2));
4937 tree type
= TREE_TYPE (TREE_TYPE (typearg
));
4938 location_t loc
= gimple_location (stmt
);
4939 clear_padding_struct buf
;
4940 gimple_stmt_iterator gsiprev
= *gsi
;
4941 /* This should be folded during the lower pass. */
4942 gcc_assert (!gimple_in_ssa_p (cfun
) && cfun
->cfg
== NULL
);
4943 gcc_assert (COMPLETE_TYPE_P (type
));
4944 gsi_prev (&gsiprev
);
4947 buf
.clear_in_mask
= false;
4949 buf
.alias_type
= NULL_TREE
;
4951 buf
.align
= get_pointer_alignment (ptr
);
4952 unsigned int talign
= min_align_of_type (type
) * BITS_PER_UNIT
;
4953 buf
.align
= MAX (buf
.align
, talign
);
4955 buf
.padding_bytes
= 0;
4957 buf
.sz
= int_size_in_bytes (type
);
4958 buf
.union_ptr
= NULL
;
4959 if (buf
.sz
< 0 && int_size_in_bytes (strip_array_types (type
)) < 0)
4960 sorry_at (loc
, "%s not supported for variable length aggregates",
4961 "__builtin_clear_padding");
4962 /* The implementation currently assumes 8-bit host and target
4963 chars which is the case for all currently supported targets
4964 and hosts and is required e.g. for native_{encode,interpret}* APIs. */
4965 else if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
4966 sorry_at (loc
, "%s not supported on this target",
4967 "__builtin_clear_padding");
4968 else if (!clear_padding_type_may_have_padding_p (type
))
4970 else if (TREE_CODE (type
) == ARRAY_TYPE
&& buf
.sz
< 0)
4972 tree sz
= TYPE_SIZE_UNIT (type
);
4973 tree elttype
= type
;
4974 /* Only supports C/C++ VLAs and flattens all the VLA levels. */
4975 while (TREE_CODE (elttype
) == ARRAY_TYPE
4976 && int_size_in_bytes (elttype
) < 0)
4977 elttype
= TREE_TYPE (elttype
);
4978 HOST_WIDE_INT eltsz
= int_size_in_bytes (elttype
);
4979 gcc_assert (eltsz
>= 0);
4982 buf
.base
= create_tmp_var (build_pointer_type (elttype
));
4983 tree end
= make_ssa_name (TREE_TYPE (buf
.base
));
4984 gimple
*g
= gimple_build_assign (buf
.base
, ptr
);
4985 gimple_set_location (g
, loc
);
4986 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
4987 g
= gimple_build_assign (end
, POINTER_PLUS_EXPR
, buf
.base
, sz
);
4988 gimple_set_location (g
, loc
);
4989 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
4991 buf
.align
= TYPE_ALIGN (elttype
);
4992 buf
.alias_type
= build_pointer_type (elttype
);
4993 clear_padding_emit_loop (&buf
, elttype
, end
, for_auto_init
);
4998 if (!is_gimple_mem_ref_addr (buf
.base
))
5000 buf
.base
= make_ssa_name (TREE_TYPE (ptr
));
5001 gimple
*g
= gimple_build_assign (buf
.base
, ptr
);
5002 gimple_set_location (g
, loc
);
5003 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
5005 buf
.alias_type
= build_pointer_type (type
);
5006 clear_padding_type (&buf
, type
, buf
.sz
, for_auto_init
);
5007 clear_padding_flush (&buf
, true);
5010 gimple_stmt_iterator gsiprev2
= *gsi
;
5011 gsi_prev (&gsiprev2
);
5012 if (gsi_stmt (gsiprev
) == gsi_stmt (gsiprev2
))
5013 gsi_replace (gsi
, gimple_build_nop (), true);
5016 gsi_remove (gsi
, true);
5022 /* Fold the non-target builtin at *GSI and return whether any simplification
5026 gimple_fold_builtin (gimple_stmt_iterator
*gsi
)
5028 gcall
*stmt
= as_a
<gcall
*>(gsi_stmt (*gsi
));
5029 tree callee
= gimple_call_fndecl (stmt
);
5031 /* Give up for always_inline inline builtins until they are
5033 if (avoid_folding_inline_builtin (callee
))
5036 unsigned n
= gimple_call_num_args (stmt
);
5037 enum built_in_function fcode
= DECL_FUNCTION_CODE (callee
);
5041 return gimple_fold_builtin_bcmp (gsi
);
5042 case BUILT_IN_BCOPY
:
5043 return gimple_fold_builtin_bcopy (gsi
);
5044 case BUILT_IN_BZERO
:
5045 return gimple_fold_builtin_bzero (gsi
);
5047 case BUILT_IN_MEMSET
:
5048 return gimple_fold_builtin_memset (gsi
,
5049 gimple_call_arg (stmt
, 1),
5050 gimple_call_arg (stmt
, 2));
5051 case BUILT_IN_MEMCPY
:
5052 case BUILT_IN_MEMPCPY
:
5053 case BUILT_IN_MEMMOVE
:
5054 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
5055 gimple_call_arg (stmt
, 1), fcode
);
5056 case BUILT_IN_SPRINTF_CHK
:
5057 case BUILT_IN_VSPRINTF_CHK
:
5058 return gimple_fold_builtin_sprintf_chk (gsi
, fcode
);
5059 case BUILT_IN_STRCAT_CHK
:
5060 return gimple_fold_builtin_strcat_chk (gsi
);
5061 case BUILT_IN_STRNCAT_CHK
:
5062 return gimple_fold_builtin_strncat_chk (gsi
);
5063 case BUILT_IN_STRLEN
:
5064 return gimple_fold_builtin_strlen (gsi
);
5065 case BUILT_IN_STRCPY
:
5066 return gimple_fold_builtin_strcpy (gsi
,
5067 gimple_call_arg (stmt
, 0),
5068 gimple_call_arg (stmt
, 1));
5069 case BUILT_IN_STRNCPY
:
5070 return gimple_fold_builtin_strncpy (gsi
,
5071 gimple_call_arg (stmt
, 0),
5072 gimple_call_arg (stmt
, 1),
5073 gimple_call_arg (stmt
, 2));
5074 case BUILT_IN_STRCAT
:
5075 return gimple_fold_builtin_strcat (gsi
, gimple_call_arg (stmt
, 0),
5076 gimple_call_arg (stmt
, 1));
5077 case BUILT_IN_STRNCAT
:
5078 return gimple_fold_builtin_strncat (gsi
);
5079 case BUILT_IN_INDEX
:
5080 case BUILT_IN_STRCHR
:
5081 return gimple_fold_builtin_strchr (gsi
, false);
5082 case BUILT_IN_RINDEX
:
5083 case BUILT_IN_STRRCHR
:
5084 return gimple_fold_builtin_strchr (gsi
, true);
5085 case BUILT_IN_STRSTR
:
5086 return gimple_fold_builtin_strstr (gsi
);
5087 case BUILT_IN_STRCMP
:
5088 case BUILT_IN_STRCMP_EQ
:
5089 case BUILT_IN_STRCASECMP
:
5090 case BUILT_IN_STRNCMP
:
5091 case BUILT_IN_STRNCMP_EQ
:
5092 case BUILT_IN_STRNCASECMP
:
5093 return gimple_fold_builtin_string_compare (gsi
);
5094 case BUILT_IN_MEMCHR
:
5095 return gimple_fold_builtin_memchr (gsi
);
5096 case BUILT_IN_FPUTS
:
5097 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
5098 gimple_call_arg (stmt
, 1), false);
5099 case BUILT_IN_FPUTS_UNLOCKED
:
5100 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
5101 gimple_call_arg (stmt
, 1), true);
5102 case BUILT_IN_MEMCPY_CHK
:
5103 case BUILT_IN_MEMPCPY_CHK
:
5104 case BUILT_IN_MEMMOVE_CHK
:
5105 case BUILT_IN_MEMSET_CHK
:
5106 return gimple_fold_builtin_memory_chk (gsi
,
5107 gimple_call_arg (stmt
, 0),
5108 gimple_call_arg (stmt
, 1),
5109 gimple_call_arg (stmt
, 2),
5110 gimple_call_arg (stmt
, 3),
5112 case BUILT_IN_STPCPY
:
5113 return gimple_fold_builtin_stpcpy (gsi
);
5114 case BUILT_IN_STRCPY_CHK
:
5115 case BUILT_IN_STPCPY_CHK
:
5116 return gimple_fold_builtin_stxcpy_chk (gsi
,
5117 gimple_call_arg (stmt
, 0),
5118 gimple_call_arg (stmt
, 1),
5119 gimple_call_arg (stmt
, 2),
5121 case BUILT_IN_STRNCPY_CHK
:
5122 case BUILT_IN_STPNCPY_CHK
:
5123 return gimple_fold_builtin_stxncpy_chk (gsi
,
5124 gimple_call_arg (stmt
, 0),
5125 gimple_call_arg (stmt
, 1),
5126 gimple_call_arg (stmt
, 2),
5127 gimple_call_arg (stmt
, 3),
5129 case BUILT_IN_SNPRINTF_CHK
:
5130 case BUILT_IN_VSNPRINTF_CHK
:
5131 return gimple_fold_builtin_snprintf_chk (gsi
, fcode
);
5133 case BUILT_IN_FPRINTF
:
5134 case BUILT_IN_FPRINTF_UNLOCKED
:
5135 case BUILT_IN_VFPRINTF
:
5136 if (n
== 2 || n
== 3)
5137 return gimple_fold_builtin_fprintf (gsi
,
5138 gimple_call_arg (stmt
, 0),
5139 gimple_call_arg (stmt
, 1),
5141 ? gimple_call_arg (stmt
, 2)
5145 case BUILT_IN_FPRINTF_CHK
:
5146 case BUILT_IN_VFPRINTF_CHK
:
5147 if (n
== 3 || n
== 4)
5148 return gimple_fold_builtin_fprintf (gsi
,
5149 gimple_call_arg (stmt
, 0),
5150 gimple_call_arg (stmt
, 2),
5152 ? gimple_call_arg (stmt
, 3)
5156 case BUILT_IN_PRINTF
:
5157 case BUILT_IN_PRINTF_UNLOCKED
:
5158 case BUILT_IN_VPRINTF
:
5159 if (n
== 1 || n
== 2)
5160 return gimple_fold_builtin_printf (gsi
, gimple_call_arg (stmt
, 0),
5162 ? gimple_call_arg (stmt
, 1)
5163 : NULL_TREE
, fcode
);
5165 case BUILT_IN_PRINTF_CHK
:
5166 case BUILT_IN_VPRINTF_CHK
:
5167 if (n
== 2 || n
== 3)
5168 return gimple_fold_builtin_printf (gsi
, gimple_call_arg (stmt
, 1),
5170 ? gimple_call_arg (stmt
, 2)
5171 : NULL_TREE
, fcode
);
5173 case BUILT_IN_ACC_ON_DEVICE
:
5174 return gimple_fold_builtin_acc_on_device (gsi
,
5175 gimple_call_arg (stmt
, 0));
5176 case BUILT_IN_REALLOC
:
5177 return gimple_fold_builtin_realloc (gsi
);
5179 case BUILT_IN_CLEAR_PADDING
:
5180 return gimple_fold_builtin_clear_padding (gsi
);
5185 /* Try the generic builtin folder. */
5186 bool ignore
= (gimple_call_lhs (stmt
) == NULL
);
5187 tree result
= fold_call_stmt (stmt
, ignore
);
5191 STRIP_NOPS (result
);
5193 result
= fold_convert (gimple_call_return_type (stmt
), result
);
5194 gimplify_and_update_call_from_tree (gsi
, result
);
5201 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
5202 function calls to constants, where possible. */
5205 fold_internal_goacc_dim (const gimple
*call
)
5207 int axis
= oacc_get_ifn_dim_arg (call
);
5208 int size
= oacc_get_fn_dim_size (current_function_decl
, axis
);
5209 tree result
= NULL_TREE
;
5210 tree type
= TREE_TYPE (gimple_call_lhs (call
));
5212 switch (gimple_call_internal_fn (call
))
5214 case IFN_GOACC_DIM_POS
:
5215 /* If the size is 1, we know the answer. */
5217 result
= build_int_cst (type
, 0);
5219 case IFN_GOACC_DIM_SIZE
:
5220 /* If the size is not dynamic, we know the answer. */
5222 result
= build_int_cst (type
, size
);
5231 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
5232 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
5233 &var where var is only addressable because of such calls. */
5236 optimize_atomic_compare_exchange_p (gimple
*stmt
)
5238 if (gimple_call_num_args (stmt
) != 6
5239 || !flag_inline_atomics
5241 || sanitize_flags_p (SANITIZE_THREAD
| SANITIZE_ADDRESS
)
5242 || !gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
)
5243 || !gimple_vdef (stmt
)
5244 || !gimple_vuse (stmt
))
5247 tree fndecl
= gimple_call_fndecl (stmt
);
5248 switch (DECL_FUNCTION_CODE (fndecl
))
5250 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
5251 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
5252 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
5253 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
5254 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
5260 tree expected
= gimple_call_arg (stmt
, 1);
5261 if (TREE_CODE (expected
) != ADDR_EXPR
5262 || !SSA_VAR_P (TREE_OPERAND (expected
, 0)))
5265 tree etype
= TREE_TYPE (TREE_OPERAND (expected
, 0));
5266 if (!is_gimple_reg_type (etype
)
5267 || !auto_var_in_fn_p (TREE_OPERAND (expected
, 0), current_function_decl
)
5268 || TREE_THIS_VOLATILE (etype
)
5269 || VECTOR_TYPE_P (etype
)
5270 || TREE_CODE (etype
) == COMPLEX_TYPE
5271 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
5272 might not preserve all the bits. See PR71716. */
5273 || SCALAR_FLOAT_TYPE_P (etype
)
5274 || maybe_ne (TYPE_PRECISION (etype
),
5275 GET_MODE_BITSIZE (TYPE_MODE (etype
))))
5278 tree weak
= gimple_call_arg (stmt
, 3);
5279 if (!integer_zerop (weak
) && !integer_onep (weak
))
5282 tree parmt
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
5283 tree itype
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt
)));
5284 machine_mode mode
= TYPE_MODE (itype
);
5286 if (direct_optab_handler (atomic_compare_and_swap_optab
, mode
)
5288 && optab_handler (sync_compare_and_swap_optab
, mode
) == CODE_FOR_nothing
)
5291 if (maybe_ne (int_size_in_bytes (etype
), GET_MODE_SIZE (mode
)))
5298 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
5300 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
5301 i = IMAGPART_EXPR <t>;
5303 e = REALPART_EXPR <t>; */
5306 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator
*gsi
)
5308 gimple
*stmt
= gsi_stmt (*gsi
);
5309 tree fndecl
= gimple_call_fndecl (stmt
);
5310 tree parmt
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
5311 tree itype
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt
)));
5312 tree ctype
= build_complex_type (itype
);
5313 tree expected
= TREE_OPERAND (gimple_call_arg (stmt
, 1), 0);
5314 bool throws
= false;
5316 gimple
*g
= gimple_build_assign (make_ssa_name (TREE_TYPE (expected
)),
5318 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
5319 gimple_stmt_iterator gsiret
= gsi_for_stmt (g
);
5320 if (!useless_type_conversion_p (itype
, TREE_TYPE (expected
)))
5322 g
= gimple_build_assign (make_ssa_name (itype
), VIEW_CONVERT_EXPR
,
5323 build1 (VIEW_CONVERT_EXPR
, itype
,
5324 gimple_assign_lhs (g
)));
5325 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
5327 int flag
= (integer_onep (gimple_call_arg (stmt
, 3)) ? 256 : 0)
5328 + int_size_in_bytes (itype
);
5329 g
= gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE
, 6,
5330 gimple_call_arg (stmt
, 0),
5331 gimple_assign_lhs (g
),
5332 gimple_call_arg (stmt
, 2),
5333 build_int_cst (integer_type_node
, flag
),
5334 gimple_call_arg (stmt
, 4),
5335 gimple_call_arg (stmt
, 5));
5336 tree lhs
= make_ssa_name (ctype
);
5337 gimple_call_set_lhs (g
, lhs
);
5338 gimple_move_vops (g
, stmt
);
5339 tree oldlhs
= gimple_call_lhs (stmt
);
5340 if (stmt_can_throw_internal (cfun
, stmt
))
5343 e
= find_fallthru_edge (gsi_bb (*gsi
)->succs
);
5345 gimple_call_set_nothrow (as_a
<gcall
*> (g
),
5346 gimple_call_nothrow_p (as_a
<gcall
*> (stmt
)));
5347 gimple_call_set_lhs (stmt
, NULL_TREE
);
5348 gsi_replace (gsi
, g
, true);
5351 g
= gimple_build_assign (make_ssa_name (itype
), IMAGPART_EXPR
,
5352 build1 (IMAGPART_EXPR
, itype
, lhs
));
5355 gsi_insert_on_edge_immediate (e
, g
);
5356 *gsi
= gsi_for_stmt (g
);
5359 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
5360 g
= gimple_build_assign (oldlhs
, NOP_EXPR
, gimple_assign_lhs (g
));
5361 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
5363 g
= gimple_build_assign (make_ssa_name (itype
), REALPART_EXPR
,
5364 build1 (REALPART_EXPR
, itype
, lhs
));
5365 if (throws
&& oldlhs
== NULL_TREE
)
5367 gsi_insert_on_edge_immediate (e
, g
);
5368 *gsi
= gsi_for_stmt (g
);
5371 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
5372 if (!useless_type_conversion_p (TREE_TYPE (expected
), itype
))
5374 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (expected
)),
5376 build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (expected
),
5377 gimple_assign_lhs (g
)));
5378 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
5380 g
= gimple_build_assign (expected
, SSA_NAME
, gimple_assign_lhs (g
));
5381 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
5385 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
5386 doesn't fit into TYPE. The test for overflow should be regardless of
5387 -fwrapv, and even for unsigned types. */
5390 arith_overflowed_p (enum tree_code code
, const_tree type
,
5391 const_tree arg0
, const_tree arg1
)
5393 widest2_int warg0
= widest2_int_cst (arg0
);
5394 widest2_int warg1
= widest2_int_cst (arg1
);
5398 case PLUS_EXPR
: wres
= wi::add (warg0
, warg1
); break;
5399 case MINUS_EXPR
: wres
= wi::sub (warg0
, warg1
); break;
5400 case MULT_EXPR
: wres
= wi::mul (warg0
, warg1
); break;
5401 default: gcc_unreachable ();
5403 signop sign
= TYPE_SIGN (type
);
5404 if (sign
== UNSIGNED
&& wi::neg_p (wres
))
5406 return wi::min_precision (wres
, sign
) > TYPE_PRECISION (type
);
5409 /* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
5410 for the memory it references, otherwise return null. VECTYPE is the
5411 type of the memory vector. */
5414 gimple_fold_mask_load_store_mem_ref (gcall
*call
, tree vectype
)
5416 tree ptr
= gimple_call_arg (call
, 0);
5417 tree alias_align
= gimple_call_arg (call
, 1);
5418 tree mask
= gimple_call_arg (call
, 2);
5419 if (!tree_fits_uhwi_p (alias_align
) || !integer_all_onesp (mask
))
5422 unsigned HOST_WIDE_INT align
= tree_to_uhwi (alias_align
);
5423 if (TYPE_ALIGN (vectype
) != align
)
5424 vectype
= build_aligned_type (vectype
, align
);
5425 tree offset
= build_zero_cst (TREE_TYPE (alias_align
));
5426 return fold_build2 (MEM_REF
, vectype
, ptr
, offset
);
5429 /* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
5432 gimple_fold_mask_load (gimple_stmt_iterator
*gsi
, gcall
*call
)
5434 tree lhs
= gimple_call_lhs (call
);
5438 if (tree rhs
= gimple_fold_mask_load_store_mem_ref (call
, TREE_TYPE (lhs
)))
5440 gassign
*new_stmt
= gimple_build_assign (lhs
, rhs
);
5441 gimple_set_location (new_stmt
, gimple_location (call
));
5442 gimple_move_vops (new_stmt
, call
);
5443 gsi_replace (gsi
, new_stmt
, false);
5449 /* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
5452 gimple_fold_mask_store (gimple_stmt_iterator
*gsi
, gcall
*call
)
5454 tree rhs
= gimple_call_arg (call
, 3);
5455 if (tree lhs
= gimple_fold_mask_load_store_mem_ref (call
, TREE_TYPE (rhs
)))
5457 gassign
*new_stmt
= gimple_build_assign (lhs
, rhs
);
5458 gimple_set_location (new_stmt
, gimple_location (call
));
5459 gimple_move_vops (new_stmt
, call
);
5460 gsi_replace (gsi
, new_stmt
, false);
5466 /* Attempt to fold a call statement referenced by the statement iterator GSI.
5467 The statement may be replaced by another statement, e.g., if the call
5468 simplifies to a constant value. Return true if any changes were made.
5469 It is assumed that the operands have been previously folded. */
5472 gimple_fold_call (gimple_stmt_iterator
*gsi
, bool inplace
)
5474 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
5476 bool changed
= false;
5478 /* Check for virtual calls that became direct calls. */
5479 callee
= gimple_call_fn (stmt
);
5480 if (callee
&& TREE_CODE (callee
) == OBJ_TYPE_REF
)
5482 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee
)) != NULL_TREE
)
5484 if (dump_file
&& virtual_method_call_p (callee
)
5485 && !possible_polymorphic_call_target_p
5486 (callee
, stmt
, cgraph_node::get (gimple_call_addr_fndecl
5487 (OBJ_TYPE_REF_EXPR (callee
)))))
5490 "Type inheritance inconsistent devirtualization of ");
5491 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
5492 fprintf (dump_file
, " to ");
5493 print_generic_expr (dump_file
, callee
, TDF_SLIM
);
5494 fprintf (dump_file
, "\n");
5497 gimple_call_set_fn (stmt
, OBJ_TYPE_REF_EXPR (callee
));
5500 else if (flag_devirtualize
&& !inplace
&& virtual_method_call_p (callee
))
5503 vec
<cgraph_node
*>targets
5504 = possible_polymorphic_call_targets (callee
, stmt
, &final
);
5505 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
5507 tree lhs
= gimple_call_lhs (stmt
);
5508 if (dump_enabled_p ())
5510 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, stmt
,
5511 "folding virtual function call to %s\n",
5512 targets
.length () == 1
5513 ? targets
[0]->name ()
5514 : "__builtin_unreachable");
5516 if (targets
.length () == 1)
5518 tree fndecl
= targets
[0]->decl
;
5519 gimple_call_set_fndecl (stmt
, fndecl
);
5521 /* If changing the call to __cxa_pure_virtual
5522 or similar noreturn function, adjust gimple_call_fntype
5524 if (gimple_call_noreturn_p (stmt
)
5525 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl
)))
5526 && TYPE_ARG_TYPES (TREE_TYPE (fndecl
))
5527 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl
)))
5529 gimple_call_set_fntype (stmt
, TREE_TYPE (fndecl
));
5530 /* If the call becomes noreturn, remove the lhs. */
5532 && gimple_call_noreturn_p (stmt
)
5533 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt
)))
5534 || should_remove_lhs_p (lhs
)))
5536 if (TREE_CODE (lhs
) == SSA_NAME
)
5538 tree var
= create_tmp_var (TREE_TYPE (lhs
));
5539 tree def
= get_or_create_ssa_default_def (cfun
, var
);
5540 gimple
*new_stmt
= gimple_build_assign (lhs
, def
);
5541 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
5543 gimple_call_set_lhs (stmt
, NULL_TREE
);
5545 maybe_remove_unused_call_args (cfun
, stmt
);
5549 tree fndecl
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
5550 gimple
*new_stmt
= gimple_build_call (fndecl
, 0);
5551 gimple_set_location (new_stmt
, gimple_location (stmt
));
5552 /* If the call had a SSA name as lhs morph that into
5553 an uninitialized value. */
5554 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
5556 tree var
= create_tmp_var (TREE_TYPE (lhs
));
5557 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs
, var
);
5558 SSA_NAME_DEF_STMT (lhs
) = gimple_build_nop ();
5559 set_ssa_default_def (cfun
, var
, lhs
);
5561 gimple_move_vops (new_stmt
, stmt
);
5562 gsi_replace (gsi
, new_stmt
, false);
5569 /* Check for indirect calls that became direct calls, and then
5570 no longer require a static chain. */
5571 if (gimple_call_chain (stmt
))
5573 tree fn
= gimple_call_fndecl (stmt
);
5574 if (fn
&& !DECL_STATIC_CHAIN (fn
))
5576 gimple_call_set_chain (stmt
, NULL
);
5584 /* Check for builtins that CCP can handle using information not
5585 available in the generic fold routines. */
5586 if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
5588 if (gimple_fold_builtin (gsi
))
5591 else if (gimple_call_builtin_p (stmt
, BUILT_IN_MD
))
5593 changed
|= targetm
.gimple_fold_builtin (gsi
);
5595 else if (gimple_call_internal_p (stmt
))
5597 enum tree_code subcode
= ERROR_MARK
;
5598 tree result
= NULL_TREE
;
5599 bool cplx_result
= false;
5600 tree overflow
= NULL_TREE
;
5601 switch (gimple_call_internal_fn (stmt
))
5603 case IFN_BUILTIN_EXPECT
:
5604 result
= fold_builtin_expect (gimple_location (stmt
),
5605 gimple_call_arg (stmt
, 0),
5606 gimple_call_arg (stmt
, 1),
5607 gimple_call_arg (stmt
, 2),
5610 case IFN_UBSAN_OBJECT_SIZE
:
5612 tree offset
= gimple_call_arg (stmt
, 1);
5613 tree objsize
= gimple_call_arg (stmt
, 2);
5614 if (integer_all_onesp (objsize
)
5615 || (TREE_CODE (offset
) == INTEGER_CST
5616 && TREE_CODE (objsize
) == INTEGER_CST
5617 && tree_int_cst_le (offset
, objsize
)))
5619 replace_call_with_value (gsi
, NULL_TREE
);
5625 if (integer_zerop (gimple_call_arg (stmt
, 1)))
5627 replace_call_with_value (gsi
, NULL_TREE
);
5631 case IFN_UBSAN_BOUNDS
:
5633 tree index
= gimple_call_arg (stmt
, 1);
5634 tree bound
= gimple_call_arg (stmt
, 2);
5635 if (TREE_CODE (index
) == INTEGER_CST
5636 && TREE_CODE (bound
) == INTEGER_CST
)
5638 index
= fold_convert (TREE_TYPE (bound
), index
);
5639 if (TREE_CODE (index
) == INTEGER_CST
5640 && tree_int_cst_le (index
, bound
))
5642 replace_call_with_value (gsi
, NULL_TREE
);
5648 case IFN_GOACC_DIM_SIZE
:
5649 case IFN_GOACC_DIM_POS
:
5650 result
= fold_internal_goacc_dim (stmt
);
5652 case IFN_UBSAN_CHECK_ADD
:
5653 subcode
= PLUS_EXPR
;
5655 case IFN_UBSAN_CHECK_SUB
:
5656 subcode
= MINUS_EXPR
;
5658 case IFN_UBSAN_CHECK_MUL
:
5659 subcode
= MULT_EXPR
;
5661 case IFN_ADD_OVERFLOW
:
5662 subcode
= PLUS_EXPR
;
5665 case IFN_SUB_OVERFLOW
:
5666 subcode
= MINUS_EXPR
;
5669 case IFN_MUL_OVERFLOW
:
5670 subcode
= MULT_EXPR
;
5674 changed
|= gimple_fold_mask_load (gsi
, stmt
);
5676 case IFN_MASK_STORE
:
5677 changed
|= gimple_fold_mask_store (gsi
, stmt
);
5682 if (subcode
!= ERROR_MARK
)
5684 tree arg0
= gimple_call_arg (stmt
, 0);
5685 tree arg1
= gimple_call_arg (stmt
, 1);
5686 tree type
= TREE_TYPE (arg0
);
5689 tree lhs
= gimple_call_lhs (stmt
);
5690 if (lhs
== NULL_TREE
)
5693 type
= TREE_TYPE (TREE_TYPE (lhs
));
5695 if (type
== NULL_TREE
)
5697 /* x = y + 0; x = y - 0; x = y * 0; */
5698 else if (integer_zerop (arg1
))
5699 result
= subcode
== MULT_EXPR
? integer_zero_node
: arg0
;
5700 /* x = 0 + y; x = 0 * y; */
5701 else if (subcode
!= MINUS_EXPR
&& integer_zerop (arg0
))
5702 result
= subcode
== MULT_EXPR
? integer_zero_node
: arg1
;
5704 else if (subcode
== MINUS_EXPR
&& operand_equal_p (arg0
, arg1
, 0))
5705 result
= integer_zero_node
;
5706 /* x = y * 1; x = 1 * y; */
5707 else if (subcode
== MULT_EXPR
&& integer_onep (arg1
))
5709 else if (subcode
== MULT_EXPR
&& integer_onep (arg0
))
5711 else if (TREE_CODE (arg0
) == INTEGER_CST
5712 && TREE_CODE (arg1
) == INTEGER_CST
)
5715 result
= int_const_binop (subcode
, fold_convert (type
, arg0
),
5716 fold_convert (type
, arg1
));
5718 result
= int_const_binop (subcode
, arg0
, arg1
);
5719 if (result
&& arith_overflowed_p (subcode
, type
, arg0
, arg1
))
5722 overflow
= build_one_cst (type
);
5729 if (result
== integer_zero_node
)
5730 result
= build_zero_cst (type
);
5731 else if (cplx_result
&& TREE_TYPE (result
) != type
)
5733 if (TREE_CODE (result
) == INTEGER_CST
)
5735 if (arith_overflowed_p (PLUS_EXPR
, type
, result
,
5737 overflow
= build_one_cst (type
);
5739 else if ((!TYPE_UNSIGNED (TREE_TYPE (result
))
5740 && TYPE_UNSIGNED (type
))
5741 || (TYPE_PRECISION (type
)
5742 < (TYPE_PRECISION (TREE_TYPE (result
))
5743 + (TYPE_UNSIGNED (TREE_TYPE (result
))
5744 && !TYPE_UNSIGNED (type
)))))
5747 result
= fold_convert (type
, result
);
5754 if (TREE_CODE (result
) == INTEGER_CST
&& TREE_OVERFLOW (result
))
5755 result
= drop_tree_overflow (result
);
5758 if (overflow
== NULL_TREE
)
5759 overflow
= build_zero_cst (TREE_TYPE (result
));
5760 tree ctype
= build_complex_type (TREE_TYPE (result
));
5761 if (TREE_CODE (result
) == INTEGER_CST
5762 && TREE_CODE (overflow
) == INTEGER_CST
)
5763 result
= build_complex (ctype
, result
, overflow
);
5765 result
= build2_loc (gimple_location (stmt
), COMPLEX_EXPR
,
5766 ctype
, result
, overflow
);
5768 gimplify_and_update_call_from_tree (gsi
, result
);
5777 /* Return true whether NAME has a use on STMT. */
/* NOTE(review): walks NAME's immediate-use list (FOR_EACH_IMM_USE_FAST) and
   compares each use's statement against STMT.  The return statements are not
   visible in this extracted text — presumably `return true;` inside the loop
   on a match and `return false;` after it; confirm against upstream.  */
5780 has_use_on_stmt (tree name
, gimple
*stmt
)
5782 imm_use_iterator iter
;
5783 use_operand_p use_p
;
5784 FOR_EACH_IMM_USE_FAST (use_p
, iter
, name
)
5785 if (USE_STMT (use_p
) == stmt
)
5790 /* Worker for fold_stmt_1 dispatch to pattern based folding with
5793 Replaces *GSI with the simplification result in RCODE and OPS
5794 and the associated statements in *SEQ. Does the replacement
5795 according to INPLACE and returns true if the operation succeeded. */
/* NOTE(review): this extracted text is missing interleaved lines (early
   `return false;` paths, braces); the visible structure is: guard against
   abnormal SSA names, then dispatch on the statement kind (gcond /
   assignment / call / anything with a lhs) to install RES_OP's
   simplification.  */
5798 replace_stmt_with_simplification (gimple_stmt_iterator
*gsi
,
5799 gimple_match_op
*res_op
,
5800 gimple_seq
*seq
, bool inplace
)
5802 gimple
*stmt
= gsi_stmt (*gsi
);
5803 tree
*ops
= res_op
->ops
;
5804 unsigned int num_ops
= res_op
->num_ops
;
5806 /* Play safe and do not allow abnormals to be mentioned in
5807 newly created statements. See also maybe_push_res_to_seq.
5808 As an exception allow such uses if there was a use of the
5809 same SSA name on the old stmt. */
5810 for (unsigned int i
= 0; i
< num_ops
; ++i
)
5811 if (TREE_CODE (ops
[i
]) == SSA_NAME
5812 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[i
])
5813 && !has_use_on_stmt (ops
[i
], stmt
))
/* Also check SSA names buried inside a comparison in ops[0].  */
5816 if (num_ops
> 0 && COMPARISON_CLASS_P (ops
[0]))
5817 for (unsigned int i
= 0; i
< 2; ++i
)
5818 if (TREE_CODE (TREE_OPERAND (ops
[0], i
)) == SSA_NAME
5819 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops
[0], i
))
5820 && !has_use_on_stmt (TREE_OPERAND (ops
[0], i
), stmt
))
5823 /* Don't insert new statements when INPLACE is true, even if we could
5824 reuse STMT for the final statement. */
5825 if (inplace
&& !gimple_seq_empty_p (*seq
))
/* Case 1: the statement is a GIMPLE_COND — rewrite its condition.  */
5828 if (gcond
*cond_stmt
= dyn_cast
<gcond
*> (stmt
))
5830 gcc_assert (res_op
->code
.is_tree_code ());
5831 if (TREE_CODE_CLASS ((enum tree_code
) res_op
->code
) == tcc_comparison
5832 /* GIMPLE_CONDs condition may not throw. */
5833 && (!flag_exceptions
5834 || !cfun
->can_throw_non_call_exceptions
5835 || !operation_could_trap_p (res_op
->code
,
5836 FLOAT_TYPE_P (TREE_TYPE (ops
[0])),
5838 gimple_cond_set_condition (cond_stmt
, res_op
->code
, ops
[0], ops
[1]);
5839 else if (res_op
->code
== SSA_NAME
)
5840 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, ops
[0],
5841 build_zero_cst (TREE_TYPE (ops
[0])));
5842 else if (res_op
->code
== INTEGER_CST
)
5844 if (integer_zerop (ops
[0]))
5845 gimple_cond_make_false (cond_stmt
);
5847 gimple_cond_make_true (cond_stmt
);
/* Otherwise materialize the result in SEQ and compare it against zero.  */
5851 tree res
= maybe_push_res_to_seq (res_op
, seq
);
5854 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, res
,
5855 build_zero_cst (TREE_TYPE (res
)));
5859 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5861 fprintf (dump_file
, "gimple_simplified to ");
5862 if (!gimple_seq_empty_p (*seq
))
5863 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
5864 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
5867 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
/* Case 2: an assignment whose simplification is a tree code.  */
5870 else if (is_gimple_assign (stmt
)
5871 && res_op
->code
.is_tree_code ())
5874 || gimple_num_ops (stmt
) > get_gimple_rhs_num_ops (res_op
->code
))
5876 maybe_build_generic_op (res_op
);
5877 gimple_assign_set_rhs_with_ops (gsi
, res_op
->code
,
5878 res_op
->op_or_null (0),
5879 res_op
->op_or_null (1),
5880 res_op
->op_or_null (2));
5881 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5883 fprintf (dump_file
, "gimple_simplified to ");
5884 if (!gimple_seq_empty_p (*seq
))
5885 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
5886 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
5889 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
/* Case 3: a call simplified to the same combined function — just
   replace the argument list in place.  */
5893 else if (res_op
->code
.is_fn_code ()
5894 && gimple_call_combined_fn (stmt
) == res_op
->code
)
5896 gcc_assert (num_ops
== gimple_call_num_args (stmt
));
5897 for (unsigned int i
= 0; i
< num_ops
; ++i
)
5898 gimple_call_set_arg (stmt
, i
, ops
[i
]);
5899 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5901 fprintf (dump_file
, "gimple_simplified to ");
5902 if (!gimple_seq_empty_p (*seq
))
5903 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
5904 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
), 0, TDF_SLIM
);
5906 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
/* Case 4 (non-INPLACE): statement with a lhs — push the result into SEQ
   targeting LHS and replace the whole statement with SEQ.  */
5911 if (gimple_has_lhs (stmt
))
5913 tree lhs
= gimple_get_lhs (stmt
);
5914 if (!maybe_push_res_to_seq (res_op
, seq
, lhs
))
5916 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5918 fprintf (dump_file
, "gimple_simplified to ");
5919 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
5921 gsi_replace_with_seq_vops (gsi
, *seq
);
5931 /* Canonicalize MEM_REFs invariant address operand after propagation. */
/* NOTE(review): T points at a reference tree embedded in a statement; the
   function rewrites *T (and *orig_t, its original root — declared on a line
   missing from this extract) into canonical form and, presumably, returns
   whether anything changed — the return statements are not visible here.  */
5934 maybe_canonicalize_mem_ref_addr (tree
*t
, bool is_debug
= false)
5939 if (TREE_CODE (*t
) == ADDR_EXPR
)
5940 t
= &TREE_OPERAND (*t
, 0);
5942 /* The C and C++ frontends use an ARRAY_REF for indexing with their
5943 generic vector extension. The actual vector referenced is
5944 view-converted to an array type for this purpose. If the index
5945 is constant the canonical representation in the middle-end is a
5946 BIT_FIELD_REF so re-write the former to the latter here. */
5947 if (TREE_CODE (*t
) == ARRAY_REF
5948 && TREE_CODE (TREE_OPERAND (*t
, 0)) == VIEW_CONVERT_EXPR
5949 && TREE_CODE (TREE_OPERAND (*t
, 1)) == INTEGER_CST
5950 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0))))
5952 tree vtype
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0));
5953 if (VECTOR_TYPE_P (vtype
))
5955 tree low
= array_ref_low_bound (*t
);
5956 if (TREE_CODE (low
) == INTEGER_CST
)
5958 if (tree_int_cst_le (low
, TREE_OPERAND (*t
, 1)))
/* idx = (index - low_bound) * element_size, in bits.  */
5960 widest_int idx
= wi::sub (wi::to_widest (TREE_OPERAND (*t
, 1)),
5961 wi::to_widest (low
));
5962 idx
= wi::mul (idx
, wi::to_widest
5963 (TYPE_SIZE (TREE_TYPE (*t
))));
/* ext = idx + element_size: the access must end within the vector.  */
5965 = wi::add (idx
, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t
))));
5966 if (wi::les_p (ext
, wi::to_widest (TYPE_SIZE (vtype
))))
5968 *t
= build3_loc (EXPR_LOCATION (*t
), BIT_FIELD_REF
,
5970 TREE_OPERAND (TREE_OPERAND (*t
, 0), 0),
5971 TYPE_SIZE (TREE_TYPE (*t
)),
5972 wide_int_to_tree (bitsizetype
, idx
));
/* Strip component references down to the base object.  */
5980 while (handled_component_p (*t
))
5981 t
= &TREE_OPERAND (*t
, 0);
5983 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
5984 of invariant addresses into a SSA name MEM_REF address. */
5985 if (TREE_CODE (*t
) == MEM_REF
5986 || TREE_CODE (*t
) == TARGET_MEM_REF
)
5988 tree addr
= TREE_OPERAND (*t
, 0);
5989 if (TREE_CODE (addr
) == ADDR_EXPR
5990 && (TREE_CODE (TREE_OPERAND (addr
, 0)) == MEM_REF
5991 || handled_component_p (TREE_OPERAND (addr
, 0))))
/* Fold the offset of the inner reference into the MEM_REF's constant
   offset operand (coffset is computed on lines missing here).  */
5995 base
= get_addr_base_and_unit_offset (TREE_OPERAND (addr
, 0),
6004 TREE_OPERAND (*t
, 0) = build_fold_addr_expr (base
);
6005 TREE_OPERAND (*t
, 1) = int_const_binop (PLUS_EXPR
,
6006 TREE_OPERAND (*t
, 1),
6007 size_int (coffset
));
6010 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t
, 0)) == DEBUG_EXPR_DECL
6011 || is_gimple_mem_ref_addr (TREE_OPERAND (*t
, 0)));
6014 /* Canonicalize back MEM_REFs to plain reference trees if the object
6015 accessed is a decl that has the same access semantics as the MEM_REF. */
6016 if (TREE_CODE (*t
) == MEM_REF
6017 && TREE_CODE (TREE_OPERAND (*t
, 0)) == ADDR_EXPR
6018 && integer_zerop (TREE_OPERAND (*t
, 1))
6019 && MR_DEPENDENCE_CLIQUE (*t
) == 0)
6021 tree decl
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
6022 tree alias_type
= TREE_TYPE (TREE_OPERAND (*t
, 1));
6023 if (/* Same volatile qualification. */
6024 TREE_THIS_VOLATILE (*t
) == TREE_THIS_VOLATILE (decl
)
6025 /* Same TBAA behavior with -fstrict-aliasing. */
6026 && !TYPE_REF_CAN_ALIAS_ALL (alias_type
)
6027 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl
))
6028 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type
)))
6029 /* Same alignment. */
6030 && TYPE_ALIGN (TREE_TYPE (decl
)) == TYPE_ALIGN (TREE_TYPE (*t
))
6031 /* We have to look out here to not drop a required conversion
6032 from the rhs to the lhs if *t appears on the lhs or vice-versa
6033 if it appears on the rhs. Thus require strict type
6035 && types_compatible_p (TREE_TYPE (*t
), TREE_TYPE (decl
)))
6037 *t
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
/* &MEM[cst, off] with a constant base folds to a plain integer address.  */
6042 else if (TREE_CODE (*orig_t
) == ADDR_EXPR
6043 && TREE_CODE (*t
) == MEM_REF
6044 && TREE_CODE (TREE_OPERAND (*t
, 0)) == INTEGER_CST
)
6048 base
= get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t
, 0),
6052 gcc_assert (TREE_CODE (base
) == MEM_REF
);
6054 if (mem_ref_offset (base
).to_shwi (&moffset
))
6057 if (wi::to_poly_wide (TREE_OPERAND (base
, 0)).to_shwi (&moffset
))
6060 *orig_t
= build_int_cst (TREE_TYPE (*orig_t
), coffset
);
6067 /* Canonicalize TARGET_MEM_REF in particular with respect to
6068 the indexes becoming constant. */
6069 else if (TREE_CODE (*t
) == TARGET_MEM_REF
)
6071 tree tem
= maybe_fold_tmr (*t
);
/* If the root was an ADDR_EXPR, its invariant flags may now be stale.  */
6075 if (TREE_CODE (*orig_t
) == ADDR_EXPR
)
6076 recompute_tree_invariant_for_addr_expr (*orig_t
);
6084 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
6085 distinguishes both cases. */
/* NOTE(review): three phases are visible in this (line-gapped) extract:
   (1) canonicalize [TARGET_]MEM_REF addresses and operand order per
   statement kind, (2) dispatch to the match.pd based gimple_simplify /
   replace_stmt_with_simplification machinery, (3) fold the statement's
   main computation per gimple code.  Case labels and `return changed;`
   are among the lines missing from this extract.  */
6088 fold_stmt_1 (gimple_stmt_iterator
*gsi
, bool inplace
, tree (*valueize
) (tree
))
6090 bool changed
= false;
6091 gimple
*stmt
= gsi_stmt (*gsi
);
6092 bool nowarning
= warning_suppressed_p (stmt
, OPT_Wstrict_overflow
);
6094 fold_defer_overflow_warnings ();
6096 /* First do required canonicalization of [TARGET_]MEM_REF addresses
6098 ??? This shouldn't be done in generic folding but in the
6099 propagation helpers which also know whether an address was
6101 Also canonicalize operand order. */
6102 switch (gimple_code (stmt
))
/* Assignments: canonicalize a single-rhs reference on both sides.  */
6105 if (gimple_assign_rhs_class (stmt
) == GIMPLE_SINGLE_RHS
)
6107 tree
*rhs
= gimple_assign_rhs1_ptr (stmt
);
6108 if ((REFERENCE_CLASS_P (*rhs
)
6109 || TREE_CODE (*rhs
) == ADDR_EXPR
)
6110 && maybe_canonicalize_mem_ref_addr (rhs
))
6112 tree
*lhs
= gimple_assign_lhs_ptr (stmt
);
6113 if (REFERENCE_CLASS_P (*lhs
)
6114 && maybe_canonicalize_mem_ref_addr (lhs
))
6119 /* Canonicalize operand order. */
6120 enum tree_code code
= gimple_assign_rhs_code (stmt
);
6121 if (TREE_CODE_CLASS (code
) == tcc_comparison
6122 || commutative_tree_code (code
)
6123 || commutative_ternary_tree_code (code
))
6125 tree rhs1
= gimple_assign_rhs1 (stmt
);
6126 tree rhs2
= gimple_assign_rhs2 (stmt
);
6127 if (tree_swap_operands_p (rhs1
, rhs2
))
6129 gimple_assign_set_rhs1 (stmt
, rhs2
);
6130 gimple_assign_set_rhs2 (stmt
, rhs1
);
6131 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
6132 gimple_assign_set_rhs_code (stmt
,
6133 swap_tree_comparison (code
));
/* Calls: canonicalize every reference argument and the lhs.  */
6141 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
6143 tree
*arg
= gimple_call_arg_ptr (stmt
, i
);
6144 if (REFERENCE_CLASS_P (*arg
)
6145 && maybe_canonicalize_mem_ref_addr (arg
))
6148 tree
*lhs
= gimple_call_lhs_ptr (stmt
);
6150 && REFERENCE_CLASS_P (*lhs
)
6151 && maybe_canonicalize_mem_ref_addr (lhs
))
/* Asms: canonicalize memory output and input operands.  */
6157 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
6158 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
6160 tree link
= gimple_asm_output_op (asm_stmt
, i
);
6161 tree op
= TREE_VALUE (link
);
6162 if (REFERENCE_CLASS_P (op
)
6163 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
6166 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
6168 tree link
= gimple_asm_input_op (asm_stmt
, i
);
6169 tree op
= TREE_VALUE (link
);
6170 if ((REFERENCE_CLASS_P (op
)
6171 || TREE_CODE (op
) == ADDR_EXPR
)
6172 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
/* Debug binds: canonicalize in is_debug mode (no real code changes).  */
6178 if (gimple_debug_bind_p (stmt
))
6180 tree
*val
= gimple_debug_bind_get_value_ptr (stmt
);
6182 && (REFERENCE_CLASS_P (*val
)
6183 || TREE_CODE (*val
) == ADDR_EXPR
)
6184 && maybe_canonicalize_mem_ref_addr (val
, true))
6190 /* Canonicalize operand order. */
6191 tree lhs
= gimple_cond_lhs (stmt
);
6192 tree rhs
= gimple_cond_rhs (stmt
);
6193 if (tree_swap_operands_p (lhs
, rhs
))
6195 gcond
*gc
= as_a
<gcond
*> (stmt
);
6196 gimple_cond_set_lhs (gc
, rhs
);
6197 gimple_cond_set_rhs (gc
, lhs
);
6198 gimple_cond_set_code (gc
,
6199 swap_tree_comparison (gimple_cond_code (gc
)));
6206 /* Dispatch to pattern-based folding. */
6208 || is_gimple_assign (stmt
)
6209 || gimple_code (stmt
) == GIMPLE_COND
)
6211 gimple_seq seq
= NULL
;
6212 gimple_match_op res_op
;
/* INPLACE forbids emitting new statements, hence no sequence then.  */
6213 if (gimple_simplify (stmt
, &res_op
, inplace
? NULL
: &seq
,
6214 valueize
, valueize
))
6216 if (replace_stmt_with_simplification (gsi
, &res_op
, &seq
, inplace
))
6219 gimple_seq_discard (seq
);
6223 stmt
= gsi_stmt (*gsi
);
6225 /* Fold the main computation performed by the statement. */
6226 switch (gimple_code (stmt
))
6230 /* Try to canonicalize for boolean-typed X the comparisons
6231 X == 0, X == 1, X != 0, and X != 1. */
6232 if (gimple_assign_rhs_code (stmt
) == EQ_EXPR
6233 || gimple_assign_rhs_code (stmt
) == NE_EXPR
)
6235 tree lhs
= gimple_assign_lhs (stmt
);
6236 tree op1
= gimple_assign_rhs1 (stmt
);
6237 tree op2
= gimple_assign_rhs2 (stmt
);
6238 tree type
= TREE_TYPE (op1
);
6240 /* Check whether the comparison operands are of the same boolean
6241 type as the result type is.
6242 Check that second operand is an integer-constant with value
6244 if (TREE_CODE (op2
) == INTEGER_CST
6245 && (integer_zerop (op2
) || integer_onep (op2
))
6246 && useless_type_conversion_p (TREE_TYPE (lhs
), type
))
6248 enum tree_code cmp_code
= gimple_assign_rhs_code (stmt
);
6249 bool is_logical_not
= false;
6251 /* X == 0 and X != 1 is a logical-not.of X
6252 X == 1 and X != 0 is X */
6253 if ((cmp_code
== EQ_EXPR
&& integer_zerop (op2
))
6254 || (cmp_code
== NE_EXPR
&& integer_onep (op2
)))
6255 is_logical_not
= true;
6257 if (is_logical_not
== false)
6258 gimple_assign_set_rhs_with_ops (gsi
, TREE_CODE (op1
), op1
);
6259 /* Only for one-bit precision typed X the transformation
6260 !X -> ~X is valied. */
6261 else if (TYPE_PRECISION (type
) == 1)
6262 gimple_assign_set_rhs_with_ops (gsi
, BIT_NOT_EXPR
, op1
);
6263 /* Otherwise we use !X -> X ^ 1. */
6265 gimple_assign_set_rhs_with_ops (gsi
, BIT_XOR_EXPR
, op1
,
6266 build_int_cst (type
, 1));
/* Generic assignment folding via fold_gimple_assign; only commit the
   new rhs when it does not need more operand slots than available.  */
6272 unsigned old_num_ops
= gimple_num_ops (stmt
);
6273 tree lhs
= gimple_assign_lhs (stmt
);
6274 tree new_rhs
= fold_gimple_assign (gsi
);
6276 && !useless_type_conversion_p (TREE_TYPE (lhs
),
6277 TREE_TYPE (new_rhs
)))
6278 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
6281 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs
)) < old_num_ops
))
6283 gimple_assign_set_rhs_from_tree (gsi
, new_rhs
);
6290 changed
|= gimple_fold_call (gsi
, inplace
);
6294 if (gimple_debug_bind_p (stmt
))
6296 tree val
= gimple_debug_bind_get_value (stmt
);
6298 && REFERENCE_CLASS_P (val
))
6300 tree tem
= maybe_fold_reference (val
);
6303 gimple_debug_bind_set_value (stmt
, tem
);
6308 && TREE_CODE (val
) == ADDR_EXPR
)
6310 tree ref
= TREE_OPERAND (val
, 0);
6311 tree tem
= maybe_fold_reference (ref
);
6314 tem
= build_fold_addr_expr_with_type (tem
, TREE_TYPE (val
));
6315 gimple_debug_bind_set_value (stmt
, tem
);
/* Returns: valueize the returned SSA name when copy propagation is safe.  */
6324 greturn
*ret_stmt
= as_a
<greturn
*> (stmt
);
6325 tree ret
= gimple_return_retval(ret_stmt
);
6327 if (ret
&& TREE_CODE (ret
) == SSA_NAME
&& valueize
)
6329 tree val
= valueize (ret
);
6330 if (val
&& val
!= ret
6331 && may_propagate_copy (ret
, val
))
6333 gimple_return_set_retval (ret_stmt
, val
);
6343 stmt
= gsi_stmt (*gsi
);
6345 fold_undefer_overflow_warnings (changed
&& !nowarning
, stmt
, 0);
6349 /* Valueziation callback that ends up not following SSA edges. */
/* NOTE(review): the body is absent from this extract — presumably
   `return NULL_TREE;` so gimple_simplify never looks through defs.  */
6352 no_follow_ssa_edges (tree
)
6357 /* Valueization callback that ends up following single-use SSA edges only. */
/* NOTE(review): the return statements are absent from this extract —
   presumably NULL_TREE for a multi-use SSA name, VAL otherwise.  */
6360 follow_single_use_edges (tree val
)
6362 if (TREE_CODE (val
) == SSA_NAME
6363 && !has_single_use (val
))
6368 /* Valueization callback that follows all SSA edges. */
/* NOTE(review): body absent from this extract — presumably `return val;`.  */
6371 follow_all_ssa_edges (tree val
)
6376 /* Fold the statement pointed to by GSI. In some cases, this function may
6377 replace the whole statement with a new one. Returns true iff folding
6379 The statement pointed to by GSI should be in valid gimple form but may
6380 be in unfolded state as resulting from for example constant propagation
6381 which can produce *&x = 0. */
/* Thin wrapper: full (non-inplace) folding without following SSA edges.  */
6384 fold_stmt (gimple_stmt_iterator
*gsi
)
6386 return fold_stmt_1 (gsi
, false, no_follow_ssa_edges
);
/* Overload of fold_stmt: non-inplace folding using the caller-supplied
   VALUEIZE callback to look through SSA definitions.  */
6390 fold_stmt (gimple_stmt_iterator
*gsi
, tree (*valueize
) (tree
))
6392 return fold_stmt_1 (gsi
, false, valueize
);
6395 /* Perform the minimal folding on statement *GSI. Only operations like
6396 *&x created by constant propagation are handled. The statement cannot
6397 be replaced with a new one. Return true if the statement was
6398 changed, false otherwise.
6399 The statement *GSI should be in valid gimple form but may
6400 be in unfolded state as resulting from for example constant propagation
6401 which can produce *&x = 0. */
/* NOTE(review): the assert checks INPLACE folding kept the statement
   identity; the final `return changed;` is absent from this extract.  */
6404 fold_stmt_inplace (gimple_stmt_iterator
*gsi
)
6406 gimple
*stmt
= gsi_stmt (*gsi
);
6407 bool changed
= fold_stmt_1 (gsi
, true, no_follow_ssa_edges
);
6408 gcc_assert (gsi_stmt (*gsi
) == stmt
);
6412 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6413 if EXPR is null or we don't know how.
6414 If non-null, the result always has boolean type. */
/* NOTE(review): two symmetric arms are visible — the first (INVERT)
   negates constants / compares SSA names EQ 0 / inverts comparisons, the
   second preserves the truth value.  The fall-through `return NULL_TREE;`
   lines are absent from this extract.  */
6417 canonicalize_bool (tree expr
, bool invert
)
/* Inverted arm: constant true -> false, false -> true.  */
6423 if (integer_nonzerop (expr
))
6424 return boolean_false_node
;
6425 else if (integer_zerop (expr
))
6426 return boolean_true_node
;
6427 else if (TREE_CODE (expr
) == SSA_NAME
)
6428 return fold_build2 (EQ_EXPR
, boolean_type_node
, expr
,
6429 build_int_cst (TREE_TYPE (expr
), 0));
6430 else if (COMPARISON_CLASS_P (expr
))
6431 return fold_build2 (invert_tree_comparison (TREE_CODE (expr
), false),
6433 TREE_OPERAND (expr
, 0),
6434 TREE_OPERAND (expr
, 1));
/* Non-inverted arm: an already-boolean expr passes through unchanged.  */
6440 if (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
6442 if (integer_nonzerop (expr
))
6443 return boolean_true_node
;
6444 else if (integer_zerop (expr
))
6445 return boolean_false_node
;
6446 else if (TREE_CODE (expr
) == SSA_NAME
)
6447 return fold_build2 (NE_EXPR
, boolean_type_node
, expr
,
6448 build_int_cst (TREE_TYPE (expr
), 0));
6449 else if (COMPARISON_CLASS_P (expr
))
6450 return fold_build2 (TREE_CODE (expr
),
6452 TREE_OPERAND (expr
, 0),
6453 TREE_OPERAND (expr
, 1));
6459 /* Check to see if a boolean expression EXPR is logically equivalent to the
6460 comparison (OP1 CODE OP2). Check for various identities involving
/* NOTE(review): returns true on the visible match paths; the interleaved
   `return true;` / final `return false;` lines are absent from this
   extract.  */
6464 same_bool_comparison_p (const_tree expr
, enum tree_code code
,
6465 const_tree op1
, const_tree op2
)
6469 /* The obvious case. */
6470 if (TREE_CODE (expr
) == code
6471 && operand_equal_p (TREE_OPERAND (expr
, 0), op1
, 0)
6472 && operand_equal_p (TREE_OPERAND (expr
, 1), op2
, 0))
6475 /* Check for comparing (name, name != 0) and the case where expr
6476 is an SSA_NAME with a definition matching the comparison. */
6477 if (TREE_CODE (expr
) == SSA_NAME
6478 && TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
6480 if (operand_equal_p (expr
, op1
, 0))
6481 return ((code
== NE_EXPR
&& integer_zerop (op2
))
6482 || (code
== EQ_EXPR
&& integer_nonzerop (op2
)));
6483 s
= SSA_NAME_DEF_STMT (expr
);
6484 if (is_gimple_assign (s
)
6485 && gimple_assign_rhs_code (s
) == code
6486 && operand_equal_p (gimple_assign_rhs1 (s
), op1
, 0)
6487 && operand_equal_p (gimple_assign_rhs2 (s
), op2
, 0))
6491 /* If op1 is of the form (name != 0) or (name == 0), and the definition
6492 of name is a comparison, recurse. */
6493 if (TREE_CODE (op1
) == SSA_NAME
6494 && TREE_CODE (TREE_TYPE (op1
)) == BOOLEAN_TYPE
)
6496 s
= SSA_NAME_DEF_STMT (op1
);
6497 if (is_gimple_assign (s
)
6498 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
)
6500 enum tree_code c
= gimple_assign_rhs_code (s
);
/* (name != 0) / (name == nonzero): same truth value as the def.  */
6501 if ((c
== NE_EXPR
&& integer_zerop (op2
))
6502 || (c
== EQ_EXPR
&& integer_nonzerop (op2
)))
6503 return same_bool_comparison_p (expr
, c
,
6504 gimple_assign_rhs1 (s
),
6505 gimple_assign_rhs2 (s
));
/* (name == 0) / (name != nonzero): inverted truth value.  */
6506 if ((c
== EQ_EXPR
&& integer_zerop (op2
))
6507 || (c
== NE_EXPR
&& integer_nonzerop (op2
)))
6508 return same_bool_comparison_p (expr
,
6509 invert_tree_comparison (c
, false),
6510 gimple_assign_rhs1 (s
),
6511 gimple_assign_rhs2 (s
));
6517 /* Check to see if two boolean expressions OP1 and OP2 are logically
/* NOTE(review): `return true;` / `return false;` lines are absent from
   this extract; each visible condition is a success path.  */
6521 same_bool_result_p (const_tree op1
, const_tree op2
)
6523 /* Simple cases first. */
6524 if (operand_equal_p (op1
, op2
, 0))
6527 /* Check the cases where at least one of the operands is a comparison.
6528 These are a bit smarter than operand_equal_p in that they apply some
6529 identifies on SSA_NAMEs. */
6530 if (COMPARISON_CLASS_P (op2
)
6531 && same_bool_comparison_p (op1
, TREE_CODE (op2
),
6532 TREE_OPERAND (op2
, 0),
6533 TREE_OPERAND (op2
, 1)))
6535 if (COMPARISON_CLASS_P (op1
)
6536 && same_bool_comparison_p (op2
, TREE_CODE (op1
),
6537 TREE_OPERAND (op1
, 0),
6538 TREE_OPERAND (op1
, 1)))
6545 /* Forward declarations for some mutually recursive functions. */
/* and/or combiners over comparisons and SSA variables; the and_* and
   or_* families call into each other via DeMorgan rewrites below.  */
6548 and_comparisons_1 (tree type
, enum tree_code code1
, tree op1a
, tree op1b
,
6549 enum tree_code code2
, tree op2a
, tree op2b
);
6551 and_var_with_comparison (tree type
, tree var
, bool invert
,
6552 enum tree_code code2
, tree op2a
, tree op2b
);
6554 and_var_with_comparison_1 (tree type
, gimple
*stmt
,
6555 enum tree_code code2
, tree op2a
, tree op2b
);
6557 or_comparisons_1 (tree
, enum tree_code code1
, tree op1a
, tree op1b
,
6558 enum tree_code code2
, tree op2a
, tree op2b
);
6560 or_var_with_comparison (tree
, tree var
, bool invert
,
6561 enum tree_code code2
, tree op2a
, tree op2b
);
6563 or_var_with_comparison_1 (tree
, gimple
*stmt
,
6564 enum tree_code code2
, tree op2a
, tree op2b
);
6566 /* Helper function for and_comparisons_1: try to simplify the AND of the
6567 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6568 If INVERT is true, invert the value of the VAR before doing the AND.
6569 Return NULL_EXPR if we can't simplify this to a single expression. */
6572 and_var_with_comparison (tree type
, tree var
, bool invert
,
6573 enum tree_code code2
, tree op2a
, tree op2b
)
6576 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
6578 /* We can only deal with variables whose definitions are assignments. */
6579 if (!is_gimple_assign (stmt
))
6582 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6583 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6584 Then we only have to consider the simpler non-inverted cases. */
6586 t
= or_var_with_comparison_1 (type
, stmt
,
6587 invert_tree_comparison (code2
, false),
6590 t
= and_var_with_comparison_1 (type
, stmt
, code2
, op2a
, op2b
);
/* Result is re-canonicalized (and possibly re-inverted) to boolean type.  */
6591 return canonicalize_bool (t
, invert
);
6594 /* Try to simplify the AND of the ssa variable defined by the assignment
6595 STMT with the comparison specified by (OP2A CODE2 OP2B).
6596 Return NULL_EXPR if we can't simplify this to a single expression. */
/* NOTE(review): several lines (declarations of `s', `t', returns of `var'
   and NULL_TREE, partial-result bookkeeping) are absent from this
   extract; the visible logic is: identity checks, recursion on a
   comparison definition, then reassociation over BIT_AND/BIT_IOR.  */
6599 and_var_with_comparison_1 (tree type
, gimple
*stmt
,
6600 enum tree_code code2
, tree op2a
, tree op2b
)
6602 tree var
= gimple_assign_lhs (stmt
);
6603 tree true_test_var
= NULL_TREE
;
6604 tree false_test_var
= NULL_TREE
;
6605 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
6607 /* Check for identities like (var AND (var == 0)) => false. */
6608 if (TREE_CODE (op2a
) == SSA_NAME
6609 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
6611 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
6612 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
6614 true_test_var
= op2a
;
6615 if (var
== true_test_var
)
6618 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
6619 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
6621 false_test_var
= op2a
;
6622 if (var
== false_test_var
)
6623 return boolean_false_node
;
6627 /* If the definition is a comparison, recurse on it. */
6628 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
6630 tree t
= and_comparisons_1 (type
, innercode
,
6631 gimple_assign_rhs1 (stmt
),
6632 gimple_assign_rhs2 (stmt
),
6640 /* If the definition is an AND or OR expression, we may be able to
6641 simplify by reassociating. */
6642 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
6643 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
6645 tree inner1
= gimple_assign_rhs1 (stmt
);
6646 tree inner2
= gimple_assign_rhs2 (stmt
);
6649 tree partial
= NULL_TREE
;
6650 bool is_and
= (innercode
== BIT_AND_EXPR
);
6652 /* Check for boolean identities that don't require recursive examination
6654 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
6655 inner1 AND (inner1 OR inner2) => inner1
6656 !inner1 AND (inner1 AND inner2) => false
6657 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
6658 Likewise for similar cases involving inner2. */
6659 if (inner1
== true_test_var
)
6660 return (is_and
? var
: inner1
);
6661 else if (inner2
== true_test_var
)
6662 return (is_and
? var
: inner2
);
6663 else if (inner1
== false_test_var
)
6665 ? boolean_false_node
6666 : and_var_with_comparison (type
, inner2
, false, code2
, op2a
,
6668 else if (inner2
== false_test_var
)
6670 ? boolean_false_node
6671 : and_var_with_comparison (type
, inner1
, false, code2
, op2a
,
6674 /* Next, redistribute/reassociate the AND across the inner tests.
6675 Compute the first partial result, (inner1 AND (op2a code op2b)) */
6676 if (TREE_CODE (inner1
) == SSA_NAME
6677 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
6678 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
6679 && (t
= maybe_fold_and_comparisons (type
, gimple_assign_rhs_code (s
),
6680 gimple_assign_rhs1 (s
),
6681 gimple_assign_rhs2 (s
),
6682 code2
, op2a
, op2b
)))
6684 /* Handle the AND case, where we are reassociating:
6685 (inner1 AND inner2) AND (op2a code2 op2b)
6687 If the partial result t is a constant, we win. Otherwise
6688 continue on to try reassociating with the other inner test. */
6691 if (integer_onep (t
))
6693 else if (integer_zerop (t
))
6694 return boolean_false_node
;
6697 /* Handle the OR case, where we are redistributing:
6698 (inner1 OR inner2) AND (op2a code2 op2b)
6699 => (t OR (inner2 AND (op2a code2 op2b))) */
6700 else if (integer_onep (t
))
6701 return boolean_true_node
;
6703 /* Save partial result for later. */
6707 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
6708 if (TREE_CODE (inner2
) == SSA_NAME
6709 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
6710 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
6711 && (t
= maybe_fold_and_comparisons (type
, gimple_assign_rhs_code (s
),
6712 gimple_assign_rhs1 (s
),
6713 gimple_assign_rhs2 (s
),
6714 code2
, op2a
, op2b
)))
6716 /* Handle the AND case, where we are reassociating:
6717 (inner1 AND inner2) AND (op2a code2 op2b)
6718 => (inner1 AND t) */
6721 if (integer_onep (t
))
6723 else if (integer_zerop (t
))
6724 return boolean_false_node
;
6725 /* If both are the same, we can apply the identity
6727 else if (partial
&& same_bool_result_p (t
, partial
))
6731 /* Handle the OR case. where we are redistributing:
6732 (inner1 OR inner2) AND (op2a code2 op2b)
6733 => (t OR (inner1 AND (op2a code2 op2b)))
6734 => (t OR partial) */
6737 if (integer_onep (t
))
6738 return boolean_true_node
;
6741 /* We already got a simplification for the other
6742 operand to the redistributed OR expression. The
6743 interesting case is when at least one is false.
6744 Or, if both are the same, we can apply the identity
6746 if (integer_zerop (partial
))
6748 else if (integer_zerop (t
))
6750 else if (same_bool_result_p (t
, partial
))
6759 /* Try to simplify the AND of two comparisons defined by
6760 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6761 If this can be done without constructing an intermediate value,
6762 return the resulting tree; otherwise NULL_TREE is returned.
6763 This function is deliberately asymmetric as it recurses on SSA_DEFs
6764 in the first comparison but not the second. */
/* NOTE(review): this extract is missing interleaved lines (returns of `t',
   loop-body plumbing in the PHI walk, the final NULL_TREE return).  */
6767 and_comparisons_1 (tree type
, enum tree_code code1
, tree op1a
, tree op1b
,
6768 enum tree_code code2
, tree op2a
, tree op2b
)
6770 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
6772 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
6773 if (operand_equal_p (op1a
, op2a
, 0)
6774 && operand_equal_p (op1b
, op2b
, 0))
6776 /* Result will be either NULL_TREE, or a combined comparison. */
6777 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
6778 TRUTH_ANDIF_EXPR
, code1
, code2
,
6779 truth_type
, op1a
, op1b
);
6784 /* Likewise the swapped case of the above. */
6785 if (operand_equal_p (op1a
, op2b
, 0)
6786 && operand_equal_p (op1b
, op2a
, 0))
6788 /* Result will be either NULL_TREE, or a combined comparison. */
6789 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
6790 TRUTH_ANDIF_EXPR
, code1
,
6791 swap_tree_comparison (code2
),
6792 truth_type
, op1a
, op1b
);
6797 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6798 NAME's definition is a truth value. See if there are any simplifications
6799 that can be done against the NAME's definition. */
6800 if (TREE_CODE (op1a
) == SSA_NAME
6801 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
6802 && (integer_zerop (op1b
) || integer_onep (op1b
)))
6804 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
6805 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
6806 gimple
*stmt
= SSA_NAME_DEF_STMT (op1a
);
6807 switch (gimple_code (stmt
))
6810 /* Try to simplify by copy-propagating the definition. */
6811 return and_var_with_comparison (type
, op1a
, invert
, code2
, op2a
,
6815 /* If every argument to the PHI produces the same result when
6816 ANDed with the second comparison, we win.
6817 Do not do this unless the type is bool since we need a bool
6818 result here anyway. */
6819 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
6821 tree result
= NULL_TREE
;
6823 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
6825 tree arg
= gimple_phi_arg_def (stmt
, i
);
6827 /* If this PHI has itself as an argument, ignore it.
6828 If all the other args produce the same result,
6830 if (arg
== gimple_phi_result (stmt
))
6832 else if (TREE_CODE (arg
) == INTEGER_CST
)
6834 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
6837 result
= boolean_false_node
;
6838 else if (!integer_zerop (result
))
6842 result
= fold_build2 (code2
, boolean_type_node
,
6844 else if (!same_bool_comparison_p (result
,
6848 else if (TREE_CODE (arg
) == SSA_NAME
6849 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
6852 gimple
*def_stmt
= SSA_NAME_DEF_STMT (arg
);
6853 /* In simple cases we can look through PHI nodes,
6854 but we have to be careful with loops.
6856 if (! dom_info_available_p (CDI_DOMINATORS
)
6857 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
6858 || dominated_by_p (CDI_DOMINATORS
,
6859 gimple_bb (def_stmt
),
6862 temp
= and_var_with_comparison (type
, arg
, invert
, code2
,
6868 else if (!same_bool_result_p (result
, temp
))
6884 /* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons
6885 : try to simplify the AND/OR of the ssa variable VAR with the comparison
6886 specified by (OP2A CODE2 OP2B) from match.pd. Return NULL_EXPR if we can't
6887 simplify this to a single expression. As we are going to lower the cost
6888 of building SSA names / gimple stmts significantly, we need to allocate
6889 them ont the stack. This will cause the code to be a bit ugly. */
/* NOTE(review): builds two throw-away GIMPLE_ASSIGN stmts and two SSA
   names entirely on the stack (XALLOCAVEC/XALLOCA) so match.pd
   simplification can run without touching the real IL; the final
   NULL_TREE return is absent from this extract.  */
6892 maybe_fold_comparisons_from_match_pd (tree type
, enum tree_code code
,
6893 enum tree_code code1
,
6894 tree op1a
, tree op1b
,
6895 enum tree_code code2
, tree op2a
,
6898 /* Allocate gimple stmt1 on the stack. */
6900 = (gassign
*) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN
, 3));
6901 gimple_init (stmt1
, GIMPLE_ASSIGN
, 3);
6902 gimple_assign_set_rhs_code (stmt1
, code1
);
6903 gimple_assign_set_rhs1 (stmt1
, op1a
);
6904 gimple_assign_set_rhs2 (stmt1
, op1b
);
6906 /* Allocate gimple stmt2 on the stack. */
6908 = (gassign
*) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN
, 3));
6909 gimple_init (stmt2
, GIMPLE_ASSIGN
, 3);
6910 gimple_assign_set_rhs_code (stmt2
, code2
);
6911 gimple_assign_set_rhs1 (stmt2
, op2a
);
6912 gimple_assign_set_rhs2 (stmt2
, op2b
);
6914 /* Allocate SSA names(lhs1) on the stack. */
6915 tree lhs1
= (tree
)XALLOCA (tree_ssa_name
);
6916 memset (lhs1
, 0, sizeof (tree_ssa_name
));
6917 TREE_SET_CODE (lhs1
, SSA_NAME
);
6918 TREE_TYPE (lhs1
) = type
;
6919 init_ssa_name_imm_use (lhs1
);
6921 /* Allocate SSA names(lhs2) on the stack. */
6922 tree lhs2
= (tree
)XALLOCA (tree_ssa_name
);
6923 memset (lhs2
, 0, sizeof (tree_ssa_name
));
6924 TREE_SET_CODE (lhs2
, SSA_NAME
);
6925 TREE_TYPE (lhs2
) = type
;
6926 init_ssa_name_imm_use (lhs2
);
6928 gimple_assign_set_lhs (stmt1
, lhs1
);
6929 gimple_assign_set_lhs (stmt2
, lhs2
);
/* Ask match.pd to simplify CODE (lhs1, lhs2), following all SSA edges
   so the fake definitions above are visible to the patterns.  */
6931 gimple_match_op
op (gimple_match_cond::UNCOND
, code
,
6932 type
, gimple_assign_lhs (stmt1
),
6933 gimple_assign_lhs (stmt2
));
6934 if (op
.resimplify (NULL
, follow_all_ssa_edges
))
6936 if (gimple_simplified_result_is_gimple_val (&op
))
6938 tree res
= op
.ops
[0];
/* A stack-allocated lhs cannot escape: rebuild the original compare.  */
6940 return build2 (code1
, type
, op1a
, op1b
);
6941 else if (res
== lhs2
)
6942 return build2 (code2
, type
, op2a
, op2b
);
6946 else if (op
.code
.is_tree_code ()
6947 && TREE_CODE_CLASS ((tree_code
)op
.code
) == tcc_comparison
)
6949 tree op0
= op
.ops
[0];
6950 tree op1
= op
.ops
[1];
6951 if (op0
== lhs1
|| op0
== lhs2
|| op1
== lhs1
|| op1
== lhs2
)
6952 return NULL_TREE
; /* not simple */
6954 return build2 ((enum tree_code
)op
.code
, op
.type
, op0
, op1
);
6961 /* Try to simplify the AND of two comparisons, specified by
6962 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6963 If this can be simplified to a single expression (without requiring
6964 introducing more SSA variables to hold intermediate values),
6965 return the resulting tree. Otherwise return NULL_TREE.
6966 If the result expression is non-null, it has boolean type. */
6969 maybe_fold_and_comparisons (tree type
,
6970 enum tree_code code1
, tree op1a
, tree op1b
,
6971 enum tree_code code2
, tree op2a
, tree op2b
)
6973 if (tree t
= and_comparisons_1 (type
, code1
, op1a
, op1b
, code2
, op2a
, op2b
))
6976 if (tree t
= and_comparisons_1 (type
, code2
, op2a
, op2b
, code1
, op1a
, op1b
))
6979 if (tree t
= maybe_fold_comparisons_from_match_pd (type
, BIT_AND_EXPR
, code1
,
6980 op1a
, op1b
, code2
, op2a
,
6987 /* Helper function for or_comparisons_1: try to simplify the OR of the
6988 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6989 If INVERT is true, invert the value of VAR before doing the OR.
6990 Return NULL_EXPR if we can't simplify this to a single expression. */
6993 or_var_with_comparison (tree type
, tree var
, bool invert
,
6994 enum tree_code code2
, tree op2a
, tree op2b
)
6997 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
6999 /* We can only deal with variables whose definitions are assignments. */
7000 if (!is_gimple_assign (stmt
))
7003 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
7004 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
7005 Then we only have to consider the simpler non-inverted cases. */
7007 t
= and_var_with_comparison_1 (type
, stmt
,
7008 invert_tree_comparison (code2
, false),
7011 t
= or_var_with_comparison_1 (type
, stmt
, code2
, op2a
, op2b
);
7012 return canonicalize_bool (t
, invert
);
7015 /* Try to simplify the OR of the ssa variable defined by the assignment
7016 STMT with the comparison specified by (OP2A CODE2 OP2B).
7017 Return NULL_EXPR if we can't simplify this to a single expression. */
7020 or_var_with_comparison_1 (tree type
, gimple
*stmt
,
7021 enum tree_code code2
, tree op2a
, tree op2b
)
7023 tree var
= gimple_assign_lhs (stmt
);
7024 tree true_test_var
= NULL_TREE
;
7025 tree false_test_var
= NULL_TREE
;
7026 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
7028 /* Check for identities like (var OR (var != 0)) => true . */
7029 if (TREE_CODE (op2a
) == SSA_NAME
7030 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
7032 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
7033 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
7035 true_test_var
= op2a
;
7036 if (var
== true_test_var
)
7039 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
7040 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
7042 false_test_var
= op2a
;
7043 if (var
== false_test_var
)
7044 return boolean_true_node
;
7048 /* If the definition is a comparison, recurse on it. */
7049 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
7051 tree t
= or_comparisons_1 (type
, innercode
,
7052 gimple_assign_rhs1 (stmt
),
7053 gimple_assign_rhs2 (stmt
),
7061 /* If the definition is an AND or OR expression, we may be able to
7062 simplify by reassociating. */
7063 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
7064 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
7066 tree inner1
= gimple_assign_rhs1 (stmt
);
7067 tree inner2
= gimple_assign_rhs2 (stmt
);
7070 tree partial
= NULL_TREE
;
7071 bool is_or
= (innercode
== BIT_IOR_EXPR
);
7073 /* Check for boolean identities that don't require recursive examination
7075 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
7076 inner1 OR (inner1 AND inner2) => inner1
7077 !inner1 OR (inner1 OR inner2) => true
7078 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
7080 if (inner1
== true_test_var
)
7081 return (is_or
? var
: inner1
);
7082 else if (inner2
== true_test_var
)
7083 return (is_or
? var
: inner2
);
7084 else if (inner1
== false_test_var
)
7087 : or_var_with_comparison (type
, inner2
, false, code2
, op2a
,
7089 else if (inner2
== false_test_var
)
7092 : or_var_with_comparison (type
, inner1
, false, code2
, op2a
,
7095 /* Next, redistribute/reassociate the OR across the inner tests.
7096 Compute the first partial result, (inner1 OR (op2a code op2b)) */
7097 if (TREE_CODE (inner1
) == SSA_NAME
7098 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
7099 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
7100 && (t
= maybe_fold_or_comparisons (type
, gimple_assign_rhs_code (s
),
7101 gimple_assign_rhs1 (s
),
7102 gimple_assign_rhs2 (s
),
7103 code2
, op2a
, op2b
)))
7105 /* Handle the OR case, where we are reassociating:
7106 (inner1 OR inner2) OR (op2a code2 op2b)
7108 If the partial result t is a constant, we win. Otherwise
7109 continue on to try reassociating with the other inner test. */
7112 if (integer_onep (t
))
7113 return boolean_true_node
;
7114 else if (integer_zerop (t
))
7118 /* Handle the AND case, where we are redistributing:
7119 (inner1 AND inner2) OR (op2a code2 op2b)
7120 => (t AND (inner2 OR (op2a code op2b))) */
7121 else if (integer_zerop (t
))
7122 return boolean_false_node
;
7124 /* Save partial result for later. */
7128 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
7129 if (TREE_CODE (inner2
) == SSA_NAME
7130 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
7131 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
7132 && (t
= maybe_fold_or_comparisons (type
, gimple_assign_rhs_code (s
),
7133 gimple_assign_rhs1 (s
),
7134 gimple_assign_rhs2 (s
),
7135 code2
, op2a
, op2b
)))
7137 /* Handle the OR case, where we are reassociating:
7138 (inner1 OR inner2) OR (op2a code2 op2b)
7140 => (t OR partial) */
7143 if (integer_zerop (t
))
7145 else if (integer_onep (t
))
7146 return boolean_true_node
;
7147 /* If both are the same, we can apply the identity
7149 else if (partial
&& same_bool_result_p (t
, partial
))
7153 /* Handle the AND case, where we are redistributing:
7154 (inner1 AND inner2) OR (op2a code2 op2b)
7155 => (t AND (inner1 OR (op2a code2 op2b)))
7156 => (t AND partial) */
7159 if (integer_zerop (t
))
7160 return boolean_false_node
;
7163 /* We already got a simplification for the other
7164 operand to the redistributed AND expression. The
7165 interesting case is when at least one is true.
7166 Or, if both are the same, we can apply the identity
7168 if (integer_onep (partial
))
7170 else if (integer_onep (t
))
7172 else if (same_bool_result_p (t
, partial
))
7181 /* Try to simplify the OR of two comparisons defined by
7182 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7183 If this can be done without constructing an intermediate value,
7184 return the resulting tree; otherwise NULL_TREE is returned.
7185 This function is deliberately asymmetric as it recurses on SSA_DEFs
7186 in the first comparison but not the second. */
7189 or_comparisons_1 (tree type
, enum tree_code code1
, tree op1a
, tree op1b
,
7190 enum tree_code code2
, tree op2a
, tree op2b
)
7192 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
7194 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
7195 if (operand_equal_p (op1a
, op2a
, 0)
7196 && operand_equal_p (op1b
, op2b
, 0))
7198 /* Result will be either NULL_TREE, or a combined comparison. */
7199 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
7200 TRUTH_ORIF_EXPR
, code1
, code2
,
7201 truth_type
, op1a
, op1b
);
7206 /* Likewise the swapped case of the above. */
7207 if (operand_equal_p (op1a
, op2b
, 0)
7208 && operand_equal_p (op1b
, op2a
, 0))
7210 /* Result will be either NULL_TREE, or a combined comparison. */
7211 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
7212 TRUTH_ORIF_EXPR
, code1
,
7213 swap_tree_comparison (code2
),
7214 truth_type
, op1a
, op1b
);
7219 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
7220 NAME's definition is a truth value. See if there are any simplifications
7221 that can be done against the NAME's definition. */
7222 if (TREE_CODE (op1a
) == SSA_NAME
7223 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
7224 && (integer_zerop (op1b
) || integer_onep (op1b
)))
7226 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
7227 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
7228 gimple
*stmt
= SSA_NAME_DEF_STMT (op1a
);
7229 switch (gimple_code (stmt
))
7232 /* Try to simplify by copy-propagating the definition. */
7233 return or_var_with_comparison (type
, op1a
, invert
, code2
, op2a
,
7237 /* If every argument to the PHI produces the same result when
7238 ORed with the second comparison, we win.
7239 Do not do this unless the type is bool since we need a bool
7240 result here anyway. */
7241 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
7243 tree result
= NULL_TREE
;
7245 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
7247 tree arg
= gimple_phi_arg_def (stmt
, i
);
7249 /* If this PHI has itself as an argument, ignore it.
7250 If all the other args produce the same result,
7252 if (arg
== gimple_phi_result (stmt
))
7254 else if (TREE_CODE (arg
) == INTEGER_CST
)
7256 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
7259 result
= boolean_true_node
;
7260 else if (!integer_onep (result
))
7264 result
= fold_build2 (code2
, boolean_type_node
,
7266 else if (!same_bool_comparison_p (result
,
7270 else if (TREE_CODE (arg
) == SSA_NAME
7271 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
7274 gimple
*def_stmt
= SSA_NAME_DEF_STMT (arg
);
7275 /* In simple cases we can look through PHI nodes,
7276 but we have to be careful with loops.
7278 if (! dom_info_available_p (CDI_DOMINATORS
)
7279 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
7280 || dominated_by_p (CDI_DOMINATORS
,
7281 gimple_bb (def_stmt
),
7284 temp
= or_var_with_comparison (type
, arg
, invert
, code2
,
7290 else if (!same_bool_result_p (result
, temp
))
7306 /* Try to simplify the OR of two comparisons, specified by
7307 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
7308 If this can be simplified to a single expression (without requiring
7309 introducing more SSA variables to hold intermediate values),
7310 return the resulting tree. Otherwise return NULL_TREE.
7311 If the result expression is non-null, it has boolean type. */
7314 maybe_fold_or_comparisons (tree type
,
7315 enum tree_code code1
, tree op1a
, tree op1b
,
7316 enum tree_code code2
, tree op2a
, tree op2b
)
7318 if (tree t
= or_comparisons_1 (type
, code1
, op1a
, op1b
, code2
, op2a
, op2b
))
7321 if (tree t
= or_comparisons_1 (type
, code2
, op2a
, op2b
, code1
, op1a
, op1b
))
7324 if (tree t
= maybe_fold_comparisons_from_match_pd (type
, BIT_IOR_EXPR
, code1
,
7325 op1a
, op1b
, code2
, op2a
,
7332 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7334 Either NULL_TREE, a simplified but non-constant or a constant
7337 ??? This should go into a gimple-fold-inline.h file to be eventually
7338 privatized with the single valueize function used in the various TUs
7339 to avoid the indirect function call overhead. */
7342 gimple_fold_stmt_to_constant_1 (gimple
*stmt
, tree (*valueize
) (tree
),
7343 tree (*gvalueize
) (tree
))
7345 gimple_match_op res_op
;
7346 /* ??? The SSA propagators do not correctly deal with following SSA use-def
7347 edges if there are intermediate VARYING defs. For this reason
7348 do not follow SSA edges here even though SCCVN can technically
7349 just deal fine with that. */
7350 if (gimple_simplify (stmt
, &res_op
, NULL
, gvalueize
, valueize
))
7352 tree res
= NULL_TREE
;
7353 if (gimple_simplified_result_is_gimple_val (&res_op
))
7354 res
= res_op
.ops
[0];
7355 else if (mprts_hook
)
7356 res
= mprts_hook (&res_op
);
7359 if (dump_file
&& dump_flags
& TDF_DETAILS
)
7361 fprintf (dump_file
, "Match-and-simplified ");
7362 print_gimple_expr (dump_file
, stmt
, 0, TDF_SLIM
);
7363 fprintf (dump_file
, " to ");
7364 print_generic_expr (dump_file
, res
);
7365 fprintf (dump_file
, "\n");
7371 location_t loc
= gimple_location (stmt
);
7372 switch (gimple_code (stmt
))
7376 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
7378 switch (get_gimple_rhs_class (subcode
))
7380 case GIMPLE_SINGLE_RHS
:
7382 tree rhs
= gimple_assign_rhs1 (stmt
);
7383 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
7385 if (TREE_CODE (rhs
) == SSA_NAME
)
7387 /* If the RHS is an SSA_NAME, return its known constant value,
7389 return (*valueize
) (rhs
);
7391 /* Handle propagating invariant addresses into address
7393 else if (TREE_CODE (rhs
) == ADDR_EXPR
7394 && !is_gimple_min_invariant (rhs
))
7396 poly_int64 offset
= 0;
7398 base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs
, 0),
7402 && (CONSTANT_CLASS_P (base
)
7403 || decl_address_invariant_p (base
)))
7404 return build_invariant_address (TREE_TYPE (rhs
),
7407 else if (TREE_CODE (rhs
) == CONSTRUCTOR
7408 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
7409 && known_eq (CONSTRUCTOR_NELTS (rhs
),
7410 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
7415 nelts
= CONSTRUCTOR_NELTS (rhs
);
7416 tree_vector_builder
vec (TREE_TYPE (rhs
), nelts
, 1);
7417 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
7419 val
= (*valueize
) (val
);
7420 if (TREE_CODE (val
) == INTEGER_CST
7421 || TREE_CODE (val
) == REAL_CST
7422 || TREE_CODE (val
) == FIXED_CST
)
7423 vec
.quick_push (val
);
7428 return vec
.build ();
7430 if (subcode
== OBJ_TYPE_REF
)
7432 tree val
= (*valueize
) (OBJ_TYPE_REF_EXPR (rhs
));
7433 /* If callee is constant, we can fold away the wrapper. */
7434 if (is_gimple_min_invariant (val
))
7438 if (kind
== tcc_reference
)
7440 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
7441 || TREE_CODE (rhs
) == REALPART_EXPR
7442 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
7443 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
7445 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
7446 return fold_unary_loc (EXPR_LOCATION (rhs
),
7448 TREE_TYPE (rhs
), val
);
7450 else if (TREE_CODE (rhs
) == BIT_FIELD_REF
7451 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
7453 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
7454 return fold_ternary_loc (EXPR_LOCATION (rhs
),
7456 TREE_TYPE (rhs
), val
,
7457 TREE_OPERAND (rhs
, 1),
7458 TREE_OPERAND (rhs
, 2));
7460 else if (TREE_CODE (rhs
) == MEM_REF
7461 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
7463 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
7464 if (TREE_CODE (val
) == ADDR_EXPR
7465 && is_gimple_min_invariant (val
))
7467 tree tem
= fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
7469 TREE_OPERAND (rhs
, 1));
7474 return fold_const_aggregate_ref_1 (rhs
, valueize
);
7476 else if (kind
== tcc_declaration
)
7477 return get_symbol_constant_value (rhs
);
7481 case GIMPLE_UNARY_RHS
:
7484 case GIMPLE_BINARY_RHS
:
7485 /* Translate &x + CST into an invariant form suitable for
7486 further propagation. */
7487 if (subcode
== POINTER_PLUS_EXPR
)
7489 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
7490 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
7491 if (TREE_CODE (op0
) == ADDR_EXPR
7492 && TREE_CODE (op1
) == INTEGER_CST
)
7494 tree off
= fold_convert (ptr_type_node
, op1
);
7496 (loc
, ADDR_EXPR
, TREE_TYPE (op0
),
7497 fold_build2 (MEM_REF
,
7498 TREE_TYPE (TREE_TYPE (op0
)),
7499 unshare_expr (op0
), off
));
7502 /* Canonicalize bool != 0 and bool == 0 appearing after
7503 valueization. While gimple_simplify handles this
7504 it can get confused by the ~X == 1 -> X == 0 transform
7505 which we cant reduce to a SSA name or a constant
7506 (and we have no way to tell gimple_simplify to not
7507 consider those transforms in the first place). */
7508 else if (subcode
== EQ_EXPR
7509 || subcode
== NE_EXPR
)
7511 tree lhs
= gimple_assign_lhs (stmt
);
7512 tree op0
= gimple_assign_rhs1 (stmt
);
7513 if (useless_type_conversion_p (TREE_TYPE (lhs
),
7516 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
7517 op0
= (*valueize
) (op0
);
7518 if (TREE_CODE (op0
) == INTEGER_CST
)
7519 std::swap (op0
, op1
);
7520 if (TREE_CODE (op1
) == INTEGER_CST
7521 && ((subcode
== NE_EXPR
&& integer_zerop (op1
))
7522 || (subcode
== EQ_EXPR
&& integer_onep (op1
))))
7528 case GIMPLE_TERNARY_RHS
:
7530 /* Handle ternary operators that can appear in GIMPLE form. */
7531 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
7532 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
7533 tree op2
= (*valueize
) (gimple_assign_rhs3 (stmt
));
7534 return fold_ternary_loc (loc
, subcode
,
7535 TREE_TYPE (gimple_assign_lhs (stmt
)),
7547 gcall
*call_stmt
= as_a
<gcall
*> (stmt
);
7549 if (gimple_call_internal_p (stmt
))
7551 enum tree_code subcode
= ERROR_MARK
;
7552 switch (gimple_call_internal_fn (stmt
))
7554 case IFN_UBSAN_CHECK_ADD
:
7555 subcode
= PLUS_EXPR
;
7557 case IFN_UBSAN_CHECK_SUB
:
7558 subcode
= MINUS_EXPR
;
7560 case IFN_UBSAN_CHECK_MUL
:
7561 subcode
= MULT_EXPR
;
7563 case IFN_BUILTIN_EXPECT
:
7565 tree arg0
= gimple_call_arg (stmt
, 0);
7566 tree op0
= (*valueize
) (arg0
);
7567 if (TREE_CODE (op0
) == INTEGER_CST
)
7574 tree arg0
= gimple_call_arg (stmt
, 0);
7575 tree arg1
= gimple_call_arg (stmt
, 1);
7576 tree op0
= (*valueize
) (arg0
);
7577 tree op1
= (*valueize
) (arg1
);
7579 if (TREE_CODE (op0
) != INTEGER_CST
7580 || TREE_CODE (op1
) != INTEGER_CST
)
7585 /* x * 0 = 0 * x = 0 without overflow. */
7586 if (integer_zerop (op0
) || integer_zerop (op1
))
7587 return build_zero_cst (TREE_TYPE (arg0
));
7590 /* y - y = 0 without overflow. */
7591 if (operand_equal_p (op0
, op1
, 0))
7592 return build_zero_cst (TREE_TYPE (arg0
));
7599 = fold_binary_loc (loc
, subcode
, TREE_TYPE (arg0
), op0
, op1
);
7601 && TREE_CODE (res
) == INTEGER_CST
7602 && !TREE_OVERFLOW (res
))
7607 fn
= (*valueize
) (gimple_call_fn (stmt
));
7608 if (TREE_CODE (fn
) == ADDR_EXPR
7609 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
7610 && fndecl_built_in_p (TREE_OPERAND (fn
, 0))
7611 && gimple_builtin_call_types_compatible_p (stmt
,
7612 TREE_OPERAND (fn
, 0)))
7614 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
7617 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
7618 args
[i
] = (*valueize
) (gimple_call_arg (stmt
, i
));
7619 retval
= fold_builtin_call_array (loc
,
7620 gimple_call_return_type (call_stmt
),
7621 fn
, gimple_call_num_args (stmt
), args
);
7624 /* fold_call_expr wraps the result inside a NOP_EXPR. */
7625 STRIP_NOPS (retval
);
7626 retval
= fold_convert (gimple_call_return_type (call_stmt
),
7639 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7640 Returns NULL_TREE if folding to a constant is not possible, otherwise
7641 returns a constant according to is_gimple_min_invariant. */
7644 gimple_fold_stmt_to_constant (gimple
*stmt
, tree (*valueize
) (tree
))
7646 tree res
= gimple_fold_stmt_to_constant_1 (stmt
, valueize
);
7647 if (res
&& is_gimple_min_invariant (res
))
7653 /* The following set of functions are supposed to fold references using
7654 their constant initializers. */
7656 /* See if we can find constructor defining value of BASE.
7657 When we know the consructor with constant offset (such as
7658 base is array[40] and we do know constructor of array), then
7659 BIT_OFFSET is adjusted accordingly.
7661 As a special case, return error_mark_node when constructor
7662 is not explicitly available, but it is known to be zero
7663 such as 'static const int a;'. */
7665 get_base_constructor (tree base
, poly_int64_pod
*bit_offset
,
7666 tree (*valueize
)(tree
))
7668 poly_int64 bit_offset2
, size
, max_size
;
7671 if (TREE_CODE (base
) == MEM_REF
)
7673 poly_offset_int boff
= *bit_offset
+ mem_ref_offset (base
) * BITS_PER_UNIT
;
7674 if (!boff
.to_shwi (bit_offset
))
7678 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
7679 base
= valueize (TREE_OPERAND (base
, 0));
7680 if (!base
|| TREE_CODE (base
) != ADDR_EXPR
)
7682 base
= TREE_OPERAND (base
, 0);
7685 && TREE_CODE (base
) == SSA_NAME
)
7686 base
= valueize (base
);
7688 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
7689 DECL_INITIAL. If BASE is a nested reference into another
7690 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
7691 the inner reference. */
7692 switch (TREE_CODE (base
))
7697 tree init
= ctor_for_folding (base
);
7699 /* Our semantic is exact opposite of ctor_for_folding;
7700 NULL means unknown, while error_mark_node is 0. */
7701 if (init
== error_mark_node
)
7704 return error_mark_node
;
7708 case VIEW_CONVERT_EXPR
:
7709 return get_base_constructor (TREE_OPERAND (base
, 0),
7710 bit_offset
, valueize
);
7714 base
= get_ref_base_and_extent (base
, &bit_offset2
, &size
, &max_size
,
7716 if (!known_size_p (max_size
) || maybe_ne (size
, max_size
))
7718 *bit_offset
+= bit_offset2
;
7719 return get_base_constructor (base
, bit_offset
, valueize
);
7725 if (CONSTANT_CLASS_P (base
))
7732 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
7733 to the memory at bit OFFSET. When non-null, TYPE is the expected
7734 type of the reference; otherwise the type of the referenced element
7735 is used instead. When SIZE is zero, attempt to fold a reference to
7736 the entire element which OFFSET refers to. Increment *SUBOFF by
7737 the bit offset of the accessed element. */
7740 fold_array_ctor_reference (tree type
, tree ctor
,
7741 unsigned HOST_WIDE_INT offset
,
7742 unsigned HOST_WIDE_INT size
,
7744 unsigned HOST_WIDE_INT
*suboff
)
7746 offset_int low_bound
;
7747 offset_int elt_size
;
7748 offset_int access_index
;
7749 tree domain_type
= NULL_TREE
;
7750 HOST_WIDE_INT inner_offset
;
7752 /* Compute low bound and elt size. */
7753 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
7754 domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
7755 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
7757 /* Static constructors for variably sized objects make no sense. */
7758 if (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) != INTEGER_CST
)
7760 low_bound
= wi::to_offset (TYPE_MIN_VALUE (domain_type
));
7764 /* Static constructors for variably sized objects make no sense. */
7765 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
)))) != INTEGER_CST
)
7767 elt_size
= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))));
7769 /* When TYPE is non-null, verify that it specifies a constant-sized
7770 access of a multiple of the array element size. Avoid division
7771 by zero below when ELT_SIZE is zero, such as with the result of
7772 an initializer for a zero-length array or an empty struct. */
7775 && (!TYPE_SIZE_UNIT (type
)
7776 || TREE_CODE (TYPE_SIZE_UNIT (type
)) != INTEGER_CST
)))
7779 /* Compute the array index we look for. */
7780 access_index
= wi::udiv_trunc (offset_int (offset
/ BITS_PER_UNIT
),
7782 access_index
+= low_bound
;
7784 /* And offset within the access. */
7785 inner_offset
= offset
% (elt_size
.to_uhwi () * BITS_PER_UNIT
);
7787 unsigned HOST_WIDE_INT elt_sz
= elt_size
.to_uhwi ();
7788 if (size
> elt_sz
* BITS_PER_UNIT
)
7790 /* native_encode_expr constraints. */
7791 if (size
> MAX_BITSIZE_MODE_ANY_MODE
7792 || size
% BITS_PER_UNIT
!= 0
7793 || inner_offset
% BITS_PER_UNIT
!= 0
7794 || elt_sz
> MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
)
7798 tree val
= get_array_ctor_element_at_index (ctor
, access_index
,
7800 if (!val
&& ctor_idx
>= CONSTRUCTOR_NELTS (ctor
))
7801 return build_zero_cst (type
);
7803 /* native-encode adjacent ctor elements. */
7804 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
7805 unsigned bufoff
= 0;
7806 offset_int index
= 0;
7807 offset_int max_index
= access_index
;
7808 constructor_elt
*elt
= CONSTRUCTOR_ELT (ctor
, ctor_idx
);
7810 val
= build_zero_cst (TREE_TYPE (TREE_TYPE (ctor
)));
7811 else if (!CONSTANT_CLASS_P (val
))
7815 else if (TREE_CODE (elt
->index
) == RANGE_EXPR
)
7817 index
= wi::to_offset (TREE_OPERAND (elt
->index
, 0));
7818 max_index
= wi::to_offset (TREE_OPERAND (elt
->index
, 1));
7821 index
= max_index
= wi::to_offset (elt
->index
);
7822 index
= wi::umax (index
, access_index
);
7825 if (bufoff
+ elt_sz
> sizeof (buf
))
7826 elt_sz
= sizeof (buf
) - bufoff
;
7827 int len
= native_encode_expr (val
, buf
+ bufoff
, elt_sz
,
7828 inner_offset
/ BITS_PER_UNIT
);
7829 if (len
!= (int) elt_sz
- inner_offset
/ BITS_PER_UNIT
)
7835 if (wi::cmpu (access_index
, index
) == 0)
7837 else if (wi::cmpu (access_index
, max_index
) > 0)
7840 if (ctor_idx
>= CONSTRUCTOR_NELTS (ctor
))
7842 val
= build_zero_cst (TREE_TYPE (TREE_TYPE (ctor
)));
7847 elt
= CONSTRUCTOR_ELT (ctor
, ctor_idx
);
7849 max_index
= access_index
;
7852 else if (TREE_CODE (elt
->index
) == RANGE_EXPR
)
7854 index
= wi::to_offset (TREE_OPERAND (elt
->index
, 0));
7855 max_index
= wi::to_offset (TREE_OPERAND (elt
->index
, 1));
7858 index
= max_index
= wi::to_offset (elt
->index
);
7859 index
= wi::umax (index
, access_index
);
7860 if (wi::cmpu (access_index
, index
) == 0)
7863 val
= build_zero_cst (TREE_TYPE (TREE_TYPE (ctor
)));
7867 while (bufoff
< size
/ BITS_PER_UNIT
);
7869 return native_interpret_expr (type
, buf
, size
/ BITS_PER_UNIT
);
7872 if (tree val
= get_array_ctor_element_at_index (ctor
, access_index
))
7874 if (!size
&& TREE_CODE (val
) != CONSTRUCTOR
)
7876 /* For the final reference to the entire accessed element
7877 (SIZE is zero), reset INNER_OFFSET, disegard TYPE (which
7878 may be null) in favor of the type of the element, and set
7879 SIZE to the size of the accessed element. */
7881 type
= TREE_TYPE (val
);
7882 size
= elt_sz
* BITS_PER_UNIT
;
7884 else if (size
&& access_index
< CONSTRUCTOR_NELTS (ctor
) - 1
7885 && TREE_CODE (val
) == CONSTRUCTOR
7886 && (elt_sz
* BITS_PER_UNIT
- inner_offset
) < size
)
7887 /* If this isn't the last element in the CTOR and a CTOR itself
7888 and it does not cover the whole object we are requesting give up
7889 since we're not set up for combining from multiple CTORs. */
7892 *suboff
+= access_index
.to_uhwi () * elt_sz
* BITS_PER_UNIT
;
7893 return fold_ctor_reference (type
, val
, inner_offset
, size
, from_decl
,
7897 /* Memory not explicitly mentioned in constructor is 0 (or
7898 the reference is out of range). */
7899 return type
? build_zero_cst (type
) : NULL_TREE
;
7902 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
7903 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
7904 is the expected type of the reference; otherwise the type of
7905 the referenced member is used instead. When SIZE is zero,
7906 attempt to fold a reference to the entire member which OFFSET
7907 refers to; in this case. Increment *SUBOFF by the bit offset
7908 of the accessed member. */
7911 fold_nonarray_ctor_reference (tree type
, tree ctor
,
7912 unsigned HOST_WIDE_INT offset
,
7913 unsigned HOST_WIDE_INT size
,
7915 unsigned HOST_WIDE_INT
*suboff
)
7917 unsigned HOST_WIDE_INT cnt
;
7920 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
,
7923 tree byte_offset
= DECL_FIELD_OFFSET (cfield
);
7924 tree field_offset
= DECL_FIELD_BIT_OFFSET (cfield
);
7925 tree field_size
= DECL_SIZE (cfield
);
7929 /* Determine the size of the flexible array member from
7930 the size of the initializer provided for it. */
7931 field_size
= TYPE_SIZE (TREE_TYPE (cval
));
7934 /* Variable sized objects in static constructors makes no sense,
7935 but field_size can be NULL for flexible array members. */
7936 gcc_assert (TREE_CODE (field_offset
) == INTEGER_CST
7937 && TREE_CODE (byte_offset
) == INTEGER_CST
7938 && (field_size
!= NULL_TREE
7939 ? TREE_CODE (field_size
) == INTEGER_CST
7940 : TREE_CODE (TREE_TYPE (cfield
)) == ARRAY_TYPE
));
7942 /* Compute bit offset of the field. */
7943 offset_int bitoffset
7944 = (wi::to_offset (field_offset
)
7945 + (wi::to_offset (byte_offset
) << LOG2_BITS_PER_UNIT
));
7946 /* Compute bit offset where the field ends. */
7947 offset_int bitoffset_end
;
7948 if (field_size
!= NULL_TREE
)
7949 bitoffset_end
= bitoffset
+ wi::to_offset (field_size
);
7953 /* Compute the bit offset of the end of the desired access.
7954 As a special case, if the size of the desired access is
7955 zero, assume the access is to the entire field (and let
7956 the caller make any necessary adjustments by storing
7957 the actual bounds of the field in FIELDBOUNDS). */
7958 offset_int access_end
= offset_int (offset
);
7962 access_end
= bitoffset_end
;
7964 /* Is there any overlap between the desired access at
7965 [OFFSET, OFFSET+SIZE) and the offset of the field within
7966 the object at [BITOFFSET, BITOFFSET_END)? */
7967 if (wi::cmps (access_end
, bitoffset
) > 0
7968 && (field_size
== NULL_TREE
7969 || wi::lts_p (offset
, bitoffset_end
)))
7971 *suboff
+= bitoffset
.to_uhwi ();
7973 if (!size
&& TREE_CODE (cval
) != CONSTRUCTOR
)
7975 /* For the final reference to the entire accessed member
7976 (SIZE is zero), reset OFFSET, disegard TYPE (which may
7977 be null) in favor of the type of the member, and set
7978 SIZE to the size of the accessed member. */
7979 offset
= bitoffset
.to_uhwi ();
7980 type
= TREE_TYPE (cval
);
7981 size
= (bitoffset_end
- bitoffset
).to_uhwi ();
7984 /* We do have overlap. Now see if the field is large enough
7985 to cover the access. Give up for accesses that extend
7986 beyond the end of the object or that span multiple fields. */
7987 if (wi::cmps (access_end
, bitoffset_end
) > 0)
7989 if (offset
< bitoffset
)
7992 offset_int inner_offset
= offset_int (offset
) - bitoffset
;
7993 return fold_ctor_reference (type
, cval
,
7994 inner_offset
.to_uhwi (), size
,
8002 return build_zero_cst (type
);
8005 /* CTOR is value initializing memory. Fold a reference of TYPE and
8006 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
8007 is zero, attempt to fold a reference to the entire subobject
8008 which OFFSET refers to. This is used when folding accesses to
8009 string members of aggregates. When non-null, set *SUBOFF to
8010 the bit offset of the accessed subobject. */
8013 fold_ctor_reference (tree type
, tree ctor
, const poly_uint64
&poly_offset
,
8014 const poly_uint64
&poly_size
, tree from_decl
,
8015 unsigned HOST_WIDE_INT
*suboff
/* = NULL */)
8019 /* We found the field with exact match. */
8021 && useless_type_conversion_p (type
, TREE_TYPE (ctor
))
8022 && known_eq (poly_offset
, 0U))
8023 return canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
8025 /* The remaining optimizations need a constant size and offset. */
8026 unsigned HOST_WIDE_INT size
, offset
;
8027 if (!poly_size
.is_constant (&size
) || !poly_offset
.is_constant (&offset
))
8030 /* We are at the end of walk, see if we can view convert the
8032 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor
)) && !offset
8033 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
8034 && !compare_tree_int (TYPE_SIZE (type
), size
)
8035 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor
)), size
))
8037 ret
= canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
8040 ret
= fold_unary (VIEW_CONVERT_EXPR
, type
, ret
);
8042 STRIP_USELESS_TYPE_CONVERSION (ret
);
8046 /* For constants and byte-aligned/sized reads try to go through
8047 native_encode/interpret. */
8048 if (CONSTANT_CLASS_P (ctor
)
8049 && BITS_PER_UNIT
== 8
8050 && offset
% BITS_PER_UNIT
== 0
8051 && offset
/ BITS_PER_UNIT
<= INT_MAX
8052 && size
% BITS_PER_UNIT
== 0
8053 && size
<= MAX_BITSIZE_MODE_ANY_MODE
8054 && can_native_interpret_type_p (type
))
8056 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
8057 int len
= native_encode_expr (ctor
, buf
, size
/ BITS_PER_UNIT
,
8058 offset
/ BITS_PER_UNIT
);
8060 return native_interpret_expr (type
, buf
, len
);
8062 if (TREE_CODE (ctor
) == CONSTRUCTOR
)
8064 unsigned HOST_WIDE_INT dummy
= 0;
8069 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
8070 || TREE_CODE (TREE_TYPE (ctor
)) == VECTOR_TYPE
)
8071 ret
= fold_array_ctor_reference (type
, ctor
, offset
, size
,
8074 ret
= fold_nonarray_ctor_reference (type
, ctor
, offset
, size
,
8077 /* Fall back to native_encode_initializer. Needs to be done
8078 only in the outermost fold_ctor_reference call (because it itself
8079 recurses into CONSTRUCTORs) and doesn't update suboff. */
8080 if (ret
== NULL_TREE
8082 && BITS_PER_UNIT
== 8
8083 && offset
% BITS_PER_UNIT
== 0
8084 && offset
/ BITS_PER_UNIT
<= INT_MAX
8085 && size
% BITS_PER_UNIT
== 0
8086 && size
<= MAX_BITSIZE_MODE_ANY_MODE
8087 && can_native_interpret_type_p (type
))
8089 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
8090 int len
= native_encode_initializer (ctor
, buf
, size
/ BITS_PER_UNIT
,
8091 offset
/ BITS_PER_UNIT
);
8093 return native_interpret_expr (type
, buf
, len
);
8102 /* Return the tree representing the element referenced by T if T is an
8103 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
8104 names using VALUEIZE. Return NULL_TREE otherwise. */
8107 fold_const_aggregate_ref_1 (tree t
, tree (*valueize
) (tree
))
8109 tree ctor
, idx
, base
;
8110 poly_int64 offset
, size
, max_size
;
8114 if (TREE_THIS_VOLATILE (t
))
8118 return get_symbol_constant_value (t
);
8120 tem
= fold_read_from_constant_string (t
);
8124 switch (TREE_CODE (t
))
8127 case ARRAY_RANGE_REF
:
8128 /* Constant indexes are handled well by get_base_constructor.
8129 Only special case variable offsets.
8130 FIXME: This code can't handle nested references with variable indexes
8131 (they will be handled only by iteration of ccp). Perhaps we can bring
8132 get_ref_base_and_extent here and make it use a valueize callback. */
8133 if (TREE_CODE (TREE_OPERAND (t
, 1)) == SSA_NAME
8135 && (idx
= (*valueize
) (TREE_OPERAND (t
, 1)))
8136 && poly_int_tree_p (idx
))
8138 tree low_bound
, unit_size
;
8140 /* If the resulting bit-offset is constant, track it. */
8141 if ((low_bound
= array_ref_low_bound (t
),
8142 poly_int_tree_p (low_bound
))
8143 && (unit_size
= array_ref_element_size (t
),
8144 tree_fits_uhwi_p (unit_size
)))
8146 poly_offset_int woffset
8147 = wi::sext (wi::to_poly_offset (idx
)
8148 - wi::to_poly_offset (low_bound
),
8149 TYPE_PRECISION (sizetype
));
8150 woffset
*= tree_to_uhwi (unit_size
);
8151 woffset
*= BITS_PER_UNIT
;
8152 if (woffset
.to_shwi (&offset
))
8154 base
= TREE_OPERAND (t
, 0);
8155 ctor
= get_base_constructor (base
, &offset
, valueize
);
8156 /* Empty constructor. Always fold to 0. */
8157 if (ctor
== error_mark_node
)
8158 return build_zero_cst (TREE_TYPE (t
));
8159 /* Out of bound array access. Value is undefined,
8161 if (maybe_lt (offset
, 0))
8163 /* We cannot determine ctor. */
8166 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
,
8167 tree_to_uhwi (unit_size
)
8177 case TARGET_MEM_REF
:
8179 base
= get_ref_base_and_extent (t
, &offset
, &size
, &max_size
, &reverse
);
8180 ctor
= get_base_constructor (base
, &offset
, valueize
);
8182 /* Empty constructor. Always fold to 0. */
8183 if (ctor
== error_mark_node
)
8184 return build_zero_cst (TREE_TYPE (t
));
8185 /* We do not know precise address. */
8186 if (!known_size_p (max_size
) || maybe_ne (max_size
, size
))
8188 /* We cannot determine ctor. */
8192 /* Out of bound array access. Value is undefined, but don't fold. */
8193 if (maybe_lt (offset
, 0))
8196 tem
= fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
, size
, base
);
8200 /* For bit field reads try to read the representative and
8202 if (TREE_CODE (t
) == COMPONENT_REF
8203 && DECL_BIT_FIELD (TREE_OPERAND (t
, 1))
8204 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t
, 1)))
8206 HOST_WIDE_INT csize
, coffset
;
8207 tree field
= TREE_OPERAND (t
, 1);
8208 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (field
);
8209 if (INTEGRAL_TYPE_P (TREE_TYPE (repr
))
8210 && size
.is_constant (&csize
)
8211 && offset
.is_constant (&coffset
)
8212 && (coffset
% BITS_PER_UNIT
!= 0
8213 || csize
% BITS_PER_UNIT
!= 0)
8215 && BYTES_BIG_ENDIAN
== WORDS_BIG_ENDIAN
)
8217 poly_int64 bitoffset
;
8218 poly_uint64 field_offset
, repr_offset
;
8219 if (poly_int_tree_p (DECL_FIELD_OFFSET (field
), &field_offset
)
8220 && poly_int_tree_p (DECL_FIELD_OFFSET (repr
), &repr_offset
))
8221 bitoffset
= (field_offset
- repr_offset
) * BITS_PER_UNIT
;
8224 bitoffset
+= (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
))
8225 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr
)));
8226 HOST_WIDE_INT bitoff
;
8227 int diff
= (TYPE_PRECISION (TREE_TYPE (repr
))
8228 - TYPE_PRECISION (TREE_TYPE (field
)));
8229 if (bitoffset
.is_constant (&bitoff
)
8234 size
= tree_to_uhwi (DECL_SIZE (repr
));
8236 tem
= fold_ctor_reference (TREE_TYPE (repr
), ctor
, offset
,
8238 if (tem
&& TREE_CODE (tem
) == INTEGER_CST
)
8240 if (!BYTES_BIG_ENDIAN
)
8241 tem
= wide_int_to_tree (TREE_TYPE (field
),
8242 wi::lrshift (wi::to_wide (tem
),
8245 tem
= wide_int_to_tree (TREE_TYPE (field
),
8246 wi::lrshift (wi::to_wide (tem
),
8258 tree c
= fold_const_aggregate_ref_1 (TREE_OPERAND (t
, 0), valueize
);
8259 if (c
&& TREE_CODE (c
) == COMPLEX_CST
)
8260 return fold_build1_loc (EXPR_LOCATION (t
),
8261 TREE_CODE (t
), TREE_TYPE (t
), c
);
8273 fold_const_aggregate_ref (tree t
)
8275 return fold_const_aggregate_ref_1 (t
, NULL
);
8278 /* Lookup virtual method with index TOKEN in a virtual table V
8280 Set CAN_REFER if non-NULL to false if method
8281 is not referable or if the virtual table is ill-formed (such as rewriten
8282 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
8285 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token
,
8287 unsigned HOST_WIDE_INT offset
,
8290 tree vtable
= v
, init
, fn
;
8291 unsigned HOST_WIDE_INT size
;
8292 unsigned HOST_WIDE_INT elt_size
, access_index
;
8298 /* First of all double check we have virtual table. */
8299 if (!VAR_P (v
) || !DECL_VIRTUAL_P (v
))
8301 /* Pass down that we lost track of the target. */
8307 init
= ctor_for_folding (v
);
8309 /* The virtual tables should always be born with constructors
8310 and we always should assume that they are avaialble for
8311 folding. At the moment we do not stream them in all cases,
8312 but it should never happen that ctor seem unreachable. */
8314 if (init
== error_mark_node
)
8316 /* Pass down that we lost track of the target. */
8321 gcc_checking_assert (TREE_CODE (TREE_TYPE (v
)) == ARRAY_TYPE
);
8322 size
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v
))));
8323 offset
*= BITS_PER_UNIT
;
8324 offset
+= token
* size
;
8326 /* Lookup the value in the constructor that is assumed to be array.
8327 This is equivalent to
8328 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
8329 offset, size, NULL);
8330 but in a constant time. We expect that frontend produced a simple
8331 array without indexed initializers. */
8333 gcc_checking_assert (TREE_CODE (TREE_TYPE (init
)) == ARRAY_TYPE
);
8334 domain_type
= TYPE_DOMAIN (TREE_TYPE (init
));
8335 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type
)));
8336 elt_size
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init
))));
8338 access_index
= offset
/ BITS_PER_UNIT
/ elt_size
;
8339 gcc_checking_assert (offset
% (elt_size
* BITS_PER_UNIT
) == 0);
8341 /* The C++ FE can now produce indexed fields, and we check if the indexes
8343 if (access_index
< CONSTRUCTOR_NELTS (init
))
8345 fn
= CONSTRUCTOR_ELT (init
, access_index
)->value
;
8346 tree idx
= CONSTRUCTOR_ELT (init
, access_index
)->index
;
8347 gcc_checking_assert (!idx
|| tree_to_uhwi (idx
) == access_index
);
8353 /* For type inconsistent program we may end up looking up virtual method
8354 in virtual table that does not contain TOKEN entries. We may overrun
8355 the virtual table and pick up a constant or RTTI info pointer.
8356 In any case the call is undefined. */
8358 || (TREE_CODE (fn
) != ADDR_EXPR
&& TREE_CODE (fn
) != FDESC_EXPR
)
8359 || TREE_CODE (TREE_OPERAND (fn
, 0)) != FUNCTION_DECL
)
8360 fn
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
8363 fn
= TREE_OPERAND (fn
, 0);
8365 /* When cgraph node is missing and function is not public, we cannot
8366 devirtualize. This can happen in WHOPR when the actual method
8367 ends up in other partition, because we found devirtualization
8368 possibility too late. */
8369 if (!can_refer_decl_in_current_unit_p (fn
, vtable
))
8380 /* Make sure we create a cgraph node for functions we'll reference.
8381 They can be non-existent if the reference comes from an entry
8382 of an external vtable for example. */
8383 cgraph_node::get_create (fn
);
8388 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
8389 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
8390 KNOWN_BINFO carries the binfo describing the true type of
8391 OBJ_TYPE_REF_OBJECT(REF).
8392 Set CAN_REFER if non-NULL to false if method
8393 is not referable or if the virtual table is ill-formed (such as rewriten
8394 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
8397 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token
, tree known_binfo
,
8400 unsigned HOST_WIDE_INT offset
;
8403 v
= BINFO_VTABLE (known_binfo
);
8404 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
8408 if (!vtable_pointer_value_to_vtable (v
, &v
, &offset
))
8414 return gimple_get_virt_method_for_vtable (token
, v
, offset
, can_refer
);
8417 /* Given a pointer value T, return a simplified version of an
8418 indirection through T, or NULL_TREE if no simplification is
8419 possible. Note that the resulting type may be different from
8420 the type pointed to in the sense that it is still compatible
8421 from the langhooks point of view. */
8424 gimple_fold_indirect_ref (tree t
)
8426 tree ptype
= TREE_TYPE (t
), type
= TREE_TYPE (ptype
);
8431 subtype
= TREE_TYPE (sub
);
8432 if (!POINTER_TYPE_P (subtype
)
8433 || TYPE_REF_CAN_ALIAS_ALL (ptype
))
8436 if (TREE_CODE (sub
) == ADDR_EXPR
)
8438 tree op
= TREE_OPERAND (sub
, 0);
8439 tree optype
= TREE_TYPE (op
);
8441 if (useless_type_conversion_p (type
, optype
))
8444 /* *(foo *)&fooarray => fooarray[0] */
8445 if (TREE_CODE (optype
) == ARRAY_TYPE
8446 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype
))) == INTEGER_CST
8447 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
8449 tree type_domain
= TYPE_DOMAIN (optype
);
8450 tree min_val
= size_zero_node
;
8451 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
8452 min_val
= TYPE_MIN_VALUE (type_domain
);
8453 if (TREE_CODE (min_val
) == INTEGER_CST
)
8454 return build4 (ARRAY_REF
, type
, op
, min_val
, NULL_TREE
, NULL_TREE
);
8456 /* *(foo *)&complexfoo => __real__ complexfoo */
8457 else if (TREE_CODE (optype
) == COMPLEX_TYPE
8458 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
8459 return fold_build1 (REALPART_EXPR
, type
, op
);
8460 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
8461 else if (TREE_CODE (optype
) == VECTOR_TYPE
8462 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
8464 tree part_width
= TYPE_SIZE (type
);
8465 tree index
= bitsize_int (0);
8466 return fold_build3 (BIT_FIELD_REF
, type
, op
, part_width
, index
);
8470 /* *(p + CST) -> ... */
8471 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
8472 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
8474 tree addr
= TREE_OPERAND (sub
, 0);
8475 tree off
= TREE_OPERAND (sub
, 1);
8479 addrtype
= TREE_TYPE (addr
);
8481 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
8482 if (TREE_CODE (addr
) == ADDR_EXPR
8483 && TREE_CODE (TREE_TYPE (addrtype
)) == VECTOR_TYPE
8484 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
)))
8485 && tree_fits_uhwi_p (off
))
8487 unsigned HOST_WIDE_INT offset
= tree_to_uhwi (off
);
8488 tree part_width
= TYPE_SIZE (type
);
8489 unsigned HOST_WIDE_INT part_widthi
8490 = tree_to_shwi (part_width
) / BITS_PER_UNIT
;
8491 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
8492 tree index
= bitsize_int (indexi
);
8493 if (known_lt (offset
/ part_widthi
,
8494 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype
))))
8495 return fold_build3 (BIT_FIELD_REF
, type
, TREE_OPERAND (addr
, 0),
8499 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
8500 if (TREE_CODE (addr
) == ADDR_EXPR
8501 && TREE_CODE (TREE_TYPE (addrtype
)) == COMPLEX_TYPE
8502 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
))))
8504 tree size
= TYPE_SIZE_UNIT (type
);
8505 if (tree_int_cst_equal (size
, off
))
8506 return fold_build1 (IMAGPART_EXPR
, type
, TREE_OPERAND (addr
, 0));
8509 /* *(p + CST) -> MEM_REF <p, CST>. */
8510 if (TREE_CODE (addr
) != ADDR_EXPR
8511 || DECL_P (TREE_OPERAND (addr
, 0)))
8512 return fold_build2 (MEM_REF
, type
,
8514 wide_int_to_tree (ptype
, wi::to_wide (off
)));
8517 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
8518 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
8519 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype
)))) == INTEGER_CST
8520 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (subtype
))))
8523 tree min_val
= size_zero_node
;
8525 sub
= gimple_fold_indirect_ref (sub
);
8527 sub
= build1 (INDIRECT_REF
, TREE_TYPE (subtype
), osub
);
8528 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
8529 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
8530 min_val
= TYPE_MIN_VALUE (type_domain
);
8531 if (TREE_CODE (min_val
) == INTEGER_CST
)
8532 return build4 (ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
, NULL_TREE
);
8538 /* Return true if CODE is an operation that when operating on signed
8539 integer types involves undefined behavior on overflow and the
8540 operation can be expressed with unsigned arithmetic. */
8543 arith_code_with_undefined_signed_overflow (tree_code code
)
8552 case POINTER_PLUS_EXPR
:
8559 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
8560 operation that can be transformed to unsigned arithmetic by converting
8561 its operand, carrying out the operation in the corresponding unsigned
8562 type and converting the result back to the original type.
8564 Returns a sequence of statements that replace STMT and also contain
8565 a modified form of STMT itself. */
8568 rewrite_to_defined_overflow (gimple
*stmt
)
8570 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
8572 fprintf (dump_file
, "rewriting stmt with undefined signed "
8574 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
8577 tree lhs
= gimple_assign_lhs (stmt
);
8578 tree type
= unsigned_type_for (TREE_TYPE (lhs
));
8579 gimple_seq stmts
= NULL
;
8580 if (gimple_assign_rhs_code (stmt
) == ABS_EXPR
)
8581 gimple_assign_set_rhs_code (stmt
, ABSU_EXPR
);
8583 for (unsigned i
= 1; i
< gimple_num_ops (stmt
); ++i
)
8585 tree op
= gimple_op (stmt
, i
);
8586 op
= gimple_convert (&stmts
, type
, op
);
8587 gimple_set_op (stmt
, i
, op
);
8589 gimple_assign_set_lhs (stmt
, make_ssa_name (type
, stmt
));
8590 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
8591 gimple_assign_set_rhs_code (stmt
, PLUS_EXPR
);
8592 gimple_set_modified (stmt
, true);
8593 gimple_seq_add_stmt (&stmts
, stmt
);
8594 gimple
*cvt
= gimple_build_assign (lhs
, NOP_EXPR
, gimple_assign_lhs (stmt
));
8595 gimple_seq_add_stmt (&stmts
, cvt
);
8601 /* The valueization hook we use for the gimple_build API simplification.
8602 This makes us match fold_buildN behavior by only combining with
8603 statements in the sequence(s) we are currently building. */
8606 gimple_build_valueize (tree op
)
8608 if (gimple_bb (SSA_NAME_DEF_STMT (op
)) == NULL
)
8613 /* Build the expression CODE OP0 of type TYPE with location LOC,
8614 simplifying it first if possible. Returns the built
8615 expression value and appends statements possibly defining it
8619 gimple_build (gimple_seq
*seq
, location_t loc
,
8620 enum tree_code code
, tree type
, tree op0
)
8622 tree res
= gimple_simplify (code
, type
, op0
, seq
, gimple_build_valueize
);
8625 res
= create_tmp_reg_or_ssa_name (type
);
8627 if (code
== REALPART_EXPR
8628 || code
== IMAGPART_EXPR
8629 || code
== VIEW_CONVERT_EXPR
)
8630 stmt
= gimple_build_assign (res
, code
, build1 (code
, type
, op0
));
8632 stmt
= gimple_build_assign (res
, code
, op0
);
8633 gimple_set_location (stmt
, loc
);
8634 gimple_seq_add_stmt_without_update (seq
, stmt
);
8639 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
8640 simplifying it first if possible. Returns the built
8641 expression value and appends statements possibly defining it
8645 gimple_build (gimple_seq
*seq
, location_t loc
,
8646 enum tree_code code
, tree type
, tree op0
, tree op1
)
8648 tree res
= gimple_simplify (code
, type
, op0
, op1
, seq
, gimple_build_valueize
);
8651 res
= create_tmp_reg_or_ssa_name (type
);
8652 gimple
*stmt
= gimple_build_assign (res
, code
, op0
, op1
);
8653 gimple_set_location (stmt
, loc
);
8654 gimple_seq_add_stmt_without_update (seq
, stmt
);
8659 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
8660 simplifying it first if possible. Returns the built
8661 expression value and appends statements possibly defining it
8665 gimple_build (gimple_seq
*seq
, location_t loc
,
8666 enum tree_code code
, tree type
, tree op0
, tree op1
, tree op2
)
8668 tree res
= gimple_simplify (code
, type
, op0
, op1
, op2
,
8669 seq
, gimple_build_valueize
);
8672 res
= create_tmp_reg_or_ssa_name (type
);
8674 if (code
== BIT_FIELD_REF
)
8675 stmt
= gimple_build_assign (res
, code
,
8676 build3 (code
, type
, op0
, op1
, op2
));
8678 stmt
= gimple_build_assign (res
, code
, op0
, op1
, op2
);
8679 gimple_set_location (stmt
, loc
);
8680 gimple_seq_add_stmt_without_update (seq
, stmt
);
8685 /* Build the call FN () with a result of type TYPE (or no result if TYPE is
8686 void) with a location LOC. Returns the built expression value (or NULL_TREE
8687 if TYPE is void) and appends statements possibly defining it to SEQ. */
8690 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
, tree type
)
8692 tree res
= NULL_TREE
;
8694 if (internal_fn_p (fn
))
8695 stmt
= gimple_build_call_internal (as_internal_fn (fn
), 0);
8698 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
8699 stmt
= gimple_build_call (decl
, 0);
8701 if (!VOID_TYPE_P (type
))
8703 res
= create_tmp_reg_or_ssa_name (type
);
8704 gimple_call_set_lhs (stmt
, res
);
8706 gimple_set_location (stmt
, loc
);
8707 gimple_seq_add_stmt_without_update (seq
, stmt
);
8711 /* Build the call FN (ARG0) with a result of type TYPE
8712 (or no result if TYPE is void) with location LOC,
8713 simplifying it first if possible. Returns the built
8714 expression value (or NULL_TREE if TYPE is void) and appends
8715 statements possibly defining it to SEQ. */
8718 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
,
8719 tree type
, tree arg0
)
8721 tree res
= gimple_simplify (fn
, type
, arg0
, seq
, gimple_build_valueize
);
8725 if (internal_fn_p (fn
))
8726 stmt
= gimple_build_call_internal (as_internal_fn (fn
), 1, arg0
);
8729 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
8730 stmt
= gimple_build_call (decl
, 1, arg0
);
8732 if (!VOID_TYPE_P (type
))
8734 res
= create_tmp_reg_or_ssa_name (type
);
8735 gimple_call_set_lhs (stmt
, res
);
8737 gimple_set_location (stmt
, loc
);
8738 gimple_seq_add_stmt_without_update (seq
, stmt
);
8743 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
8744 (or no result if TYPE is void) with location LOC,
8745 simplifying it first if possible. Returns the built
8746 expression value (or NULL_TREE if TYPE is void) and appends
8747 statements possibly defining it to SEQ. */
8750 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
,
8751 tree type
, tree arg0
, tree arg1
)
8753 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, seq
, gimple_build_valueize
);
8757 if (internal_fn_p (fn
))
8758 stmt
= gimple_build_call_internal (as_internal_fn (fn
), 2, arg0
, arg1
);
8761 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
8762 stmt
= gimple_build_call (decl
, 2, arg0
, arg1
);
8764 if (!VOID_TYPE_P (type
))
8766 res
= create_tmp_reg_or_ssa_name (type
);
8767 gimple_call_set_lhs (stmt
, res
);
8769 gimple_set_location (stmt
, loc
);
8770 gimple_seq_add_stmt_without_update (seq
, stmt
);
8775 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
8776 (or no result if TYPE is void) with location LOC,
8777 simplifying it first if possible. Returns the built
8778 expression value (or NULL_TREE if TYPE is void) and appends
8779 statements possibly defining it to SEQ. */
8782 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
,
8783 tree type
, tree arg0
, tree arg1
, tree arg2
)
8785 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, arg2
,
8786 seq
, gimple_build_valueize
);
8790 if (internal_fn_p (fn
))
8791 stmt
= gimple_build_call_internal (as_internal_fn (fn
),
8792 3, arg0
, arg1
, arg2
);
8795 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
8796 stmt
= gimple_build_call (decl
, 3, arg0
, arg1
, arg2
);
8798 if (!VOID_TYPE_P (type
))
8800 res
= create_tmp_reg_or_ssa_name (type
);
8801 gimple_call_set_lhs (stmt
, res
);
8803 gimple_set_location (stmt
, loc
);
8804 gimple_seq_add_stmt_without_update (seq
, stmt
);
8809 /* Build the conversion (TYPE) OP with a result of type TYPE
8810 with location LOC if such conversion is neccesary in GIMPLE,
8811 simplifying it first.
8812 Returns the built expression value and appends
8813 statements possibly defining it to SEQ. */
8816 gimple_convert (gimple_seq
*seq
, location_t loc
, tree type
, tree op
)
8818 if (useless_type_conversion_p (type
, TREE_TYPE (op
)))
8820 return gimple_build (seq
, loc
, NOP_EXPR
, type
, op
);
8823 /* Build the conversion (ptrofftype) OP with a result of a type
8824 compatible with ptrofftype with location LOC if such conversion
8825 is neccesary in GIMPLE, simplifying it first.
8826 Returns the built expression value and appends
8827 statements possibly defining it to SEQ. */
8830 gimple_convert_to_ptrofftype (gimple_seq
*seq
, location_t loc
, tree op
)
8832 if (ptrofftype_p (TREE_TYPE (op
)))
8834 return gimple_convert (seq
, loc
, sizetype
, op
);
8837 /* Build a vector of type TYPE in which each element has the value OP.
8838 Return a gimple value for the result, appending any new statements
8842 gimple_build_vector_from_val (gimple_seq
*seq
, location_t loc
, tree type
,
8845 if (!TYPE_VECTOR_SUBPARTS (type
).is_constant ()
8846 && !CONSTANT_CLASS_P (op
))
8847 return gimple_build (seq
, loc
, VEC_DUPLICATE_EXPR
, type
, op
);
8849 tree res
, vec
= build_vector_from_val (type
, op
);
8850 if (is_gimple_val (vec
))
8852 if (gimple_in_ssa_p (cfun
))
8853 res
= make_ssa_name (type
);
8855 res
= create_tmp_reg (type
);
8856 gimple
*stmt
= gimple_build_assign (res
, vec
);
8857 gimple_set_location (stmt
, loc
);
8858 gimple_seq_add_stmt_without_update (seq
, stmt
);
8862 /* Build a vector from BUILDER, handling the case in which some elements
8863 are non-constant. Return a gimple value for the result, appending any
8864 new instructions to SEQ.
8866 BUILDER must not have a stepped encoding on entry. This is because
8867 the function is not geared up to handle the arithmetic that would
8868 be needed in the variable case, and any code building a vector that
8869 is known to be constant should use BUILDER->build () directly. */
8872 gimple_build_vector (gimple_seq
*seq
, location_t loc
,
8873 tree_vector_builder
*builder
)
8875 gcc_assert (builder
->nelts_per_pattern () <= 2);
8876 unsigned int encoded_nelts
= builder
->encoded_nelts ();
8877 for (unsigned int i
= 0; i
< encoded_nelts
; ++i
)
8878 if (!CONSTANT_CLASS_P ((*builder
)[i
]))
8880 tree type
= builder
->type ();
8881 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
).to_constant ();
8882 vec
<constructor_elt
, va_gc
> *v
;
8883 vec_alloc (v
, nelts
);
8884 for (i
= 0; i
< nelts
; ++i
)
8885 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, builder
->elt (i
));
8888 if (gimple_in_ssa_p (cfun
))
8889 res
= make_ssa_name (type
);
8891 res
= create_tmp_reg (type
);
8892 gimple
*stmt
= gimple_build_assign (res
, build_constructor (type
, v
));
8893 gimple_set_location (stmt
, loc
);
8894 gimple_seq_add_stmt_without_update (seq
, stmt
);
8897 return builder
->build ();
8900 /* Emit gimple statements into &stmts that take a value given in OLD_SIZE
8901 and generate a value guaranteed to be rounded upwards to ALIGN.
8903 Return the tree node representing this size, it is of TREE_TYPE TYPE. */
8906 gimple_build_round_up (gimple_seq
*seq
, location_t loc
, tree type
,
8907 tree old_size
, unsigned HOST_WIDE_INT align
)
8909 unsigned HOST_WIDE_INT tg_mask
= align
- 1;
8910 /* tree new_size = (old_size + tg_mask) & ~tg_mask; */
8911 gcc_assert (INTEGRAL_TYPE_P (type
));
8912 tree tree_mask
= build_int_cst (type
, tg_mask
);
8913 tree oversize
= gimple_build (seq
, loc
, PLUS_EXPR
, type
, old_size
,
8916 tree mask
= build_int_cst (type
, -align
);
8917 return gimple_build (seq
, loc
, BIT_AND_EXPR
, type
, oversize
, mask
);
8920 /* Return true if the result of assignment STMT is known to be non-negative.
8921 If the return value is based on the assumption that signed overflow is
8922 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8923 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8926 gimple_assign_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
8929 enum tree_code code
= gimple_assign_rhs_code (stmt
);
8930 tree type
= TREE_TYPE (gimple_assign_lhs (stmt
));
8931 switch (get_gimple_rhs_class (code
))
8933 case GIMPLE_UNARY_RHS
:
8934 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt
),
8936 gimple_assign_rhs1 (stmt
),
8937 strict_overflow_p
, depth
);
8938 case GIMPLE_BINARY_RHS
:
8939 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt
),
8941 gimple_assign_rhs1 (stmt
),
8942 gimple_assign_rhs2 (stmt
),
8943 strict_overflow_p
, depth
);
8944 case GIMPLE_TERNARY_RHS
:
8946 case GIMPLE_SINGLE_RHS
:
8947 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt
),
8948 strict_overflow_p
, depth
);
8949 case GIMPLE_INVALID_RHS
:
8955 /* Return true if return value of call STMT is known to be non-negative.
8956 If the return value is based on the assumption that signed overflow is
8957 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8958 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8961 gimple_call_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
8964 tree arg0
= gimple_call_num_args (stmt
) > 0 ?
8965 gimple_call_arg (stmt
, 0) : NULL_TREE
;
8966 tree arg1
= gimple_call_num_args (stmt
) > 1 ?
8967 gimple_call_arg (stmt
, 1) : NULL_TREE
;
8968 tree lhs
= gimple_call_lhs (stmt
);
8970 && tree_call_nonnegative_warnv_p (TREE_TYPE (lhs
),
8971 gimple_call_combined_fn (stmt
),
8973 strict_overflow_p
, depth
));
8976 /* Return true if return value of call STMT is known to be non-negative.
8977 If the return value is based on the assumption that signed overflow is
8978 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8979 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8982 gimple_phi_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
8985 for (unsigned i
= 0; i
< gimple_phi_num_args (stmt
); ++i
)
8987 tree arg
= gimple_phi_arg_def (stmt
, i
);
8988 if (!tree_single_nonnegative_warnv_p (arg
, strict_overflow_p
, depth
+ 1))
8994 /* Return true if STMT is known to compute a non-negative value.
8995 If the return value is based on the assumption that signed overflow is
8996 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8997 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
9000 gimple_stmt_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
9003 switch (gimple_code (stmt
))
9006 return gimple_assign_nonnegative_warnv_p (stmt
, strict_overflow_p
,
9009 return gimple_call_nonnegative_warnv_p (stmt
, strict_overflow_p
,
9012 return gimple_phi_nonnegative_warnv_p (stmt
, strict_overflow_p
,
9019 /* Return true if the floating-point value computed by assignment STMT
9020 is known to have an integer value. We also allow +Inf, -Inf and NaN
9021 to be considered integer values. Return false for signaling NaN.
9023 DEPTH is the current nesting depth of the query. */
9026 gimple_assign_integer_valued_real_p (gimple
*stmt
, int depth
)
9028 enum tree_code code
= gimple_assign_rhs_code (stmt
);
9029 switch (get_gimple_rhs_class (code
))
9031 case GIMPLE_UNARY_RHS
:
9032 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt
),
9033 gimple_assign_rhs1 (stmt
), depth
);
9034 case GIMPLE_BINARY_RHS
:
9035 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt
),
9036 gimple_assign_rhs1 (stmt
),
9037 gimple_assign_rhs2 (stmt
), depth
);
9038 case GIMPLE_TERNARY_RHS
:
9040 case GIMPLE_SINGLE_RHS
:
9041 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt
), depth
);
9042 case GIMPLE_INVALID_RHS
:
9048 /* Return true if the floating-point value computed by call STMT is known
9049 to have an integer value. We also allow +Inf, -Inf and NaN to be
9050 considered integer values. Return false for signaling NaN.
9052 DEPTH is the current nesting depth of the query. */
9055 gimple_call_integer_valued_real_p (gimple
*stmt
, int depth
)
9057 tree arg0
= (gimple_call_num_args (stmt
) > 0
9058 ? gimple_call_arg (stmt
, 0)
9060 tree arg1
= (gimple_call_num_args (stmt
) > 1
9061 ? gimple_call_arg (stmt
, 1)
9063 return integer_valued_real_call_p (gimple_call_combined_fn (stmt
),
9067 /* Return true if the floating-point result of phi STMT is known to have
9068 an integer value. We also allow +Inf, -Inf and NaN to be considered
9069 integer values. Return false for signaling NaN.
9071 DEPTH is the current nesting depth of the query. */
9074 gimple_phi_integer_valued_real_p (gimple
*stmt
, int depth
)
9076 for (unsigned i
= 0; i
< gimple_phi_num_args (stmt
); ++i
)
9078 tree arg
= gimple_phi_arg_def (stmt
, i
);
9079 if (!integer_valued_real_single_p (arg
, depth
+ 1))
9085 /* Return true if the floating-point value computed by STMT is known
9086 to have an integer value. We also allow +Inf, -Inf and NaN to be
9087 considered integer values. Return false for signaling NaN.
9089 DEPTH is the current nesting depth of the query. */
9092 gimple_stmt_integer_valued_real_p (gimple
*stmt
, int depth
)
9094 switch (gimple_code (stmt
))
9097 return gimple_assign_integer_valued_real_p (stmt
, depth
);
9099 return gimple_call_integer_valued_real_p (stmt
, depth
);
9101 return gimple_phi_integer_valued_real_p (stmt
, depth
);