/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2021 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-access.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-object-size.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "diagnostic-core.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"
enum strlen_range_kind
{
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};

static bool
get_range_strlen (tree, bitmap *, strlen_range_kind,
		  c_strlen_data *, unsigned);
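
/* For example, given "char a[4]" with unknown contents, SRK_STRLEN
   fails because no exact length can be computed, while SRK_LENRANGE
   falls back on the array size and yields the range [0, 3]
   (3 == sizeof a - 1, leaving room for the terminating nul).  */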
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies the variable from whose constructor
   DECL was taken.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declared.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they are defined and not
     optimized out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->inlined_to;
    }
  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exceptions are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;

  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with
     LTO it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}
/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, a SSA name is created.  Otherwise a temporary register
   is made.  */

static tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor contains
   CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* ??? We should be able to assert that TREE_ADDRESSABLE is set,
	   but since the use can be in a debug stmt we can't.  */
	;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
/* Subroutine of fold_stmt.  We perform constant folding of the
   memory reference tree EXPR.  */

static tree
maybe_fold_reference (tree expr)
{
  tree result = NULL_TREE;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_unary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_ternary_loc (EXPR_LOCATION (expr),
			       TREE_CODE (expr),
			       TREE_TYPE (expr),
			       TREE_OPERAND (expr, 0),
			       TREE_OPERAND (expr, 1),
			       TREE_OPERAND (expr, 2));
  else
    result = fold_const_aggregate_ref (expr);

  if (result && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.c
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning
   any attempt to fold if the usual folding turns out to be too
   aggressive.  */
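
/* For example, fold-const.c may return ARRAY_REF (a, i + 1): the
   embedded index addition makes it invalid as a GIMPLE rhs even
   though the root ARRAY_REF by itself would be acceptable.  */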
bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if ((code == COND_EXPR
		   ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
		   : !is_gimple_val (TREE_OPERAND (expr, 0)))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    if (TREE_CODE (ref) == MEM_REF
		&& integer_zerop (TREE_OPERAND (ref, 1)))
	      {
		result = TREE_OPERAND (ref, 0);
		if (!useless_type_conversion_p (TREE_TYPE (rhs),
						TREE_TYPE (result)))
		  result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
		return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs)
		 && is_gimple_reg_type (TREE_TYPE (rhs)))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */
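
/* E.g., when a call with a VDEF is replaced by a load followed by a
   store, the load reuses the original call's VUSE, the store reuses
   its VDEF, and any store in between gets a fresh SSA name for the
   virtual operand chain.  */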
static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
/* Helper function for update_gimple_call and
   gimplify_and_update_call_from_tree.  A GIMPLE_CALL STMT is being replaced
   with GIMPLE_CALL NEW_STMT.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
			   gimple *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  gimple_call_set_lhs (new_stmt, lhs);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    SSA_NAME_DEF_STMT (lhs) = new_stmt;
  gimple_move_vops (new_stmt, stmt);
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}
/* Update a GIMPLE_CALL statement at iterator *SI_P to call FN with the
   number of arguments NARGS, where the arguments in GIMPLE form follow
   the NARGS argument.  */
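
/* For example,
     update_gimple_call (&gsi, fndecl, 2, arg0, arg1);
   rebuilds the call at GSI as a call to FNDECL with the two given
   arguments, preserving the lhs, location and virtual operands via
   finish_update_gimple_call.  */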
void
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
}
/* Return true if EXPR is a CALL_EXPR suitable for representation
   as a single GIMPLE_CALL statement.  If the arguments require
   further gimplification, return false.  */

static bool
valid_gimple_call_p (tree expr)
{
  unsigned i, nargs;

  if (TREE_CODE (expr) != CALL_EXPR)
    return false;

  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    {
      tree arg = CALL_EXPR_ARG (expr, i);
      if (is_gimple_reg_type (TREE_TYPE (arg)))
	{
	  if (!is_gimple_val (arg))
	    return false;
	}
      else
	if (!is_gimple_lvalue (arg))
	  return false;
    }

  return true;
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs, true);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();
      return;
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      push_gimplify_context (gimple_in_ssa_p (cfun));
      gimplify_and_add (expr, &stmts);
      pop_gimplify_context (NULL);

      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  gsi_replace_with_seq_vops (si_p, stmts);
}
/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}
/* Replace the call at *GSI with the new call REPL and fold that
   instead.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return (DECL_P (inner)
	  || (TREE_CODE (inner) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
}
/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  value_range valid_range (build_int_cst (type, 0),
			   wide_int_to_tree (type, ssize_max));
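
  /* E.g., if SIZE is a size_t known to be either zero or to exceed
     SSIZE_MAX (as when a nonpositive ssize_t value was converted to
     size_t), intersecting its range with [0, SSIZE_MAX] below leaves
     exactly [0, 0], and zero is the only valid value.  */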
  value_range vr;
  if (cfun)
    get_range_query (cfun)->range_of_expr (vr, size);
  else
    get_global_range_query ()->range_of_expr (vr, size);
  if (vr.undefined_p ())
    vr.set_varying (TREE_TYPE (size));
  vr.intersect (&valid_range);
  return vr.zero_p ();
}
/* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  Call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see bug
	 32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      /* We cannot (easily) change the type of the copy if it is a storage
	 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
	 modify the storage order of objects (see storage_order_barrier_p).  */
      tree srctype
	= POINTER_TYPE_P (TREE_TYPE (src))
	  ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
      tree desttype
	= POINTER_TYPE_P (TREE_TYPE (dest))
	  ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
      tree destvar, srcvar, srcoff;
      unsigned int src_align, dest_align;
      unsigned HOST_WIDE_INT tmp_len;
      const char *tmp_str;

      /* Build accesses at offset zero with a ref-all character type.  */
      tree off0
	= build_int_cst (build_pointer_type_for_mode (char_type_node,
						      ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads and
	 then all stores inline it that way.  Currently efficiently means that
	 we can load all the memory with a single set operation and that the
	 total size is less than MOVE_MAX * MOVE_RATIO.  */
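      /* E.g., an 8-byte memcpy on a 64-bit target can become a single
	 integer load plus a single integer store, subject to the
	 alignment checks below.  */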
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && (compare_tree_int
	      (len, (MOVE_MAX
		     * MOVE_RATIO (optimize_function_for_size_p (cfun))))
	      <= 0)
	  /* FIXME: Don't transform copies from strings with known length.
	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
	     from being handled, and the case was XFAILed for that reason.
	     Now that it is handled and the XFAIL removed, as soon as other
	     strlenopt tests that rely on it for passing are adjusted, this
	     hack can be removed.  */
	  && !c_strlen (src, 1)
	  && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL)
	  && !(srctype
	       && AGGREGATE_TYPE_P (srctype)
	       && TYPE_REVERSE_STORAGE_ORDER (srctype))
	  && !(desttype
	       && AGGREGATE_TYPE_P (desttype)
	       && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect out-of-bounds accesses without issuing warnings.
		 Avoid folding out-of-bounds copies but to avoid false
		 positives for unreachable code defer warning until after
		 DCE has worked its magic.
		 -Wrestrict is still diagnosed.  */
	      if (int warning = check_bounds_or_overlap (as_a <gcall *> (stmt),
							 dest, src, len, len,
							 false, false))
		if (warning != OPT_Wrestrict)
		  return false;

	      scalar_int_mode mode;
	      if (int_mode_for_size (ilen * 8, 0).exists (&mode)
		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
		  && have_insn_for (SET, mode)
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree type = build_nonstandard_integer_type (ilen * 8, 1);
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_move_vops (new_stmt, stmt);
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}
      if (code == BUILT_IN_MEMMOVE)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
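	  /* E.g., memmove (d, "abc", 4) can become memcpy: string
	     literals are readonly, so the source cannot overlap a
	     writable destination.  */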
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}
      if (!tree_fits_shwi_p (len))
	return false;
      if (!srctype
	  || (AGGREGATE_TYPE_P (srctype)
	      && TYPE_REVERSE_STORAGE_ORDER (srctype)))
	return false;
      if (!desttype
	  || (AGGREGATE_TYPE_P (desttype)
	      && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
	 on alignment, whether the access constitutes a register access
	 and whether it may actually expose a declaration for SSA rewrite
	 or SRA decomposition.  Also try to expose a string constant, we
	 might be able to concatenate several of them later into a single
	 string store.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
	  && dest_align >= TYPE_ALIGN (desttype)
	  && (is_gimple_reg_type (desttype)
	      || src_align >= TYPE_ALIGN (desttype)))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
	       && var_decl_component_p (TREE_OPERAND (src, 0))
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	       && src_align >= TYPE_ALIGN (srctype)
	       && (is_gimple_reg_type (srctype)
		   || dest_align >= TYPE_ALIGN (srctype)))
	srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      /* FIXME: Don't transform copies from strings with known original length.
	 As soon as strlenopt tests that rely on it for passing are adjusted,
	 this hack can be removed.  */
      else if (gimple_call_alloca_for_var_p (stmt)
	       && (srcvar = string_constant (src, &srcoff, NULL, NULL))
	       && integer_zerop (srcoff)
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
	       && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
	srctype = TREE_TYPE (srcvar);
      else
	return false;

      /* Now that we chose an access type express the other side in
	 terms of it if the target allows that with respect to alignment
	 constraints.  */
      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Same as above, detect out-of-bounds accesses without issuing
	 warnings.  Avoid folding out-of-bounds copies but to avoid
	 false positives for unreachable code defer warning until
	 after DCE has worked its magic.
	 -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *> (stmt),
						 dest, src, len, len,
						 false, false))
	if (warning != OPT_Wrestrict)
	  return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}

      /* We get an aggregate copy.  If the source is a STRING_CST, then
	 directly use its type to perform the copy.  */
      if (TREE_CODE (srcvar) == STRING_CST)
	desttype = srctype;

      /* Or else, use an unsigned char[] type to perform the copy in order
	 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
	 types or float modes behavior on copying.  */
      else
	{
	  desttype = build_array_type_nelts (unsigned_char_type_node,
					     tree_to_uhwi (len));
	  srctype = desttype;
	  if (src_align > TYPE_ALIGN (srctype))
	    srctype = build_aligned_type (srctype, src_align);
	  srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	}

      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      new_stmt = gimple_build_assign (destvar, srcvar);

set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
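      /* I.e., mempcpy returns DEST + LEN rather than DEST, so the
	 replacement assignment built below must use the adjusted
	 pointer.  */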
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
/* Transform a call to built-in bcmp(a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */
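  /* Note the argument order: bcopy (src, dest, len) lists the source
     first, memmove (dest, src, len) the destination, hence the swap in
     the replacement call built below.  */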
  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}
/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to VAL.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
	  != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (!type_has_mode_precision_p (etype))
    etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
					    TYPE_UNSIGNED (etype));

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_move_vops (store, stmt);
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_next (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  */

static bool
get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array.
	     DATA.DECL with size DATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;
  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);
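
	  /* E.g., for "char a[8]" this yields the optimistic bound 7:
	     the longest string the array can hold leaves one byte for
	     the terminating nul.  */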
	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == MEM_REF
	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
	{
	  /* Handle a MEM_REF into a DECL accessing an array of integers,
	     being conservative about references to extern structures with
	     flexible array members that can be initialized to arbitrary
	     numbers of elements as an extension (static structs are okay).
	     FIXME: Make this less conservative -- see
	     component_ref_size in tree.c.  */
	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
	      && (decl_binds_to_current_def_p (ref)
		  || !array_at_struct_end_p (arg)))
	    {
	      /* Fail if the offset is out of bounds.  Such accesses
		 should be diagnosed at some point.  */
	      val = DECL_SIZE_UNIT (ref);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;

	      poly_offset_int psiz = wi::to_offset (val);
	      poly_offset_int poff = mem_ref_offset (arg);
	      if (known_le (psiz, poff))
		return false;

	      pdata->minlen = ssize_int (0);

	      /* Subtract the offset and one for the terminating nul.  */
	      psiz -= poff;
	      psiz -= 1;
	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather than the type of the access it is not a tight
		 bound.  */
	    }
	}
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;
	}
      else
	pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
	      || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG refers to if
   the range of lengths cannot be determined, and store all in *PDATA.
   For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
   the maximum constant value.
   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
   SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
   length or if we are unable to determine the length, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide character strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false otherwise.  */

static bool
get_range_strlen (tree arg, bitmap *visited,
		  strlen_range_kind rkind,
		  c_strlen_data *pdata, unsigned eltsize)
{
  if (TREE_CODE (arg) != SSA_NAME)
    return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  tree var = arg;
  gimple *def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			  gimple_assign_rhs3 (def_stmt) };

	  for (unsigned int i = 0; i < 2; i++)
	    if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
	      {
		if (rkind != SRK_LENRANGE)
		  return false;
		/* Set the upper bound to the maximum to prevent
		   it from being adjusted in the next iteration but
		   leave MINLEN and the more conservative MAXBOUND
		   determined so far alone (or leave them null if
		   they haven't been set yet).  That the MINLEN is
		   in fact zero can be determined from MAXLEN being
		   unbounded but the discovered minimum is used for
		   diagnostics.  */
		pdata->maxlen = build_all_ones_cst (size_type_node);
	      }
	  return true;
	}
      return false;

    case GIMPLE_PHI:
      /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
	 must have a constant length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	{
	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))
	    continue;

	  if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
	    {
	      if (rkind != SRK_LENRANGE)
		return false;
	      /* Set the upper bound to the maximum to prevent
		 it from being adjusted in the next iteration but
		 leave MINLEN and the more conservative MAXBOUND
		 determined so far alone (or leave them null if
		 they haven't been set yet).  That the MINLEN is
		 in fact zero can be determined from MAXLEN being
		 unbounded but the discovered minimum is used for
		 diagnostics.  */
	      pdata->maxlen = build_all_ones_cst (size_type_node);
	    }
	}
      return true;

    default:
      return false;
    }
}
/* Try to obtain the range of the lengths of the string(s) referenced
   by ARG, or the size of the largest array ARG refers to if the range
   of lengths cannot be determined, and store all in *PDATA which must
   be zero-initialized on input except PDATA->MAXBOUND may be set to
   a non-null tree node other than INTEGER_CST to request to have it
   set to the length of the longest string in a PHI.  ELTSIZE is
   the expected size of the string element in bytes: 1 for char and
   some power of 2 for wide characters.
   Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
   for optimization.  Returning false means that a nonzero PDATA->MINLEN
   doesn't reflect the true lower bound of the range when PDATA->MAXLEN
   is -1 (in that case, the actual range is indeterminate, i.e.,
   [0, PTRDIFF_MAX - 2]).  */

bool
get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
{
  bitmap visited = NULL;
  tree maxbound = pdata->maxbound;

  if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
    {
      /* On failure extend the length range to an impossible maximum
	 (a valid MAXLEN must be less than PTRDIFF_MAX - 1).  Other
	 members can stay unchanged regardless.  */
      pdata->minlen = ssize_int (0);
      pdata->maxlen = build_all_ones_cst (size_type_node);
    }
  else if (!pdata->minlen)
    pdata->minlen = ssize_int (0);

  /* If it's unchanged from its initial non-null value, set the conservative
     MAXBOUND to SIZE_MAX.  Otherwise leave it null (if it is null).  */
  if (maxbound && pdata->maxbound == maxbound)
    pdata->maxbound = build_all_ones_cst (size_type_node);

  if (visited)
    BITMAP_FREE (visited);

  return !integer_all_onesp (pdata->maxlen);
}
/* Return the maximum value for ARG given RKIND (see strlen_range_kind).
   For ARG of pointer types, NONSTR indicates if the caller is prepared
   to handle unterminated strings.  For integer ARG and when RKIND ==
   SRK_INT_VALUE, NONSTR must be null.

   If an unterminated array is discovered and our caller handles
   unterminated arrays, then bubble up the offending DECL and
   return the maximum size.  Otherwise return NULL.  */

static tree
get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
{
  /* A non-null NONSTR is meaningless when determining the maximum
     value of an integer ARG.  */
  gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
  /* ARG must have an integral type when RKIND says so.  */
  gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));

  bitmap visited = NULL;

  /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
     is unbounded.  */
  c_strlen_data lendata = { };
  if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
    lendata.maxlen = NULL_TREE;
  else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
    lendata.maxlen = NULL_TREE;

  if (visited)
    BITMAP_FREE (visited);

  if (nonstr)
    {
      /* For callers prepared to handle unterminated arrays set
	 *NONSTR to point to the declaration of the array and return
	 the maximum length/size.  */
      *nonstr = lendata.decl;
      return lendata.maxlen;
    }

  /* Fail if the constant array isn't nul-terminated.  */
  return lendata.decl ? NULL_TREE : lendata.maxlen;
}
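
/* Illustrative example (added commentary, not upstream code): for

     const char a[4] = "abc";

   get_maxval_strlen (&a[0], SRK_STRLEN) yields the INTEGER_CST 3, while
   for an unterminated array it returns NULL_TREE unless the caller
   passes a NONSTR out-parameter to take responsibility for the
   missing nul.  */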
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !warning_suppressed_p (stmt, OPT_Wrestrict))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Set to non-null if ARG refers to an unterminated array.  */
  tree nonstr = NULL;
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);

  if (nonstr)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
	warn_string_no_nul (loc, stmt, "strcpy", src, nonstr);
      suppress_warning (stmt, OPT_Wstringop_overread);
      return false;
    }

  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
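
/* Illustrative transformation (added commentary, not upstream code):
   with a source of known length, e.g.

     strcpy (d, "hi");

   the code above emits

     memcpy (d, "hi", 3);

   copying strlen ("hi") + 1 bytes including the terminating nul.  */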
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, SRK_STRLEN);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes",
			fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, SRK_STRLEN);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
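
/* Illustrative transformation (added commentary, not upstream code):
   when the bound covers the whole source including its nul, e.g.

     strncpy (d, "ab", 3);

   becomes

     memcpy (d, "ab", 3);

   while a truncating bound is diagnosed and left alone.  */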
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  */

static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  if (!gimple_call_lhs (stmt))
    return false;

  /* Avoid folding if the first argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, str))
    return false;

  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
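
/* Illustrative transformation (added commentary, not upstream code):
   searching for the terminating nul, e.g.

     p = strchr (s, 0);

   becomes the cheaper

     tmp = strlen (s);
     p = s + tmp;  */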
/* Fold function call to builtin strstr.
   If both arguments are constant, evaluate and fold the result,
   additionally fold strstr (x, "") into x and strstr (x, "c")
   into strchr (x, 'c').  */

static bool
gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  if (!gimple_call_lhs (stmt))
    return false;

  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);

  /* Avoid folding if either argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, haystack)
      || !check_nul_terminated_array (NULL_TREE, needle))
    return false;

  const char *q = c_getstr (needle);
  if (q == NULL)
    return false;

  if (const char *p = c_getstr (haystack))
    {
      const char *r = strstr (p, q);

      if (r == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, r - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt
	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
			       haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* For strstr (x, "") return x.  */
  if (q[0] == '\0')
    {
      replace_call_with_value (gsi, haystack);
      return true;
    }

  /* Transform strstr (x, "c") into strchr (x, 'c').  */
  if (q[1] == '\0')
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
	{
	  tree c = build_int_cst (integer_type_node, q[0]);
	  gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
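
/* Illustrative transformations (added commentary, not upstream code):

     strstr (x, "")   ->  x
     strstr (x, "c")  ->  strchr (x, 'c')

   and with two constant arguments the result is computed outright.  */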
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return true if a simplification was made (the call is replaced in the
   statement stream), otherwise false.  The simplified form computes the
   same value in a more efficient manner, possibly through calls to other
   builtin functions such as strlen and memcpy.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, SRK_STRLEN);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
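
/* Illustrative transformation (added commentary, not upstream code):
   with a source of known length, e.g.

     strcat (d, "xy");

   is split into

     tmp = strlen (d);
     memcpy (d + tmp, "xy", 3);  */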
/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strncat builtin.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  bool nowarn = warning_suppressed_p (stmt, OPT_Wstringop_overflow_);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%qD specified bound %E equals "
				    "destination size")
			       : G_("%qD specified bound %E exceeds "
				    "destination size %wu"),
			       fndecl, len, dstsize);
	  if (nowarn)
	    suppress_warning (stmt, OPT_Wstringop_overflow_);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);
      location_t loc = gimple_location (stmt);

      /* To avoid possible overflow the specified bound should also
	 not be equal to the length of the source, even when the size
	 of the destination is unknown (it's not an uncommon mistake
	 to specify as the bound to strncpy the length of the source).  */
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%qD specified bound %E equals source length",
		      fndecl, len))
	suppress_warning (stmt, OPT_Wstringop_overflow_);
    }

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
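
/* Illustrative transformation (added commentary, not upstream code):
   once the bound is known to be no smaller than the source length, e.g.

     strncat (d, "abc", 8);

   is emitted as

     strcat (d, "abc");  */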
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Build and append gimple statements to STMTS that would load a first
   character of a memory location identified by STR.  LOC is location
   of the statement.  */

static tree
gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
{
  tree var;

  tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
  tree cst_uchar_ptr_node
    = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
  tree off0 = build_int_cst (cst_uchar_ptr_node, 0);

  tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
  gassign *stmt = gimple_build_assign (NULL_TREE, temp);
  var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);

  gimple_assign_set_lhs (stmt, var);
  gimple_seq_add_stmt_without_update (stmts, stmt);

  return var;
}
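
/* Illustrative use (added commentary, not upstream code): for a string
   STR the helper appends the equivalent of

     unsigned char tmp = *(const unsigned char *) str;

   to STMTS and returns TMP, which the comparison foldings below then
   widen to int.  */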
/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.  */

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);

  tree bound_node = NULL_TREE;
  unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      bound_node = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (bound_node))
	bound = tree_to_uhwi (bound_node);
    }

  /* If the BOUND parameter is zero, return zero.  */
  if (bound == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* Initially set to the number of characters, including the terminating
     nul if each array has one.  LENx == strnlen (Sx, LENx) implies that
     the array Sx is not terminated by a nul.
     For nul-terminated strings then adjusted to their length so that
     LENx == NULPOSx holds.  */
  unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
  const char *p1 = getbyterep (str1, &len1);
  const char *p2 = getbyterep (str2, &len2);

  /* The position of the terminating nul character if one exists, otherwise
     a value greater than LENx.  */
  unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;

  if (p1)
    {
      size_t n = strnlen (p1, len1);
      if (n < len1)
	len1 = nulpos1 = n;
    }

  if (p2)
    {
      size_t n = strnlen (p2, len2);
      if (n < len2)
	len2 = nulpos2 = n;
    }

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	case BUILT_IN_STRCMP_EQ:
	  if (len1 != nulpos1 || len2 != nulpos2)
	    break;

	  r = strcmp (p1, p2);
	  known_result = true;
	  break;

	case BUILT_IN_STRNCMP:
	case BUILT_IN_STRNCMP_EQ:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;

	    /* Reduce the bound to be no more than the length
	       of the shorter of the two strings, or the sizes
	       of the unterminated arrays.  */
	    unsigned HOST_WIDE_INT n = bound;

	    if (len1 == nulpos1 && len1 < n)
	      n = len1 + 1;
	    if (len2 == nulpos2 && len2 < n)
	      n = len2 + 1;

	    if (MIN (nulpos1, nulpos2) + 1 < n)
	      break;

	    r = strncmp (p1, p2, n);
	    known_result = true;
	    break;
	  }
	/* Only handleable situation is where the strings are equal (result 0),
	   which is already handled by the operand_equal_p case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;
	    r = strncmp (p1, p2, bound);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
    || fcode == BUILT_IN_STRCMP
    || fcode == BUILT_IN_STRCMP_EQ
    || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is one, return an expression corresponding to
     (*(const unsigned char*)arg2 - *(const unsigned char*)arg1).  */
  if (fcode == BUILT_IN_STRNCMP && bound == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is greater than the length of one constant string,
     and the other argument is also a nul-terminated string, replace
     strncmp with strcmp.  */
  if (fcode == BUILT_IN_STRNCMP
      && bound > 0 && bound < HOST_WIDE_INT_M1U
      && ((p2 && len2 < bound && len2 == nulpos2)
	  || (p1 && len1 < bound && len1 == nulpos1)))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
	return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
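
/* Illustrative transformations (added commentary, not upstream code):

     strcmp (s, s)        ->  0
     strcmp (s, "")       ->  *(const unsigned char *) s
     strncmp (s, t, 1)    ->  *(const unsigned char *) s
			      - *(const unsigned char *) t
     strncmp (s, "ab", 4) ->  strcmp (s, "ab")  */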
/* Fold a call to the memchr pointed by GSI iterator.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = getbyterep (arg1, &string_length);

  if (p1)
    {
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  tree mem_size, offset_node;
	  byte_representation (arg1, &offset_node, &mem_size, NULL);
	  unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
					  ? 0 : tree_to_uhwi (offset_node);
	  /* MEM_SIZE is the size of the array the string literal
	     is stored in.  */
	  unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
	  gcc_checking_assert (string_length <= string_size);
	  if (length <= string_size)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (sizetype, offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
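
/* Illustrative folding (added commentary, not upstream code): with
   constant operands, e.g.

     memchr ("abcd", 'c', 3)

   is replaced by a POINTER_PLUS_EXPR of the string and offset 2, and
   by a null pointer when the character provably cannot appear within
   the bound.  */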
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned by the builtin
   will be ignored.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  If LEN is non-NULL, it represents the known length
   of the string.  Return false if no simplification was possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, SRK_STRLEN);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl
	      = gimple_build_call (fn_fputc, 2,
				   build_int_cst
				   (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
}
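
/* Illustrative transformations (added commentary, not upstream code),
   valid only when the fputs result is unused:

     fputs ("", f)    ->  (call deleted)
     fputs ("x", f)   ->  fputc ('x', f)
     fputs (s, f)     ->  fwrite (s, 1, 3, f)   when strlen (s) == 3  */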
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true, if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length
   passed as third argument.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
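
/* Illustrative transformation (added commentary, not upstream code):
   when the length is provably within the object size, e.g.

     __builtin___memcpy_chk (d, s, 8, 16);

   degrades to the unchecked

     memcpy (d, s, 8);  */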
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest)
	  && !warning_suppressed_p (stmt, OPT_Wrestrict))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      gimple_seq stmts = NULL;
	      len = force_gimple_operand (len, &stmts, true, NULL_TREE);
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  If MAXLEN is not NULL, it is maximum
   length passed as third argument.  IGNORE is true if return value can be
   ignored.  FCODE is the BUILT_IN_* code of the builtin.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* Set to non-null if ARG refers to an unterminated array.  */
  c_strlen_data data = { };
  /* The size of the unterminated array if SRC refers to one.  */
  tree size;
  /* True if the size is exact/constant, false if it's the lower bound
     of a range.  */
  bool exact;
  tree len = c_strlen (src, 1, &data, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    {
      data.decl = unterminated_array (src, &size, &exact);
      if (!data.decl)
	return false;
    }

  if (data.decl)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
	warn_string_no_nul (loc, stmt, "stpcpy", src, data.decl, size,
			    exact);
      suppress_warning (stmt, OPT_Wstringop_overread);
      return false;
    }

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  tree lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			     tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  gimple_move_vops (repl, stmt);
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
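
/* Illustrative transformation (added commentary, not upstream code):
   with a source of known length, e.g.

     p = stpcpy (d, "ab");

   becomes

     memcpy (d, "ab", 3);
     p = d + 2;

   i.e. the result points at the copied terminating nul.  */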
/* Fold a call to __{,v}snprintf_chk pointed to by GSI.  Return false
   if a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Fold a call to __{,v}sprintf_chk pointed to by GSI.  Return false if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or
   BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  tree orig = NULL_TREE;
  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  tree fmt = gimple_call_arg (stmt, 1);
  const char *fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  tree dest = gimple_call_arg (stmt, 0);

  if (!init_target_chars ())
    return false;

  tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
  if (!fn)
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      copy_warning (repl, stmt);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
							  strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* Don't fold calls with source arguments of invalid (nonpointer)
	 types.  */
      if (!POINTER_TYPE_P (TREE_TYPE (orig)))
	return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, SRK_STRLEN);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      copy_warning (repl, stmt);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (lhs,
				      build_int_cst (TREE_TYPE (lhs), len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
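
/* Illustrative transformations (added commentary, not upstream code),
   assuming the destination size exceeds the known string length:

     snprintf (buf, 8, "abc")    ->  strcpy (buf, "abc"), result 3
     snprintf (buf, 8, "%s", s)  ->  strcpy (buf, s), result strlen (s)  */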
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return false if no simplification was possible, otherwise replace the
   call and return true.  FCODE is the BUILT_IN_* code of the function to
   be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return false if no simplification was possible, otherwise replace the
   call and return true.  FCODE is the BUILT_IN_* code of the function to
   be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len)
	    {
	      char *newstr;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newstr = xstrdup (str);
	      newstr[len - 1] = '\0';
	      newarg = build_string_literal (len, newstr);
	      free (newstr);
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
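
/* Illustrative transformations (added commentary, not upstream code):

     printf ("")         ->  (call deleted)
     printf ("x")        ->  putchar ('x')
     printf ("hi\n")     ->  puts ("hi")
     printf ("%s\n", s)  ->  puts (s)
     printf ("%c", c)    ->  putchar (c)  */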
/* Fold a call to __builtin_strlen.  Replace the call with a constant if
   the string length is known, otherwise record the computed range of
   lengths on the result.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);

  wide_int minlen;
  wide_int maxlen;

  c_strlen_data lendata = { };
  if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
      && !lendata.decl
      && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
      && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
    {
      /* The range of lengths refers to either a single constant
	 string or to the longest and shortest constant string
	 referenced by the argument of the strlen() call, or to
	 the strings that can possibly be stored in the arrays
	 the argument refers to.  */
      minlen = wi::to_wide (lendata.minlen);
      maxlen = wi::to_wide (lendata.maxlen);
    }
  else
    {
      unsigned prec = TYPE_PRECISION (sizetype);

      minlen = wi::shwi (0, prec);
      maxlen = wi::to_wide (max_object_size (), prec) - 2;
    }

  if (minlen == maxlen)
    {
      /* Fold the strlen call to a constant.  */
      tree type = TREE_TYPE (lendata.minlen);
      tree len = force_gimple_operand_gsi (gsi,
					   wide_int_to_tree (type, minlen),
					   true, NULL, true, GSI_SAME_STMT);
      replace_call_with_value (gsi, len);
      return true;
    }

  /* Set the strlen() range to [0, MAXLEN].  */
  if (tree lhs = gimple_call_lhs (stmt))
    set_strlen_range (lhs, minlen, maxlen);

  return false;
}
/* Fold a call to __builtin_acc_on_device.  */

static bool
gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
{
  /* Defer folding until we know which compiler we're in.  */
  if (symtab->state != EXPANSION)
    return false;

  unsigned val_host = GOMP_DEVICE_HOST;
  unsigned val_dev = GOMP_DEVICE_NONE;

#ifdef ACCEL_COMPILER
  val_host = GOMP_DEVICE_NOT_HOST;
  val_dev = ACCEL_COMPILER_acc_device;
#endif

  location_t loc = gimple_location (gsi_stmt (*gsi));

  tree host_eq = make_ssa_name (boolean_type_node);
  gimple *host_ass = gimple_build_assign
    (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
  gimple_set_location (host_ass, loc);
  gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);

  tree dev_eq = make_ssa_name (boolean_type_node);
  gimple *dev_ass = gimple_build_assign
    (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
  gimple_set_location (dev_ass, loc);
  gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);

  tree result = make_ssa_name (boolean_type_node);
  gimple *result_ass = gimple_build_assign
    (result, BIT_IOR_EXPR, host_eq, dev_eq);
  gimple_set_location (result_ass, loc);
  gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);

  replace_call_with_value (gsi, result);

  return true;
}
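
/* Sketch of the GIMPLE emitted above (SSA names invented):
     host_eq_1 = arg0 == val_host;
     dev_eq_2 = arg0 == val_dev;
     result_3 = host_eq_1 | dev_eq_2;
   and the __builtin_acc_on_device call is replaced by result_3.  */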
/* Fold realloc (0, n) -> malloc (n).  */

static bool
gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);
  tree size = gimple_call_arg (stmt, 1);

  if (operand_equal_p (arg, null_pointer_node, 0))
    {
      tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
      if (fn_malloc)
	{
	  gcall *repl = gimple_build_call (fn_malloc, 1, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }
  return false;
}
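
/* For example,
     p = __builtin_realloc (0, n);
   is replaced by
     p = __builtin_malloc (n);
   while realloc with a nonnull first argument is left untouched.  */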
/* Number of bytes into which any type but aggregate or vector types
   should fit.  */
static constexpr size_t clear_padding_unit
  = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
/* Buffer size on which __builtin_clear_padding folding code works.  */
static const size_t clear_padding_buf_size = 32 * clear_padding_unit;

/* Data passed through __builtin_clear_padding folding.  */
struct clear_padding_struct {
  location_t loc;
  /* 0 during __builtin_clear_padding folding, nonzero during
     clear_type_padding_in_mask.  In that case, instead of clearing the
     non-padding bits in union_ptr array clear the padding bits in there.  */
  bool clear_in_mask;
  tree base;
  tree alias_type;
  gimple_stmt_iterator *gsi;
  /* Alignment of buf->base + 0.  */
  unsigned align;
  /* Offset from buf->base.  Should be always a multiple of UNITS_PER_WORD.  */
  HOST_WIDE_INT off;
  /* Number of padding bytes before buf->off that don't have padding clear
     code emitted yet.  */
  HOST_WIDE_INT padding_bytes;
  /* The size of the whole object.  Never emit code to touch
     buf->base + buf->sz or following bytes.  */
  HOST_WIDE_INT sz;
  /* Number of bytes recorded in buf->buf.  */
  size_t size;
  /* When inside union, instead of emitting code we and bits inside of
     the union_ptr array.  */
  unsigned char *union_ptr;
  /* Set bits mean padding bits that need to be cleared by the builtin.  */
  unsigned char buf[clear_padding_buf_size + clear_padding_unit];
};
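
/* Illustrative example (assuming a target where int is 4 bytes with
   4-byte alignment): for
     struct S { char c; int i; };
   bytes 1..3 of S are padding, so after recording S the first bytes of
   buf->buf would be
     00 ff ff ff 00 00 00 00
   with set bits marking padding that the builtin must clear.  */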
/* Emit code to clear padding requested in BUF->buf - set bits
   in there stand for padding that should be cleared.  FULL is true
   if everything from the buffer should be flushed, otherwise
   it can leave up to 2 * clear_padding_unit bytes for further
   processing.  */

static void
clear_padding_flush (clear_padding_struct *buf, bool full)
{
  gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
  if (!full && buf->size < 2 * clear_padding_unit)
    return;
  gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
  size_t end = buf->size;
  if (!full)
    end = ((end - clear_padding_unit - 1) / clear_padding_unit
	   * clear_padding_unit);
  size_t padding_bytes = buf->padding_bytes;
  if (buf->union_ptr)
    {
      if (buf->clear_in_mask)
	{
	  /* During clear_type_padding_in_mask, clear the padding
	     bits set in buf->buf in the buf->union_ptr mask.  */
	  for (size_t i = 0; i < end; i++)
	    {
	      if (buf->buf[i] == (unsigned char) ~0)
		padding_bytes++;
	      else
		{
		  memset (&buf->union_ptr[buf->off + i - padding_bytes],
			  0, padding_bytes);
		  padding_bytes = 0;
		  buf->union_ptr[buf->off + i] &= ~buf->buf[i];
		}
	    }
	  if (full)
	    {
	      memset (&buf->union_ptr[buf->off + end - padding_bytes],
		      0, padding_bytes);
	      buf->off = 0;
	      buf->size = 0;
	      buf->padding_bytes = 0;
	    }
	  else
	    {
	      memmove (buf->buf, buf->buf + end, buf->size - end);
	      buf->off += end;
	      buf->size -= end;
	      buf->padding_bytes = padding_bytes;
	    }
	  return;
	}
      /* Inside of a union, instead of emitting any code, instead
	 clear all bits in the union_ptr buffer that are clear
	 in buf.  Whole padding bytes don't clear anything.  */
      for (size_t i = 0; i < end; i++)
	{
	  if (buf->buf[i] == (unsigned char) ~0)
	    padding_bytes++;
	  else
	    {
	      padding_bytes = 0;
	      buf->union_ptr[buf->off + i] &= buf->buf[i];
	    }
	}
      if (full)
	{
	  buf->off = 0;
	  buf->size = 0;
	  buf->padding_bytes = 0;
	}
      else
	{
	  memmove (buf->buf, buf->buf + end, buf->size - end);
	  buf->off += end;
	  buf->size -= end;
	  buf->padding_bytes = padding_bytes;
	}
      return;
    }
  size_t wordsize = UNITS_PER_WORD;
  for (size_t i = 0; i < end; i += wordsize)
    {
      size_t nonzero_first = wordsize;
      size_t nonzero_last = 0;
      size_t zero_first = wordsize;
      size_t zero_last = 0;
      bool all_ones = true, bytes_only = true;
      if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
	  > (unsigned HOST_WIDE_INT) buf->sz)
	{
	  gcc_assert (wordsize > 1);
	  wordsize /= 2;
	  i -= wordsize;
	  continue;
	}
      for (size_t j = i; j < i + wordsize && j < end; j++)
	{
	  if (buf->buf[j])
	    {
	      if (nonzero_first == wordsize)
		{
		  nonzero_first = j - i;
		  nonzero_last = j - i;
		}
	      if (nonzero_last != j - i)
		all_ones = false;
	      nonzero_last = j + 1 - i;
	    }
	  else
	    {
	      if (zero_first == wordsize)
		zero_first = j - i;
	      zero_last = j + 1 - i;
	    }
	  if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
	    {
	      all_ones = false;
	      bytes_only = false;
	    }
	}
      size_t padding_end = i;
      if (padding_bytes)
	{
	  if (nonzero_first == 0
	      && nonzero_last == wordsize
	      && all_ones)
	    {
	      /* All bits are padding and we had some padding
		 before too.  Just extend it.  */
	      padding_bytes += wordsize;
	      continue;
	    }
	  if (all_ones && nonzero_first == 0)
	    {
	      padding_bytes += nonzero_last;
	      padding_end += nonzero_last;
	      nonzero_first = wordsize;
	      nonzero_last = 0;
	    }
	  else if (bytes_only && nonzero_first == 0)
	    {
	      gcc_assert (zero_first && zero_first != wordsize);
	      padding_bytes += zero_first;
	      padding_end += zero_first;
	    }
	  tree atype, src;
	  if (padding_bytes == 1)
	    {
	      atype = char_type_node;
	      src = build_zero_cst (char_type_node);
	    }
	  else
	    {
	      atype = build_array_type_nelts (char_type_node, padding_bytes);
	      src = build_constructor (atype, NULL);
	    }
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + padding_end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  padding_bytes = 0;
	  buf->padding_bytes = 0;
	}
      if (nonzero_first == wordsize)
	/* All bits in a word are 0, there are no padding bits.  */
	continue;
      if (all_ones && nonzero_last == wordsize)
	{
	  /* All bits between nonzero_first and end of word are padding
	     bits, start counting padding_bytes.  */
	  padding_bytes = nonzero_last - nonzero_first;
	  continue;
	}
      if (bytes_only)
	{
	  /* If bitfields aren't involved in this word, prefer storing
	     individual bytes or groups of them over performing a RMW
	     operation on the whole word.  */
	  gcc_assert (i + zero_last <= end);
	  for (size_t j = padding_end; j < i + zero_last; j++)
	    if (buf->buf[j])
	      {
		size_t k;
		for (k = j; k < i + zero_last; k++)
		  if (buf->buf[k] == 0)
		    break;
		HOST_WIDE_INT off = buf->off + j;
		tree atype, src;
		if (k - j == 1)
		  {
		    atype = char_type_node;
		    src = build_zero_cst (char_type_node);
		  }
		else
		  {
		    atype = build_array_type_nelts (char_type_node, k - j);
		    src = build_constructor (atype, NULL);
		  }
		tree dst = build2_loc (buf->loc, MEM_REF, atype,
				       buf->base,
				       build_int_cst (buf->alias_type, off));
		gimple *g = gimple_build_assign (dst, src);
		gimple_set_location (g, buf->loc);
		gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		j = k;
	      }
	  if (nonzero_last == wordsize)
	    padding_bytes = nonzero_last - zero_last;
	  continue;
	}
      for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
	{
	  if (nonzero_last - nonzero_first <= eltsz
	      && ((nonzero_first & ~(eltsz - 1))
		  == ((nonzero_last - 1) & ~(eltsz - 1))))
	    {
	      tree type;
	      if (eltsz == 1)
		type = char_type_node;
	      else
		type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
						       0);
	      size_t start = nonzero_first & ~(eltsz - 1);
	      HOST_WIDE_INT off = buf->off + i + start;
	      tree atype = type;
	      if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
		atype = build_aligned_type (type, buf->align);
	      tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				     build_int_cst (buf->alias_type, off));
	      tree src;
	      gimple *g;
	      if (all_ones
		  && nonzero_first == start
		  && nonzero_last == start + eltsz)
		src = build_zero_cst (type);
	      else
		{
		  src = make_ssa_name (type);
		  g = gimple_build_assign (src, unshare_expr (dst));
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  tree mask = native_interpret_expr (type,
						     buf->buf + i + start,
						     eltsz);
		  gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
		  mask = fold_build1 (BIT_NOT_EXPR, type, mask);
		  tree src_masked = make_ssa_name (type);
		  g = gimple_build_assign (src_masked, BIT_AND_EXPR,
					   src, mask);
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  src = src_masked;
		}
	      g = gimple_build_assign (dst, src);
	      gimple_set_location (g, buf->loc);
	      gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	      break;
	    }
	}
    }
  if (full)
    {
      if (padding_bytes)
	{
	  tree atype, src;
	  if (padding_bytes == 1)
	    {
	      atype = char_type_node;
	      src = build_zero_cst (char_type_node);
	    }
	  else
	    {
	      atype = build_array_type_nelts (char_type_node, padding_bytes);
	      src = build_constructor (atype, NULL);
	    }
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	}
      size_t end_rem = end % UNITS_PER_WORD;
      buf->off += end - end_rem;
      buf->size = end_rem;
      memset (buf->buf, 0, buf->size);
      buf->padding_bytes = 0;
    }
  else
    {
      memmove (buf->buf, buf->buf + end, buf->size - end);
      buf->off += end;
      buf->size -= end;
      buf->padding_bytes = padding_bytes;
    }
}
/* Append PADDING_BYTES padding bytes.  */

static void
clear_padding_add_padding (clear_padding_struct *buf,
			   HOST_WIDE_INT padding_bytes)
{
  if (padding_bytes == 0)
    return;
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    clear_padding_flush (buf, false);
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    {
      memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
      padding_bytes -= clear_padding_buf_size - buf->size;
      buf->size = clear_padding_buf_size;
      clear_padding_flush (buf, false);
      gcc_assert (buf->padding_bytes);
      /* At this point buf->buf[0] through buf->buf[buf->size - 1]
	 is guaranteed to be all ones.  */
      padding_bytes += buf->size;
      buf->size = padding_bytes % UNITS_PER_WORD;
      memset (buf->buf, ~0, buf->size);
      buf->off += padding_bytes - buf->size;
      buf->padding_bytes += padding_bytes - buf->size;
    }
  else
    {
      memset (buf->buf + buf->size, ~0, padding_bytes);
      buf->size += padding_bytes;
    }
}
static void clear_padding_type (clear_padding_struct *, tree,
				HOST_WIDE_INT, bool);

/* Clear padding bits of union type TYPE.  */

static void
clear_padding_union (clear_padding_struct *buf, tree type,
		     HOST_WIDE_INT sz, bool for_auto_init)
{
  clear_padding_struct *union_buf;
  HOST_WIDE_INT start_off = 0, next_off = 0;
  size_t start_size = 0;
  if (buf->union_ptr)
    {
      start_off = buf->off + buf->size;
      next_off = start_off + sz;
      start_size = start_off % UNITS_PER_WORD;
      start_off -= start_size;
      clear_padding_flush (buf, true);
      union_buf = buf;
    }
  else
    {
      if (sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      union_buf = XALLOCA (clear_padding_struct);
      union_buf->loc = buf->loc;
      union_buf->clear_in_mask = buf->clear_in_mask;
      union_buf->base = NULL_TREE;
      union_buf->alias_type = NULL_TREE;
      union_buf->gsi = NULL;
      union_buf->align = 0;
      union_buf->off = 0;
      union_buf->padding_bytes = 0;
      union_buf->sz = sz;
      union_buf->size = 0;
      if (sz + buf->size <= clear_padding_buf_size)
	union_buf->union_ptr = buf->buf + buf->size;
      else
	union_buf->union_ptr = XNEWVEC (unsigned char, sz);
      memset (union_buf->union_ptr, ~0, sz);
    }

  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
      {
	if (DECL_SIZE_UNIT (field) == NULL_TREE)
	  {
	    if (TREE_TYPE (field) == error_mark_node)
	      continue;
	    gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
			&& !COMPLETE_TYPE_P (TREE_TYPE (field)));
	    if (!buf->clear_in_mask && !for_auto_init)
	      error_at (buf->loc, "flexible array member %qD does not have "
				  "well defined padding bits for %qs",
			field, "__builtin_clear_padding");
	    continue;
	  }
	HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
	gcc_assert (union_buf->size == 0);
	union_buf->off = start_off;
	union_buf->size = start_size;
	memset (union_buf->buf, ~0, start_size);
	clear_padding_type (union_buf, TREE_TYPE (field), fldsz, for_auto_init);
	clear_padding_add_padding (union_buf, sz - fldsz);
	clear_padding_flush (union_buf, true);
      }

  if (buf == union_buf)
    {
      buf->off = next_off;
      buf->size = next_off % UNITS_PER_WORD;
      buf->off -= buf->size;
      memset (buf->buf, ~0, buf->size);
    }
  else if (sz + buf->size <= clear_padding_buf_size)
    buf->size += sz;
  else
    {
      unsigned char *union_ptr = union_buf->union_ptr;
      while (sz)
	{
	  clear_padding_flush (buf, false);
	  HOST_WIDE_INT this_sz
	    = MIN ((unsigned HOST_WIDE_INT) sz,
		   clear_padding_buf_size - buf->size);
	  memcpy (buf->buf + buf->size, union_ptr, this_sz);
	  buf->size += this_sz;
	  union_ptr += this_sz;
	  sz -= this_sz;
	}
      XDELETE (union_buf->union_ptr);
    }
}
/* The only known floating point formats with padding bits are the
   IEEE extended ones.  */

static bool
clear_padding_real_needs_padding_p (tree type)
{
  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
  return (fmt->b == 2
	  && fmt->signbit_ro == fmt->signbit_rw
	  && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
}
/* Return true if TYPE might contain any padding bits.  */

static bool
clear_padding_type_may_have_padding_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
      return true;
    case ARRAY_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
    case REAL_TYPE:
      return clear_padding_real_needs_padding_p (type);
    default:
      return false;
    }
}
/* Emit a runtime loop:
   for (; buf.base != end; buf.base += sz)
     __builtin_clear_padding (buf.base);  */

static void
clear_padding_emit_loop (clear_padding_struct *buf, tree type,
			 tree end, bool for_auto_init)
{
  tree l1 = create_artificial_label (buf->loc);
  tree l2 = create_artificial_label (buf->loc);
  tree l3 = create_artificial_label (buf->loc);
  gimple *g = gimple_build_goto (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l1);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  clear_padding_type (buf, type, buf->sz, for_auto_init);
  clear_padding_flush (buf, true);
  g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
			   size_int (buf->sz));
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
}
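
/* The control flow emitted above corresponds to (labels invented):
       goto l2;
     l1:
       ...clear padding of one element at buf->base...
       buf.base = buf.base + sz;
     l2:
       if (buf.base != end) goto l1; else goto l3;
     l3:
   i.e. a loop whose body is skipped when base already equals end.  */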
/* Clear padding bits for TYPE.  Called recursively from
   gimple_fold_builtin_clear_padding.  If FOR_AUTO_INIT is true,
   the __builtin_clear_padding is not called by the end user,
   instead, it's inserted by the compiler to initialize the
   padding of automatic variables.  Therefore, we should not
   emit the error messages for flexible array members, which
   would only confuse the end user.  */

static void
clear_padding_type (clear_padding_struct *buf, tree type,
		    HOST_WIDE_INT sz, bool for_auto_init)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      HOST_WIDE_INT cur_pos;
      cur_pos = 0;
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
	  {
	    tree ftype = TREE_TYPE (field);
	    if (DECL_BIT_FIELD (field))
	      {
		HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
		if (fldsz == 0)
		  continue;
		HOST_WIDE_INT pos = int_byte_position (field);
		if (pos >= sz)
		  continue;
		HOST_WIDE_INT bpos
		  = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
		bpos %= BITS_PER_UNIT;
		HOST_WIDE_INT end
		  = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
		if (pos + end > cur_pos)
		  {
		    clear_padding_add_padding (buf, pos + end - cur_pos);
		    cur_pos = pos + end;
		  }
		gcc_assert (cur_pos > pos
			    && ((unsigned HOST_WIDE_INT) buf->size
				>= (unsigned HOST_WIDE_INT) cur_pos - pos));
		unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
		if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
		  sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
				      " in %qs", "__builtin_clear_padding");
		else if (BYTES_BIG_ENDIAN)
		  {
		    /* Big endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1)
			      << (BITS_PER_UNIT - bpos - fldsz));
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
		      }
		  }
		else
		  {
		    /* Little endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1) << bpos);
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ~((1 << fldsz) - 1);
		      }
		  }
	      }
	    else if (DECL_SIZE_UNIT (field) == NULL_TREE)
	      {
		if (ftype == error_mark_node)
		  continue;
		gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
			    && !COMPLETE_TYPE_P (ftype));
		if (!buf->clear_in_mask && !for_auto_init)
		  error_at (buf->loc, "flexible array member %qD does not "
				      "have well defined padding bits for %qs",
			    field, "__builtin_clear_padding");
	      }
	    else if (is_empty_type (TREE_TYPE (field)))
	      continue;
	    else
	      {
		HOST_WIDE_INT pos = int_byte_position (field);
		if (pos >= sz)
		  continue;
		HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
		gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
		clear_padding_add_padding (buf, pos - cur_pos);
		cur_pos = pos;
		clear_padding_type (buf, TREE_TYPE (field),
				    fldsz, for_auto_init);
		cur_pos += fldsz;
	      }
	  }
      gcc_assert (sz >= cur_pos);
      clear_padding_add_padding (buf, sz - cur_pos);
      break;
    case ARRAY_TYPE:
      HOST_WIDE_INT nelts, fldsz;
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      if (fldsz == 0)
	break;
      nelts = sz / fldsz;
      if (nelts > 1
	  && sz > 8 * UNITS_PER_WORD
	  && buf->union_ptr == NULL
	  && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
	{
	  /* For sufficiently large array of more than one elements,
	     emit a runtime loop to keep code size manageable.  */
	  tree base = buf->base;
	  unsigned int prev_align = buf->align;
	  HOST_WIDE_INT off = buf->off + buf->size;
	  HOST_WIDE_INT prev_sz = buf->sz;
	  clear_padding_flush (buf, true);
	  tree elttype = TREE_TYPE (type);
	  buf->base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf->base));
	  gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
					   base, size_int (off));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
				   size_int (sz));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  buf->sz = fldsz;
	  buf->align = TYPE_ALIGN (elttype);
	  buf->off = 0;
	  buf->size = 0;
	  clear_padding_emit_loop (buf, elttype, end, for_auto_init);
	  buf->base = base;
	  buf->sz = prev_sz;
	  buf->align = prev_align;
	  buf->size = off % UNITS_PER_WORD;
	  buf->off = off - buf->size;
	  memset (buf->buf, 0, buf->size);
	  break;
	}
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case UNION_TYPE:
      clear_padding_union (buf, type, sz, for_auto_init);
      break;
    case REAL_TYPE:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      if (clear_padding_real_needs_padding_p (type))
	{
	  /* Use native_interpret_expr + native_encode_expr to figure out
	     which bits are padding.  */
	  memset (buf->buf + buf->size, ~0, sz);
	  tree cst = native_interpret_expr (type, buf->buf + buf->size, sz);
	  gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
	  int len = native_encode_expr (cst, buf->buf + buf->size, sz);
	  gcc_assert (len > 0 && (size_t) len == (size_t) sz);
	  for (size_t i = 0; i < (size_t) sz; i++)
	    buf->buf[buf->size + i] ^= ~0;
	}
      else
	memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    case COMPLEX_TYPE:
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case VECTOR_TYPE:
      nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case NULLPTR_TYPE:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, ~0, sz);
      buf->size += sz;
      break;
    default:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    }
}
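
/* Illustrative example (invented, assuming a 2-byte aligned short): for
     struct A { char c; short s; };
     struct B { struct A a[2]; };
   the RECORD_TYPE case walks the fields of B, the ARRAY_TYPE case
   recurses once per element, and each element of a[] records the one
   padding byte between c and s into buf before anything is flushed.  */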
/* Clear padding bits of TYPE in MASK.  */

void
clear_type_padding_in_mask (tree type, unsigned char *mask)
{
  clear_padding_struct buf;
  buf.loc = UNKNOWN_LOCATION;
  buf.clear_in_mask = true;
  buf.base = NULL_TREE;
  buf.alias_type = NULL_TREE;
  buf.gsi = NULL;
  buf.align = 0;
  buf.off = 0;
  buf.padding_bytes = 0;
  buf.sz = int_size_in_bytes (type);
  buf.size = 0;
  buf.union_ptr = mask;
  clear_padding_type (&buf, type, buf.sz, false);
  clear_padding_flush (&buf, true);
}
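
/* Illustrative use: for struct S { char c; int i; } (4-byte aligned
   int) and a MASK initially all ones, this zeroes mask bytes 1..3 -
   the padding - and leaves the bytes backing c and i untouched.  */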
/* Fold __builtin_clear_padding builtin.  */

static bool
gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gcc_assert (gimple_call_num_args (stmt) == 3);
  tree ptr = gimple_call_arg (stmt, 0);
  tree typearg = gimple_call_arg (stmt, 1);
  /* The 3rd argument of __builtin_clear_padding is to distinguish whether
     this call is made by the user or by the compiler for automatic variable
     initialization.  */
  bool for_auto_init = (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 2));
  tree type = TREE_TYPE (TREE_TYPE (typearg));
  location_t loc = gimple_location (stmt);
  clear_padding_struct buf;
  gimple_stmt_iterator gsiprev = *gsi;
  /* This should be folded during the lower pass.  */
  gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
  gcc_assert (COMPLETE_TYPE_P (type));
  gsi_prev (&gsiprev);

  buf.loc = loc;
  buf.clear_in_mask = false;
  buf.base = ptr;
  buf.alias_type = NULL_TREE;
  buf.gsi = gsi;
  buf.align = get_pointer_alignment (ptr);
  unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
  buf.align = MAX (buf.align, talign);
  buf.off = 0;
  buf.padding_bytes = 0;
  buf.size = 0;
  buf.sz = int_size_in_bytes (type);
  buf.union_ptr = NULL;
  if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
    sorry_at (loc, "%s not supported for variable length aggregates",
	      "__builtin_clear_padding");
  /* The implementation currently assumes 8-bit host and target
     chars which is the case for all currently supported targets
     and hosts and is required e.g. for native_{encode,interpret}* APIs.  */
  else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    sorry_at (loc, "%s not supported on this target",
	      "__builtin_clear_padding");
  else if (!clear_padding_type_may_have_padding_p (type))
    ;
  else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
    {
      tree sz = TYPE_SIZE_UNIT (type);
      tree elttype = type;
      /* Only supports C/C++ VLAs and flattens all the VLA levels.  */
      while (TREE_CODE (elttype) == ARRAY_TYPE
	     && int_size_in_bytes (elttype) < 0)
	elttype = TREE_TYPE (elttype);
      HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
      gcc_assert (eltsz >= 0);
      if (eltsz)
	{
	  buf.base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf.base));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  buf.sz = eltsz;
	  buf.align = TYPE_ALIGN (elttype);
	  buf.alias_type = build_pointer_type (elttype);
	  clear_padding_emit_loop (&buf, elttype, end, for_auto_init);
	}
    }
  else
    {
      if (!is_gimple_mem_ref_addr (buf.base))
	{
	  buf.base = make_ssa_name (TREE_TYPE (ptr));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	}
      buf.alias_type = build_pointer_type (type);
      clear_padding_type (&buf, type, buf.sz, for_auto_init);
      clear_padding_flush (&buf, true);
    }

  gimple_stmt_iterator gsiprev2 = *gsi;
  gsi_prev (&gsiprev2);
  if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
    gsi_replace (gsi, gimple_build_nop (), true);
  else
    gsi_remove (gsi, true);
  return true;
}
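
/* For example (offsets assume a 4-byte aligned int),
     struct S { char c; int i; } s;
     __builtin_clear_padding (&s);
   folds into stores that zero bytes 1..3 of s, leaving c and i
   unmodified.  */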
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), fcode);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    case BUILT_IN_CLEAR_PADDING:
      return gimple_fold_builtin_clear_padding (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
   function calls to constants, where possible.  */

static tree
fold_internal_goacc_dim (const gimple *call)
{
  int axis = oacc_get_ifn_dim_arg (call);
  int size = oacc_get_fn_dim_size (current_function_decl, axis);
  tree result = NULL_TREE;
  tree type = TREE_TYPE (gimple_call_lhs (call));

  switch (gimple_call_internal_fn (call))
    {
    case IFN_GOACC_DIM_POS:
      /* If the size is 1, we know the answer.  */
      if (size == 1)
	result = build_int_cst (type, 0);
      break;
    case IFN_GOACC_DIM_SIZE:
      /* If the size is not dynamic, we know the answer.  */
      if (size)
	result = build_int_cst (type, size);
      break;
    default:
      break;
    }

  return result;
}
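
/* For example, if an OpenACC dimension is statically known to have
   size 1, IFN_GOACC_DIM_POS folds to the constant 0; any statically
   known nonzero size folds IFN_GOACC_DIM_SIZE to that constant.  */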
/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
   for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
   &var where var is only addressable because of such calls.  */

bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || maybe_ne (TYPE_PRECISION (etype),
		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
    return false;

  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
    return false;

  return true;
}
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  gimple_move_vops (g, stmt);
  tree oldlhs = gimple_call_lhs (stmt);
  if (stmt_can_throw_internal (cfun, stmt))
    {
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  *gsi = gsiret;
}
/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
   doesn't fit into TYPE.  The test for overflow should be regardless of
   -fwrapv, and even for unsigned types.  */

bool
arith_overflowed_p (enum tree_code code, const_tree type,
		    const_tree arg0, const_tree arg1)
{
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
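
/* Worked example: for an 8-bit unsigned type, 200 + 100 computes 300 in
   the double-width type; wi::min_precision (300, UNSIGNED) is 9 > 8,
   so the addition overflows.  Likewise 3 - 5 yields a negative value,
   which overflows any unsigned type via the wi::neg_p check above.  */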
/* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
   for the memory it references, otherwise return null.  VECTYPE is the
   type of the memory vector.  */

static tree
gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
{
  tree ptr = gimple_call_arg (call, 0);
  tree alias_align = gimple_call_arg (call, 1);
  tree mask = gimple_call_arg (call, 2);
  if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
    return NULL_TREE;

  unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
  if (TYPE_ALIGN (vectype) != align)
    vectype = build_aligned_type (vectype, align);
  tree offset = build_zero_cst (TREE_TYPE (alias_align));
  return fold_build2 (MEM_REF, vectype, ptr, offset);
}
/* Try to fold IFN_MASK_LOAD call CALL.  Return true on success.  */

static bool
gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
{
  tree lhs = gimple_call_lhs (call);
  if (!lhs)
    return false;

  if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
    {
      gassign *new_stmt = gimple_build_assign (lhs, rhs);
      gimple_set_location (new_stmt, gimple_location (call));
      gimple_move_vops (new_stmt, call);
      gsi_replace (gsi, new_stmt, false);
      return true;
    }
  return false;
}
/* Try to fold IFN_MASK_STORE call CALL.  Return true on success.  */

static bool
gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
{
  tree rhs = gimple_call_arg (call, 3);
  if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
    {
      gassign *new_stmt = gimple_build_assign (lhs, rhs);
      gimple_set_location (new_stmt, gimple_location (call));
      gimple_move_vops (new_stmt, call);
      gsi_replace (gsi, new_stmt, false);
      return true;
    }
  return false;
}
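
/* Illustrative example: with an all-ones mask the conditional store
     .MASK_STORE (p, align, { -1, -1, -1, -1 }, v_4);
   is equivalent to, and replaced by, the unconditional
     MEM <vector(4) int> [p] = v_4;  */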
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  /* If the call had a SSA name as lhs morph that into
		     an uninitialized value.  */
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
		      set_ssa_default_def (cfun, var, lhs);
		    }
		  gimple_move_vops (new_stmt, stmt);
		  gsi_replace (gsi, new_stmt, false);
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2),
					NULL_TREE);
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  {
	    tree offset = gimple_call_arg (stmt, 1);
	    tree objsize = gimple_call_arg (stmt, 2);
	    if (integer_all_onesp (objsize)
		|| (TREE_CODE (offset) == INTEGER_CST
		    && TREE_CODE (objsize) == INTEGER_CST
		    && tree_int_cst_le (offset, objsize)))
	      {
		replace_call_with_value (gsi, NULL_TREE);
		return true;
	      }
	  }
	  break;
	case IFN_UBSAN_PTR:
	  if (integer_zerop (gimple_call_arg (stmt, 1)))
	    {
	      replace_call_with_value (gsi, NULL_TREE);
	      return true;
	    }
	  break;
	case IFN_UBSAN_BOUNDS:
	  {
	    tree index = gimple_call_arg (stmt, 1);
	    tree bound = gimple_call_arg (stmt, 2);
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_CODE (bound) == INTEGER_CST)
	      {
		index = fold_convert (TREE_TYPE (bound), index);
		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_le (index, bound))
		  {
		    replace_call_with_value (gsi, NULL_TREE);
		    return true;
		  }
	      }
	  }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MASK_LOAD:
	  changed |= gimple_fold_mask_load (gsi, stmt);
	  break;
	case IFN_MASK_STORE:
	  changed |= gimple_fold_mask_store (gsi, stmt);
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
/* Return true if NAME has a use on STMT.  */

static bool
has_use_on_stmt (tree name, gimple *stmt)
{
  imm_use_iterator iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, iter, name)
    if (USE_STMT (use_p) == stmt)
      return true;
  return false;
}
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (res_op->code.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (res_op->code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
      else if (res_op->code == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (res_op->code == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, res_op->code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == res_op->code)
    {
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
{
  bool res = false;
  tree *orig_t = t;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					(TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    {
	      if (is_debug)
		return false;
	      gcc_unreachable ();
	    }

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  else if (TREE_CODE (*orig_t) == ADDR_EXPR
	   && TREE_CODE (*t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
    {
      tree base;
      poly_int64 coffset;
      base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
					    &coffset);
      if (base)
	{
	  gcc_assert (TREE_CODE (base) == MEM_REF);
	  poly_int64 moffset;
	  if (mem_ref_offset (base).to_shwi (&moffset))
	    {
	      coffset += moffset;
	      if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
		{
		  coffset += moffset;
		  *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
		  return true;
		}
	    }
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  if (TREE_CODE (*orig_t) == ADDR_EXPR)
	    recompute_tree_invariant_for_addr_expr (*orig_t);
	  res = true;
	}
    }

  return res;
}
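
/* Illustrative examples of the canonicalizations above: an invariant
   MEM[&a.b.c, 4] is rebased as MEM[&a, off + 4] where off is a.b.c's
   constant offset within a; and MEM[&decl, 0] with matching type,
   volatility and alignment becomes the plain reference "decl".  */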
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = warning_suppressed_p (stmt, OPT_Wstrict_overflow);

  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	unsigned i;
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	unsigned i;
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val, true))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      break;
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      gimple_match_op res_op;
      if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X;
		   X == 1 and X != 0 is X.  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		stmt = gsi_stmt (*gsi);
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a <greturn *> (stmt);
	tree ret = gimple_return_retval (ret_stmt);

	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);

  return changed;
}
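
/* For illustration (not part of the folder itself): the operand-order
   canonicalization above rewrites e.g. "if (3 < a_1)" as "if (a_1 > 3)"
   via swap_tree_comparison, and the boolean-comparison canonicalization
   in the second switch rewrites, for a boolean-typed _1,
       _2 = _1 != 0;   =>   _2 = _1;
       _2 = _1 == 0;   =>   _2 = ~_1;      (one-bit precision type)
       _2 = _1 == 0;   =>   _2 = _1 ^ 1;   (wider boolean type)
   so later passes only ever see the canonical forms.  */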
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}

/* Valueization callback that ends up following single-use SSA edges only.  */

tree
follow_single_use_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !has_single_use (val))
    return NULL_TREE;
  return val;
}

/* Valueization callback that follows all SSA edges.  */

tree
follow_all_ssa_edges (tree val)
{
  return val;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
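
/* Illustrative usage sketch, kept out of the build: this mirrors how a
   pass typically invokes the folder on one statement.  The function name
   is hypothetical; gsi_for_stmt, gsi_stmt and update_stmt are the usual
   iterator helpers.  */
#if 0
static void
example_fold_one_stmt (gimple *stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  /* The no-argument overload does not follow SSA edges; pass a
     valueization callback such as follow_single_use_edges to also
     look through single-use defs.  */
  if (fold_stmt (&gsi))
    update_stmt (gsi_stmt (gsi));
}
#endif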
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
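
/* For illustration: constant propagation replacing p_1 with &a turns
   "*p_1 = 0" into "*&a = 0"; fold_stmt_inplace canonicalizes that to
   "a = 0" without replacing the statement object itself, which is why
   the assert above can insist the iterator still points at STMT.  */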
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;

  if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
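
/* Worked example: with INVERT true, canonicalize_bool maps
       constant 1    -> boolean_false_node
       constant 0    -> boolean_true_node
       x_1           -> x_1 == 0
       a_2 < b_3     -> a_2 >= b_3   (integer operands)
   while with INVERT false a boolean-typed EXPR is returned unchanged
   and a non-boolean SSA name x_1 becomes x_1 != 0.  */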
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }

  return false;
}
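
/* Illustrative example: given the definition
       t_1 = a_2 < b_3;
   same_bool_comparison_p (t_1, LT_EXPR, a_2, b_3) is true via the
   SSA_NAME definition check, and comparing t_1 against (a_2 < b_3)
   expressed as (t_1 != 0) succeeds through the recursive case.  */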
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (COMPARISON_CLASS_P (op2)
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (COMPARISON_CLASS_P (op1)
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree type, tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (tree type, gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree, tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (tree, gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree type, tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (type, stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
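
/* For illustration: simplifying !x_1 AND (a_2 < b_3) computes
   x_1 OR (a_2 >= b_3) over x_1's definition and then inverts the result
   via canonicalize_bool, which is exactly the DeMorgan rewrite quoted
   above (exact for integer operands, where invert_tree_comparison is
   lossless).  */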
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (tree type, gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (type, innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner2, false, code2, op2a,
					   op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner1, false, code2, op2a,
					   op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  else
	    partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
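
/* Worked example of the reassociation above: for
       x_1 = (a_2 < b_3) & (c_4 != 0);
   simplifying x_1 AND (a_2 >= b_3) first computes the partial result
       (a_2 < b_3) AND (a_2 >= b_3)  =>  false
   and, the inner definition being a BIT_AND_EXPR, the whole expression
   folds to boolean_false_node without ever examining c_4.  */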
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (type, op1a, invert, code2, op2a,
					  op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (type, arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
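
/* Worked examples of the operand-equal fast paths above, for integer
   operands:
       (x <= y) AND (x >= y)  =>  x == y
       (x < y)  AND (y < x)   =>  false   (swapped-operand case)
   both produced by combine_comparisons with TRUTH_ANDIF_EXPR.  */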
/* Helper function for maybe_fold_and_comparisons and
   maybe_fold_or_comparisons: try to simplify the AND/OR of the ssa variable
   VAR with the comparison specified by (OP2A CODE2 OP2B) from match.pd.
   Return NULL_TREE if we can't simplify this to a single expression.  As we
   are going to lower the cost of building SSA names / gimple stmts
   significantly, we need to allocate them on the stack.  This will cause
   the code to be a bit ugly.  */

static tree
maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
				      enum tree_code code1,
				      tree op1a, tree op1b,
				      enum tree_code code2, tree op2a,
				      tree op2b)
{
  /* Allocate gimple stmt1 on the stack.  */
  gassign *stmt1
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt1, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt1, code1);
  gimple_assign_set_rhs1 (stmt1, op1a);
  gimple_assign_set_rhs2 (stmt1, op1b);

  /* Allocate gimple stmt2 on the stack.  */
  gassign *stmt2
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt2, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt2, code2);
  gimple_assign_set_rhs1 (stmt2, op2a);
  gimple_assign_set_rhs2 (stmt2, op2b);

  /* Allocate SSA names (lhs1) on the stack.  */
  tree lhs1 = (tree) XALLOCA (tree_ssa_name);
  memset (lhs1, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs1, SSA_NAME);
  TREE_TYPE (lhs1) = type;
  init_ssa_name_imm_use (lhs1);

  /* Allocate SSA names (lhs2) on the stack.  */
  tree lhs2 = (tree) XALLOCA (tree_ssa_name);
  memset (lhs2, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs2, SSA_NAME);
  TREE_TYPE (lhs2) = type;
  init_ssa_name_imm_use (lhs2);

  gimple_assign_set_lhs (stmt1, lhs1);
  gimple_assign_set_lhs (stmt2, lhs2);

  gimple_match_op op (gimple_match_cond::UNCOND, code,
		      type, gimple_assign_lhs (stmt1),
		      gimple_assign_lhs (stmt2));
  if (op.resimplify (NULL, follow_all_ssa_edges))
    {
      if (gimple_simplified_result_is_gimple_val (&op))
	{
	  tree res = op.ops[0];
	  if (res == lhs1)
	    return build2 (code1, type, op1a, op1b);
	  else if (res == lhs2)
	    return build2 (code2, type, op2a, op2b);
	  else
	    return res;
	}
      else if (op.code.is_tree_code ()
	       && TREE_CODE_CLASS ((tree_code) op.code) == tcc_comparison)
	{
	  tree op0 = op.ops[0];
	  tree op1 = op.ops[1];
	  if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
	    return NULL_TREE;  /* not simple */

	  return build2 ((enum tree_code) op.code, op.type, op0, op1);
	}
    }

  return NULL_TREE;
}
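
/* Note that the statements and SSA names built above never enter the IL;
   they exist only so gimple_match_op::resimplify can run the match.pd
   patterns over the two comparisons.  E.g. when the simplified result is
   lhs1 itself, the first comparison is rebuilt as a GENERIC tree; when a
   pattern produces a fresh comparison on other operands, that comparison
   is returned directly.  */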
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (tree type,
			    enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
    return t;

  if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
    return t;

  if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
						     op1a, op1b, code2, op2a,
						     op2b))
    return t;

  return NULL_TREE;
}
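
/* Illustrative usage sketch, kept out of the build: the function name is
   hypothetical, and A and B are assumed to be valid GENERIC operands of
   a compatible integer type.  */
#if 0
static tree
example_fold_range_test (tree a, tree b)
{
  /* Try (a <= b) && (a >= b) => a == b; NULL_TREE when nothing folds.  */
  return maybe_fold_and_comparisons (boolean_type_node,
				     LE_EXPR, a, b,
				     GE_EXPR, a, b);
}
#endif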
/* Helper function for or_comparisons_1: try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree type, tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (type, stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (tree type, gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (type, innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner2, false, code2, op2a,
					  op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner1, false, code2, op2a,
					  op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  else
	    partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (type, op1a, invert, code2, op2a,
					 op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (type, arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (tree type,
			   enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b)
{
  if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
    return t;

  if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
    return t;

  if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
						     op1a, op1b, code2, op2a,
						     op2b))
    return t;

  return NULL_TREE;
}
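
/* Analogously to the AND case, for integer operands:
       (x < y) OR (x == y)  =>  x <= y
       (x < y) OR (x >= y)  =>  true
   via or_comparisons_1/combine_comparisons, with the match.pd fallback
   catching pairs those helpers miss.  */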
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant expression, or a
   constant is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  gimple_match_op res_op;
  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (&res_op))
	res = res_op.ops[0];
      else if (mprts_hook)
	res = mprts_hook (&res_op);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  poly_int64 offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && known_eq (CONSTRUCTOR_NELTS (rhs),
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i, nelts;
		  tree val;

		  nelts = CONSTRUCTOR_NELTS (rhs);
		  tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec.quick_push (val);
		      else
			return NULL_TREE;
		    }

		  return vec.build ();
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    /* Translate &x + CST into an invariant form suitable for
	       further propagation.  */
	    if (subcode == POINTER_PLUS_EXPR)
	      {
		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		if (TREE_CODE (op0) == ADDR_EXPR
		    && TREE_CODE (op1) == INTEGER_CST)
		  {
		    tree off = fold_convert (ptr_type_node, op1);
		    return build1_loc
		      (loc, ADDR_EXPR, TREE_TYPE (op0),
		       fold_build2 (MEM_REF,
				    TREE_TYPE (TREE_TYPE (op0)),
				    unshare_expr (op0), off));
		  }
	      }
	    /* Canonicalize bool != 0 and bool == 0 appearing after
	       valueization.  While gimple_simplify handles this
	       it can get confused by the ~X == 1 -> X == 0 transform
	       which we can't reduce to a SSA name or a constant
	       (and we have no way to tell gimple_simplify to not
	       consider those transforms in the first place).  */
	    else if (subcode == EQ_EXPR
		     || subcode == NE_EXPR)
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree op0 = gimple_assign_rhs1 (stmt);
		if (useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (op0)))
		  {
		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		    op0 = (*valueize) (op0);
		    if (TREE_CODE (op0) == INTEGER_CST)
		      std::swap (op0, op1);
		    if (TREE_CODE (op1) == INTEGER_CST
			&& ((subcode == NE_EXPR && integer_zerop (op1))
			    || (subcode == EQ_EXPR && integer_onep (op1))))
		      return op0;
		  }
	      }
	    return NULL_TREE;

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
	      return fold_ternary_loc (loc, subcode,
				       TREE_TYPE (gimple_assign_lhs (stmt)),
				       op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      case IFN_BUILTIN_EXPECT:
		{
		  tree arg0 = gimple_call_arg (stmt, 0);
		  tree op0 = (*valueize) (arg0);
		  if (TREE_CODE (op0) == INTEGER_CST)
		    return op0;
		  return NULL_TREE;
		}
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && fndecl_built_in_p (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt),
					      args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, poly_int64_pod *bit_offset,
		      tree (*valueize) (tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
      if (!boff.to_shwi (bit_offset))
	return NULL_TREE;

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
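
/* For illustration: for "static const int a[2] = { 1, 2 };" a lookup of
   MEM[&a + 4] finds the { 1, 2 } CONSTRUCTOR with *BIT_OFFSET adjusted
   by 32 bits, while "static const int b;" yields error_mark_node,
   meaning the initializer is known to be all zeros.  */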
/* CTOR is CONSTRUCTOR of an array type.  Fold a reference of SIZE bits
   to the memory at bit OFFSET.  When non-null, TYPE is the expected
   type of the reference; otherwise the type of the referenced element
   is used instead.  When SIZE is zero, attempt to fold a reference to
   the entire element which OFFSET refers to.  Increment *SUBOFF by
   the bit offset of the accessed element.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl,
			   unsigned HOST_WIDE_INT *suboff)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* When TYPE is non-null, verify that it specifies a constant-sized
     access of a multiple of the array element size.  Avoid division
     by zero below when ELT_SIZE is zero, such as with the result of
     an initializer for a zero-length array or an empty struct.  */
  if (elt_size == 0
      || (type
	  && (!TYPE_SIZE_UNIT (type)
	      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
  if (size > elt_sz * BITS_PER_UNIT)
    {
      /* native_encode_expr constraints.  */
      if (size > MAX_BITSIZE_MODE_ANY_MODE
	  || size % BITS_PER_UNIT != 0
	  || inner_offset % BITS_PER_UNIT != 0
	  || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
	return NULL_TREE;

      unsigned ctor_idx;
      tree val = get_array_ctor_element_at_index (ctor, access_index,
						  &ctor_idx);
      if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
	return build_zero_cst (type);

      /* native-encode adjacent ctor elements.  */
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      unsigned bufoff = 0;
      offset_int index = 0;
      offset_int max_index = access_index;
      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
      if (!val)
	val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
      else if (!CONSTANT_CLASS_P (val))
	return NULL_TREE;
      if (!elt->index)
	;
      else if (TREE_CODE (elt->index) == RANGE_EXPR)
	{
	  index = wi::to_offset (TREE_OPERAND (elt->index, 0));
	  max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
	}
      else
	index = max_index = wi::to_offset (elt->index);
      index = wi::umax (index, access_index);
      do
	{
	  if (bufoff + elt_sz > sizeof (buf))
	    elt_sz = sizeof (buf) - bufoff;
	  int len = native_encode_expr (val, buf + bufoff, elt_sz,
					inner_offset / BITS_PER_UNIT);
	  if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
	    return NULL_TREE;
	  inner_offset = 0;
	  bufoff += len;

	  access_index += 1;
	  if (wi::cmpu (access_index, index) == 0)
	    val = elt->value;
	  else if (wi::cmpu (access_index, max_index) > 0)
	    {
	      ctor_idx++;
	      if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
		{
		  val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		  ++max_index;
		}
	      else
		{
		  elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
		  index = 0;
		  max_index = access_index;
		  if (!elt->index)
		    ;
		  else if (TREE_CODE (elt->index) == RANGE_EXPR)
		    {
		      index = wi::to_offset (TREE_OPERAND (elt->index, 0));
		      max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
		    }
		  else
		    index = max_index = wi::to_offset (elt->index);
		  index = wi::umax (index, access_index);
		  if (wi::cmpu (access_index, index) == 0)
		    val = elt->value;
		  else
		    val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		}
	    }
	}
      while (bufoff < size / BITS_PER_UNIT);
      *suboff += size;
      return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }

  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    {
      if (!size && TREE_CODE (val) != CONSTRUCTOR)
	{
	  /* For the final reference to the entire accessed element
	     (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
	     may be null) in favor of the type of the element, and set
	     SIZE to the size of the accessed element.  */
	  inner_offset = 0;
	  type = TREE_TYPE (val);
	  size = elt_sz * BITS_PER_UNIT;
	}
      else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
	       && TREE_CODE (val) == CONSTRUCTOR
	       && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
	/* If this isn't the last element in the CTOR and a CTOR itself
	   and it does not cover the whole object we are requesting give up
	   since we're not set up for combining from multiple CTORs.  */
	return NULL_TREE;

      *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
      return fold_ctor_reference (type, val, inner_offset, size, from_decl,
				  suboff);
    }

  /* Memory not explicitly mentioned in constructor is 0 (or
     the reference is out of range).  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
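
/* Worked example: for
       static const int a[4] = { 10, 20, 30, 40 };
   a 32-bit read at bit OFFSET 64 gives
       access_index = 64 / (4 * 8) = 2,  inner_offset = 0
   and folds to 30.  A 64-bit read at OFFSET 32 instead spans two
   elements and goes through the native_encode_expr buffer path above.  */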
/* CTOR is CONSTRUCTOR of an aggregate or vector.  Fold a reference
   of SIZE bits to the memory at bit OFFSET.  When non-null, TYPE
   is the expected type of the reference; otherwise the type of
   the referenced member is used instead.  When SIZE is zero,
   attempt to fold a reference to the entire member which OFFSET
   refers to.  Increment *SUBOFF by the bit offset of the
   accessed member.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl,
			      unsigned HOST_WIDE_INT *suboff)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);

      if (!field_size)
	{
	  /* Determine the size of the flexible array member from
	     the size of the initializer provided for it.  */
	  field_size = TYPE_SIZE (TREE_TYPE (cval));
	}

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      offset_int bitoffset
	= (wi::to_offset (field_offset)
	   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      offset_int bitoffset_end;
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      /* Compute the bit offset of the end of the desired access.
	 As a special case, if the size of the desired access is
	 zero, assume the access is to the entire field (and let
	 the caller make any necessary adjustments by storing
	 the actual bounds of the field in FIELDBOUNDS).  */
      offset_int access_end = offset_int (offset);
      if (size)
	access_end += size;
      else
	access_end = bitoffset_end;

      /* Is there any overlap between the desired access at
	 [OFFSET, OFFSET+SIZE) and the offset of the field within
	 the object at [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  *suboff += bitoffset.to_uhwi ();

	  if (!size && TREE_CODE (cval) != CONSTRUCTOR)
	    {
	      /* For the final reference to the entire accessed member
		 (SIZE is zero), reset OFFSET, disregard TYPE (which may
		 be null) in favor of the type of the member, and set
		 SIZE to the size of the accessed member.  */
	      offset = bitoffset.to_uhwi ();
	      type = TREE_TYPE (cval);
	      size = (bitoffset_end - bitoffset).to_uhwi ();
	    }

	  /* We do have overlap.  Now see if the field is large enough
	     to cover the access.  Give up for accesses that extend
	     beyond the end of the object or that span multiple fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;

	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl, suboff);
	}
    }

  if (!type)
    return NULL_TREE;

  return build_zero_cst (type);
}
/* CTOR is value initializing memory.  Fold a reference of TYPE and
   bit size POLY_SIZE to the memory at bit POLY_OFFSET.  When POLY_SIZE
   is zero, attempt to fold a reference to the entire subobject
   which OFFSET refers to.  This is used when folding accesses to
   string members of aggregates.  When non-null, set *SUBOFF to
   the bit offset of the accessed subobject.  */

tree
fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
		     const poly_uint64 &poly_size, tree from_decl,
		     unsigned HOST_WIDE_INT *suboff /* = NULL */)
{
  tree ret;

  /* We found the field with exact match.  */
  if (type
      && useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* The remaining optimizations need a constant size and offset.  */
  unsigned HOST_WIDE_INT size, offset;
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
    return NULL_TREE;

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }

  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && offset / BITS_PER_UNIT <= INT_MAX
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE
      && can_native_interpret_type_p (type))
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT dummy = 0;
      if (!suboff)
	suboff = &dummy;

      tree ret;
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	ret = fold_array_ctor_reference (type, ctor, offset, size,
					 from_decl, suboff);
      else
	ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
					    from_decl, suboff);

      /* Fall back to native_encode_initializer.  Needs to be done
	 only in the outermost fold_ctor_reference call (because it itself
	 recurses into CONSTRUCTORs) and doesn't update suboff.  */
      if (ret == NULL_TREE
	  && suboff == &dummy
	  && BITS_PER_UNIT == 8
	  && offset % BITS_PER_UNIT == 0
	  && offset / BITS_PER_UNIT <= INT_MAX
	  && size % BITS_PER_UNIT == 0
	  && size <= MAX_BITSIZE_MODE_ANY_MODE
	  && can_native_interpret_type_p (type))
	{
	  unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	  int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
					       offset / BITS_PER_UNIT);
	  if (len > 0)
	    return native_interpret_expr (type, buf, len);
	}

      return ret;
    }

  return NULL_TREE;
}
8101 /* Return the tree representing the element referenced by T if T is an
8102 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
8103 names using VALUEIZE. Return NULL_TREE otherwise. */
8106 fold_const_aggregate_ref_1 (tree t
, tree (*valueize
) (tree
))
8108 tree ctor
, idx
, base
;
8109 poly_int64 offset
, size
, max_size
;
8113 if (TREE_THIS_VOLATILE (t
))
8117 return get_symbol_constant_value (t
);
8119 tem
= fold_read_from_constant_string (t
);
  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
         Only special case variable offsets.
         FIXME: This code can't handle nested references with variable indexes
         (they will be handled only by iteration of ccp).  Perhaps we can bring
         get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
          && valueize
          && (idx = (*valueize) (TREE_OPERAND (t, 1)))
          && poly_int_tree_p (idx))
        {
          tree low_bound, unit_size;

          /* If the resulting bit-offset is constant, track it.  */
          if ((low_bound = array_ref_low_bound (t),
               poly_int_tree_p (low_bound))
              && (unit_size = array_ref_element_size (t),
                  tree_fits_uhwi_p (unit_size)))
            {
              poly_offset_int woffset
                = wi::sext (wi::to_poly_offset (idx)
                            - wi::to_poly_offset (low_bound),
                            TYPE_PRECISION (sizetype));
              woffset *= tree_to_uhwi (unit_size);
              woffset *= BITS_PER_UNIT;
              if (woffset.to_shwi (&offset))
                {
                  base = TREE_OPERAND (t, 0);
                  ctor = get_base_constructor (base, &offset, valueize);
                  /* Empty constructor.  Always fold to 0.  */
                  if (ctor == error_mark_node)
                    return build_zero_cst (TREE_TYPE (t));
                  /* Out of bound array access.  Value is undefined,
                     but don't fold.  */
                  if (maybe_lt (offset, 0))
                    return NULL_TREE;
                  /* We cannot determine ctor.  */
                  if (!ctor)
                    return NULL_TREE;
                  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
                                              tree_to_uhwi (unit_size)
                                              * BITS_PER_UNIT,
                                              base);
                }
            }
        }
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
        return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
        return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
        return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
        return NULL_TREE;

      tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
      if (tem)
        return tem;
      /* For bit field reads try to read the representative and
         adjust.  */
      if (TREE_CODE (t) == COMPONENT_REF
          && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
          && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
        {
          HOST_WIDE_INT csize, coffset;
          tree field = TREE_OPERAND (t, 1);
          tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
          if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
              && size.is_constant (&csize)
              && offset.is_constant (&coffset)
              && (coffset % BITS_PER_UNIT != 0
                  || csize % BITS_PER_UNIT != 0)
              && !reverse
              && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
            {
              poly_int64 bitoffset;
              poly_uint64 field_offset, repr_offset;
              if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
                  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
                bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
              else
                bitoffset = 0;
              bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
                            - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
              HOST_WIDE_INT bitoff;
              int diff = (TYPE_PRECISION (TREE_TYPE (repr))
                          - TYPE_PRECISION (TREE_TYPE (field)));
              if (bitoffset.is_constant (&bitoff)
                  && bitoff >= 0
                  && bitoff <= diff)
                {
                  offset -= bitoff;
                  size = tree_to_uhwi (DECL_SIZE (repr));

                  tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
                                             size, base);
                  if (tem && TREE_CODE (tem) == INTEGER_CST)
                    {
                      if (!BYTES_BIG_ENDIAN)
                        tem = wide_int_to_tree (TREE_TYPE (field),
                                                wi::lrshift (wi::to_wide (tem),
                                                             bitoff));
                      else
                        tem = wide_int_to_tree (TREE_TYPE (field),
                                                wi::lrshift (wi::to_wide (tem),
                                                             diff - bitoff));
                      return tem;
                    }
                }
            }
        }
      break;
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1_loc (EXPR_LOCATION (t),
                                  TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Fold a constant aggregate reference T to a constant if possible,
   without valueizing SSA names.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
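/* For example (an illustrative sketch, not part of the original sources):
   given
     static const int c[4] = { 1, 2, 3, 4 };
   folding the reference c[2] on a target with 32-bit int finds the
   CONSTRUCTOR of c via get_base_constructor and extracts the element
   at bit offset 2 * 32 = 64 through fold_ctor_reference, yielding the
   constant 3.  */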
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
                                   tree v,
                                   unsigned HOST_WIDE_INT offset,
                                   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }
  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that the ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;
  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
                               offset, size, NULL);
     but in constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
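  /* For example (an illustrative worked case, not part of the original
     sources): with 8-byte vtable slots (ELT_SIZE == 8, so SIZE == 64
     bits), a byte OFFSET of 16 and TOKEN == 3, the bit offset becomes
     16 * 8 + 3 * 64 = 320 and ACCESS_INDEX = 320 / 8 / 8 = 5, i.e. the
     two slots covered by OFFSET are skipped and the fourth method entry
     is selected.  */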
  /* The C++ FE can now produce indexed fields, and we check if the indexes
     match.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
      gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
    }
  else
    fn = NULL;

  /* For a type-inconsistent program we may end up looking up a virtual
     method in a virtual table that does not contain TOKEN entries.  We may
     overrun the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
         devirtualize.  This can happen in WHOPR when the actual method
         ends up in other partition, because we found devirtualization
         possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
        {
          if (can_refer)
            {
              *can_refer = false;
              return fn;
            }
          return NULL_TREE;
        }
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
                                  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
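/* For example (an illustrative sketch, not part of the original sources):
   for a C++ class B whose binfo records BINFO_VTABLE as &_ZTV1B + 16,
   vtable_pointer_value_to_vtable splits that value into the VAR_DECL
   _ZTV1B and byte offset 16, and the method for TOKEN is then looked up
   in that table by gimple_get_virt_method_for_vtable.  */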
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
          && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (TREE_CODE (min_val) == INTEGER_CST)
            return build4 (ARRAY_REF, type, op, min_val,
                           NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }
  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
          && tree_fits_uhwi_p (off))
        {
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);
          if (known_lt (offset / part_widthi,
                        TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
                                part_width, index);
        }

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
          && useless_type_conversion_p (type,
                                        TREE_TYPE (TREE_TYPE (addrtype))))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
          || DECL_P (TREE_OPERAND (addr, 0)))
        return fold_build2 (MEM_REF, type,
                            addr,
                            wide_int_to_tree (ptype, wi::to_wide (off)));
    }
  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (!sub)
        sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
        return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
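/* For example (an illustrative sketch, not part of the original sources):
   given
     v4si x;               // a vector of four 32-bit ints
     int *p = (int *) &x;
   an indirection *(p + 4) is simplified by the POINTER_PLUS_EXPR case
   above to BIT_FIELD_REF <x, 32, 32>, i.e. the second vector element:
   OFF is 4 bytes, so INDEXI = 4 * 8 = 32 bits and PART_WIDTH = 32.  */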
/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case ABS_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}
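/* For example (an illustrative note, not part of the original sources):
   a signed PLUS_EXPR such as a_1 = b_2 + c_3 qualifies because the
   addition can be carried out in the corresponding unsigned type, where
   overflow wraps, and the result converted back.  TRUNC_DIV_EXPR does
   not qualify: unsigned division computes a different value for negative
   inputs, so the operation cannot simply be re-expressed in the unsigned
   type.  */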
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
               "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
    gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
  else
    for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
      {
        tree op = gimple_op (stmt, i);
        op = gimple_convert (&stmts, type, op);
        gimple_set_op (stmt, i, op);
      }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_set_modified (stmt, true);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
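/* For example (an illustrative sketch, not part of the original sources):
   a statement
     x_3 = a_1 + b_2;
   with signed int operands is rewritten into the sequence
     _4 = (unsigned int) a_1;
     _5 = (unsigned int) b_2;
     _6 = _4 + _5;
     x_3 = (int) _6;
   so the addition happens in unsigned arithmetic, where overflow is
   well-defined as wrapping.  */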
/* The valueization hook we use for the gimple_build API simplification.
   This makes us match fold_buildN behavior by only combining with
   statements in the sequence(s) we are currently building.  */

static tree
gimple_build_valueize (tree op)
{
  if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
    return op;
  return NULL_TREE;
}
/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum tree_code code, tree type, tree op0)
{
  tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == REALPART_EXPR
          || code == IMAGPART_EXPR
          || code == VIEW_CONVERT_EXPR)
        stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
        stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum tree_code code, tree type, tree op0, tree op1)
{
  tree res = gimple_simplify (code, type, op0, op1, seq,
                              gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt = gimple_build_assign (res, code, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
                              seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == BIT_FIELD_REF)
        stmt = gimple_build_assign (res, code,
                                    build3 (code, type, op0, op1, op2));
      else
        stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN () with a result of type TYPE (or no result if TYPE is
   void) with a location LOC.  Returns the built expression value (or NULL_TREE
   if TYPE is void) and appends statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn, tree type)
{
  tree res = NULL_TREE;
  gcall *stmt;
  if (internal_fn_p (fn))
    stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
  else
    {
      tree decl = builtin_decl_implicit (as_builtin_fn (fn));
      stmt = gimple_build_call (decl, 0);
    }
  if (!VOID_TYPE_P (type))
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple_call_set_lhs (stmt, res);
    }
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (seq, stmt);
  return res;
}
/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
              tree type, tree arg0)
{
  tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
        stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
      else
        {
          tree decl = builtin_decl_implicit (as_builtin_fn (fn));
          stmt = gimple_build_call (decl, 1, arg0);
        }
      if (!VOID_TYPE_P (type))
        {
          res = create_tmp_reg_or_ssa_name (type);
          gimple_call_set_lhs (stmt, res);
        }
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
              tree type, tree arg0, tree arg1)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq,
                              gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
        stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
      else
        {
          tree decl = builtin_decl_implicit (as_builtin_fn (fn));
          stmt = gimple_build_call (decl, 2, arg0, arg1);
        }
      if (!VOID_TYPE_P (type))
        {
          res = create_tmp_reg_or_ssa_name (type);
          gimple_call_set_lhs (stmt, res);
        }
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
              tree type, tree arg0, tree arg1, tree arg2)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
                              seq, gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
        stmt = gimple_build_call_internal (as_internal_fn (fn),
                                           3, arg0, arg1, arg2);
      else
        {
          tree decl = builtin_decl_implicit (as_builtin_fn (fn));
          stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
        }
      if (!VOID_TYPE_P (type))
        {
          res = create_tmp_reg_or_ssa_name (type);
          gimple_call_set_lhs (stmt, res);
        }
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
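/* Example use of the gimple_build API (an illustrative sketch, not part
   of the original sources; GSI, LOC, TYPE, A, B and MASK are assumed to
   exist in the caller): emit (a + b) & mask before GSI, folding on the
   fly as the statements are built:

     gimple_seq stmts = NULL;
     tree sum = gimple_build (&stmts, loc, PLUS_EXPR, type, a, b);
     tree val = gimple_build (&stmts, loc, BIT_AND_EXPR, type, sum, mask);
     gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);

   If the operands are constants, gimple_simplify may fold the whole
   expression and STMTS stays empty; otherwise the helper statements are
   appended and VAL names the result.  */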
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (seq, loc, NOP_EXPR, type, op);
}
/* Build the conversion (ptrofftype) OP with a result of a type
   compatible with ptrofftype with location LOC if such conversion
   is necessary in GIMPLE, simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
{
  if (ptrofftype_p (TREE_TYPE (op)))
    return op;
  return gimple_convert (seq, loc, sizetype, op);
}
/* Build a vector of type TYPE in which each element has the value OP.
   Return a gimple value for the result, appending any new statements
   to SEQ.  */

tree
gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
                              tree op)
{
  if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
      && !CONSTANT_CLASS_P (op))
    return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);

  tree res, vec = build_vector_from_val (type, op);
  if (is_gimple_val (vec))
    return vec;
  if (gimple_in_ssa_p (cfun))
    res = make_ssa_name (type);
  else
    res = create_tmp_reg (type);
  gimple *stmt = gimple_build_assign (res, vec);
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (seq, stmt);
  return res;
}
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, appending any
   new instructions to SEQ.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc,
                     tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!CONSTANT_CLASS_P ((*builder)[i]))
      {
        tree type = builder->type ();
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
        vec<constructor_elt, va_gc> *v;
        vec_alloc (v, nelts);
        for (i = 0; i < nelts; ++i)
          CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

        tree res;
        if (gimple_in_ssa_p (cfun))
          res = make_ssa_name (type);
        else
          res = create_tmp_reg (type);
        gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
        gimple_set_location (stmt, loc);
        gimple_seq_add_stmt_without_update (seq, stmt);
        return res;
      }
  return builder->build ();
}
/* Emit gimple statements into SEQ that take a value given in OLD_SIZE
   and generate a value guaranteed to be rounded upwards to ALIGN.

   Return the tree node representing this size; it is of TREE_TYPE TYPE.  */

tree
gimple_build_round_up (gimple_seq *seq, location_t loc, tree type,
                       tree old_size, unsigned HOST_WIDE_INT align)
{
  unsigned HOST_WIDE_INT tg_mask = align - 1;
  /* tree new_size = (old_size + tg_mask) & ~tg_mask;  */
  gcc_assert (INTEGRAL_TYPE_P (type));
  tree tree_mask = build_int_cst (type, tg_mask);
  tree oversize = gimple_build (seq, loc, PLUS_EXPR, type, old_size,
                                tree_mask);

  tree mask = build_int_cst (type, -align);
  return gimple_build (seq, loc, BIT_AND_EXPR, type, oversize, mask);
}
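/* For example (an illustrative worked case, not part of the original
   sources): with OLD_SIZE == 27 and ALIGN == 16, TG_MASK is 15, so
   OVERSIZE is 27 + 15 = 42 and the result is 42 & ~15 == 32, the
   smallest multiple of 16 that is >= 27.  ALIGN must be a power of two
   for the mask trick to be valid.  */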
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
                                             type,
                                             gimple_assign_rhs1 (stmt),
                                             strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
                                              type,
                                              gimple_assign_rhs1 (stmt),
                                              gimple_assign_rhs2 (stmt),
                                              strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
                                              strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}

/* Return true if return value of call STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                 int depth)
{
  tree arg0 = gimple_call_num_args (stmt) > 0 ?
    gimple_call_arg (stmt, 0) : NULL_TREE;
  tree arg1 = gimple_call_num_args (stmt) > 1 ?
    gimple_call_arg (stmt, 1) : NULL_TREE;
  tree lhs = gimple_call_lhs (stmt);
  return (lhs
          && tree_call_nonnegative_warnv_p (TREE_TYPE (lhs),
                                            gimple_call_combined_fn (stmt),
                                            arg0, arg1,
                                            strict_overflow_p, depth));
}
/* Return true if the result of phi STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
        return false;
    }
  return true;
}
/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                 int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
                                                depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
                                              depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
                                             depth);
    default:
      return false;
    }
}
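/* For example (an illustrative note, not part of the original sources):
   an assignment _1 = x_2 * x_2 with signed x_2 is non-negative only
   because signed overflow is assumed to be undefined (with wrapping
   arithmetic the product can wrap to a negative value), so the query
   reports that assumption through *STRICT_OVERFLOW_P; a PHI result is
   non-negative only when every incoming argument is.  */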
/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
                                          gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
                                           gimple_assign_rhs1 (stmt),
                                           gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}

/* Return true if the floating-point value computed by call STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_integer_valued_real_p (gimple *stmt, int depth)
{
  tree arg0 = (gimple_call_num_args (stmt) > 0
               ? gimple_call_arg (stmt, 0)
               : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
               ? gimple_call_arg (stmt, 1)
               : NULL_TREE);
  return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
                                     arg0, arg1, depth);
}
/* Return true if the floating-point result of phi STMT is known to have
   an integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!integer_valued_real_single_p (arg, depth + 1))
        return false;
    }
  return true;
}
/* Return true if the floating-point value computed by STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_integer_valued_real_p (stmt, depth);
    case GIMPLE_CALL:
      return gimple_call_integer_valued_real_p (stmt, depth);
    case GIMPLE_PHI:
      return gimple_phi_integer_valued_real_p (stmt, depth);